template.go

// Package chat provides chat template formatting for LLM prompts.
package chat

import (
	"strings"
)

// FormatQwen formats messages for Qwen/ChatML style models.
// Uses the <|im_start|>role\ncontent<|im_end|> format.
func FormatQwen(messages []Message, addGenerationPrompt bool) string {
	out, _ := (&Qwen3Renderer{}).Render(messages, Options{AddGenerationPrompt: addGenerationPrompt})
	return out
}

// FormatLlama formats messages for Llama 3 style models.
// Note: it currently delegates to the same ChatML renderer as FormatQwen.
func FormatLlama(messages []Message, addGenerationPrompt bool) string {
	out, _ := (&Qwen3Renderer{}).Render(messages, Options{AddGenerationPrompt: addGenerationPrompt})
	return out
}

// Format automatically selects a format based on the model type.
func Format(messages []Message, modelType string, addGenerationPrompt bool) string {
	switch {
	case strings.Contains(strings.ToLower(modelType), "qwen"):
		return FormatQwen(messages, addGenerationPrompt)
	case strings.Contains(strings.ToLower(modelType), "llama"):
		return FormatLlama(messages, addGenerationPrompt)
	default:
		return FormatQwen(messages, addGenerationPrompt) // Default to ChatML
	}
}
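// Usage sketch (assumptions: Message is a struct with Role and Content
// string fields, and Qwen3Renderer/Options are defined elsewhere in this
// package; adjust the field names to the actual definitions):
//
//	msgs := []Message{
//		{Role: "system", Content: "You are a helpful assistant."},
//		{Role: "user", Content: "What is the capital of France?"},
//	}
//	prompt := Format(msgs, "Qwen2.5-7B-Instruct", true)
//	// prompt holds the ChatML-formatted conversation; because
//	// addGenerationPrompt is true, it ends with the assistant header so
//	// the model continues with the assistant's reply.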