| 12345678910111213141516171819202122232425262728293031 |
- // Package chat provides chat template formatting for LLM prompts
- package chat
- import (
- "strings"
- )
- // FormatQwen formats messages for Qwen/ChatML style models
- // Uses role\ncontent format
- func FormatQwen(messages []Message, addGenerationPrompt bool) string {
- out, _ := (&Qwen3Renderer{}).Render(messages, Options{AddGenerationPrompt: addGenerationPrompt})
- return out
- }
// FormatLlama formats messages for Llama 3 style models.
//
// NOTE(review): despite the name, this currently delegates to
// Qwen3Renderer (ChatML-style output), identical to FormatQwen — no
// Llama-specific renderer is used here. Confirm whether a dedicated
// Llama 3 renderer exists and should be wired in, or whether the
// ChatML fallback is intentional. The render error is discarded to
// keep the convenience signature returning only a string.
func FormatLlama(messages []Message, addGenerationPrompt bool) string {
	out, _ := (&Qwen3Renderer{}).Render(messages, Options{AddGenerationPrompt: addGenerationPrompt})
	return out
}
- // Format automatically selects format based on model type
- func Format(messages []Message, modelType string, addGenerationPrompt bool) string {
- switch {
- case strings.Contains(strings.ToLower(modelType), "qwen"):
- return FormatQwen(messages, addGenerationPrompt)
- case strings.Contains(strings.ToLower(modelType), "llama"):
- return FormatLlama(messages, addGenerationPrompt)
- default:
- return FormatQwen(messages, addGenerationPrompt) // Default to ChatML
- }
- }
|