Improve title generation prompt performance

The previous prompt was utterly broken with Anthropic models; instead of
generating a title, they would just try to continue the conversation.
Matt Low 2024-03-17 20:12:53 +00:00
parent 5e880d3b31
commit c2c61e2aaa


@@ -118,11 +118,18 @@ func HandleConversationReply(ctx *lmcli.Context, c *model.Conversation, persist
 func FormatForExternalPrompt(messages []model.Message, system bool) string {
 	sb := strings.Builder{}
 	for _, message := range messages {
-		if message.Role != model.MessageRoleUser && (message.Role != model.MessageRoleSystem || !system) {
+		if message.Content == "" {
 			continue
 		}
-		sb.WriteString(fmt.Sprintf("<%s>\n", message.Role.FriendlyRole()))
-		sb.WriteString(fmt.Sprintf("\"\"\"\n%s\n\"\"\"\n\n", message.Content))
+		switch message.Role {
+		case model.MessageRoleAssistant, model.MessageRoleToolCall:
+			sb.WriteString("Assistant:\n\n")
+		case model.MessageRoleUser:
+			sb.WriteString("User:\n\n")
+		default:
+			continue
+		}
+		sb.WriteString(fmt.Sprintf("%s", lipgloss.NewStyle().PaddingLeft(1).Render(message.Content)))
 	}
 	return sb.String()
 }
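
Side note (not part of the diff): a minimal sketch of how the reworked formatter behaves, assuming the package's existing fmt, model, and lipgloss imports; the message literals are hypothetical.

    // Hypothetical usage sketch. Only user, assistant, and tool-call messages
    // with non-empty content are rendered; any other role falls through the
    // switch's default case, so system messages are skipped and the `system`
    // argument no longer has an effect in this function.
    messages := []model.Message{
        {Role: model.MessageRoleUser, Content: "Hello!"},
        {Role: model.MessageRoleAssistant, Content: "Hello! How may I assist you?"},
    }
    // Each kept message becomes a "User:"/"Assistant:" heading followed by its
    // content, padded one column to the left by lipgloss.
    fmt.Println(FormatForExternalPrompt(messages, false))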
@@ -133,13 +140,32 @@ func GenerateTitle(ctx *lmcli.Context, c *model.Conversation) (string, error) {
 		return "", err
 	}
 
-	const header = "Generate a concise 4-5 word title for the conversation below."
-	prompt := fmt.Sprintf("%s\n\n---\n\n%s", header, FormatForExternalPrompt(messages, false))
+	const prompt = `Above is an excerpt from a conversation between a user and AI assistant. Please reply with a short title (no more than 8 words) that reflects the topic of the conversation, read from the user's perspective.
+
+Example conversation:
+"""
+User:
+
+Hello!
+
+Assistant:
+
+Hello! How may I assist you?
+"""
+
+Example response:
+"""
+Title: A brief introduction
+"""
+`
+
+	conversation := FormatForExternalPrompt(messages, false)
 
 	generateRequest := []model.Message{
 		{
 			Role:    model.MessageRoleUser,
-			Content: prompt,
+			Content: fmt.Sprintf("\"\"\"\n%s\n\"\"\"\n\n%s", conversation, prompt),
 		},
 	}
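
For reference (not part of the diff): a sketch of the single user message the new GenerateTitle assembles; the conversation text is hypothetical. The quoted excerpt comes first and the instructions follow it, which is why the prompt now opens with "Above is an excerpt...".

    // Hypothetical illustration of the request content built above.
    content := fmt.Sprintf("\"\"\"\n%s\n\"\"\"\n\n%s", conversation, prompt)
    // content reads roughly:
    //
    //   """
    //   User:
    //
    //    What should I name my cat?
    //   Assistant:
    //
    //    How about "Turing"?
    //   """
    //
    //   Above is an excerpt from a conversation between a user and AI assistant. ...
    _ = content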
@@ -158,12 +184,15 @@ func GenerateTitle(ctx *lmcli.Context, c *model.Conversation) (string, error) {
 		return "", err
 	}
 
+	response = strings.TrimPrefix(response, "Title: ")
+	response = strings.Trim(response, "\"")
+
 	return response, nil
 }
 
 // ShowWaitAnimation prints an animated ellipses to stdout until something is
 // received on the signal channel. An empty string sent to the channel to
-// noftify the caller that the animation has completed (carriage returned).
+// notify the caller that the animation has completed (carriage returned).
 func ShowWaitAnimation(signal chan any) {
 	// Save the current cursor position
 	fmt.Print("\033[s")
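
One more note on the two Trim calls added above (sketch only; the raw reply strings are hypothetical): a model that imitates the example may answer with or without the "Title: " prefix and surrounding quotes, and both forms normalize to the same title.

    // Hypothetical raw replies and the titles the cleanup would yield.
    for _, raw := range []string{
        `Title: "Improve title generation prompt"`,
        "Improve title generation prompt",
    } {
        title := strings.TrimPrefix(raw, "Title: ")
        title = strings.Trim(title, "\"")
        fmt.Println(title) // both iterations print: Improve title generation prompt
    }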