From c2c61e2aaa09c1f07f0987224627fbaadd3becdf Mon Sep 17 00:00:00 2001
From: Matt Low
Date: Sun, 17 Mar 2024 20:12:53 +0000
Subject: [PATCH] Improve title generation prompt performance

The previous prompt was utterly broken with Anthropic models; they would
just try to continue the conversation.
---
 pkg/cmd/util/util.go | 43 ++++++++++++++++++++++++++++++++++++-------
 1 file changed, 36 insertions(+), 7 deletions(-)

diff --git a/pkg/cmd/util/util.go b/pkg/cmd/util/util.go
index e5fb923..62b71cb 100644
--- a/pkg/cmd/util/util.go
+++ b/pkg/cmd/util/util.go
@@ -118,11 +118,18 @@ func HandleConversationReply(ctx *lmcli.Context, c *model.Conversation, persist
 func FormatForExternalPrompt(messages []model.Message, system bool) string {
 	sb := strings.Builder{}
 	for _, message := range messages {
-		if message.Role != model.MessageRoleUser && (message.Role != model.MessageRoleSystem || !system) {
+		if message.Content == "" {
 			continue
 		}
-		sb.WriteString(fmt.Sprintf("<%s>\n", message.Role.FriendlyRole()))
-		sb.WriteString(fmt.Sprintf("\"\"\"\n%s\n\"\"\"\n\n", message.Content))
+		switch message.Role {
+		case model.MessageRoleAssistant, model.MessageRoleToolCall:
+			sb.WriteString("Assistant:\n\n")
+		case model.MessageRoleUser:
+			sb.WriteString("User:\n\n")
+		default:
+			continue
+		}
+		sb.WriteString(fmt.Sprintf("%s", lipgloss.NewStyle().PaddingLeft(1).Render(message.Content)))
 	}
 	return sb.String()
 }
@@ -133,13 +140,32 @@ func GenerateTitle(ctx *lmcli.Context, c *model.Conversation) (string, error) {
 		return "", err
 	}
 
-	const header = "Generate a concise 4-5 word title for the conversation below."
-	prompt := fmt.Sprintf("%s\n\n---\n\n%s", header, FormatForExternalPrompt(messages, false))
+	const prompt = `Above is an excerpt from a conversation between a user and AI assistant. Please reply with a short title (no more than 8 words) that reflects the topic of the conversation, read from the user's perspective.
+
+Example conversation:
+
+"""
+User:
+
+ Hello!
+
+Assistant:
+
+ Hello! How may I assist you?
+"""
+
+Example response:
+
+"""
+Title: A brief introduction
+"""
+`
+	conversation := FormatForExternalPrompt(messages, false)
 
 	generateRequest := []model.Message{
 		{
 			Role:    model.MessageRoleUser,
-			Content: prompt,
+			Content: fmt.Sprintf("\"\"\"\n%s\n\"\"\"\n\n%s", conversation, prompt),
 		},
 	}
 
@@ -158,12 +184,15 @@ func GenerateTitle(ctx *lmcli.Context, c *model.Conversation) (string, error) {
 		return "", err
 	}
 
+	response = strings.TrimPrefix(response, "Title: ")
+	response = strings.Trim(response, "\"")
+
 	return response, nil
 }
 
 // ShowWaitAnimation prints an animated ellipses to stdout until something is
 // received on the signal channel. An empty string sent to the channel to
-// noftify the caller that the animation has completed (carriage returned).
+// notify the caller that the animation has completed (carriage returned).
 func ShowWaitAnimation(signal chan any) {
 	// Save the current cursor position
 	fmt.Print("\033[s")
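
Note (not part of the patch): a minimal standalone sketch of the new behavior. The conversation string and model reply below are made-up sample values; only the Sprintf layout of the request content and the TrimPrefix/Trim cleanup are taken from the change above.

package main

import (
	"fmt"
	"strings"
)

func main() {
	// Instruction text from the patch (the few-shot example block is omitted here).
	const prompt = "Above is an excerpt from a conversation between a user and AI assistant. " +
		"Please reply with a short title (no more than 8 words) that reflects the topic " +
		"of the conversation, read from the user's perspective."

	// Hypothetical output of FormatForExternalPrompt for a short exchange.
	conversation := "User:\n\n Hello!\n\nAssistant:\n\n Hello! How may I assist you?\n"

	// Request content: the quoted conversation comes first, the instruction after it.
	content := fmt.Sprintf("\"\"\"\n%s\n\"\"\"\n\n%s", conversation, prompt)
	fmt.Println(content)

	// Cleanup of a hypothetical model reply: strip a leading "Title: " prefix
	// and any surrounding double quotes before using it as the title.
	response := "Title: \"A brief introduction\""
	response = strings.TrimPrefix(response, "Title: ")
	response = strings.Trim(response, "\"")
	fmt.Println(response) // A brief introduction
}

Per the commit message, placing the instruction after the quoted excerpt is intended to stop Anthropic models from simply continuing the conversation instead of titling it.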