Private
Public Access
1
0

Add message branching

Updated the behaviour of commands:

- `lmcli edit`
  - by default, create a new message branch containing the edited contents
  - add --in-place to avoid creating a branch
  - no longer delete messages after the edited message
  - only perform the edit; don't fetch a new response
- `lmcli retry`
  - create a new branch rather than replacing old messages
  - add --offset to change where to retry from
This commit is contained in:
2024-05-20 18:12:44 +00:00
parent f6e55f6bff
commit 8c53752146
16 changed files with 505 additions and 308 deletions

View File

@@ -73,43 +73,57 @@ func LookupConversationE(ctx *lmcli.Context, shortName string) (*model.Conversat
return c, nil
}
func HandleConversationReply(ctx *lmcli.Context, c *model.Conversation, persist bool, toSend ...model.Message) {
messages, err := ctx.Store.PathToLeaf(c.SelectedRoot)
if err != nil {
lmcli.Fatal("Could not load messages: %v\n", err)
}
HandleReply(ctx, &messages[len(messages)-1], persist, toSend...)
}
// handleConversationReply handles sending messages to an existing
// conversation, optionally persisting both the sent replies and responses.
func HandleConversationReply(ctx *lmcli.Context, c *model.Conversation, persist bool, toSend ...model.Message) {
existing, err := ctx.Store.Messages(c)
if err != nil {
lmcli.Fatal("Could not retrieve messages for conversation: %s\n", c.Title)
func HandleReply(ctx *lmcli.Context, to *model.Message, persist bool, messages ...model.Message) {
if to == nil {
lmcli.Fatal("Can't prompt from an empty message.")
}
if persist {
for _, message := range toSend {
err = ctx.Store.SaveMessage(&message)
if err != nil {
lmcli.Warn("Could not save %s message: %v\n", message.Role, err)
}
existing, err := ctx.Store.PathToRoot(to)
if err != nil {
lmcli.Fatal("Could not load messages: %v\n", err)
}
RenderConversation(ctx, append(existing, messages...), true)
var savedReplies []model.Message
if persist && len(messages) > 0 {
savedReplies, err = ctx.Store.Reply(to, messages...)
if err != nil {
lmcli.Warn("Could not save messages: %v\n", err)
}
}
allMessages := append(existing, toSend...)
RenderConversation(ctx, allMessages, true)
// render a message header with no contents
RenderMessage(ctx, (&model.Message{Role: model.MessageRoleAssistant}))
var lastSavedMessage *model.Message
lastSavedMessage = to
if len(savedReplies) > 0 {
lastSavedMessage = &savedReplies[len(savedReplies)-1]
}
replyCallback := func(reply model.Message) {
if !persist {
return
}
reply.ConversationID = c.ID
err = ctx.Store.SaveMessage(&reply)
savedReplies, err = ctx.Store.Reply(lastSavedMessage, reply)
if err != nil {
lmcli.Warn("Could not save reply: %v\n", err)
}
lastSavedMessage = &savedReplies[0]
}
_, err = Prompt(ctx, allMessages, replyCallback)
_, err = Prompt(ctx, append(existing, messages...), replyCallback)
if err != nil {
lmcli.Fatal("Error fetching LLM response: %v\n", err)
}
@@ -134,12 +148,7 @@ func FormatForExternalPrompt(messages []model.Message, system bool) string {
return sb.String()
}
func GenerateTitle(ctx *lmcli.Context, c *model.Conversation) (string, error) {
messages, err := ctx.Store.Messages(c)
if err != nil {
return "", err
}
func GenerateTitle(ctx *lmcli.Context, messages []model.Message) (string, error) {
const prompt = `Above is an excerpt from a conversation between a user and AI assistant. Please reply with a short title (no more than 8 words) that reflects the topic of the conversation, read from the user's perspective.
Example conversation: