Matt Low
3fde58b77d
- More emphasis on the `api` package. It now holds the database model structs from `lmcli/models` (which is now gone) as well as the tool spec, call, and result types. `tools.Tool` is now `api.ToolSpec`, and `api.ChatCompletionClient` was renamed to `api.ChatCompletionProvider`.
- Changed the ChatCompletion interface and its implementations to no longer do automatic tool call recursion: they simply return a ToolCall message, which the caller can decide what to do with (e.g. prompt for user confirmation before executing). See the sketch below.
- `api.ChatCompletionProvider` functions have had their ReplyCallback parameter removed, as they now only return a single reply.
- Added a top-level `agent` package and moved the current built-in tool implementations under `agent/toolbox`. `tools.ExecuteToolCalls` is now `agent.ExecuteToolCalls`.
- Fixed request context handling in the openai, google, and ollama providers (use `NewRequestWithContext`) and cleaned up request cancellation in the TUI.
- Fixed a tool call persistence bug in the TUI (messages with empty content were being skipped).
- Tool calling is now handled from the TUI layer.

TODO:
- Prompt users before executing tool calls
- Automatically send tool results to the model (or make this toggleable)
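Because the provider no longer recurses on tool calls, the caller now owns that decision. Below is a minimal sketch of what the hand-off could look like, assuming a `CreateChatCompletion` method, a `ToolCalls` field on `api.Message`, a `confirmToolCalls` helper, and a `pkg/agent` import path; these are illustrative guesses, and only `api.ChatCompletionProvider`, `api.Message`, and `agent.ExecuteToolCalls` come from this change.

```go
// Sketch only: CreateChatCompletion, ToolCalls, and confirmToolCalls are
// assumed names used to illustrate the non-recursive flow; they are not
// necessarily the real lmcli API. The agent import path is also assumed.
import (
	"context"

	"git.mlow.ca/mlow/lmcli/pkg/agent"
	"git.mlow.ca/mlow/lmcli/pkg/api"
)

func handleCompletion(ctx context.Context, provider api.ChatCompletionProvider, messages []api.Message) ([]api.Message, error) {
	// The provider now returns a single reply, which may be a tool-call message.
	reply, err := provider.CreateChatCompletion(ctx, messages) // assumed signature
	if err != nil {
		return nil, err
	}
	messages = append(messages, reply)

	if len(reply.ToolCalls) > 0 { // assumed field carrying the requested calls
		// First TODO from the commit: prompt the user before executing.
		if !confirmToolCalls(reply.ToolCalls) { // hypothetical confirmation helper
			return messages, nil
		}
		// ExecuteToolCalls moved to the agent package; its signature is assumed.
		results, err := agent.ExecuteToolCalls(reply.ToolCalls)
		if err != nil {
			return nil, err
		}
		// Second TODO: the tool results could then be sent back to the model.
		messages = append(messages, results...)
	}
	return messages, nil
}
```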
63 lines
1.6 KiB
Go
package cmd

import (
	"fmt"

	"git.mlow.ca/mlow/lmcli/pkg/api"
	cmdutil "git.mlow.ca/mlow/lmcli/pkg/cmd/util"
	"git.mlow.ca/mlow/lmcli/pkg/lmcli"
	"github.com/spf13/cobra"
)

func NewCmd(ctx *lmcli.Context) *cobra.Command {
	cmd := &cobra.Command{
		Use:   "new [message]",
		Short: "Start a new conversation",
		Long:  `Start a new conversation with the Large Language Model.`,
		RunE: func(cmd *cobra.Command, args []string) error {
			input := inputFromArgsOrEditor(args, "# Start a new conversation below\n", "")
			if input == "" {
				return fmt.Errorf("No message was provided.")
			}

			var messages []api.Message

			// TODO: probably just make this part of the conversation
			system := ctx.GetSystemPrompt()
			if system != "" {
				messages = append(messages, api.Message{
					Role:    api.MessageRoleSystem,
					Content: system,
				})
			}

			messages = append(messages, api.Message{
				Role:    api.MessageRoleUser,
				Content: input,
			})

			conversation, messages, err := ctx.Store.StartConversation(messages...)
			if err != nil {
				return fmt.Errorf("Could not start a new conversation: %v", err)
			}

			cmdutil.HandleReply(ctx, &messages[len(messages)-1], true)

			title, err := cmdutil.GenerateTitle(ctx, messages)
			if err != nil {
				lmcli.Warn("Could not generate title for conversation %s: %v\n", conversation.ShortName.String, err)
			}

			conversation.Title = title
			err = ctx.Store.UpdateConversation(conversation)
			if err != nil {
				lmcli.Warn("Could not save conversation title: %v\n", err)
			}
			return nil
		},
	}

	applyPromptFlags(ctx, cmd)
	return cmd
}
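For context, a constructor like `NewCmd` is typically attached to the root cobra command. A rough sketch of that wiring follows, assuming a `lmcli.NewContext()` initializer; only cobra's `AddCommand` and `Execute` are standard API here, and the project's real entry point may differ.

```go
// Sketch: registering the command on a root command in the same cmd package.
// lmcli.NewContext is an assumed constructor, not confirmed by this file.
func Execute() error {
	ctx, err := lmcli.NewContext()
	if err != nil {
		return err
	}

	root := &cobra.Command{Use: "lmcli"}
	root.AddCommand(NewCmd(ctx))
	return root.Execute()
}
```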