Matt Low
3fde58b77d
- More emphasis on `api` package. It now holds database model structs from `lmcli/models` (which is now gone) as well as the tool spec, call, and result types. `tools.Tool` is now `api.ToolSpec`. `api.ChatCompletionClient` was renamed to `api.ChatCompletionProvider`. - Change ChatCompletion interface and implementations to no longer do automatic tool call recursion - they simply return a ToolCall message which the caller can decide what to do with (e.g. prompt for user confirmation before executing) - `api.ChatCompletionProvider` functions have had their ReplyCallback parameter removed, as now they only return a single reply. - Added a top-level `agent` package, moved the current built-in tools implementations under `agent/toolbox`. `tools.ExecuteToolCalls` is now `agent.ExecuteToolCalls`. - Fixed request context handling in openai, google, ollama (use `NewRequestWithContext`), cleaned up request cancellation in TUI - Fix tool call TUI persistence bug (we were skipping messages with empty content) - Now handle tool calling from TUI layer TODO: - Prompt users before executing tool calls - Automatically send tool results to the model (or make this toggleable)
50 lines
1.1 KiB
Go
50 lines
1.1 KiB
Go
package cmd
|
|
|
|
import (
|
|
"fmt"
|
|
|
|
"git.mlow.ca/mlow/lmcli/pkg/api"
|
|
cmdutil "git.mlow.ca/mlow/lmcli/pkg/cmd/util"
|
|
"git.mlow.ca/mlow/lmcli/pkg/lmcli"
|
|
"github.com/spf13/cobra"
|
|
)
|
|
|
|
func PromptCmd(ctx *lmcli.Context) *cobra.Command {
|
|
cmd := &cobra.Command{
|
|
Use: "prompt [message]",
|
|
Short: "Do a one-shot prompt",
|
|
Long: `Prompt the Large Language Model and get a response.`,
|
|
RunE: func(cmd *cobra.Command, args []string) error {
|
|
input := inputFromArgsOrEditor(args, "# Write your prompt below\n", "")
|
|
if input == "" {
|
|
return fmt.Errorf("No message was provided.")
|
|
}
|
|
|
|
var messages []api.Message
|
|
|
|
// TODO: stop supplying system prompt as a message
|
|
system := ctx.GetSystemPrompt()
|
|
if system != "" {
|
|
messages = append(messages, api.Message{
|
|
Role: api.MessageRoleSystem,
|
|
Content: system,
|
|
})
|
|
}
|
|
|
|
messages = append(messages, api.Message{
|
|
Role: api.MessageRoleUser,
|
|
Content: input,
|
|
})
|
|
|
|
_, err := cmdutil.Prompt(ctx, messages, nil)
|
|
if err != nil {
|
|
return fmt.Errorf("Error fetching LLM response: %v", err)
|
|
}
|
|
return nil
|
|
},
|
|
}
|
|
|
|
applyPromptFlags(ctx, cmd)
|
|
return cmd
|
|
}
|