Matt Low
2b38db7db7
`lmcli chat` now supports common prompt flags (model, length, system prompt, etc)
45 lines
1000 B
Go
package cmd

import (
	"fmt"

	cmdutil "git.mlow.ca/mlow/lmcli/pkg/cmd/util"
	"git.mlow.ca/mlow/lmcli/pkg/lmcli"
	"git.mlow.ca/mlow/lmcli/pkg/lmcli/model"
	"github.com/spf13/cobra"
)

func PromptCmd(ctx *lmcli.Context) *cobra.Command {
	cmd := &cobra.Command{
		Use:   "prompt [message]",
		Short: "Do a one-shot prompt",
		Long:  `Prompt the Large Language Model and get a response.`,
		RunE: func(cmd *cobra.Command, args []string) error {
			message := inputFromArgsOrEditor(args, "# What would you like to say?\n", "")
			if message == "" {
				return fmt.Errorf("No message was provided.")
			}

			messages := []model.Message{
				{
					Role:    model.MessageRoleSystem,
					Content: ctx.GetSystemPrompt(),
				},
				{
					Role:    model.MessageRoleUser,
					Content: message,
				},
			}

			_, err := cmdutil.Prompt(ctx, messages, nil)
			if err != nil {
				return fmt.Errorf("Error fetching LLM response: %v", err)
			}
			return nil
		},
	}

	applyPromptFlags(ctx, cmd)
	return cmd
}
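The applyPromptFlags helper called above is defined elsewhere in the repo; per the commit message, it registers the common prompt flags (model, length, system prompt, etc.) shared by the prompt and chat commands. A minimal sketch of such a helper, assuming it lives in the same package, is shown below; the flag names and defaults here are illustrative assumptions, not the actual lmcli flag set.

// Hypothetical sketch only: the real applyPromptFlags is not shown in this file,
// and the flag names below are assumptions based on the commit message.
func applyPromptFlags(ctx *lmcli.Context, cmd *cobra.Command) {
	f := cmd.Flags()
	// Each flag, when set, would override the corresponding default from ctx/config.
	f.String("model", "", "Model to use for the completion")
	f.Int("length", 0, "Maximum response length in tokens (0 = provider default)")
	f.String("system-prompt", "", "System prompt to use for the conversation")
	f.String("system-prompt-file", "", "Read the system prompt from a file")
}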