Add --model flag completion
commit 7d56726c78
parent f2c7d2bdd0
@@ -38,6 +38,9 @@ func RootCmd(ctx *lmcli.Context) *cobra.Command {
 	inputCmds := []*cobra.Command{newCmd, promptCmd, replyCmd, retryCmd, continueCmd, editCmd}
 	for _, cmd := range inputCmds {
 		cmd.Flags().StringVar(ctx.Config.Defaults.Model, "model", *ctx.Config.Defaults.Model, "Which model to use")
+		cmd.RegisterFlagCompletionFunc("model", func(*cobra.Command, []string, string) ([]string, cobra.ShellCompDirective) {
+			return ctx.GetModels(), cobra.ShellCompDirectiveDefault
+		})
 		cmd.Flags().IntVar(ctx.Config.Defaults.MaxTokens, "length", *ctx.Config.Defaults.MaxTokens, "Maximum response tokens")
 		cmd.Flags().StringVar(ctx.Config.Defaults.SystemPrompt, "system-prompt", *ctx.Config.Defaults.SystemPrompt, "System prompt")
 		cmd.Flags().StringVar(&systemPromptFile, "system-prompt-file", "", "A path to a file containing the system prompt")
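For reference, cobra's RegisterFlagCompletionFunc attaches a dynamic completion callback to a named flag, which is the mechanism this hunk uses for --model. A minimal, self-contained sketch of the same wiring, assuming github.com/spf13/cobra is available (the listModels helper and model names are illustrative, not from this repository):

package main

import (
	"fmt"

	"github.com/spf13/cobra"
)

// listModels stands in for ctx.GetModels(); the model names are made up.
func listModels() []string {
	return []string{"gpt-4o", "claude-3-5-sonnet"}
}

func main() {
	var model string

	cmd := &cobra.Command{
		Use: "prompt",
		Run: func(cmd *cobra.Command, args []string) {
			fmt.Println("using model:", model)
		},
	}
	cmd.Flags().StringVar(&model, "model", "", "Which model to use")

	// The callback runs when the shell requests completions for --model.
	cmd.RegisterFlagCompletionFunc("model", func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
		return listModels(), cobra.ShellCompDirectiveDefault
	})

	cmd.Execute()
}

ShellCompDirectiveDefault, as used in the commit, lets the shell fall back to its normal (file) completion in addition to the returned names; ShellCompDirectiveNoFileComp would limit suggestions to the returned list alone.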
@@ -42,6 +42,16 @@ func NewContext() (*Context, error) {
 	return &Context{*config, s, hl}, nil
 }
 
+func (c *Context) GetModels() (models []string) {
+	for _, m := range *c.Config.Anthropic.Models {
+		models = append(models, m)
+	}
+	for _, m := range *c.Config.OpenAI.Models {
+		models = append(models, m)
+	}
+	return
+}
+
 func (c *Context) GetCompletionProvider(model string) (provider.ChatCompletionClient, error) {
 	for _, m := range *c.Config.Anthropic.Models {
 		if m == model {
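GetModels above simply concatenates the configured Anthropic and OpenAI model names, and the completion callback in the first hunk returns that full list. A possible refinement, not part of this commit, is to filter by the prefix the user has already typed, which the callback receives as its toComplete argument. A self-contained sketch with illustrative names:

package main

import (
	"strings"

	"github.com/spf13/cobra"
)

// allModels stands in for the combined Anthropic + OpenAI lists that
// Context.GetModels() concatenates; the names are made up.
var allModels = []string{"gpt-4o", "gpt-4o-mini", "claude-3-5-sonnet"}

func modelCompletions(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
	var matches []string
	for _, m := range allModels {
		// Only offer models that match the prefix typed so far.
		if strings.HasPrefix(m, toComplete) {
			matches = append(matches, m)
		}
	}
	return matches, cobra.ShellCompDirectiveNoFileComp
}

func main() {
	var model string
	cmd := &cobra.Command{Use: "prompt", Run: func(*cobra.Command, []string) {}}
	cmd.Flags().StringVar(&model, "model", "", "Which model to use")
	cmd.RegisterFlagCompletionFunc("model", modelCompletions)
	cmd.Execute()
}

With cobra's built-in completion command left enabled (the default for root commands with subcommands), either variant takes effect once the shell's completion script is regenerated.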