Private
Public Access
1
0

Add validation to command line flags + update system prompt handling

Renamed `applyPromptFlags` to `applyGenerationFlags` and added
`validateGenerationFlags`
This commit is contained in:
2024-06-23 04:47:47 +00:00
parent 677cfcfebf
commit f89cc7b410
11 changed files with 90 additions and 44 deletions

View File

@@ -10,10 +10,11 @@ import (
type Config struct {
Defaults *struct {
SystemPrompt *string `yaml:"systemPrompt" default:"You are a helpful assistant."`
MaxTokens *int `yaml:"maxTokens" default:"256"`
Temperature *float32 `yaml:"temperature" default:"0.7"`
Model *string `yaml:"model" default:"gpt-4"`
SystemPromptFile string `yaml:"systemPromptFile,omitempty"`
SystemPrompt *string `yaml:"systemPrompt" default:"You are a helpful assistant."`
MaxTokens *int `yaml:"maxTokens" default:"256"`
Temperature *float32 `yaml:"temperature" default:"0.2"`
Model *string `yaml:"model" default:"gpt-4"`
} `yaml:"defaults"`
Conversations *struct {
TitleGenerationModel *string `yaml:"titleGenerationModel" default:"gpt-3.5-turbo"`
@@ -22,10 +23,10 @@ type Config struct {
EnabledTools []string `yaml:"enabledTools"`
} `yaml:"tools"`
Providers []*struct {
Name *string `yaml:"name"`
Name *string `yaml:"name,omitempty"`
Kind *string `yaml:"kind"`
BaseURL *string `yaml:"baseUrl"`
APIKey *string `yaml:"apiKey"`
BaseURL *string `yaml:"baseUrl,omitempty"`
APIKey *string `yaml:"apiKey,omitempty"`
Models *[]string `yaml:"models"`
} `yaml:"providers"`
Chroma *struct {
@@ -68,3 +69,17 @@ func NewConfig(configFile string) (*Config, error) {
return c, nil
}
// GetSystemPrompt returns the system prompt to use for generation.
// A configured Defaults.SystemPromptFile takes precedence over the inline
// Defaults.SystemPrompt value; when neither is set, an empty string is
// returned.
func (c *Config) GetSystemPrompt() string {
// Prefer reading the prompt from a file when one is configured.
if c.Defaults.SystemPromptFile != "" {
content, err := util.ReadFileContents(c.Defaults.SystemPromptFile)
if err != nil {
// NOTE(review): `Fatal` is unqualified here — presumably a package-level
// helper; confirm it is actually in scope in this package (the same call
// in the removed Context method lived in a different package).
Fatal("Could not read file contents at %s: %v\n", c.Defaults.SystemPromptFile, err)
}
return content
}
// Fall back to the inline prompt; nil means "not configured".
if c.Defaults.SystemPrompt == nil {
return ""
}
return *c.Defaults.SystemPrompt
}

View File

@@ -12,7 +12,6 @@ import (
"git.mlow.ca/mlow/lmcli/pkg/api/provider/google"
"git.mlow.ca/mlow/lmcli/pkg/api/provider/ollama"
"git.mlow.ca/mlow/lmcli/pkg/api/provider/openai"
"git.mlow.ca/mlow/lmcli/pkg/util"
"git.mlow.ca/mlow/lmcli/pkg/util/tty"
"gorm.io/driver/sqlite"
"gorm.io/gorm"
@@ -24,8 +23,6 @@ type Context struct {
Chroma *tty.ChromaHighlighter
EnabledTools []api.ToolSpec
SystemPromptFile string
}
func NewContext() (*Context, error) {
@@ -57,7 +54,7 @@ func NewContext() (*Context, error) {
}
}
return &Context{config, store, chroma, enabledTools, ""}, nil
return &Context{config, store, chroma, enabledTools}, nil
}
func (c *Context) GetModels() (models []string) {
@@ -139,17 +136,6 @@ func (c *Context) GetModelProvider(model string) (string, api.ChatCompletionProv
return "", nil, fmt.Errorf("unknown model: %s", model)
}
// GetSystemPrompt returns the system prompt, preferring the contents of
// c.SystemPromptFile when it is set over the configured default.
// (This is the method being removed by this commit in favor of
// Config.GetSystemPrompt.)
func (c *Context) GetSystemPrompt() string {
if c.SystemPromptFile != "" {
content, err := util.ReadFileContents(c.SystemPromptFile)
if err != nil {
Fatal("Could not read file contents at %s: %v\n", c.SystemPromptFile, err)
}
return content
}
// NOTE(review): unguarded dereference — panics if Defaults.SystemPrompt is
// nil. The replacement Config.GetSystemPrompt adds a nil check.
return *c.Config.Defaults.SystemPrompt
}
func configDir() string {
var configDir string