Private
Public Access
1
0

Update command flag handling

`lmcli chat` now supports common prompt flags (model, length, system
prompt, etc.).
This commit is contained in:
2024-05-07 07:11:04 +00:00
parent 8e4ff90ab4
commit 2b38db7db7
11 changed files with 74 additions and 71 deletions

View File

@@ -10,17 +10,20 @@ import (
"git.mlow.ca/mlow/lmcli/pkg/lmcli/provider/anthropic"
"git.mlow.ca/mlow/lmcli/pkg/lmcli/provider/openai"
"git.mlow.ca/mlow/lmcli/pkg/lmcli/tools"
"git.mlow.ca/mlow/lmcli/pkg/util"
"git.mlow.ca/mlow/lmcli/pkg/util/tty"
"gorm.io/driver/sqlite"
"gorm.io/gorm"
)
type Context struct {
Config *Config
Config *Config // may be updated at runtime
Store ConversationStore
Chroma *tty.ChromaHighlighter
EnabledTools []model.Tool
SystemPromptFile string
}
func NewContext() (*Context, error) {
@@ -50,7 +53,7 @@ func NewContext() (*Context, error) {
}
}
return &Context{config, store, chroma, enabledTools}, nil
return &Context{config, store, chroma, enabledTools, ""}, nil
}
func (c *Context) GetModels() (models []string) {
@@ -96,6 +99,17 @@ func (c *Context) GetCompletionProvider(model string) (provider.ChatCompletionCl
return nil, fmt.Errorf("unknown model: %s", model)
}
func (c *Context) GetSystemPrompt() string {
if c.SystemPromptFile != "" {
content, err := util.ReadFileContents(c.SystemPromptFile)
if err != nil {
Fatal("Could not read file contents at %s: %v\n", c.SystemPromptFile, err)
}
return content
}
return *c.Config.Defaults.SystemPrompt
}
func configDir() string {
var configDir string

View File

@@ -32,13 +32,13 @@ type Conversation struct {
}
type RequestParameters struct {
Model string
Model string
MaxTokens int
Temperature float32
TopP float32
SystemPrompt string
ToolBag []Tool
ToolBag []Tool
}
func (m *MessageRole) IsAssistant() bool {

View File

@@ -19,7 +19,6 @@ func buildRequest(params model.RequestParameters, messages []model.Message) Requ
requestBody := Request{
Model: params.Model,
Messages: make([]Message, len(messages)),
System: params.SystemPrompt,
MaxTokens: params.MaxTokens,
Temperature: params.Temperature,
Stream: false,