Compare commits


4 Commits

SHA1        Message                                               Date
045146bb5c  Moved flag                                            2024-03-12 08:03:04 +00:00
2c7bdd8ebf  Store enabled tools in lmcli.Context                  2024-03-12 08:01:53 +00:00
7d56726c78  Add --model flag completion                           2024-03-12 07:43:57 +00:00
f2c7d2bdd0  Store ChromaHighlighter in lmcli.Context and use it   2024-03-12 07:43:40 +00:00
            (In preparation for TUI)
4 changed files with 36 additions and 28 deletions

View File

@@ -38,14 +38,15 @@ func RootCmd(ctx *lmcli.Context) *cobra.Command {
 	inputCmds := []*cobra.Command{newCmd, promptCmd, replyCmd, retryCmd, continueCmd, editCmd}
 	for _, cmd := range inputCmds {
 		cmd.Flags().StringVar(ctx.Config.Defaults.Model, "model", *ctx.Config.Defaults.Model, "Which model to use")
+		cmd.RegisterFlagCompletionFunc("model", func(*cobra.Command, []string, string) ([]string, cobra.ShellCompDirective) {
+			return ctx.GetModels(), cobra.ShellCompDirectiveDefault
+		})
 		cmd.Flags().IntVar(ctx.Config.Defaults.MaxTokens, "length", *ctx.Config.Defaults.MaxTokens, "Maximum response tokens")
 		cmd.Flags().StringVar(ctx.Config.Defaults.SystemPrompt, "system-prompt", *ctx.Config.Defaults.SystemPrompt, "System prompt")
 		cmd.Flags().StringVar(&systemPromptFile, "system-prompt-file", "", "A path to a file containing the system prompt")
 		cmd.MarkFlagsMutuallyExclusive("system-prompt", "system-prompt-file")
 	}

-	renameCmd.Flags().Bool("generate", false, "Generate a conversation title")
-
 	root.AddCommand(
 		cloneCmd,
 		continueCmd,
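
For reference, a self-contained sketch of the cobra flag-completion pattern used above; the hard-coded model list is an illustrative stand-in for ctx.GetModels():

package main

import (
	"fmt"

	"github.com/spf13/cobra"
)

func main() {
	// Illustrative stand-in for ctx.GetModels(); values are examples only.
	models := []string{"gpt-4-turbo-preview", "claude-3-opus-20240229"}

	cmd := &cobra.Command{
		Use: "prompt",
		Run: func(cmd *cobra.Command, args []string) {
			m, _ := cmd.Flags().GetString("model")
			fmt.Println("using model:", m)
		},
	}
	cmd.Flags().String("model", models[0], "Which model to use")
	// Shell completion values for --model, same shape as the registration above.
	cmd.RegisterFlagCompletionFunc("model", func(*cobra.Command, []string, string) ([]string, cobra.ShellCompDirective) {
		return models, cobra.ShellCompDirectiveDefault
	})
	cmd.Execute()
}

With shell completion installed, pressing TAB after --model offers the values returned by the completion function.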

View File

@@ -56,5 +56,7 @@ func RenameCmd(ctx *lmcli.Context) *cobra.Command {
 		},
 	}

+	cmd.Flags().Bool("generate", false, "Generate a conversation title")
+
 	return cmd
 }
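
Since --generate is now registered on the command itself, its Run function can read the flag with cobra's GetBool. A hedged, self-contained sketch of that pattern (the surrounding command is hypothetical, not taken from this diff):

package main

import (
	"fmt"

	"github.com/spf13/cobra"
)

func main() {
	cmd := &cobra.Command{
		Use: "rename",
		Run: func(cmd *cobra.Command, args []string) {
			// Read the boolean flag registered below.
			generate, _ := cmd.Flags().GetBool("generate")
			fmt.Println("generate a title:", generate)
		},
	}
	cmd.Flags().Bool("generate", false, "Generate a conversation title")
	cmd.Execute()
}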

View File

@@ -2,16 +2,13 @@ package util

 import (
 	"fmt"
-	"io"
 	"os"
 	"strings"
 	"time"

 	"git.mlow.ca/mlow/lmcli/pkg/lmcli"
 	"git.mlow.ca/mlow/lmcli/pkg/lmcli/model"
-	"git.mlow.ca/mlow/lmcli/pkg/lmcli/tools"
 	"git.mlow.ca/mlow/lmcli/pkg/util"
-	"github.com/alecthomas/chroma/v2/quick"
 	"github.com/charmbracelet/lipgloss"
 )
@@ -29,19 +26,11 @@ func FetchAndShowCompletion(ctx *lmcli.Context, messages []model.Message) ([]mod
 		return nil, err
 	}

-	var toolBag []model.Tool
-	for _, toolName := range *ctx.Config.Tools.EnabledTools {
-		tool, ok := tools.AvailableTools[toolName]
-		if ok {
-			toolBag = append(toolBag, tool)
-		}
-	}
-
 	requestParams := model.RequestParameters{
 		Model:       *ctx.Config.Defaults.Model,
 		MaxTokens:   *ctx.Config.Defaults.MaxTokens,
 		Temperature: *ctx.Config.Defaults.Temperature,
-		ToolBag:     toolBag,
+		ToolBag:     ctx.EnabledTools,
 	}

 	var apiReplies []model.Message
@@ -240,12 +229,6 @@ func RenderConversation(ctx *lmcli.Context, messages []model.Message, spaceForRe
 	}
 }

-// HighlightMarkdown applies syntax highlighting to the provided markdown text
-// and writes it to stdout.
-func HighlightMarkdown(w io.Writer, markdownText string, formatter string, style string) error {
-	return quick.Highlight(w, markdownText, "md", formatter, style)
-}
-
 func RenderMessage(ctx *lmcli.Context, m *model.Message) {
 	var messageAge string
 	if m.CreatedAt.IsZero() {
@@ -274,11 +257,7 @@ func RenderMessage(ctx *lmcli.Context, m *model.Message) {
 	fmt.Printf("%s %s - %s %s\n\n", separator, role, timestamp, separator)
 	if m.Content != "" {
-		HighlightMarkdown(
-			os.Stdout, m.Content,
-			*ctx.Config.Chroma.Formatter,
-			*ctx.Config.Chroma.Style,
-		)
+		ctx.Chroma.Highlight(os.Stdout, m.Content)
 		fmt.Println()
 	}
 }
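
The removed HighlightMarkdown helper was a thin wrapper over chroma's quick.Highlight with a fixed "md" lexer; ctx.Chroma.Highlight presumably does the same with the formatter and style captured at construction time. A minimal sketch of that underlying call (the "terminal16m" formatter and "monokai" style are illustrative values, not lmcli's defaults):

package main

import (
	"os"

	"github.com/alecthomas/chroma/v2/quick"
)

func main() {
	source := "# Hello\n\nSome *markdown* with `code`.\n"
	// Same call shape as the removed helper: writer, source, lexer, formatter, style.
	if err := quick.Highlight(os.Stdout, source, "md", "terminal16m", "monokai"); err != nil {
		panic(err)
	}
}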

View File

@@ -5,16 +5,22 @@ import (
 	"os"
 	"path/filepath"

+	"git.mlow.ca/mlow/lmcli/pkg/lmcli/model"
 	"git.mlow.ca/mlow/lmcli/pkg/lmcli/provider"
 	"git.mlow.ca/mlow/lmcli/pkg/lmcli/provider/anthropic"
 	"git.mlow.ca/mlow/lmcli/pkg/lmcli/provider/openai"
+	"git.mlow.ca/mlow/lmcli/pkg/lmcli/tools"
+	"git.mlow.ca/mlow/lmcli/pkg/util/tty"
 	"gorm.io/driver/sqlite"
 	"gorm.io/gorm"
 )

 type Context struct {
-	Config Config
+	Config *Config
 	Store  ConversationStore
+	Chroma *tty.ChromaHighlighter
+
+	EnabledTools []model.Tool
 }

 func NewContext() (*Context, error) {
@@ -29,12 +35,32 @@ func NewContext() (*Context, error) {
 	if err != nil {
 		return nil, fmt.Errorf("Error establishing connection to store: %v", err)
 	}
-	s, err := NewSQLStore(db)
+	store, err := NewSQLStore(db)
 	if err != nil {
 		Fatal("%v\n", err)
 	}

-	return &Context{*config, s}, nil
+	chroma := tty.NewChromaHighlighter("markdown", *config.Chroma.Formatter, *config.Chroma.Style)
+
+	var enabledTools []model.Tool
+	for _, toolName := range *config.Tools.EnabledTools {
+		tool, ok := tools.AvailableTools[toolName]
+		if ok {
+			enabledTools = append(enabledTools, tool)
+		}
+	}
+
+	return &Context{config, store, chroma, enabledTools}, nil
+}
+
+func (c *Context) GetModels() (models []string) {
+	for _, m := range *c.Config.Anthropic.Models {
+		models = append(models, m)
+	}
+	for _, m := range *c.Config.OpenAI.Models {
+		models = append(models, m)
+	}
+	return
 }

 func (c *Context) GetCompletionProvider(model string) (provider.ChatCompletionClient, error) {
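
The tty.ChromaHighlighter type itself is not part of this diff. Based on how it is constructed and used above, a minimal sketch of such a wrapper, assuming it simply captures the lexer, formatter, and style and defers to chroma's quick.Highlight (as the removed HighlightMarkdown did), might look like:

package tty

import (
	"io"

	"github.com/alecthomas/chroma/v2/quick"
)

// ChromaHighlighter bundles a chroma lexer, formatter, and style so callers
// can highlight content without re-reading configuration on every call.
// (Sketch only; the real type in pkg/util/tty is not shown in this diff.)
type ChromaHighlighter struct {
	lexer     string
	formatter string
	style     string
}

func NewChromaHighlighter(lexer, formatter, style string) *ChromaHighlighter {
	return &ChromaHighlighter{lexer: lexer, formatter: formatter, style: style}
}

// Highlight writes the syntax-highlighted source to w.
func (c *ChromaHighlighter) Highlight(w io.Writer, source string) error {
	return quick.Highlight(w, source, c.lexer, c.formatter, c.style)
}

Storing this on the Context keeps per-message rendering down to the single ctx.Chroma.Highlight(os.Stdout, m.Content) call seen in the util changes above.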