Compare commits
1 commit
f3c17c9a9b...aaa9b3d503

Author | SHA1 | Date
---|---|---
 | aaa9b3d503 |
@@ -39,15 +39,14 @@ func RootCmd(ctx *lmcli.Context) *cobra.Command {
	inputCmds := []*cobra.Command{newCmd, promptCmd, replyCmd, retryCmd, continueCmd, editCmd}
	for _, cmd := range inputCmds {
		cmd.Flags().StringVar(ctx.Config.Defaults.Model, "model", *ctx.Config.Defaults.Model, "Which model to use")
		cmd.RegisterFlagCompletionFunc("model", func(*cobra.Command, []string, string) ([]string, cobra.ShellCompDirective) {
			return ctx.GetModels(), cobra.ShellCompDirectiveDefault
		})
		cmd.Flags().IntVar(ctx.Config.Defaults.MaxTokens, "length", *ctx.Config.Defaults.MaxTokens, "Maximum response tokens")
		cmd.Flags().StringVar(ctx.Config.Defaults.SystemPrompt, "system-prompt", *ctx.Config.Defaults.SystemPrompt, "System prompt")
		cmd.Flags().StringVar(&systemPromptFile, "system-prompt-file", "", "A path to a file containing the system prompt")
		cmd.MarkFlagsMutuallyExclusive("system-prompt", "system-prompt-file")
	}

	renameCmd.Flags().Bool("generate", false, "Generate a conversation title")

	root.AddCommand(
		chatCmd,
		cloneCmd,
@@ -56,7 +56,5 @@ func RenameCmd(ctx *lmcli.Context) *cobra.Command {
		},
	}

	cmd.Flags().Bool("generate", false, "Generate a conversation title")

	return cmd
}
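For readers less familiar with cobra, here is a minimal, self-contained sketch of the flag registration, shell completion, and mutually exclusive flag pattern the hunks above rely on. It is not code from this commit; the command name, defaults, and completion candidates are made up.

package main

import (
	"fmt"

	"github.com/spf13/cobra"
)

func main() {
	// Hypothetical flag target, mirroring the *string targets used in RootCmd above.
	var model string

	cmd := &cobra.Command{
		Use: "example",
		Run: func(cmd *cobra.Command, args []string) {
			fmt.Println("model:", model)
		},
	}

	// Register the flag, shell completion for its values, and a mutually
	// exclusive flag pair, using the same cobra calls as the diff above.
	cmd.Flags().StringVar(&model, "model", "default-model", "Which model to use")
	cmd.RegisterFlagCompletionFunc("model", func(*cobra.Command, []string, string) ([]string, cobra.ShellCompDirective) {
		return []string{"model-a", "model-b"}, cobra.ShellCompDirectiveDefault
	})
	cmd.Flags().String("system-prompt", "", "System prompt")
	cmd.Flags().String("system-prompt-file", "", "A path to a file containing the system prompt")
	cmd.MarkFlagsMutuallyExclusive("system-prompt", "system-prompt-file")

	if err := cmd.Execute(); err != nil {
		fmt.Println(err)
	}
}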
@@ -2,13 +2,16 @@ package util

import (
	"fmt"
	"io"
	"os"
	"strings"
	"time"

	"git.mlow.ca/mlow/lmcli/pkg/lmcli"
	"git.mlow.ca/mlow/lmcli/pkg/lmcli/model"
	"git.mlow.ca/mlow/lmcli/pkg/lmcli/tools"
	"git.mlow.ca/mlow/lmcli/pkg/util"
	"github.com/alecthomas/chroma/v2/quick"
	"github.com/charmbracelet/lipgloss"
)

@@ -26,11 +29,19 @@ func FetchAndShowCompletion(ctx *lmcli.Context, messages []model.Message) ([]mod
		return nil, err
	}

	var toolBag []model.Tool
	for _, toolName := range *ctx.Config.Tools.EnabledTools {
		tool, ok := tools.AvailableTools[toolName]
		if ok {
			toolBag = append(toolBag, tool)
		}
	}

	requestParams := model.RequestParameters{
		Model:       *ctx.Config.Defaults.Model,
		MaxTokens:   *ctx.Config.Defaults.MaxTokens,
		Temperature: *ctx.Config.Defaults.Temperature,
		ToolBag:     ctx.EnabledTools,
		ToolBag:     toolBag,
	}

	var apiReplies []model.Message
@@ -229,6 +240,12 @@ func RenderConversation(ctx *lmcli.Context, messages []model.Message, spaceForRe
	}
}

// HighlightMarkdown applies syntax highlighting to the provided markdown text
// and writes the result to the given writer.
func HighlightMarkdown(w io.Writer, markdownText string, formatter string, style string) error {
	return quick.Highlight(w, markdownText, "md", formatter, style)
}

func RenderMessage(ctx *lmcli.Context, m *model.Message) {
	var messageAge string
	if m.CreatedAt.IsZero() {
@@ -257,7 +274,11 @@ func RenderMessage(ctx *lmcli.Context, m *model.Message) {

	fmt.Printf("%s %s - %s %s\n\n", separator, role, timestamp, separator)
	if m.Content != "" {
		ctx.Chroma.Highlight(os.Stdout, m.Content)
		HighlightMarkdown(
			os.Stdout, m.Content,
			*ctx.Config.Chroma.Formatter,
			*ctx.Config.Chroma.Style,
		)
		fmt.Println()
	}
}
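As a rough illustration of what the new HighlightMarkdown helper delegates to, the snippet below calls chroma's quick.Highlight directly. The sample markdown and the "terminal16m"/"monokai" formatter and style names are assumptions for the example, not values taken from lmcli's configuration.

package main

import (
	"os"

	"github.com/alecthomas/chroma/v2/quick"
)

func main() {
	markdown := "# Title\n\nSome text with `inline code` and a list:\n- item one\n- item two\n"

	// Lex the text as markdown ("md") and write ANSI-colored output to stdout.
	// "terminal16m" and "monokai" are example formatter/style names.
	if err := quick.Highlight(os.Stdout, markdown, "md", "terminal16m", "monokai"); err != nil {
		panic(err)
	}
}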
@@ -5,22 +5,16 @@ import (
	"os"
	"path/filepath"

	"git.mlow.ca/mlow/lmcli/pkg/lmcli/model"
	"git.mlow.ca/mlow/lmcli/pkg/lmcli/provider"
	"git.mlow.ca/mlow/lmcli/pkg/lmcli/provider/anthropic"
	"git.mlow.ca/mlow/lmcli/pkg/lmcli/provider/openai"
	"git.mlow.ca/mlow/lmcli/pkg/lmcli/tools"
	"git.mlow.ca/mlow/lmcli/pkg/util/tty"
	"gorm.io/driver/sqlite"
	"gorm.io/gorm"
)

type Context struct {
	Config *Config
	Config Config
	Store  ConversationStore

	Chroma       *tty.ChromaHighlighter
	EnabledTools []model.Tool
}

func NewContext() (*Context, error) {
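One change above turns the Config field from a pointer into a value. As a general Go aside (not code from this commit): a value field takes its own copy of the config at construction time, while a pointer field keeps sharing the caller's instance, as this tiny sketch with a made-up Config type shows.

package main

import "fmt"

// Config is a stand-in for lmcli's config type; the field is illustrative.
type Config struct {
	Model string
}

type byPointer struct{ Config *Config }
type byValue struct{ Config Config }

func main() {
	cfg := Config{Model: "model-a"}
	p := byPointer{Config: &cfg}
	v := byValue{Config: cfg}

	// Mutate the original config after construction:
	cfg.Model = "model-b"

	fmt.Println(p.Config.Model) // "model-b": the pointer field sees the change
	fmt.Println(v.Config.Model) // "model-a": the value field kept its own copy
}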
@@ -35,32 +29,12 @@ func NewContext() (*Context, error) {
	if err != nil {
		return nil, fmt.Errorf("Error establishing connection to store: %v", err)
	}
	store, err := NewSQLStore(db)
	s, err := NewSQLStore(db)
	if err != nil {
		Fatal("%v\n", err)
	}

	chroma := tty.NewChromaHighlighter("markdown", *config.Chroma.Formatter, *config.Chroma.Style)

	var enabledTools []model.Tool
	for _, toolName := range *config.Tools.EnabledTools {
		tool, ok := tools.AvailableTools[toolName]
		if ok {
			enabledTools = append(enabledTools, tool)
		}
	}

	return &Context{config, store, chroma, enabledTools}, nil
}

func (c *Context) GetModels() (models []string) {
	for _, m := range *c.Config.Anthropic.Models {
		models = append(models, m)
	}
	for _, m := range *c.Config.OpenAI.Models {
		models = append(models, m)
	}
	return
	return &Context{*config, s}, nil
}

func (c *Context) GetCompletionProvider(model string) (provider.ChatCompletionClient, error) {
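The NewContext hunk above checks an error from establishing the store connection. Below is a minimal sketch of how a SQLite-backed gorm handle like db is commonly opened with the gorm.io/driver/sqlite driver imported above; the database path here is an assumption, and NewSQLStore is this repository's own constructor, mentioned only as the eventual call site.

package main

import (
	"fmt"

	"gorm.io/driver/sqlite"
	"gorm.io/gorm"
)

func main() {
	// Open (or create) a SQLite database file; the path is an example value.
	db, err := gorm.Open(sqlite.Open("lmcli.sqlite"), &gorm.Config{})
	if err != nil {
		fmt.Printf("Error establishing connection to store: %v\n", err)
		return
	}

	// In lmcli, a handle like this would be passed to NewSQLStore(db).
	_ = db
}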