Compare commits
No commits in common. "045146bb5c57601c787fd67c32bdf503d114cbff" and "0a27b9a8d34530be58e9250e5b6e15b5ca20354a" have entirely different histories.
045146bb5c ... 0a27b9a8d3
@@ -38,15 +38,14 @@ func RootCmd(ctx *lmcli.Context) *cobra.Command {
 	inputCmds := []*cobra.Command{newCmd, promptCmd, replyCmd, retryCmd, continueCmd, editCmd}
 	for _, cmd := range inputCmds {
 		cmd.Flags().StringVar(ctx.Config.Defaults.Model, "model", *ctx.Config.Defaults.Model, "Which model to use")
-		cmd.RegisterFlagCompletionFunc("model", func(*cobra.Command, []string, string) ([]string, cobra.ShellCompDirective) {
-			return ctx.GetModels(), cobra.ShellCompDirectiveDefault
-		})
 		cmd.Flags().IntVar(ctx.Config.Defaults.MaxTokens, "length", *ctx.Config.Defaults.MaxTokens, "Maximum response tokens")
 		cmd.Flags().StringVar(ctx.Config.Defaults.SystemPrompt, "system-prompt", *ctx.Config.Defaults.SystemPrompt, "System prompt")
 		cmd.Flags().StringVar(&systemPromptFile, "system-prompt-file", "", "A path to a file containing the system prompt")
 		cmd.MarkFlagsMutuallyExclusive("system-prompt", "system-prompt-file")
 	}
 
+	renameCmd.Flags().Bool("generate", false, "Generate a conversation title")
+
 	root.AddCommand(
 		cloneCmd,
 		continueCmd,
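For reference: the loop above binds each flag directly to a config field, so the configured value acts as the default and a supplied flag overwrites it in place. A minimal sketch of that cobra pattern, assuming a stand-in config type rather than lmcli's:

package main

import (
	"fmt"

	"github.com/spf13/cobra"
)

// Stand-in for a defaults config; the field name is illustrative.
type defaults struct {
	Model string
}

func main() {
	cfg := defaults{Model: "gpt-4"}

	cmd := &cobra.Command{
		Use: "demo",
		Run: func(cmd *cobra.Command, args []string) {
			// Reflects --model when given, otherwise the config default.
			fmt.Println("model:", cfg.Model)
		},
	}
	// Passing a pointer makes the flag write straight into the config field,
	// with the field's current value as the default.
	cmd.Flags().StringVar(&cfg.Model, "model", cfg.Model, "Which model to use")

	_ = cmd.Execute()
}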
@@ -56,7 +56,5 @@ func RenameCmd(ctx *lmcli.Context) *cobra.Command {
 		},
 	}
 
-	cmd.Flags().Bool("generate", false, "Generate a conversation title")
-
 	return cmd
 }
@@ -2,13 +2,16 @@ package util
 
 import (
 	"fmt"
+	"io"
 	"os"
 	"strings"
 	"time"
 
 	"git.mlow.ca/mlow/lmcli/pkg/lmcli"
 	"git.mlow.ca/mlow/lmcli/pkg/lmcli/model"
+	"git.mlow.ca/mlow/lmcli/pkg/lmcli/tools"
 	"git.mlow.ca/mlow/lmcli/pkg/util"
+	"github.com/alecthomas/chroma/v2/quick"
 	"github.com/charmbracelet/lipgloss"
 )
 
@@ -26,11 +29,19 @@ func FetchAndShowCompletion(ctx *lmcli.Context, messages []model.Message) ([]mod
 		return nil, err
 	}
 
+	var toolBag []model.Tool
+	for _, toolName := range *ctx.Config.Tools.EnabledTools {
+		tool, ok := tools.AvailableTools[toolName]
+		if ok {
+			toolBag = append(toolBag, tool)
+		}
+	}
+
 	requestParams := model.RequestParameters{
 		Model:       *ctx.Config.Defaults.Model,
 		MaxTokens:   *ctx.Config.Defaults.MaxTokens,
 		Temperature: *ctx.Config.Defaults.Temperature,
-		ToolBag:     ctx.EnabledTools,
+		ToolBag:     toolBag,
 	}
 
 	var apiReplies []model.Message
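For reference: the tool-bag loop added above follows a registry-map pattern, looking each enabled name up in a map of available tools and keeping only the hits. A self-contained sketch with stand-in types, not the actual lmcli tools package:

package main

import "fmt"

// Stand-in registry; in lmcli, tools.AvailableTools maps names to model.Tool values.
type tool struct{ Name string }

var availableTools = map[string]tool{
	"read_file":  {Name: "read_file"},
	"write_file": {Name: "write_file"},
}

func main() {
	enabled := []string{"read_file", "does_not_exist"}

	var toolBag []tool
	for _, name := range enabled {
		if t, ok := availableTools[name]; ok {
			toolBag = append(toolBag, t) // unknown names are skipped silently
		}
	}
	fmt.Println(toolBag) // [{read_file}]
}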
@@ -229,6 +240,12 @@ func RenderConversation(ctx *lmcli.Context, messages []model.Message, spaceForRe
 	}
 }
 
+// HighlightMarkdown applies syntax highlighting to the provided markdown text
+// and writes it to stdout.
+func HighlightMarkdown(w io.Writer, markdownText string, formatter string, style string) error {
+	return quick.Highlight(w, markdownText, "md", formatter, style)
+}
+
 func RenderMessage(ctx *lmcli.Context, m *model.Message) {
 	var messageAge string
 	if m.CreatedAt.IsZero() {
@@ -257,7 +274,11 @@ func RenderMessage(ctx *lmcli.Context, m *model.Message) {
 
 	fmt.Printf("%s %s - %s %s\n\n", separator, role, timestamp, separator)
 	if m.Content != "" {
-		ctx.Chroma.Highlight(os.Stdout, m.Content)
+		HighlightMarkdown(
+			os.Stdout, m.Content,
+			*ctx.Config.Chroma.Formatter,
+			*ctx.Config.Chroma.Style,
+		)
 		fmt.Println()
 	}
 }
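For reference: the HighlightMarkdown helper introduced above is a thin wrapper over chroma v2's quick.Highlight, and the call site passes the configured formatter and style. A minimal usage sketch; the formatter and style names here are illustrative, not lmcli's configured values:

package main

import (
	"log"
	"os"

	"github.com/alecthomas/chroma/v2/quick"
)

func main() {
	src := "# Title\n\nSome *markdown* with `code`.\n"
	// Arguments: writer, source, lexer name, formatter name, style name.
	// "terminal16m" and "monokai" are example choices.
	if err := quick.Highlight(os.Stdout, src, "md", "terminal16m", "monokai"); err != nil {
		log.Fatal(err)
	}
}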
@@ -5,22 +5,16 @@ import (
 	"os"
 	"path/filepath"
 
-	"git.mlow.ca/mlow/lmcli/pkg/lmcli/model"
 	"git.mlow.ca/mlow/lmcli/pkg/lmcli/provider"
 	"git.mlow.ca/mlow/lmcli/pkg/lmcli/provider/anthropic"
 	"git.mlow.ca/mlow/lmcli/pkg/lmcli/provider/openai"
-	"git.mlow.ca/mlow/lmcli/pkg/lmcli/tools"
-	"git.mlow.ca/mlow/lmcli/pkg/util/tty"
 	"gorm.io/driver/sqlite"
 	"gorm.io/gorm"
 )
 
 type Context struct {
-	Config *Config
+	Config Config
 	Store  ConversationStore
-
-	Chroma       *tty.ChromaHighlighter
-	EnabledTools []model.Tool
 }
 
 func NewContext() (*Context, error) {
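For reference: the struct change swaps Config *Config for Config Config, so the context holds its own copy of the configuration rather than a shared pointer. A small sketch of the difference in sharing semantics, using a stand-in Config type:

package main

import "fmt"

// Stand-in type; not the lmcli Config definition.
type Config struct{ Model string }

type byPointer struct{ Config *Config }
type byValue struct{ Config Config }

func main() {
	cfg := Config{Model: "gpt-4"}

	p := byPointer{Config: &cfg}
	v := byValue{Config: cfg}

	cfg.Model = "claude-3" // later edit to the original config

	fmt.Println(p.Config.Model) // "claude-3": pointer field sees the change
	fmt.Println(v.Config.Model) // "gpt-4": value field holds a copy
}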
@@ -35,32 +29,12 @@ func NewContext() (*Context, error) {
 	if err != nil {
 		return nil, fmt.Errorf("Error establishing connection to store: %v", err)
 	}
-	store, err := NewSQLStore(db)
+	s, err := NewSQLStore(db)
 	if err != nil {
 		Fatal("%v\n", err)
 	}
 
-	chroma := tty.NewChromaHighlighter("markdown", *config.Chroma.Formatter, *config.Chroma.Style)
-
-	var enabledTools []model.Tool
-	for _, toolName := range *config.Tools.EnabledTools {
-		tool, ok := tools.AvailableTools[toolName]
-		if ok {
-			enabledTools = append(enabledTools, tool)
-		}
-	}
-
-	return &Context{config, store, chroma, enabledTools}, nil
-}
-
-func (c *Context) GetModels() (models []string) {
-	for _, m := range *c.Config.Anthropic.Models {
-		models = append(models, m)
-	}
-	for _, m := range *c.Config.OpenAI.Models {
-		models = append(models, m)
-	}
-	return
-}
+	return &Context{*config, s}, nil
+}
 
 func (c *Context) GetCompletionProvider(model string) (provider.ChatCompletionClient, error) {