From a2c860252fd010a528feaed74b3242be69467330 Mon Sep 17 00:00:00 2001 From: Matt Low Date: Sun, 9 Jun 2024 16:42:53 +0000 Subject: [PATCH] Refactor pkg/lmcli/provider Moved `ChatCompletionClient` to `pkg/api`, moved individual providers to `pkg/api/provider` --- pkg/{lmcli/provider/provider.go => api/api.go} | 2 +- pkg/{lmcli => api}/provider/anthropic/anthropic.go | 12 ++++++------ pkg/{lmcli => api}/provider/anthropic/tools.go | 0 pkg/{lmcli => api}/provider/anthropic/types.go | 0 pkg/{lmcli => api}/provider/google/google.go | 12 ++++++------ pkg/{lmcli => api}/provider/google/types.go | 0 pkg/{lmcli => api}/provider/ollama/ollama.go | 10 +++++----- pkg/{lmcli => api}/provider/openai/openai.go | 12 ++++++------ pkg/{lmcli => api}/provider/openai/types.go | 0 pkg/cmd/util/util.go | 6 +++--- pkg/lmcli/lmcli.go | 12 ++++++------ pkg/tui/views/chat/chat.go | 8 ++++---- 12 files changed, 37 insertions(+), 37 deletions(-) rename pkg/{lmcli/provider/provider.go => api/api.go} (97%) rename pkg/{lmcli => api}/provider/anthropic/anthropic.go (97%) rename pkg/{lmcli => api}/provider/anthropic/tools.go (100%) rename pkg/{lmcli => api}/provider/anthropic/types.go (100%) rename pkg/{lmcli => api}/provider/google/google.go (97%) rename pkg/{lmcli => api}/provider/google/types.go (100%) rename pkg/{lmcli => api}/provider/ollama/ollama.go (96%) rename pkg/{lmcli => api}/provider/openai/openai.go (97%) rename pkg/{lmcli => api}/provider/openai/types.go (100%) diff --git a/pkg/lmcli/provider/provider.go b/pkg/api/api.go similarity index 97% rename from pkg/lmcli/provider/provider.go rename to pkg/api/api.go index d3fc1c8..dbb3428 100644 --- a/pkg/lmcli/provider/provider.go +++ b/pkg/api/api.go @@ -1,4 +1,4 @@ -package provider +package api import ( "context" diff --git a/pkg/lmcli/provider/anthropic/anthropic.go b/pkg/api/provider/anthropic/anthropic.go similarity index 97% rename from pkg/lmcli/provider/anthropic/anthropic.go rename to pkg/api/provider/anthropic/anthropic.go 
index 2b0b075..7d14f7f 100644 --- a/pkg/lmcli/provider/anthropic/anthropic.go +++ b/pkg/api/provider/anthropic/anthropic.go @@ -10,8 +10,8 @@ import ( "net/http" "strings" + "git.mlow.ca/mlow/lmcli/pkg/api" "git.mlow.ca/mlow/lmcli/pkg/lmcli/model" - "git.mlow.ca/mlow/lmcli/pkg/lmcli/provider" "git.mlow.ca/mlow/lmcli/pkg/lmcli/tools" ) @@ -107,7 +107,7 @@ func (c *AnthropicClient) CreateChatCompletion( ctx context.Context, params model.RequestParameters, messages []model.Message, - callback provider.ReplyCallback, + callback api.ReplyCallback, ) (string, error) { if len(messages) == 0 { return "", fmt.Errorf("Can't create completion from no messages") @@ -160,8 +160,8 @@ func (c *AnthropicClient) CreateChatCompletionStream( ctx context.Context, params model.RequestParameters, messages []model.Message, - callback provider.ReplyCallback, - output chan<- provider.Chunk, + callback api.ReplyCallback, + output chan<- api.Chunk, ) (string, error) { if len(messages) == 0 { return "", fmt.Errorf("Can't create completion from no messages") @@ -242,7 +242,7 @@ func (c *AnthropicClient) CreateChatCompletionStream( return "", fmt.Errorf("invalid text delta") } sb.WriteString(text) - output <- provider.Chunk{ + output <- api.Chunk{ Content: text, } case "content_block_stop": @@ -264,7 +264,7 @@ func (c *AnthropicClient) CreateChatCompletionStream( } sb.WriteString(FUNCTION_STOP_SEQUENCE) - output <- provider.Chunk{ + output <- api.Chunk{ Content: FUNCTION_STOP_SEQUENCE, } diff --git a/pkg/lmcli/provider/anthropic/tools.go b/pkg/api/provider/anthropic/tools.go similarity index 100% rename from pkg/lmcli/provider/anthropic/tools.go rename to pkg/api/provider/anthropic/tools.go diff --git a/pkg/lmcli/provider/anthropic/types.go b/pkg/api/provider/anthropic/types.go similarity index 100% rename from pkg/lmcli/provider/anthropic/types.go rename to pkg/api/provider/anthropic/types.go diff --git a/pkg/lmcli/provider/google/google.go b/pkg/api/provider/google/google.go similarity index 
97% rename from pkg/lmcli/provider/google/google.go rename to pkg/api/provider/google/google.go index c7a3d35..8e44ab0 100644 --- a/pkg/lmcli/provider/google/google.go +++ b/pkg/api/provider/google/google.go @@ -10,8 +10,8 @@ import ( "net/http" "strings" + "git.mlow.ca/mlow/lmcli/pkg/api" "git.mlow.ca/mlow/lmcli/pkg/lmcli/model" - "git.mlow.ca/mlow/lmcli/pkg/lmcli/provider" "git.mlow.ca/mlow/lmcli/pkg/lmcli/tools" ) @@ -187,7 +187,7 @@ func handleToolCalls( params model.RequestParameters, content string, toolCalls []model.ToolCall, - callback provider.ReplyCallback, + callback api.ReplyCallback, messages []model.Message, ) ([]model.Message, error) { lastMessage := messages[len(messages)-1] @@ -245,7 +245,7 @@ func (c *Client) CreateChatCompletion( ctx context.Context, params model.RequestParameters, messages []model.Message, - callback provider.ReplyCallback, + callback api.ReplyCallback, ) (string, error) { if len(messages) == 0 { return "", fmt.Errorf("Can't create completion from no messages") @@ -325,8 +325,8 @@ func (c *Client) CreateChatCompletionStream( ctx context.Context, params model.RequestParameters, messages []model.Message, - callback provider.ReplyCallback, - output chan<- provider.Chunk, + callback api.ReplyCallback, + output chan<- api.Chunk, ) (string, error) { if len(messages) == 0 { return "", fmt.Errorf("Can't create completion from no messages") @@ -393,7 +393,7 @@ func (c *Client) CreateChatCompletionStream( if part.FunctionCall != nil { toolCalls = append(toolCalls, *part.FunctionCall) } else if part.Text != "" { - output <- provider.Chunk { + output <- api.Chunk { Content: part.Text, } content.WriteString(part.Text) diff --git a/pkg/lmcli/provider/google/types.go b/pkg/api/provider/google/types.go similarity index 100% rename from pkg/lmcli/provider/google/types.go rename to pkg/api/provider/google/types.go diff --git a/pkg/lmcli/provider/ollama/ollama.go b/pkg/api/provider/ollama/ollama.go similarity index 96% rename from 
pkg/lmcli/provider/ollama/ollama.go rename to pkg/api/provider/ollama/ollama.go index b2df01e..4825fa3 100644 --- a/pkg/lmcli/provider/ollama/ollama.go +++ b/pkg/api/provider/ollama/ollama.go @@ -10,8 +10,8 @@ import ( "net/http" "strings" + "git.mlow.ca/mlow/lmcli/pkg/api" "git.mlow.ca/mlow/lmcli/pkg/lmcli/model" - "git.mlow.ca/mlow/lmcli/pkg/lmcli/provider" ) type OllamaClient struct { @@ -85,7 +85,7 @@ func (c *OllamaClient) CreateChatCompletion( ctx context.Context, params model.RequestParameters, messages []model.Message, - callback provider.ReplyCallback, + callback api.ReplyCallback, ) (string, error) { if len(messages) == 0 { return "", fmt.Errorf("Can't create completion from no messages") @@ -131,8 +131,8 @@ func (c *OllamaClient) CreateChatCompletionStream( ctx context.Context, params model.RequestParameters, messages []model.Message, - callback provider.ReplyCallback, - output chan<- provider.Chunk, + callback api.ReplyCallback, + output chan<- api.Chunk, ) (string, error) { if len(messages) == 0 { return "", fmt.Errorf("Can't create completion from no messages") @@ -181,7 +181,7 @@ func (c *OllamaClient) CreateChatCompletionStream( } if len(streamResp.Message.Content) > 0 { - output <- provider.Chunk{ + output <- api.Chunk{ Content: streamResp.Message.Content, } content.WriteString(streamResp.Message.Content) diff --git a/pkg/lmcli/provider/openai/openai.go b/pkg/api/provider/openai/openai.go similarity index 97% rename from pkg/lmcli/provider/openai/openai.go rename to pkg/api/provider/openai/openai.go index 79915cc..bc291cd 100644 --- a/pkg/lmcli/provider/openai/openai.go +++ b/pkg/api/provider/openai/openai.go @@ -10,8 +10,8 @@ import ( "net/http" "strings" + "git.mlow.ca/mlow/lmcli/pkg/api" "git.mlow.ca/mlow/lmcli/pkg/lmcli/model" - "git.mlow.ca/mlow/lmcli/pkg/lmcli/provider" "git.mlow.ca/mlow/lmcli/pkg/lmcli/tools" ) @@ -121,7 +121,7 @@ func handleToolCalls( params model.RequestParameters, content string, toolCalls []ToolCall, - callback 
provider.ReplyCallback, + callback api.ReplyCallback, messages []model.Message, ) ([]model.Message, error) { lastMessage := messages[len(messages)-1] @@ -180,7 +180,7 @@ func (c *OpenAIClient) CreateChatCompletion( ctx context.Context, params model.RequestParameters, messages []model.Message, - callback provider.ReplyCallback, + callback api.ReplyCallback, ) (string, error) { if len(messages) == 0 { return "", fmt.Errorf("Can't create completion from no messages") @@ -244,8 +244,8 @@ func (c *OpenAIClient) CreateChatCompletionStream( ctx context.Context, params model.RequestParameters, messages []model.Message, - callback provider.ReplyCallback, - output chan<- provider.Chunk, + callback api.ReplyCallback, + output chan<- api.Chunk, ) (string, error) { if len(messages) == 0 { return "", fmt.Errorf("Can't create completion from no messages") @@ -319,7 +319,7 @@ func (c *OpenAIClient) CreateChatCompletionStream( } } if len(delta.Content) > 0 { - output <- provider.Chunk { + output <- api.Chunk { Content: delta.Content, } content.WriteString(delta.Content) diff --git a/pkg/lmcli/provider/openai/types.go b/pkg/api/provider/openai/types.go similarity index 100% rename from pkg/lmcli/provider/openai/types.go rename to pkg/api/provider/openai/types.go diff --git a/pkg/cmd/util/util.go b/pkg/cmd/util/util.go index 9c85376..8c30685 100644 --- a/pkg/cmd/util/util.go +++ b/pkg/cmd/util/util.go @@ -8,9 +8,9 @@ import ( "strings" "time" + "git.mlow.ca/mlow/lmcli/pkg/api" "git.mlow.ca/mlow/lmcli/pkg/lmcli" "git.mlow.ca/mlow/lmcli/pkg/lmcli/model" - "git.mlow.ca/mlow/lmcli/pkg/lmcli/provider" "git.mlow.ca/mlow/lmcli/pkg/util" "github.com/charmbracelet/lipgloss" ) @@ -18,7 +18,7 @@ import ( // Prompt prompts the configured the configured model and streams the response // to stdout. Returns all model reply messages. 
func Prompt(ctx *lmcli.Context, messages []model.Message, callback func(model.Message)) (string, error) { - content := make(chan provider.Chunk) // receives the reponse from LLM + content := make(chan api.Chunk) // receives the reponse from LLM defer close(content) // render all content received over the channel @@ -252,7 +252,7 @@ func ShowWaitAnimation(signal chan any) { // chunked) content is received on the channel, the waiting animation is // replaced by the content. // Blocks until the channel is closed. -func ShowDelayedContent(content <-chan provider.Chunk) { +func ShowDelayedContent(content <-chan api.Chunk) { waitSignal := make(chan any) go ShowWaitAnimation(waitSignal) diff --git a/pkg/lmcli/lmcli.go b/pkg/lmcli/lmcli.go index 7f288c9..b5389c1 100644 --- a/pkg/lmcli/lmcli.go +++ b/pkg/lmcli/lmcli.go @@ -6,12 +6,12 @@ import ( "path/filepath" "strings" + "git.mlow.ca/mlow/lmcli/pkg/api" + "git.mlow.ca/mlow/lmcli/pkg/api/provider/anthropic" + "git.mlow.ca/mlow/lmcli/pkg/api/provider/google" + "git.mlow.ca/mlow/lmcli/pkg/api/provider/ollama" + "git.mlow.ca/mlow/lmcli/pkg/api/provider/openai" "git.mlow.ca/mlow/lmcli/pkg/lmcli/model" - "git.mlow.ca/mlow/lmcli/pkg/lmcli/provider" - "git.mlow.ca/mlow/lmcli/pkg/lmcli/provider/anthropic" - "git.mlow.ca/mlow/lmcli/pkg/lmcli/provider/google" - "git.mlow.ca/mlow/lmcli/pkg/lmcli/provider/ollama" - "git.mlow.ca/mlow/lmcli/pkg/lmcli/provider/openai" "git.mlow.ca/mlow/lmcli/pkg/lmcli/tools" "git.mlow.ca/mlow/lmcli/pkg/util" "git.mlow.ca/mlow/lmcli/pkg/util/tty" @@ -79,7 +79,7 @@ func (c *Context) GetModels() (models []string) { return } -func (c *Context) GetModelProvider(model string) (string, provider.ChatCompletionClient, error) { +func (c *Context) GetModelProvider(model string) (string, api.ChatCompletionClient, error) { parts := strings.Split(model, "/") var provider string diff --git a/pkg/tui/views/chat/chat.go b/pkg/tui/views/chat/chat.go index 4ccd983..cbace1e 100644 --- a/pkg/tui/views/chat/chat.go +++ 
b/pkg/tui/views/chat/chat.go @@ -3,8 +3,8 @@ package chat import ( "time" + "git.mlow.ca/mlow/lmcli/pkg/api" models "git.mlow.ca/mlow/lmcli/pkg/lmcli/model" - "git.mlow.ca/mlow/lmcli/pkg/lmcli/provider" "git.mlow.ca/mlow/lmcli/pkg/tui/shared" "github.com/charmbracelet/bubbles/cursor" "github.com/charmbracelet/bubbles/spinner" @@ -17,7 +17,7 @@ import ( // custom tea.Msg types type ( // sent on each chunk received from LLM - msgResponseChunk provider.Chunk + msgResponseChunk api.Chunk // sent when response is finished being received msgResponseEnd string // a special case of common.MsgError that stops the response waiting animation @@ -83,7 +83,7 @@ type Model struct { editorTarget editorTarget stopSignal chan struct{} replyChan chan models.Message - replyChunkChan chan provider.Chunk + replyChunkChan chan api.Chunk persistence bool // whether we will save new messages in the conversation // ui state @@ -115,7 +115,7 @@ func Chat(shared shared.Shared) Model { stopSignal: make(chan struct{}), replyChan: make(chan models.Message), - replyChunkChan: make(chan provider.Chunk), + replyChunkChan: make(chan api.Chunk), wrap: true, selectedMessage: -1,