Compare commits

..

8 Commits

Author SHA1 Message Date
6eca84dab8 Pull message rendering into its own method 2023-11-05 08:50:07 +00:00
2c64ab501b Treat the system message like any other
Removed the system parameter on ChatCompletion functions, and persist it
in conversations as well.
2023-11-05 07:55:07 +00:00
3d518efd6f Implement persistence for lmcli new 2023-11-05 07:47:24 +00:00
78bcc11a4b Update HandleDelayedResponse to return the complete output 2023-11-05 07:40:55 +00:00
1ac8f7d046 Trim content before returning InputFromEditor 2023-11-05 07:22:45 +00:00
bb895460ad Formatting 2023-11-05 06:55:38 +00:00
b46bbef80b Spelling 2023-11-05 06:51:56 +00:00
794ccc52ff Show waiting animation while waiting for LLM response 2023-11-05 06:50:28 +00:00
6 changed files with 93 additions and 28 deletions
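
Taken together, the caller-facing changes in this range are: the system prompt travels as an ordinary Message, CreateChatCompletionStream no longer takes a system parameter, and HandleDelayedResponse now returns the assembled reply. A minimal sketch of the resulting call pattern, assembled from the diffs below; streamOnce and its prompt text are hypothetical, not part of the change:

// Sketch only (not part of the change): a helper inside this package that
// follows the new call pattern end to end.
func streamOnce(userInput string) (string, error) {
	messages := []Message{
		{Role: "system", OriginalContent: "You are a helpful assistant."},
		{Role: "user", OriginalContent: userInput},
	}

	receiver := make(chan string)
	response := make(chan string)

	// HandleDelayedResponse prints chunks as they arrive and returns the
	// assembled reply once receiver is closed by the stream.
	go func() {
		response <- HandleDelayedResponse(receiver)
	}()

	if err := CreateChatCompletionStream(messages, receiver); err != nil {
		return "", err
	}
	return <-response, nil
}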

View File

@@ -12,3 +12,7 @@ func Fatal(format string, args ...any) {
 	fmt.Fprintf(os.Stderr, format, args...)
 	os.Exit(1)
 }
+
+func Warn(format string, args ...any) {
+	fmt.Fprintf(os.Stderr, format, args...)
+}

View File

@@ -71,29 +71,65 @@ var newCmd = &cobra.Command{
 		messageContents, err := InputFromEditor("# What would you like to say?\n", "message.*.md")
 		if err != nil {
 			Fatal("Failed to get input: %v\n", err)
-			return
 		}
 		if messageContents == "" {
 			Fatal("No message was provided.\n")
-			return
 		}
 
-		fmt.Printf("> %s\n", messageContents)
+		// TODO: set title if --title provided, otherwise defer for later(?)
+		conversation := Conversation{}
+		err = store.SaveConversation(&conversation)
+		if err != nil {
+			Fatal("Could not save new conversation: %v\n", err)
+		}
 
+		const system = "You are a helpful assistant."
 		messages := []Message{
 			{
+				ConversationID: conversation.ID,
+				Role: "system",
+				OriginalContent: system,
+			},
+			{
+				ConversationID: conversation.ID,
 				Role: "user",
 				OriginalContent: messageContents,
 			},
 		}
 
+		for _, message := range messages {
+			err = store.SaveMessage(&message)
+			if err != nil {
+				Warn("Could not save %s message: %v\n", message.Role, err)
+			}
+		}
+
+		for _, message := range messages {
+			message.RenderTTY(true)
+		}
+
+		reply := Message{
+			ConversationID: conversation.ID,
+			Role: "assistant",
+		}
+		reply.RenderTTY(false)
+
 		receiver := make(chan string)
-		go HandleDelayedResponse(receiver)
+		response := make(chan string)
 
-		err = CreateChatCompletionStream("You are a helpful assistant.", messages, receiver)
+		go func() {
+			response <- HandleDelayedResponse(receiver)
+		}()
+
+		err = CreateChatCompletionStream(messages, receiver)
 		if err != nil {
 			Fatal("%v\n", err)
-			return
 		}
+
+		reply.OriginalContent = <-response
+
+		err = store.SaveMessage(&reply)
+		if err != nil {
+			Fatal("Could not save reply: %v\n", err)
+		}
 
 		fmt.Println()
@@ -108,10 +144,14 @@ var promptCmd = &cobra.Command{
 		message := strings.Join(args, " ")
 		if len(strings.Trim(message, " \t\n")) == 0 {
 			Fatal("No message was provided.\n")
-			return
 		}
 
+		const system = "You are a helpful assistant."
 		messages := []Message{
+			{
+				Role: "system",
+				OriginalContent: system,
+			},
 			{
 				Role: "user",
 				OriginalContent: message,
@@ -120,10 +160,9 @@ var promptCmd = &cobra.Command{
 		receiver := make(chan string)
 		go HandleDelayedResponse(receiver)
 
-		err := CreateChatCompletionStream("You are a helpful assistant.", messages, receiver)
+		err := CreateChatCompletionStream(messages, receiver)
 		if err != nil {
 			Fatal("%v\n", err)
-			return
 		}
 
 		fmt.Println()
@@ -131,6 +170,9 @@ var promptCmd = &cobra.Command{
 }
 
 func NewRootCmd() *cobra.Command {
-	rootCmd.AddCommand(newCmd, promptCmd)
+	rootCmd.AddCommand(
+		newCmd,
+		promptCmd,
+	)
 	return rootCmd
 }

View File

@@ -49,7 +49,7 @@ func InitializeConfig() *Config {
 		_, err = file.Write(bytes)
 		if err != nil {
-			Fatal("Could not save default configuratoin: %v", err)
+			Fatal("Could not save default configuration: %v", err)
 			return nil
 		}
 	} else if err != nil {

View File

@@ -8,14 +8,8 @@ import (
 	openai "github.com/sashabaranov/go-openai"
 )
 
-func CreateChatCompletionRequest(system string, messages []Message) *openai.ChatCompletionRequest {
-	chatCompletionMessages := []openai.ChatCompletionMessage{
-		{
-			Role: "system",
-			Content: system,
-		},
-	}
-
+func CreateChatCompletionRequest(messages []Message) *openai.ChatCompletionRequest {
+	chatCompletionMessages := []openai.ChatCompletionMessage{}
 	for _, m := range messages {
 		chatCompletionMessages = append(chatCompletionMessages, openai.ChatCompletionMessage{
 			Role: m.Role,
@@ -33,11 +27,11 @@ func CreateChatCompletionRequest(system string, messages []Message) *openai.Chat
 
 // CreateChatCompletion accepts a slice of Message and returns the response
 // of the Large Language Model.
-func CreateChatCompletion(system string, messages []Message) (string, error) {
+func CreateChatCompletion(messages []Message) (string, error) {
 	client := openai.NewClient(config.OpenAI.APIKey)
 	resp, err := client.CreateChatCompletion(
 		context.Background(),
-		*CreateChatCompletionRequest(system, messages),
+		*CreateChatCompletionRequest(messages),
 	)
 
 	if err != nil {
@@ -47,13 +41,13 @@ func CreateChatCompletion(system string, messages []Message) (string, error) {
 	return resp.Choices[0].Message.Content, nil
 }
 
 // CreateChatCompletionStream submits an streaming Chat Completion API request
 // and sends the received data to the output channel.
-func CreateChatCompletionStream(system string, messages []Message, output chan string) error {
+func CreateChatCompletionStream(messages []Message, output chan string) error {
 	client := openai.NewClient(config.OpenAI.APIKey)
 	ctx := context.Background()
 
-	req := CreateChatCompletionRequest(system, messages)
+	req := CreateChatCompletionRequest(messages)
 	req.Stream = true
 
 	defer close(output)
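
The blocking variant changes the same way. A minimal sketch of a hypothetical caller inside this package; the prompt text is illustrative only:

// Sketch only: the system prompt is now just another element of the slice
// passed to CreateChatCompletion.
messages := []Message{
	{Role: "system", OriginalContent: "You are a helpful assistant."},
	{Role: "user", OriginalContent: "Say hello."},
}

reply, err := CreateChatCompletion(messages)
if err != nil {
	Fatal("%v\n", err)
}
fmt.Println(reply)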

View File

@@ -2,6 +2,7 @@ package cli
 import (
 	"fmt"
+	"strings"
 	"time"
 )
@@ -33,12 +34,14 @@ func ShowWaitAnimation(signal chan any) {
 }
 
 // HandledDelayedResponse writes a waiting animation (abusing \r) and the
-// content received on the response channel to stdout. Blocks until the channel
-// is closed.
-func HandleDelayedResponse(response chan string) {
+// (possibly chunked) content received on the response channel to stdout.
+// Blocks until the channel is closed.
+func HandleDelayedResponse(response chan string) string {
 	waitSignal := make(chan any)
 	go ShowWaitAnimation(waitSignal)
 
+	sb := strings.Builder{}
 	firstChunk := true
 	for chunk := range response {
 		if firstChunk {
@@ -47,5 +50,27 @@ func HandleDelayedResponse(response chan string) {
 			firstChunk = false
 		}
 		fmt.Print(chunk)
+		sb.WriteString(chunk)
+	}
+
+	return sb.String()
+}
+
+func (m *Message) RenderTTY(paddingDown bool) {
+	var friendlyRole string
+	switch m.Role {
+	case "user":
+		friendlyRole = "You"
+	case "system":
+		friendlyRole = "System"
+	case "assistant":
+		friendlyRole = "Assistant"
+	}
+
+	fmt.Printf("<%s>\n\n", friendlyRole)
+	if m.OriginalContent != "" {
+		fmt.Print(m.OriginalContent)
+	}
+	if paddingDown {
+		fmt.Print("\n\n")
 	}
 }
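
Because the function now returns what it printed, it can be exercised in isolation. A small hypothetical sketch that drives it with canned chunks and then renders the result with the new RenderTTY helper:

// Sketch only: feed canned chunks through the channel and use the new
// return value; HandleDelayedResponse still echoes each chunk to stdout.
receiver := make(chan string)
go func() {
	for _, chunk := range []string{"Hello", ", ", "world!"} {
		receiver <- chunk
	}
	close(receiver) // HandleDelayedResponse returns once receiver is closed
}()

content := HandleDelayedResponse(receiver)

reply := Message{Role: "assistant", OriginalContent: content}
reply.RenderTTY(true) // prints "<Assistant>", the content, then padding below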

View File

@@ -46,5 +46,5 @@ func InputFromEditor(placeholder string, pattern string) (string, error) {
 		}
 	}
 
-	return content, nil
+	return strings.Trim(content, "\n \t"), nil
 }
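
This trim also feeds the empty-message check in newCmd above: an editor buffer containing only blank lines now comes back as an empty string. A tiny illustration with hypothetical values:

// Sketch only: the same cutset InputFromEditor now applies on return.
fmt.Printf("%q\n", strings.Trim("my prompt\n\n", "\n \t")) // "my prompt"
fmt.Printf("%q\n", strings.Trim("  \n\t\n", "\n \t"))      // "" -> treated as "no message"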