Compare commits

..

1 Commit

Author SHA1 Message Date
a28a7a0054 Show waiting animation while waiting for LLM response 2023-11-05 06:44:06 +00:00
6 changed files with 28 additions and 93 deletions

View File

@@ -12,7 +12,3 @@ func Fatal(format string, args ...any) {
fmt.Fprintf(os.Stderr, format, args...)
os.Exit(1)
}
func Warn(format string, args ...any) {
fmt.Fprintf(os.Stderr, format, args...)
}

View File

@@ -71,65 +71,29 @@ var newCmd = &cobra.Command{
messageContents, err := InputFromEditor("# What would you like to say?\n", "message.*.md")
if err != nil {
Fatal("Failed to get input: %v\n", err)
return
}
if messageContents == "" {
Fatal("No message was provided.\n")
return
}
// TODO: set title if --title provided, otherwise defer for later(?)
conversation := Conversation{}
err = store.SaveConversation(&conversation)
if err != nil {
Fatal("Could not save new conversation: %v\n", err)
}
fmt.Printf("> %s\n", messageContents)
const system = "You are a helpful assistant."
messages := []Message{
{
ConversationID: conversation.ID,
Role: "system",
OriginalContent: system,
},
{
ConversationID: conversation.ID,
Role: "user",
OriginalContent: messageContents,
},
}
for _, message := range messages {
err = store.SaveMessage(&message)
if err != nil {
Warn("Could not save %s message: %v\n", message.Role, err)
}
}
for _, message := range messages {
message.RenderTTY(true)
}
reply := Message{
ConversationID: conversation.ID,
Role: "assistant",
}
reply.RenderTTY(false)
receiver := make(chan string)
response := make(chan string)
go func() {
response <- HandleDelayedResponse(receiver)
}()
err = CreateChatCompletionStream(messages, receiver)
go HandleDelayedResponse(receiver)
err = CreateChatCompletionStream("You are a helpful assistant.", messages, receiver)
if err != nil {
Fatal("%v\n", err)
}
reply.OriginalContent = <-response
err = store.SaveMessage(&reply)
if err != nil {
Fatal("Could not save reply: %v\n", err)
return
}
fmt.Println()
@@ -144,14 +108,10 @@ var promptCmd = &cobra.Command{
message := strings.Join(args, " ")
if len(strings.Trim(message, " \t\n")) == 0 {
Fatal("No message was provided.\n")
return
}
const system = "You are a helpful assistant."
messages := []Message{
{
Role: "system",
OriginalContent: system,
},
{
Role: "user",
OriginalContent: message,
@@ -160,9 +120,10 @@ var promptCmd = &cobra.Command{
receiver := make(chan string)
go HandleDelayedResponse(receiver)
err := CreateChatCompletionStream(messages, receiver)
err := CreateChatCompletionStream("You are a helpful assistant.", messages, receiver)
if err != nil {
Fatal("%v\n", err)
return
}
fmt.Println()
@@ -170,9 +131,6 @@ var promptCmd = &cobra.Command{
}
func NewRootCmd() *cobra.Command {
rootCmd.AddCommand(
newCmd,
promptCmd,
)
rootCmd.AddCommand(newCmd, promptCmd)
return rootCmd
}

View File

@@ -49,7 +49,7 @@ func InitializeConfig() *Config {
_, err = file.Write(bytes)
if err != nil {
Fatal("Could not save default configuration: %v", err)
Fatal("Could not save default configuration: %v", err)
return nil
}
} else if err != nil {

View File

@@ -8,8 +8,14 @@ import (
openai "github.com/sashabaranov/go-openai"
)
func CreateChatCompletionRequest(messages []Message) *openai.ChatCompletionRequest {
chatCompletionMessages := []openai.ChatCompletionMessage{}
func CreateChatCompletionRequest(system string, messages []Message) *openai.ChatCompletionRequest {
chatCompletionMessages := []openai.ChatCompletionMessage{
{
Role: "system",
Content: system,
},
}
for _, m := range messages {
chatCompletionMessages = append(chatCompletionMessages, openai.ChatCompletionMessage{
Role: m.Role,
@@ -27,11 +33,11 @@ func CreateChatCompletionRequest(messages []Message) *openai.ChatCompletionRequest {
// CreateChatCompletion accepts a slice of Message and returns the response
// of the Large Language Model.
func CreateChatCompletion(messages []Message) (string, error) {
func CreateChatCompletion(system string, messages []Message) (string, error) {
client := openai.NewClient(config.OpenAI.APIKey)
resp, err := client.CreateChatCompletion(
context.Background(),
*CreateChatCompletionRequest(messages),
*CreateChatCompletionRequest(system, messages),
)
if err != nil {
@@ -43,11 +49,11 @@ func CreateChatCompletion(messages []Message) (string, error) {
// CreateChatCompletionStream submits a streaming Chat Completion API request
// and sends the received data to the output channel.
func CreateChatCompletionStream(messages []Message, output chan string) error {
func CreateChatCompletionStream(system string, messages []Message, output chan string) error {
client := openai.NewClient(config.OpenAI.APIKey)
ctx := context.Background()
req := CreateChatCompletionRequest(messages)
req := CreateChatCompletionRequest(system, messages)
req.Stream = true
defer close(output)

View File

@@ -2,7 +2,6 @@ package cli
import (
"fmt"
"strings"
"time"
)
@@ -34,14 +33,12 @@ func ShowWaitAnimation(signal chan any) {
}
// HandleDelayedResponse writes a waiting animation (abusing \r) and the
// (possibly chunked) content received on the response channel to stdout.
// Blocks until the channel is closed.
func HandleDelayedResponse(response chan string) string {
// content received on the response channel to stdout. Blocks until the channel
// is closed.
func HandleDelayedResponse(response chan string) {
waitSignal := make(chan any)
go ShowWaitAnimation(waitSignal)
sb := strings.Builder{}
firstChunk := true
for chunk := range response {
if firstChunk {
@@ -50,27 +47,5 @@ func HandleDelayedResponse(response chan string) string {
firstChunk = false
}
fmt.Print(chunk)
sb.WriteString(chunk)
}
return sb.String()
}
func (m *Message) RenderTTY(paddingDown bool) {
var friendlyRole string
switch m.Role {
case "user":
friendlyRole = "You"
case "system":
friendlyRole = "System"
case "assistant":
friendlyRole = "Assistant"
}
fmt.Printf("<%s>\n\n", friendlyRole)
if m.OriginalContent != "" {
fmt.Print(m.OriginalContent)
}
if paddingDown {
fmt.Print("\n\n")
}
}

View File

@@ -46,5 +46,5 @@ func InputFromEditor(placeholder string, pattern string) (string, error) {
}
}
return strings.Trim(content, "\n \t"), nil
return content, nil
}