Treat the system message like any other

Remove the system parameter from the ChatCompletion functions, and persist the
system message in conversations as well.
Matt Low 2023-11-05 07:54:12 +00:00
parent 3d518efd6f
commit 2c64ab501b
2 changed files with 30 additions and 22 deletions
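
In short, callers now build the system prompt into the message slice themselves. A minimal sketch of the new calling convention, assuming the Message type and the CreateChatCompletionStream, HandleDelayedResponse, and Fatal helpers that appear in the diff below (the prompt text and user content here are placeholders):

	// Sketch only: the system prompt is an ordinary message with Role "system",
	// passed in the same slice as the user message.
	const system = "You are a helpful assistant."

	messages := []Message{
		{Role: "system", OriginalContent: system},
		{Role: "user", OriginalContent: "Summarize this repository."},
	}

	receiver := make(chan string)
	go HandleDelayedResponse(receiver)

	// No separate system argument anymore; it travels with the messages.
	if err := CreateChatCompletionStream(messages, receiver); err != nil {
		Fatal("%v\n", err)
	}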

View File

@@ -84,17 +84,26 @@ var newCmd = &cobra.Command{
 			Fatal("Could not save new conversation: %v\n", err)
 		}
 
-		message := Message{
+		const system = "You are a helpful assistant."
+
+		messages := []Message{
+			{
+				ConversationID:  conversation.ID,
+				Role:            "system",
+				OriginalContent: system,
+			},
+			{
 				ConversationID:  conversation.ID,
 				Role:            "user",
 				OriginalContent: messageContents,
+			},
 		}
 
+		for _, message := range(messages) {
 			err = store.SaveMessage(&message)
 			if err != nil {
-				Warn("Could not save message: %v\n", err)
+				Warn("Could not save %s message: %v\n", message.Role, err)
+			}
 		}
 
-		const system = "You are a helpful assistant."
-
 		fmt.Printf("<System>\n\n%s\n\n", system)
 		fmt.Printf("<You>\n\n%s\n\n", messageContents)
 		fmt.Print("<Assistant>\n\n")

@@ -105,7 +114,7 @@ var newCmd = &cobra.Command{
 			response <- HandleDelayedResponse(receiver)
 		}()
 
-		err = CreateChatCompletionStream(system, []Message{message}, receiver)
+		err = CreateChatCompletionStream(messages, receiver)
 		if err != nil {
 			Fatal("%v\n", err)
 		}

@@ -134,7 +143,12 @@ var promptCmd = &cobra.Command{
 			Fatal("No message was provided.\n")
 		}
 
+		const system = "You are a helpful assistant."
+
 		messages := []Message{
+			{
+				Role:            "system",
+				OriginalContent: system,
+			},
 			{
 				Role:            "user",
 				OriginalContent: message,

@@ -143,7 +157,7 @@ var promptCmd = &cobra.Command{
 		receiver := make(chan string)
 		go HandleDelayedResponse(receiver)
 
-		err := CreateChatCompletionStream("You are a helpful assistant.", messages, receiver)
+		err := CreateChatCompletionStream(messages, receiver)
 		if err != nil {
 			Fatal("%v\n", err)
 		}

View File

@@ -8,14 +8,8 @@ import (
 	openai "github.com/sashabaranov/go-openai"
 )
 
-func CreateChatCompletionRequest(system string, messages []Message) *openai.ChatCompletionRequest {
-	chatCompletionMessages := []openai.ChatCompletionMessage{
-		{
-			Role:    "system",
-			Content: system,
-		},
-	}
-
+func CreateChatCompletionRequest(messages []Message) *openai.ChatCompletionRequest {
+	chatCompletionMessages := []openai.ChatCompletionMessage{}
 	for _, m := range messages {
 		chatCompletionMessages = append(chatCompletionMessages, openai.ChatCompletionMessage{
 			Role:    m.Role,

@@ -33,11 +27,11 @@ func CreateChatCompletionRequest(system string, messages []Message) *openai.ChatCompletionRequest {
 
 // CreateChatCompletion accepts a slice of Message and returns the response
 // of the Large Language Model.
-func CreateChatCompletion(system string, messages []Message) (string, error) {
+func CreateChatCompletion(messages []Message) (string, error) {
 	client := openai.NewClient(config.OpenAI.APIKey)
 
 	resp, err := client.CreateChatCompletion(
 		context.Background(),
-		*CreateChatCompletionRequest(system, messages),
+		*CreateChatCompletionRequest(messages),
 	)
 
 	if err != nil {

@@ -49,11 +43,11 @@ func CreateChatCompletion(system string, messages []Message) (string, error) {
 
 // CreateChatCompletionStream submits an streaming Chat Completion API request
 // and sends the received data to the output channel.
-func CreateChatCompletionStream(system string, messages []Message, output chan string) error {
+func CreateChatCompletionStream(messages []Message, output chan string) error {
 	client := openai.NewClient(config.OpenAI.APIKey)
 	ctx := context.Background()
 
-	req := CreateChatCompletionRequest(system, messages)
+	req := CreateChatCompletionRequest(messages)
 	req.Stream = true
 
 	defer close(output)
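
For reference, a sketch of CreateChatCompletionRequest as it reads after this commit, reconstructed from the hunks above; the Content mapping and the Model field are not visible in this diff and are assumptions:

	func CreateChatCompletionRequest(messages []Message) *openai.ChatCompletionRequest {
		// Build the API messages purely from the stored conversation; no implicit
		// system message is prepended anymore.
		chatCompletionMessages := []openai.ChatCompletionMessage{}
		for _, m := range messages {
			chatCompletionMessages = append(chatCompletionMessages, openai.ChatCompletionMessage{
				Role:    m.Role,
				Content: m.OriginalContent, // assumed mapping; not shown in the diff
			})
		}

		return &openai.ChatCompletionRequest{
			Model:    openai.GPT3Dot5Turbo, // assumed; the model choice is outside this diff
			Messages: chatCompletionMessages,
		}
	}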