Compare commits

..

2 Commits

Author SHA1 Message Date
2f3d95356a Be explicit with openai response choices limit (n parameter) 2023-11-25 13:39:52 -07:00
137c568129 Minor cleanup 2023-11-25 01:26:37 +00:00
2 changed files with 5 additions and 5 deletions

View File

@@ -64,12 +64,11 @@ func LLMRequest(messages []Message) (string, error) {
go HandleDelayedContent(receiver)
response, err := CreateChatCompletionStream(model, messages, maxTokens, receiver)
if err != nil && response != "" {
Warn("Received partial response. Error: %v\n", err)
err = nil // ignore partial response error
}
if response != "" {
if err != nil {
Warn("Received partial response. Error: %v\n", err)
err = nil
}
// there was some content, so break to a new line after it
fmt.Println()
}

View File

@@ -22,6 +22,7 @@ func CreateChatCompletionRequest(model string, messages []Message, maxTokens int
Model: model,
Messages: chatCompletionMessages,
MaxTokens: maxTokens,
N: 1, // limit responses to 1 "choice". we use choices[0] to reference it
}
}