package cli

import (
	"context"
	"errors"
	"io"
	"strings"

	openai "github.com/sashabaranov/go-openai"
)

// CreateChatCompletionRequest builds an openai.ChatCompletionRequest for the
// given model, converting each Message into a chat completion message and
// applying the max token limit.
func CreateChatCompletionRequest(model string, messages []Message, maxTokens int) openai.ChatCompletionRequest {
	chatCompletionMessages := []openai.ChatCompletionMessage{}
	for _, m := range messages {
		chatCompletionMessages = append(chatCompletionMessages, openai.ChatCompletionMessage{
			Role:    m.Role,
			Content: m.OriginalContent,
		})
	}
	return openai.ChatCompletionRequest{
		Model:     model,
		Messages:  chatCompletionMessages,
		MaxTokens: maxTokens,
	}
}

// CreateChatCompletion submits a Chat Completion API request and returns the
// response.
func CreateChatCompletion(model string, messages []Message, maxTokens int) (string, error) {
	client := openai.NewClient(*config.OpenAI.APIKey)
	req := CreateChatCompletionRequest(model, messages, maxTokens)

	resp, err := client.CreateChatCompletion(context.Background(), req)
	if err != nil {
		return "", err
	}

	return resp.Choices[0].Message.Content, nil
}

// CreateChatCompletionStream submits a streaming Chat Completion API request
// and both returns and streams the response to the provided output channel.
// May return a partial response if an error occurs mid-stream.
func CreateChatCompletionStream(model string, messages []Message, maxTokens int, output chan<- string) (string, error) {
	client := openai.NewClient(*config.OpenAI.APIKey)
	req := CreateChatCompletionRequest(model, messages, maxTokens)

	// Close the output channel on every return path so a caller ranging over
	// it always terminates.
	defer close(output)

	stream, err := client.CreateChatCompletionStream(context.Background(), req)
	if err != nil {
		return "", err
	}
	defer stream.Close()

	sb := strings.Builder{}
	for {
		response, e := stream.Recv()
		if errors.Is(e, io.EOF) {
			// The server finished the stream normally.
			break
		}
		if e != nil {
			// Keep whatever has been received so far and report the error.
			err = e
			break
		}

		chunk := response.Choices[0].Delta.Content
		output <- chunk
		sb.WriteString(chunk)
	}

	return sb.String(), err
}
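
// streamToWriterExample is an illustrative sketch, not part of the original
// file: it shows one way a caller might consume CreateChatCompletionStream,
// draining the output channel into an io.Writer from a separate goroutine
// while the request runs. The helper and its name are hypothetical; it relies
// only on this package's Message type and the existing io import.
func streamToWriterExample(w io.Writer, model string, messages []Message, maxTokens int) (string, error) {
	output := make(chan string)
	done := make(chan struct{})

	// Drain the channel concurrently. CreateChatCompletionStream closes
	// output when it returns, which ends this range loop.
	go func() {
		defer close(done)
		for chunk := range output {
			io.WriteString(w, chunk) // write errors ignored in this sketch
		}
	}()

	full, err := CreateChatCompletionStream(model, messages, maxTokens, output)
	<-done // make sure the last chunk has been written before returning

	return full, err
}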