package main

import (
	"context"
	"errors"
	"fmt"
	"io"
	"os"

	openai "github.com/sashabaranov/go-openai"
)

// buildMessages converts the conversation history into the go-openai message
// type, prepending the system prompt (when non-empty) so the model actually
// receives it.
func buildMessages(system string, messages []Message) []openai.ChatCompletionMessage {
	out := make([]openai.ChatCompletionMessage, 0, len(messages)+1)
	if system != "" {
		out = append(out, openai.ChatCompletionMessage{
			Role:    openai.ChatMessageRoleSystem,
			Content: system,
		})
	}
	for _, m := range messages {
		out = append(out, openai.ChatCompletionMessage{
			Role:    m.Role,
			Content: m.OriginalContent,
		})
	}
	return out
}

// CreateChatCompletion sends the system prompt and conversation history to
// the OpenAI chat completion API and returns the model's full reply.
//
// The API key is read from the OPENAI_APIKEY environment variable. Errors
// from the API are wrapped with %w so callers can use errors.Is / errors.As.
func CreateChatCompletion(system string, messages []Message) (string, error) {
	client := openai.NewClient(os.Getenv("OPENAI_APIKEY"))

	resp, err := client.CreateChatCompletion(
		context.Background(),
		openai.ChatCompletionRequest{
			Model: openai.GPT4,
			// Bug fix: `system` was previously accepted but never sent.
			Messages: buildMessages(system, messages),
		},
	)
	if err != nil {
		return "", fmt.Errorf("chat completion: %w", err)
	}
	// Guard against an empty Choices slice to avoid an index-out-of-range panic.
	if len(resp.Choices) == 0 {
		return "", errors.New("chat completion: response contained no choices")
	}
	return resp.Choices[0].Message.Content, nil
}

// CreateChatCompletionStream streams the model's reply chunk-by-chunk to
// output, returning nil once the stream is exhausted (io.EOF) or the first
// error encountered.
//
// The API key is read from the OPENAI_APIKEY environment variable.
func CreateChatCompletionStream(system string, messages []Message, output io.Writer) error {
	client := openai.NewClient(os.Getenv("OPENAI_APIKEY"))
	ctx := context.Background()

	req := openai.ChatCompletionRequest{
		Model: openai.GPT3Dot5Turbo,
		// NOTE(review): MaxTokens of 20 truncates replies after a few words —
		// looks like a leftover from sample code; confirm the intended limit.
		MaxTokens: 20,
		// Bug fix: `system` was previously accepted but never sent.
		Messages: buildMessages(system, messages),
		Stream:   true,
	}
	stream, err := client.CreateChatCompletionStream(ctx, req)
	if err != nil {
		return fmt.Errorf("starting chat completion stream: %w", err)
	}
	defer stream.Close()

	for {
		response, err := stream.Recv()
		if errors.Is(err, io.EOF) {
			return nil // stream finished normally
		}
		if err != nil {
			return fmt.Errorf("receiving stream chunk: %w", err)
		}
		// Defensive: skip chunks that carry no choices rather than panicking.
		if len(response.Choices) == 0 {
			continue
		}
		if _, err := fmt.Fprint(output, response.Choices[0].Delta.Content); err != nil {
			return fmt.Errorf("writing stream chunk: %w", err)
		}
	}
}