package api

import (
	"context"

	"git.mlow.ca/mlow/lmcli/pkg/lmcli/model"
)

// ReplyCallback is called with each complete reply message produced during a
// completion request.
type ReplyCallback func(model.Message)

// Chunk is a piece of streamed response content, along with the number of
// tokens it represents.
type Chunk struct {
	Content    string
	TokenCount uint
}

// ChatCompletionClient performs chat completion requests.
type ChatCompletionClient interface {
	// CreateChatCompletion requests a response to the provided messages.
	// Each reply is passed to the given callback, and the complete
	// user-facing response is returned as a string.
	CreateChatCompletion(
		ctx context.Context,
		params model.RequestParameters,
		messages []model.Message,
		callback ReplyCallback,
	) (string, error)

	// Like CreateChatCompletion, except the response is streamed via
	// the output channel as it's received.
	CreateChatCompletionStream(
		ctx context.Context,
		params model.RequestParameters,
		messages []model.Message,
		callback ReplyCallback,
		output chan<- Chunk,
	) (string, error)
}
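
// The sketch below is illustrative and not part of the original interface: it
// shows one way a caller might drive CreateChatCompletionStream, assuming the
// implementation sends chunks on the output channel while the call is in
// flight and leaves closing the channel to the caller. The onChunk parameter
// is a hypothetical hook for incremental display (e.g. writing to a terminal).
func exampleStreamUsage(
	ctx context.Context,
	client ChatCompletionClient,
	params model.RequestParameters,
	messages []model.Message,
	onChunk func(Chunk),
) (string, []model.Message, error) {
	output := make(chan Chunk)
	done := make(chan struct{})

	// Forward streamed chunks to the caller's hook as they arrive.
	go func() {
		defer close(done)
		for chunk := range output {
			onChunk(chunk)
		}
	}()

	// Collect each completed reply message delivered via the ReplyCallback.
	var replies []model.Message
	response, err := client.CreateChatCompletionStream(ctx, params, messages, func(m model.Message) {
		replies = append(replies, m)
	}, output)

	// The call has returned, so no further chunks will be sent.
	close(output)
	<-done
	return response, replies, err
}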