Private
Public Access
1
0
Files
lmcli/pkg/provider/provider.go
Matt Low 5335b5c28f Add support for openrouter reasoning + refactor
Started work to make it possible to pass in per-model reasoning config
Cleaned up how we instantiate RequestParameters (TBD: remove
RequestParameters?)
2025-07-29 00:55:28 +00:00

96 lines
1.9 KiB
Go

package provider
import (
"context"
"git.mlow.ca/mlow/lmcli/pkg/api"
)
// Chunk is one incremental piece of a chat completion, delivered over the
// output channel of ChatCompletionProvider.CreateChatCompletionStream.
type Chunk struct {
	// Content is the completion text delta for this chunk.
	Content string
	// ReasoningContent is the reasoning ("thinking") text delta, kept
	// separate from the regular completion content.
	ReasoningContent string
	// TokenCount is the number of tokens this chunk accounts for.
	// NOTE(review): exact semantics (content vs. content+reasoning) depend
	// on the provider implementation — confirm against callers.
	TokenCount uint
}
// ModelConfig ties a model identifier to the provider/client used to reach
// it, along with per-model generation settings. It is the input to
// NewRequestParameters.
type ModelConfig struct {
	// Provider is the configured provider's name.
	Provider string
	// Client is the provider implementation used to perform requests.
	Client ChatCompletionProvider
	// Model is the model identifier sent to the provider.
	Model string
	// MaxTokens limits the number of tokens generated (0 presumably means
	// provider default — TODO confirm).
	MaxTokens int
	// Temperature is the sampling temperature.
	Temperature float32
	// Reasoning enables reasoning ("thinking") output for this model.
	Reasoning bool
}
// NewRequestParameters builds a RequestParameters from the given model
// configuration, carrying over the model name, token limit, temperature,
// and whether reasoning is enabled. All other fields are left at their
// zero values.
func NewRequestParameters(modelConfig ModelConfig) RequestParameters {
	reasoning := ReasoningConfig{Enabled: modelConfig.Reasoning}
	return RequestParameters{
		Model:       modelConfig.Model,
		MaxTokens:   modelConfig.MaxTokens,
		Temperature: modelConfig.Temperature,
		Reasoning:   reasoning,
	}
}
// ReasoningEffort describes how much effort a model should spend on
// reasoning before answering.
type ReasoningEffort string

// Supported reasoning effort levels.
const (
	High   ReasoningEffort = "high"
	Medium ReasoningEffort = "medium"
	Low    ReasoningEffort = "low"
)
// ProviderKind is a bit leaky: it informs the ChatCompletionProvider which
// upstream provider we're talking to, so requests can be formatted
// accordingly (e.g. provider-specific reasoning parameters).
type ProviderKind string

// Known provider kinds.
const (
	OpenRouter ProviderKind = "openrouter"
	OpenAI     ProviderKind = "openai"
	Anthropic  ProviderKind = "anthropic"
)
// ProviderConfig carries provider-level capabilities and identity that
// affect how requests are constructed.
type ProviderConfig struct {
	// Kind identifies which upstream provider this is (see ProviderKind).
	Kind ProviderKind
	// SupportPrefill reports whether the provider supports prefilling
	// (seeding) the assistant response.
	SupportPrefill bool
}
// ReasoningConfig controls a model's reasoning ("thinking") behavior for a
// single request.
type ReasoningConfig struct {
	// Effort selects a named effort level (high/medium/low), if set.
	Effort ReasoningEffort
	// MaxTokens caps the number of tokens spent on reasoning.
	// NOTE(review): presumably mutually exclusive with Effort on some
	// providers — confirm against provider implementations.
	MaxTokens int
	// Exclude requests that reasoning output be omitted from the response.
	Exclude bool
	// Enabled turns reasoning on or off for the request.
	Enabled bool
}
// RequestParameters are the per-request settings passed to a
// ChatCompletionProvider alongside the message history.
type RequestParameters struct {
	// Provider describes the provider being targeted, so the client knows
	// how to format the request.
	Provider ProviderConfig
	// Model is the model identifier sent to the provider.
	Model string
	// Reasoning configures reasoning ("thinking") for this request.
	Reasoning ReasoningConfig
	// MaxTokens limits the number of tokens generated.
	MaxTokens int
	// Temperature is the sampling temperature.
	Temperature float32
	// TopP is the nucleus sampling parameter.
	TopP float32
	// Toolbox lists the tools made available to the model.
	Toolbox []api.ToolSpec
}
// ChatCompletionProvider is implemented by each supported LLM provider
// client, generating chat completions either as a single response or as a
// stream of chunks.
type ChatCompletionProvider interface {
	// CreateChatCompletion generates a chat completion response to the
	// provided messages.
	CreateChatCompletion(
		ctx context.Context,
		params RequestParameters,
		messages []api.Message,
	) (*api.Message, error)

	// CreateChatCompletionStream is like CreateChatCompletion, except the
	// response is streamed incrementally via the chunks channel; the
	// complete message is still returned once the stream finishes.
	CreateChatCompletionStream(
		ctx context.Context,
		params RequestParameters,
		messages []api.Message,
		chunks chan<- Chunk,
	) (*api.Message, error)
}