Private
Public Access
1
0

Add support for openrouter reasoning + refactor

- Started work to make it possible to pass in per-model reasoning config
- Cleaned up how we instantiate RequestParameters (TBD: remove
  RequestParameters?)
This commit is contained in:
2025-07-29 00:55:28 +00:00
parent 54da088dee
commit 5335b5c28f
6 changed files with 125 additions and 41 deletions

View File

@@ -12,8 +12,61 @@ type Chunk struct {
TokenCount uint
}
// ModelConfig describes one configured model: the provider and client
// that serve it, plus per-model request defaults (token limit,
// temperature, and whether reasoning is enabled).
type ModelConfig struct {
	// Provider is the provider's name — presumably a key into the
	// application's provider registry; verify against callers.
	Provider string
	// Client is the chat-completion implementation used to talk to
	// this model's provider.
	Client ChatCompletionProvider
	// Model is the provider-side model identifier.
	Model string
	// MaxTokens caps the completion length; copied into
	// RequestParameters by NewRequestParameters.
	MaxTokens int
	// Temperature is the sampling temperature for requests.
	Temperature float32
	// Reasoning toggles reasoning for this model; it becomes
	// ReasoningConfig.Enabled in NewRequestParameters.
	Reasoning bool
}
// NewRequestParameters builds RequestParameters from a ModelConfig,
// carrying over the model name, max-token limit, temperature, and the
// reasoning on/off toggle. Every other RequestParameters field is left
// at its zero value.
func NewRequestParameters(modelConfig ModelConfig) RequestParameters {
	reasoning := ReasoningConfig{Enabled: modelConfig.Reasoning}
	return RequestParameters{
		Model:       modelConfig.Model,
		MaxTokens:   modelConfig.MaxTokens,
		Temperature: modelConfig.Temperature,
		Reasoning:   reasoning,
	}
}
// ReasoningEffort expresses how much reasoning effort to request from a
// model ("high", "medium", or "low").
type ReasoningEffort string

// Supported reasoning-effort levels.
const (
	High   ReasoningEffort = "high"
	Medium ReasoningEffort = "medium"
	Low    ReasoningEffort = "low"
)
// ProviderKind is a bit leaky, it informs the ChatCompletionProvider what
// provider we're on so we know how to format requests, etc.
type ProviderKind string

// Known provider kinds.
const (
	OpenRouter ProviderKind = "openrouter"
	OpenAI     ProviderKind = "openai"
	Anthropic  ProviderKind = "anthropic"
)
// ProviderConfig carries per-provider details that shape how requests
// are built.
type ProviderConfig struct {
	// Kind identifies which provider the request targets so the
	// ChatCompletionProvider knows how to format requests.
	Kind ProviderKind
	// SupportPrefill records whether the provider accepts assistant
	// message prefill — presumably consulted when building requests;
	// not referenced within this view, verify against callers.
	SupportPrefill bool
}
// ReasoningConfig controls reasoning behavior for a single request.
type ReasoningConfig struct {
	// Effort is the requested reasoning-effort level (High/Medium/Low).
	Effort ReasoningEffort
	// MaxTokens presumably caps tokens spent on reasoning, separate
	// from the request-level MaxTokens — TODO confirm provider
	// semantics.
	MaxTokens int
	// Exclude presumably omits reasoning output from the response —
	// verify against the provider API.
	Exclude bool
	// Enabled turns reasoning on; NewRequestParameters sets it from
	// ModelConfig.Reasoning.
	Enabled bool
}
// RequestParameters holds the per-request settings sent to a provider.
// (Closing brace lies beyond this view.)
type RequestParameters struct {
	// NOTE(review): Model is declared twice in this struct (here and
	// two fields below). Duplicate field names do not compile in Go,
	// so this is most likely an artifact of the diff this text was
	// captured from (old context line shown alongside the new one) —
	// confirm against the real source and keep exactly one.
	Model string
	// Provider tells the client how to format the request for the
	// target provider.
	Provider ProviderConfig
	Model string
	// Reasoning configures reasoning for this request.
	Reasoning ReasoningConfig
	// MaxTokens caps the completion length.
	MaxTokens int
	// Temperature is the sampling temperature.
	Temperature float32