Private
Public Access
1
0

Rough-in support for deepseek-style separate reasoning output

This commit is contained in:
2025-01-25 19:18:52 +00:00
parent fb3edad0c3
commit 9372c1d2c0
10 changed files with 94 additions and 39 deletions

View File

@@ -443,5 +443,5 @@ func convertResponseToMessage(resp ChatCompletionResponse) (*api.Message, error)
return api.NewMessageWithToolCalls(content.String(), toolCalls), nil
}
return api.NewMessageWithAssistant(content.String()), nil
return api.NewMessageWithAssistant(content.String(), ""), nil
}

View File

@@ -340,7 +340,7 @@ func (c *Client) CreateChatCompletion(
return api.NewMessageWithToolCalls(content, convertToolCallToAPI(toolCalls)), nil
}
return api.NewMessageWithAssistant(content), nil
return api.NewMessageWithAssistant(content, ""), nil
}
func (c *Client) CreateChatCompletionStream(
@@ -432,5 +432,5 @@ func (c *Client) CreateChatCompletionStream(
return api.NewMessageWithToolCalls(content.String(), convertToolCallToAPI(toolCalls)), nil
}
return api.NewMessageWithAssistant(content.String()), nil
return api.NewMessageWithAssistant(content.String(), ""), nil
}

View File

@@ -115,7 +115,7 @@ func (c *OllamaClient) CreateChatCompletion(
return nil, err
}
return api.NewMessageWithAssistant(completionResp.Message.Content), nil
return api.NewMessageWithAssistant(completionResp.Message.Content, ""), nil
}
func (c *OllamaClient) CreateChatCompletionStream(
@@ -179,5 +179,5 @@ func (c *OllamaClient) CreateChatCompletionStream(
}
}
return api.NewMessageWithAssistant(content.String()), nil
return api.NewMessageWithAssistant(content.String(), ""), nil
}

View File

@@ -21,10 +21,11 @@ type OpenAIClient struct {
}
type ChatCompletionMessage struct {
Role string `json:"role"`
Content string `json:"content,omitempty"`
ToolCalls []ToolCall `json:"tool_calls,omitempty"`
ToolCallID string `json:"tool_call_id,omitempty"`
Role string `json:"role"`
Content string `json:"content,omitempty"`
ReasoningContent string `json:"reasoning_content,omitempty"`
ToolCalls []ToolCall `json:"tool_calls,omitempty"`
ToolCallID string `json:"tool_call_id,omitempty"`
}
type ToolCall struct {
@@ -256,7 +257,7 @@ func (c *OpenAIClient) CreateChatCompletion(
return api.NewMessageWithToolCalls(content, convertToolCallToAPI(toolCalls)), nil
}
return api.NewMessageWithAssistant(content), nil
return api.NewMessageWithAssistant(content, ""), nil
}
func (c *OpenAIClient) CreateChatCompletionStream(
@@ -279,6 +280,7 @@ func (c *OpenAIClient) CreateChatCompletionStream(
defer resp.Body.Close()
content := strings.Builder{}
reasoning := strings.Builder{}
toolCalls := []ToolCall{}
lastMessage := messages[len(messages)-1]
@@ -333,11 +335,18 @@ func (c *OpenAIClient) CreateChatCompletionStream(
}
content.WriteString(delta.Content)
}
if len(delta.ReasoningContent) > 0 {
output <- provider.Chunk{
ReasoningContent: delta.ReasoningContent,
TokenCount: 1,
}
reasoning.WriteString(delta.ReasoningContent)
}
}
if len(toolCalls) > 0 {
return api.NewMessageWithToolCalls(content.String(), convertToolCallToAPI(toolCalls)), nil
}
return api.NewMessageWithAssistant(content.String()), nil
return api.NewMessageWithAssistant(content.String(), reasoning.String()), nil
}

View File

@@ -7,8 +7,9 @@ import (
)
type Chunk struct {
Content string
TokenCount uint
Content string
ReasoningContent string
TokenCount uint
}
type RequestParameters struct {