Private
Public Access
1
0

Rough-in support for deepseek-style separate reasoning output

This commit is contained in:
2025-01-25 19:18:52 +00:00
parent fb3edad0c3
commit 9372c1d2c0
10 changed files with 94 additions and 39 deletions

View File

@@ -33,6 +33,14 @@ func (m *Model) setMessageContents(i int, content string) {
m.messageCache[i] = m.renderMessage(i)
}
// setReasoningContents overwrites the reasoning text of the i-th message
// and immediately re-renders that message into the cache so the view
// reflects the change. Panics when i is past the end of the message list
// (programmer error; callers are expected to pass a valid index).
func (m *Model) setReasoningContents(i int, content string) {
	if i > len(m.App.Messages)-1 {
		panic("i out of range")
	}
	msg := &m.App.Messages[i]
	msg.ReasoningContent = content
	// keep the rendered cache entry in sync with the updated message
	m.messageCache[i] = m.renderMessage(i)
}
func (m *Model) rebuildMessageCache() {
m.messageCache = make([]string, len(m.App.Messages))
for i := range m.App.Messages {
@@ -108,7 +116,7 @@ func (m *Model) Update(msg tea.Msg) (shared.ViewModel, tea.Cmd) {
case msgChatResponseChunk:
cmds = append(cmds, m.waitForResponseChunk()) // wait for the next chunk
if msg.Content == "" {
if msg.Content == "" && msg.ReasoningContent == "" {
// skip empty chunks
break
}
@@ -116,19 +124,27 @@ func (m *Model) Update(msg tea.Msg) (shared.ViewModel, tea.Cmd) {
last := len(m.App.Messages) - 1
if last >= 0 && m.App.Messages[last].Role.IsAssistant() {
// append chunk to existing message
m.setMessageContents(last, m.App.Messages[last].Content+msg.Content)
if msg.Content != "" {
m.setMessageContents(last, m.App.Messages[last].Content+msg.Content)
}
if msg.ReasoningContent != "" {
m.setReasoningContents(last, m.App.Messages[last].ReasoningContent+msg.ReasoningContent)
}
} else {
// use chunk in a new message
m.addMessage(conversation.Message{
Role: api.MessageRoleAssistant,
Content: msg.Content,
ReasoningContent: msg.ReasoningContent,
})
}
m.updateContent()
// show cursor and reset blink interval (simulate typing)
m.replyCursor.Blink = false
cmds = append(cmds, m.replyCursor.BlinkCmd())
if msg.ReasoningContent == "" || m.showDetails {
m.replyCursor.Blink = false
cmds = append(cmds, m.replyCursor.BlinkCmd())
}
m.tokenCount += msg.TokenCount
m.elapsed = time.Now().Sub(m.startTime)
@@ -137,6 +153,7 @@ func (m *Model) Update(msg tea.Msg) (shared.ViewModel, tea.Cmd) {
reply := conversation.Message(msg)
reply.Content = strings.TrimSpace(reply.Content)
reply.ReasoningContent = strings.TrimSpace(reply.ReasoningContent)
last := len(m.App.Messages) - 1
if last < 0 {

View File

@@ -116,19 +116,42 @@ func (m *Model) renderMessage(i int) string {
// Write message contents
sb := &strings.Builder{}
sb.Grow(len(msg.Content) * 2)
sb.Grow((len(msg.Content) + len(msg.ReasoningContent) * 2))
isLast := i == len(m.App.Messages)-1
isAssistant := msg.Role == api.MessageRoleAssistant
hasReasoning := msg.ReasoningContent != ""
if hasReasoning {
reasoning := strings.Builder{}
reasoning.WriteString("<thinking>\n")
if m.showDetails {
//_ = m.App.Ctx.Chroma.Highlight(sb, msg.ReasoningContent)
reasoning.WriteString(msg.ReasoningContent)
} else {
reasoning.WriteString("...")
}
if m.state == pendingResponse && isLast && isAssistant && msg.Content == "" {
// Show the assistant's cursor
reasoning.WriteString(m.replyCursor.View())
}
reasoning.WriteString("\n</thinking>")
_ = m.App.Ctx.Chroma.Highlight(sb, reasoning.String())
}
if msg.Content != "" {
if hasReasoning {
sb.WriteString("\n\n")
}
err := m.App.Ctx.Chroma.Highlight(sb, msg.Content)
if err != nil {
// This would wipe out the thinking text
sb.Reset()
sb.WriteString(msg.Content)
}
}
isLast := i == len(m.App.Messages)-1
isAssistant := msg.Role == api.MessageRoleAssistant
if m.state == pendingResponse && isLast && isAssistant {
if m.state == pendingResponse && isLast && isAssistant && (!hasReasoning || msg.Content != "") {
// Show the assistant's cursor
sb.WriteString(m.replyCursor.View())
}