Fixed regression from 3536438d

We were sending an empty string to the output channel whenever a `ping`
message was received from Anthropic's API. This broke the TUI: the
empty-chunk check in the update handler bailed out early, and because
the wait for the next chunk was only queued later in the handler,
receiving an empty chunk meant we mistakenly stopped waiting for any
future chunks.
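For context on the failure mode: the TUI pulls streamed chunks off a
channel using Bubble Tea's command pattern, where each chunk message
handled in Update must re-queue a command that waits for the next one.
Below is a minimal sketch of the pre-fix shape, assuming a channel-fed
model; the field and helper names are illustrative, not the project's
actual identifiers.

package sketch

import tea "github.com/charmbracelet/bubbletea"

// msgResponseChunk carries one streamed chunk of assistant text.
type msgResponseChunk string

type model struct {
	replyChunks chan string // fed by the streaming API client
	reply       string
}

// waitForChunk blocks until the next chunk arrives. Update must
// re-issue it after every chunk or the stream silently stalls.
func (m model) waitForChunk() tea.Cmd {
	return func() tea.Msg {
		return msgResponseChunk(<-m.replyChunks)
	}
}

// Pre-fix shape: the empty-chunk guard returns before waitForChunk is
// re-queued, so the "" written on a ping ended the stream for good.
func (m model) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
	switch msg := msg.(type) {
	case msgResponseChunk:
		if msg == "" {
			return m, nil // bug: nothing is left waiting for chunks
		}
		m.reply += string(msg)
		return m, m.waitForChunk() // only reached for non-empty chunks
	}
	return m, nil
}

func (m model) Init() tea.Cmd { return m.waitForChunk() }
func (m model) View() string  { return m.reply }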

This commit stops sending an empty string on Anthropic's `ping`
messages, and updates the handling of msgResponseChunk and
msgAssistantReply so it is less likely that we forget to wait for the
next chunk/reply.
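Restated against the same sketch as above (same illustrative names),
the fixed ordering queues the wait before any guard can bail out of
the case, so an early break can no longer strand the stream:

// Post-fix shape, reusing model, msgResponseChunk, and waitForChunk
// from the sketch above.
func (m model) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
	var cmds []tea.Cmd
	switch msg := msg.(type) {
	case msgResponseChunk:
		// Queued first and unconditionally: every path out of this
		// case now keeps listening for the next chunk.
		cmds = append(cmds, m.waitForChunk())
		if msg == "" {
			break
		}
		m.reply += string(msg)
	}
	return m, tea.Batch(cmds...)
}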
Matt Low 2024-05-30 18:52:23 +00:00
parent 58e1b84fea
commit 38fed741af
2 changed files with 5 additions and 5 deletions

@@ -229,8 +229,7 @@ func (c *AnthropicClient) CreateChatCompletionStream(
 		case "message_start":
 			// noop
 		case "ping":
-			// write an empty string to signal start of text
-			output <- ""
+			// signals start of text - currently ignoring
 		case "content_block_start":
 			// ignore?
 		case "content_block_delta":

@@ -278,6 +278,8 @@ func (m Model) Update(msg tea.Msg) (Model, tea.Cmd) {
 		m.updateContent()
 		m.content.GotoBottom()
 	case msgResponseChunk:
+		cmds = append(cmds, m.waitForChunk()) // wait for the next chunk
 		chunk := string(msg)
 		if chunk == "" {
 			break
@@ -295,7 +297,6 @@ func (m Model) Update(msg tea.Msg) (Model, tea.Cmd) {
 			})
 		}
 		m.updateContent()
-		cmds = append(cmds, m.waitForChunk()) // wait for the next chunk
 		// show cursor and reset blink interval (simulate typing)
 		m.replyCursor.Blink = false
@@ -304,7 +305,8 @@ func (m Model) Update(msg tea.Msg) (Model, tea.Cmd) {
 		m.tokenCount++
 		m.elapsed = time.Now().Sub(m.startTime)
 	case msgAssistantReply:
 		// the last reply that was being worked on is finished
+		cmds = append(cmds, m.waitForReply()) // wait for the next reply
 		reply := models.Message(msg)
 		reply.Content = strings.TrimSpace(reply.Content)
@@ -332,7 +334,6 @@ func (m Model) Update(msg tea.Msg) (Model, tea.Cmd) {
 		}
 		m.updateContent()
-		cmds = append(cmds, m.waitForReply())
 	case msgResponseEnd:
 		m.waitingForReply = false
 		last := len(m.messages) - 1