From 97f81a0cbb0fbf332aec6031b7ad4f4494f1a6e8 Mon Sep 17 00:00:00 2001
From: Matt Low
Date: Tue, 12 Mar 2024 23:31:48 +0000
Subject: [PATCH] tui: scroll content view with output

clean up msgResponseChunk handling
---
 pkg/tui/tui.go | 26 ++++++++++++++------------
 1 file changed, 14 insertions(+), 12 deletions(-)

diff --git a/pkg/tui/tui.go b/pkg/tui/tui.go
index e5eeb6b..ab15909 100644
--- a/pkg/tui/tui.go
+++ b/pkg/tui/tui.go
@@ -125,19 +125,16 @@ func (m model) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
 		m.updateContent()
 	case msgResponseChunk:
 		chunk := string(msg)
-		if len(m.messages) > 0 {
-			i := len(m.messages) - 1
-			switch m.messages[i].Role {
-			case models.MessageRoleAssistant:
-				m.messages[i].Content += chunk
-			default:
-				m.messages = append(m.messages, models.Message{
-					Role:    models.MessageRoleAssistant,
-					Content: chunk,
-				})
-			}
-			m.updateContent()
+		last := len(m.messages) - 1
+		if last >= 0 && m.messages[last].Role == models.MessageRoleAssistant {
+			m.messages[last].Content += chunk
+		} else {
+			m.messages = append(m.messages, models.Message{
+				Role:    models.MessageRoleAssistant,
+				Content: chunk,
+			})
 		}
+		m.updateContent()
 		cmd = waitForChunk(m.replyChan) // wait for the next chunk
 	case msgResponseEnd:
 		m.replyCancelFunc = nil
@@ -315,7 +312,12 @@ func (m *model) updateContent() {
 			sb.WriteString("\n\n")
 		}
 	}
+	atBottom := m.content.AtBottom()
 	m.content.SetContent(sb.String())
+	if atBottom {
+		// if we were at bottom before the update, scroll with the output
+		m.content.GotoBottom()
+	}
 }
 
 func (m model) inputView() string {