tui: scroll content view with output
clean up msgResponseChunk handling
commit 97f81a0cbb
parent eca120cde6

@@ -125,19 +125,16 @@ func (m model) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
 		m.updateContent()
 	case msgResponseChunk:
 		chunk := string(msg)
-		if len(m.messages) > 0 {
-			i := len(m.messages) - 1
-			switch m.messages[i].Role {
-			case models.MessageRoleAssistant:
-				m.messages[i].Content += chunk
-			default:
+		last := len(m.messages) - 1
+		if last >= 0 && m.messages[last].Role == models.MessageRoleAssistant {
+			m.messages[last].Content += chunk
+		} else {
 			m.messages = append(m.messages, models.Message{
 				Role:    models.MessageRoleAssistant,
 				Content: chunk,
 			})
 		}
 		m.updateContent()
-		}
 		cmd = waitForChunk(m.replyChan) // wait for the next chunk
 	case msgResponseEnd:
 		m.replyCancelFunc = nil
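The `cmd = waitForChunk(m.replyChan)` line re-arms the stream after every chunk, but waitForChunk itself is outside this hunk. Below is a minimal sketch of the usual Bubble Tea channel-pump pattern that call suggests; the chunk element type (string) and the exact definition of msgResponseChunk are assumptions, though the diff's `chunk := string(msg)` points at a string-backed message type.

package tui

import tea "github.com/charmbracelet/bubbletea"

// Assumed shape: the diff converts the message with string(msg), so the
// chunk message is most likely a string-backed type like this.
type msgResponseChunk string

// waitForChunk returns a command that blocks until the next chunk arrives on
// the reply channel and delivers it to Update as a msgResponseChunk. Update
// appends the chunk to the last assistant message and issues waitForChunk
// again, so chunks keep flowing until msgResponseEnd arrives.
func waitForChunk(ch <-chan string) tea.Cmd {
	return func() tea.Msg {
		return msgResponseChunk(<-ch)
	}
}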
@@ -315,7 +312,12 @@ func (m *model) updateContent() {
 			sb.WriteString("\n\n")
 		}
 	}
+	atBottom := m.content.AtBottom()
 	m.content.SetContent(sb.String())
+	if atBottom {
+		// if we were at bottom before the update, scroll with the output
+		m.content.GotoBottom()
+	}
 }
 
 func (m model) inputView() string {
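Note that the scroll fix reads AtBottom before SetContent: appending output grows the content, so checking afterwards would usually report that the view is no longer at the bottom and the viewport would never follow. The method names match the bubbles viewport component, though the diff does not show the field's type; a sketch of the same pattern factored into a helper, under that assumption:

package tui

import "github.com/charmbracelet/bubbles/viewport"

// setContentFollowingOutput is not part of the commit; it restates the
// pattern from updateContent as a helper, assuming the content view is a
// bubbles viewport.Model.
func setContentFollowingOutput(v *viewport.Model, s string) {
	// Capture the scroll position before the content changes.
	atBottom := v.AtBottom()
	v.SetContent(s)
	if atBottom {
		// Only follow the output if the user had not scrolled up to read
		// earlier messages.
		v.GotoBottom()
	}
}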