tui: add LLM response error handling

+ various other small tweaks
This commit is contained in:
Matt Low 2024-03-14 03:07:41 +00:00
parent 000a2ec6f2
commit 377a4f1dfa

View File

@@ -66,6 +66,8 @@ type (
 	msgResponseChunk string
 	// sent when response is finished being received
 	msgResponseEnd string
+	// a special case of msgError that stops the response waiting animation
+	msgResponseError error
 	// sent on each completed reply
 	msgAssistantReply models.Message
 	// sent when a conversation is (re)loaded
@@ -198,6 +200,11 @@ func (m model) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
 		m.replyCancelFunc = nil
 		m.waitingForReply = false
 		m.status = "Press ctrl+s to send"
+	case msgResponseError:
+		m.replyCancelFunc = nil
+		m.waitingForReply = false
+		m.status = "Press ctrl+s to send"
+		m.err = error(msg)
 	case msgConversationTitleChanged:
 		title := string(msg)
 		m.conversation.Title = title
@@ -288,7 +295,7 @@ func (m *model) headerView() string {
 	}
 	part := titleStyle.Render(title)
-	return headerStyle.Width(m.content.Width).Render(part)
+	return headerStyle.Width(m.width).Render(part)
 }
 func (m *model) contentView() string {
@@ -300,10 +307,10 @@ func (m *model) errorView() string {
 		return ""
 	}
 	return lipgloss.NewStyle().
+		Width(m.width).
+		AlignHorizontal(lipgloss.Center).
 		Bold(true).
 		Foreground(lipgloss.Color("1")).
-		Width(m.content.Width).
-		AlignHorizontal(lipgloss.Center).
 		Render(fmt.Sprintf("%s", m.err))
 }
@@ -312,12 +319,12 @@ func (m *model) scrollbarView() string {
 		return ""
 	}
-	count := int(m.content.ScrollPercent() * float64(m.content.Width-2))
+	count := int(m.content.ScrollPercent() * float64(m.width-2))
 	fill := strings.Repeat("-", count)
 	return lipgloss.NewStyle().
+		Width(m.width).
 		PaddingLeft(1).
 		PaddingRight(1).
-		Width(m.content.Width).
 		Render(fill)
 }
@@ -381,6 +388,7 @@ func initialModel(ctx *lmcli.Context, convShortname string) model {
 	m.input.FocusedStyle.CursorLine = lipgloss.NewStyle()
 	m.input.ShowLineNumbers = false
+	m.input.SetHeight(4)
 	m.input.Focus()
 	m.updateContent()
@@ -458,10 +466,10 @@ func (m *model) loadConversation(shortname string) tea.Cmd {
 	}
 	c, err := m.ctx.Store.ConversationByShortName(shortname)
 	if err != nil {
-		return msgError(fmt.Errorf("Could not lookup conversation: %v\n", err))
+		return msgError(fmt.Errorf("Could not lookup conversation: %v", err))
 	}
 	if c.ID == 0 {
-		return msgError(fmt.Errorf("Conversation not found with short name: %s\n", shortname))
+		return msgError(fmt.Errorf("Conversation not found: %s", shortname))
 	}
 	return msgConversationLoaded(c)
 }
@@ -520,11 +528,13 @@ func (m *model) promptLLM() tea.Cmd {
 		ctx, replyCancelFunc := context.WithCancel(context.Background())
 		m.replyCancelFunc = replyCancelFunc
-		// TODO: handle error
-		resp, _ := completionProvider.CreateChatCompletionStream(
+		resp, err := completionProvider.CreateChatCompletionStream(
 			ctx, requestParams, m.messages, replyHandler, m.replyChunkChan,
 		)
+		if err != nil {
+			return msgResponseError(err)
+		}
 		return msgResponseEnd(resp)
 	}
 }