diff --git a/pkg/tui/tui.go b/pkg/tui/tui.go
index 846c92a..10b4d42 100644
--- a/pkg/tui/tui.go
+++ b/pkg/tui/tui.go
@@ -28,6 +28,15 @@ import (
 	"gopkg.in/yaml.v2"
 )
 
+type appState int
+
+const (
+	stateConversation = iota
+	//stateConversationList
+	//stateModelSelect // stateOptions?
+	//stateHelp
+)
+
 type focusState int
 
 const (
@@ -50,8 +59,10 @@ type model struct {
 	convShortname string
 
 	// application state
+	state           appState
 	conversation    *models.Conversation
 	messages        []models.Message
+	selectedMessage int
 	waitingForReply bool
 	editorTarget    editorTarget
 	stopSignal      chan interface{}
@@ -67,7 +78,6 @@ type model struct {
 	showToolResults bool     // whether tool calls and results are shown
 	messageCache    []string // cache of syntax highlighted and wrapped message content
 	messageOffsets  []int
-	selectedMessage int
 
 	// ui elements
 	content viewport.Model
@@ -131,7 +141,7 @@ type (
 	msgConversationLoaded *models.Conversation
 	// sent when a new conversation title is set
 	msgConversationTitleChanged string
-	// send when a conversation's messages are laoded
+	// sent when a conversation's messages are loaded
 	msgMessagesLoaded []models.Message
 	// sent when an error occurs
 	msgError error
@@ -153,9 +163,58 @@ func (m model) Init() tea.Cmd {
 	)
 }
 
-func (m model) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
-	var cmds []tea.Cmd
+func (m *model) handleGlobalInput(msg tea.KeyMsg) tea.Cmd {
+	switch msg.String() {
+	case "ctrl+c":
+		if m.waitingForReply {
+			m.status = "Cancelling..."
+			m.stopSignal <- ""
+			return nil
+		} else {
+			return tea.Quit
+		}
+	case "q":
+		if m.focus != focusInput {
+			return tea.Quit
+		}
+	default:
+		switch m.state {
+		case stateConversation:
+			return m.handleConversationInput(msg)
+		}
+	}
+	return nil
+}
+func (m *model) handleConversationInput(msg tea.KeyMsg) tea.Cmd {
+	switch msg.String() {
+	case "ctrl+p":
+		m.persistence = !m.persistence
+	case "ctrl+t":
+		m.showToolResults = !m.showToolResults
+		m.rebuildMessageCache()
+		m.updateContent()
+	case "ctrl+w":
+		m.wrap = !m.wrap
+		m.rebuildMessageCache()
+		m.updateContent()
+	default:
+		switch m.focus {
+		case focusInput:
+			return m.handleInputKey(msg)
+		case focusMessages:
+			return m.handleMessagesKey(msg)
+		}
+	}
+	return nil
+}
+
+func (m *model) handleConversationListInput(msg tea.KeyMsg) tea.Cmd {
+	return nil
+}
+
+func (m *model) handleConversationUpdate(msg tea.Msg) []tea.Cmd {
+	var cmds []tea.Cmd
 
 	switch msg := msg.(type) {
 	case msgTempfileEditorClosed:
 		contents := string(msg)
@@ -173,48 +232,6 @@ func (m model) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
 			}
 			m.updateContent()
 		}
-	case tea.KeyMsg:
-		switch msg.String() {
-		case "ctrl+c":
-			if m.waitingForReply {
-				m.stopSignal <- ""
-				return m, nil
-			} else {
-				return m, tea.Quit
-			}
-		case "ctrl+p":
-			m.persistence = !m.persistence
-		case "ctrl+w":
-			m.wrap = !m.wrap
-			m.rebuildMessageCache()
-			m.updateContent()
-		case "ctrl+t":
-			m.showToolResults = !m.showToolResults
-			m.rebuildMessageCache()
-			m.updateContent()
-		case "q":
-			if m.focus != focusInput {
-				return m, tea.Quit
-			}
-		default:
-			var inputHandled tea.Cmd
-			switch m.focus {
-			case focusInput:
-				inputHandled = m.handleInputKey(msg)
-			case focusMessages:
-				inputHandled = m.handleMessagesKey(msg)
-			}
-			if inputHandled != nil {
-				return m, inputHandled
-			}
-		}
-	case tea.WindowSizeMsg:
-		m.width = msg.Width
-		m.height = msg.Height
-		m.content.Width = msg.Width
-		m.input.SetWidth(msg.Width - m.input.FocusedStyle.Base.GetHorizontalBorderSize())
-		m.rebuildMessageCache()
-		m.updateContent()
 	case msgConversationLoaded:
 		m.conversation = (*models.Conversation)(msg)
 		cmds = append(cmds, m.loadMessages(m.conversation))
@@ -291,8 +308,6 @@ func (m model) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
 				cmds = append(cmds, wrapError(err))
 			}
 		}
-	case msgError:
-		m.err = error(msg)
 	}
 
 	var cmd tea.Cmd
@@ -316,11 +331,11 @@ func (m model) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
 		}
 	}
 
+	// update views once window dimensions are known
 	if m.width > 0 {
 		m.views.header = m.headerView()
 		m.views.footer = m.footerView()
 		m.views.error = m.errorView()
-
 		fixedHeight := height(m.views.header) + height(m.views.error) + height(m.views.footer)
 
 		// calculate clamped input height to accomodate input text
@@ -328,7 +343,7 @@ func (m model) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
 		m.input.SetHeight(newHeight)
 		m.views.input = m.input.View()
 
-		m.content.Height = m.height - height(m.views.input) - fixedHeight
+		m.content.Height = m.height - fixedHeight - height(m.views.input)
 		m.views.content = m.content.View()
 	}
 
@@ -356,6 +371,34 @@ func (m model) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
 		}
 	}
 
+	return cmds
+}
+
+func (m model) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
+	var cmds []tea.Cmd
+
+	switch msg := msg.(type) {
+	case tea.KeyMsg:
+		cmd := m.handleGlobalInput(msg)
+		if cmd != nil {
+			return m, cmd
+		}
+	case tea.WindowSizeMsg:
+		m.content.Width = msg.Width
+		m.input.SetWidth(msg.Width - m.input.FocusedStyle.Base.GetHorizontalBorderSize())
+		m.rebuildMessageCache()
+		m.updateContent()
+		m.width = msg.Width
+		m.height = msg.Height
+	case msgError:
+		m.err = msg
+	}
+
+	switch m.state {
+	case stateConversation:
+		cmds = append(cmds, m.handleConversationUpdate(msg)...)
+	}
+
 	return m, tea.Batch(cmds...)
 }
 
@@ -389,15 +432,24 @@ func (m model) View() string {
 		// without this, the m.*View() functions may crash
 		return ""
 	}
 	sections := make([]string, 0, 6)
-	sections = append(sections, m.views.header)
-	sections = append(sections, m.views.content)
-	if m.views.error != "" {
-		sections = append(sections, m.views.error)
+
+	if m.views.header != "" {
+		sections = append(sections, m.views.header)
+	}
+
+	switch m.state {
+	case stateConversation:
+		sections = append(sections, m.views.content)
+		if m.views.error != "" {
+			sections = append(sections, m.views.error)
+		}
+		sections = append(sections, m.views.input)
+	}
+
+	if m.views.footer != "" {
+		sections = append(sections, m.views.footer)
 	}
-	sections = append(sections, m.views.input)
-	sections = append(sections, m.views.footer)
 
 	return lipgloss.JoinVertical(
 		lipgloss.Left,
@@ -490,6 +542,8 @@ func initialModel(ctx *lmcli.Context, convShortname string) model {
 		views:         &views{},
 	}
 
+	m.state = stateConversation
+
 	m.content = viewport.New(0, 0)
 	m.input = textarea.New()
 
@@ -893,7 +947,7 @@ func (m *model) renderMessage(msg *models.Message) string {
 	content := strings.TrimRight(sb.String(), "\n")
 
 	if m.wrap {
-		wrapWidth := m.content.Width - messageStyle.GetHorizontalPadding() - 2
+		wrapWidth := m.content.Width - messageStyle.GetHorizontalPadding() - 1
 		content = wordwrap.String(content, wrapWidth)
 	}
 
@@ -935,7 +989,7 @@ func (m *model) rebuildMessageCache() {
 
 func (m *model) updateContent() {
 	atBottom := m.content.AtBottom()
-	m.content.SetContent(m.conversationView())
+	m.content.SetContent(m.conversationMessagesView())
 	if atBottom {
 		// if we were at bottom before the update, scroll with the output
 		m.content.GotoBottom()
 	}
 }
 
 // render the conversation into a string
-func (m *model) conversationView() string {
+func (m *model) conversationMessagesView() string {
 	sb := strings.Builder{}
 
 	m.messageOffsets = make([]int, len(m.messages))
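
For orientation (this note is not part of the patch): the change reduces Update to a two-stage dispatcher. The sketch below paraphrases the shape of the Update/handleGlobalInput hunks above, reusing the patch's own identifiers; the resize and error plumbing is elided, so treat it as a reading aid rather than additional code.

```go
// Paraphrased shape of the refactored update loop (see the hunks above);
// window-size and error handling elided for brevity.
func (m model) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
	var cmds []tea.Cmd

	if key, ok := msg.(tea.KeyMsg); ok {
		// global bindings first (ctrl+c, q); unhandled keys fall through to
		// the key handler for the current appState (handleConversationInput)
		if cmd := m.handleGlobalInput(key); cmd != nil {
			return m, cmd
		}
	}

	// every other message is routed to the handler for the active state;
	// stateConversation is the only state wired up in this change
	switch m.state {
	case stateConversation:
		cmds = append(cmds, m.handleConversationUpdate(msg)...)
	}

	return m, tea.Batch(cmds...)
}
```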