package tui

import (
	"context"
	"encoding/json"
	"fmt"
	"strings"
	"time"

	cmdutil "git.mlow.ca/mlow/lmcli/pkg/cmd/util"
	models "git.mlow.ca/mlow/lmcli/pkg/lmcli/model"

	"github.com/charmbracelet/bubbles/spinner"
	"github.com/charmbracelet/bubbles/textarea"
	"github.com/charmbracelet/bubbles/viewport"
	tea "github.com/charmbracelet/bubbletea"
	"github.com/charmbracelet/lipgloss"
	"github.com/muesli/reflow/wordwrap"
	"gopkg.in/yaml.v2"
)

type focusState int

const (
	focusInput focusState = iota
	focusMessages
)

type editorTarget int

const (
	input editorTarget = iota
	selectedMessage
)

// custom tea.Msg types
type (
	// sent on each chunk received from the LLM
	msgResponseChunk string
	// sent when the response is finished being received
	msgResponseEnd string
	// a special case of msgError that stops the response waiting animation
	msgResponseError error
	// sent on each completed reply
	msgAssistantReply models.Message
	// sent when a conversation is (re)loaded
	msgConversationLoaded *models.Conversation
	// sent when a new conversation title is set
	msgConversationTitleChanged string
	// sent when a conversation's messages are loaded
	msgMessagesLoaded []models.Message
)

type chatModel struct {
	basemodel
	width  int
	height int

	// app state
	conversation    *models.Conversation
	messages        []models.Message
	selectedMessage int
	waitingForReply bool
	editorTarget    editorTarget
	stopSignal      chan struct{}
	replyChan       chan models.Message
	replyChunkChan  chan string
	persistence     bool // whether we will save new messages in the conversation

	// ui state
	focus           focusState
	wrap            bool     // whether message content is wrapped to viewport width
	status          string   // a general status message
	showToolResults bool     // whether tool calls and results are shown
	messageCache    []string // cache of syntax highlighted and wrapped message content
	messageOffsets  []int

	// ui elements
	content viewport.Model
	input   textarea.Model
	spinner spinner.Model
}

func newChatModel(tui *model) chatModel {
	m := chatModel{
		basemodel: basemodel{
			opts:  tui.opts,
			ctx:   tui.ctx,
			views: tui.views,
		},

		conversation: &models.Conversation{},
		persistence:  true,

		stopSignal:     make(chan struct{}),
		replyChan:      make(chan models.Message),
		replyChunkChan: make(chan string),

		wrap:            true,
		selectedMessage: -1,

		content: viewport.New(0, 0),
		input:   textarea.New(),
		spinner: spinner.New(spinner.WithSpinner(
			spinner.Spinner{
				Frames: []string{
					". ",
					".. ",
					"...",
					".. ",
					". ",
					" ",
				},
				FPS: time.Second / 3,
			},
		)),
	}

	m.input.Focus()
	m.input.MaxHeight = 0
	m.input.CharLimit = 0
	m.input.ShowLineNumbers = false
	m.input.Placeholder = "Enter a message"

	m.input.FocusedStyle.CursorLine = lipgloss.NewStyle()
	m.input.FocusedStyle.Base = inputFocusedStyle
	m.input.BlurredStyle.Base = inputBlurredStyle

	m.waitingForReply = false
	m.status = "Press ctrl+s to send"
	return m
}

// styles
var (
	headerStyle = lipgloss.NewStyle().
			PaddingLeft(1).
			PaddingRight(1).
			Background(lipgloss.Color("0"))
	messageHeadingStyle = lipgloss.NewStyle().
				MarginTop(1).
				MarginBottom(1).
				PaddingLeft(1).
				Bold(true)
	userStyle      = lipgloss.NewStyle().Faint(true).Foreground(lipgloss.Color("10"))
	assistantStyle = lipgloss.NewStyle().Faint(true).Foreground(lipgloss.Color("12"))
	messageStyle   = lipgloss.NewStyle().
			PaddingLeft(2).
			PaddingRight(2)
	inputFocusedStyle = lipgloss.NewStyle().
				Border(lipgloss.RoundedBorder(), true, true, true, false)
	inputBlurredStyle = lipgloss.NewStyle().
				Faint(true).
				Border(lipgloss.RoundedBorder(), true, true, true, false)
	footerStyle = lipgloss.NewStyle()
)

func (m *chatModel) handleInput(msg tea.KeyMsg) (bool, tea.Cmd) {
	switch m.focus {
	case focusInput:
		consumed, cmd := m.handleInputKey(msg)
		if consumed {
			return true, cmd
		}
	case focusMessages:
		consumed, cmd := m.handleMessagesKey(msg)
		if consumed {
			return true, cmd
		}
	}

	switch msg.String() {
	case "esc":
		return true, func() tea.Msg {
			return msgChangeState(stateConversations)
		}
	case "ctrl+c":
		if m.waitingForReply {
			m.stopSignal <- struct{}{}
			return true, nil
		}
		return true, func() tea.Msg {
			return msgChangeState(stateConversations)
		}
	case "ctrl+p":
		m.persistence = !m.persistence
		return true, nil
	case "ctrl+t":
		m.showToolResults = !m.showToolResults
		m.rebuildMessageCache()
		m.updateContent()
		return true, nil
	case "ctrl+w":
		m.wrap = !m.wrap
		m.rebuildMessageCache()
		m.updateContent()
		return true, nil
	}

	return false, nil
}

func (m chatModel) Init() tea.Cmd {
	return tea.Batch(
		textarea.Blink,
		m.spinner.Tick,
		m.waitForChunk(),
		m.waitForReply(),
	)
}

func (m *chatModel) handleResize(width, height int) {
	m.width, m.height = width, height
	m.content.Width = width
	m.input.SetWidth(width - m.input.FocusedStyle.Base.GetHorizontalFrameSize())
	if len(m.messages) > 0 {
		m.rebuildMessageCache()
		m.updateContent()
	}
}

func (m chatModel) Update(msg tea.Msg) (chatModel, tea.Cmd) {
	var cmds []tea.Cmd
	switch msg := msg.(type) {
	case msgChangeState:
		if m.opts.convShortname != "" && m.conversation.ShortName.String != m.opts.convShortname {
			cmds = append(cmds, m.loadConversation(m.opts.convShortname))
		}
	case tea.WindowSizeMsg:
		m.handleResize(msg.Width, msg.Height)
	case msgTempfileEditorClosed:
		contents := string(msg)
		switch m.editorTarget {
		case input:
			m.input.SetValue(contents)
		case selectedMessage:
			m.setMessageContents(m.selectedMessage, contents)
			if m.persistence && m.messages[m.selectedMessage].ID > 0 {
				// update persisted message
				err := m.ctx.Store.UpdateMessage(&m.messages[m.selectedMessage])
				if err != nil {
					cmds = append(cmds, wrapError(fmt.Errorf("Could not save edited message: %v", err)))
				}
			}
			m.updateContent()
		}
	case msgConversationLoaded:
		m.conversation = (*models.Conversation)(msg)
		cmds = append(cmds, m.loadMessages(m.conversation))
	case msgMessagesLoaded:
		m.setMessages(msg)
		m.updateContent()
		m.content.GotoBottom()
	case msgResponseChunk:
		chunk := string(msg)
		last := len(m.messages) - 1
		if last >= 0 && m.messages[last].Role.IsAssistant() {
			m.setMessageContents(last, m.messages[last].Content+chunk)
		} else {
			m.addMessage(models.Message{
				Role:    models.MessageRoleAssistant,
				Content: chunk,
			})
		}
		m.updateContent()
		cmds = append(cmds, m.waitForChunk()) // wait for the next chunk
	case msgAssistantReply:
		// the last reply that was being worked on is finished
		reply := models.Message(msg)
		reply.Content = strings.TrimSpace(reply.Content)

		last := len(m.messages) - 1
		if last < 0 {
			panic("Unexpected empty messages handling msgAssistantReply")
		}

		if reply.Role.IsAssistant() && m.messages[last].Role.IsAssistant() {
			// this was a continuation, so replace the previous message with the completed reply
			m.setMessage(last, reply)
		} else {
			m.addMessage(reply)
		}

		if m.persistence {
			var err error
			if m.conversation.ID == 0 {
				err = m.ctx.Store.SaveConversation(m.conversation)
			}
			if err != nil {
				cmds = append(cmds, wrapError(err))
			} else {
				cmds = append(cmds, m.persistConversation())
			}
		}

		if m.conversation.Title == "" {
			cmds = append(cmds, m.generateConversationTitle())
		}

		m.updateContent()
		cmds = append(cmds, m.waitForReply())
	case msgResponseEnd:
		m.waitingForReply = false
		last := len(m.messages) - 1
		if last < 0 {
			panic("Unexpected empty messages handling msgResponseEnd")
		}
		m.setMessageContents(last, strings.TrimSpace(m.messages[last].Content))
		m.updateContent()
		m.status = "Press ctrl+s to send"
	case msgResponseError:
		m.waitingForReply = false
		m.status = "Press ctrl+s to send"
		m.err = error(msg)
	case msgConversationTitleChanged:
		title := string(msg)
		m.conversation.Title = title
		if m.persistence {
			err := m.ctx.Store.SaveConversation(m.conversation)
			if err != nil {
				cmds = append(cmds, wrapError(err))
			}
		}
	}

	var cmd tea.Cmd
	m.spinner, cmd = m.spinner.Update(msg)
	if cmd != nil {
		cmds = append(cmds, cmd)
	}

	prevInputLineCnt := m.input.LineCount()
	inputCaptured := false
	m.input, cmd = m.input.Update(msg)
	if cmd != nil {
		inputCaptured = true
		cmds = append(cmds, cmd)
	}
	if !inputCaptured {
		m.content, cmd = m.content.Update(msg)
		if cmd != nil {
			cmds = append(cmds, cmd)
		}
	}

	// update views once window dimensions are known
	if m.width > 0 {
		m.views.header = m.headerView()
		m.views.footer = m.footerView()
		m.views.error = errorBanner(m.err, m.width)
		fixedHeight := height(m.views.header) + height(m.views.error) + height(m.views.footer)

		// calculate clamped input height to accommodate input text
		newHeight := max(4, min((m.height-fixedHeight-1)/2, m.input.LineCount()))
		m.input.SetHeight(newHeight)
		m.views.input = m.input.View()

		m.content.Height = m.height - fixedHeight - height(m.views.input)
		m.views.content = m.content.View()
	}

	// this is a pretty nasty hack to ensure the input area viewport doesn't
	// scroll below its content, which can happen when the input viewport
	// height has grown, or previously entered lines have been deleted
	if prevInputLineCnt != m.input.LineCount() {
		// dist is the distance we'd need to scroll up from the current cursor
		// position to position the last input line at the bottom of the
		// viewport. if negative, we're already scrolled above the bottom
		dist := m.input.Line() - (m.input.LineCount() - m.input.Height())
		if dist > 0 {
			for i := 0; i < dist; i++ {
				// move cursor up until content reaches the bottom of the viewport
				m.input.CursorUp()
			}
			m.input, cmd = m.input.Update(nil)
			for i := 0; i < dist; i++ {
				// move cursor back down to its previous position
				m.input.CursorDown()
			}
			m.input, cmd = m.input.Update(nil)
		}
	}

	return m, tea.Batch(cmds...)
}

func (m *chatModel) handleMessagesKey(msg tea.KeyMsg) (bool, tea.Cmd) {
	switch msg.String() {
	case "tab", "enter":
		m.focus = focusInput
		m.updateContent()
		m.input.Focus()
		return true, nil
	case "e":
		message := m.messages[m.selectedMessage]
		cmd := openTempfileEditor("message.*.md", message.Content, "# Edit the message below\n")
		m.editorTarget = selectedMessage
		return true, cmd
	case "ctrl+k":
		if m.selectedMessage > 0 && len(m.messages) == len(m.messageOffsets) {
			m.selectedMessage--
			m.updateContent()
			offset := m.messageOffsets[m.selectedMessage]
			scrollIntoView(&m.content, offset, 0.1)
		}
		return true, nil
	case "ctrl+j":
		if m.selectedMessage < len(m.messages)-1 && len(m.messages) == len(m.messageOffsets) {
			m.selectedMessage++
			m.updateContent()
			offset := m.messageOffsets[m.selectedMessage]
			scrollIntoView(&m.content, offset, 0.1)
		}
		return true, nil
	case "ctrl+r":
		// resubmit the conversation with all messages up to and including the selected message
		if m.waitingForReply || len(m.messages) == 0 {
			return true, nil
		}
		m.messages = m.messages[:m.selectedMessage+1]
		m.messageCache = m.messageCache[:m.selectedMessage+1]
		m.updateContent()
		m.content.GotoBottom()
		return true, m.promptLLM()
	}
	return false, nil
}

func (m *chatModel) handleInputKey(msg tea.KeyMsg) (bool, tea.Cmd) {
	switch msg.String() {
	case "esc":
		m.focus = focusMessages
		if len(m.messages) > 0 {
			if m.selectedMessage < 0 || m.selectedMessage >= len(m.messages) {
				m.selectedMessage = len(m.messages) - 1
			}
			offset := m.messageOffsets[m.selectedMessage]
			scrollIntoView(&m.content, offset, 0.1)
		}
		m.updateContent()
		m.input.Blur()
		return true, nil
	case "ctrl+s":
		userInput := strings.TrimSpace(m.input.Value())
		if userInput == "" {
			return true, nil
		}

		if len(m.messages) > 0 && m.messages[len(m.messages)-1].Role == models.MessageRoleUser {
			return true, wrapError(fmt.Errorf("Can't reply to a user message"))
		}

		reply := models.Message{
			Role:    models.MessageRoleUser,
			Content: userInput,
		}

		if m.persistence {
			var err error
			if m.conversation.ID == 0 {
				err = m.ctx.Store.SaveConversation(m.conversation)
			}
			if err != nil {
				return true, wrapError(err)
			}

			// ensure all messages up to the one we're about to add are persisted
			cmd := m.persistConversation()
			if cmd != nil {
				return true, cmd
			}

			savedReply, err := m.ctx.Store.AddReply(m.conversation, reply)
			if err != nil {
				return true, wrapError(err)
			}
			reply = *savedReply
		}

		m.input.SetValue("")
		m.addMessage(reply)

		m.updateContent()
		m.content.GotoBottom()
		return true, m.promptLLM()
	case "ctrl+e":
		cmd := openTempfileEditor("message.*.md", m.input.Value(), "# Edit your input below\n")
		m.editorTarget = input
		return true, cmd
	}
	return false, nil
}

func (m *chatModel) renderMessageHeading(i int, message *models.Message) string {
	icon := ""
	friendly := message.Role.FriendlyRole()
	style := lipgloss.NewStyle().Faint(true).Bold(true)

	switch message.Role {
	case models.MessageRoleSystem:
		icon = "⚙️"
	case models.MessageRoleUser:
		style = userStyle
	case models.MessageRoleAssistant:
		style = assistantStyle
	case models.MessageRoleToolCall:
		style = assistantStyle
		friendly = models.MessageRoleAssistant.FriendlyRole()
	case models.MessageRoleToolResult:
		icon = "🔧"
	}

	user := style.Render(icon + friendly)

	var prefix string
	var suffix string

	faint := lipgloss.NewStyle().Faint(true)
	if m.focus == focusMessages {
		if i == m.selectedMessage {
			prefix = "> "
		}
	}

	if message.ID == 0 {
		suffix += faint.Render(" (not saved)")
	}

	return messageHeadingStyle.Render(prefix + user + suffix)
}

func (m *chatModel) renderMessage(msg *models.Message) string {
	sb := &strings.Builder{}
	sb.Grow(len(msg.Content) * 2)
	if msg.Content != "" {
		err := m.ctx.Chroma.Highlight(sb, msg.Content)
		if err != nil {
			sb.Reset()
			sb.WriteString(msg.Content)
		}
	}

	var toolString string
	switch msg.Role {
	case models.MessageRoleToolCall:
		bytes, err := yaml.Marshal(msg.ToolCalls)
		if err != nil {
			toolString = "Could not serialize ToolCalls"
		} else {
			toolString = "tool_calls:\n" + string(bytes)
		}
	case models.MessageRoleToolResult:
		if !m.showToolResults {
			break
		}

		type renderedResult struct {
			ToolName string `yaml:"tool"`
			Result   any
		}

		var toolResults []renderedResult
		for _, result := range msg.ToolResults {
			var jsonResult interface{}
			err := json.Unmarshal([]byte(result.Result), &jsonResult)
			if err != nil {
				// If parsing as JSON fails, treat Result as a plain string
				toolResults = append(toolResults, renderedResult{
					ToolName: result.ToolName,
					Result:   result.Result,
				})
			} else {
				// If parsing as JSON succeeds, marshal the parsed JSON into YAML
				toolResults = append(toolResults, renderedResult{
					ToolName: result.ToolName,
					Result:   &jsonResult,
				})
			}
		}

		bytes, err := yaml.Marshal(toolResults)
		if err != nil {
			toolString = "Could not serialize ToolResults"
		} else {
			toolString = "tool_results:\n" + string(bytes)
		}
	}

	if toolString != "" {
		toolString = strings.TrimRight(toolString, "\n")
		if msg.Content != "" {
			sb.WriteString("\n\n")
		}
		_ = m.ctx.Chroma.HighlightLang(sb, toolString, "yaml")
	}

	content := strings.TrimRight(sb.String(), "\n")

	if m.wrap {
		wrapWidth := m.content.Width - messageStyle.GetHorizontalPadding() - 2
		content = wordwrap.String(content, wrapWidth)
	}

	return messageStyle.Width(0).Render(content)
}

// render the conversation into a string
func (m *chatModel) conversationMessagesView() string {
	sb := strings.Builder{}

	m.messageOffsets = make([]int, len(m.messages))
	lineCnt := 1
	for i, message := range m.messages {
		m.messageOffsets[i] = lineCnt

		switch message.Role {
		case models.MessageRoleToolCall:
			if !m.showToolResults && message.Content == "" {
				continue
			}
		case models.MessageRoleToolResult:
			if !m.showToolResults {
				continue
			}
		}

		heading := m.renderMessageHeading(i, &message)
		sb.WriteString(heading)
		sb.WriteString("\n")
		lineCnt += lipgloss.Height(heading)

		cached := m.messageCache[i]
		sb.WriteString(cached)
		sb.WriteString("\n")
		lineCnt += lipgloss.Height(cached)
	}

	return sb.String()
}

func (m *chatModel) headerView() string {
	titleStyle := lipgloss.NewStyle().Bold(true)
	var title string
	if m.conversation != nil && m.conversation.Title != "" {
		title = m.conversation.Title
	} else {
		title = "Untitled"
	}
	title = truncateToCellWidth(title, m.width-headerStyle.GetHorizontalPadding(), "...")
	header := titleStyle.Render(title)
	return headerStyle.Width(m.width).Render(header)
}

func (m *chatModel) footerView() string {
	segmentStyle := lipgloss.NewStyle().PaddingLeft(1).PaddingRight(1).Faint(true)
	segmentSeparator := "|"

	savingStyle := segmentStyle.Copy().Bold(true)
	saving := ""
	if m.persistence {
		saving = savingStyle.Foreground(lipgloss.Color("2")).Render("✅💾")
	} else {
		saving = savingStyle.Foreground(lipgloss.Color("1")).Render("❌💾")
	}

	status := m.status
	if m.waitingForReply {
		status += m.spinner.View()
	}

	leftSegments := []string{
		saving,
		segmentStyle.Render(status),
	}
	rightSegments := []string{
		segmentStyle.Render(fmt.Sprintf("Model: %s", *m.ctx.Config.Defaults.Model)),
	}

	left := strings.Join(leftSegments, segmentSeparator)
	right := strings.Join(rightSegments, segmentSeparator)

	totalWidth := lipgloss.Width(left) + lipgloss.Width(right)
	remaining := m.width - totalWidth

	var padding string
	if remaining > 0 {
		padding = strings.Repeat(" ", remaining)
	}

	footer := left + padding + right
	if remaining < 0 {
		footer = truncateToCellWidth(footer, m.width, "...")
	}

	return footerStyle.Width(m.width).Render(footer)
}

func (m *chatModel) setMessages(messages []models.Message) {
	m.messages = messages
	m.rebuildMessageCache()
}

func (m *chatModel) setMessage(i int, msg models.Message) {
	if i >= len(m.messages) {
		panic("i out of range")
	}
	m.messages[i] = msg
	m.messageCache[i] = m.renderMessage(&msg)
}

func (m *chatModel) addMessage(msg models.Message) {
	m.messages = append(m.messages, msg)
	m.messageCache = append(m.messageCache, m.renderMessage(&msg))
}

func (m *chatModel) setMessageContents(i int, content string) {
	if i >= len(m.messages) {
		panic("i out of range")
	}
	m.messages[i].Content = content
	m.messageCache[i] = m.renderMessage(&m.messages[i])
}

func (m *chatModel) rebuildMessageCache() {
	m.messageCache = make([]string, len(m.messages))
	for i, msg := range m.messages {
		m.messageCache[i] = m.renderMessage(&msg)
	}
}

func (m *chatModel) updateContent() {
	atBottom := m.content.AtBottom()
	m.content.SetContent(m.conversationMessagesView())
	if atBottom {
		// if we were at bottom before the update, scroll with the output
		m.content.GotoBottom()
	}
}

func (m *chatModel) loadConversation(shortname string) tea.Cmd {
	return func() tea.Msg {
		if shortname == "" {
			return nil
		}
		c, err := m.ctx.Store.ConversationByShortName(shortname)
		if err != nil {
			return msgError(fmt.Errorf("Could not lookup conversation: %v", err))
		}
		if c.ID == 0 {
			return msgError(fmt.Errorf("Conversation not found: %s", shortname))
		}
		return msgConversationLoaded(c)
	}
}

func (m *chatModel) loadMessages(c *models.Conversation) tea.Cmd {
	return func() tea.Msg {
		messages, err := m.ctx.Store.Messages(c)
		if err != nil {
			return msgError(fmt.Errorf("Could not load conversation messages: %v", err))
		}
		return msgMessagesLoaded(messages)
	}
}

// persistConversation reconciles the in-memory messages with the store:
// messages removed locally are deleted, changed messages are updated, and
// unsaved messages are inserted.
func (m *chatModel) persistConversation() tea.Cmd {
	existingMessages, err := m.ctx.Store.Messages(m.conversation)
	if err != nil {
		return wrapError(fmt.Errorf("Could not retrieve existing conversation messages while trying to save: %v", err))
	}

	existingById := make(map[uint]*models.Message, len(existingMessages))
	for _, msg := range existingMessages {
		msg := msg // copy the loop variable so each map entry points at its own value
		existingById[msg.ID] = &msg
	}

	currentById := make(map[uint]*models.Message, len(m.messages))
	for _, msg := range m.messages {
		msg := msg
		currentById[msg.ID] = &msg
	}

	for _, msg := range existingMessages {
		_, ok := currentById[msg.ID]
		if !ok {
			err := m.ctx.Store.DeleteMessage(&msg)
			if err != nil {
				return wrapError(fmt.Errorf("Failed to remove messages: %v", err))
			}
		}
	}

	for i, msg := range m.messages {
		if msg.ID > 0 {
			exist, ok := existingById[msg.ID]
			if ok {
				if msg.Content == exist.Content {
					continue
				}
				// update message when contents don't match that of store
				err := m.ctx.Store.UpdateMessage(&msg)
				if err != nil {
					return wrapError(err)
				}
			} else {
				// this would be quite odd... and I'm not sure how to handle
				// it at the time of writing this
			}
		} else {
			newMessage, err := m.ctx.Store.AddReply(m.conversation, msg)
			if err != nil {
				return wrapError(err)
			}
			m.setMessage(i, *newMessage)
		}
	}
	return nil
}

func (m *chatModel) generateConversationTitle() tea.Cmd {
	return func() tea.Msg {
		title, err := cmdutil.GenerateTitle(m.ctx, m.conversation)
		if err != nil {
			return msgError(err)
		}
		return msgConversationTitleChanged(title)
	}
}

func (m *chatModel) waitForReply() tea.Cmd {
	return func() tea.Msg {
		return msgAssistantReply(<-m.replyChan)
	}
}

func (m *chatModel) waitForChunk() tea.Cmd {
	return func() tea.Msg {
		return msgResponseChunk(<-m.replyChunkChan)
	}
}

// promptLLM returns a command that streams a completion for the current
// messages; chunks arrive on replyChunkChan, completed replies on replyChan,
// and the request can be canceled via stopSignal.
func (m *chatModel) promptLLM() tea.Cmd {
	m.waitingForReply = true
	m.status = "Press ctrl+c to cancel"

	return func() tea.Msg {
		completionProvider, err := m.ctx.GetCompletionProvider(*m.ctx.Config.Defaults.Model)
		if err != nil {
			return msgError(err)
		}

		requestParams := models.RequestParameters{
			Model:       *m.ctx.Config.Defaults.Model,
			MaxTokens:   *m.ctx.Config.Defaults.MaxTokens,
			Temperature: *m.ctx.Config.Defaults.Temperature,
			ToolBag:     m.ctx.EnabledTools,
		}

		replyHandler := func(msg models.Message) {
			m.replyChan <- msg
		}

		ctx, cancel := context.WithCancel(context.Background())

		canceled := false
		go func() {
			select {
			case <-m.stopSignal:
				canceled = true
				cancel()
			}
		}()

		resp, err := completionProvider.CreateChatCompletionStream(
			ctx, requestParams, m.messages, replyHandler, m.replyChunkChan,
		)

		if err != nil && !canceled {
			return msgResponseError(err)
		}

		return msgResponseEnd(resp)
	}
}