Compare commits


No commits in common. "69d3265b640b72164db150135e3f04384e37ec30" and "a669313a0b0c1bf50ec1601c8abde1404067ced3" have entirely different histories.

5 changed files with 863 additions and 1345 deletions


@@ -115,8 +115,8 @@ func ListCmd(ctx *lmcli.Context) *cobra.Command {
},
}
cmd.Flags().BoolP("all", "a", false, "Show all conversations")
cmd.Flags().IntP("count", "c", LS_COUNT, "How many conversations to show")
cmd.Flags().Bool("all", false, "Show all conversations")
cmd.Flags().Int("count", LS_COUNT, "How many conversations to show")
return cmd
}
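
This hunk drops the single-letter shorthands (-a, -c) in favour of long-form-only flags. A rough, self-contained sketch (not code from this repo) of registering flags this way and reading them back with cobra's pflag accessors; the default of 25 merely stands in for the LS_COUNT constant above:

package main

import (
    "fmt"
    "os"

    "github.com/spf13/cobra"
)

func main() {
    cmd := &cobra.Command{
        Use: "list",
        Run: func(cmd *cobra.Command, args []string) {
            // Read the long-form flags back; GetBool/GetInt only error on unknown names.
            all, _ := cmd.Flags().GetBool("all")
            count, _ := cmd.Flags().GetInt("count")
            fmt.Printf("all=%v count=%d\n", all, count)
        },
    }
    // Long-form only: --all and --count, with no -a/-c shorthands.
    cmd.Flags().Bool("all", false, "Show all conversations")
    cmd.Flags().Int("count", 25, "How many conversations to show")
    if err := cmd.Execute(); err != nil {
        fmt.Fprintln(os.Stderr, err)
        os.Exit(1)
    }
}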


@@ -1,889 +0,0 @@
package tui
import (
"context"
"encoding/json"
"fmt"
"strings"
"time"
cmdutil "git.mlow.ca/mlow/lmcli/pkg/cmd/util"
models "git.mlow.ca/mlow/lmcli/pkg/lmcli/model"
"github.com/charmbracelet/bubbles/spinner"
"github.com/charmbracelet/bubbles/textarea"
"github.com/charmbracelet/bubbles/viewport"
tea "github.com/charmbracelet/bubbletea"
"github.com/charmbracelet/lipgloss"
"github.com/muesli/reflow/wordwrap"
"gopkg.in/yaml.v2"
)
type focusState int
const (
focusInput focusState = iota
focusMessages
)
type editorTarget int
const (
input editorTarget = iota
selectedMessage
)
// custom tea.Msg types
type (
// sent on each chunk received from LLM
msgResponseChunk string
// sent when response is finished being received
msgResponseEnd string
// a special case of msgError that stops the response waiting animation
msgResponseError error
// sent on each completed reply
msgAssistantReply models.Message
// sent when a conversation is (re)loaded
msgConversationLoaded *models.Conversation
// sent when a new conversation title is set
msgConversationTitleChanged string
// sent when a conversation's messages are loaded
msgMessagesLoaded []models.Message
)
type chatModel struct {
basemodel
width int
height int
// app state
conversation *models.Conversation
messages []models.Message
selectedMessage int
waitingForReply bool
editorTarget editorTarget
stopSignal chan struct{}
replyChan chan models.Message
replyChunkChan chan string
persistence bool // whether we will save new messages in the conversation
// ui state
focus focusState
wrap bool // whether message content is wrapped to viewport width
status string // a general status message
showToolResults bool // whether tool calls and results are shown
messageCache []string // cache of syntax highlighted and wrapped message content
messageOffsets []int
// ui elements
content viewport.Model
input textarea.Model
spinner spinner.Model
}
func newChatModel(tui *model) chatModel {
m := chatModel{
basemodel: basemodel{
opts: tui.opts,
ctx: tui.ctx,
views: tui.views,
},
conversation: &models.Conversation{},
persistence: true,
stopSignal: make(chan struct{}),
replyChan: make(chan models.Message),
replyChunkChan: make(chan string),
wrap: true,
selectedMessage: -1,
content: viewport.New(0, 0),
input: textarea.New(),
spinner: spinner.New(spinner.WithSpinner(
spinner.Spinner{
Frames: []string{
". ",
".. ",
"...",
".. ",
". ",
" ",
},
FPS: time.Second / 3,
},
)),
}
m.input.Focus()
m.input.MaxHeight = 0
m.input.CharLimit = 0
m.input.ShowLineNumbers = false
m.input.Placeholder = "Enter a message"
m.input.FocusedStyle.CursorLine = lipgloss.NewStyle()
m.input.FocusedStyle.Base = inputFocusedStyle
m.input.BlurredStyle.Base = inputBlurredStyle
m.waitingForReply = false
m.status = "Press ctrl+s to send"
return m
}
// styles
var (
headerStyle = lipgloss.NewStyle().
PaddingLeft(1).
PaddingRight(1).
Background(lipgloss.Color("0"))
messageHeadingStyle = lipgloss.NewStyle().
MarginTop(1).
MarginBottom(1).
PaddingLeft(1).
Bold(true)
userStyle = lipgloss.NewStyle().Faint(true).Foreground(lipgloss.Color("10"))
assistantStyle = lipgloss.NewStyle().Faint(true).Foreground(lipgloss.Color("12"))
messageStyle = lipgloss.NewStyle().
PaddingLeft(2).
PaddingRight(2)
inputFocusedStyle = lipgloss.NewStyle().
Border(lipgloss.RoundedBorder(), true, true, true, false)
inputBlurredStyle = lipgloss.NewStyle().
Faint(true).
Border(lipgloss.RoundedBorder(), true, true, true, false)
footerStyle = lipgloss.NewStyle()
)
func (m *chatModel) handleInput(msg tea.KeyMsg) (bool, tea.Cmd) {
switch m.focus {
case focusInput:
consumed, cmd := m.handleInputKey(msg)
if consumed {
return true, cmd
}
case focusMessages:
consumed, cmd := m.handleMessagesKey(msg)
if consumed {
return true, cmd
}
}
switch msg.String() {
case "esc":
return true, func() tea.Msg {
return msgChangeState(stateConversations)
}
case "ctrl+c":
if m.waitingForReply {
m.stopSignal <- struct{}{}
return true, nil
}
return true, func() tea.Msg {
return msgChangeState(stateConversations)
}
case "ctrl+p":
m.persistence = !m.persistence
return true, nil
case "ctrl+t":
m.showToolResults = !m.showToolResults
m.rebuildMessageCache()
m.updateContent()
return true, nil
case "ctrl+w":
m.wrap = !m.wrap
m.rebuildMessageCache()
m.updateContent()
return true, nil
}
return false, nil
}
func (m chatModel) Init() tea.Cmd {
return tea.Batch(
textarea.Blink,
m.spinner.Tick,
m.waitForChunk(),
m.waitForReply(),
)
}
func (m *chatModel) handleResize(width, height int) {
m.width, m.height = width, height
m.content.Width = width
m.input.SetWidth(width - m.input.FocusedStyle.Base.GetHorizontalFrameSize())
if len(m.messages) > 0 {
m.rebuildMessageCache()
m.updateContent()
}
}
func (m chatModel) Update(msg tea.Msg) (chatModel, tea.Cmd) {
var cmds []tea.Cmd
switch msg := msg.(type) {
case msgChangeState:
if m.opts.convShortname != "" && m.conversation.ShortName.String != m.opts.convShortname {
cmds = append(cmds, m.loadConversation(m.opts.convShortname))
}
case tea.WindowSizeMsg:
m.handleResize(msg.Width, msg.Height)
case msgTempfileEditorClosed:
contents := string(msg)
switch m.editorTarget {
case input:
m.input.SetValue(contents)
case selectedMessage:
m.setMessageContents(m.selectedMessage, contents)
if m.persistence && m.messages[m.selectedMessage].ID > 0 {
// update persisted message
err := m.ctx.Store.UpdateMessage(&m.messages[m.selectedMessage])
if err != nil {
cmds = append(cmds, wrapError(fmt.Errorf("Could not save edited message: %v", err)))
}
}
m.updateContent()
}
case msgConversationLoaded:
m.conversation = (*models.Conversation)(msg)
cmds = append(cmds, m.loadMessages(m.conversation))
case msgMessagesLoaded:
m.selectedMessage = len(msg) - 1
m.setMessages(msg)
m.updateContent()
m.content.GotoBottom()
case msgResponseChunk:
chunk := string(msg)
last := len(m.messages) - 1
if last >= 0 && m.messages[last].Role.IsAssistant() {
m.setMessageContents(last, m.messages[last].Content+chunk)
} else {
m.addMessage(models.Message{
Role: models.MessageRoleAssistant,
Content: chunk,
})
}
m.updateContent()
cmds = append(cmds, m.waitForChunk()) // wait for the next chunk
case msgAssistantReply:
// the last reply that was being worked on is finished
reply := models.Message(msg)
reply.Content = strings.TrimSpace(reply.Content)
last := len(m.messages) - 1
if last < 0 {
panic("Unexpected empty messages handling msgAssistantReply")
}
if reply.Role.IsAssistant() && m.messages[last].Role.IsAssistant() {
// this was a continuation, so replace the previous message with the completed reply
m.setMessage(last, reply)
} else {
m.addMessage(reply)
}
if m.persistence {
var err error
if m.conversation.ID == 0 {
err = m.ctx.Store.SaveConversation(m.conversation)
}
if err != nil {
cmds = append(cmds, wrapError(err))
} else {
cmds = append(cmds, m.persistConversation())
}
}
if m.conversation.Title == "" {
cmds = append(cmds, m.generateConversationTitle())
}
m.updateContent()
cmds = append(cmds, m.waitForReply())
case msgResponseEnd:
m.waitingForReply = false
last := len(m.messages) - 1
if last < 0 {
panic("Unexpected empty messages handling msgResponseEnd")
}
m.setMessageContents(last, strings.TrimSpace(m.messages[last].Content))
m.updateContent()
m.status = "Press ctrl+s to send"
case msgResponseError:
m.waitingForReply = false
m.status = "Press ctrl+s to send"
m.err = error(msg)
case msgConversationTitleChanged:
title := string(msg)
m.conversation.Title = title
if m.persistence {
err := m.ctx.Store.SaveConversation(m.conversation)
if err != nil {
cmds = append(cmds, wrapError(err))
}
}
}
var cmd tea.Cmd
m.spinner, cmd = m.spinner.Update(msg)
if cmd != nil {
cmds = append(cmds, cmd)
}
prevInputLineCnt := m.input.LineCount()
inputCaptured := false
m.input, cmd = m.input.Update(msg)
if cmd != nil {
inputCaptured = true
cmds = append(cmds, cmd)
}
if !inputCaptured {
m.content, cmd = m.content.Update(msg)
if cmd != nil {
cmds = append(cmds, cmd)
}
}
// update views once window dimensions are known
if m.width > 0 {
m.views.header = m.headerView()
m.views.footer = m.footerView()
m.views.error = errorBanner(m.err, m.width)
fixedHeight := height(m.views.header) + height(m.views.error) + height(m.views.footer)
// calculate clamped input height to accommodate input text
newHeight := max(4, min((m.height-fixedHeight-1)/2, m.input.LineCount()))
m.input.SetHeight(newHeight)
m.views.input = m.input.View()
m.content.Height = m.height - fixedHeight - height(m.views.input)
m.views.content = m.content.View()
}
// this is a pretty nasty hack to ensure the input area viewport doesn't
// scroll below its content, which can happen when the input viewport
// height has grown, or previously entered lines have been deleted
if prevInputLineCnt != m.input.LineCount() {
// dist is the distance we'd need to scroll up from the current cursor
// position to position the last input line at the bottom of the
// viewport. if negative, we're already scrolled above the bottom
dist := m.input.Line() - (m.input.LineCount() - m.input.Height())
if dist > 0 {
for i := 0; i < dist; i++ {
// move cursor up until content reaches the bottom of the viewport
m.input.CursorUp()
}
m.input, cmd = m.input.Update(nil)
for i := 0; i < dist; i++ {
// move cursor back down to its previous position
m.input.CursorDown()
}
m.input, cmd = m.input.Update(nil)
}
}
return m, tea.Batch(cmds...)
}
func (m *chatModel) handleMessagesKey(msg tea.KeyMsg) (bool, tea.Cmd) {
switch msg.String() {
case "tab", "enter":
m.focus = focusInput
m.updateContent()
m.input.Focus()
return true, nil
case "e":
message := m.messages[m.selectedMessage]
cmd := openTempfileEditor("message.*.md", message.Content, "# Edit the message below\n")
m.editorTarget = selectedMessage
return true, cmd
case "ctrl+k":
if m.selectedMessage > 0 && len(m.messages) == len(m.messageOffsets) {
m.selectedMessage--
m.updateContent()
offset := m.messageOffsets[m.selectedMessage]
scrollIntoView(&m.content, offset, m.content.Height/2)
}
return true, nil
case "ctrl+j":
if m.selectedMessage < len(m.messages)-1 && len(m.messages) == len(m.messageOffsets) {
m.selectedMessage++
m.updateContent()
offset := m.messageOffsets[m.selectedMessage]
scrollIntoView(&m.content, offset, m.content.Height/2)
}
return true, nil
case "ctrl+r":
// resubmit the conversation with all messages up until and including the selected message
if m.waitingForReply || len(m.messages) == 0 {
return true, nil
}
m.messages = m.messages[:m.selectedMessage+1]
m.messageCache = m.messageCache[:m.selectedMessage+1]
m.updateContent()
m.content.GotoBottom()
return true, m.promptLLM()
}
return false, nil
}
func (m *chatModel) handleInputKey(msg tea.KeyMsg) (bool, tea.Cmd) {
switch msg.String() {
case "esc":
m.focus = focusMessages
if len(m.messages) > 0 {
if m.selectedMessage < 0 || m.selectedMessage >= len(m.messages) {
m.selectedMessage = len(m.messages) - 1
}
offset := m.messageOffsets[m.selectedMessage]
scrollIntoView(&m.content, offset, m.content.Height/2)
}
m.updateContent()
m.input.Blur()
return true, nil
case "ctrl+s":
userInput := strings.TrimSpace(m.input.Value())
if strings.TrimSpace(userInput) == "" {
return true, nil
}
if len(m.messages) > 0 && m.messages[len(m.messages)-1].Role == models.MessageRoleUser {
return true, wrapError(fmt.Errorf("Can't reply to a user message"))
}
reply := models.Message{
Role: models.MessageRoleUser,
Content: userInput,
}
if m.persistence {
var err error
if m.conversation.ID == 0 {
err = m.ctx.Store.SaveConversation(m.conversation)
}
if err != nil {
return true, wrapError(err)
}
// ensure all messages up to the one we're about to add are persisted
cmd := m.persistConversation()
if cmd != nil {
return true, cmd
}
savedReply, err := m.ctx.Store.AddReply(m.conversation, reply)
if err != nil {
return true, wrapError(err)
}
reply = *savedReply
}
m.input.SetValue("")
m.addMessage(reply)
m.updateContent()
m.content.GotoBottom()
return true, m.promptLLM()
case "ctrl+e":
cmd := openTempfileEditor("message.*.md", m.input.Value(), "# Edit your input below\n")
m.editorTarget = input
return true, cmd
}
return false, nil
}
func (m *chatModel) renderMessageHeading(i int, message *models.Message) string {
icon := ""
friendly := message.Role.FriendlyRole()
style := lipgloss.NewStyle().Faint(true).Bold(true)
switch message.Role {
case models.MessageRoleSystem:
icon = "⚙️"
case models.MessageRoleUser:
style = userStyle
case models.MessageRoleAssistant:
style = assistantStyle
case models.MessageRoleToolCall:
style = assistantStyle
friendly = models.MessageRoleAssistant.FriendlyRole()
case models.MessageRoleToolResult:
icon = "🔧"
}
user := style.Render(icon + friendly)
var prefix string
var suffix string
faint := lipgloss.NewStyle().Faint(true)
if m.focus == focusMessages {
if i == m.selectedMessage {
prefix = "> "
}
}
if message.ID == 0 {
suffix += faint.Render(" (not saved)")
}
return messageHeadingStyle.Render(prefix + user + suffix)
}
func (m *chatModel) renderMessage(msg *models.Message) string {
sb := &strings.Builder{}
sb.Grow(len(msg.Content) * 2)
if msg.Content != "" {
err := m.ctx.Chroma.Highlight(sb, msg.Content)
if err != nil {
sb.Reset()
sb.WriteString(msg.Content)
}
}
var toolString string
switch msg.Role {
case models.MessageRoleToolCall:
bytes, err := yaml.Marshal(msg.ToolCalls)
if err != nil {
toolString = "Could not serialize ToolCalls"
} else {
toolString = "tool_calls:\n" + string(bytes)
}
case models.MessageRoleToolResult:
if !m.showToolResults {
break
}
type renderedResult struct {
ToolName string `yaml:"tool"`
Result any
}
var toolResults []renderedResult
for _, result := range msg.ToolResults {
var jsonResult interface{}
err := json.Unmarshal([]byte(result.Result), &jsonResult)
if err != nil {
// If parsing as JSON fails, treat Result as a plain string
toolResults = append(toolResults, renderedResult{
ToolName: result.ToolName,
Result: result.Result,
})
} else {
// If parsing as JSON succeeds, marshal the parsed JSON into YAML
toolResults = append(toolResults, renderedResult{
ToolName: result.ToolName,
Result: &jsonResult,
})
}
}
bytes, err := yaml.Marshal(toolResults)
if err != nil {
toolString = "Could not serialize ToolResults"
} else {
toolString = "tool_results:\n" + string(bytes)
}
}
if toolString != "" {
toolString = strings.TrimRight(toolString, "\n")
if msg.Content != "" {
sb.WriteString("\n\n")
}
_ = m.ctx.Chroma.HighlightLang(sb, toolString, "yaml")
}
content := strings.TrimRight(sb.String(), "\n")
if m.wrap {
wrapWidth := m.content.Width - messageStyle.GetHorizontalPadding() - 2
content = wordwrap.String(content, wrapWidth)
}
return messageStyle.Width(0).Render(content)
}
// render the conversation into a string
func (m *chatModel) conversationMessagesView() string {
sb := strings.Builder{}
m.messageOffsets = make([]int, len(m.messages))
lineCnt := 1
for i, message := range m.messages {
m.messageOffsets[i] = lineCnt
switch message.Role {
case models.MessageRoleToolCall:
if !m.showToolResults && message.Content == "" {
continue
}
case models.MessageRoleToolResult:
if !m.showToolResults {
continue
}
}
heading := m.renderMessageHeading(i, &message)
sb.WriteString(heading)
sb.WriteString("\n")
lineCnt += lipgloss.Height(heading)
cached := m.messageCache[i]
sb.WriteString(cached)
sb.WriteString("\n")
lineCnt += lipgloss.Height(cached)
}
return sb.String()
}
func (m *chatModel) headerView() string {
titleStyle := lipgloss.NewStyle().Bold(true)
var title string
if m.conversation != nil && m.conversation.Title != "" {
title = m.conversation.Title
} else {
title = "Untitled"
}
title = truncateToCellWidth(title, m.width-headerStyle.GetHorizontalPadding(), "...")
header := titleStyle.Render(title)
return headerStyle.Width(m.width).Render(header)
}
func (m *chatModel) footerView() string {
segmentStyle := lipgloss.NewStyle().PaddingLeft(1).PaddingRight(1).Faint(true)
segmentSeparator := "|"
savingStyle := segmentStyle.Copy().Bold(true)
saving := ""
if m.persistence {
saving = savingStyle.Foreground(lipgloss.Color("2")).Render("✅💾")
} else {
saving = savingStyle.Foreground(lipgloss.Color("1")).Render("❌💾")
}
status := m.status
if m.waitingForReply {
status += m.spinner.View()
}
leftSegments := []string{
saving,
segmentStyle.Render(status),
}
rightSegments := []string{
segmentStyle.Render(fmt.Sprintf("Model: %s", *m.ctx.Config.Defaults.Model)),
}
left := strings.Join(leftSegments, segmentSeparator)
right := strings.Join(rightSegments, segmentSeparator)
totalWidth := lipgloss.Width(left) + lipgloss.Width(right)
remaining := m.width - totalWidth
var padding string
if remaining > 0 {
padding = strings.Repeat(" ", remaining)
}
footer := left + padding + right
if remaining < 0 {
footer = truncateToCellWidth(footer, m.width, "...")
}
return footerStyle.Width(m.width).Render(footer)
}
func (m *chatModel) setMessages(messages []models.Message) {
m.messages = messages
m.rebuildMessageCache()
}
func (m *chatModel) setMessage(i int, msg models.Message) {
if i >= len(m.messages) {
panic("i out of range")
}
m.messages[i] = msg
m.messageCache[i] = m.renderMessage(&msg)
}
func (m *chatModel) addMessage(msg models.Message) {
m.messages = append(m.messages, msg)
m.messageCache = append(m.messageCache, m.renderMessage(&msg))
}
func (m *chatModel) setMessageContents(i int, content string) {
if i >= len(m.messages) {
panic("i out of range")
}
m.messages[i].Content = content
m.messageCache[i] = m.renderMessage(&m.messages[i])
}
func (m *chatModel) rebuildMessageCache() {
m.messageCache = make([]string, len(m.messages))
for i, msg := range m.messages {
m.messageCache[i] = m.renderMessage(&msg)
}
}
func (m *chatModel) updateContent() {
atBottom := m.content.AtBottom()
m.content.SetContent(m.conversationMessagesView())
if atBottom {
// if we were at bottom before the update, scroll with the output
m.content.GotoBottom()
}
}
func (m *chatModel) loadConversation(shortname string) tea.Cmd {
return func() tea.Msg {
if shortname == "" {
return nil
}
c, err := m.ctx.Store.ConversationByShortName(shortname)
if err != nil {
return msgError(fmt.Errorf("Could not lookup conversation: %v", err))
}
if c.ID == 0 {
return msgError(fmt.Errorf("Conversation not found: %s", shortname))
}
return msgConversationLoaded(c)
}
}
func (m *chatModel) loadMessages(c *models.Conversation) tea.Cmd {
return func() tea.Msg {
messages, err := m.ctx.Store.Messages(c)
if err != nil {
return msgError(fmt.Errorf("Could not load conversation messages: %v\n", err))
}
return msgMessagesLoaded(messages)
}
}
func (m *chatModel) persistConversation() tea.Cmd {
existingMessages, err := m.ctx.Store.Messages(m.conversation)
if err != nil {
return wrapError(fmt.Errorf("Could not retrieve existing conversation messages while trying to save: %v", err))
}
existingById := make(map[uint]*models.Message, len(existingMessages))
for _, msg := range existingMessages {
existingById[msg.ID] = &msg
}
currentById := make(map[uint]*models.Message, len(m.messages))
for _, msg := range m.messages {
currentById[msg.ID] = &msg
}
for _, msg := range existingMessages {
_, ok := currentById[msg.ID]
if !ok {
err := m.ctx.Store.DeleteMessage(&msg)
if err != nil {
return wrapError(fmt.Errorf("Failed to remove messages: %v", err))
}
}
}
for i, msg := range m.messages {
if msg.ID > 0 {
exist, ok := existingById[msg.ID]
if ok {
if msg.Content == exist.Content {
continue
}
// update message when contents don't match that of store
err := m.ctx.Store.UpdateMessage(&msg)
if err != nil {
return wrapError(err)
}
} else {
// this would be quite odd... and I'm not sure how to handle
// it at the time of writing this
}
} else {
newMessage, err := m.ctx.Store.AddReply(m.conversation, msg)
if err != nil {
return wrapError(err)
}
m.setMessage(i, *newMessage)
}
}
return nil
}
func (m *chatModel) generateConversationTitle() tea.Cmd {
return func() tea.Msg {
title, err := cmdutil.GenerateTitle(m.ctx, m.conversation)
if err != nil {
return msgError(err)
}
return msgConversationTitleChanged(title)
}
}
func (m *chatModel) waitForReply() tea.Cmd {
return func() tea.Msg {
return msgAssistantReply(<-m.replyChan)
}
}
func (m *chatModel) waitForChunk() tea.Cmd {
return func() tea.Msg {
return msgResponseChunk(<-m.replyChunkChan)
}
}
func (m *chatModel) promptLLM() tea.Cmd {
m.waitingForReply = true
m.status = "Press ctrl+c to cancel"
return func() tea.Msg {
completionProvider, err := m.ctx.GetCompletionProvider(*m.ctx.Config.Defaults.Model)
if err != nil {
return msgError(err)
}
requestParams := models.RequestParameters{
Model: *m.ctx.Config.Defaults.Model,
MaxTokens: *m.ctx.Config.Defaults.MaxTokens,
Temperature: *m.ctx.Config.Defaults.Temperature,
ToolBag: m.ctx.EnabledTools,
}
replyHandler := func(msg models.Message) {
m.replyChan <- msg
}
ctx, cancel := context.WithCancel(context.Background())
canceled := false
go func() {
select {
case <-m.stopSignal:
canceled = true
cancel()
}
}()
resp, err := completionProvider.CreateChatCompletionStream(
ctx, requestParams, m.messages, replyHandler, m.replyChunkChan,
)
if err != nil && !canceled {
return msgResponseError(err)
}
return msgResponseEnd(resp)
}
}
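
The chat model above streams completions through Go channels rather than directly through bubbletea messages: promptLLM pushes text onto replyChunkChan, waitForChunk blocks on that channel and converts each receive into a msgResponseChunk, and Update re-issues waitForChunk after handling every chunk. Below is a minimal, self-contained sketch of that channel-pump pattern (illustrative only, not code from this repo; the fake producer goroutine stands in for the completion provider):

package main

import (
    "fmt"
    "os"
    "time"

    tea "github.com/charmbracelet/bubbletea"
)

type chunkMsg string

type model struct {
    chunks  chan string
    content string
}

// waitForChunk mirrors chatModel.waitForChunk: block until the producer
// sends the next chunk, then deliver it to Update as a message.
func (m model) waitForChunk() tea.Cmd {
    return func() tea.Msg {
        return chunkMsg(<-m.chunks)
    }
}

func (m model) Init() tea.Cmd {
    // Start a fake "LLM" producer, then wait for its first chunk.
    go func() {
        for _, w := range []string{"Hello", ", ", "world", "!"} {
            m.chunks <- w
            time.Sleep(200 * time.Millisecond)
        }
    }()
    return m.waitForChunk()
}

func (m model) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
    switch msg := msg.(type) {
    case chunkMsg:
        m.content += string(msg)
        return m, m.waitForChunk() // wait for the next chunk
    case tea.KeyMsg:
        return m, tea.Quit
    }
    return m, nil
}

func (m model) View() string {
    return m.content + "\n(press any key to quit)\n"
}

func main() {
    if _, err := tea.NewProgram(model{chunks: make(chan string)}).Run(); err != nil {
        fmt.Fprintln(os.Stderr, err)
        os.Exit(1)
    }
}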


@@ -1,277 +0,0 @@
package tui
import (
"fmt"
"slices"
"strings"
"time"
models "git.mlow.ca/mlow/lmcli/pkg/lmcli/model"
"git.mlow.ca/mlow/lmcli/pkg/util"
"github.com/charmbracelet/bubbles/viewport"
tea "github.com/charmbracelet/bubbletea"
"github.com/charmbracelet/lipgloss"
)
type loadedConversation struct {
conv models.Conversation
lastReply models.Message
}
type (
// sent when conversation list is loaded
msgConversationsLoaded ([]loadedConversation)
// sent when a conversation is selected
msgConversationSelected models.Conversation
)
type conversationsModel struct {
basemodel
conversations []loadedConversation
cursor int // index of the currently selected conversation
itemOffsets []int // keeps track of the viewport y offset of each rendered item
content viewport.Model
}
func newConversationsModel(tui *model) conversationsModel {
m := conversationsModel{
basemodel: basemodel{
opts: tui.opts,
ctx: tui.ctx,
views: tui.views,
width: tui.width,
height: tui.height,
},
content: viewport.New(0, 0),
}
return m
}
func (m *conversationsModel) handleInput(msg tea.KeyMsg) (bool, tea.Cmd) {
switch msg.String() {
case "enter":
if len(m.conversations) > 0 && m.cursor < len(m.conversations) {
return true, func() tea.Msg {
return msgConversationSelected(m.conversations[m.cursor].conv)
}
}
case "j", "down":
if m.cursor < len(m.conversations)-1 {
m.cursor++
if m.cursor == len(m.conversations)-1 {
// if last conversation, simply scroll to the bottom
m.content.GotoBottom()
} else {
// this hack positions the *next* conversation slightly
// *off* the screen, ensuring the item at m.cursor is fully
// shown, even if its height is not constant due to wrapping.
scrollIntoView(&m.content, m.itemOffsets[m.cursor+1], -1)
}
m.content.SetContent(m.renderConversationList())
} else {
m.cursor = len(m.conversations) - 1
m.content.GotoBottom()
}
return true, nil
case "k", "up":
if m.cursor > 0 {
m.cursor--
if m.cursor == 0 {
m.content.GotoTop()
} else {
scrollIntoView(&m.content, m.itemOffsets[m.cursor], 1)
}
m.content.SetContent(m.renderConversationList())
} else {
m.cursor = 0
m.content.GotoTop()
}
return true, nil
case "n":
// new conversation
case "d":
// show prompt to delete conversation
case "c":
// copy/clone conversation
case "r":
// show prompt to rename conversation
case "shift+r":
// show prompt to generate name for conversation
}
return false, nil
}
func (m conversationsModel) Init() tea.Cmd {
return nil
}
func (m *conversationsModel) handleResize(width, height int) {
m.width, m.height = width, height
m.content.Width = width
}
func (m conversationsModel) Update(msg tea.Msg) (conversationsModel, tea.Cmd) {
var cmds []tea.Cmd
switch msg := msg.(type) {
case msgChangeState:
cmds = append(cmds, m.loadConversations())
m.content.SetContent(m.renderConversationList())
case tea.WindowSizeMsg:
m.handleResize(msg.Width, msg.Height)
m.content.SetContent(m.renderConversationList())
case msgConversationsLoaded:
m.conversations = msg
m.content.SetContent(m.renderConversationList())
}
var cmd tea.Cmd
m.content, cmd = m.content.Update(msg)
if cmd != nil {
cmds = append(cmds, cmd)
}
if m.width > 0 {
m.views.header = m.headerView()
m.views.footer = "" // TODO: show /something/
m.views.error = errorBanner(m.err, m.width)
fixedHeight := height(m.views.header) + height(m.views.error) + height(m.views.footer)
m.content.Height = m.height - fixedHeight
m.views.content = m.content.View()
}
return m, tea.Batch(cmds...)
}
func (m *conversationsModel) loadConversations() tea.Cmd {
return func() tea.Msg {
conversations, err := m.ctx.Store.Conversations()
if err != nil {
return msgError(fmt.Errorf("Could not load conversations: %v", err))
}
loaded := make([]loadedConversation, len(conversations))
for i, c := range conversations {
lastMessage, err := m.ctx.Store.LastMessage(&c)
if err != nil {
return msgError(err)
}
loaded[i].conv = c
loaded[i].lastReply = *lastMessage
}
slices.SortFunc(loaded, func(a, b loadedConversation) int {
return b.lastReply.CreatedAt.Compare(a.lastReply.CreatedAt)
})
return msgConversationsLoaded(loaded)
}
}
func (m *conversationsModel) headerView() string {
titleStyle := lipgloss.NewStyle().Bold(true)
header := titleStyle.Render("Conversations")
return headerStyle.Width(m.width).Render(header)
}
func (m *conversationsModel) renderConversationList() string {
type timeCategory struct {
name string
cutoff time.Duration
}
type listItem struct {
id uint
short string
title string
elapsed string
lastReplyAge time.Duration
}
now := time.Now()
midnight := time.Date(now.Year(), now.Month(), now.Day(), 0, 0, 0, 0, now.Location())
monthStart := time.Date(now.Year(), now.Month(), 1, 0, 0, 0, 0, now.Location())
dayOfWeek := int(now.Weekday())
categories := []timeCategory{
{"Today", now.Sub(midnight)},
{"Yesterday", now.Sub(midnight.AddDate(0, 0, -1))},
{"This week", now.Sub(midnight.AddDate(0, 0, -dayOfWeek))},
{"Last week", now.Sub(midnight.AddDate(0, 0, -(dayOfWeek + 7)))},
{"This month", now.Sub(monthStart)},
{"Last month", now.Sub(monthStart.AddDate(0, -1, 0))},
{"2 Months ago", now.Sub(monthStart.AddDate(0, -2, 0))},
{"3 Months ago", now.Sub(monthStart.AddDate(0, -3, 0))},
{"4 Months ago", now.Sub(monthStart.AddDate(0, -4, 0))},
{"5 Months ago", now.Sub(monthStart.AddDate(0, -5, 0))},
{"6 Months ago", now.Sub(monthStart.AddDate(0, -6, 0))},
{"Older", now.Sub(time.Time{})},
}
categoryStyle := lipgloss.NewStyle().
MarginBottom(1).
Foreground(lipgloss.Color("170")).
PaddingLeft(1).
Bold(true)
itemStyle := lipgloss.NewStyle().
MarginBottom(1)
ageStyle := lipgloss.NewStyle().Faint(true).SetString()
titleStyle := lipgloss.NewStyle().Bold(true)
untitledStyle := lipgloss.NewStyle().Faint(true).Italic(true)
selectedStyle := lipgloss.NewStyle().Faint(true).Foreground(lipgloss.Color("6"))
var currentOffset int
var currentCategory string
m.itemOffsets = make([]int, len(m.conversations))
sb := &strings.Builder{}
sb.WriteRune('\n')
currentOffset += 1
for i, c := range m.conversations {
lastReplyAge := now.Sub(c.lastReply.CreatedAt)
var category string
for _, g := range categories {
if lastReplyAge < g.cutoff {
category = g.name
break
}
}
// print the category
if category != currentCategory {
currentCategory = category
heading := categoryStyle.Render(currentCategory)
sb.WriteString(heading)
currentOffset += height(heading)
sb.WriteRune('\n')
}
tStyle := titleStyle.Copy()
padding := " "
if c.conv.Title == "" {
tStyle = tStyle.Inherit(untitledStyle).SetString("(untitled)")
}
if i == m.cursor {
tStyle = tStyle.Inherit(selectedStyle)
}
title := tStyle.Width(m.width - 3).PaddingLeft(2).Render(c.conv.Title)
if i == m.cursor {
title = ">" + title[1:]
}
m.itemOffsets[i] = currentOffset
item := itemStyle.Render(fmt.Sprintf(
"%s\n%s",
title,
padding+ageStyle.Render(util.HumanTimeElapsedSince(lastReplyAge)),
))
sb.WriteString(item)
currentOffset += height(item)
if i < len(m.conversations)-1 {
sb.WriteRune('\n')
}
}
return sb.String()
}
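
renderConversationList above groups conversations by the age of their last reply, with cutoffs anchored at midnight and at the start of the month, assigning each item to the first category whose cutoff its age falls under. A stripped-down, standalone sketch of that bucketing (illustrative only, not code from this repo, with a reduced category list):

package main

import (
    "fmt"
    "time"
)

type timeCategory struct {
    name   string
    cutoff time.Duration
}

// categorize returns the first category whose cutoff exceeds the given age.
func categorize(age time.Duration, categories []timeCategory) string {
    for _, c := range categories {
        if age < c.cutoff {
            return c.name
        }
    }
    return "Older"
}

func main() {
    now := time.Now()
    midnight := time.Date(now.Year(), now.Month(), now.Day(), 0, 0, 0, 0, now.Location())
    monthStart := time.Date(now.Year(), now.Month(), 1, 0, 0, 0, 0, now.Location())
    categories := []timeCategory{
        {"Today", now.Sub(midnight)},
        {"Yesterday", now.Sub(midnight.AddDate(0, 0, -1))},
        {"This month", now.Sub(monthStart)},
        {"Last month", now.Sub(monthStart.AddDate(0, -1, 0))},
    }
    for _, age := range []time.Duration{2 * time.Hour, 30 * time.Hour, 20 * 24 * time.Hour} {
        fmt.Printf("%v ago -> %s\n", age, categorize(age, categories))
    }
}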

File diff suppressed because it is too large.


@@ -5,9 +5,7 @@ import (
"os/exec"
"strings"
"github.com/charmbracelet/bubbles/viewport"
tea "github.com/charmbracelet/bubbletea"
"github.com/muesli/reflow/ansi"
)
type msgTempfileEditorClosed string
@@ -34,7 +32,6 @@ func openTempfileEditor(pattern string, content string, placeholder string) tea.
if err != nil {
return msgError(err)
}
os.Remove(msgFile.Name())
fileContents := string(bytes)
if strings.HasPrefix(fileContents, placeholder) {
fileContents = fileContents[len(placeholder):]
@@ -43,47 +40,3 @@ func openTempfileEditor(pattern string, content string, placeholder string) tea.
return msgTempfileEditorClosed(stripped)
})
}
// similar to lipgloss.Height, except returns 0 on empty strings
func height(str string) int {
if str == "" {
return 0
}
return strings.Count(str, "\n") + 1
}
// truncate a string until its rendered cell width + the provided tail fits
// within the given width
func truncateToCellWidth(str string, width int, tail string) string {
cellWidth := ansi.PrintableRuneWidth(str)
if cellWidth <= width {
return str
}
tailWidth := ansi.PrintableRuneWidth(tail)
for {
str = str[:len(str)-((cellWidth+tailWidth)-width)]
cellWidth = ansi.PrintableRuneWidth(str)
if cellWidth+tailWidth <= max(width, 0) {
break
}
}
return str + tail
}
// scrollIntoView scrolls the viewport, if needed, so the given line offset
// becomes visible. edge is the margin, in lines, kept between the offset and
// the viewport edge it scrolls in from; passing vp.Height/2 snaps the offset
// to the middle of the view.
func scrollIntoView(vp *viewport.Model, offset int, edge int) {
currentOffset := vp.YOffset
if offset >= currentOffset && offset < currentOffset+vp.Height {
return
}
distance := currentOffset - offset
if distance < 0 {
// we should scroll down until it just comes into view
vp.SetYOffset(currentOffset - (distance + (vp.Height - edge)) + 1)
} else {
// we should scroll up
vp.SetYOffset(currentOffset - distance - edge)
}
}
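
truncateToCellWidth above measures strings with ansi.PrintableRuneWidth rather than len(): ANSI escape sequences emitted by lipgloss (and multi-byte runes) make byte length a poor proxy for rendered cell width. A tiny standalone illustration of the difference (not code from this repo):

package main

import (
    "fmt"

    "github.com/muesli/reflow/ansi"
)

func main() {
    styled := "\x1b[1mBold title\x1b[0m" // 10 visible cells, 18 bytes
    fmt.Println("len:      ", len(styled))                     // 18
    fmt.Println("printable:", ansi.PrintableRuneWidth(styled)) // 10
}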