Compare commits

...

9 Commits

SHA1 Message Date
58e1b84fea Documentation tweak 2024-05-30 18:24:01 +00:00
a6522dbcd0 Generate title prompt tweak 2024-05-30 18:24:01 +00:00
97cd047861 Cleaned up tui view switching 2024-05-30 07:18:31 +00:00
ed784bb1cf Clean up tui View handling 2024-05-30 07:05:08 +00:00
c1792f27ff Split up tui code into packages (views/*, shared, util) 2024-05-30 06:44:40 +00:00
0ad698a942 Update GenerateTitle: Show conversation and expect result back in JSON 2024-05-28 07:37:09 +00:00
0d66a49997 Add ability to cycle through conversation branches in tui 2024-05-28 06:34:11 +00:00
008fdc0d37 Update title generation prompt 2024-05-23 06:01:30 +00:00
eec9eb41e9 Tiny formatting fix 2024-05-23 05:53:13 +00:00
8 changed files with 473 additions and 307 deletions


@ -2,6 +2,7 @@ package util
import (
"context"
"encoding/json"
"fmt"
"os"
"strings"
@ -149,32 +150,40 @@ func FormatForExternalPrompt(messages []model.Message, system bool) string {
}
func GenerateTitle(ctx *lmcli.Context, messages []model.Message) (string, error) {
const prompt = `Above is an excerpt from a conversation between a user and AI assistant. Please reply with a short title (no more than 8 words) that reflects the topic of the conversation, read from the user's perspective.
const systemPrompt = `You will be shown a conversation between a user and an AI assistant. Your task is to generate a short title (8 words or less) for the provided conversation that reflects the conversation's topic. Your response is expected to be in JSON in the format shown below.
Example conversation:
"""
User:
Hello!
Assistant:
Hello! How may I assist you?
"""
[{"role": "user", "content": "Can you help me with my math homework?"},{"role": "assistant", "content": "Sure, what topic are you struggling with?"}]
Example response:
"""
Title: A brief introduction
"""
{"title": "Help with math homework"}
`
conversation := FormatForExternalPrompt(messages, false)
type msg struct {
Role string
Content string
}
var msgs []msg
for _, m := range messages {
msgs = append(msgs, msg{string(m.Role), m.Content})
}
// Serialize the conversation to JSON
conversation, err := json.Marshal(msgs)
if err != nil {
return "", err
}
generateRequest := []model.Message{
{
Role: model.MessageRoleSystem,
Content: systemPrompt,
},
{
Role: model.MessageRoleUser,
Content: fmt.Sprintf("\"\"\"\n%s\n\"\"\"\n\n%s", conversation, prompt),
Content: string(conversation),
},
}
@ -193,11 +202,16 @@ Title: A brief introduction
return "", err
}
response = strings.TrimPrefix(response, "Title: ")
response = strings.Trim(response, "\"")
response = strings.TrimSpace(response)
// Parse the JSON response
var jsonResponse struct {
Title string `json:"title"`
}
err = json.Unmarshal([]byte(response), &jsonResponse)
if err != nil {
return "", err
}
return response, nil
return jsonResponse.Title, nil
}
// ShowWaitAnimation prints an animated ellipses to stdout until something is

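The new GenerateTitle flow serializes the conversation to JSON and parses a JSON object back out of the model's reply. As a minimal, self-contained sketch (the conversation content is hypothetical and the struct tags here are added purely for illustration, not taken from the change above):

package example // illustrative sketch only, not code from this change

import (
	"encoding/json"
	"fmt"
)

func demoTitleRoundTrip() {
	// Serialize a (hypothetical) conversation, similar to how the user message content is now built.
	conversation, _ := json.Marshal([]struct {
		Role    string `json:"role"`
		Content string `json:"content"`
	}{
		{"user", "Can you help me with my math homework?"},
		{"assistant", "Sure, what topic are you struggling with?"},
	})
	fmt.Println(string(conversation))

	// The reply is now parsed as JSON instead of trimming a "Title: " prefix.
	var reply struct {
		Title string `json:"title"`
	}
	if err := json.Unmarshal([]byte(`{"title": "Help with math homework"}`), &reply); err == nil {
		fmt.Println(reply.Title) // Help with math homework
	}
}
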
pkg/tui/shared/shared.go Normal file

@ -0,0 +1,52 @@
package shared
import (
"git.mlow.ca/mlow/lmcli/pkg/lmcli"
tea "github.com/charmbracelet/bubbletea"
)
type Values struct {
ConvShortname string
}
type State struct {
Ctx *lmcli.Context
Values *Values
Width int
Height int
Err error
}
// a convenience struct for holding rendered content for individual UI
// elements
type Sections struct {
Header string
Content string
Error string
Input string
Footer string
}
type (
// send to change the current state
MsgViewChange View
// sent to a state when it is entered
MsgViewEnter struct{}
// sent when an error occurs
MsgError error
)
func WrapError(err error) tea.Cmd {
return func() tea.Msg {
return MsgError(err)
}
}
type View int
const (
StateChat View = iota
StateConversations
//StateSettings
//StateHelp
)

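As a usage sketch (not code from this change set), a child view can ask the root model to switch views or surface an error by returning commands that emit the new shared messages:

package example // illustrative sketch only

import (
	"git.mlow.ca/mlow/lmcli/pkg/tui/shared"
	tea "github.com/charmbracelet/bubbletea"
)

// switchToConversations returns a command that asks the root model to make
// the conversation list the active view.
func switchToConversations() tea.Cmd {
	return func() tea.Msg {
		return shared.MsgViewChange(shared.StateConversations)
	}
}

// reportIfError wraps a non-nil error so it reaches the root model as a shared.MsgError.
func reportIfError(err error) tea.Cmd {
	if err == nil {
		return nil
	}
	return shared.WrapError(err)
}
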
pkg/tui/styles/styles.go Normal file

@ -0,0 +1,8 @@
package styles
import "github.com/charmbracelet/lipgloss"
var Header = lipgloss.NewStyle().
PaddingLeft(1).
PaddingRight(1).
Background(lipgloss.Color("0"))


@ -10,99 +10,60 @@ import (
"fmt"
"git.mlow.ca/mlow/lmcli/pkg/lmcli"
"git.mlow.ca/mlow/lmcli/pkg/tui/shared"
"git.mlow.ca/mlow/lmcli/pkg/tui/views/chat"
"git.mlow.ca/mlow/lmcli/pkg/tui/views/conversations"
tea "github.com/charmbracelet/bubbletea"
"github.com/charmbracelet/lipgloss"
)
type state int
// Application model
type Model struct {
shared.State
const (
stateChat = iota
stateConversations
//stateModelSelect // stateOptions?
//stateHelp
)
// this struct holds the final rendered content of various UI components, and
// gets populated in the application's Update() method. View() simply composes
// these elements into the final output
type views struct {
header string
content string
error string
input string
footer string
state shared.View
chat chat.Model
conversations conversations.Model
}
type (
// send to change the current state
msgStateChange state
// sent to a state when it is entered
msgStateEnter struct{}
// sent when an error occurs
msgError error
)
type Options struct {
convShortname string
}
type basemodel struct {
opts *Options
ctx *lmcli.Context
views *views
err error
width int
height int
}
type model struct {
basemodel
state state
chat chatModel
conversations conversationsModel
}
func initialModel(ctx *lmcli.Context, opts Options) model {
m := model{
basemodel: basemodel{
opts: &opts,
ctx: ctx,
views: &views{},
func initialModel(ctx *lmcli.Context, values shared.Values) Model {
m := Model{
State: shared.State{
Ctx: ctx,
Values: &values,
},
}
m.state = stateChat
m.chat = newChatModel(&m)
m.conversations = newConversationsModel(&m)
m.state = shared.StateChat
m.chat = chat.Chat(m.State)
m.conversations = conversations.Conversations(m.State)
return m
}
func (m model) Init() tea.Cmd {
func (m Model) Init() tea.Cmd {
return tea.Batch(
m.conversations.Init(),
m.chat.Init(),
func() tea.Msg {
return msgStateChange(m.state)
return shared.MsgViewChange(m.state)
},
)
}
func (m *model) handleGlobalInput(msg tea.KeyMsg) (bool, tea.Cmd) {
func (m *Model) handleGlobalInput(msg tea.KeyMsg) (bool, tea.Cmd) {
// delegate input to the active child state first, only handling it at the
// global level if the child state does not
var cmds []tea.Cmd
switch m.state {
case stateChat:
handled, cmd := m.chat.handleInput(msg)
case shared.StateChat:
handled, cmd := m.chat.HandleInput(msg)
cmds = append(cmds, cmd)
if handled {
m.chat, cmd = m.chat.Update(nil)
cmds = append(cmds, cmd)
return true, tea.Batch(cmds...)
}
case stateConversations:
handled, cmd := m.conversations.handleInput(msg)
case shared.StateConversations:
handled, cmd := m.conversations.HandleInput(msg)
cmds = append(cmds, cmd)
if handled {
m.conversations, cmd = m.conversations.Update(nil)
@ -117,7 +78,7 @@ func (m *model) handleGlobalInput(msg tea.KeyMsg) (bool, tea.Cmd) {
return false, nil
}
func (m model) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
func (m Model) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
var cmds []tea.Cmd
switch msg := msg.(type) {
@ -126,32 +87,24 @@ func (m model) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
if handled {
return m, cmd
}
case msgStateChange:
m.state = state(msg)
case shared.MsgViewChange:
m.state = shared.View(msg)
switch m.state {
case stateChat:
m.chat.handleResize(m.width, m.height)
case stateConversations:
m.conversations.handleResize(m.width, m.height)
case shared.StateChat:
m.chat.HandleResize(m.Width, m.Height)
case shared.StateConversations:
m.conversations.HandleResize(m.Width, m.Height)
}
return m, func() tea.Msg { return msgStateEnter(struct{}{}) }
case msgConversationSelected:
// passed up through conversation list model
m.opts.convShortname = msg.ShortName.String
cmds = append(cmds, func() tea.Msg {
return msgStateChange(stateChat)
})
return m, func() tea.Msg { return shared.MsgViewEnter(struct{}{}) }
case tea.WindowSizeMsg:
m.width, m.height = msg.Width, msg.Height
case msgError:
m.err = msg
m.Width, m.Height = msg.Width, msg.Height
}
var cmd tea.Cmd
switch m.state {
case stateConversations:
case shared.StateConversations:
m.conversations, cmd = m.conversations.Update(msg)
case stateChat:
case shared.StateChat:
m.chat, cmd = m.chat.Update(msg)
}
if cmd != nil {
@ -161,60 +114,18 @@ func (m model) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
return m, tea.Batch(cmds...)
}
func (m model) View() string {
if m.width == 0 {
// this is the case upon initial startup, but it's also a safe bet that
// we can just skip rendering if the terminal is really 0 width...
// without this, the m.*View() functions may crash
return ""
}
sections := make([]string, 0, 6)
if m.views.header != "" {
sections = append(sections, m.views.header)
}
func (m Model) View() string {
switch m.state {
case stateConversations:
sections = append(sections, m.views.content)
if m.views.error != "" {
sections = append(sections, m.views.error)
case shared.StateConversations:
return m.conversations.View()
case shared.StateChat:
return m.chat.View()
}
case stateChat:
sections = append(sections, m.views.content)
if m.views.error != "" {
sections = append(sections, m.views.error)
}
sections = append(sections, m.views.input)
}
if m.views.footer != "" {
sections = append(sections, m.views.footer)
}
return lipgloss.JoinVertical(lipgloss.Left, sections...)
}
func errorBanner(err error, width int) string {
if err == nil {
return ""
}
return lipgloss.NewStyle().
Width(width).
AlignHorizontal(lipgloss.Center).
Bold(true).
Foreground(lipgloss.Color("1")).
Render(fmt.Sprintf("%s", err))
}
func wrapError(err error) tea.Cmd {
return func() tea.Msg {
return msgError(err)
}
}
func Launch(ctx *lmcli.Context, convShortname string) error {
p := tea.NewProgram(initialModel(ctx, Options{convShortname}), tea.WithAltScreen())
p := tea.NewProgram(initialModel(ctx, shared.Values{ConvShortname: convShortname}), tea.WithAltScreen())
if _, err := p.Run(); err != nil {
return fmt.Errorf("Error running program: %v", err)
}

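The refactored views all follow the same HandleInput contract: the boolean reports whether the view consumed the key, and when it did not, the root model's handleGlobalInput falls back to its global bindings. A sketch of a conforming handler (the key binding is hypothetical):

package example // illustrative sketch only

import (
	"git.mlow.ca/mlow/lmcli/pkg/tui/shared"
	tea "github.com/charmbracelet/bubbletea"
)

func handleInput(msg tea.KeyMsg) (bool, tea.Cmd) {
	switch msg.String() {
	case "q": // hypothetical binding handled by this view
		return true, func() tea.Msg {
			return shared.MsgViewChange(shared.StateConversations)
		}
	}
	// not handled here; the caller may apply its own (global) bindings
	return false, nil
}
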

@ -1,26 +1,28 @@
package tui
package util
import (
"fmt"
"os"
"os/exec"
"strings"
"github.com/charmbracelet/bubbles/viewport"
tea "github.com/charmbracelet/bubbletea"
"github.com/charmbracelet/lipgloss"
"github.com/muesli/reflow/ansi"
)
type msgTempfileEditorClosed string
type MsgTempfileEditorClosed string
// openTempfileEditor opens an $EDITOR on a new temporary file with the given
// content. Upon closing, the contents of the file are read back returned
// wrapped in a msgTempfileEditorClosed returned by the tea.Cmd
func openTempfileEditor(pattern string, content string, placeholder string) tea.Cmd {
// OpenTempfileEditor opens $EDITOR on a temporary file with the given content.
// Upon closing, the contents of the file are read and returned wrapped in a
// MsgTempfileEditorClosed
func OpenTempfileEditor(pattern string, content string, placeholder string) tea.Cmd {
msgFile, _ := os.CreateTemp("/tmp", pattern)
err := os.WriteFile(msgFile.Name(), []byte(placeholder+content), os.ModeAppend)
if err != nil {
return wrapError(err)
return func() tea.Msg { return err }
}
editor := os.Getenv("EDITOR")
@ -32,7 +34,7 @@ func openTempfileEditor(pattern string, content string, placeholder string) tea.
return tea.ExecProcess(c, func(err error) tea.Msg {
bytes, err := os.ReadFile(msgFile.Name())
if err != nil {
return msgError(err)
return err
}
os.Remove(msgFile.Name())
fileContents := string(bytes)
@ -40,12 +42,12 @@ func openTempfileEditor(pattern string, content string, placeholder string) tea.
fileContents = fileContents[len(placeholder):]
}
stripped := strings.Trim(fileContents, "\n \t")
return msgTempfileEditorClosed(stripped)
return MsgTempfileEditorClosed(stripped)
})
}
// similar to lipgloss.Height, except returns 0 on empty strings
func height(str string) int {
// similar to lipgloss.Height, except returns 0 instead of 1 on empty strings
func Height(str string) int {
if str == "" {
return 0
}
@ -54,7 +56,7 @@ func height(str string) int {
// truncate a string until its rendered cell width + the provided tail fits
// within the given width
func truncateToCellWidth(str string, width int, tail string) string {
func TruncateToCellWidth(str string, width int, tail string) string {
cellWidth := ansi.PrintableRuneWidth(str)
if cellWidth <= width {
return str
@ -70,10 +72,7 @@ func truncateToCellWidth(str string, width int, tail string) string {
return str + tail
}
// fraction is the fraction of the total screen height into view the offset
// should be scrolled into view. 0.5 = items will be snapped to middle of
// view
func scrollIntoView(vp *viewport.Model, offset int, edge int) {
func ScrollIntoView(vp *viewport.Model, offset int, edge int) {
currentOffset := vp.YOffset
if offset >= currentOffset && offset < currentOffset+vp.Height {
return
@ -87,3 +86,16 @@ func scrollIntoView(vp *viewport.Model, offset int, edge int) {
vp.SetYOffset(currentOffset - distance - edge)
}
}
func ErrorBanner(err error, width int) string {
if err == nil {
return ""
}
return lipgloss.NewStyle().
Width(width).
AlignHorizontal(lipgloss.Center).
Bold(true).
Foreground(lipgloss.Color("1")).
Render(fmt.Sprintf("%s", err))
}

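A short usage sketch of the now-exported helpers; the inputs and widths are illustrative:

package example // illustrative sketch only

import (
	"errors"
	"fmt"

	tuiutil "git.mlow.ca/mlow/lmcli/pkg/tui/util"
)

func demoHelpers() {
	fmt.Println(tuiutil.Height(""))     // 0 (lipgloss.Height reports 1 for an empty string)
	fmt.Println(tuiutil.Height("a\nb")) // 2

	// Truncate so the rendered width of the result plus the "..." tail fits within 16 cells.
	fmt.Println(tuiutil.TruncateToCellWidth("A very long conversation title", 16, "..."))

	// Bold, red, centered banner spanning 80 columns; returns "" for a nil error.
	fmt.Println(tuiutil.ErrorBanner(errors.New("something went wrong"), 80))
}
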

@ -1,4 +1,4 @@
package tui
package chat
import (
"context"
@ -9,6 +9,9 @@ import (
cmdutil "git.mlow.ca/mlow/lmcli/pkg/cmd/util"
models "git.mlow.ca/mlow/lmcli/pkg/lmcli/model"
"git.mlow.ca/mlow/lmcli/pkg/tui/shared"
"git.mlow.ca/mlow/lmcli/pkg/tui/styles"
tuiutil "git.mlow.ca/mlow/lmcli/pkg/tui/util"
"github.com/charmbracelet/bubbles/cursor"
"github.com/charmbracelet/bubbles/spinner"
"github.com/charmbracelet/bubbles/textarea"
@ -40,7 +43,7 @@ type (
msgResponseChunk string
// sent when response is finished being received
msgResponseEnd string
// a special case of msgError that stops the response waiting animation
// a special case of common.MsgError that stops the response waiting animation
msgResponseError error
// sent on each completed reply
msgAssistantReply models.Message
@ -52,13 +55,13 @@ type (
msgMessagesLoaded []models.Message
)
type chatModel struct {
basemodel
width int
height int
type Model struct {
shared.State
shared.Sections
// app state
conversation *models.Conversation
rootMessages []models.Message
messages []models.Message
selectedMessage int
waitingForReply bool
@ -68,10 +71,6 @@ type chatModel struct {
replyChunkChan chan string
persistence bool // whether we will save new messages in the conversation
tokenCount uint
startTime time.Time
elapsed time.Duration
// ui state
focus focusState
wrap bool // whether message content is wrapped to viewport width
@ -80,6 +79,10 @@ type chatModel struct {
messageCache []string // cache of syntax highlighted and wrapped message content
messageOffsets []int
tokenCount uint
startTime time.Time
elapsed time.Duration
// ui elements
content viewport.Model
input textarea.Model
@ -87,13 +90,9 @@ type chatModel struct {
replyCursor cursor.Model // cursor to indicate incoming response
}
func newChatModel(tui *model) chatModel {
m := chatModel{
basemodel: basemodel{
opts: tui.opts,
ctx: tui.ctx,
views: tui.views,
},
func Chat(state shared.State) Model {
m := Model{
State: state,
conversation: &models.Conversation{},
persistence: true,
@ -126,7 +125,7 @@ func newChatModel(tui *model) chatModel {
m.replyCursor.SetChar(" ")
m.replyCursor.Focus()
system := tui.ctx.GetSystemPrompt()
system := state.Ctx.GetSystemPrompt()
if system != "" {
m.messages = []models.Message{{
Role: models.MessageRoleSystem,
@ -151,11 +150,6 @@ func newChatModel(tui *model) chatModel {
// styles
var (
headerStyle = lipgloss.NewStyle().
PaddingLeft(1).
PaddingRight(1).
Background(lipgloss.Color("0"))
messageHeadingStyle = lipgloss.NewStyle().
MarginTop(1).
MarginBottom(1).
@ -180,10 +174,10 @@ var (
footerStyle = lipgloss.NewStyle()
)
func (m *chatModel) handleInput(msg tea.KeyMsg) (bool, tea.Cmd) {
func (m *Model) HandleInput(msg tea.KeyMsg) (bool, tea.Cmd) {
switch m.focus {
case focusInput:
consumed, cmd := m.handleInputKey(msg)
consumed, cmd := m.HandleInputKey(msg)
if consumed {
return true, cmd
}
@ -201,7 +195,7 @@ func (m *chatModel) handleInput(msg tea.KeyMsg) (bool, tea.Cmd) {
return true, nil
}
return true, func() tea.Msg {
return msgStateChange(stateConversations)
return shared.MsgViewChange(shared.StateConversations)
}
case "ctrl+c":
if m.waitingForReply {
@ -225,15 +219,15 @@ func (m *chatModel) handleInput(msg tea.KeyMsg) (bool, tea.Cmd) {
return false, nil
}
func (m chatModel) Init() tea.Cmd {
func (m Model) Init() tea.Cmd {
return tea.Batch(
m.waitForChunk(),
m.waitForReply(),
)
}
func (m *chatModel) handleResize(width, height int) {
m.width, m.height = width, height
func (m *Model) HandleResize(width, height int) {
m.Width, m.Height = width, height
m.content.Width = width
m.input.SetWidth(width - m.input.FocusedStyle.Base.GetHorizontalFrameSize())
if len(m.messages) > 0 {
@ -242,22 +236,22 @@ func (m *chatModel) handleResize(width, height int) {
}
}
func (m chatModel) Update(msg tea.Msg) (chatModel, tea.Cmd) {
func (m Model) Update(msg tea.Msg) (Model, tea.Cmd) {
var cmds []tea.Cmd
switch msg := msg.(type) {
case msgStateEnter:
case shared.MsgViewEnter:
// wake up spinners and cursors
cmds = append(cmds, cursor.Blink, m.spinner.Tick)
if m.opts.convShortname != "" && m.conversation.ShortName.String != m.opts.convShortname {
cmds = append(cmds, m.loadConversation(m.opts.convShortname))
if m.State.Values.ConvShortname != "" && m.conversation.ShortName.String != m.State.Values.ConvShortname {
cmds = append(cmds, m.loadConversation(m.State.Values.ConvShortname))
}
m.rebuildMessageCache()
m.updateContent()
case tea.WindowSizeMsg:
m.handleResize(msg.Width, msg.Height)
case msgTempfileEditorClosed:
m.HandleResize(msg.Width, msg.Height)
case tuiutil.MsgTempfileEditorClosed:
contents := string(msg)
switch m.editorTarget {
case input:
@ -266,15 +260,16 @@ func (m chatModel) Update(msg tea.Msg) (chatModel, tea.Cmd) {
m.setMessageContents(m.selectedMessage, contents)
if m.persistence && m.messages[m.selectedMessage].ID > 0 {
// update persisted message
err := m.ctx.Store.UpdateMessage(&m.messages[m.selectedMessage])
err := m.State.Ctx.Store.UpdateMessage(&m.messages[m.selectedMessage])
if err != nil {
cmds = append(cmds, wrapError(fmt.Errorf("Could not save edited message: %v", err)))
cmds = append(cmds, shared.WrapError(fmt.Errorf("Could not save edited message: %v", err)))
}
}
m.updateContent()
}
case msgConversationLoaded:
m.conversation = (*models.Conversation)(msg)
m.rootMessages, _ = m.State.Ctx.Store.RootMessages(m.conversation.ID)
cmds = append(cmds, m.loadMessages(m.conversation))
case msgMessagesLoaded:
m.selectedMessage = len(msg) - 1
@ -328,7 +323,7 @@ func (m chatModel) Update(msg tea.Msg) (chatModel, tea.Cmd) {
if m.persistence {
err := m.persistConversation()
if err != nil {
cmds = append(cmds, wrapError(err))
cmds = append(cmds, shared.WrapError(err))
}
}
@ -350,15 +345,15 @@ func (m chatModel) Update(msg tea.Msg) (chatModel, tea.Cmd) {
case msgResponseError:
m.waitingForReply = false
m.status = "Press ctrl+s to send"
m.err = error(msg)
m.State.Err = error(msg)
m.updateContent()
case msgConversationTitleChanged:
title := string(msg)
m.conversation.Title = title
if m.persistence {
err := m.ctx.Store.UpdateConversation(m.conversation)
err := m.State.Ctx.Store.UpdateConversation(m.conversation)
if err != nil {
cmds = append(cmds, wrapError(err))
cmds = append(cmds, shared.WrapError(err))
}
}
case cursor.BlinkMsg:
@ -394,21 +389,21 @@ func (m chatModel) Update(msg tea.Msg) (chatModel, tea.Cmd) {
}
// update views once window dimensions are known
if m.width > 0 {
m.views.header = m.headerView()
m.views.footer = m.footerView()
m.views.error = errorBanner(m.err, m.width)
fixedHeight := height(m.views.header) + height(m.views.error) + height(m.views.footer)
if m.Width > 0 {
m.Header = m.headerView()
m.Footer = m.footerView()
m.Error = tuiutil.ErrorBanner(m.Err, m.Width)
fixedHeight := tuiutil.Height(m.Header) + tuiutil.Height(m.Error) + tuiutil.Height(m.Footer)
// calculate clamped input height to accommodate input text
// minimum 4 lines, maximum half of content area
newHeight := max(4, min((m.height-fixedHeight-1)/2, m.input.LineCount()))
newHeight := max(4, min((m.Height-fixedHeight-1)/2, m.input.LineCount()))
m.input.SetHeight(newHeight)
m.views.input = m.input.View()
m.Input = m.input.View()
// remaining height towards content
m.content.Height = m.height - fixedHeight - height(m.views.input)
m.views.content = m.content.View()
m.content.Height = m.Height - fixedHeight - tuiutil.Height(m.Input)
m.Content = m.content.View()
}
// this is a pretty nasty hack to ensure the input area viewport doesn't
@ -436,7 +431,7 @@ func (m chatModel) Update(msg tea.Msg) (chatModel, tea.Cmd) {
return m, tea.Batch(cmds...)
}
func (m *chatModel) handleMessagesKey(msg tea.KeyMsg) (bool, tea.Cmd) {
func (m *Model) handleMessagesKey(msg tea.KeyMsg) (bool, tea.Cmd) {
switch msg.String() {
case "tab", "enter":
m.focus = focusInput
@ -445,7 +440,7 @@ func (m *chatModel) handleMessagesKey(msg tea.KeyMsg) (bool, tea.Cmd) {
return true, nil
case "e":
message := m.messages[m.selectedMessage]
cmd := openTempfileEditor("message.*.md", message.Content, "# Edit the message below\n")
cmd := tuiutil.OpenTempfileEditor("message.*.md", message.Content, "# Edit the message below\n")
m.editorTarget = selectedMessage
return true, cmd
case "ctrl+k":
@ -453,7 +448,7 @@ func (m *chatModel) handleMessagesKey(msg tea.KeyMsg) (bool, tea.Cmd) {
m.selectedMessage--
m.updateContent()
offset := m.messageOffsets[m.selectedMessage]
scrollIntoView(&m.content, offset, m.content.Height/2)
tuiutil.ScrollIntoView(&m.content, offset, m.content.Height/2)
}
return true, nil
case "ctrl+j":
@ -461,9 +456,43 @@ func (m *chatModel) handleMessagesKey(msg tea.KeyMsg) (bool, tea.Cmd) {
m.selectedMessage++
m.updateContent()
offset := m.messageOffsets[m.selectedMessage]
scrollIntoView(&m.content, offset, m.content.Height/2)
tuiutil.ScrollIntoView(&m.content, offset, m.content.Height/2)
}
return true, nil
case "ctrl+h", "ctrl+l":
dir := CyclePrev
if msg.String() == "ctrl+l" {
dir = CycleNext
}
var err error
var selected *models.Message
if m.selectedMessage == 0 {
selected, err = m.cycleSelectedRoot(m.conversation, dir)
if err != nil {
return true, shared.WrapError(fmt.Errorf("Could not cycle conversation root: %v", err))
}
} else if m.selectedMessage > 0 {
selected, err = m.cycleSelectedReply(&m.messages[m.selectedMessage-1], dir)
if err != nil {
return true, shared.WrapError(fmt.Errorf("Could not cycle reply: %v", err))
}
}
if selected == nil {
return false, nil
}
// Retrieve updated view at this point
newPath, err := m.State.Ctx.Store.PathToLeaf(selected)
if err != nil {
m.State.Err = fmt.Errorf("Could not fetch messages: %v", err)
}
m.messages = append(m.messages[:m.selectedMessage], newPath...)
m.rebuildMessageCache()
m.updateContent()
return true, nil
case "ctrl+r":
// resubmit the conversation with all messages up until and including the selected message
if m.waitingForReply || len(m.messages) == 0 {
@ -479,7 +508,74 @@ func (m *chatModel) handleMessagesKey(msg tea.KeyMsg) (bool, tea.Cmd) {
return false, nil
}
func (m *chatModel) handleInputKey(msg tea.KeyMsg) (bool, tea.Cmd) {
type CycleDirection int
const (
CycleNext CycleDirection = 1
CyclePrev CycleDirection = -1
)
func cycleMessages(m *models.Message, msgs []models.Message, dir CycleDirection) (*models.Message, error) {
currentIndex := -1
for i, reply := range msgs {
if reply.ID == m.ID {
currentIndex = i
break
}
}
if currentIndex < 0 {
return nil, fmt.Errorf("message not found")
}
var next int
if dir == CyclePrev {
// Wrap around to the last reply if at the beginning
next = (currentIndex - 1 + len(msgs)) % len(msgs)
} else {
// Wrap around to the first reply if at the end
next = (currentIndex + 1) % len(msgs)
}
return &msgs[next], nil
}
func (m *Model) cycleSelectedRoot(conv *models.Conversation, dir CycleDirection) (*models.Message, error) {
if len(m.rootMessages) < 2 {
return nil, nil
}
nextRoot, err := cycleMessages(conv.SelectedRoot, m.rootMessages, dir)
if err != nil {
return nil, err
}
conv.SelectedRoot = nextRoot
err = m.State.Ctx.Store.UpdateConversation(conv)
if err != nil {
return nil, fmt.Errorf("Could not update conversation: %v", err)
}
return nextRoot, nil
}
func (m *Model) cycleSelectedReply(message *models.Message, dir CycleDirection) (*models.Message, error) {
if len(message.Replies) < 2 {
return nil, nil
}
nextReply, err := cycleMessages(message.SelectedReply, message.Replies, dir)
if err != nil {
return nil, err
}
message.SelectedReply = nextReply
err = m.State.Ctx.Store.UpdateMessage(message)
if err != nil {
return nil, fmt.Errorf("Could not update message: %v", err)
}
return nextReply, nil
}
func (m *Model) HandleInputKey(msg tea.KeyMsg) (bool, tea.Cmd) {
switch msg.String() {
case "esc":
m.focus = focusMessages
@ -488,7 +584,7 @@ func (m *chatModel) handleInputKey(msg tea.KeyMsg) (bool, tea.Cmd) {
m.selectedMessage = len(m.messages) - 1
}
offset := m.messageOffsets[m.selectedMessage]
scrollIntoView(&m.content, offset, m.content.Height/2)
tuiutil.ScrollIntoView(&m.content, offset, m.content.Height/2)
}
m.updateContent()
m.input.Blur()
@ -504,7 +600,7 @@ func (m *chatModel) handleInputKey(msg tea.KeyMsg) (bool, tea.Cmd) {
}
if len(m.messages) > 0 && m.messages[len(m.messages)-1].Role == models.MessageRoleUser {
return true, wrapError(fmt.Errorf("Can't reply to a user message"))
return true, shared.WrapError(fmt.Errorf("Can't reply to a user message"))
}
m.addMessage(models.Message{
@ -517,7 +613,7 @@ func (m *chatModel) handleInputKey(msg tea.KeyMsg) (bool, tea.Cmd) {
if m.persistence {
err := m.persistConversation()
if err != nil {
return true, wrapError(err)
return true, shared.WrapError(err)
}
}
@ -526,14 +622,37 @@ func (m *chatModel) handleInputKey(msg tea.KeyMsg) (bool, tea.Cmd) {
m.content.GotoBottom()
return true, cmd
case "ctrl+e":
cmd := openTempfileEditor("message.*.md", m.input.Value(), "# Edit your input below\n")
cmd := tuiutil.OpenTempfileEditor("message.*.md", m.input.Value(), "# Edit your input below\n")
m.editorTarget = input
return true, cmd
}
return false, nil
}
func (m *chatModel) renderMessageHeading(i int, message *models.Message) string {
func (m Model) View() string {
if m.Width == 0 {
return ""
}
sections := make([]string, 0, 6)
if m.Header != "" {
sections = append(sections, m.Header)
}
sections = append(sections, m.Content)
if m.Error != "" {
sections = append(sections, m.Error)
}
sections = append(sections, m.Input)
if m.Footer != "" {
sections = append(sections, m.Footer)
}
return lipgloss.JoinVertical(lipgloss.Left, sections...)
}
func (m *Model) renderMessageHeading(i int, message *models.Message) string {
icon := ""
friendly := message.Role.FriendlyRole()
style := lipgloss.NewStyle().Faint(true).Bold(true)
@ -558,6 +677,29 @@ func (m *chatModel) renderMessageHeading(i int, message *models.Message) string
var suffix string
faint := lipgloss.NewStyle().Faint(true)
if i == 0 && len(m.rootMessages) > 0 {
selectedRootIndex := 0
for j, reply := range m.rootMessages {
if reply.ID == *m.conversation.SelectedRootID {
selectedRootIndex = j
break
}
}
suffix += faint.Render(fmt.Sprintf(" <%d/%d>", selectedRootIndex+1, len(m.rootMessages)))
}
if i > 0 && len(m.messages[i-1].Replies) > 1 {
// Find the selected reply index
selectedReplyIndex := 0
for j, reply := range m.messages[i-1].Replies {
if reply.ID == *m.messages[i-1].SelectedReplyID {
selectedReplyIndex = j
break
}
}
suffix += faint.Render(fmt.Sprintf(" <%d/%d>", selectedReplyIndex+1, len(m.messages[i-1].Replies)))
}
if m.focus == focusMessages {
if i == m.selectedMessage {
prefix = "> "
@ -571,14 +713,14 @@ func (m *chatModel) renderMessageHeading(i int, message *models.Message) string
return messageHeadingStyle.Render(prefix + user + suffix)
}
func (m *chatModel) renderMessage(i int) string {
func (m *Model) renderMessage(i int) string {
msg := &m.messages[i]
// Write message contents
sb := &strings.Builder{}
sb.Grow(len(msg.Content) * 2)
if msg.Content != "" {
err := m.ctx.Chroma.Highlight(sb, msg.Content)
err := m.State.Ctx.Chroma.Highlight(sb, msg.Content)
if err != nil {
sb.Reset()
sb.WriteString(msg.Content)
@ -642,7 +784,7 @@ func (m *chatModel) renderMessage(i int) string {
if msg.Content != "" {
sb.WriteString("\n\n")
}
_ = m.ctx.Chroma.HighlightLang(sb, toolString, "yaml")
_ = m.State.Ctx.Chroma.HighlightLang(sb, toolString, "yaml")
}
content := strings.TrimRight(sb.String(), "\n")
@ -659,7 +801,7 @@ func (m *chatModel) renderMessage(i int) string {
}
// render the conversation into a string
func (m *chatModel) conversationMessagesView() string {
func (m *Model) conversationMessagesView() string {
sb := strings.Builder{}
m.messageOffsets = make([]int, len(m.messages))
@ -700,7 +842,7 @@ func (m *chatModel) conversationMessagesView() string {
return sb.String()
}
func (m *chatModel) headerView() string {
func (m *Model) headerView() string {
titleStyle := lipgloss.NewStyle().Bold(true)
var title string
if m.conversation != nil && m.conversation.Title != "" {
@ -708,12 +850,12 @@ func (m *chatModel) headerView() string {
} else {
title = "Untitled"
}
title = truncateToCellWidth(title, m.width-headerStyle.GetHorizontalPadding(), "...")
title = tuiutil.TruncateToCellWidth(title, m.Width-styles.Header.GetHorizontalPadding(), "...")
header := titleStyle.Render(title)
return headerStyle.Width(m.width).Render(header)
return styles.Header.Width(m.Width).Render(header)
}
func (m *chatModel) footerView() string {
func (m *Model) footerView() string {
segmentStyle := lipgloss.NewStyle().PaddingLeft(1).PaddingRight(1).Faint(true)
segmentSeparator := "|"
@ -741,14 +883,14 @@ func (m *chatModel) footerView() string {
rightSegments = append(rightSegments, segmentStyle.Render(throughput))
}
model := fmt.Sprintf("Model: %s", *m.ctx.Config.Defaults.Model)
model := fmt.Sprintf("Model: %s", *m.State.Ctx.Config.Defaults.Model)
rightSegments = append(rightSegments, segmentStyle.Render(model))
left := strings.Join(leftSegments, segmentSeparator)
right := strings.Join(rightSegments, segmentSeparator)
totalWidth := lipgloss.Width(left) + lipgloss.Width(right)
remaining := m.width - totalWidth
remaining := m.Width - totalWidth
var padding string
if remaining > 0 {
@ -757,12 +899,12 @@ func (m *chatModel) footerView() string {
footer := left + padding + right
if remaining < 0 {
footer = truncateToCellWidth(footer, m.width, "...")
footer = tuiutil.TruncateToCellWidth(footer, m.Width, "...")
}
return footerStyle.Width(m.width).Render(footer)
return footerStyle.Width(m.Width).Render(footer)
}
func (m *chatModel) setMessage(i int, msg models.Message) {
func (m *Model) setMessage(i int, msg models.Message) {
if i >= len(m.messages) {
panic("i out of range")
}
@ -770,12 +912,12 @@ func (m *chatModel) setMessage(i int, msg models.Message) {
m.messageCache[i] = m.renderMessage(i)
}
func (m *chatModel) addMessage(msg models.Message) {
func (m *Model) addMessage(msg models.Message) {
m.messages = append(m.messages, msg)
m.messageCache = append(m.messageCache, m.renderMessage(len(m.messages)-1))
}
func (m *chatModel) setMessageContents(i int, content string) {
func (m *Model) setMessageContents(i int, content string) {
if i >= len(m.messages) {
panic("i out of range")
}
@ -783,14 +925,14 @@ func (m *chatModel) setMessageContents(i int, content string) {
m.messageCache[i] = m.renderMessage(i)
}
func (m *chatModel) rebuildMessageCache() {
func (m *Model) rebuildMessageCache() {
m.messageCache = make([]string, len(m.messages))
for i := range m.messages {
m.messageCache[i] = m.renderMessage(i)
}
}
func (m *chatModel) updateContent() {
func (m *Model) updateContent() {
atBottom := m.content.AtBottom()
m.content.SetContent(m.conversationMessagesView())
if atBottom {
@ -799,36 +941,36 @@ func (m *chatModel) updateContent() {
}
}
func (m *chatModel) loadConversation(shortname string) tea.Cmd {
func (m *Model) loadConversation(shortname string) tea.Cmd {
return func() tea.Msg {
if shortname == "" {
return nil
}
c, err := m.ctx.Store.ConversationByShortName(shortname)
c, err := m.State.Ctx.Store.ConversationByShortName(shortname)
if err != nil {
return msgError(fmt.Errorf("Could not lookup conversation: %v", err))
return shared.MsgError(fmt.Errorf("Could not lookup conversation: %v", err))
}
if c.ID == 0 {
return msgError(fmt.Errorf("Conversation not found: %s", shortname))
return shared.MsgError(fmt.Errorf("Conversation not found: %s", shortname))
}
return msgConversationLoaded(c)
}
}
func (m *chatModel) loadMessages(c *models.Conversation) tea.Cmd {
func (m *Model) loadMessages(c *models.Conversation) tea.Cmd {
return func() tea.Msg {
messages, err := m.ctx.Store.PathToLeaf(c.SelectedRoot)
messages, err := m.State.Ctx.Store.PathToLeaf(c.SelectedRoot)
if err != nil {
return msgError(fmt.Errorf("Could not load conversation messages: %v\n", err))
return shared.MsgError(fmt.Errorf("Could not load conversation messages: %v\n", err))
}
return msgMessagesLoaded(messages)
}
}
func (m *chatModel) persistConversation() error {
func (m *Model) persistConversation() error {
if m.conversation.ID == 0 {
// Start a new conversation with all messages so far
c, messages, err := m.ctx.Store.StartConversation(m.messages...)
c, messages, err := m.State.Ctx.Store.StartConversation(m.messages...)
if err != nil {
return err
}
@ -843,16 +985,18 @@ func (m *chatModel) persistConversation() error {
if m.messages[i].ID > 0 {
// message has an ID, update its contents
// TODO: check for content/tool equality before updating?
err := m.ctx.Store.UpdateMessage(&m.messages[i])
err := m.State.Ctx.Store.UpdateMessage(&m.messages[i])
if err != nil {
return err
}
} else if i > 0 {
// messages is new, so add it as a reply to previous message
saved, err := m.ctx.Store.Reply(&m.messages[i-1], m.messages[i])
saved, err := m.State.Ctx.Store.Reply(&m.messages[i-1], m.messages[i])
if err != nil {
return err
}
// add this message as a reply to the previous
m.messages[i-1].Replies = append(m.messages[i-1].Replies, saved[0])
m.messages[i] = saved[0]
} else {
// message has no id and no previous messages to add it to
@ -864,29 +1008,29 @@ func (m *chatModel) persistConversation() error {
return nil
}
func (m *chatModel) generateConversationTitle() tea.Cmd {
func (m *Model) generateConversationTitle() tea.Cmd {
return func() tea.Msg {
title, err := cmdutil.GenerateTitle(m.ctx, m.messages)
title, err := cmdutil.GenerateTitle(m.State.Ctx, m.messages)
if err != nil {
return msgError(err)
return shared.MsgError(err)
}
return msgConversationTitleChanged(title)
}
}
func (m *chatModel) waitForReply() tea.Cmd {
func (m *Model) waitForReply() tea.Cmd {
return func() tea.Msg {
return msgAssistantReply(<-m.replyChan)
}
}
func (m *chatModel) waitForChunk() tea.Cmd {
func (m *Model) waitForChunk() tea.Cmd {
return func() tea.Msg {
return msgResponseChunk(<-m.replyChunkChan)
}
}
func (m *chatModel) promptLLM() tea.Cmd {
func (m *Model) promptLLM() tea.Cmd {
m.waitingForReply = true
m.replyCursor.Blink = false
m.status = "Press ctrl+c to cancel"
@ -906,16 +1050,16 @@ func (m *chatModel) promptLLM() tea.Cmd {
m.elapsed = 0
return func() tea.Msg {
completionProvider, err := m.ctx.GetCompletionProvider(*m.ctx.Config.Defaults.Model)
completionProvider, err := m.State.Ctx.GetCompletionProvider(*m.State.Ctx.Config.Defaults.Model)
if err != nil {
return msgError(err)
return shared.MsgError(err)
}
requestParams := models.RequestParameters{
Model: *m.ctx.Config.Defaults.Model,
MaxTokens: *m.ctx.Config.Defaults.MaxTokens,
Temperature: *m.ctx.Config.Defaults.Temperature,
ToolBag: m.ctx.EnabledTools,
Model: *m.State.Ctx.Config.Defaults.Model,
MaxTokens: *m.State.Ctx.Config.Defaults.MaxTokens,
Temperature: *m.State.Ctx.Config.Defaults.Temperature,
ToolBag: m.State.Ctx.EnabledTools,
}
replyHandler := func(msg models.Message) {

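The wrap-around selection in cycleMessages reduces to simple modular arithmetic; an isolated sketch (this helper is illustrative and not part of the change):

package example // illustrative sketch only

// nextIndex steps current by dir (+1 for next, -1 for previous) through a ring
// of length entries, wrapping at both ends the way cycleMessages does.
func nextIndex(current, length, dir int) int {
	return (current + dir + length) % length
}

// nextIndex(0, 3, -1) == 2: cycling back from the first sibling selects the last.
// nextIndex(2, 3, +1) == 0: cycling forward from the last sibling selects the first.
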

@ -1,4 +1,4 @@
package tui
package conversations
import (
"fmt"
@ -6,6 +6,9 @@ import (
"time"
models "git.mlow.ca/mlow/lmcli/pkg/lmcli/model"
"git.mlow.ca/mlow/lmcli/pkg/tui/shared"
"git.mlow.ca/mlow/lmcli/pkg/tui/styles"
tuiutil "git.mlow.ca/mlow/lmcli/pkg/tui/util"
"git.mlow.ca/mlow/lmcli/pkg/util"
"github.com/charmbracelet/bubbles/viewport"
tea "github.com/charmbracelet/bubbletea"
@ -24,8 +27,9 @@ type (
msgConversationSelected models.Conversation
)
type conversationsModel struct {
basemodel
type Model struct {
shared.State
shared.Sections
conversations []loadedConversation
cursor int // index of the currently selected conversation
@ -34,21 +38,15 @@ type conversationsModel struct {
content viewport.Model
}
func newConversationsModel(tui *model) conversationsModel {
m := conversationsModel{
basemodel: basemodel{
opts: tui.opts,
ctx: tui.ctx,
views: tui.views,
width: tui.width,
height: tui.height,
},
func Conversations(state shared.State) Model {
m := Model{
State: state,
content: viewport.New(0, 0),
}
return m
}
func (m *conversationsModel) handleInput(msg tea.KeyMsg) (bool, tea.Cmd) {
func (m *Model) HandleInput(msg tea.KeyMsg) (bool, tea.Cmd) {
switch msg.String() {
case "enter":
if len(m.conversations) > 0 && m.cursor < len(m.conversations) {
@ -66,7 +64,7 @@ func (m *conversationsModel) handleInput(msg tea.KeyMsg) (bool, tea.Cmd) {
// this hack positions the *next* conversation slightly
// *off* the screen, ensuring the entire m.cursor is shown,
// even if its height may not be constant due to wrapping.
scrollIntoView(&m.content, m.itemOffsets[m.cursor+1], -1)
tuiutil.ScrollIntoView(&m.content, m.itemOffsets[m.cursor+1], -1)
}
m.content.SetContent(m.renderConversationList())
} else {
@ -80,7 +78,7 @@ func (m *conversationsModel) handleInput(msg tea.KeyMsg) (bool, tea.Cmd) {
if m.cursor == 0 {
m.content.GotoTop()
} else {
scrollIntoView(&m.content, m.itemOffsets[m.cursor], 1)
tuiutil.ScrollIntoView(&m.content, m.itemOffsets[m.cursor], 1)
}
m.content.SetContent(m.renderConversationList())
} else {
@ -102,27 +100,32 @@ func (m *conversationsModel) handleInput(msg tea.KeyMsg) (bool, tea.Cmd) {
return false, nil
}
func (m conversationsModel) Init() tea.Cmd {
func (m Model) Init() tea.Cmd {
return nil
}
func (m *conversationsModel) handleResize(width, height int) {
m.width, m.height = width, height
func (m *Model) HandleResize(width, height int) {
m.Width, m.Height = width, height
m.content.Width = width
}
func (m conversationsModel) Update(msg tea.Msg) (conversationsModel, tea.Cmd) {
func (m Model) Update(msg tea.Msg) (Model, tea.Cmd) {
var cmds []tea.Cmd
switch msg := msg.(type) {
case msgStateEnter:
case shared.MsgViewEnter:
cmds = append(cmds, m.loadConversations())
m.content.SetContent(m.renderConversationList())
case tea.WindowSizeMsg:
m.handleResize(msg.Width, msg.Height)
m.HandleResize(msg.Width, msg.Height)
m.content.SetContent(m.renderConversationList())
case msgConversationsLoaded:
m.conversations = msg
m.content.SetContent(m.renderConversationList())
case msgConversationSelected:
m.Values.ConvShortname = msg.ShortName.String
cmds = append(cmds, func() tea.Msg {
return shared.MsgViewChange(shared.StateChat)
})
}
var cmd tea.Cmd
@ -131,22 +134,22 @@ func (m conversationsModel) Update(msg tea.Msg) (conversationsModel, tea.Cmd) {
cmds = append(cmds, cmd)
}
if m.width > 0 {
m.views.header = m.headerView()
m.views.footer = "" // TODO: show /something/
m.views.error = errorBanner(m.err, m.width)
fixedHeight := height(m.views.header) + height(m.views.error) + height(m.views.footer)
m.content.Height = m.height - fixedHeight
m.views.content = m.content.View()
if m.Width > 0 {
m.Header = m.headerView()
m.Footer = "" // TODO: show /something/
m.Error = tuiutil.ErrorBanner(m.Err, m.Width)
fixedHeight := tuiutil.Height(m.Header) + tuiutil.Height(m.Error) + tuiutil.Height(m.Footer)
m.content.Height = m.Height - fixedHeight
m.Content = m.content.View()
}
return m, tea.Batch(cmds...)
}
func (m *conversationsModel) loadConversations() tea.Cmd {
func (m *Model) loadConversations() tea.Cmd {
return func() tea.Msg {
messages, err := m.ctx.Store.LatestConversationMessages()
messages, err := m.Ctx.Store.LatestConversationMessages()
if err != nil {
return msgError(fmt.Errorf("Could not load conversations: %v", err))
return shared.MsgError(fmt.Errorf("Could not load conversations: %v", err))
}
loaded := make([]loadedConversation, len(messages))
@ -159,13 +162,35 @@ func (m *conversationsModel) loadConversations() tea.Cmd {
}
}
func (m *conversationsModel) headerView() string {
titleStyle := lipgloss.NewStyle().Bold(true)
header := titleStyle.Render("Conversations")
return headerStyle.Width(m.width).Render(header)
func (m Model) View() string {
if m.Width == 0 {
return ""
}
sections := make([]string, 0, 6)
if m.Header != "" {
sections = append(sections, m.Header)
}
sections = append(sections, m.Content)
if m.Error != "" {
sections = append(sections, m.Error)
}
if m.Footer != "" {
sections = append(sections, m.Footer)
}
return lipgloss.JoinVertical(lipgloss.Left, sections...)
}
func (m *conversationsModel) renderConversationList() string {
func (m *Model) headerView() string {
titleStyle := lipgloss.NewStyle().Bold(true)
header := titleStyle.Render("Conversations")
return styles.Header.Width(m.Width).Render(header)
}
func (m *Model) renderConversationList() string {
now := time.Now()
midnight := time.Date(now.Year(), now.Month(), now.Day(), 0, 0, 0, 0, now.Location())
monthStart := time.Date(now.Year(), now.Month(), 1, 0, 0, 0, 0, now.Location())
@ -228,7 +253,7 @@ func (m *conversationsModel) renderConversationList() string {
currentCategory = category
heading := categoryStyle.Render(currentCategory)
sb.WriteString(heading)
currentOffset += height(heading)
currentOffset += tuiutil.Height(heading)
sb.WriteRune('\n')
}
@ -240,7 +265,7 @@ func (m *conversationsModel) renderConversationList() string {
tStyle = tStyle.Inherit(selectedStyle)
}
title := tStyle.Width(m.width - 3).PaddingLeft(2).Render(c.conv.Title)
title := tStyle.Width(m.Width - 3).PaddingLeft(2).Render(c.conv.Title)
if i == m.cursor {
title = ">" + title[1:]
}
@ -252,7 +277,7 @@ func (m *conversationsModel) renderConversationList() string {
ageStyle.Render(util.HumanTimeElapsedSince(lastReplyAge)),
))
sb.WriteString(item)
currentOffset += height(item)
currentOffset += tuiutil.Height(item)
if i < len(m.conversations)-1 {
sb.WriteRune('\n')
}