2024-03-12 01:10:54 -06:00
|
|
|
package tui
|
|
|
|
|
|
|
|
// The terminal UI for lmcli, launched from the `lmcli chat` command
|
|
|
|
// TODO:
|
|
|
|
// - binding to open selected message/input in $EDITOR
|
|
|
|
|
|
|
|
import (
|
|
|
|
"context"
|
|
|
|
"fmt"
|
|
|
|
"strings"
|
|
|
|
|
|
|
|
"git.mlow.ca/mlow/lmcli/pkg/lmcli"
|
|
|
|
models "git.mlow.ca/mlow/lmcli/pkg/lmcli/model"
|
|
|
|
"github.com/charmbracelet/bubbles/textarea"
|
|
|
|
"github.com/charmbracelet/bubbles/viewport"
|
|
|
|
tea "github.com/charmbracelet/bubbletea"
|
|
|
|
"github.com/charmbracelet/lipgloss"
|
|
|
|
)
|
|
|
|
|
2024-03-12 11:50:10 -06:00
|
|
|
// focusState identifies which UI element currently receives keyboard input.
type focusState int

const (
	// focusInput routes keystrokes to the message input textarea.
	focusInput focusState = iota
	// focusMessages routes keystrokes to the message viewport (scrolling).
	focusMessages
)
|
|
|
|
|
2024-03-12 01:10:54 -06:00
|
|
|
// model is the single bubbletea model for the chat TUI. It holds both
// the application state (conversation data, in-flight request plumbing)
// and the UI state (focus, widgets).
type model struct {
	ctx           *lmcli.Context
	convShortname string

	// application state
	conversation    *models.Conversation
	messages        []models.Message
	waitingForReply bool
	// replyChan delivers each completed reply from the completion provider.
	replyChan chan models.Message
	// replyChunkChan delivers streamed response fragments as they arrive.
	replyChunkChan chan string
	// replyCancelFunc cancels the in-flight completion request (ctrl+c);
	// nil when no request is active.
	replyCancelFunc context.CancelFunc
	err             error

	// ui state
	focus  focusState
	status string // a general status message

	// ui elements
	content viewport.Model
	input   textarea.Model
}
|
|
|
|
|
|
|
|
// message is a simple role/content pair.
// NOTE(review): this type appears unreferenced within this file — it looks
// like dead code superseded by models.Message; verify against the rest of
// the package before removing.
type message struct {
	role    string
	content string
}
|
|
|
|
|
|
|
|
// custom tea.Msg types
type (
	// sent on each chunk received from LLM
	msgResponseChunk string
	// sent when response is finished being received
	msgResponseEnd string
	// sent on each completed reply
	msgReply models.Message
	// sent when a conversation is (re)loaded
	msgConversationLoaded *models.Conversation
	// sent when a conversation's messages are loaded
	msgMessagesLoaded []models.Message
	// sent when an error occurs
	msgError error
)
|
|
|
|
|
|
|
|
// styles
var (
	// role labels for user and assistant messages
	userStyle      = lipgloss.NewStyle().PaddingLeft(1).Bold(true).Foreground(lipgloss.Color("10"))
	assistantStyle = lipgloss.NewStyle().PaddingLeft(1).Bold(true).Foreground(lipgloss.Color("12"))
	// message body content
	messageStyle = lipgloss.NewStyle().PaddingLeft(2)
	// title bar at the top of the screen
	headerStyle = lipgloss.NewStyle().
			PaddingLeft(1).
			Background(lipgloss.Color("0"))
	// status line at the bottom of the screen
	footerStyle = lipgloss.NewStyle().
			Faint(true).
			BorderTop(true).
			BorderStyle(lipgloss.NormalBorder())
)
|
|
|
|
|
|
|
|
func (m model) Init() tea.Cmd {
|
|
|
|
return tea.Batch(
|
|
|
|
textarea.Blink,
|
|
|
|
m.loadConversation(m.convShortname),
|
2024-03-12 20:05:48 -06:00
|
|
|
m.waitForChunk(),
|
|
|
|
m.waitForReply(),
|
2024-03-12 01:10:54 -06:00
|
|
|
)
|
|
|
|
}
|
|
|
|
|
|
|
|
func (m model) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
|
|
|
|
var cmd tea.Cmd
|
|
|
|
|
|
|
|
switch msg := msg.(type) {
|
|
|
|
case tea.KeyMsg:
|
2024-03-12 11:50:10 -06:00
|
|
|
switch msg.String() {
|
|
|
|
case "ctrl+c":
|
2024-03-12 12:33:57 -06:00
|
|
|
if m.waitingForReply {
|
|
|
|
m.replyCancelFunc()
|
|
|
|
} else {
|
|
|
|
return m, tea.Quit
|
|
|
|
}
|
2024-03-12 11:50:10 -06:00
|
|
|
case "q":
|
|
|
|
if m.focus != focusInput {
|
|
|
|
return m, tea.Quit
|
|
|
|
}
|
|
|
|
default:
|
|
|
|
var inputHandled tea.Cmd
|
|
|
|
switch m.focus {
|
|
|
|
case focusInput:
|
|
|
|
inputHandled = m.handleInputKey(msg)
|
|
|
|
case focusMessages:
|
|
|
|
inputHandled = m.handleMessagesKey(msg)
|
|
|
|
}
|
|
|
|
if inputHandled != nil {
|
|
|
|
return m, inputHandled
|
|
|
|
}
|
|
|
|
}
|
2024-03-12 01:10:54 -06:00
|
|
|
case tea.WindowSizeMsg:
|
|
|
|
m.content.Width = msg.Width
|
2024-03-12 23:01:36 -06:00
|
|
|
m.content.Height = msg.Height - m.input.Height() - lipgloss.Height(m.footerView()) - lipgloss.Height(m.headerView())
|
2024-03-12 01:10:54 -06:00
|
|
|
m.input.SetWidth(msg.Width - 1)
|
|
|
|
m.updateContent()
|
|
|
|
case msgConversationLoaded:
|
|
|
|
c := (*models.Conversation)(msg)
|
|
|
|
cmd = m.loadMessages(c)
|
|
|
|
case msgMessagesLoaded:
|
|
|
|
m.messages = []models.Message(msg)
|
|
|
|
m.updateContent()
|
|
|
|
case msgResponseChunk:
|
|
|
|
chunk := string(msg)
|
2024-03-12 17:31:48 -06:00
|
|
|
last := len(m.messages) - 1
|
|
|
|
if last >= 0 && m.messages[last].Role == models.MessageRoleAssistant {
|
|
|
|
m.messages[last].Content += chunk
|
|
|
|
} else {
|
|
|
|
m.messages = append(m.messages, models.Message{
|
|
|
|
Role: models.MessageRoleAssistant,
|
|
|
|
Content: chunk,
|
|
|
|
})
|
2024-03-12 01:10:54 -06:00
|
|
|
}
|
2024-03-12 17:31:48 -06:00
|
|
|
m.updateContent()
|
2024-03-12 20:05:48 -06:00
|
|
|
cmd = m.waitForChunk() // wait for the next chunk
|
|
|
|
case msgReply:
|
|
|
|
// the last reply that was being worked on is finished
|
|
|
|
reply := models.Message(msg)
|
|
|
|
last := len(m.messages) - 1
|
|
|
|
if last < 0 {
|
|
|
|
panic("Unexpected messages length handling msgReply")
|
|
|
|
}
|
|
|
|
if reply.Role == models.MessageRoleToolCall && m.messages[last].Role == models.MessageRoleAssistant {
|
|
|
|
m.messages[last] = reply
|
|
|
|
} else if reply.Role != models.MessageRoleAssistant {
|
|
|
|
m.messages = append(m.messages, reply)
|
|
|
|
}
|
|
|
|
m.updateContent()
|
|
|
|
cmd = m.waitForReply()
|
2024-03-12 01:10:54 -06:00
|
|
|
case msgResponseEnd:
|
2024-03-12 12:33:57 -06:00
|
|
|
m.replyCancelFunc = nil
|
|
|
|
m.waitingForReply = false
|
2024-03-12 11:10:40 -06:00
|
|
|
m.status = "Press ctrl+s to send"
|
2024-03-12 01:10:54 -06:00
|
|
|
}
|
|
|
|
|
|
|
|
if cmd != nil {
|
|
|
|
return m, cmd
|
|
|
|
}
|
|
|
|
|
|
|
|
m.input, cmd = m.input.Update(msg)
|
|
|
|
if cmd != nil {
|
|
|
|
return m, cmd
|
|
|
|
}
|
|
|
|
|
|
|
|
m.content, cmd = m.content.Update(msg)
|
|
|
|
if cmd != nil {
|
|
|
|
return m, cmd
|
|
|
|
}
|
|
|
|
|
|
|
|
return m, cmd
|
|
|
|
}
|
|
|
|
|
|
|
|
func (m model) View() string {
|
|
|
|
return lipgloss.JoinVertical(
|
|
|
|
lipgloss.Left,
|
2024-03-12 23:01:36 -06:00
|
|
|
m.headerView(),
|
2024-03-12 01:10:54 -06:00
|
|
|
m.content.View(),
|
|
|
|
m.inputView(),
|
|
|
|
m.footerView(),
|
|
|
|
)
|
|
|
|
}
|
|
|
|
|
|
|
|
func initialModel(ctx *lmcli.Context, convShortname string) model {
|
|
|
|
m := model{
|
|
|
|
ctx: ctx,
|
|
|
|
convShortname: convShortname,
|
|
|
|
|
2024-03-12 20:05:48 -06:00
|
|
|
replyChan: make(chan models.Message),
|
|
|
|
replyChunkChan: make(chan string),
|
2024-03-12 01:10:54 -06:00
|
|
|
}
|
|
|
|
|
|
|
|
m.content = viewport.New(0, 0)
|
|
|
|
|
|
|
|
m.input = textarea.New()
|
|
|
|
m.input.Placeholder = "Enter a message"
|
|
|
|
|
|
|
|
m.input.FocusedStyle.CursorLine = lipgloss.NewStyle()
|
|
|
|
m.input.ShowLineNumbers = false
|
|
|
|
m.input.Focus()
|
|
|
|
|
|
|
|
m.updateContent()
|
|
|
|
|
2024-03-12 12:33:57 -06:00
|
|
|
m.waitingForReply = false
|
2024-03-12 11:10:40 -06:00
|
|
|
m.status = "Press ctrl+s to send"
|
2024-03-12 01:10:54 -06:00
|
|
|
return m
|
|
|
|
}
|
|
|
|
|
2024-03-12 11:50:10 -06:00
|
|
|
func (m *model) handleMessagesKey(msg tea.KeyMsg) tea.Cmd {
|
2024-03-12 01:10:54 -06:00
|
|
|
switch msg.String() {
|
2024-03-12 11:50:10 -06:00
|
|
|
case "tab":
|
|
|
|
m.focus = focusInput
|
|
|
|
m.input.Focus()
|
|
|
|
}
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
|
|
|
func (m *model) handleInputKey(msg tea.KeyMsg) tea.Cmd {
|
|
|
|
switch msg.String() {
|
|
|
|
case "esc":
|
|
|
|
m.focus = focusMessages
|
|
|
|
m.input.Blur()
|
2024-03-12 01:10:54 -06:00
|
|
|
case "ctrl+s":
|
2024-03-12 11:10:40 -06:00
|
|
|
userInput := strings.TrimSpace(m.input.Value())
|
|
|
|
if strings.TrimSpace(userInput) == "" {
|
2024-03-12 01:10:54 -06:00
|
|
|
return nil
|
|
|
|
}
|
2024-03-12 11:10:40 -06:00
|
|
|
m.input.SetValue("")
|
|
|
|
m.messages = append(m.messages, models.Message{
|
|
|
|
Role: models.MessageRoleUser,
|
|
|
|
Content: userInput,
|
|
|
|
})
|
|
|
|
|
|
|
|
m.updateContent()
|
|
|
|
m.content.GotoBottom()
|
|
|
|
|
2024-03-12 12:33:57 -06:00
|
|
|
m.waitingForReply = true
|
|
|
|
m.status = "Waiting for response, press ctrl+c to cancel..."
|
2024-03-12 11:10:40 -06:00
|
|
|
return m.promptLLM()
|
2024-03-12 01:10:54 -06:00
|
|
|
}
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
|
|
|
func (m *model) loadConversation(shortname string) tea.Cmd {
|
|
|
|
return func() tea.Msg {
|
|
|
|
if shortname == "" {
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
c, err := m.ctx.Store.ConversationByShortName(shortname)
|
|
|
|
if err != nil {
|
|
|
|
return msgError(fmt.Errorf("Could not lookup conversation: %v\n", err))
|
|
|
|
}
|
|
|
|
if c.ID == 0 {
|
|
|
|
return msgError(fmt.Errorf("Conversation not found with short name: %s\n", shortname))
|
|
|
|
}
|
|
|
|
return msgConversationLoaded(c)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
func (m *model) loadMessages(c *models.Conversation) tea.Cmd {
|
|
|
|
return func() tea.Msg {
|
|
|
|
messages, err := m.ctx.Store.Messages(c)
|
|
|
|
if err != nil {
|
|
|
|
return msgError(fmt.Errorf("Could not load conversation messages: %v\n", err))
|
|
|
|
}
|
|
|
|
return msgMessagesLoaded(messages)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2024-03-12 20:05:48 -06:00
|
|
|
func (m *model) waitForReply() tea.Cmd {
|
|
|
|
return func() tea.Msg {
|
|
|
|
return msgReply(<-m.replyChan)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
func (m *model) waitForChunk() tea.Cmd {
|
2024-03-12 01:10:54 -06:00
|
|
|
return func() tea.Msg {
|
2024-03-12 20:05:48 -06:00
|
|
|
return msgResponseChunk(<-m.replyChunkChan)
|
2024-03-12 01:10:54 -06:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// promptLLM returns a command that sends the current message history to
// the configured completion provider as a streaming request. Streamed
// chunks are written to m.replyChunkChan and completed replies to
// m.replyChan (consumed by waitForChunk/waitForReply); the command itself
// resolves to msgResponseEnd when the stream finishes.
func (m *model) promptLLM() tea.Cmd {
	return func() tea.Msg {
		completionProvider, err := m.ctx.GetCompletionProvider(*m.ctx.Config.Defaults.Model)
		if err != nil {
			return msgError(err)
		}

		requestParams := models.RequestParameters{
			Model:       *m.ctx.Config.Defaults.Model,
			MaxTokens:   *m.ctx.Config.Defaults.MaxTokens,
			Temperature: *m.ctx.Config.Defaults.Temperature,
			ToolBag:     m.ctx.EnabledTools,
		}

		// forwards each completed reply to the channel read by waitForReply
		replyHandler := func(msg models.Message) {
			m.replyChan <- msg
		}

		// NOTE(review): replyCancelFunc is assigned from inside this
		// command's goroutine while Update (which calls it on ctrl+c)
		// runs elsewhere — looks like a potential data race; confirm
		// against bubbletea's execution model.
		ctx, replyCancelFunc := context.WithCancel(context.Background())
		m.replyCancelFunc = replyCancelFunc

		// TODO: handle error
		resp, _ := completionProvider.CreateChatCompletionStream(
			ctx, requestParams, m.messages, replyHandler, m.replyChunkChan,
		)

		return msgResponseEnd(resp)
	}
}
|
|
|
|
|
|
|
|
func (m *model) updateContent() {
|
|
|
|
sb := strings.Builder{}
|
|
|
|
msgCnt := len(m.messages)
|
|
|
|
for i, message := range m.messages {
|
|
|
|
var style lipgloss.Style
|
|
|
|
if message.Role == models.MessageRoleUser {
|
|
|
|
style = userStyle
|
|
|
|
} else {
|
|
|
|
style = assistantStyle
|
|
|
|
}
|
|
|
|
|
2024-03-12 11:10:40 -06:00
|
|
|
sb.WriteString(fmt.Sprintf("%s:\n\n", style.Render(string(message.Role.FriendlyRole()))))
|
2024-03-12 01:10:54 -06:00
|
|
|
|
2024-03-12 02:12:12 -06:00
|
|
|
highlighted, _ := m.ctx.Chroma.HighlightS(message.Content)
|
2024-03-12 23:09:42 -06:00
|
|
|
sb.WriteString(messageStyle.Width(m.content.Width - 5).Render(highlighted))
|
2024-03-12 01:10:54 -06:00
|
|
|
if i < msgCnt-1 {
|
|
|
|
sb.WriteString("\n\n")
|
|
|
|
}
|
|
|
|
}
|
2024-03-12 17:31:48 -06:00
|
|
|
atBottom := m.content.AtBottom()
|
2024-03-12 01:10:54 -06:00
|
|
|
m.content.SetContent(sb.String())
|
2024-03-12 17:31:48 -06:00
|
|
|
if atBottom {
|
|
|
|
// if we were at bottom before the update, scroll with the output
|
|
|
|
m.content.GotoBottom()
|
|
|
|
}
|
2024-03-12 01:10:54 -06:00
|
|
|
}
|
|
|
|
|
2024-03-12 23:01:36 -06:00
|
|
|
func (m *model) headerView() string {
|
|
|
|
titleStyle := lipgloss.NewStyle().
|
|
|
|
Bold(true)
|
|
|
|
var title string
|
|
|
|
if m.conversation != nil && m.conversation.Title != "" {
|
|
|
|
title = m.conversation.Title
|
2024-03-12 01:10:54 -06:00
|
|
|
} else {
|
2024-03-12 23:01:36 -06:00
|
|
|
title = "Untitled"
|
2024-03-12 01:10:54 -06:00
|
|
|
}
|
2024-03-12 23:01:36 -06:00
|
|
|
part := titleStyle.Render(title)
|
|
|
|
|
|
|
|
return headerStyle.Width(m.content.Width).Render(part)
|
|
|
|
}
|
|
|
|
|
|
|
|
// inputView renders the textarea holding the user's in-progress message.
func (m model) inputView() string {
	return m.input.View()
}
|
|
|
|
|
2024-03-12 23:01:36 -06:00
|
|
|
func (m *model) footerView() string {
|
2024-03-12 11:10:40 -06:00
|
|
|
left := m.status
|
|
|
|
right := fmt.Sprintf("Model: %s", *m.ctx.Config.Defaults.Model)
|
|
|
|
|
|
|
|
totalWidth := lipgloss.Width(left + right)
|
|
|
|
var padding string
|
|
|
|
if m.content.Width-totalWidth > 0 {
|
|
|
|
padding = strings.Repeat(" ", m.content.Width-totalWidth)
|
|
|
|
} else {
|
|
|
|
padding = ""
|
|
|
|
}
|
|
|
|
|
|
|
|
footer := lipgloss.JoinHorizontal(lipgloss.Center, left, padding, right)
|
|
|
|
|
|
|
|
return footerStyle.Width(m.content.Width).Render(footer)
|
2024-03-12 01:10:54 -06:00
|
|
|
}
|
|
|
|
|
|
|
|
func Launch(ctx *lmcli.Context, convShortname string) error {
|
|
|
|
p := tea.NewProgram(initialModel(ctx, convShortname), tea.WithAltScreen())
|
|
|
|
if _, err := p.Run(); err != nil {
|
|
|
|
return fmt.Errorf("Error running program: %v", err)
|
|
|
|
}
|
|
|
|
return nil
|
|
|
|
}
|