package chat

import (
	"time"

	models "git.mlow.ca/mlow/lmcli/pkg/lmcli/model"
	"git.mlow.ca/mlow/lmcli/pkg/tui/shared"
	"github.com/charmbracelet/bubbles/cursor"
	"github.com/charmbracelet/bubbles/spinner"
	"github.com/charmbracelet/bubbles/textarea"
	"github.com/charmbracelet/bubbles/viewport"
	tea "github.com/charmbracelet/bubbletea"
	"github.com/charmbracelet/lipgloss"
)

type focusState int

const (
	focusInput focusState = iota
	focusMessages
)

type editorTarget int

const (
	input editorTarget = iota
	selectedMessage
)

// custom tea.Msg types
type (
	// sent on each chunk received from the LLM
	msgResponseChunk string
	// sent when the response has finished being received
	msgResponseEnd string
	// a special case of common.MsgError that stops the response waiting animation
	msgResponseError error
	// sent on each completed reply
	msgAssistantReply models.Message
	// sent when a conversation is (re)loaded
	msgConversationLoaded *models.Conversation
	// sent when a new conversation title is set
	msgConversationTitleChanged string
	// sent when a conversation's messages are loaded
	msgMessagesLoaded []models.Message
)
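
// The reply and chunk channels declared on Model below are consumed by one-shot
// tea.Cmds that block on a receive and convert the value into one of the
// messages above. A minimal sketch of that pattern (waitForChunk and
// waitForReply are defined elsewhere in this package; their exact
// implementation is assumed here):
//
//	func (m Model) waitForChunk() tea.Cmd {
//		return func() tea.Msg {
//			return msgResponseChunk(<-m.replyChunkChan)
//		}
//	}
//
//	func (m Model) waitForReply() tea.Cmd {
//		return func() tea.Msg {
//			return msgAssistantReply(<-m.replyChan)
//		}
//	}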

type Model struct {
	shared.State
	shared.Sections

	// app state
	conversation    *models.Conversation
	rootMessages    []models.Message
	messages        []models.Message
	selectedMessage int  // index of the currently selected message, -1 when none
	waitingForReply bool
	editorTarget    editorTarget
	stopSignal      chan struct{}       // signals cancellation of an in-progress response
	replyChan       chan models.Message // receives completed reply messages
	replyChunkChan  chan string         // receives streamed response chunks
	persistence     bool                // whether we will save new messages in the conversation

	// ui state
	focus           focusState
	wrap            bool     // whether message content is wrapped to viewport width
	status          string   // a general status message
	showToolResults bool     // whether tool calls and results are shown
	messageCache    []string // cache of syntax highlighted and wrapped message content
	messageOffsets  []int    // line offset of each message within the rendered content

	// ui elements
	content     viewport.Model
	input       textarea.Model
	spinner     spinner.Model
	replyCursor cursor.Model // cursor to indicate incoming response

	// metrics
	tokenCount uint
	startTime  time.Time
	elapsed    time.Duration
}

// Chat returns a new chat view Model initialized with the given shared state.
func Chat(state shared.State) Model {
	m := Model{
		State: state,

		conversation: &models.Conversation{},
		persistence:  true,

		stopSignal:     make(chan struct{}),
		replyChan:      make(chan models.Message),
		replyChunkChan: make(chan string),

		wrap:            true,
		selectedMessage: -1,

		content: viewport.New(0, 0),
		input:   textarea.New(),
		spinner: spinner.New(spinner.WithSpinner(
			spinner.Spinner{
				Frames: []string{
					". ",
					".. ",
					"...",
					".. ",
					". ",
					" ",
				},
				FPS: time.Second / 3,
			},
		)),
		replyCursor: cursor.New(),
	}

	m.replyCursor.SetChar(" ")
	m.replyCursor.Focus()

	system := state.Ctx.GetSystemPrompt()
	if system != "" {
		m.messages = []models.Message{{
			Role:    models.MessageRoleSystem,
			Content: system,
		}}
	}

	m.input.Focus()
	m.input.MaxHeight = 0
	m.input.CharLimit = 0
	m.input.ShowLineNumbers = false
	m.input.Placeholder = "Enter a message"

	m.input.FocusedStyle.CursorLine = lipgloss.NewStyle()
	m.input.FocusedStyle.Base = inputFocusedStyle
	m.input.BlurredStyle.Base = inputBlurredStyle

	m.waitingForReply = false
	m.status = "Press ctrl+s to send"
	return m
}

func (m Model) Init() tea.Cmd {
	return tea.Batch(
		m.waitForChunk(),
		m.waitForReply(),
	)
}
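
// Since waitForChunk and waitForReply each deliver at most one message per
// invocation (a single channel receive), Update is expected to re-issue the
// corresponding command after handling msgResponseChunk or msgAssistantReply so
// the model keeps listening on the channels. A sketch of that handling
// (illustrative only; the actual Update lives elsewhere in this package):
//
//	case msgResponseChunk:
//		// append the chunk to the in-progress reply, then resubscribe
//		cmds = append(cmds, m.waitForChunk())
//	case msgAssistantReply:
//		// record the completed reply, then resubscribe
//		cmds = append(cmds, m.waitForReply())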