TUI refactor
- Clean up, improved startup logic, initial conversation load
- Moved conversation/message business logic (mostly) into `pkg/tui/model`
parent 1570988b98
commit 443c8096d3
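For context, the new entry point takes functional options instead of a conversation shortname. A minimal sketch of how a caller drives it, adapted from the `chat` command diff below (`launchChat` is an illustrative wrapper, not part of this commit):

```go
package example

import (
	cmdutil "git.mlow.ca/mlow/lmcli/pkg/cmd/util"
	"git.mlow.ca/mlow/lmcli/pkg/lmcli"
	"git.mlow.ca/mlow/lmcli/pkg/tui"
	"git.mlow.ca/mlow/lmcli/pkg/tui/shared"
)

// launchChat shows the intended startup flow: resolve the conversation up
// front, then hand it (and the initial view) to the TUI as launch options.
func launchChat(ctx *lmcli.Context, shortname string, list bool) error {
	var opts []tui.LaunchOption

	if !list && shortname != "" {
		conv, err := cmdutil.LookupConversationE(ctx, shortname)
		if err != nil {
			return err
		}
		opts = append(opts, tui.WithInitialConversation(conv))
	}

	if list {
		// Start on the conversation list instead of the chat view.
		opts = append(opts, tui.WithInitialView(shared.StateConversations))
	}

	// With no options, Launch defaults to shared.StateChat and a new,
	// empty conversation.
	return tui.Launch(ctx, opts...)
}
```

With no options, `tui.Launch` falls back to the chat view with an empty conversation, matching the defaults set in `LaunchOptions` below.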
@@ -6,6 +6,7 @@ import (
    cmdutil "git.mlow.ca/mlow/lmcli/pkg/cmd/util"
    "git.mlow.ca/mlow/lmcli/pkg/lmcli"
    "git.mlow.ca/mlow/lmcli/pkg/tui"
    "git.mlow.ca/mlow/lmcli/pkg/tui/shared"
    "github.com/spf13/cobra"
)

@@ -19,17 +20,30 @@ func ChatCmd(ctx *lmcli.Context) *cobra.Command {
        if err != nil {
            return err
        }
        shortname := ""
        if len(args) == 1 {
            shortname = args[0]
        }
        if shortname != ""{
            _, err := cmdutil.LookupConversationE(ctx, shortname)

        var opts []tui.LaunchOption

        list, err := cmd.Flags().GetBool("list")
        if err != nil {
            return err
        }

        if !list && len(args) == 1 {
            shortname := args[0]
            if shortname != ""{
                conv, err := cmdutil.LookupConversationE(ctx, shortname)
                if err != nil {
                    return err
                }
                err = tui.Launch(ctx, shortname)
                opts = append(opts, tui.WithInitialConversation(conv))
            }
        }

        if list {
            opts = append(opts, tui.WithInitialView(shared.StateConversations))
        }

        err = tui.Launch(ctx, opts...)
        if err != nil {
            return fmt.Errorf("Error fetching LLM response: %v", err)
        }
@@ -43,6 +57,10 @@ func ChatCmd(ctx *lmcli.Context) *cobra.Command {
            return ctx.Store.ConversationShortNameCompletions(toComplete), compMode
        },
    }

    // -l, --list
    cmd.Flags().BoolP("list", "l", false, "View/manage conversations")

    applyGenerationFlags(ctx, cmd)
    return cmd
}
pkg/tui/model/model.go (new file, 217 lines)
@@ -0,0 +1,217 @@
package model

import (
    "context"
    "fmt"

    "git.mlow.ca/mlow/lmcli/pkg/agents"
    "git.mlow.ca/mlow/lmcli/pkg/api"
    cmdutil "git.mlow.ca/mlow/lmcli/pkg/cmd/util"
    "git.mlow.ca/mlow/lmcli/pkg/lmcli"
    "git.mlow.ca/mlow/lmcli/pkg/tui/shared"
)

type LoadedConversation struct {
    Conv api.Conversation
    LastReply api.Message
}

// AppModel represents the application data model
type AppModel struct {
    Ctx *lmcli.Context
    Conversations []LoadedConversation
    Conversation *api.Conversation
    RootMessages []api.Message
    Messages []api.Message
}

type MessageCycleDirection int

const (
    CycleNext MessageCycleDirection = 1
    CyclePrev MessageCycleDirection = -1
)

func (m *AppModel) LoadConversations() (error, []LoadedConversation) {
    messages, err := m.Ctx.Store.LatestConversationMessages()
    if err != nil {
        return shared.MsgError(fmt.Errorf("Could not load conversations: %v", err)), nil
    }

    conversations := make([]LoadedConversation, len(messages))
    for i, msg := range messages {
        conversations[i] = LoadedConversation{
            Conv: *msg.Conversation,
            LastReply: msg,
        }
    }
    return nil, conversations
}

func (a *AppModel) LoadConversationMessages() ([]api.Message, error) {
    messages, err := a.Ctx.Store.PathToLeaf(a.Conversation.SelectedRoot)
    if err != nil {
        return nil, fmt.Errorf("Could not load conversation messages: %v %v", a.Conversation.SelectedRoot, err)
    }
    return messages, nil
}

func (a *AppModel) GenerateConversationTitle(messages []api.Message) (string, error) {
    return cmdutil.GenerateTitle(a.Ctx, messages)
}

func (a *AppModel) UpdateConversationTitle(conversation *api.Conversation) error {
    return a.Ctx.Store.UpdateConversation(conversation)
}

func (a *AppModel) CloneMessage(message api.Message, selected bool) (*api.Message, error) {
    msg, _, err := a.Ctx.Store.CloneBranch(message)
    if err != nil {
        return nil, fmt.Errorf("Could not clone message: %v", err)
    }
    if selected {
        if msg.Parent == nil {
            msg.Conversation.SelectedRoot = msg
            err = a.Ctx.Store.UpdateConversation(msg.Conversation)
        } else {
            msg.Parent.SelectedReply = msg
            err = a.Ctx.Store.UpdateMessage(msg.Parent)
        }
        if err != nil {
            return nil, fmt.Errorf("Could not update selected message: %v", err)
        }
    }
    return msg, nil
}

func (a *AppModel) UpdateMessageContent(message *api.Message) error {
    return a.Ctx.Store.UpdateMessage(message)
}

func (a *AppModel) CycleSelectedRoot(conv *api.Conversation, rootMessages []api.Message, dir MessageCycleDirection) (*api.Message, error) {
    if len(rootMessages) < 2 {
        return nil, nil
    }

    nextRoot, err := cycleSelectedMessage(conv.SelectedRoot, rootMessages, dir)
    if err != nil {
        return nil, err
    }

    conv.SelectedRoot = nextRoot
    err = a.Ctx.Store.UpdateConversation(conv)
    if err != nil {
        return nil, fmt.Errorf("Could not update conversation SelectedRoot: %v", err)
    }
    return nextRoot, nil
}

func (a *AppModel) CycleSelectedReply(message *api.Message, dir MessageCycleDirection) (*api.Message, error) {
    if len(message.Replies) < 2 {
        return nil, nil
    }

    nextReply, err := cycleSelectedMessage(message.SelectedReply, message.Replies, dir)
    if err != nil {
        return nil, err
    }

    message.SelectedReply = nextReply
    err = a.Ctx.Store.UpdateMessage(message)
    if err != nil {
        return nil, fmt.Errorf("Could not update message SelectedReply: %v", err)
    }
    return nextReply, nil
}

func (a *AppModel) PersistConversation(conversation *api.Conversation, messages []api.Message) (*api.Conversation, []api.Message, error) {
    var err error
    if conversation == nil || conversation.ID == 0 {
        conversation, messages, err = a.Ctx.Store.StartConversation(messages...)
        if err != nil {
            return nil, nil, fmt.Errorf("Could not start new conversation: %v", err)
        }
        return conversation, messages, nil
    }

    for i := range messages {
        if messages[i].ID > 0 {
            err := a.Ctx.Store.UpdateMessage(&messages[i])
            if err != nil {
                return nil, nil, err
            }
        } else if i > 0 {
            saved, err := a.Ctx.Store.Reply(&messages[i-1], messages[i])
            if err != nil {
                return nil, nil, err
            }
            messages[i] = saved[0]
        } else {
            return nil, nil, fmt.Errorf("Error: no messages to reply to")
        }
    }
    return conversation, messages, nil
}

func (a *AppModel) ExecuteToolCalls(toolCalls []api.ToolCall) ([]api.ToolResult, error) {
    agent := a.Ctx.GetAgent(a.Ctx.Config.Defaults.Agent)
    if agent == nil {
        return nil, fmt.Errorf("Attempted to execute tool calls with no agent configured")
    }

    return agents.ExecuteToolCalls(toolCalls, agent.Toolbox)
}

func (a *AppModel) PromptLLM(messages []api.Message, chatReplyChunks chan api.Chunk, stopSignal chan struct{}) (*api.Message, error) {
    model, provider, err := a.Ctx.GetModelProvider(*a.Ctx.Config.Defaults.Model)
    if err != nil {
        return nil, err
    }

    params := api.RequestParameters{
        Model: model,
        MaxTokens: *a.Ctx.Config.Defaults.MaxTokens,
        Temperature: *a.Ctx.Config.Defaults.Temperature,
    }

    agent := a.Ctx.GetAgent(a.Ctx.Config.Defaults.Agent)
    if agent != nil {
        params.Toolbox = agent.Toolbox
    }

    ctx, cancel := context.WithCancel(context.Background())

    go func() {
        select {
        case <-stopSignal:
            cancel()
        }
    }()

    return provider.CreateChatCompletionStream(
        ctx, params, messages, chatReplyChunks,
    )
}

// Helper function
func cycleSelectedMessage(selected *api.Message, choices []api.Message, dir MessageCycleDirection) (*api.Message, error) {
    currentIndex := -1
    for i, reply := range choices {
        if reply.ID == selected.ID {
            currentIndex = i
            break
        }
    }

    if currentIndex < 0 {
        return nil, fmt.Errorf("Selected message %d not found in choices, this is a bug", selected.ID)
    }

    var next int
    if dir == CyclePrev {
        next = (currentIndex - 1 + len(choices)) % len(choices)
    } else {
        next = (currentIndex + 1) % len(choices)
    }
    return &choices[next], nil
}
@@ -1,17 +1,10 @@
package shared

import (
    "git.mlow.ca/mlow/lmcli/pkg/lmcli"
    tea "github.com/charmbracelet/bubbletea"
)

type Values struct {
    ConvShortname string
}

type Shared struct {
    Ctx *lmcli.Context
    Values *Values
    Width int
    Height int
    Err error
@@ -9,33 +9,42 @@ package tui
import (
    "fmt"

    "git.mlow.ca/mlow/lmcli/pkg/api"
    "git.mlow.ca/mlow/lmcli/pkg/lmcli"
    "git.mlow.ca/mlow/lmcli/pkg/tui/model"
    "git.mlow.ca/mlow/lmcli/pkg/tui/shared"
    "git.mlow.ca/mlow/lmcli/pkg/tui/views/chat"
    "git.mlow.ca/mlow/lmcli/pkg/tui/views/conversations"
    tea "github.com/charmbracelet/bubbletea"
)

// Application model
type Model struct {
    shared.Shared

    state shared.View
    chat chat.Model
    conversations conversations.Model
type LaunchOptions struct {
    InitialConversation *api.Conversation
    InitialView shared.View
}

func initialModel(ctx *lmcli.Context, values shared.Values) Model {
type Model struct {
    App *model.AppModel
    view shared.View
    chat chat.Model
    conversations conversations.Model
    Width int
    Height int
}

func initialModel(ctx *lmcli.Context, opts LaunchOptions) Model {
    m := Model{
        Shared: shared.Shared{
        App: &model.AppModel{
            Ctx: ctx,
            Values: &values,
            Conversation: opts.InitialConversation,
        },
        view: opts.InitialView,
    }

    m.state = shared.StateChat
    m.chat = chat.Chat(m.Shared)
    m.conversations = conversations.Conversations(m.Shared)
    sharedData := shared.Shared{}

    m.chat = chat.Chat(m.App, sharedData)
    m.conversations = conversations.Conversations(m.App, sharedData)
    return m
}

@@ -44,16 +53,14 @@ func (m Model) Init() tea.Cmd {
        m.conversations.Init(),
        m.chat.Init(),
        func() tea.Msg {
            return shared.MsgViewChange(m.state)
            return shared.MsgViewChange(m.view)
        },
    )
}

func (m *Model) handleGlobalInput(msg tea.KeyMsg) (bool, tea.Cmd) {
    // delegate input to the active child state first, only handling it at the
    // global level if the child state does not
    var cmds []tea.Cmd
    switch m.state {
    switch m.view {
    case shared.StateChat:
        handled, cmd := m.chat.HandleInput(msg)
        cmds = append(cmds, cmd)
@@ -88,8 +95,8 @@ func (m Model) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
            return m, cmd
        }
    case shared.MsgViewChange:
        m.state = shared.View(msg)
        switch m.state {
        m.view = shared.View(msg)
        switch m.view {
        case shared.StateChat:
            m.chat.HandleResize(m.Width, m.Height)
        case shared.StateConversations:
@@ -101,7 +108,7 @@ func (m Model) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
    }

    var cmd tea.Cmd
    switch m.state {
    switch m.view {
    case shared.StateConversations:
        m.conversations, cmd = m.conversations.Update(msg)
    case shared.StateChat:
@@ -115,7 +122,7 @@ func (m Model) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
}

func (m Model) View() string {
    switch m.state {
    switch m.view {
    case shared.StateConversations:
        return m.conversations.View()
    case shared.StateChat:
@@ -124,9 +131,30 @@ func (m Model) View() string {
    return ""
}

func Launch(ctx *lmcli.Context, convShortname string) error {
    p := tea.NewProgram(initialModel(ctx, shared.Values{ConvShortname: convShortname}), tea.WithAltScreen())
    if _, err := p.Run(); err != nil {
type LaunchOption func(*LaunchOptions)

func WithInitialConversation(conv *api.Conversation) LaunchOption {
    return func(opts *LaunchOptions) {
        opts.InitialConversation = conv
    }
}

func WithInitialView(view shared.View) LaunchOption {
    return func(opts *LaunchOptions) {
        opts.InitialView = view
    }
}

func Launch(ctx *lmcli.Context, options ...LaunchOption) error {
    opts := &LaunchOptions{
        InitialView: shared.StateChat,
    }
    for _, opt := range options {
        opt(opts)
    }

    program := tea.NewProgram(initialModel(ctx, *opts), tea.WithAltScreen())
    if _, err := program.Run(); err != nil {
        return fmt.Errorf("Error running program: %v", err)
    }
    return nil
@@ -4,6 +4,7 @@ import (
    "time"

    "git.mlow.ca/mlow/lmcli/pkg/api"
    "git.mlow.ca/mlow/lmcli/pkg/tui/model"
    "git.mlow.ca/mlow/lmcli/pkg/tui/shared"
    "github.com/charmbracelet/bubbles/cursor"
    "github.com/charmbracelet/bubbles/spinner"
@@ -76,11 +77,11 @@ type Model struct {
    shared.Shared
    shared.Sections

    // app state
    // App state
    App *model.AppModel

    // Chat view state
    state state // current overall status of the view
    conversation *api.Conversation
    rootMessages []api.Message
    messages []api.Message
    selectedMessage int
    editorTarget editorTarget
    stopSignal chan struct{}
@@ -88,7 +89,7 @@ type Model struct {
    chatReplyChunks chan api.Chunk
    persistence bool // whether we will save new messages in the conversation

    // ui state
    // UI state
    focus focusState
    wrap bool // whether message content is wrapped to viewport width
    showToolResults bool // whether tool calls and results are shown
@@ -107,12 +108,12 @@ type Model struct {
    elapsed time.Duration
}

func Chat(shared shared.Shared) Model {
func Chat(app *model.AppModel, shared shared.Shared) Model {
    m := Model{
        App: app,
        Shared: shared,

        state: idle,
        conversation: &api.Conversation{},
        persistence: true,

        stopSignal: make(chan struct{}),
@@ -143,15 +144,15 @@ func Chat(shared shared.Shared) Model {
    m.replyCursor.SetChar(" ")
    m.replyCursor.Focus()

    system := shared.Ctx.DefaultSystemPrompt()
    system := app.Ctx.DefaultSystemPrompt()

    agent := shared.Ctx.GetAgent(shared.Ctx.Config.Defaults.Agent)
    agent := app.Ctx.GetAgent(app.Ctx.Config.Defaults.Agent)
    if agent != nil && agent.SystemPrompt != "" {
        system = agent.SystemPrompt
    }

    if system != "" {
        m.messages = api.ApplySystemPrompt(m.messages, system, false)
        m.App.Messages = api.ApplySystemPrompt(m.App.Messages, system, false)
    }

    m.input.Focus()
@@ -1,42 +1,38 @@
package chat

import (
    "context"
    "errors"
    "fmt"
    "time"

    "git.mlow.ca/mlow/lmcli/pkg/agents"
    "git.mlow.ca/mlow/lmcli/pkg/api"
    cmdutil "git.mlow.ca/mlow/lmcli/pkg/cmd/util"
    "git.mlow.ca/mlow/lmcli/pkg/tui/model"
    "git.mlow.ca/mlow/lmcli/pkg/tui/shared"
    tea "github.com/charmbracelet/bubbletea"
)

func (m *Model) setMessage(i int, msg api.Message) {
    if i >= len(m.messages) {
    if i >= len(m.App.Messages) {
        panic("i out of range")
    }
    m.messages[i] = msg
    m.App.Messages[i] = msg
    m.messageCache[i] = m.renderMessage(i)
}

func (m *Model) addMessage(msg api.Message) {
    m.messages = append(m.messages, msg)
    m.messageCache = append(m.messageCache, m.renderMessage(len(m.messages)-1))
    m.App.Messages = append(m.App.Messages, msg)
    m.messageCache = append(m.messageCache, m.renderMessage(len(m.App.Messages)-1))
}

func (m *Model) setMessageContents(i int, content string) {
    if i >= len(m.messages) {
    if i >= len(m.App.Messages) {
        panic("i out of range")
    }
    m.messages[i].Content = content
    m.App.Messages[i].Content = content
    m.messageCache[i] = m.renderMessage(i)
}

func (m *Model) rebuildMessageCache() {
    m.messageCache = make([]string, len(m.messages))
    for i := range m.messages {
    m.messageCache = make([]string, len(m.App.Messages))
    for i := range m.App.Messages {
        m.messageCache[i] = m.renderMessage(i)
    }
}
@@ -45,36 +41,15 @@ func (m *Model) updateContent() {
    atBottom := m.content.AtBottom()
    m.content.SetContent(m.conversationMessagesView())
    if atBottom {
        // if we were at bottom before the update, scroll with the output
        m.content.GotoBottom()
    }
}

func (m *Model) loadConversation(shortname string) tea.Cmd {
    return func() tea.Msg {
        if shortname == "" {
            return nil
        }
        c, err := m.Shared.Ctx.Store.ConversationByShortName(shortname)
        if err != nil {
            return shared.MsgError(fmt.Errorf("Could not lookup conversation: %v", err))
        }
        if c.ID == 0 {
            return shared.MsgError(fmt.Errorf("Conversation not found: %s", shortname))
        }
        rootMessages, err := m.Shared.Ctx.Store.RootMessages(c.ID)
        if err != nil {
            return shared.MsgError(fmt.Errorf("Could not load conversation root messages: %v\n", err))
        }
        return msgConversationLoaded{c, rootMessages}
    }
}

func (m *Model) loadConversationMessages() tea.Cmd {
    return func() tea.Msg {
        messages, err := m.Shared.Ctx.Store.PathToLeaf(m.conversation.SelectedRoot)
        messages, err := m.App.LoadConversationMessages()
        if err != nil {
            return shared.MsgError(fmt.Errorf("Could not load conversation messages: %v\n", err))
            return shared.MsgError(err)
        }
        return msgMessagesLoaded(messages)
    }
@@ -82,7 +57,7 @@ func (m *Model) loadConversationMessages() tea.Cmd {

func (m *Model) generateConversationTitle() tea.Cmd {
    return func() tea.Msg {
        title, err := cmdutil.GenerateTitle(m.Shared.Ctx, m.messages)
        title, err := m.App.GenerateConversationTitle(m.App.Messages)
        if err != nil {
            return shared.MsgError(err)
        }
@@ -92,7 +67,7 @@ func (m *Model) generateConversationTitle() tea.Cmd {

func (m *Model) updateConversationTitle(conversation *api.Conversation) tea.Cmd {
    return func() tea.Msg {
        err := m.Shared.Ctx.Store.UpdateConversation(conversation)
        err := m.App.UpdateConversationTitle(conversation)
        if err != nil {
            return shared.WrapError(err)
        }
@@ -100,26 +75,11 @@ func (m *Model) updateConversationTitle(conversation *api.Conversation) tea.Cmd
    }
}

// Clones the given message (and its descendents). If selected is true, updates
// either its parent's SelectedReply or its conversation's SelectedRoot to
// point to the new clone
func (m *Model) cloneMessage(message api.Message, selected bool) tea.Cmd {
    return func() tea.Msg {
        msg, _, err := m.Ctx.Store.CloneBranch(message)
        msg, err := m.App.CloneMessage(message, selected)
        if err != nil {
            return shared.WrapError(fmt.Errorf("Could not clone message: %v", err))
        }
        if selected {
            if msg.Parent == nil {
                msg.Conversation.SelectedRoot = msg
                err = m.Shared.Ctx.Store.UpdateConversation(msg.Conversation)
            } else {
                msg.Parent.SelectedReply = msg
                err = m.Shared.Ctx.Store.UpdateMessage(msg.Parent)
            }
            if err != nil {
                return shared.WrapError(fmt.Errorf("Could not update selected message: %v", err))
            }
            return shared.WrapError(err)
        }
        return msgMessageCloned(msg)
    }
@@ -127,129 +87,55 @@ func (m *Model) cloneMessage(message api.Message, selected bool) tea.Cmd {

func (m *Model) updateMessageContent(message *api.Message) tea.Cmd {
    return func() tea.Msg {
        err := m.Shared.Ctx.Store.UpdateMessage(message)
        err := m.App.UpdateMessageContent(message)
        if err != nil {
            return shared.WrapError(fmt.Errorf("Could not update message: %v", err))
            return shared.WrapError(err)
        }
        return msgMessageUpdated(message)
    }
}

func cycleSelectedMessage(selected *api.Message, choices []api.Message, dir MessageCycleDirection) (*api.Message, error) {
    currentIndex := -1
    for i, reply := range choices {
        if reply.ID == selected.ID {
            currentIndex = i
            break
        }
    }

    if currentIndex < 0 {
        // this should probably be an assert
        return nil, fmt.Errorf("Selected message %d not found in choices, this is a bug", selected.ID)
    }

    var next int
    if dir == CyclePrev {
        // Wrap around to the last reply if at the beginning
        next = (currentIndex - 1 + len(choices)) % len(choices)
    } else {
        // Wrap around to the first reply if at the end
        next = (currentIndex + 1) % len(choices)
    }
    return &choices[next], nil
}

func (m *Model) cycleSelectedRoot(conv *api.Conversation, dir MessageCycleDirection) tea.Cmd {
    if len(m.rootMessages) < 2 {
func (m *Model) cycleSelectedRoot(conv *api.Conversation, dir model.MessageCycleDirection) tea.Cmd {
    if len(m.App.RootMessages) < 2 {
        return nil
    }

    return func() tea.Msg {
        nextRoot, err := cycleSelectedMessage(conv.SelectedRoot, m.rootMessages, dir)
        nextRoot, err := m.App.CycleSelectedRoot(conv, m.App.RootMessages, dir)
        if err != nil {
            return shared.WrapError(err)
        }

        conv.SelectedRoot = nextRoot
        err = m.Shared.Ctx.Store.UpdateConversation(conv)
        if err != nil {
            return shared.WrapError(fmt.Errorf("Could not update conversation SelectedRoot: %v", err))
        }
        return msgSelectedRootCycled(nextRoot)
    }
}

func (m *Model) cycleSelectedReply(message *api.Message, dir MessageCycleDirection) tea.Cmd {
func (m *Model) cycleSelectedReply(message *api.Message, dir model.MessageCycleDirection) tea.Cmd {
    if len(message.Replies) < 2 {
        return nil
    }

    return func() tea.Msg {
        nextReply, err := cycleSelectedMessage(message.SelectedReply, message.Replies, dir)
        nextReply, err := m.App.CycleSelectedReply(message, dir)
        if err != nil {
            return shared.WrapError(err)
        }

        message.SelectedReply = nextReply
        err = m.Shared.Ctx.Store.UpdateMessage(message)
        if err != nil {
            return shared.WrapError(fmt.Errorf("Could not update message SelectedReply: %v", err))
        }
        return msgSelectedReplyCycled(nextReply)
    }
}

func (m *Model) persistConversation() tea.Cmd {
    conversation := m.conversation
    messages := m.messages

    var err error
    if conversation.ID == 0 {
        return func() tea.Msg {
            // Start a new conversation with all messages so far
            conversation, messages, err = m.Shared.Ctx.Store.StartConversation(messages...)
            if err != nil {
                return shared.MsgError(fmt.Errorf("Could not start new conversation: %v", err))
            }
            return msgConversationPersisted{true, conversation, messages}
        }
    }

    return func() tea.Msg {
        // else, we'll handle updating an existing conversation's messages
        for i := range messages {
            if messages[i].ID > 0 {
                // message has an ID, update it
                err := m.Shared.Ctx.Store.UpdateMessage(&messages[i])
        conversation, messages, err := m.App.PersistConversation(m.App.Conversation, m.App.Messages)
        if err != nil {
            return shared.MsgError(err)
        }
            } else if i > 0 {
                // messages is new, so add it as a reply to previous message
                saved, err := m.Shared.Ctx.Store.Reply(&messages[i-1], messages[i])
                if err != nil {
                    return shared.MsgError(err)
                }
                messages[i] = saved[0]
            } else {
                // message has no id and no previous messages to add it to
                // this shouldn't happen?
                return fmt.Errorf("Error: no messages to reply to")
            }
        }
        return msgConversationPersisted{false, conversation, messages}
        return msgConversationPersisted{conversation.ID == 0, conversation, messages}
    }
}

func (m *Model) executeToolCalls(toolCalls []api.ToolCall) tea.Cmd {
    return func() tea.Msg {
        agent := m.Shared.Ctx.GetAgent(m.Shared.Ctx.Config.Defaults.Agent)
        if agent == nil {
            return shared.MsgError(fmt.Errorf("Attempted to execute tool calls with no agent configured"))
        }

        results, err := agents.ExecuteToolCalls(toolCalls, agent.Toolbox)
        results, err := m.App.ExecuteToolCalls(toolCalls)
        if err != nil {
            return shared.MsgError(err)
        }
@@ -266,38 +152,7 @@ func (m *Model) promptLLM() tea.Cmd {
    m.tokenCount = 0

    return func() tea.Msg {
        model, provider, err := m.Shared.Ctx.GetModelProvider(*m.Shared.Ctx.Config.Defaults.Model)
        if err != nil {
            return shared.MsgError(err)
        }

        params := api.RequestParameters{
            Model: model,
            MaxTokens: *m.Shared.Ctx.Config.Defaults.MaxTokens,
            Temperature: *m.Shared.Ctx.Config.Defaults.Temperature,
        }

        agent := m.Shared.Ctx.GetAgent(m.Shared.Ctx.Config.Defaults.Agent)
        if agent != nil {
            params.Toolbox = agent.Toolbox
        }

        ctx, cancel := context.WithCancel(context.Background())

        go func() {
            select {
            case <-m.stopSignal:
                cancel()
            }
        }()

        resp, err := provider.CreateChatCompletionStream(
            ctx, params, m.messages, m.chatReplyChunks,
        )

        if errors.Is(err, context.Canceled) {
            return msgChatResponseCanceled(struct{}{})
        }
        resp, err := m.App.PromptLLM(m.App.Messages, m.chatReplyChunks, m.stopSignal)

        if err != nil {
            return msgChatResponseError(err)
@@ -5,18 +5,12 @@ import (
    "strings"

    "git.mlow.ca/mlow/lmcli/pkg/api"
    "git.mlow.ca/mlow/lmcli/pkg/tui/model"
    "git.mlow.ca/mlow/lmcli/pkg/tui/shared"
    tuiutil "git.mlow.ca/mlow/lmcli/pkg/tui/util"
    tea "github.com/charmbracelet/bubbletea"
)

type MessageCycleDirection int

const (
    CycleNext MessageCycleDirection = 1
    CyclePrev MessageCycleDirection = -1
)

func (m *Model) HandleInput(msg tea.KeyMsg) (bool, tea.Cmd) {
    switch m.focus {
    case focusInput:
@@ -71,17 +65,17 @@ func (m *Model) handleMessagesKey(msg tea.KeyMsg) (bool, tea.Cmd) {
        m.input.Focus()
        return true, nil
    case "e":
        if m.selectedMessage < len(m.messages) {
        if m.selectedMessage < len(m.App.Messages) {
            m.editorTarget = selectedMessage
            return true, tuiutil.OpenTempfileEditor(
                "message.*.md",
                m.messages[m.selectedMessage].Content,
                m.App.Messages[m.selectedMessage].Content,
                "# Edit the message below\n",
            )
        }
        return false, nil
    case "ctrl+k":
        if m.selectedMessage > 0 && len(m.messages) == len(m.messageOffsets) {
        if m.selectedMessage > 0 && len(m.App.Messages) == len(m.messageOffsets) {
            m.selectedMessage--
            m.updateContent()
            offset := m.messageOffsets[m.selectedMessage]
@@ -89,7 +83,7 @@ func (m *Model) handleMessagesKey(msg tea.KeyMsg) (bool, tea.Cmd) {
        }
        return true, nil
    case "ctrl+j":
        if m.selectedMessage < len(m.messages)-1 && len(m.messages) == len(m.messageOffsets) {
        if m.selectedMessage < len(m.App.Messages)-1 && len(m.App.Messages) == len(m.messageOffsets) {
            m.selectedMessage++
            m.updateContent()
            offset := m.messageOffsets[m.selectedMessage]
@@ -97,23 +91,23 @@ func (m *Model) handleMessagesKey(msg tea.KeyMsg) (bool, tea.Cmd) {
        }
        return true, nil
    case "ctrl+h", "ctrl+l":
        dir := CyclePrev
        dir := model.CyclePrev
        if msg.String() == "ctrl+l" {
            dir = CycleNext
            dir = model.CycleNext
        }

        var cmd tea.Cmd
        if m.selectedMessage == 0 {
            cmd = m.cycleSelectedRoot(m.conversation, dir)
            cmd = m.cycleSelectedRoot(m.App.Conversation, dir)
        } else if m.selectedMessage > 0 {
            cmd = m.cycleSelectedReply(&m.messages[m.selectedMessage-1], dir)
            cmd = m.cycleSelectedReply(&m.App.Messages[m.selectedMessage-1], dir)
        }

        return cmd != nil, cmd
    case "ctrl+r":
        // resubmit the conversation with all messages up until and including the selected message
        if m.state == idle && m.selectedMessage < len(m.messages) {
            m.messages = m.messages[:m.selectedMessage+1]
        if m.state == idle && m.selectedMessage < len(m.App.Messages) {
            m.App.Messages = m.App.Messages[:m.selectedMessage+1]
            m.messageCache = m.messageCache[:m.selectedMessage+1]
            cmd := m.promptLLM()
            m.updateContent()
@@ -129,9 +123,9 @@ func (m *Model) handleInputKey(msg tea.KeyMsg) (bool, tea.Cmd) {
    switch msg.String() {
    case "esc":
        m.focus = focusMessages
        if len(m.messages) > 0 {
            if m.selectedMessage < 0 || m.selectedMessage >= len(m.messages) {
                m.selectedMessage = len(m.messages) - 1
        if len(m.App.Messages) > 0 {
            if m.selectedMessage < 0 || m.selectedMessage >= len(m.App.Messages) {
                m.selectedMessage = len(m.App.Messages) - 1
            }
            offset := m.messageOffsets[m.selectedMessage]
            tuiutil.ScrollIntoView(&m.content, offset, m.content.Height/2)
@@ -150,7 +144,7 @@ func (m *Model) handleInputKey(msg tea.KeyMsg) (bool, tea.Cmd) {
        return true, nil
    }

    if len(m.messages) > 0 && m.messages[len(m.messages)-1].Role == api.MessageRoleUser {
    if len(m.App.Messages) > 0 && m.App.Messages[len(m.App.Messages)-1].Role == api.MessageRoleUser {
        return true, shared.WrapError(fmt.Errorf("Can't reply to a user message"))
    }
@@ -15,7 +15,7 @@ func (m *Model) HandleResize(width, height int) {
    m.Width, m.Height = width, height
    m.content.Width = width
    m.input.SetWidth(width - m.input.FocusedStyle.Base.GetHorizontalFrameSize())
    if len(m.messages) > 0 {
    if len(m.App.Messages) > 0 {
        m.rebuildMessageCache()
        m.updateContent()
    }
@@ -36,26 +36,21 @@ func (m Model) Update(msg tea.Msg) (Model, tea.Cmd) {
        // wake up spinners and cursors
        cmds = append(cmds, cursor.Blink, m.spinner.Tick)

        if m.Shared.Values.ConvShortname != "" {
            // (re)load conversation contents
            cmds = append(cmds, m.loadConversation(m.Shared.Values.ConvShortname))

            if m.conversation.ShortName.String != m.Shared.Values.ConvShortname {
                // clear existing messages if we're loading a new conversation
                m.messages = []api.Message{}
                m.selectedMessage = 0
            }
        }

        // Refresh view
        m.rebuildMessageCache()
        m.updateContent()

        if m.App.Conversation != nil && m.App.Conversation.ID > 0 {
            // (re)load conversation contents
            cmds = append(cmds, m.loadConversationMessages())
        }
    case tuiutil.MsgTempfileEditorClosed:
        contents := string(msg)
        switch m.editorTarget {
        case input:
            m.input.SetValue(contents)
        case selectedMessage:
            toEdit := m.messages[m.selectedMessage]
            toEdit := m.App.Messages[m.selectedMessage]
            if toEdit.Content != contents {
                toEdit.Content = contents
                m.setMessage(m.selectedMessage, toEdit)
@@ -66,18 +61,18 @@ func (m Model) Update(msg tea.Msg) (Model, tea.Cmd) {
            }
        }
    case msgConversationLoaded:
        m.conversation = msg.conversation
        m.rootMessages = msg.rootMessages
        m.App.Conversation = msg.conversation
        m.App.RootMessages = msg.rootMessages
        m.selectedMessage = -1
        if len(m.rootMessages) > 0 {
        if len(m.App.RootMessages) > 0 {
            cmds = append(cmds, m.loadConversationMessages())
        }
    case msgMessagesLoaded:
        m.messages = msg
        m.App.Messages = msg
        if m.selectedMessage == -1 {
            m.selectedMessage = len(msg) - 1
        } else {
            m.selectedMessage = min(m.selectedMessage, len(m.messages))
            m.selectedMessage = min(m.selectedMessage, len(m.App.Messages))
        }
        m.rebuildMessageCache()
        m.updateContent()
@@ -88,10 +83,10 @@ func (m Model) Update(msg tea.Msg) (Model, tea.Cmd) {
            break
        }

        last := len(m.messages) - 1
        if last >= 0 && m.messages[last].Role.IsAssistant() {
        last := len(m.App.Messages) - 1
        if last >= 0 && m.App.Messages[last].Role.IsAssistant() {
            // append chunk to existing message
            m.setMessageContents(last, m.messages[last].Content+msg.Content)
            m.setMessageContents(last, m.App.Messages[last].Content+msg.Content)
        } else {
            // use chunk in a new message
            m.addMessage(api.Message{
@@ -113,12 +108,12 @@ func (m Model) Update(msg tea.Msg) (Model, tea.Cmd) {
        reply := (*api.Message)(msg)
        reply.Content = strings.TrimSpace(reply.Content)

        last := len(m.messages) - 1
        last := len(m.App.Messages) - 1
        if last < 0 {
            panic("Unexpected empty messages handling msgAssistantReply")
        }

        if m.messages[last].Role.IsAssistant() {
        if m.App.Messages[last].Role.IsAssistant() {
            // TODO: handle continuations gracefully - some models support them well, others fail horribly.
            m.setMessage(last, *reply)
        } else {
@@ -136,7 +131,7 @@ func (m Model) Update(msg tea.Msg) (Model, tea.Cmd) {
            cmds = append(cmds, m.persistConversation())
        }

        if m.conversation.Title == "" {
        if m.App.Conversation.Title == "" {
            cmds = append(cmds, m.generateConversationTitle())
        }

@@ -149,12 +144,12 @@ func (m Model) Update(msg tea.Msg) (Model, tea.Cmd) {
        m.Shared.Err = error(msg)
        m.updateContent()
    case msgToolResults:
        last := len(m.messages) - 1
        last := len(m.App.Messages) - 1
        if last < 0 {
            panic("Unexpected empty messages handling msgAssistantReply")
        }

        if m.messages[last].Role != api.MessageRoleToolCall {
        if m.App.Messages[last].Role != api.MessageRoleToolCall {
            panic("Previous message not a tool call, unexpected")
        }

@@ -170,29 +165,29 @@ func (m Model) Update(msg tea.Msg) (Model, tea.Cmd) {
        m.updateContent()
    case msgConversationTitleGenerated:
        title := string(msg)
        m.conversation.Title = title
        m.App.Conversation.Title = title
        if m.persistence {
            cmds = append(cmds, m.updateConversationTitle(m.conversation))
            cmds = append(cmds, m.updateConversationTitle(m.App.Conversation))
        }
    case cursor.BlinkMsg:
        if m.state == pendingResponse {
            // ensure we show the updated "wait for response" cursor blink state
            last := len(m.messages)-1
            last := len(m.App.Messages) - 1
            m.messageCache[last] = m.renderMessage(last)
            m.updateContent()
        }
    case msgConversationPersisted:
        m.conversation = msg.conversation
        m.messages = msg.messages
        m.App.Conversation = msg.conversation
        m.App.Messages = msg.messages
        if msg.isNew {
            m.rootMessages = []api.Message{m.messages[0]}
            m.App.RootMessages = []api.Message{m.App.Messages[0]}
        }
        m.rebuildMessageCache()
        m.updateContent()
    case msgMessageCloned:
        if msg.Parent == nil {
            m.conversation = msg.Conversation
            m.rootMessages = append(m.rootMessages, *msg)
            m.App.Conversation = msg.Conversation
            m.App.RootMessages = append(m.App.RootMessages, *msg)
        }
        cmds = append(cmds, m.loadConversationMessages())
    case msgSelectedRootCycled, msgSelectedReplyCycled, msgMessageUpdated:
@@ -88,26 +88,26 @@ func (m *Model) renderMessageHeading(i int, message *api.Message) string {

    faint := lipgloss.NewStyle().Faint(true)

    if i == 0 && len(m.rootMessages) > 1 && m.conversation.SelectedRootID != nil {
    if i == 0 && len(m.App.RootMessages) > 1 && m.App.Conversation.SelectedRootID != nil {
        selectedRootIndex := 0
        for j, reply := range m.rootMessages {
            if reply.ID == *m.conversation.SelectedRootID {
        for j, reply := range m.App.RootMessages {
            if reply.ID == *m.App.Conversation.SelectedRootID {
                selectedRootIndex = j
                break
            }
        }
        suffix += faint.Render(fmt.Sprintf(" <%d/%d>", selectedRootIndex+1, len(m.rootMessages)))
        suffix += faint.Render(fmt.Sprintf(" <%d/%d>", selectedRootIndex+1, len(m.App.RootMessages)))
    }
    if i > 0 && len(m.messages[i-1].Replies) > 1 {
    if i > 0 && len(m.App.Messages[i-1].Replies) > 1 {
        // Find the selected reply index
        selectedReplyIndex := 0
        for j, reply := range m.messages[i-1].Replies {
            if reply.ID == *m.messages[i-1].SelectedReplyID {
        for j, reply := range m.App.Messages[i-1].Replies {
            if reply.ID == *m.App.Messages[i-1].SelectedReplyID {
                selectedReplyIndex = j
                break
            }
        }
        suffix += faint.Render(fmt.Sprintf(" <%d/%d>", selectedReplyIndex+1, len(m.messages[i-1].Replies)))
        suffix += faint.Render(fmt.Sprintf(" <%d/%d>", selectedReplyIndex+1, len(m.App.Messages[i-1].Replies)))
    }

    if i == m.selectedMessage {
@@ -127,20 +127,20 @@ func (m *Model) renderMessageHeading(i int, message *api.Message) string {
// *at this moment* - we render differently depending on the current application
// state (window size, etc, etc).
func (m *Model) renderMessage(i int) string {
    msg := &m.messages[i]
    msg := &m.App.Messages[i]

    // Write message contents
    sb := &strings.Builder{}
    sb.Grow(len(msg.Content) * 2)
    if msg.Content != "" {
        err := m.Shared.Ctx.Chroma.Highlight(sb, msg.Content)
        err := m.App.Ctx.Chroma.Highlight(sb, msg.Content)
        if err != nil {
            sb.Reset()
            sb.WriteString(msg.Content)
        }
    }

    isLast := i == len(m.messages)-1
    isLast := i == len(m.App.Messages)-1
    isAssistant := msg.Role == api.MessageRoleAssistant

    if m.state == pendingResponse && isLast && isAssistant {
@@ -204,7 +204,7 @@ func (m *Model) renderMessage(i int) string {
        if msg.Content != "" {
            sb.WriteString("\n\n")
        }
        _ = m.Shared.Ctx.Chroma.HighlightLang(sb, toolString, "yaml")
        _ = m.App.Ctx.Chroma.HighlightLang(sb, toolString, "yaml")
    }

    content := strings.TrimRight(sb.String(), "\n")
@@ -224,9 +224,9 @@ func (m *Model) renderMessage(i int) string {
func (m *Model) conversationMessagesView() string {
    sb := strings.Builder{}

    m.messageOffsets = make([]int, len(m.messages))
    m.messageOffsets = make([]int, len(m.App.Messages))
    lineCnt := 1
    for i, message := range m.messages {
    for i, message := range m.App.Messages {
        m.messageOffsets[i] = lineCnt

        heading := m.renderMessageHeading(i, &message)
@@ -241,7 +241,7 @@ func (m *Model) conversationMessagesView() string {
    }

    // Render a placeholder for the incoming assistant reply
    if m.state == pendingResponse && m.messages[len(m.messages)-1].Role != api.MessageRoleAssistant {
    if m.state == pendingResponse && m.App.Messages[len(m.App.Messages)-1].Role != api.MessageRoleAssistant {
        heading := m.renderMessageHeading(-1, &api.Message{
            Role: api.MessageRoleAssistant,
        })
@@ -257,8 +257,8 @@ func (m *Model) conversationMessagesView() string {
func (m *Model) headerView() string {
    titleStyle := lipgloss.NewStyle().Bold(true)
    var title string
    if m.conversation != nil && m.conversation.Title != "" {
        title = m.conversation.Title
    if m.App.Conversation != nil && m.App.Conversation.Title != "" {
        title = m.App.Conversation.Title
    } else {
        title = "Untitled"
    }
@@ -298,7 +298,7 @@ func (m *Model) footerView() string {
        rightSegments = append(rightSegments, segmentStyle.Render(throughput))
    }

    model := fmt.Sprintf("Model: %s", *m.Shared.Ctx.Config.Defaults.Model)
    model := fmt.Sprintf("Model: %s", *m.App.Ctx.Config.Defaults.Model)
    rightSegments = append(rightSegments, segmentStyle.Render(model))

    left := strings.Join(leftSegments, segmentSeparator)
@@ -7,6 +7,7 @@ import (

    "git.mlow.ca/mlow/lmcli/pkg/api"
    "git.mlow.ca/mlow/lmcli/pkg/tui/bubbles"
    "git.mlow.ca/mlow/lmcli/pkg/tui/model"
    "git.mlow.ca/mlow/lmcli/pkg/tui/shared"
    "git.mlow.ca/mlow/lmcli/pkg/tui/styles"
    tuiutil "git.mlow.ca/mlow/lmcli/pkg/tui/util"
@@ -16,40 +17,30 @@ import (
    "github.com/charmbracelet/lipgloss"
)

type loadedConversation struct {
    conv api.Conversation
    lastReply api.Message
}

type (
    // sent when conversation list is loaded
    msgConversationsLoaded ([]loadedConversation)
    msgConversationsLoaded ([]model.LoadedConversation)
    // sent when a conversation is selected
    msgConversationSelected api.Conversation
    // sent when a conversation is deleted
    msgConversationDeleted struct{}
)

// Prompt payloads
type (
    deleteConversationPayload api.Conversation
)

type Model struct {
    shared.Shared
    shared.Sections

    conversations []loadedConversation
    cursor int // index of the currently selected conversation
    itemOffsets []int // keeps track of the viewport y offset of each rendered item
    App *model.AppModel
    cursor int

    itemOffsets []int // conversation y offsets

    content viewport.Model

    confirmPrompt bubbles.ConfirmPrompt
}

func Conversations(shared shared.Shared) Model {
func Conversations(app *model.AppModel, shared shared.Shared) Model {
    m := Model{
        App: app,
        Shared: shared,
        content: viewport.New(0, 0),
    }
@@ -67,16 +58,17 @@ func (m *Model) HandleInput(msg tea.KeyMsg) (bool, tea.Cmd) {

    switch msg.String() {
    case "enter":
        if len(m.conversations) > 0 && m.cursor < len(m.conversations) {
        if len(m.App.Conversations) > 0 && m.cursor < len(m.App.Conversations) {
            m.App.Conversation = &m.App.Conversations[m.cursor].Conv
            m.App.Messages = []api.Message{}
            return true, func() tea.Msg {
                return msgConversationSelected(m.conversations[m.cursor].conv)
                return shared.MsgViewChange(shared.StateChat)
            }
        }
    case "j", "down":
        if m.cursor < len(m.conversations)-1 {
        if m.cursor < len(m.App.Conversations)-1 {
            m.cursor++
            if m.cursor == len(m.conversations)-1 {
                // if last conversation, simply scroll to the bottom
            if m.cursor == len(m.App.Conversations)-1 {
                m.content.GotoBottom()
            } else {
                // this hack positions the *next* conversatoin slightly
@@ -86,7 +78,7 @@ func (m *Model) HandleInput(msg tea.KeyMsg) (bool, tea.Cmd) {
            }
            m.content.SetContent(m.renderConversationList())
        } else {
            m.cursor = len(m.conversations) - 1
            m.cursor = len(m.App.Conversations) - 1
            m.content.GotoBottom()
        }
        return true, nil
@@ -107,14 +99,14 @@ func (m *Model) HandleInput(msg tea.KeyMsg) (bool, tea.Cmd) {
    case "n":
        // new conversation
    case "d":
        if !m.confirmPrompt.Focused() && len(m.conversations) > 0 && m.cursor < len(m.conversations) {
            title := m.conversations[m.cursor].conv.Title
        if !m.confirmPrompt.Focused() && len(m.App.Conversations) > 0 && m.cursor < len(m.App.Conversations) {
            title := m.App.Conversations[m.cursor].Conv.Title
            if title == "" {
                title = "(untitled)"
            }
            m.confirmPrompt = bubbles.NewConfirmPrompt(
                fmt.Sprintf("Delete '%s'?", title),
                deleteConversationPayload(m.conversations[m.cursor].conv),
                m.App.Conversations[m.cursor].Conv,
            )
            m.confirmPrompt.Style = lipgloss.NewStyle().
                Bold(true).
@@ -132,7 +124,7 @@ func (m *Model) HandleInput(msg tea.KeyMsg) (bool, tea.Cmd) {
}

func (m Model) Init() tea.Cmd {
    return nil
    return m.loadConversations()
}

func (m *Model) HandleResize(width, height int) {
@@ -150,20 +142,15 @@ func (m Model) Update(msg tea.Msg) (Model, tea.Cmd) {
        m.HandleResize(msg.Width, msg.Height)
        m.content.SetContent(m.renderConversationList())
    case msgConversationsLoaded:
        m.conversations = msg
        m.cursor = max(0, min(len(m.conversations), m.cursor))
        m.App.Conversations = msg
        m.cursor = max(0, min(len(m.App.Conversations), m.cursor))
        m.content.SetContent(m.renderConversationList())
    case msgConversationSelected:
        m.Values.ConvShortname = msg.ShortName.String
        cmds = append(cmds, func() tea.Msg {
            return shared.MsgViewChange(shared.StateChat)
        })
    case bubbles.MsgConfirmPromptAnswered:
        m.confirmPrompt.Blur()
        if msg.Value {
            switch payload := msg.Payload.(type) {
            case deleteConversationPayload:
                cmds = append(cmds, m.deleteConversation(api.Conversation(payload)))
            conv, ok := msg.Payload.(api.Conversation)
            if ok {
                cmds = append(cmds, m.deleteConversation(conv))
            }
        }
    case msgConversationDeleted:
@@ -193,24 +180,17 @@ func (m Model) Update(msg tea.Msg) (Model, tea.Cmd) {

func (m *Model) loadConversations() tea.Cmd {
    return func() tea.Msg {
        messages, err := m.Ctx.Store.LatestConversationMessages()
        err, conversations := m.App.LoadConversations()
        if err != nil {
            return shared.MsgError(fmt.Errorf("Could not load conversations: %v", err))
        }

        loaded := make([]loadedConversation, len(messages))
        for i, m := range messages {
            loaded[i].lastReply = m
            loaded[i].conv = *m.Conversation
        }

        return msgConversationsLoaded(loaded)
        return msgConversationsLoaded(conversations)
    }
}

func (m *Model) deleteConversation(conv api.Conversation) tea.Cmd {
    return func() tea.Msg {
        err := m.Ctx.Store.DeleteConversation(&conv)
        err := m.App.Ctx.Store.DeleteConversation(&conv)
        if err != nil {
            return shared.MsgError(fmt.Errorf("Could not delete conversation: %v", err))
        }
@@ -289,12 +269,12 @@ func (m *Model) renderConversationList() string {
        sb strings.Builder
    )

    m.itemOffsets = make([]int, len(m.conversations))
    m.itemOffsets = make([]int, len(m.App.Conversations))
    sb.WriteRune('\n')
    currentOffset += 1

    for i, c := range m.conversations {
        lastReplyAge := now.Sub(c.lastReply.CreatedAt)
    for i, c := range m.App.Conversations {
        lastReplyAge := now.Sub(c.LastReply.CreatedAt)

        var category string
        for _, g := range categories {
@@ -314,14 +294,14 @@ func (m *Model) renderConversationList() string {
        }

        tStyle := titleStyle.Copy()
        if c.conv.Title == "" {
        if c.Conv.Title == "" {
            tStyle = tStyle.Inherit(untitledStyle).SetString("(untitled)")
        }
        if i == m.cursor {
            tStyle = tStyle.Inherit(selectedStyle)
        }

        title := tStyle.Width(m.Width - 3).PaddingLeft(2).Render(c.conv.Title)
        title := tStyle.Width(m.Width - 3).PaddingLeft(2).Render(c.Conv.Title)
        if i == m.cursor {
            title = ">" + title[1:]
        }
@@ -334,7 +314,7 @@ func (m *Model) renderConversationList() string {
        ))
        sb.WriteString(item)
        currentOffset += tuiutil.Height(item)
        if i < len(m.conversations)-1 {
        if i < len(m.App.Conversations)-1 {
            sb.WriteRune('\n')
        }
    }
@@ -138,7 +138,7 @@ func SetStructDefaults(data interface{}) bool {

        // Get the "default" struct tag
        defaultTag, ok := v.Type().Field(i).Tag.Lookup("default")
        if (!ok) {
        if !ok {
            continue
        }