tui: conversation rendering tweaks, remove input character limit

Matt Low 2024-03-16 00:37:08 +00:00
parent 1c7ad75fd5
commit adb61ffa59


@@ -90,11 +90,12 @@ type (
 var (
     userStyle = lipgloss.NewStyle().Faint(true).Bold(true).Foreground(lipgloss.Color("10"))
     assistantStyle = lipgloss.NewStyle().Faint(true).Bold(true).Foreground(lipgloss.Color("12"))
-    messageStyle = lipgloss.NewStyle().PaddingLeft(1)
+    messageStyle = lipgloss.NewStyle().PaddingLeft(2).PaddingRight(2)
     headerStyle = lipgloss.NewStyle().
         Background(lipgloss.Color("0"))
-    contentStyle = lipgloss.NewStyle().
-        Padding(1)
+    conversationStyle = lipgloss.NewStyle().
+        MarginTop(1).
+        MarginBottom(1)
     footerStyle = lipgloss.NewStyle().
         BorderTop(true).
         BorderStyle(lipgloss.NormalBorder())
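
A note on the style tweaks above: lipgloss setters return new Style values, so a later call like messageStyle.Width(w) derives a sized copy without mutating the package-level definition. A minimal sketch of how these definitions compose at render time (hypothetical package and function names, not part of this commit):

package tui

import "github.com/charmbracelet/lipgloss"

var (
    // mirrors the new definitions above: padded message blocks inside a
    // conversation wrapper with vertical margins
    messageStyle      = lipgloss.NewStyle().PaddingLeft(2).PaddingRight(2)
    conversationStyle = lipgloss.NewStyle().MarginTop(1).MarginBottom(1)
)

// renderMessage sizes a copy of messageStyle for the current viewport
// width before rendering a single message's contents.
func renderMessage(content string, width int) string {
    return messageStyle.Width(width).Render(content)
}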
@@ -381,6 +382,7 @@ func initialModel(ctx *lmcli.Context, convShortname string) model {
     m.content = viewport.New(0, 0)

     m.input = textarea.New()
+    m.input.CharLimit = 0
     m.input.Placeholder = "Enter a message"

     m.input.FocusedStyle.CursorLine = lipgloss.NewStyle()
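
The added line here is the "remove input character limit" part of the commit: textarea.New() from charmbracelet/bubbles starts with a default CharLimit, and by the documented convention a value of 0 (or less) means no limit, so long prompts are no longer truncated. A standalone sketch of the behavior, not taken from this repo:

package main

import (
    "fmt"
    "strings"

    "github.com/charmbracelet/bubbles/textarea"
)

func main() {
    input := textarea.New()
    input.CharLimit = 0 // 0 (or less) disables the character cap
    input.Placeholder = "Enter a message"

    // with the default limit left in place, a long paste would be cut short
    input.SetValue(strings.Repeat("a", 1000))
    fmt.Println(len(input.Value())) // expect 1000 once the limit is removed
}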
@@ -464,7 +466,6 @@ func (m *model) handleInputKey(msg tea.KeyMsg) tea.Cmd {
         m.updateContent()
         m.content.GotoBottom()
         return m.promptLLM()

-
     case "ctrl+r":
         if len(m.messages) == 0 {
@@ -620,8 +621,17 @@ func (m *model) setMessageContents(i int, content string) {
     m.highlightCache[i] = highlighted
 }

+// render the conversation into the main viewport
 func (m *model) updateContent() {
+    atBottom := m.content.AtBottom()
+    m.content.SetContent(conversationStyle.Render(m.conversationView()))
+    if atBottom {
+        // if we were at bottom before the update, scroll with the output
+        m.content.GotoBottom()
+    }
+}
+
+// render the conversation into a string
+func (m *model) conversationView() string {
     sb := strings.Builder{}
     msgCnt := len(m.messages)
     for i, message := range m.messages {
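
Besides splitting the view code out of updateContent, the new body implements a small "follow the output" pattern: check the viewport's scroll position before replacing its content, and only jump back to the bottom if the user was already there, so manual scrollback isn't yanked away by streaming output. A generic sketch of the same idea with a bubbles viewport (hypothetical helper name):

package tui

import "github.com/charmbracelet/bubbles/viewport"

// setContentFollow replaces the viewport's content and keeps following
// the tail, but only when the user was already scrolled to the bottom.
func setContentFollow(vp *viewport.Model, content string) {
    atBottom := vp.AtBottom() // capture before SetContent changes the scroll range
    vp.SetContent(content)
    if atBottom {
        vp.GotoBottom()
    }
}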
@@ -646,10 +656,12 @@ func (m *model) updateContent() {
         }

         // write message heading with space for content
-        header := fmt.Sprintf("%s\n\n", style.Render(icon+friendly)+saved)
+        header := fmt.Sprintf(" %s", style.Render(icon+friendly)+saved)
         sb.WriteString(header)

         // TODO: special rendering for tool calls/results?
+        if message.Content != "" {
+            sb.WriteString("\n\n")

         // write message contents
         var highlighted string
@@ -658,19 +670,15 @@
         } else {
             highlighted = m.highlightCache[i]
         }
-        contents := messageStyle.Width(m.content.Width - 5).Render(highlighted)
+        contents := messageStyle.Width(m.content.Width).Render(highlighted)
         sb.WriteString(contents)
+        }
         if i < msgCnt-1 {
             sb.WriteString("\n\n")
         }
     }

-    atBottom := m.content.AtBottom()
-    m.content.SetContent(contentStyle.Render(sb.String()))
-    if atBottom {
-        // if we were at bottom before the update, scroll with the output
-        m.content.GotoBottom()
-    }
+    return sb.String()
 }

 func Launch(ctx *lmcli.Context, convShortname string) error {
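
Taken together, the last two hunks change how each message is rendered: a compact header line, then a body only when Content is non-empty (for example, a reply that has not streamed in yet shows just its header), with blank-line separators between messages and the width handled by messageStyle rather than a hard-coded offset. A condensed sketch of that loop with hypothetical, simplified types:

package tui

import (
    "fmt"
    "strings"
)

// message is a stand-in for the model's message type.
type message struct {
    Role    string
    Content string
}

// conversationView mirrors the rendering loop above: heading first, body
// only when there is content, and blank-line separators between messages.
func conversationView(messages []message, wrap func(string) string) string {
    sb := strings.Builder{}
    for i, msg := range messages {
        sb.WriteString(fmt.Sprintf(" %s", msg.Role)) // message heading
        if msg.Content != "" {
            sb.WriteString("\n\n")
            sb.WriteString(wrap(msg.Content)) // styled, width-wrapped body
        }
        if i < len(messages)-1 {
            sb.WriteString("\n\n")
        }
    }
    return sb.String()
}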