tui: conversation rendering tweaks, remove input character limit
parent e9fde37201
commit 4fb059c850

@@ -90,11 +90,12 @@ type (
 var (
 	userStyle = lipgloss.NewStyle().Faint(true).Bold(true).Foreground(lipgloss.Color("10"))
 	assistantStyle = lipgloss.NewStyle().Faint(true).Bold(true).Foreground(lipgloss.Color("12"))
-	messageStyle = lipgloss.NewStyle().PaddingLeft(1)
+	messageStyle = lipgloss.NewStyle().PaddingLeft(2).PaddingRight(2)
 	headerStyle = lipgloss.NewStyle().
 		Background(lipgloss.Color("0"))
-	contentStyle = lipgloss.NewStyle().
-		Padding(1)
+	conversationStyle = lipgloss.NewStyle().
+		MarginTop(1).
+		MarginBottom(1)
 	footerStyle = lipgloss.NewStyle().
 		BorderTop(true).
 		BorderStyle(lipgloss.NormalBorder())
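
For reference, these styles compose with plain lipgloss calls. Below is a minimal, self-contained sketch assuming the standard charmbracelet/lipgloss API; the width value and message text are illustrative and not taken from lmcli:

package main

import (
	"fmt"

	"github.com/charmbracelet/lipgloss"
)

func main() {
	// Mirrors the style definitions in the hunk above; content is made up.
	userStyle := lipgloss.NewStyle().Faint(true).Bold(true).Foreground(lipgloss.Color("10"))
	messageStyle := lipgloss.NewStyle().PaddingLeft(2).PaddingRight(2)
	conversationStyle := lipgloss.NewStyle().MarginTop(1).MarginBottom(1)

	header := " " + userStyle.Render("You")
	body := messageStyle.Width(40).Render("Hello! This message wraps and is padded on both sides.")
	fmt.Println(conversationStyle.Render(header + "\n\n" + body))
}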
@@ -381,6 +382,7 @@ func initialModel(ctx *lmcli.Context, convShortname string) model {
 	m.content = viewport.New(0, 0)
 
 	m.input = textarea.New()
+	m.input.CharLimit = 0
 	m.input.Placeholder = "Enter a message"
 
 	m.input.FocusedStyle.CursorLine = lipgloss.NewStyle()
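
The new CharLimit line is what removes the input character limit: the bubbles textarea only enforces its CharLimit when the value is positive, so 0 lifts the finite default cap. A minimal sketch, assuming the standard charmbracelet/bubbles textarea API (the placeholder mirrors the line above):

package main

import (
	"fmt"

	"github.com/charmbracelet/bubbles/textarea"
)

func main() {
	input := textarea.New()
	input.CharLimit = 0 // 0 disables the limit; the default is a finite cap
	input.Placeholder = "Enter a message"
	fmt.Println(input.View())
}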
@@ -464,7 +466,6 @@ func (m *model) handleInputKey(msg tea.KeyMsg) tea.Cmd {
 
 		m.updateContent()
 		m.content.GotoBottom()
-
 		return m.promptLLM()
 	case "ctrl+r":
 		if len(m.messages) == 0 {
@@ -620,8 +621,17 @@ func (m *model) setMessageContents(i int, content string) {
 	m.highlightCache[i] = highlighted
 }
 
-// render the conversation into the main viewport
 func (m *model) updateContent() {
+	atBottom := m.content.AtBottom()
+	m.content.SetContent(conversationStyle.Render(m.conversationView()))
+	if atBottom {
+		// if we were at bottom before the update, scroll with the output
+		m.content.GotoBottom()
+	}
+}
+
+// render the conversation into a string
+func (m *model) conversationView() string {
 	sb := strings.Builder{}
 	msgCnt := len(m.messages)
 	for i, message := range m.messages {
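
The reworked updateContent wraps the re-render in a stick-to-bottom check so streaming output follows the tail only when the user has not scrolled up. A standalone sketch of that pattern with a bubbles viewport; the refresh helper, dimensions, and content are illustrative, not from lmcli:

package main

import (
	"fmt"

	"github.com/charmbracelet/bubbles/viewport"
)

// refresh re-renders content into the viewport, jumping to the bottom only if
// the user was already scrolled to the bottom, so scrollback is preserved.
func refresh(vp *viewport.Model, content string) {
	atBottom := vp.AtBottom()
	vp.SetContent(content)
	if atBottom {
		vp.GotoBottom()
	}
}

func main() {
	vp := viewport.New(80, 24)
	refresh(&vp, "line 1\nline 2\nline 3")
	fmt.Println(vp.View())
}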
@@ -646,31 +656,29 @@ func (m *model) updateContent() {
 		}
 
 		// write message heading with space for content
-		header := fmt.Sprintf("%s\n\n", style.Render(icon+friendly)+saved)
+		header := fmt.Sprintf(" %s", style.Render(icon+friendly)+saved)
 		sb.WriteString(header)
 
 		// TODO: special rendering for tool calls/results?
+		if message.Content != "" {
+			sb.WriteString("\n\n")
 
-		// write message contents
-		var highlighted string
-		if m.highlightCache[i] == "" {
-			highlighted = message.Content
-		} else {
-			highlighted = m.highlightCache[i]
+			// write message contents
+			var highlighted string
+			if m.highlightCache[i] == "" {
+				highlighted = message.Content
+			} else {
+				highlighted = m.highlightCache[i]
+			}
+			contents := messageStyle.Width(m.content.Width).Render(highlighted)
+			sb.WriteString(contents)
 		}
-		contents := messageStyle.Width(m.content.Width - 5).Render(highlighted)
-		sb.WriteString(contents)
 
 		if i < msgCnt-1 {
 			sb.WriteString("\n\n")
 		}
 	}
-	atBottom := m.content.AtBottom()
-	m.content.SetContent(contentStyle.Render(sb.String()))
-	if atBottom {
-		// if we were at bottom before the update, scroll with the output
-		m.content.GotoBottom()
-	}
+	return sb.String()
 }
 
 func Launch(ctx *lmcli.Context, convShortname string) error {
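
Dropping the manual "- 5" from the message width appears to rely on lipgloss wrapping content to the style's Width minus its own horizontal padding, so a block styled with PaddingLeft(2).PaddingRight(2) and Width(m.content.Width) comes out as wide as the viewport without any hand-tuned offset. A hedged sketch of that assumption; the 40-cell width is arbitrary:

package main

import (
	"fmt"

	"github.com/charmbracelet/lipgloss"
)

func main() {
	messageStyle := lipgloss.NewStyle().PaddingLeft(2).PaddingRight(2)

	// Wrap a long line into a 40-cell block; the text is wrapped to the
	// style Width minus the horizontal padding, so no manual subtraction.
	block := messageStyle.Width(40).Render(
		"A fairly long assistant message that has to be wrapped to fit the viewport.")
	fmt.Println(block)
	fmt.Println("block width:", lipgloss.Width(block)) // expected: 40
}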