Add token/sec counter to tui
commit 86bdc733bf (parent 60394de620)
@@ -66,6 +66,10 @@ type chatModel struct {
     replyChunkChan chan string
     persistence    bool // whether we will save new messages in the conversation

+    tokenCount uint
+    startTime  time.Time
+    elapsed    time.Duration
+
     // ui state
     focus focusState
     wrap  bool // whether message content is wrapped to viewport width
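Read together, the three new fields describe the reply currently being streamed: tokenCount counts the chunks received so far, startTime marks when the prompt was sent, and elapsed caches the time since then. A minimal illustrative mirror of those fields (the throughputStats type and its package are assumptions, not part of the codebase):

// Illustrative mirror of the three fields added to chatModel above.
package stats

import "time"

type throughputStats struct {
    tokenCount uint          // incremented once per streamed reply chunk (see the Update hunk below)
    startTime  time.Time     // set when the prompt is sent (see the promptLLM hunk below)
    elapsed    time.Duration // time since startTime, refreshed on every chunk
}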
@@ -282,6 +286,9 @@ func (m chatModel) Update(msg tea.Msg) (chatModel, tea.Cmd) {
         }
         m.updateContent()
         cmds = append(cmds, m.waitForChunk()) // wait for the next chunk
+
+        m.tokenCount++
+        m.elapsed = time.Now().Sub(m.startTime)
     case msgAssistantReply:
         // the last reply that was being worked on is finished
         reply := models.Message(msg)
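Every streamed chunk now increments tokenCount and refreshes elapsed, so the rate shown in the footer is an average over the whole reply so far. Counting one chunk as one token is an approximation, since a provider chunk may carry more than one token. Below is a self-contained sketch of the same counting pattern outside Bubble Tea; the fake chunk producer and all names are hypothetical, and time.Since(start) is used as the equivalent of time.Now().Sub(start):

package main

import (
    "fmt"
    "time"
)

func main() {
    // Hypothetical producer standing in for the provider's streaming reply.
    chunks := make(chan string)
    go func() {
        defer close(chunks)
        for _, c := range []string{"Hel", "lo", ", ", "wor", "ld"} {
            time.Sleep(40 * time.Millisecond) // stand-in for network latency
            chunks <- c
        }
    }()

    var tokenCount uint
    startTime := time.Now()
    for range chunks {
        tokenCount++                     // one chunk counted as one token
        elapsed := time.Since(startTime) // same as time.Now().Sub(startTime)
        fmt.Printf("%d chunks, %.0f t/sec\n", tokenCount, float64(tokenCount)/elapsed.Seconds())
    }
}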
@@ -693,10 +700,16 @@ func (m *chatModel) footerView() string {
         saving,
         segmentStyle.Render(status),
     }
-    rightSegments := []string{
-        segmentStyle.Render(fmt.Sprintf("Model: %s", *m.ctx.Config.Defaults.Model)),
-    }
+    rightSegments := []string{}
+
+    if m.elapsed > 0 && m.tokenCount > 0 {
+        throughput := fmt.Sprintf("%.0f t/sec", float64(m.tokenCount)/m.elapsed.Seconds())
+        rightSegments = append(rightSegments, segmentStyle.Render(throughput))
+    }
+
+    model := fmt.Sprintf("Model: %s", *m.ctx.Config.Defaults.Model)
+    rightSegments = append(rightSegments, segmentStyle.Render(model))

     left := strings.Join(leftSegments, segmentSeparator)
     right := strings.Join(rightSegments, segmentSeparator)
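The throughput segment is appended only when both elapsed and tokenCount are non-zero, which avoids a division by zero and keeps the footer free of a meaningless rate before the first chunk arrives; the model segment is now built separately so it always renders. A small sketch of that guard pulled into a helper; the throughputSegment name and the standalone program are assumptions, not code from the repository:

package main

import (
    "fmt"
    "time"
)

// throughputSegment mirrors the guard in footerView: no segment until at
// least one chunk has arrived and some time has elapsed.
func throughputSegment(tokenCount uint, elapsed time.Duration) (string, bool) {
    if elapsed <= 0 || tokenCount == 0 {
        return "", false
    }
    return fmt.Sprintf("%.0f t/sec", float64(tokenCount)/elapsed.Seconds()), true
}

func main() {
    if s, ok := throughputSegment(127, 2500*time.Millisecond); ok {
        fmt.Println(s) // 127 tokens over 2.5s prints "51 t/sec"
    }
}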
@@ -857,6 +870,10 @@ func (m *chatModel) promptLLM() tea.Cmd {
     m.waitingForReply = true
     m.status = "Press ctrl+c to cancel"

+    m.tokenCount = 0
+    m.startTime = time.Now()
+    m.elapsed = 0
+
     return func() tea.Msg {
         completionProvider, err := m.ctx.GetCompletionProvider(*m.ctx.Config.Defaults.Model)
         if err != nil {
|