Compare commits


No commits in common. "f05e2e30f7a4a83878714b04dcb627b2bb8450d3" and "e59ce973b6b5a9f7d66ba2058fe127a5194f819d" have entirely different histories.

45 changed files with 1570 additions and 2420 deletions


@ -14,7 +14,7 @@
system prompt, rather than having them in the conversation messages)
- [ ] Agents may have some form of long term memory management (key-value?
natural lang?).
- [ ] Sandboxed python, js interpreters (implemented with containers)
- [ ] Sandboxed python, js interpreter (both useful for different reasons)
- [ ] Support for arbitrary external script tools
- [ ] Search - RAG-driven search of existing conversations ("hey, remind me of
the conversation we had six months ago about X")
@ -23,15 +23,12 @@
- [ ] Image input
- [ ] Image output (sixel support?)
- [ ] Conversation exports to html/pdf/json
- [ ] Store message generation model
- [ ] Hidden CoT
- [ ] Token accounting
## UI
- [x] Prettify/normalize tool_call and tool_result outputs so they can be
shown/optionally hidden in `lmcli view` and `lmcli chat`
- [x] Conversation deletion in conversations view
- [ ] User confirmation before calling (some?) tools
- [ ] Conversation deletion in conversations view
- [ ] Message deletion, Ctrl+D to delete a message and attach its children to
its parent, Ctrl+Shift+D to delete a message and its descendants
- [ ] Show available key bindings and their action in any given view

go.mod (35 changed lines)

@ -3,41 +3,40 @@ module git.mlow.ca/mlow/lmcli
go 1.21
require (
github.com/alecthomas/chroma/v2 v2.14.0
github.com/charmbracelet/bubbles v0.20.0
github.com/charmbracelet/bubbletea v1.1.1
github.com/charmbracelet/lipgloss v0.13.0
github.com/alecthomas/chroma/v2 v2.11.1
github.com/charmbracelet/bubbles v0.18.0
github.com/charmbracelet/bubbletea v0.25.0
github.com/charmbracelet/lipgloss v0.10.0
github.com/muesli/reflow v0.3.0
github.com/spf13/cobra v1.8.1
github.com/spf13/cobra v1.8.0
github.com/sqids/sqids-go v0.4.1
gopkg.in/yaml.v3 v3.0.1
gorm.io/driver/sqlite v1.5.6
gorm.io/gorm v1.25.12
gorm.io/driver/sqlite v1.5.4
gorm.io/gorm v1.25.5
)
require (
github.com/atotto/clipboard v0.1.4 // indirect
github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect
github.com/charmbracelet/x/ansi v0.3.1 // indirect
github.com/charmbracelet/x/term v0.2.0 // indirect
github.com/dlclark/regexp2 v1.11.4 // indirect
github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f // indirect
github.com/containerd/console v1.0.4-0.20230313162750-1ae8d489ac81 // indirect
github.com/dlclark/regexp2 v1.10.0 // indirect
github.com/inconshreveable/mousetrap v1.1.0 // indirect
github.com/jinzhu/inflection v1.0.0 // indirect
github.com/jinzhu/now v1.1.5 // indirect
github.com/kr/pretty v0.3.1 // indirect
github.com/lucasb-eyer/go-colorful v1.2.0 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect
github.com/mattn/go-isatty v0.0.18 // indirect
github.com/mattn/go-localereader v0.0.1 // indirect
github.com/mattn/go-runewidth v0.0.16 // indirect
github.com/mattn/go-sqlite3 v1.14.23 // indirect
github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6 // indirect
github.com/mattn/go-runewidth v0.0.15 // indirect
github.com/mattn/go-sqlite3 v1.14.18 // indirect
github.com/muesli/ansi v0.0.0-20211018074035-2e021307bc4b // indirect
github.com/muesli/cancelreader v0.2.2 // indirect
github.com/muesli/termenv v0.15.2 // indirect
github.com/rivo/uniseg v0.4.7 // indirect
github.com/spf13/pflag v1.0.5 // indirect
golang.org/x/sync v0.8.0 // indirect
golang.org/x/sys v0.25.0 // indirect
golang.org/x/text v0.18.0 // indirect
golang.org/x/sync v0.1.0 // indirect
golang.org/x/sys v0.14.0 // indirect
golang.org/x/term v0.6.0 // indirect
golang.org/x/text v0.3.8 // indirect
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 // indirect
)

go.sum (84 changed lines)

@ -1,31 +1,25 @@
github.com/MakeNowJust/heredoc v1.0.0 h1:cXCdzVdstXyiTqTvfqk9SDHpKNjxuom+DOlyEeQ4pzQ=
github.com/MakeNowJust/heredoc v1.0.0/go.mod h1:mG5amYoWBHf8vpLOuehzbGGw0EHxpZZ6lCpQ4fNJ8LE=
github.com/alecthomas/assert/v2 v2.7.0 h1:QtqSACNS3tF7oasA8CU6A6sXZSBDqnm7RfpLl9bZqbE=
github.com/alecthomas/assert/v2 v2.7.0/go.mod h1:Bze95FyfUr7x34QZrjL+XP+0qgp/zg8yS+TtBj1WA3k=
github.com/alecthomas/chroma/v2 v2.14.0 h1:R3+wzpnUArGcQz7fCETQBzO5n9IMNi13iIs46aU4V9E=
github.com/alecthomas/chroma/v2 v2.14.0/go.mod h1:QolEbTfmUHIMVpBqxeDnNBj2uoeI4EbYP4i6n68SG4I=
github.com/alecthomas/repr v0.4.0 h1:GhI2A8MACjfegCPVq9f1FLvIBS+DrQ2KQBFZP1iFzXc=
github.com/alecthomas/repr v0.4.0/go.mod h1:Fr0507jx4eOXV7AlPV6AVZLYrLIuIeSOWtW57eE/O/4=
github.com/alecthomas/assert/v2 v2.2.1 h1:XivOgYcduV98QCahG8T5XTezV5bylXe+lBxLG2K2ink=
github.com/alecthomas/assert/v2 v2.2.1/go.mod h1:pXcQ2Asjp247dahGEmsZ6ru0UVwnkhktn7S0bBDLxvQ=
github.com/alecthomas/chroma/v2 v2.11.1 h1:m9uUtgcdAwgfFNxuqj7AIG75jD2YmL61BBIJWtdzJPs=
github.com/alecthomas/chroma/v2 v2.11.1/go.mod h1:4TQu7gdfuPjSh76j78ietmqh9LiurGF0EpseFXdKMBw=
github.com/alecthomas/repr v0.2.0 h1:HAzS41CIzNW5syS8Mf9UwXhNH1J9aix/BvDRf1Ml2Yk=
github.com/alecthomas/repr v0.2.0/go.mod h1:Fr0507jx4eOXV7AlPV6AVZLYrLIuIeSOWtW57eE/O/4=
github.com/atotto/clipboard v0.1.4 h1:EH0zSVneZPSuFR11BlR9YppQTVDbh5+16AmcJi4g1z4=
github.com/atotto/clipboard v0.1.4/go.mod h1:ZY9tmq7sm5xIbd9bOK4onWV4S6X0u6GY7Vn0Yu86PYI=
github.com/aymanbagabas/go-osc52/v2 v2.0.1 h1:HwpRHbFMcZLEVr42D4p7XBqjyuxQH5SMiErDT4WkJ2k=
github.com/aymanbagabas/go-osc52/v2 v2.0.1/go.mod h1:uYgXzlJ7ZpABp8OJ+exZzJJhRNQ2ASbcXHWsFqH8hp8=
github.com/charmbracelet/bubbles v0.20.0 h1:jSZu6qD8cRQ6k9OMfR1WlM+ruM8fkPWkHvQWD9LIutE=
github.com/charmbracelet/bubbles v0.20.0/go.mod h1:39slydyswPy+uVOHZ5x/GjwVAFkCsV8IIVy+4MhzwwU=
github.com/charmbracelet/bubbletea v1.1.1 h1:KJ2/DnmpfqFtDNVTvYZ6zpPFL9iRCRr0qqKOCvppbPY=
github.com/charmbracelet/bubbletea v1.1.1/go.mod h1:9Ogk0HrdbHolIKHdjfFpyXJmiCzGwy+FesYkZr7hYU4=
github.com/charmbracelet/lipgloss v0.13.0 h1:4X3PPeoWEDCMvzDvGmTajSyYPcZM4+y8sCA/SsA3cjw=
github.com/charmbracelet/lipgloss v0.13.0/go.mod h1:nw4zy0SBX/F/eAO1cWdcvy6qnkDUxr8Lw7dvFrAIbbY=
github.com/charmbracelet/x/ansi v0.3.1 h1:CRO6lc/6HCx2/D6S/GZ87jDvRvk6GtPyFP+IljkNtqI=
github.com/charmbracelet/x/ansi v0.3.1/go.mod h1:dk73KoMTT5AX5BsX0KrqhsTqAnhZZoCBjs7dGWp4Ktw=
github.com/charmbracelet/x/term v0.2.0 h1:cNB9Ot9q8I711MyZ7myUR5HFWL/lc3OpU8jZ4hwm0x0=
github.com/charmbracelet/x/term v0.2.0/go.mod h1:GVxgxAbjUrmpvIINHIQnJJKpMlHiZ4cktEQCN6GWyF0=
github.com/cpuguy83/go-md2man/v2 v2.0.4/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
github.com/charmbracelet/bubbles v0.18.0 h1:PYv1A036luoBGroX6VWjQIE9Syf2Wby2oOl/39KLfy0=
github.com/charmbracelet/bubbles v0.18.0/go.mod h1:08qhZhtIwzgrtBjAcJnij1t1H0ZRjwHyGsy6AL11PSw=
github.com/charmbracelet/bubbletea v0.25.0 h1:bAfwk7jRz7FKFl9RzlIULPkStffg5k6pNt5dywy4TcM=
github.com/charmbracelet/bubbletea v0.25.0/go.mod h1:EN3QDR1T5ZdWmdfDzYcqOCAps45+QIJbLOBxmVNWNNg=
github.com/charmbracelet/lipgloss v0.10.0 h1:KWeXFSexGcfahHX+54URiZGkBFazf70JNMtwg/AFW3s=
github.com/charmbracelet/lipgloss v0.10.0/go.mod h1:Wig9DSfvANsxqkRsqj6x87irdy123SR4dOXlKa91ciE=
github.com/containerd/console v1.0.4-0.20230313162750-1ae8d489ac81 h1:q2hJAaP1k2wIvVRd/hEHD7lacgqrCPS+k8g1MndzfWY=
github.com/containerd/console v1.0.4-0.20230313162750-1ae8d489ac81/go.mod h1:YynlIjWYF8myEu6sdkwKIvGQq+cOckRm6So2avqoYAk=
github.com/cpuguy83/go-md2man/v2 v2.0.3/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
github.com/dlclark/regexp2 v1.11.4 h1:rPYF9/LECdNymJufQKmri9gV604RvvABwgOA8un7yAo=
github.com/dlclark/regexp2 v1.11.4/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8=
github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f h1:Y/CXytFA4m6baUTXGLOoWe4PQhGxaX0KpnayAqC48p4=
github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f/go.mod h1:vw97MGsxSvLiUE2X8qFplwetxpGLQrlU1Q9AUEIzCaM=
github.com/dlclark/regexp2 v1.10.0 h1:+/GIL799phkJqYW+3YbOd8LCcbHzT0Pbo8zl70MHsq0=
github.com/dlclark/regexp2 v1.10.0/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8=
github.com/hexops/gotextdiff v1.0.3 h1:gitA9+qJrrTCsiCl7+kh75nPqQt1cx4ZkudSTLoUqJM=
github.com/hexops/gotextdiff v1.0.3/go.mod h1:pSWU5MAI3yDq+fZBTazCSJysOMbxWL1BSow5/V2vxeg=
github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
@ -40,17 +34,17 @@ github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
github.com/lucasb-eyer/go-colorful v1.2.0 h1:1nnpGOrhyZZuNyfu1QjKiUICQ74+3FNCN69Aj6K7nkY=
github.com/lucasb-eyer/go-colorful v1.2.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0=
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
github.com/mattn/go-isatty v0.0.18 h1:DOKFKCQ7FNG2L1rbrmstDN4QVRdS89Nkh85u68Uwp98=
github.com/mattn/go-isatty v0.0.18/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
github.com/mattn/go-localereader v0.0.1 h1:ygSAOl7ZXTx4RdPYinUpg6W99U8jWvWi9Ye2JC/oIi4=
github.com/mattn/go-localereader v0.0.1/go.mod h1:8fBrzywKY7BI3czFoHkuzRoWE9C+EiG4R1k4Cjx5p88=
github.com/mattn/go-runewidth v0.0.12/go.mod h1:RAqKPSqVFrSLVXbA8x7dzmKdmGzieGRCM46jaSJTDAk=
github.com/mattn/go-runewidth v0.0.16 h1:E5ScNMtiwvlvB5paMFdw9p4kSQzbXFikJ5SQO6TULQc=
github.com/mattn/go-runewidth v0.0.16/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
github.com/mattn/go-sqlite3 v1.14.23 h1:gbShiuAP1W5j9UOksQ06aiiqPMxYecovVGwmTxWtuw0=
github.com/mattn/go-sqlite3 v1.14.23/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=
github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6 h1:ZK8zHtRHOkbHy6Mmr5D264iyp3TiX5OmNcI5cIARiQI=
github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6/go.mod h1:CJlz5H+gyd6CUWT45Oy4q24RdLyn7Md9Vj2/ldJBSIo=
github.com/mattn/go-runewidth v0.0.15 h1:UNAjwbU9l54TA3KzvqLGxwWjHmMgBUVhBiTjelZgg3U=
github.com/mattn/go-runewidth v0.0.15/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
github.com/mattn/go-sqlite3 v1.14.18 h1:JL0eqdCOq6DJVNPSvArO/bIV9/P7fbGrV00LZHc+5aI=
github.com/mattn/go-sqlite3 v1.14.18/go.mod h1:2eHXhiwb8IkHr+BDWZGa96P6+rkvnG63S2DGjv9HUNg=
github.com/muesli/ansi v0.0.0-20211018074035-2e021307bc4b h1:1XF24mVaiu7u+CFywTdcDo2ie1pzzhwjt6RHqzpMU34=
github.com/muesli/ansi v0.0.0-20211018074035-2e021307bc4b/go.mod h1:fQuZ0gauxyBcmsdE3ZT4NasjaRdxmbCS0jRHsrWu3Ho=
github.com/muesli/cancelreader v0.2.2 h1:3I4Kt4BQjOR54NavqnDogx/MIoWBFa0StPA8ELUXHmA=
github.com/muesli/cancelreader v0.2.2/go.mod h1:3XuTXfFS2VjM+HTLZY9Ak0l6eUKfijIfMUZ4EgX0QYo=
github.com/muesli/reflow v0.3.0 h1:IFsN6K9NfGtjeggFP+68I4chLZV2yIKsXJFNZ+eWh6s=
@ -65,26 +59,28 @@ github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUc
github.com/rogpeppe/go-internal v1.9.0 h1:73kH8U+JUqXU8lRuOHeVHaa/SZPifC7BkcraZVejAe8=
github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/spf13/cobra v1.8.1 h1:e5/vxKd/rZsfSJMUX1agtjeTDf+qv1/JdBF8gg5k9ZM=
github.com/spf13/cobra v1.8.1/go.mod h1:wHxEcudfqmLYa8iTfL+OuZPbBZkmvliBWKIezN3kD9Y=
github.com/spf13/cobra v1.8.0 h1:7aJaZx1B85qltLMc546zn58BxxfZdR/W22ej9CFoEf0=
github.com/spf13/cobra v1.8.0/go.mod h1:WXLWApfZ71AjXPya3WOlMsY9yMs7YeiHhFVlvLyhcho=
github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
github.com/sqids/sqids-go v0.4.1 h1:eQKYzmAZbLlRwHeHYPF35QhgxwZHLnlmVj9AkIj/rrw=
github.com/sqids/sqids-go v0.4.1/go.mod h1:EMwHuPQgSNFS0A49jESTfIQS+066XQTVhukrzEPScl8=
golang.org/x/sync v0.8.0 h1:3NFvSEYkUoMifnESzZl15y791HH1qU2xm6eCJU5ZPXQ=
golang.org/x/sync v0.8.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sync v0.1.0 h1:wsuoTGHzEhffawBOhz5CYhcrV4IdKZbEyZjBMuTp12o=
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.25.0 h1:r+8e+loiHxRqhXVl6ML1nO3l1+oFoWbnlu2Ehimmi34=
golang.org/x/sys v0.25.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/text v0.18.0 h1:XvMDiNzPAl0jr17s6W9lcaIhGUfUORdGCNsuLmPG224=
golang.org/x/text v0.18.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY=
golang.org/x/sys v0.14.0 h1:Vz7Qs629MkJkGyHxUlRHizWJRG2j8fbQKjELVSNhy7Q=
golang.org/x/sys v0.14.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/term v0.6.0 h1:clScbb1cHjoCkyRbWwBEUZ5H/tIFu5TAXIqaZD0Gcjw=
golang.org/x/term v0.6.0/go.mod h1:m6U89DPEgQRMq3DNkDClhWw02AUbt2daBVO4cn4Hv9U=
golang.org/x/text v0.3.8 h1:nAL+RVCQ9uMn3vJZbV+MRnydTJFPf8qqY42YiA6MrqY=
golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 h1:YR8cESwS4TdDjEe65xsg0ogRM/Nc3DYOhEAlW+xobZo=
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gorm.io/driver/sqlite v1.5.6 h1:fO/X46qn5NUEEOZtnjJRWRzZMe8nqJiQ9E+0hi+hKQE=
gorm.io/driver/sqlite v1.5.6/go.mod h1:U+J8craQU6Fzkcvu8oLeAQmi50TkwPEhHDEjQZXDah4=
gorm.io/gorm v1.25.12 h1:I0u8i2hWQItBq1WfE0o2+WuL9+8L21K9e2HHSTE/0f8=
gorm.io/gorm v1.25.12/go.mod h1:xh7N7RHfYlNc5EmcI/El95gXusucDrQnHXe0+CgWcLQ=
gorm.io/driver/sqlite v1.5.4 h1:IqXwXi8M/ZlPzH/947tn5uik3aYQslP9BVveoax0nV0=
gorm.io/driver/sqlite v1.5.4/go.mod h1:qxAuCol+2r6PannQDpOP1FP6ag3mKi4esLnB/jHed+4=
gorm.io/gorm v1.25.5 h1:zR9lOiiYf09VNh5Q1gphfyia1JpiClIWG9hQaxB/mls=
gorm.io/gorm v1.25.5/go.mod h1:hbnx/Oo0ChWMn1BIhpy1oYozzpM15i4YPuHDmfYtwg8=


@ -1,126 +1,49 @@
package api
import (
"encoding/json"
"fmt"
"context"
)
type MessageRole string
type ReplyCallback func(Message)
const (
MessageRoleSystem MessageRole = "system"
MessageRoleUser MessageRole = "user"
MessageRoleAssistant MessageRole = "assistant"
MessageRoleToolCall MessageRole = "tool_call"
MessageRoleToolResult MessageRole = "tool_result"
)
type Message struct {
Content string // TODO: support multi-part messages
Role MessageRole
ToolCalls []ToolCall
ToolResults []ToolResult
type Chunk struct {
Content string
TokenCount uint
}
type ToolSpec struct {
Name string
Description string
Parameters []ToolParameter
Impl func(*ToolSpec, map[string]interface{}) (string, error)
type RequestParameters struct {
Model string
MaxTokens int
Temperature float32
TopP float32
Toolbox []ToolSpec
}
type ToolParameter struct {
Name string `json:"name"`
Type string `json:"type"` // "string", "integer", "boolean"
Required bool `json:"required"`
Description string `json:"description"`
Enum []string `json:"enum,omitempty"`
type ChatCompletionProvider interface {
// CreateChatCompletion requests a response to the provided messages
// and returns the model's reply as a single Message.
CreateChatCompletion(
ctx context.Context,
params RequestParameters,
messages []Message,
) (*Message, error)
// Like CreateChatCompletion, except the response is streamed via
// the output channel as it's received.
CreateChatCompletionStream(
ctx context.Context,
params RequestParameters,
messages []Message,
chunks chan<- Chunk,
) (*Message, error)
}
type ToolCall struct {
ID string `json:"id" yaml:"-"`
Name string `json:"name" yaml:"tool"`
Parameters map[string]interface{} `json:"parameters" yaml:"parameters"`
}
type ToolResult struct {
ToolCallID string `json:"toolCallID" yaml:"-"`
ToolName string `json:"toolName,omitempty" yaml:"tool"`
Result string `json:"result,omitempty" yaml:"result"`
}
func NewMessageWithAssistant(content string) *Message {
return &Message{
Role: MessageRoleAssistant,
Content: content,
}
}
func NewMessageWithToolCalls(content string, toolCalls []ToolCall) *Message {
return &Message{
Role: MessageRoleToolCall,
Content: content,
ToolCalls: toolCalls,
}
}
func (m MessageRole) IsAssistant() bool {
switch m {
case MessageRoleAssistant, MessageRoleToolCall:
return true
}
func IsAssistantContinuation(messages []Message) bool {
if len(messages) == 0 {
return false
}
func (m MessageRole) IsUser() bool {
switch m {
case MessageRoleUser, MessageRoleToolResult:
return true
}
return false
}
func (m MessageRole) IsSystem() bool {
switch m {
case MessageRoleSystem:
return true
}
return false
}
// FriendlyRole returns a human friendly signifier for the message's role.
func (m MessageRole) FriendlyRole() string {
switch m {
case MessageRoleUser:
return "You"
case MessageRoleSystem:
return "System"
case MessageRoleAssistant:
return "Assistant"
case MessageRoleToolCall:
return "Tool Call"
case MessageRoleToolResult:
return "Tool Result"
default:
return string(m)
}
}
// TODO: remove this
type CallResult struct {
Message string `json:"message"`
Result any `json:"result,omitempty"`
}
func (r CallResult) ToJson() (string, error) {
if r.Message == "" {
// When message not supplied, assume success
r.Message = "success"
}
jsonBytes, err := json.Marshal(r)
if err != nil {
return "", fmt.Errorf("Could not marshal CallResult to JSON: %v\n", err)
}
return string(jsonBytes), nil
return messages[len(messages)-1].Role == MessageRoleAssistant
}
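
The pkg/api diff above introduces the Chunk type, RequestParameters, and the ChatCompletionProvider interface. The sketch below is not part of the diff: it shows one way a caller might drive the streaming side of that interface. The provider value, model name, and prompt are placeholders.

package example

import (
	"context"
	"fmt"

	"git.mlow.ca/mlow/lmcli/pkg/api"
)

// streamOnce sends one user message and prints chunks as they arrive.
// The concrete provider (anthropic, openai, ...) is supplied by the caller.
func streamOnce(p api.ChatCompletionProvider) (*api.Message, error) {
	params := api.RequestParameters{Model: "example-model", MaxTokens: 1024}
	messages := []api.Message{{Role: api.MessageRoleUser, Content: "Hello!"}}

	chunks := make(chan api.Chunk)
	done := make(chan struct{})
	go func() {
		for c := range chunks {
			fmt.Print(c.Content)
		}
		close(done)
	}()

	reply, err := p.CreateChatCompletionStream(context.Background(), params, messages, chunks)
	close(chunks) // the callers in this diff close the channel themselves
	<-done
	return reply, err
}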

pkg/api/conversation.go (new file, 11 lines)

@ -0,0 +1,11 @@
package api
import "database/sql"
type Conversation struct {
ID uint `gorm:"primaryKey"`
ShortName sql.NullString
Title string
SelectedRootID *uint
SelectedRoot *Message `gorm:"foreignKey:SelectedRootID"`
}

pkg/api/message.go (new file, 72 lines)

@ -0,0 +1,72 @@
package api
import (
"time"
)
type MessageRole string
const (
MessageRoleSystem MessageRole = "system"
MessageRoleUser MessageRole = "user"
MessageRoleAssistant MessageRole = "assistant"
MessageRoleToolCall MessageRole = "tool_call"
MessageRoleToolResult MessageRole = "tool_result"
)
type Message struct {
ID uint `gorm:"primaryKey"`
ConversationID *uint `gorm:"index"`
Conversation *Conversation `gorm:"foreignKey:ConversationID"`
Content string
Role MessageRole
CreatedAt time.Time
ToolCalls ToolCalls // a json array of tool calls (from the model)
ToolResults ToolResults // a json array of tool results
ParentID *uint
Parent *Message `gorm:"foreignKey:ParentID"`
Replies []Message `gorm:"foreignKey:ParentID"`
SelectedReplyID *uint
SelectedReply *Message `gorm:"foreignKey:SelectedReplyID"`
}
func ApplySystemPrompt(m []Message, system string, force bool) []Message {
if len(m) > 0 && m[0].Role == MessageRoleSystem {
if force {
m[0].Content = system
}
return m
} else {
return append([]Message{{
Role: MessageRoleSystem,
Content: system,
}}, m...)
}
}
func (m *MessageRole) IsAssistant() bool {
switch *m {
case MessageRoleAssistant, MessageRoleToolCall:
return true
}
return false
}
// FriendlyRole returns a human friendly signifier for the message's role.
func (m MessageRole) FriendlyRole() string {
switch m {
case MessageRoleUser:
return "You"
case MessageRoleSystem:
return "System"
case MessageRoleAssistant:
return "Assistant"
case MessageRoleToolCall:
return "Tool Call"
case MessageRoleToolResult:
return "Tool Result"
default:
return string(m)
}
}
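
ApplySystemPrompt above prepends a system message when none exists and only overwrites an existing one when force is true. A minimal illustration, not from the diff; the message contents are made up.

package example

import "git.mlow.ca/mlow/lmcli/pkg/api"

func systemPromptExample() []api.Message {
	msgs := []api.Message{{Role: api.MessageRoleUser, Content: "hi"}}

	// No system message yet, so one is prepended at index 0.
	msgs = api.ApplySystemPrompt(msgs, "You are a helpful assistant.", false)

	// force == false leaves an existing system message untouched...
	msgs = api.ApplySystemPrompt(msgs, "You are terse.", false)

	// ...while force == true replaces its content in place.
	msgs = api.ApplySystemPrompt(msgs, "You are terse.", true)

	return msgs // [system("You are terse."), user("hi")]
}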


@ -11,7 +11,6 @@ import (
"strings"
"git.mlow.ca/mlow/lmcli/pkg/api"
"git.mlow.ca/mlow/lmcli/pkg/provider"
)
const ANTHROPIC_VERSION = "2023-06-01"
@ -118,7 +117,7 @@ func convertTools(tools []api.ToolSpec) []Tool {
}
func createChatCompletionRequest(
params provider.RequestParameters,
params api.RequestParameters,
messages []api.Message,
) (string, ChatCompletionRequest) {
requestMessages := make([]ChatCompletionMessage, 0, len(messages))
@ -189,8 +188,7 @@ func createChatCompletionRequest(
}
var prefill string
if len(messages) > 0 && messages[len(messages)-1].Role == api.MessageRoleAssistant {
// Prompting on an assistant message, use its content as prefill
if api.IsAssistantContinuation(messages) {
prefill = messages[len(messages)-1].Content
}
@ -228,7 +226,7 @@ func (c *AnthropicClient) sendRequest(ctx context.Context, r ChatCompletionReque
func (c *AnthropicClient) CreateChatCompletion(
ctx context.Context,
params provider.RequestParameters,
params api.RequestParameters,
messages []api.Message,
) (*api.Message, error) {
if len(messages) == 0 {
@ -255,9 +253,9 @@ func (c *AnthropicClient) CreateChatCompletion(
func (c *AnthropicClient) CreateChatCompletionStream(
ctx context.Context,
params provider.RequestParameters,
params api.RequestParameters,
messages []api.Message,
output chan<- provider.Chunk,
output chan<- api.Chunk,
) (*api.Message, error) {
if len(messages) == 0 {
return nil, fmt.Errorf("can't create completion from no messages")
@ -351,10 +349,9 @@ func (c *AnthropicClient) CreateChatCompletionStream(
firstChunkReceived = true
}
block.Text += text
output <- provider.Chunk{
output <- api.Chunk{
Content: text,
// rough, anthropic performs some chunking
TokenCount: uint(len(strings.Split(text, " "))),
TokenCount: 1,
}
}
case "input_json_delta":
@ -439,9 +436,15 @@ func convertResponseToMessage(resp ChatCompletionResponse) (*api.Message, error)
}
}
if len(toolCalls) > 0 {
return api.NewMessageWithToolCalls(content.String(), toolCalls), nil
message := &api.Message{
Role: api.MessageRoleAssistant,
Content: content.String(),
ToolCalls: toolCalls,
}
return api.NewMessageWithAssistant(content.String()), nil
if len(toolCalls) > 0 {
message.Role = api.MessageRoleToolCall
}
return message, nil
}


@ -11,7 +11,6 @@ import (
"strings"
"git.mlow.ca/mlow/lmcli/pkg/api"
"git.mlow.ca/mlow/lmcli/pkg/provider"
)
type Client struct {
@ -173,7 +172,7 @@ func convertToolResultsToGemini(toolResults []api.ToolResult) ([]FunctionRespons
}
func createGenerateContentRequest(
params provider.RequestParameters,
params api.RequestParameters,
messages []api.Message,
) (*GenerateContentRequest, error) {
requestContents := make([]Content, 0, len(messages))
@ -280,7 +279,7 @@ func (c *Client) sendRequest(req *http.Request) (*http.Response, error) {
func (c *Client) CreateChatCompletion(
ctx context.Context,
params provider.RequestParameters,
params api.RequestParameters,
messages []api.Message,
) (*api.Message, error) {
if len(messages) == 0 {
@ -337,17 +336,24 @@ func (c *Client) CreateChatCompletion(
}
if len(toolCalls) > 0 {
return api.NewMessageWithToolCalls(content, convertToolCallToAPI(toolCalls)), nil
return &api.Message{
Role: api.MessageRoleToolCall,
Content: content,
ToolCalls: convertToolCallToAPI(toolCalls),
}, nil
}
return api.NewMessageWithAssistant(content), nil
return &api.Message{
Role: api.MessageRoleAssistant,
Content: content,
}, nil
}
func (c *Client) CreateChatCompletionStream(
ctx context.Context,
params provider.RequestParameters,
params api.RequestParameters,
messages []api.Message,
output chan<- provider.Chunk,
output chan<- api.Chunk,
) (*api.Message, error) {
if len(messages) == 0 {
return nil, fmt.Errorf("Can't create completion from no messages")
@ -419,7 +425,7 @@ func (c *Client) CreateChatCompletionStream(
if part.FunctionCall != nil {
toolCalls = append(toolCalls, *part.FunctionCall)
} else if part.Text != "" {
output <- provider.Chunk{
output <- api.Chunk{
Content: part.Text,
TokenCount: uint(tokens),
}
@ -428,9 +434,17 @@ func (c *Client) CreateChatCompletionStream(
}
}
// If there are function calls, handle them and recurse
if len(toolCalls) > 0 {
return api.NewMessageWithToolCalls(content.String(), convertToolCallToAPI(toolCalls)), nil
return &api.Message{
Role: api.MessageRoleToolCall,
Content: content.String(),
ToolCalls: convertToolCallToAPI(toolCalls),
}, nil
}
return api.NewMessageWithAssistant(content.String()), nil
return &api.Message{
Role: api.MessageRoleAssistant,
Content: content.String(),
}, nil
}


@ -11,7 +11,6 @@ import (
"strings"
"git.mlow.ca/mlow/lmcli/pkg/api"
"git.mlow.ca/mlow/lmcli/pkg/provider"
)
type OllamaClient struct {
@ -43,7 +42,7 @@ type OllamaResponse struct {
}
func createOllamaRequest(
params provider.RequestParameters,
params api.RequestParameters,
messages []api.Message,
) OllamaRequest {
requestMessages := make([]OllamaMessage, 0, len(messages))
@ -83,7 +82,7 @@ func (c *OllamaClient) sendRequest(req *http.Request) (*http.Response, error) {
func (c *OllamaClient) CreateChatCompletion(
ctx context.Context,
params provider.RequestParameters,
params api.RequestParameters,
messages []api.Message,
) (*api.Message, error) {
if len(messages) == 0 {
@ -115,14 +114,17 @@ func (c *OllamaClient) CreateChatCompletion(
return nil, err
}
return api.NewMessageWithAssistant(completionResp.Message.Content), nil
return &api.Message{
Role: api.MessageRoleAssistant,
Content: completionResp.Message.Content,
}, nil
}
func (c *OllamaClient) CreateChatCompletionStream(
ctx context.Context,
params provider.RequestParameters,
params api.RequestParameters,
messages []api.Message,
output chan<- provider.Chunk,
output chan<- api.Chunk,
) (*api.Message, error) {
if len(messages) == 0 {
return nil, fmt.Errorf("Can't create completion from no messages")
@ -171,7 +173,7 @@ func (c *OllamaClient) CreateChatCompletionStream(
}
if len(streamResp.Message.Content) > 0 {
output <- provider.Chunk{
output <- api.Chunk{
Content: streamResp.Message.Content,
TokenCount: 1,
}
@ -179,5 +181,8 @@ func (c *OllamaClient) CreateChatCompletionStream(
}
}
return api.NewMessageWithAssistant(content.String()), nil
return &api.Message{
Role: api.MessageRoleAssistant,
Content: content.String(),
}, nil
}


@ -11,13 +11,11 @@ import (
"strings"
"git.mlow.ca/mlow/lmcli/pkg/api"
"git.mlow.ca/mlow/lmcli/pkg/provider"
)
type OpenAIClient struct {
APIKey string
BaseURL string
Headers map[string]string
}
type ChatCompletionMessage struct {
@ -141,7 +139,7 @@ func convertToolCallToAPI(toolCalls []ToolCall) []api.ToolCall {
}
func createChatCompletionRequest(
params provider.RequestParameters,
params api.RequestParameters,
messages []api.Message,
) ChatCompletionRequest {
requestMessages := make([]ChatCompletionMessage, 0, len(messages))
@ -200,9 +198,6 @@ func (c *OpenAIClient) sendRequest(ctx context.Context, r ChatCompletionRequest)
req.Header.Set("Content-Type", "application/json")
req.Header.Set("Authorization", "Bearer "+c.APIKey)
for header, val := range c.Headers {
req.Header.Set(header, val)
}
client := &http.Client{}
resp, err := client.Do(req)
@ -220,7 +215,7 @@ func (c *OpenAIClient) sendRequest(ctx context.Context, r ChatCompletionRequest)
func (c *OpenAIClient) CreateChatCompletion(
ctx context.Context,
params provider.RequestParameters,
params api.RequestParameters,
messages []api.Message,
) (*api.Message, error) {
if len(messages) == 0 {
@ -253,17 +248,24 @@ func (c *OpenAIClient) CreateChatCompletion(
toolCalls := choice.Message.ToolCalls
if len(toolCalls) > 0 {
return api.NewMessageWithToolCalls(content, convertToolCallToAPI(toolCalls)), nil
return &api.Message{
Role: api.MessageRoleToolCall,
Content: content,
ToolCalls: convertToolCallToAPI(toolCalls),
}, nil
}
return api.NewMessageWithAssistant(content), nil
return &api.Message{
Role: api.MessageRoleAssistant,
Content: content,
}, nil
}
func (c *OpenAIClient) CreateChatCompletionStream(
ctx context.Context,
params provider.RequestParameters,
params api.RequestParameters,
messages []api.Message,
output chan<- provider.Chunk,
output chan<- api.Chunk,
) (*api.Message, error) {
if len(messages) == 0 {
return nil, fmt.Errorf("Can't create completion from no messages")
@ -327,7 +329,7 @@ func (c *OpenAIClient) CreateChatCompletionStream(
}
}
if len(delta.Content) > 0 {
output <- provider.Chunk{
output <- api.Chunk{
Content: delta.Content,
TokenCount: 1,
}
@ -336,8 +338,15 @@ func (c *OpenAIClient) CreateChatCompletionStream(
}
if len(toolCalls) > 0 {
return api.NewMessageWithToolCalls(content.String(), convertToolCallToAPI(toolCalls)), nil
return &api.Message{
Role: api.MessageRoleToolCall,
Content: content.String(),
ToolCalls: convertToolCallToAPI(toolCalls),
}, nil
}
return api.NewMessageWithAssistant(content.String()), nil
return &api.Message{
Role: api.MessageRoleAssistant,
Content: content.String(),
}, nil
}

pkg/api/tools.go (new file, 98 lines)

@ -0,0 +1,98 @@
package api
import (
"database/sql/driver"
"encoding/json"
"fmt"
)
type ToolSpec struct {
Name string
Description string
Parameters []ToolParameter
Impl func(*ToolSpec, map[string]interface{}) (string, error)
}
type ToolParameter struct {
Name string `json:"name"`
Type string `json:"type"` // "string", "integer", "boolean"
Required bool `json:"required"`
Description string `json:"description"`
Enum []string `json:"enum,omitempty"`
}
type ToolCall struct {
ID string `json:"id" yaml:"-"`
Name string `json:"name" yaml:"tool"`
Parameters map[string]interface{} `json:"parameters" yaml:"parameters"`
}
type ToolResult struct {
ToolCallID string `json:"toolCallID" yaml:"-"`
ToolName string `json:"toolName,omitempty" yaml:"tool"`
Result string `json:"result,omitempty" yaml:"result"`
}
type ToolCalls []ToolCall
func (tc *ToolCalls) Scan(value any) (err error) {
s := value.(string)
if value == nil || s == "" {
*tc = nil
return
}
err = json.Unmarshal([]byte(s), tc)
return
}
func (tc ToolCalls) Value() (driver.Value, error) {
if len(tc) == 0 {
return "", nil
}
jsonBytes, err := json.Marshal(tc)
if err != nil {
return "", fmt.Errorf("Could not marshal ToolCalls to JSON: %v\n", err)
}
return string(jsonBytes), nil
}
type ToolResults []ToolResult
func (tr *ToolResults) Scan(value any) (err error) {
s := value.(string)
if value == nil || s == "" {
*tr = nil
return
}
err = json.Unmarshal([]byte(s), tr)
return
}
func (tr ToolResults) Value() (driver.Value, error) {
if len(tr) == 0 {
return "", nil
}
jsonBytes, err := json.Marshal([]ToolResult(tr))
if err != nil {
return "", fmt.Errorf("Could not marshal ToolResults to JSON: %v\n", err)
}
return string(jsonBytes), nil
}
type CallResult struct {
Message string `json:"message"`
Result any `json:"result,omitempty"`
}
func (r CallResult) ToJson() (string, error) {
if r.Message == "" {
// When message not supplied, assume success
r.Message = "success"
}
jsonBytes, err := json.Marshal(r)
if err != nil {
return "", fmt.Errorf("Could not marshal CallResult to JSON: %v\n", err)
}
return string(jsonBytes), nil
}
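
ToolCalls and ToolResults above implement database/sql's Valuer and Scanner so GORM can persist them as JSON text columns. A small round-trip sketch, not part of the diff; the tool name and parameters are invented.

package example

import (
	"fmt"

	"git.mlow.ca/mlow/lmcli/pkg/api"
)

func toolCallsRoundTrip() error {
	calls := api.ToolCalls{
		{ID: "call_1", Name: "read_file", Parameters: map[string]interface{}{"path": "go.mod"}},
	}

	// Value() produces the JSON string written to the column.
	v, err := calls.Value()
	if err != nil {
		return err
	}
	fmt.Println(v) // e.g. [{"id":"call_1","name":"read_file","parameters":{"path":"go.mod"}}]

	// Scan() restores the slice when the row is read back.
	var restored api.ToolCalls
	if err := restored.Scan(v.(string)); err != nil {
		return err
	}
	fmt.Println(restored[0].Name) // read_file
	return nil
}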


@ -6,7 +6,6 @@ import (
cmdutil "git.mlow.ca/mlow/lmcli/pkg/cmd/util"
"git.mlow.ca/mlow/lmcli/pkg/lmcli"
"git.mlow.ca/mlow/lmcli/pkg/tui"
"git.mlow.ca/mlow/lmcli/pkg/tui/shared"
"github.com/spf13/cobra"
)
@ -20,30 +19,17 @@ func ChatCmd(ctx *lmcli.Context) *cobra.Command {
if err != nil {
return err
}
var opts []tui.LaunchOption
list, err := cmd.Flags().GetBool("list")
if err != nil {
return err
shortname := ""
if len(args) == 1 {
shortname = args[0]
}
if !list && len(args) == 1 {
shortname := args[0]
if shortname != "" {
conv, err := cmdutil.LookupConversationE(ctx, shortname)
_, err := cmdutil.LookupConversationE(ctx, shortname)
if err != nil {
return err
}
opts = append(opts, tui.WithInitialConversation(conv))
}
}
if list {
opts = append(opts, tui.WithInitialView(shared.ViewConversations))
}
err = tui.Launch(ctx, opts...)
err = tui.Launch(ctx, shortname)
if err != nil {
return fmt.Errorf("Error fetching LLM response: %v", err)
}
@ -54,13 +40,9 @@ func ChatCmd(ctx *lmcli.Context) *cobra.Command {
if len(args) != 0 {
return nil, compMode
}
return ctx.Conversations.ConversationShortNameCompletions(toComplete), compMode
return ctx.Store.ConversationShortNameCompletions(toComplete), compMode
},
}
// -l, --list
cmd.Flags().BoolP("list", "l", false, "View/manage conversations")
applyGenerationFlags(ctx, cmd)
return cmd
}


@ -27,7 +27,7 @@ func CloneCmd(ctx *lmcli.Context) *cobra.Command {
return err
}
clone, messageCnt, err := ctx.Conversations.CloneConversation(*toClone)
clone, messageCnt, err := ctx.Store.CloneConversation(*toClone)
if err != nil {
return fmt.Errorf("Failed to clone conversation: %v", err)
}
@ -40,7 +40,7 @@ func CloneCmd(ctx *lmcli.Context) *cobra.Command {
if len(args) != 0 {
return nil, compMode
}
return ctx.Conversations.ConversationShortNameCompletions(toComplete), compMode
return ctx.Store.ConversationShortNameCompletions(toComplete), compMode
},
}
return cmd


@ -83,7 +83,7 @@ func validateGenerationFlags(ctx *lmcli.Context, cmd *cobra.Command) error {
if err != nil {
return fmt.Errorf("Error parsing --agent: %w", err)
}
if agent != "" && agent != "none" && !slices.Contains(ctx.GetAgents(), agent) {
if agent != "" && !slices.Contains(ctx.GetAgents(), agent) {
return fmt.Errorf("Unknown agent: %s", agent)
}
return nil


@ -29,9 +29,9 @@ func ContinueCmd(ctx *lmcli.Context) *cobra.Command {
}
shortName := args[0]
c := cmdutil.LookupConversation(ctx, shortName)
conversation := cmdutil.LookupConversation(ctx, shortName)
messages, err := ctx.Conversations.PathToLeaf(c.SelectedRoot)
messages, err := ctx.Store.PathToLeaf(conversation.SelectedRoot)
if err != nil {
return fmt.Errorf("could not retrieve conversation messages: %v", err)
}
@ -58,7 +58,7 @@ func ContinueCmd(ctx *lmcli.Context) *cobra.Command {
lastMessage.Content += strings.TrimRight(continuedOutput.Content, "\n\t ")
// Update the original message
err = ctx.Conversations.UpdateMessage(lastMessage)
err = ctx.Store.UpdateMessage(lastMessage)
if err != nil {
return fmt.Errorf("could not update the last message: %v", err)
}
@ -70,7 +70,7 @@ func ContinueCmd(ctx *lmcli.Context) *cobra.Command {
if len(args) != 0 {
return nil, compMode
}
return ctx.Conversations.ConversationShortNameCompletions(toComplete), compMode
return ctx.Store.ConversationShortNameCompletions(toComplete), compMode
},
}
applyGenerationFlags(ctx, cmd)


@ -22,11 +22,11 @@ func EditCmd(ctx *lmcli.Context) *cobra.Command {
},
RunE: func(cmd *cobra.Command, args []string) error {
shortName := args[0]
c := cmdutil.LookupConversation(ctx, shortName)
conversation := cmdutil.LookupConversation(ctx, shortName)
messages, err := ctx.Conversations.PathToLeaf(c.SelectedRoot)
messages, err := ctx.Store.PathToLeaf(conversation.SelectedRoot)
if err != nil {
return fmt.Errorf("Could not retrieve messages for conversation: %s", c.Title)
return fmt.Errorf("Could not retrieve messages for conversation: %s", conversation.Title)
}
offset, _ := cmd.Flags().GetInt("offset")
@ -62,11 +62,11 @@ func EditCmd(ctx *lmcli.Context) *cobra.Command {
// Update the message in-place
inplace, _ := cmd.Flags().GetBool("in-place")
if inplace {
return ctx.Conversations.UpdateMessage(&toEdit)
return ctx.Store.UpdateMessage(&toEdit)
}
// Otherwise, create a branch for the edited message
message, _, err := ctx.Conversations.CloneBranch(toEdit)
message, _, err := ctx.Store.CloneBranch(toEdit)
if err != nil {
return err
}
@ -74,11 +74,11 @@ func EditCmd(ctx *lmcli.Context) *cobra.Command {
if desiredIdx > 0 {
// update selected reply
messages[desiredIdx-1].SelectedReply = message
err = ctx.Conversations.UpdateMessage(&messages[desiredIdx-1])
err = ctx.Store.UpdateMessage(&messages[desiredIdx-1])
} else {
// update selected root
c.SelectedRoot = message
err = ctx.Conversations.UpdateConversation(c)
conversation.SelectedRoot = message
err = ctx.Store.UpdateConversation(conversation)
}
return err
},
@ -87,7 +87,7 @@ func EditCmd(ctx *lmcli.Context) *cobra.Command {
if len(args) != 0 {
return nil, compMode
}
return ctx.Conversations.ConversationShortNameCompletions(toComplete), compMode
return ctx.Store.ConversationShortNameCompletions(toComplete), compMode
},
}


@ -20,9 +20,9 @@ func ListCmd(ctx *lmcli.Context) *cobra.Command {
Short: "List conversations",
Long: `List conversations in order of recent activity`,
RunE: func(cmd *cobra.Command, args []string) error {
list, err := ctx.Conversations.LoadConversationList()
messages, err := ctx.Store.LatestConversationMessages()
if err != nil {
return fmt.Errorf("Could not load conversations: %v", err)
return fmt.Errorf("Could not fetch conversations: %v", err)
}
type Category struct {
@ -57,12 +57,12 @@ func ListCmd(ctx *lmcli.Context) *cobra.Command {
all, _ := cmd.Flags().GetBool("all")
for _, item := range list.Items {
age := now.Sub(item.LastMessageAt)
for _, message := range messages {
messageAge := now.Sub(message.CreatedAt)
var category string
for _, c := range categories {
if age < c.cutoff {
if messageAge < c.cutoff {
category = c.name
break
}
@ -70,14 +70,14 @@ func ListCmd(ctx *lmcli.Context) *cobra.Command {
formatted := fmt.Sprintf(
"%s - %s - %s",
item.ShortName,
util.HumanTimeElapsedSince(age),
item.Title,
message.Conversation.ShortName.String,
util.HumanTimeElapsedSince(messageAge),
message.Conversation.Title,
)
categorized[category] = append(
categorized[category],
ConversationLine{age, formatted},
ConversationLine{messageAge, formatted},
)
}
@ -93,7 +93,7 @@ func ListCmd(ctx *lmcli.Context) *cobra.Command {
fmt.Printf("%s:\n", category.name)
for _, conv := range conversationLines {
if conversationsPrinted >= count && !all {
fmt.Printf("%d remaining conversation(s), use --all to view.\n", list.Total-conversationsPrinted)
fmt.Printf("%d remaining conversation(s), use --all to view.\n", len(messages)-conversationsPrinted)
break outer
}


@ -5,7 +5,6 @@ import (
"git.mlow.ca/mlow/lmcli/pkg/api"
cmdutil "git.mlow.ca/mlow/lmcli/pkg/cmd/util"
"git.mlow.ca/mlow/lmcli/pkg/conversation"
"git.mlow.ca/mlow/lmcli/pkg/lmcli"
"github.com/spf13/cobra"
)
@ -26,12 +25,12 @@ func NewCmd(ctx *lmcli.Context) *cobra.Command {
return fmt.Errorf("No message was provided.")
}
messages := []conversation.Message{{
messages := []api.Message{{
Role: api.MessageRoleUser,
Content: input,
}}
conversation, messages, err := ctx.Conversations.StartConversation(messages...)
conversation, messages, err := ctx.Store.StartConversation(messages...)
if err != nil {
return fmt.Errorf("Could not start a new conversation: %v", err)
}
@ -44,7 +43,7 @@ func NewCmd(ctx *lmcli.Context) *cobra.Command {
}
conversation.Title = title
err = ctx.Conversations.UpdateConversation(conversation)
err = ctx.Store.UpdateConversation(conversation)
if err != nil {
lmcli.Warn("Could not save conversation title: %v\n", err)
}


@ -5,7 +5,6 @@ import (
"git.mlow.ca/mlow/lmcli/pkg/api"
cmdutil "git.mlow.ca/mlow/lmcli/pkg/cmd/util"
"git.mlow.ca/mlow/lmcli/pkg/conversation"
"git.mlow.ca/mlow/lmcli/pkg/lmcli"
"github.com/spf13/cobra"
)
@ -26,7 +25,7 @@ func PromptCmd(ctx *lmcli.Context) *cobra.Command {
return fmt.Errorf("No message was provided.")
}
messages := []conversation.Message{{
messages := []api.Message{{
Role: api.MessageRoleUser,
Content: input,
}}


@ -4,8 +4,8 @@ import (
"fmt"
"strings"
"git.mlow.ca/mlow/lmcli/pkg/api"
cmdutil "git.mlow.ca/mlow/lmcli/pkg/cmd/util"
"git.mlow.ca/mlow/lmcli/pkg/conversation"
"git.mlow.ca/mlow/lmcli/pkg/lmcli"
"github.com/spf13/cobra"
)
@ -23,14 +23,14 @@ func RemoveCmd(ctx *lmcli.Context) *cobra.Command {
return nil
},
RunE: func(cmd *cobra.Command, args []string) error {
var toRemove []*conversation.Conversation
var toRemove []*api.Conversation
for _, shortName := range args {
conversation := cmdutil.LookupConversation(ctx, shortName)
toRemove = append(toRemove, conversation)
}
var errors []error
for _, c := range toRemove {
err := ctx.Conversations.DeleteConversation(c)
err := ctx.Store.DeleteConversation(c)
if err != nil {
errors = append(errors, fmt.Errorf("Could not remove conversation %s: %v", c.ShortName.String, err))
}
@ -44,7 +44,7 @@ func RemoveCmd(ctx *lmcli.Context) *cobra.Command {
compMode := cobra.ShellCompDirectiveNoFileComp
var completions []string
outer:
for _, completion := range ctx.Conversations.ConversationShortNameCompletions(toComplete) {
for _, completion := range ctx.Store.ConversationShortNameCompletions(toComplete) {
parts := strings.Split(completion, "\t")
for _, arg := range args {
if parts[0] == arg {


@ -30,7 +30,7 @@ func RenameCmd(ctx *lmcli.Context) *cobra.Command {
generate, _ := cmd.Flags().GetBool("generate")
if generate {
messages, err := ctx.Conversations.PathToLeaf(conversation.SelectedRoot)
messages, err := ctx.Store.PathToLeaf(conversation.SelectedRoot)
if err != nil {
return fmt.Errorf("Could not retrieve conversation messages: %v", err)
}
@ -46,7 +46,7 @@ func RenameCmd(ctx *lmcli.Context) *cobra.Command {
}
conversation.Title = title
err = ctx.Conversations.UpdateConversation(conversation)
err = ctx.Store.UpdateConversation(conversation)
if err != nil {
lmcli.Warn("Could not update conversation title: %v\n", err)
}
@ -57,7 +57,7 @@ func RenameCmd(ctx *lmcli.Context) *cobra.Command {
if len(args) != 0 {
return nil, compMode
}
return ctx.Conversations.ConversationShortNameCompletions(toComplete), compMode
return ctx.Store.ConversationShortNameCompletions(toComplete), compMode
},
}


@ -5,7 +5,6 @@ import (
"git.mlow.ca/mlow/lmcli/pkg/api"
cmdutil "git.mlow.ca/mlow/lmcli/pkg/cmd/util"
"git.mlow.ca/mlow/lmcli/pkg/conversation"
"git.mlow.ca/mlow/lmcli/pkg/lmcli"
"github.com/spf13/cobra"
)
@ -29,14 +28,14 @@ func ReplyCmd(ctx *lmcli.Context) *cobra.Command {
}
shortName := args[0]
c := cmdutil.LookupConversation(ctx, shortName)
conversation := cmdutil.LookupConversation(ctx, shortName)
reply := inputFromArgsOrEditor(args[1:], "# How would you like to reply?\n", "")
if reply == "" {
return fmt.Errorf("No reply was provided.")
}
cmdutil.HandleConversationReply(ctx, c, true, conversation.Message{
cmdutil.HandleConversationReply(ctx, conversation, true, api.Message{
Role: api.MessageRoleUser,
Content: reply,
})
@ -47,7 +46,7 @@ func ReplyCmd(ctx *lmcli.Context) *cobra.Command {
if len(args) != 0 {
return nil, compMode
}
return ctx.Conversations.ConversationShortNameCompletions(toComplete), compMode
return ctx.Store.ConversationShortNameCompletions(toComplete), compMode
},
}


@ -28,12 +28,12 @@ func RetryCmd(ctx *lmcli.Context) *cobra.Command {
}
shortName := args[0]
c := cmdutil.LookupConversation(ctx, shortName)
conversation := cmdutil.LookupConversation(ctx, shortName)
// Load the complete thread from the root message
messages, err := ctx.Conversations.PathToLeaf(c.SelectedRoot)
messages, err := ctx.Store.PathToLeaf(conversation.SelectedRoot)
if err != nil {
return fmt.Errorf("Could not retrieve messages for conversation: %s", c.Title)
return fmt.Errorf("Could not retrieve messages for conversation: %s", conversation.Title)
}
offset, _ := cmd.Flags().GetInt("offset")
@ -67,7 +67,7 @@ func RetryCmd(ctx *lmcli.Context) *cobra.Command {
if len(args) != 0 {
return nil, compMode
}
return ctx.Conversations.ConversationShortNameCompletions(toComplete), compMode
return ctx.Store.ConversationShortNameCompletions(toComplete), compMode
},
}


@ -9,8 +9,6 @@ import (
"time"
"git.mlow.ca/mlow/lmcli/pkg/api"
"git.mlow.ca/mlow/lmcli/pkg/provider"
"git.mlow.ca/mlow/lmcli/pkg/conversation"
"git.mlow.ca/mlow/lmcli/pkg/lmcli"
"git.mlow.ca/mlow/lmcli/pkg/util"
"github.com/charmbracelet/lipgloss"
@ -18,13 +16,13 @@ import (
// Prompt prompts the configured model and streams the response
// to stdout. Returns all model reply messages.
func Prompt(ctx *lmcli.Context, messages []conversation.Message, callback func(conversation.Message)) (*api.Message, error) {
m, _, p, err := ctx.GetModelProvider(*ctx.Config.Defaults.Model, "")
func Prompt(ctx *lmcli.Context, messages []api.Message, callback func(api.Message)) (*api.Message, error) {
m, provider, err := ctx.GetModelProvider(*ctx.Config.Defaults.Model)
if err != nil {
return nil, err
}
params := provider.RequestParameters{
params := api.RequestParameters{
Model: m,
MaxTokens: *ctx.Config.Defaults.MaxTokens,
Temperature: *ctx.Config.Defaults.Temperature,
@ -41,17 +39,17 @@ func Prompt(ctx *lmcli.Context, messages []conversation.Message, callback func(c
}
if system != "" {
messages = conversation.ApplySystemPrompt(messages, system, false)
messages = api.ApplySystemPrompt(messages, system, false)
}
content := make(chan provider.Chunk)
content := make(chan api.Chunk)
defer close(content)
// render the content received over the channel
go ShowDelayedContent(content)
reply, err := p.CreateChatCompletionStream(
context.Background(), params, conversation.MessagesToAPI(messages), content,
reply, err := provider.CreateChatCompletionStream(
context.Background(), params, messages, content,
)
if reply.Content != "" {
@ -68,8 +66,8 @@ func Prompt(ctx *lmcli.Context, messages []conversation.Message, callback func(c
// lookupConversation either returns the conversation found by the
// short name or exits the program
func LookupConversation(ctx *lmcli.Context, shortName string) *conversation.Conversation {
c, err := ctx.Conversations.FindConversationByShortName(shortName)
func LookupConversation(ctx *lmcli.Context, shortName string) *api.Conversation {
c, err := ctx.Store.ConversationByShortName(shortName)
if err != nil {
lmcli.Fatal("Could not lookup conversation: %v\n", err)
}
@ -79,8 +77,8 @@ func LookupConversation(ctx *lmcli.Context, shortName string) *conversation.Conv
return c
}
func LookupConversationE(ctx *lmcli.Context, shortName string) (*conversation.Conversation, error) {
c, err := ctx.Conversations.FindConversationByShortName(shortName)
func LookupConversationE(ctx *lmcli.Context, shortName string) (*api.Conversation, error) {
c, err := ctx.Store.ConversationByShortName(shortName)
if err != nil {
return nil, fmt.Errorf("Could not lookup conversation: %v", err)
}
@ -90,8 +88,8 @@ func LookupConversationE(ctx *lmcli.Context, shortName string) (*conversation.Co
return c, nil
}
func HandleConversationReply(ctx *lmcli.Context, c *conversation.Conversation, persist bool, toSend ...conversation.Message) {
messages, err := ctx.Conversations.PathToLeaf(c.SelectedRoot)
func HandleConversationReply(ctx *lmcli.Context, c *api.Conversation, persist bool, toSend ...api.Message) {
messages, err := ctx.Store.PathToLeaf(c.SelectedRoot)
if err != nil {
lmcli.Fatal("Could not load messages: %v\n", err)
}
@ -100,40 +98,40 @@ func HandleConversationReply(ctx *lmcli.Context, c *conversation.Conversation, p
// HandleReply handles sending messages to an existing
// conversation, optionally persisting both the sent replies and responses.
func HandleReply(ctx *lmcli.Context, to *conversation.Message, persist bool, messages ...conversation.Message) {
func HandleReply(ctx *lmcli.Context, to *api.Message, persist bool, messages ...api.Message) {
if to == nil {
lmcli.Fatal("Can't prompt from an empty message.")
}
existing, err := ctx.Conversations.PathToRoot(to)
existing, err := ctx.Store.PathToRoot(to)
if err != nil {
lmcli.Fatal("Could not load messages: %v\n", err)
}
RenderConversation(ctx, append(existing, messages...), true)
var savedReplies []conversation.Message
var savedReplies []api.Message
if persist && len(messages) > 0 {
savedReplies, err = ctx.Conversations.Reply(to, messages...)
savedReplies, err = ctx.Store.Reply(to, messages...)
if err != nil {
lmcli.Warn("Could not save messages: %v\n", err)
}
}
// render a message header with no contents
RenderMessage(ctx, (&conversation.Message{Role: api.MessageRoleAssistant}))
RenderMessage(ctx, (&api.Message{Role: api.MessageRoleAssistant}))
var lastSavedMessage *conversation.Message
var lastSavedMessage *api.Message
lastSavedMessage = to
if len(savedReplies) > 0 {
lastSavedMessage = &savedReplies[len(savedReplies)-1]
}
replyCallback := func(reply conversation.Message) {
replyCallback := func(reply api.Message) {
if !persist {
return
}
savedReplies, err = ctx.Conversations.Reply(lastSavedMessage, reply)
savedReplies, err = ctx.Store.Reply(lastSavedMessage, reply)
if err != nil {
lmcli.Warn("Could not save reply: %v\n", err)
}
@ -146,7 +144,7 @@ func HandleReply(ctx *lmcli.Context, to *conversation.Message, persist bool, mes
}
}
func FormatForExternalPrompt(messages []conversation.Message, system bool) string {
func FormatForExternalPrompt(messages []api.Message, system bool) string {
sb := strings.Builder{}
for _, message := range messages {
if message.Content == "" {
@ -165,7 +163,7 @@ func FormatForExternalPrompt(messages []conversation.Message, system bool) strin
return sb.String()
}
func GenerateTitle(ctx *lmcli.Context, messages []conversation.Message) (string, error) {
func GenerateTitle(ctx *lmcli.Context, messages []api.Message) (string, error) {
const systemPrompt = `You will be shown a conversation between a user and an AI assistant. Your task is to generate a short title (8 words or less) for the provided conversation that reflects the conversation's topic. Your response is expected to be in JSON in the format shown below.
Example conversation:
@ -190,36 +188,36 @@ Example response:
}
// Serialize the conversation to JSON
jsonBytes, err := json.Marshal(msgs)
conversation, err := json.Marshal(msgs)
if err != nil {
return "", err
}
generateRequest := []conversation.Message{
generateRequest := []api.Message{
{
Role: api.MessageRoleSystem,
Content: systemPrompt,
},
{
Role: api.MessageRoleUser,
Content: string(jsonBytes),
Content: string(conversation),
},
}
m, _, p, err := ctx.GetModelProvider(
*ctx.Config.Conversations.TitleGenerationModel, "",
m, provider, err := ctx.GetModelProvider(
*ctx.Config.Conversations.TitleGenerationModel,
)
if err != nil {
return "", err
}
requestParams := provider.RequestParameters{
requestParams := api.RequestParameters{
Model: m,
MaxTokens: 25,
}
response, err := p.CreateChatCompletion(
context.Background(), requestParams, conversation.MessagesToAPI(generateRequest),
response, err := provider.CreateChatCompletion(
context.Background(), requestParams, generateRequest,
)
if err != nil {
return "", err
@ -274,7 +272,7 @@ func ShowWaitAnimation(signal chan any) {
// chunked) content is received on the channel, the waiting animation is
// replaced by the content.
// Blocks until the channel is closed.
func ShowDelayedContent(content <-chan provider.Chunk) {
func ShowDelayedContent(content <-chan api.Chunk) {
waitSignal := make(chan any)
go ShowWaitAnimation(waitSignal)
@ -294,7 +292,7 @@ func ShowDelayedContent(content <-chan provider.Chunk) {
// RenderConversation renders the given messages to TTY, with optional space
// for a subsequent message. spaceForResponse controls how many '\n' characters
// are printed immediately after the final message (1 if false, 2 if true)
func RenderConversation(ctx *lmcli.Context, messages []conversation.Message, spaceForResponse bool) {
func RenderConversation(ctx *lmcli.Context, messages []api.Message, spaceForResponse bool) {
l := len(messages)
for i, message := range messages {
RenderMessage(ctx, &message)
@ -305,7 +303,7 @@ func RenderConversation(ctx *lmcli.Context, messages []conversation.Message, spa
}
}
func RenderMessage(ctx *lmcli.Context, m *conversation.Message) {
func RenderMessage(ctx *lmcli.Context, m *api.Message) {
var messageAge string
if m.CreatedAt.IsZero() {
messageAge = "now"


@ -24,7 +24,7 @@ func ViewCmd(ctx *lmcli.Context) *cobra.Command {
shortName := args[0]
conversation := cmdutil.LookupConversation(ctx, shortName)
messages, err := ctx.Conversations.PathToLeaf(conversation.SelectedRoot)
messages, err := ctx.Store.PathToLeaf(conversation.SelectedRoot)
if err != nil {
return fmt.Errorf("Could not retrieve messages for conversation %s: %v", conversation.ShortName.String, err)
}
@ -37,7 +37,7 @@ func ViewCmd(ctx *lmcli.Context) *cobra.Command {
if len(args) != 0 {
return nil, compMode
}
return ctx.Conversations.ConversationShortNameCompletions(toComplete), compMode
return ctx.Store.ConversationShortNameCompletions(toComplete), compMode
},
}


@ -1,99 +0,0 @@
package conversation
import (
"database/sql"
"database/sql/driver"
"encoding/json"
"fmt"
"time"
"git.mlow.ca/mlow/lmcli/pkg/api"
)
type Conversation struct {
ID uint `gorm:"primaryKey"`
ShortName sql.NullString
Title string
SelectedRootID *uint
SelectedRoot *Message `gorm:"foreignKey:SelectedRootID"`
RootMessages []Message `gorm:"-:all"`
LastMessageAt time.Time
}
type MessageMeta struct {
GenerationProvider *string `json:"generation_provider,omitempty"`
GenerationModel *string `json:"generation_model,omitempty"`
}
type Message struct {
ID uint `gorm:"primaryKey"`
CreatedAt time.Time
Metadata MessageMeta
ConversationID *uint `gorm:"index"`
Conversation *Conversation `gorm:"foreignKey:ConversationID"`
ParentID *uint
Parent *Message `gorm:"foreignKey:ParentID"`
Replies []Message `gorm:"foreignKey:ParentID"`
SelectedReplyID *uint
SelectedReply *Message `gorm:"foreignKey:SelectedReplyID"`
Role api.MessageRole
Content string
ToolCalls ToolCalls // a json array of tool calls (from the model)
ToolResults ToolResults // a json array of tool results
}
func (m *MessageMeta) Scan(value interface{}) error {
return json.Unmarshal(value.([]byte), m)
}
func (m MessageMeta) Value() (driver.Value, error) {
return json.Marshal(m)
}
type ToolCalls []api.ToolCall
func (tc *ToolCalls) Scan(value any) (err error) {
s := value.(string)
if value == nil || s == "" {
*tc = nil
return
}
err = json.Unmarshal([]byte(s), tc)
return
}
func (tc ToolCalls) Value() (driver.Value, error) {
if len(tc) == 0 {
return "", nil
}
jsonBytes, err := json.Marshal(tc)
if err != nil {
return "", fmt.Errorf("Could not marshal ToolCalls to JSON: %v\n", err)
}
return string(jsonBytes), nil
}
type ToolResults []api.ToolResult
func (tr *ToolResults) Scan(value any) (err error) {
s := value.(string)
if value == nil || s == "" {
*tr = nil
return
}
err = json.Unmarshal([]byte(s), tr)
return
}
func (tr ToolResults) Value() (driver.Value, error) {
if len(tr) == 0 {
return "", nil
}
jsonBytes, err := json.Marshal([]api.ToolResult(tr))
if err != nil {
return "", fmt.Errorf("Could not marshal ToolResults to JSON: %v\n", err)
}
return string(jsonBytes), nil
}


@ -1,55 +0,0 @@
package conversation
import (
"git.mlow.ca/mlow/lmcli/pkg/api"
)
// ApplySystemPrompt updates the contents of an existing system Message if it
// exists, or returns a new slice with the system Message prepended.
func ApplySystemPrompt(m []Message, system string, force bool) []Message {
if len(m) > 0 && m[0].Role == api.MessageRoleSystem {
if force {
m[0].Content = system
}
return m
} else {
return append([]Message{{
Role: api.MessageRoleSystem,
Content: system,
}}, m...)
}
}
func MessageToAPI(m Message) api.Message {
return api.Message{
Role: m.Role,
Content: m.Content,
ToolCalls: m.ToolCalls,
ToolResults: m.ToolResults,
}
}
func MessagesToAPI(messages []Message) []api.Message {
ret := make([]api.Message, 0, len(messages))
for _, m := range messages {
ret = append(ret, MessageToAPI(m))
}
return ret
}
func MessageFromAPI(m api.Message) Message {
return Message{
Role: m.Role,
Content: m.Content,
ToolCalls: m.ToolCalls,
ToolResults: m.ToolResults,
}
}
func MessagesFromAPI(messages []api.Message) []Message {
ret := make([]Message, 0, len(messages))
for _, m := range messages {
ret = append(ret, MessageFromAPI(m))
}
return ret
}


@ -15,7 +15,8 @@ type Config struct {
Temperature *float32 `yaml:"temperature" default:"0.2"`
SystemPrompt string `yaml:"systemPrompt,omitempty"`
SystemPromptFile string `yaml:"systemPromptFile,omitempty"`
Agent string `yaml:"agent"`
// CLI only
Agent string `yaml:"-"`
} `yaml:"defaults"`
Conversations *struct {
TitleGenerationModel *string `yaml:"titleGenerationModel" default:"gpt-3.5-turbo"`
@ -31,12 +32,10 @@ type Config struct {
} `yaml:"agents"`
Providers []*struct {
Name string `yaml:"name,omitempty"`
Display string `yaml:"display,omitempty"`
Kind string `yaml:"kind"`
BaseURL string `yaml:"baseUrl,omitempty"`
APIKey string `yaml:"apiKey,omitempty"`
Models []string `yaml:"models"`
Headers map[string]string `yaml:"headers"`
} `yaml:"providers"`
}


@ -1,28 +1,21 @@
package lmcli
import (
"errors"
"fmt"
"io/fs"
"log"
"os"
"path/filepath"
"strings"
"time"
"git.mlow.ca/mlow/lmcli/pkg/agents"
"git.mlow.ca/mlow/lmcli/pkg/api"
"git.mlow.ca/mlow/lmcli/pkg/provider"
"git.mlow.ca/mlow/lmcli/pkg/provider/anthropic"
"git.mlow.ca/mlow/lmcli/pkg/provider/google"
"git.mlow.ca/mlow/lmcli/pkg/provider/ollama"
"git.mlow.ca/mlow/lmcli/pkg/provider/openai"
"git.mlow.ca/mlow/lmcli/pkg/conversation"
"git.mlow.ca/mlow/lmcli/pkg/api/provider/anthropic"
"git.mlow.ca/mlow/lmcli/pkg/api/provider/google"
"git.mlow.ca/mlow/lmcli/pkg/api/provider/ollama"
"git.mlow.ca/mlow/lmcli/pkg/api/provider/openai"
"git.mlow.ca/mlow/lmcli/pkg/util"
"git.mlow.ca/mlow/lmcli/pkg/util/tty"
"gorm.io/driver/sqlite"
"gorm.io/gorm"
"gorm.io/gorm/logger"
)
type Agent struct {
@ -34,7 +27,7 @@ type Agent struct {
type Context struct {
// high level app configuration, may be mutated at runtime
Config Config
Conversations conversation.Repo
Store ConversationStore
Chroma *tty.ChromaHighlighter
}
@ -45,55 +38,23 @@ func NewContext() (*Context, error) {
return nil, err
}
store, err := getConversationService()
databaseFile := filepath.Join(dataDir(), "conversations.db")
db, err := gorm.Open(sqlite.Open(databaseFile), &gorm.Config{
//Logger: logger.Default.LogMode(logger.Info),
})
if err != nil {
return nil, fmt.Errorf("Error establishing connection to store: %v", err)
}
store, err := NewSQLStore(db)
if err != nil {
return nil, err
}
chroma := tty.NewChromaHighlighter("markdown", *config.Chroma.Formatter, *config.Chroma.Style)
return &Context{*config, store, chroma}, nil
}
func createOrOpenAppend(path string) (*os.File, error) {
var file *os.File
if _, err := os.Stat(path); errors.Is(err, os.ErrNotExist) {
file, err = os.Create(path)
if err != nil {
return nil, err
}
} else {
file, err = os.OpenFile(path, os.O_APPEND, fs.ModeAppend)
if err != nil {
return nil, err
}
}
return file, nil
}
func getConversationService() (conversation.Repo, error) {
databaseFile := filepath.Join(dataDir(), "conversations.db")
gormLogFile, err := createOrOpenAppend(filepath.Join(dataDir(), "database.log"))
if err != nil {
return nil, fmt.Errorf("Could not open database log file: %v", err)
}
db, err := gorm.Open(sqlite.Open(databaseFile), &gorm.Config{
Logger: logger.New(log.New(gormLogFile, "\n", log.LstdFlags), logger.Config{
SlowThreshold: 200 * time.Millisecond,
LogLevel: logger.Info,
IgnoreRecordNotFoundError: false,
Colorful: true,
}),
})
if err != nil {
return nil, fmt.Errorf("Error establishing connection to store: %v", err)
}
repo, err := conversation.NewRepo(db)
if err != nil {
return nil, err
}
return repo, nil
}
func (c *Context) GetModels() (models []string) {
modelCounts := make(map[string]int)
for _, p := range c.Config.Providers {
@ -125,7 +86,7 @@ func (c *Context) GetAgents() (agents []string) {
}
func (c *Context) GetAgent(name string) *Agent {
if name == "" || name == "none" {
if name == "" {
return nil
}
@ -162,10 +123,11 @@ func (c *Context) DefaultSystemPrompt() string {
return c.Config.Defaults.SystemPrompt
}
func (c *Context) GetModelProvider(model string, provider string) (string, string, provider.ChatCompletionProvider, error) {
func (c *Context) GetModelProvider(model string) (string, api.ChatCompletionProvider, error) {
parts := strings.Split(model, "@")
if provider == "" && len(parts) > 1 {
var provider string
if len(parts) > 1 {
model = parts[0]
provider = parts[1]
}
@ -188,7 +150,7 @@ func (c *Context) GetModelProvider(model string, provider string) (string, strin
if p.BaseURL != "" {
url = p.BaseURL
}
return model, name, &anthropic.AnthropicClient{
return model, &anthropic.AnthropicClient{
BaseURL: url,
APIKey: p.APIKey,
}, nil
@ -197,7 +159,7 @@ func (c *Context) GetModelProvider(model string, provider string) (string, strin
if p.BaseURL != "" {
url = p.BaseURL
}
return model, name, &google.Client{
return model, &google.Client{
BaseURL: url,
APIKey: p.APIKey,
}, nil
@ -206,7 +168,7 @@ func (c *Context) GetModelProvider(model string, provider string) (string, strin
if p.BaseURL != "" {
url = p.BaseURL
}
return model, name, &ollama.OllamaClient{
return model, &ollama.OllamaClient{
BaseURL: url,
}, nil
case "openai":
@ -214,18 +176,17 @@ func (c *Context) GetModelProvider(model string, provider string) (string, strin
if p.BaseURL != "" {
url = p.BaseURL
}
return model, name, &openai.OpenAIClient{
return model, &openai.OpenAIClient{
BaseURL: url,
APIKey: p.APIKey,
Headers: p.Headers,
}, nil
default:
return "", "", nil, fmt.Errorf("unknown provider kind: %s", p.Kind)
return "", nil, fmt.Errorf("unknown provider kind: %s", p.Kind)
}
}
}
}
return "", "", nil, fmt.Errorf("unknown model: %s", model)
return "", nil, fmt.Errorf("unknown model: %s", model)
}
func configDir() string {

View File

@ -1,4 +1,4 @@
package conversation
package lmcli
import (
"database/sql"
@ -8,57 +8,43 @@ import (
"strings"
"time"
"git.mlow.ca/mlow/lmcli/pkg/api"
sqids "github.com/sqids/sqids-go"
"gorm.io/gorm"
)
// Repo exposes low-level message and conversation management. See
// Service for high-level helpers
type Repo interface {
LoadConversationList() (ConversationList, error)
FindConversationByShortName(shortName string) (*Conversation, error)
type ConversationStore interface {
ConversationByShortName(shortName string) (*api.Conversation, error)
ConversationShortNameCompletions(search string) []string
GetConversationByID(id uint) (*Conversation, error)
GetRootMessages(conversationID uint) ([]Message, error)
RootMessages(conversationID uint) ([]api.Message, error)
LatestConversationMessages() ([]api.Message, error)
CreateConversation(title string) (*Conversation, error)
UpdateConversation(*Conversation) error
DeleteConversation(*Conversation) error
DeleteConversationById(id uint) error
StartConversation(messages ...api.Message) (*api.Conversation, []api.Message, error)
UpdateConversation(conversation *api.Conversation) error
DeleteConversation(conversation *api.Conversation) error
CloneConversation(toClone api.Conversation) (*api.Conversation, uint, error)
GetMessageByID(messageID uint) (*Message, error)
MessageByID(messageID uint) (*api.Message, error)
MessageReplies(messageID uint) ([]api.Message, error)
SaveMessage(message Message) (*Message, error)
UpdateMessage(message *Message) error
DeleteMessage(message *Message, prune bool) error
CloneBranch(toClone Message) (*Message, uint, error)
Reply(to *Message, messages ...Message) ([]Message, error)
UpdateMessage(message *api.Message) error
DeleteMessage(message *api.Message, prune bool) error
CloneBranch(toClone api.Message) (*api.Message, uint, error)
Reply(to *api.Message, messages ...api.Message) ([]api.Message, error)
PathToRoot(message *Message) ([]Message, error)
PathToLeaf(message *Message) ([]Message, error)
// Retrieves and returns the "selected thread" of the conversation.
// The "selected thread" of the conversation is a chain of messages
// starting from the Conversation's SelectedRoot Message, following each
// Message's SelectedReply until the tail Message is reached.
GetSelectedThread(*Conversation) ([]Message, error)
// Start a new conversation with the given messages
StartConversation(messages ...Message) (*Conversation, []Message, error)
CloneConversation(toClone Conversation) (*Conversation, uint, error)
PathToRoot(message *api.Message) ([]api.Message, error)
PathToLeaf(message *api.Message) ([]api.Message, error)
}
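
// Illustrative sketch (not part of this change): constructing a store from an
// existing *gorm.DB handle and resolving a conversation by its short name.
func exampleLookupByShortName(db *gorm.DB, shortName string) (*api.Conversation, error) {
	store, err := NewSQLStore(db)
	if err != nil {
		return nil, err
	}
	return store.ConversationByShortName(shortName)
}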
type repo struct {
type SQLStore struct {
db *gorm.DB
sqids *sqids.Sqids
}
func NewRepo(db *gorm.DB) (Repo, error) {
func NewSQLStore(db *gorm.DB) (*SQLStore, error) {
models := []any{
&Conversation{},
&Message{},
&api.Conversation{},
&api.Message{},
}
for _, x := range models {
@ -69,86 +55,13 @@ func NewRepo(db *gorm.DB) (Repo, error) {
}
_sqids, _ := sqids.New(sqids.Options{MinLength: 4})
return &repo{db, _sqids}, nil
return &SQLStore{db, _sqids}, nil
}
type ConversationListItem struct {
ID uint
ShortName string
Title string
LastMessageAt time.Time
}
type ConversationList struct {
Total int
Items []ConversationListItem
}
// LoadConversationList loads existing conversations, ordered by the date
// of their latest message, from most recent to oldest.
func (s *repo) LoadConversationList() (ConversationList, error) {
list := ConversationList{}
var convos []Conversation
err := s.db.Order("last_message_at DESC").Find(&convos).Error
if err != nil {
return list, err
}
for _, c := range convos {
list.Items = append(list.Items, ConversationListItem{
ID: c.ID,
ShortName: c.ShortName.String,
Title: c.Title,
LastMessageAt: c.LastMessageAt,
})
}
list.Total = len(list.Items)
return list, nil
}
func (s *repo) FindConversationByShortName(shortName string) (*Conversation, error) {
if shortName == "" {
return nil, errors.New("shortName is empty")
}
var conversation Conversation
err := s.db.Preload("SelectedRoot").Where("short_name = ?", shortName).Find(&conversation).Error
return &conversation, err
}
func (s *repo) ConversationShortNameCompletions(shortName string) []string {
var conversations []Conversation
// ignore error for completions
s.db.Find(&conversations)
completions := make([]string, 0, len(conversations))
for _, conversation := range conversations {
if shortName == "" || strings.HasPrefix(conversation.ShortName.String, shortName) {
completions = append(completions, fmt.Sprintf("%s\t%s", conversation.ShortName.String, conversation.Title))
}
}
return completions
}
func (s *repo) GetConversationByID(id uint) (*Conversation, error) {
var conversation Conversation
err := s.db.Preload("SelectedRoot").Where("id = ?", id).Find(&conversation).Error
if err != nil {
return nil, fmt.Errorf("Cannot get conversation %d: %v", id, err)
}
rootMessages, err := s.GetRootMessages(id)
if err != nil {
return nil, fmt.Errorf("Could not load conversation's root messages %d: %v", id, err)
}
conversation.RootMessages = rootMessages
return &conversation, nil
}
func (s *repo) CreateConversation(title string) (*Conversation, error) {
func (s *SQLStore) createConversation() (*api.Conversation, error) {
// Create the new conversation
c := &Conversation{Title: title}
err := s.db.Create(c).Error
c := &api.Conversation{}
err := s.db.Save(c).Error
if err != nil {
return nil, err
}
@ -162,61 +75,159 @@ func (s *repo) CreateConversation(title string) (*Conversation, error) {
return c, nil
}
func (s *repo) UpdateConversation(c *Conversation) error {
func (s *SQLStore) UpdateConversation(c *api.Conversation) error {
if c == nil || c.ID == 0 {
return fmt.Errorf("Conversation is nil or invalid (missing ID)")
}
return s.db.Updates(c).Error
}
func (s *repo) DeleteConversation(c *Conversation) error {
if c == nil || c.ID == 0 {
return fmt.Errorf("Conversation is nil or invalid (missing ID)")
}
return s.DeleteConversationById(c.ID)
}
func (s *repo) DeleteConversationById(id uint) error {
if id == 0 {
return fmt.Errorf("Invalid conversation ID: %d", id)
}
err := s.db.Where("conversation_id = ?", id).Delete(&Message{}).Error
func (s *SQLStore) DeleteConversation(c *api.Conversation) error {
// Delete messages first
err := s.db.Where("conversation_id = ?", c.ID).Delete(&api.Message{}).Error
if err != nil {
return err
}
return s.db.Where("id = ?", id).Delete(&Conversation{}).Error
return s.db.Delete(c).Error
}
func (s *repo) SaveMessage(m Message) (*Message, error) {
if m.Conversation == nil {
return nil, fmt.Errorf("Can't save a message without a conversation (this is a bug)")
}
newMessage := m
newMessage.ID = 0
newMessage.CreatedAt = time.Now()
return &newMessage, s.db.Create(&newMessage).Error
func (s *SQLStore) DeleteMessage(message *api.Message, prune bool) error {
panic("Not yet implemented")
//return s.db.Delete(&message).Error
}
func (s *repo) UpdateMessage(m *Message) error {
func (s *SQLStore) UpdateMessage(m *api.Message) error {
if m == nil || m.ID == 0 {
return fmt.Errorf("Message is nil or invalid (missing ID)")
}
return s.db.Updates(m).Error
}
func (s *repo) DeleteMessage(message *Message, prune bool) error {
return s.db.Delete(&message).Error
func (s *SQLStore) ConversationShortNameCompletions(shortName string) []string {
var conversations []api.Conversation
// ignore error for completions
s.db.Find(&conversations)
completions := make([]string, 0, len(conversations))
for _, conversation := range conversations {
if shortName == "" || strings.HasPrefix(conversation.ShortName.String, shortName) {
completions = append(completions, fmt.Sprintf("%s\t%s", conversation.ShortName.String, conversation.Title))
}
}
return completions
}
func (s *repo) GetMessageByID(messageID uint) (*Message, error) {
var message Message
func (s *SQLStore) ConversationByShortName(shortName string) (*api.Conversation, error) {
if shortName == "" {
return nil, errors.New("shortName is empty")
}
var conversation api.Conversation
err := s.db.Preload("SelectedRoot").Where("short_name = ?", shortName).Find(&conversation).Error
return &conversation, err
}
func (s *SQLStore) RootMessages(conversationID uint) ([]api.Message, error) {
var rootMessages []api.Message
err := s.db.Where("conversation_id = ? AND parent_id IS NULL", conversationID).Find(&rootMessages).Error
if err != nil {
return nil, err
}
return rootMessages, nil
}
func (s *SQLStore) MessageByID(messageID uint) (*api.Message, error) {
var message api.Message
err := s.db.Preload("Parent").Preload("Replies").Preload("SelectedReply").Where("id = ?", messageID).Find(&message).Error
return &message, err
}
// Reply to a message with a series of messages (each followed by the next)
func (s *repo) Reply(to *Message, messages ...Message) ([]Message, error) {
var savedMessages []Message
func (s *SQLStore) MessageReplies(messageID uint) ([]api.Message, error) {
var replies []api.Message
err := s.db.Where("parent_id = ?", messageID).Find(&replies).Error
return replies, err
}
// StartConversation starts a new conversation with the provided messages
func (s *SQLStore) StartConversation(messages ...api.Message) (*api.Conversation, []api.Message, error) {
if len(messages) == 0 {
return nil, nil, fmt.Errorf("Must provide at least 1 message")
}
// Create new conversation
conversation, err := s.createConversation()
if err != nil {
return nil, nil, err
}
// Create first message
messages[0].Conversation = conversation
err = s.db.Create(&messages[0]).Error
if err != nil {
return nil, nil, err
}
// Update conversation's selected root message
conversation.SelectedRoot = &messages[0]
err = s.UpdateConversation(conversation)
if err != nil {
return nil, nil, err
}
// Add additional replies to conversation
if len(messages) > 1 {
newMessages, err := s.Reply(&messages[0], messages[1:]...)
if err != nil {
return nil, nil, err
}
messages = append([]api.Message{messages[0]}, newMessages...)
}
return conversation, messages, nil
}
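
// Illustrative sketch (not part of this change): starting a conversation and
// extending its thread. api.MessageRoleUser and api.MessageRoleAssistant are
// assumed to exist alongside api.MessageRoleSystem.
func exampleStartAndReply(s *SQLStore) error {
	conv, msgs, err := s.StartConversation(api.Message{
		Role:    api.MessageRoleUser,
		Content: "Hello!",
	})
	if err != nil {
		return err
	}
	_ = conv
	// Each message passed to Reply becomes the child of the one before it,
	// starting from the message replied to.
	_, err = s.Reply(&msgs[len(msgs)-1], api.Message{
		Role:    api.MessageRoleAssistant,
		Content: "Hi! How can I help?",
	})
	return err
}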
// CloneConversation clones the given conversation and all of its root messages
func (s *SQLStore) CloneConversation(toClone api.Conversation) (*api.Conversation, uint, error) {
rootMessages, err := s.RootMessages(toClone.ID)
if err != nil {
return nil, 0, err
}
clone, err := s.createConversation()
if err != nil {
return nil, 0, fmt.Errorf("Could not create clone: %s", err)
}
clone.Title = toClone.Title + " - Clone"
var errors []error
var messageCnt uint = 0
for _, root := range rootMessages {
messageCnt++
newRoot := root
newRoot.ConversationID = &clone.ID
cloned, count, err := s.CloneBranch(newRoot)
if err != nil {
errors = append(errors, err)
continue
}
messageCnt += count
if root.ID == *toClone.SelectedRootID {
clone.SelectedRootID = &cloned.ID
if err := s.UpdateConversation(clone); err != nil {
errors = append(errors, fmt.Errorf("Could not set selected root on clone: %v", err))
}
}
}
if len(errors) > 0 {
return nil, 0, fmt.Errorf("Messages failed to be cloned: %v", errors)
}
return clone, messageCnt, nil
}
// Reply to a message with a series of messages (each replying to the previous)
func (s *SQLStore) Reply(to *api.Message, messages ...api.Message) ([]api.Message, error) {
var savedMessages []api.Message
err := s.db.Transaction(func(tx *gorm.DB) error {
currentParent := to
@ -245,26 +256,23 @@ func (s *repo) Reply(to *Message, messages ...Message) ([]Message, error) {
return nil
})
if err != nil {
return savedMessages, err
}
to.Conversation.LastMessageAt = savedMessages[len(savedMessages)-1].CreatedAt
err = s.UpdateConversation(to.Conversation)
return savedMessages, err
}
// CloneBranch returns a deep clone of the given message and its replies, returning
// a new message object. The new message will be attached to the same parent as
// the messageToClone
func (s *repo) CloneBranch(messageToClone Message) (*Message, uint, error) {
func (s *SQLStore) CloneBranch(messageToClone api.Message) (*api.Message, uint, error) {
newMessage := messageToClone
newMessage.ID = 0
newMessage.Replies = nil
newMessage.SelectedReplyID = nil
newMessage.SelectedReply = nil
originalReplies := messageToClone.Replies
originalReplies, err := s.MessageReplies(messageToClone.ID)
if err != nil {
return nil, 0, fmt.Errorf("Could not fetch message %d replies: %v", messageToClone.ID, err)
}
if err := s.db.Create(&newMessage).Error; err != nil {
return nil, 0, fmt.Errorf("Could not clone message: %s", err)
@ -296,19 +304,19 @@ func (s *repo) CloneBranch(messageToClone Message) (*Message, uint, error) {
return &newMessage, replyCount, nil
}
func fetchMessages(db *gorm.DB) ([]Message, error) {
var messages []Message
func fetchMessages(db *gorm.DB) ([]api.Message, error) {
var messages []api.Message
if err := db.Preload("Conversation").Find(&messages).Error; err != nil {
return nil, fmt.Errorf("Could not fetch messages: %v", err)
}
messageMap := make(map[uint]Message)
messageMap := make(map[uint]api.Message)
for i, message := range messages {
messageMap[messages[i].ID] = message
}
// Create a map to store replies by their parent ID
repliesMap := make(map[uint][]Message)
repliesMap := make(map[uint][]api.Message)
for i, message := range messages {
if messages[i].ParentID != nil {
repliesMap[*messages[i].ParentID] = append(repliesMap[*messages[i].ParentID], message)
@ -318,7 +326,7 @@ func fetchMessages(db *gorm.DB) ([]Message, error) {
// Assign replies, parent, and selected reply to each message
for i := range messages {
if replies, exists := repliesMap[messages[i].ID]; exists {
messages[i].Replies = make([]Message, len(replies))
messages[i].Replies = make([]api.Message, len(replies))
for j, m := range replies {
messages[i].Replies[j] = m
}
@ -337,51 +345,21 @@ func fetchMessages(db *gorm.DB) ([]Message, error) {
return messages, nil
}
func (r repo) GetRootMessages(conversationID uint) ([]Message, error) {
var rootMessages []Message
err := r.db.Where("conversation_id = ? AND parent_id IS NULL", conversationID).Find(&rootMessages).Error
if err != nil {
return nil, fmt.Errorf("Could not retrieve root messages for conversation %d: %v", conversationID, err)
}
return rootMessages, nil
}
func (s *repo) buildPath(message *Message, getNext func(*Message) *uint) ([]Message, error) {
var messages []Message
func (s *SQLStore) buildPath(message *api.Message, getNext func(*api.Message) *uint) ([]api.Message, error) {
var messages []api.Message
messages, err := fetchMessages(s.db.Where("conversation_id = ?", message.ConversationID))
if err != nil {
return nil, err
}
// Create a map to store messages by their ID
messageMap := make(map[uint]*Message, len(messages))
messageMap := make(map[uint]*api.Message)
for i := range messages {
messageMap[messages[i].ID] = &messages[i]
}
// Construct Replies
repliesMap := make(map[uint][]*Message, len(messages))
for _, m := range messageMap {
if m.ParentID == nil {
continue
}
if p, ok := messageMap[*m.ParentID]; ok {
repliesMap[p.ID] = append(repliesMap[p.ID], m)
}
}
// Add replies to messages
for _, m := range messageMap {
if replies, ok := repliesMap[m.ID]; ok {
m.Replies = make([]Message, len(replies))
for idx, reply := range replies {
m.Replies[idx] = *reply
}
}
}
// Build the path
var path []Message
var path []api.Message
nextID := &message.ID
for {
@ -404,12 +382,12 @@ func (s *repo) buildPath(message *Message, getNext func(*Message) *uint) ([]Mess
// PathToRoot traverses the provided message's Parent until reaching the tree
// root and returns a slice of all messages traversed in chronological order
// (starting with the root and ending with the message provided)
func (s *repo) PathToRoot(message *Message) ([]Message, error) {
func (s *SQLStore) PathToRoot(message *api.Message) ([]api.Message, error) {
if message == nil || message.ID <= 0 {
return nil, fmt.Errorf("Message is nil or has invalid ID")
}
path, err := s.buildPath(message, func(m *Message) *uint {
path, err := s.buildPath(message, func(m *api.Message) *uint {
return m.ParentID
})
if err != nil {
@ -423,98 +401,33 @@ func (s *repo) PathToRoot(message *Message) ([]Message, error) {
// PathToLeaf traverses the provided message's SelectedReply until reaching a
// tree leaf and returns a slice of all messages traversed in chronological
// order (starting with the message provided and ending with the leaf)
func (s *repo) PathToLeaf(message *Message) ([]Message, error) {
func (s *SQLStore) PathToLeaf(message *api.Message) ([]api.Message, error) {
if message == nil || message.ID <= 0 {
return nil, fmt.Errorf("Message is nil or has invalid ID")
}
return s.buildPath(message, func(m *Message) *uint {
return s.buildPath(message, func(m *api.Message) *uint {
return m.SelectedReplyID
})
}
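
// Illustrative sketch (not part of this change): the "selected thread" of a
// conversation is the chain reached by following SelectedReply pointers down
// from the conversation's SelectedRoot, which is exactly what PathToLeaf
// returns when given that root.
func exampleSelectedThread(s *SQLStore, conv *api.Conversation) ([]api.Message, error) {
	if conv.SelectedRoot == nil {
		return nil, fmt.Errorf("conversation has no selected root")
	}
	return s.PathToLeaf(conv.SelectedRoot)
}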
func (s *repo) StartConversation(messages ...Message) (*Conversation, []Message, error) {
if len(messages) == 0 {
return nil, nil, fmt.Errorf("Must provide at least 1 message")
}
func (s *SQLStore) LatestConversationMessages() ([]api.Message, error) {
var latestMessages []api.Message
subQuery := s.db.Model(&api.Message{}).
Select("MAX(created_at) as max_created_at, conversation_id").
Group("conversation_id")
err := s.db.Model(&api.Message{}).
Joins("JOIN (?) as sub on messages.conversation_id = sub.conversation_id AND messages.created_at = sub.max_created_at", subQuery).
Group("messages.conversation_id").
Order("created_at DESC").
Preload("Conversation").
Find(&latestMessages).Error
// Create new conversation
conversation, err := s.CreateConversation("")
if err != nil {
return nil, nil, err
}
messages[0].Conversation = conversation
// Create first message
firstMessage, err := s.SaveMessage(messages[0])
if err != nil {
return nil, nil, err
}
messages[0] = *firstMessage
// Update conversation's selected root message
conversation.RootMessages = []Message{messages[0]}
conversation.SelectedRoot = &messages[0]
conversation.LastMessageAt = messages[0].CreatedAt
// Add additional replies to conversation
if len(messages) > 1 {
newMessages, err := s.Reply(&messages[0], messages[1:]...)
if err != nil {
return nil, nil, err
}
messages = append([]Message{messages[0]}, newMessages...)
conversation.LastMessageAt = messages[len(messages)-1].CreatedAt
return nil, err
}
err = s.UpdateConversation(conversation)
return conversation, messages, err
}
// CloneConversation clones the given conversation and all of its messages
func (s *repo) CloneConversation(toClone Conversation) (*Conversation, uint, error) {
rootMessages, err := s.GetRootMessages(toClone.ID)
if err != nil {
return nil, 0, fmt.Errorf("Could not create clone: %v", err)
}
clone, err := s.CreateConversation(toClone.Title + " - Clone")
if err != nil {
return nil, 0, fmt.Errorf("Could not create clone: %v", err)
}
var errors []error
var messageCnt uint = 0
for _, root := range rootMessages {
messageCnt++
newRoot := root
newRoot.ConversationID = &clone.ID
cloned, count, err := s.CloneBranch(newRoot)
if err != nil {
errors = append(errors, err)
continue
}
messageCnt += count
if root.ID == *toClone.SelectedRootID {
clone.SelectedRootID = &cloned.ID
if err := s.UpdateConversation(clone); err != nil {
errors = append(errors, fmt.Errorf("Could not set selected root on clone: %v", err))
}
}
}
if len(errors) > 0 {
return nil, 0, fmt.Errorf("Messages failed to be cloned: %v", errors)
}
return clone, messageCnt, nil
}
func (s *repo) GetSelectedThread(c *Conversation) ([]Message, error) {
if c.SelectedRoot == nil {
return nil, fmt.Errorf("No SelectedRoot on conversation - this is a bug")
}
return s.PathToLeaf(c.SelectedRoot)
return latestMessages, nil
}

View File

@ -1,41 +0,0 @@
package provider
import (
"context"
"git.mlow.ca/mlow/lmcli/pkg/api"
)
type Chunk struct {
Content string
TokenCount uint
}
type RequestParameters struct {
Model string
MaxTokens int
Temperature float32
TopP float32
Toolbox []api.ToolSpec
}
type ChatCompletionProvider interface {
// CreateChatCompletion generates a chat completion response to the
// provided messages.
CreateChatCompletion(
ctx context.Context,
params RequestParameters,
messages []api.Message,
) (*api.Message, error)
// Like CreateChatCompletion, except the response is streamed via
// the output channel.
CreateChatCompletionStream(
ctx context.Context,
params RequestParameters,
messages []api.Message,
chunks chan<- Chunk,
) (*api.Message, error)
}
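
// Illustrative sketch (not part of this change): consuming the streaming API.
// The model name is a placeholder, and the provider is assumed not to close
// the chunk channel itself.
func exampleStream(ctx context.Context, p ChatCompletionProvider, msgs []api.Message) (*api.Message, string, error) {
	params := RequestParameters{
		Model:       "placeholder-model",
		MaxTokens:   1024,
		Temperature: 0.2,
	}
	chunks := make(chan Chunk)
	done := make(chan struct{})
	var streamed string
	go func() {
		defer close(done)
		for chunk := range chunks {
			streamed += chunk.Content // accumulate partial output as it arrives
		}
	}()
	msg, err := p.CreateChatCompletionStream(ctx, params, msgs, chunks)
	close(chunks)
	<-done
	return msg, streamed, err
}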

View File

@ -1,260 +0,0 @@
package list
import (
"fmt"
"strings"
"git.mlow.ca/mlow/lmcli/pkg/tui/shared"
tuiutil "git.mlow.ca/mlow/lmcli/pkg/tui/util"
"github.com/charmbracelet/bubbles/textinput"
"github.com/charmbracelet/bubbles/viewport"
tea "github.com/charmbracelet/bubbletea"
"github.com/charmbracelet/lipgloss"
)
type Option struct {
Label string
Value interface{}
}
type OptionGroup struct {
Name string
Options []Option
}
type Model struct {
ID int
HeaderStyle lipgloss.Style
ItemStyle lipgloss.Style
SelectedStyle lipgloss.Style
ItemRender func(Option, bool) string
Width int
Height int
optionGroups []OptionGroup
selected int
filterInput textinput.Model
filteredIndices []filteredIndex
content viewport.Model
itemYOffsets []int
}
type filteredIndex struct {
groupIndex int
optionIndex int
}
type MsgOptionSelected struct {
ID int
Option Option
}
func New(opts []Option) Model {
return NewWithGroups([]OptionGroup{{Options: opts}})
}
func NewWithGroups(groups []OptionGroup) Model {
ti := textinput.New()
ti.Prompt = "/"
ti.PromptStyle = lipgloss.NewStyle().Faint(true)
m := Model{
HeaderStyle: lipgloss.NewStyle().Bold(true).Foreground(lipgloss.Color("12")).Padding(1, 0, 1, 1),
ItemStyle: lipgloss.NewStyle(),
SelectedStyle: lipgloss.NewStyle().Faint(true).Foreground(lipgloss.Color("6")),
optionGroups: groups,
selected: 0,
filterInput: ti,
filteredIndices: make([]filteredIndex, 0),
content: viewport.New(0, 0),
itemYOffsets: make([]int, 0),
}
m.filterItems()
m.content.SetContent(m.renderOptionsList())
return m
}
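
// Illustrative sketch (not part of this change): building a grouped,
// filterable list. The group names and option labels are placeholders.
func exampleModelPicker() Model {
	groups := []OptionGroup{
		{Name: "openai", Options: []Option{
			{Label: "gpt-4o", Value: "gpt-4o"},
		}},
		{Name: "anthropic", Options: []Option{
			{Label: "claude-3-5-sonnet", Value: "claude-3-5-sonnet"},
		}},
	}
	picker := NewWithGroups(groups)
	picker.Width, picker.Height = 40, 10
	return picker
}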
func (m *Model) Focused() bool {
return m.filterInput.Focused()
}
func (m *Model) Focus() {
m.filterInput.Focus()
}
func (m *Model) Blur() {
m.filterInput.Blur()
}
func (m *Model) filterItems() {
filterText := strings.ToLower(m.filterInput.Value())
var prevSelection *filteredIndex
if m.selected <= len(m.filteredIndices)-1 {
prevSelection = &m.filteredIndices[m.selected]
}
m.filteredIndices = make([]filteredIndex, 0)
for groupIndex, group := range m.optionGroups {
for optionIndex, option := range group.Options {
if filterText == "" ||
strings.Contains(strings.ToLower(option.Label), filterText) ||
(group.Name != "" && strings.Contains(strings.ToLower(group.Name), filterText)) {
m.filteredIndices = append(m.filteredIndices, filteredIndex{groupIndex, optionIndex})
}
}
}
found := false
if len(m.filteredIndices) > 0 && prevSelection != nil {
// Preserve previous selection if possible
for i, filterIdx := range m.filteredIndices {
if prevSelection.groupIndex == filterIdx.groupIndex && prevSelection.optionIndex == filterIdx.optionIndex {
m.selected = i
found = true
break
}
}
}
if !found {
m.selected = 0
}
}
func (m *Model) Update(msg tea.Msg) (Model, tea.Cmd) {
var cmd tea.Cmd
switch msg := msg.(type) {
case tea.KeyMsg:
if m.filterInput.Focused() {
switch msg.String() {
case "esc":
m.filterInput.Blur()
m.filterInput.SetValue("")
m.filterItems()
m.refreshContent()
return *m, shared.KeyHandled(msg)
case "enter":
m.filterInput.Blur()
m.refreshContent()
break
case "up", "down":
break
default:
m.filterInput, cmd = m.filterInput.Update(msg)
m.filterItems()
m.refreshContent()
return *m, cmd
}
}
switch msg.String() {
case "up", "k":
m.moveSelection(-1)
return *m, shared.KeyHandled(msg)
case "down", "j":
m.moveSelection(1)
return *m, shared.KeyHandled(msg)
case "enter":
return *m, func() tea.Msg {
idx := m.filteredIndices[m.selected]
return MsgOptionSelected{
ID: m.ID,
Option: m.optionGroups[idx.groupIndex].Options[idx.optionIndex],
}
}
case "/":
m.filterInput.Focus()
return *m, textinput.Blink
}
}
m.content, cmd = m.content.Update(msg)
return *m, cmd
}
func (m *Model) refreshContent() {
m.content.SetContent(m.renderOptionsList())
m.ensureSelectedVisible()
}
func (m *Model) ensureSelectedVisible() {
if m.selected == 0 {
m.content.GotoTop()
} else if m.selected == len(m.filteredIndices)-1 {
m.content.GotoBottom()
} else {
tuiutil.ScrollIntoView(&m.content, m.itemYOffsets[m.selected], 0)
}
}
func (m *Model) moveSelection(delta int) {
prev := m.selected
m.selected = min(len(m.filteredIndices)-1, max(0, m.selected+delta))
if prev != m.selected {
m.refreshContent()
}
}
func (m *Model) View() string {
filter := ""
if m.filterInput.Focused() {
m.filterInput.Width = m.Width
filter = m.filterInput.View()
}
contentHeight := m.Height - tuiutil.Height(filter)
m.content.Width, m.content.Height = m.Width, contentHeight
parts := []string{m.content.View()}
if filter != "" {
parts = append(parts, filter)
}
return lipgloss.JoinVertical(lipgloss.Left, parts...)
}
func (m *Model) renderOptionsList() string {
yOffset := 0
lastGroupIndex := -1
m.itemYOffsets = make([]int, len(m.filteredIndices))
var sb strings.Builder
for i, idx := range m.filteredIndices {
if idx.groupIndex != lastGroupIndex {
group := m.optionGroups[idx.groupIndex].Name
if group != "" {
headingStr := m.HeaderStyle.Render(group)
yOffset += tuiutil.Height(headingStr)
sb.WriteString(headingStr)
sb.WriteRune('\n')
}
lastGroupIndex = idx.groupIndex
}
m.itemYOffsets[i] = yOffset
option := m.optionGroups[idx.groupIndex].Options[idx.optionIndex]
var item string
if m.ItemRender != nil {
item = m.ItemRender(option, i == m.selected)
} else {
prefix := " "
if i == m.selected {
prefix = "> "
item = m.SelectedStyle.Render(option.Label)
} else {
item = m.ItemStyle.Render(option.Label)
}
item = fmt.Sprintf("%s%s", prefix, item)
}
sb.WriteString(item)
yOffset += tuiutil.Height(item)
if i < len(m.filteredIndices)-1 {
sb.WriteRune('\n')
}
}
return sb.String()
}

View File

@ -1,281 +0,0 @@
package model
import (
"context"
"fmt"
"git.mlow.ca/mlow/lmcli/pkg/agents"
"git.mlow.ca/mlow/lmcli/pkg/api"
cmdutil "git.mlow.ca/mlow/lmcli/pkg/cmd/util"
"git.mlow.ca/mlow/lmcli/pkg/conversation"
"git.mlow.ca/mlow/lmcli/pkg/lmcli"
"git.mlow.ca/mlow/lmcli/pkg/provider"
"github.com/charmbracelet/lipgloss"
)
type AppModel struct {
Ctx *lmcli.Context
Conversations conversation.ConversationList
Conversation conversation.Conversation
Messages []conversation.Message
Model string
ProviderName string
Provider provider.ChatCompletionProvider
Agent *lmcli.Agent
}
func NewAppModel(ctx *lmcli.Context, initialConversation *conversation.Conversation) *AppModel {
app := &AppModel{
Ctx: ctx,
Model: *ctx.Config.Defaults.Model,
}
if initialConversation == nil {
app.NewConversation()
} else {
}
model, provider, _, _ := ctx.GetModelProvider(*ctx.Config.Defaults.Model, "")
app.Model = model
app.ProviderName = provider
app.Agent = ctx.GetAgent(ctx.Config.Defaults.Agent)
return app
}
var (
defaultStyle = lipgloss.NewStyle().Faint(true)
accentStyle = defaultStyle.Foreground(lipgloss.Color("6"))
)
func (a *AppModel) ActiveModel(style lipgloss.Style) string {
defaultStyle := style.Inherit(defaultStyle)
accentStyle := style.Inherit(accentStyle)
return defaultStyle.Render(a.Model) + accentStyle.Render("@") + defaultStyle.Render(a.ProviderName)
}
type MessageCycleDirection int
const (
CycleNext MessageCycleDirection = 1
CyclePrev MessageCycleDirection = -1
)
func (m *AppModel) ClearConversation() {
m.Conversation = conversation.Conversation{}
m.Messages = []conversation.Message{}
}
func (m *AppModel) ApplySystemPrompt() {
var system string
agent := m.Ctx.GetAgent(m.Ctx.Config.Defaults.Agent)
if agent != nil && agent.SystemPrompt != "" {
system = agent.SystemPrompt
}
if system == "" {
system = m.Ctx.DefaultSystemPrompt()
}
if system != "" {
m.Messages = conversation.ApplySystemPrompt(m.Messages, system, false)
}
}
func (m *AppModel) NewConversation() {
m.ClearConversation()
m.ApplySystemPrompt()
}
func (a *AppModel) LoadConversationMessages() ([]conversation.Message, error) {
messages, err := a.Ctx.Conversations.PathToLeaf(a.Conversation.SelectedRoot)
if err != nil {
return nil, fmt.Errorf("Could not load conversation messages: %v %v", a.Conversation.SelectedRoot, err)
}
return messages, nil
}
func (a *AppModel) GenerateConversationTitle(messages []conversation.Message) (string, error) {
return cmdutil.GenerateTitle(a.Ctx, messages)
}
func (a *AppModel) CloneMessage(message conversation.Message, selected bool) (*conversation.Message, error) {
msg, _, err := a.Ctx.Conversations.CloneBranch(message)
if err != nil {
return nil, fmt.Errorf("Could not clone message: %v", err)
}
if selected {
if msg.Parent == nil {
msg.Conversation.SelectedRoot = msg
err = a.Ctx.Conversations.UpdateConversation(msg.Conversation)
} else {
msg.Parent.SelectedReply = msg
err = a.Ctx.Conversations.UpdateMessage(msg.Parent)
}
if err != nil {
return nil, fmt.Errorf("Could not update selected message: %v", err)
}
}
return msg, nil
}
func (a *AppModel) UpdateMessageContent(message *conversation.Message) error {
return a.Ctx.Conversations.UpdateMessage(message)
}
func cycleSelectedMessage(selected *conversation.Message, choices []conversation.Message, dir MessageCycleDirection) (*conversation.Message, error) {
currentIndex := -1
for i, reply := range choices {
if reply.ID == selected.ID {
currentIndex = i
break
}
}
if currentIndex < 0 {
return nil, fmt.Errorf("Selected message %d not found in choices, this is a bug", selected.ID)
}
var next int
if dir == CyclePrev {
next = (currentIndex - 1 + len(choices)) % len(choices)
} else {
next = (currentIndex + 1) % len(choices)
}
return &choices[next], nil
}
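
// Illustrative sketch (not part of this change): the modulo arithmetic above
// wraps at both ends. With three choices and the first selected, CyclePrev
// lands on the last choice and CycleNext on the second.
func exampleCycleWrap() {
	choices := make([]conversation.Message, 3)
	for i := range choices {
		choices[i].ID = uint(i + 1)
	}
	prev, _ := cycleSelectedMessage(&choices[0], choices, CyclePrev) // ID 3
	next, _ := cycleSelectedMessage(&choices[0], choices, CycleNext) // ID 2
	_, _ = prev, next
}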
func (a *AppModel) CycleSelectedRoot(conv *conversation.Conversation, dir MessageCycleDirection) (*conversation.Message, error) {
if len(conv.RootMessages) < 2 {
return nil, nil
}
nextRoot, err := cycleSelectedMessage(conv.SelectedRoot, conv.RootMessages, dir)
if err != nil {
return nil, err
}
conv.SelectedRoot = nextRoot
err = a.Ctx.Conversations.UpdateConversation(conv)
if err != nil {
return nil, fmt.Errorf("Could not update conversation SelectedRoot: %v", err)
}
return nextRoot, nil
}
func (a *AppModel) CycleSelectedReply(message *conversation.Message, dir MessageCycleDirection) (*conversation.Message, error) {
if len(message.Replies) < 2 {
return nil, nil
}
nextReply, err := cycleSelectedMessage(message.SelectedReply, message.Replies, dir)
if err != nil {
return nil, err
}
message.SelectedReply = nextReply
err = a.Ctx.Conversations.UpdateMessage(message)
if err != nil {
return nil, fmt.Errorf("Could not update message SelectedReply: %v", err)
}
return nextReply, nil
}
func (a *AppModel) PersistMessages() ([]conversation.Message, error) {
messages := make([]conversation.Message, len(a.Messages))
for i, m := range a.Messages {
if i == 0 && m.ID == 0 {
m.Conversation = &a.Conversation
m, err := a.Ctx.Conversations.SaveMessage(m)
if err != nil {
return nil, fmt.Errorf("Could not create first message %d: %v", a.Messages[i].ID, err)
}
messages[i] = *m
// let's set the conversation root message(s), as this is the first message
m.Conversation.RootMessages = []conversation.Message{*m}
m.Conversation.SelectedRoot = &m.Conversation.RootMessages[0]
a.Ctx.Conversations.UpdateConversation(m.Conversation)
} else if m.ID > 0 {
// Existing message, update it
err := a.Ctx.Conversations.UpdateMessage(&m)
if err != nil {
return nil, fmt.Errorf("Could not update message %d: %v", a.Messages[i].ID, err)
}
messages[i] = m
} else if i > 0 {
// New message, reply to previous
replies, err := a.Ctx.Conversations.Reply(&messages[i-1], m)
if err != nil {
return nil, fmt.Errorf("Could not reply with new message: %v", err)
}
messages[i] = replies[0]
} else {
return nil, fmt.Errorf("No messages to reply to (this is a bug)")
}
}
return messages, nil
}
func (a *AppModel) PersistConversation() (conversation.Conversation, error) {
conv := a.Conversation
var err error
if a.Conversation.ID > 0 {
err = a.Ctx.Conversations.UpdateConversation(&conv)
} else {
c, e := a.Ctx.Conversations.CreateConversation("")
err = e
if e == nil && c != nil {
conv = *c
}
}
return conv, err
}
func (a *AppModel) ExecuteToolCalls(toolCalls []api.ToolCall) ([]api.ToolResult, error) {
agent := a.Ctx.GetAgent(a.Ctx.Config.Defaults.Agent)
if agent == nil {
return nil, fmt.Errorf("Attempted to execute tool calls with no agent configured")
}
return agents.ExecuteToolCalls(toolCalls, agent.Toolbox)
}
func (a *AppModel) Prompt(
messages []conversation.Message,
chatReplyChunks chan provider.Chunk,
stopSignal chan struct{},
) (*conversation.Message, error) {
model, _, p, err := a.Ctx.GetModelProvider(a.Model, a.ProviderName)
if err != nil {
return nil, err
}
params := provider.RequestParameters{
Model: model,
MaxTokens: *a.Ctx.Config.Defaults.MaxTokens,
Temperature: *a.Ctx.Config.Defaults.Temperature,
}
if a.Agent != nil {
params.Toolbox = a.Agent.Toolbox
}
ctx, cancel := context.WithCancel(context.Background())
go func() {
select {
case <-stopSignal:
cancel()
}
}()
msg, err := p.CreateChatCompletionStream(
ctx, params, conversation.MessagesToAPI(messages), chatReplyChunks,
)
if msg != nil {
msg := conversation.MessageFromAPI(*msg)
msg.Metadata.GenerationProvider = &a.ProviderName
msg.Metadata.GenerationModel = &a.Model
return &msg, err
}
return nil, err
}
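
// Illustrative sketch (not part of this change): driving Prompt from a caller.
// The caller owns both channels; closing (or sending on) stopSignal cancels
// the in-flight request, and Prompt is assumed not to close the chunk channel.
func examplePrompt(a *AppModel) (*conversation.Message, error) {
	chunks := make(chan provider.Chunk)
	stop := make(chan struct{})
	go func() {
		for chunk := range chunks {
			_ = chunk.Content // render partial output as it arrives
		}
	}()
	reply, err := a.Prompt(a.Messages, chunks, stop)
	close(chunks)
	return reply, err
}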

View File

@ -1,66 +1,52 @@
package shared
import (
"git.mlow.ca/mlow/lmcli/pkg/lmcli"
tea "github.com/charmbracelet/bubbletea"
)
// An analogue to tea.Model with support for checking if the model has been
// initialized before
type ViewModel interface {
Init() tea.Cmd
Update(tea.Msg) (ViewModel, tea.Cmd)
type Values struct {
ConvShortname string
}
// View methods
Header(width int) string
// Render the view's main content into a container of the given dimensions
Content(width, height int) string
Footer(width int) string
type Shared struct {
Ctx *lmcli.Context
Values *Values
Width int
Height int
Err error
}
// a convenience struct for holding rendered content for individual UI
// elements
type Sections struct {
Header string
Content string
Error string
Input string
Footer string
}
type (
// send to change the current state
MsgViewChange View
// sent to a state when it is entered
MsgViewEnter struct{}
// sent when an error occurs
MsgError error
)
func WrapError(err error) tea.Cmd {
return func() tea.Msg {
return MsgError(err)
}
}
type View int
const (
ViewChat View = iota
ViewConversations
ViewSettings
StateChat View = iota
StateConversations
//StateSettings
//StateHelp
)
type (
// send to change the current state
MsgViewChange View
// sent to a state when it is entered, with the view we're leaving
MsgViewEnter View
// sent when a recoverable error occurs (displayed to user)
MsgError struct { Err error }
// sent when the view has handled a key input
MsgKeyHandled tea.KeyMsg
)
func ViewEnter(from View) tea.Cmd {
return func() tea.Msg {
return MsgViewEnter(from)
}
}
func ChangeView(view View) tea.Cmd {
return func() tea.Msg {
return MsgViewChange(view)
}
}
func KeyHandled(key tea.KeyMsg) tea.Cmd {
return func() tea.Msg {
return MsgKeyHandled(key)
}
}
func WrapError(err error) tea.Cmd {
return func() tea.Msg {
return MsgError{ Err: err }
}
}
func AsMsgError(err error) MsgError {
return MsgError{ Err: err }
}
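
// Illustrative sketch (not part of this change): a view switches screens by
// returning one of these commands from its Update function.
func exampleSwitchToConversations() tea.Cmd {
	// e.g. bound to a key in the chat view to return to the conversation list
	return ChangeView(StateConversations)
}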

View File

@ -1,163 +1,132 @@
package tui
// The terminal UI for lmcli, launched from the `lmcli chat` command
// TODO:
// - change model
// - rename conversation
// - set system prompt
import (
"fmt"
"git.mlow.ca/mlow/lmcli/pkg/conversation"
"git.mlow.ca/mlow/lmcli/pkg/lmcli"
"git.mlow.ca/mlow/lmcli/pkg/tui/model"
"git.mlow.ca/mlow/lmcli/pkg/tui/shared"
tuiutil "git.mlow.ca/mlow/lmcli/pkg/tui/util"
"git.mlow.ca/mlow/lmcli/pkg/tui/views/chat"
"git.mlow.ca/mlow/lmcli/pkg/tui/views/conversations"
"git.mlow.ca/mlow/lmcli/pkg/tui/views/settings"
tea "github.com/charmbracelet/bubbletea"
"github.com/charmbracelet/lipgloss"
)
// Application model
type Model struct {
App *model.AppModel
shared.Shared
// window size
width int
height int
// errors to display
// TODO: allow dismissing errors
errs []error
activeView shared.View
views map[shared.View]shared.ViewModel
state shared.View
chat chat.Model
conversations conversations.Model
}
func initialModel(ctx *lmcli.Context, opts LaunchOptions) *Model {
app := model.NewAppModel(ctx, opts.InitialConversation)
func initialModel(ctx *lmcli.Context, values shared.Values) Model {
m := Model{
App: app,
activeView: opts.InitialView,
views: map[shared.View]shared.ViewModel{
shared.ViewChat: chat.Chat(app),
shared.ViewConversations: conversations.Conversations(app),
shared.ViewSettings: settings.Settings(app),
Shared: shared.Shared{
Ctx: ctx,
Values: &values,
},
}
return &m
m.state = shared.StateChat
m.chat = chat.Chat(m.Shared)
m.conversations = conversations.Conversations(m.Shared)
return m
}
func (m *Model) Init() tea.Cmd {
func (m Model) Init() tea.Cmd {
return tea.Batch(
m.conversations.Init(),
m.chat.Init(),
func() tea.Msg {
return shared.MsgViewChange(m.state)
},
)
}
func (m *Model) handleGlobalInput(msg tea.KeyMsg) (bool, tea.Cmd) {
// delegate input to the active child state first, only handling it at the
// global level if the child state does not
var cmds []tea.Cmd
for _, v := range m.views {
// Init views
cmds = append(cmds, v.Init())
switch m.state {
case shared.StateChat:
handled, cmd := m.chat.HandleInput(msg)
cmds = append(cmds, cmd)
if handled {
m.chat, cmd = m.chat.Update(nil)
cmds = append(cmds, cmd)
return true, tea.Batch(cmds...)
}
cmds = append(cmds, func() tea.Msg {
// Initial view change
return shared.MsgViewChange(m.activeView)
})
return tea.Batch(cmds...)
case shared.StateConversations:
handled, cmd := m.conversations.HandleInput(msg)
cmds = append(cmds, cmd)
if handled {
m.conversations, cmd = m.conversations.Update(nil)
cmds = append(cmds, cmd)
return true, tea.Batch(cmds...)
}
func (m *Model) handleGlobalInput(msg tea.KeyMsg) tea.Cmd {
view, cmd := m.views[m.activeView].Update(msg)
m.views[m.activeView] = view
if cmd != nil {
return cmd
}
switch msg.String() {
case "ctrl+c", "ctrl+q":
return tea.Quit
return true, tea.Quit
}
return nil
return false, nil
}
func (m *Model) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
func (m Model) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
var cmds []tea.Cmd
switch msg := msg.(type) {
case tea.WindowSizeMsg:
m.width, m.height = msg.Width, msg.Height
case tea.KeyMsg:
cmd := m.handleGlobalInput(msg)
if cmd != nil {
handled, cmd := m.handleGlobalInput(msg)
if handled {
return m, cmd
}
case shared.MsgViewChange:
currView := m.activeView
m.activeView = shared.View(msg)
return m, tea.Batch(tea.WindowSize(), shared.ViewEnter(currView))
case shared.MsgError:
m.errs = append(m.errs, msg.Err)
m.state = shared.View(msg)
switch m.state {
case shared.StateChat:
m.chat.HandleResize(m.Width, m.Height)
case shared.StateConversations:
m.conversations.HandleResize(m.Width, m.Height)
}
return m, func() tea.Msg { return shared.MsgViewEnter(struct{}{}) }
case tea.WindowSizeMsg:
m.Width, m.Height = msg.Width, msg.Height
}
view, cmd := m.views[m.activeView].Update(msg)
m.views[m.activeView] = view
return m, cmd
var cmd tea.Cmd
switch m.state {
case shared.StateConversations:
m.conversations, cmd = m.conversations.Update(msg)
case shared.StateChat:
m.chat, cmd = m.chat.Update(msg)
}
if cmd != nil {
cmds = append(cmds, cmd)
}
func (m *Model) View() string {
if m.width == 0 || m.height == 0 {
// we're dimensionless!
return m, tea.Batch(cmds...)
}
func (m Model) View() string {
switch m.state {
case shared.StateConversations:
return m.conversations.View()
case shared.StateChat:
return m.chat.View()
}
return ""
}
header := m.views[m.activeView].Header(m.width)
footer := m.views[m.activeView].Footer(m.width)
fixedUIHeight := tuiutil.Height(header) + tuiutil.Height(footer)
errBanners := make([]string, len(m.errs))
for idx, err := range m.errs {
errBanners[idx] = tuiutil.ErrorBanner(err, m.width)
fixedUIHeight += tuiutil.Height(errBanners[idx])
}
content := m.views[m.activeView].Content(m.width, m.height-fixedUIHeight)
sections := make([]string, 0, 4)
if header != "" {
sections = append(sections, header)
}
if content != "" {
sections = append(sections, content)
}
if footer != "" {
sections = append(sections, footer)
}
for _, errBanner := range errBanners {
sections = append(sections, errBanner)
}
return lipgloss.JoinVertical(lipgloss.Left, sections...)
}
type LaunchOptions struct {
InitialConversation *conversation.Conversation
InitialView shared.View
}
type LaunchOption func(*LaunchOptions)
func WithInitialConversation(conv *conversation.Conversation) LaunchOption {
return func(opts *LaunchOptions) {
opts.InitialConversation = conv
}
}
func WithInitialView(view shared.View) LaunchOption {
return func(opts *LaunchOptions) {
opts.InitialView = view
}
}
func Launch(ctx *lmcli.Context, options ...LaunchOption) error {
opts := &LaunchOptions{
InitialView: shared.ViewChat,
}
for _, opt := range options {
opt(opts)
}
program := tea.NewProgram(initialModel(ctx, *opts), tea.WithAltScreen())
if _, err := program.Run(); err != nil {
func Launch(ctx *lmcli.Context, convShortname string) error {
p := tea.NewProgram(initialModel(ctx, shared.Values{ConvShortname: convShortname}), tea.WithAltScreen())
if _, err := p.Run(); err != nil {
return fmt.Errorf("Error running program: %v", err)
}
return nil

View File

@ -54,60 +54,23 @@ func Height(str string) int {
return strings.Count(str, "\n") + 1
}
func Width(str string) int {
if str == "" {
return 0
}
return ansi.PrintableRuneWidth(str)
}
func TruncateRightToCellWidth(str string, width int, tail string) string {
// truncate a string until its rendered cell width + the provided tail fits
// within the given width
func TruncateToCellWidth(str string, width int, tail string) string {
cellWidth := ansi.PrintableRuneWidth(str)
if cellWidth <= width {
return str
}
tailWidth := ansi.PrintableRuneWidth(tail)
if width <= tailWidth {
return tail[:width]
for {
str = str[:len(str)-((cellWidth+tailWidth)-width)]
cellWidth = ansi.PrintableRuneWidth(str)
if cellWidth+tailWidth <= max(width, 0) {
break
}
}
targetWidth := width - tailWidth
runes := []rune(str)
for i := len(runes) - 1; i >= 0; i-- {
str = string(runes[:i])
if ansi.PrintableRuneWidth(str) <= targetWidth {
return str + tail
}
}
return tail
}
func TruncateLeftToCellWidth(str string, width int, tail string) string {
cellWidth := ansi.PrintableRuneWidth(str)
if cellWidth <= width {
return str
}
tailWidth := ansi.PrintableRuneWidth(tail)
if width <= tailWidth {
return tail[:width]
}
targetWidth := width - tailWidth
runes := []rune(str)
for i := 0; i < len(runes); i++ {
str = string(runes[i:])
if ansi.PrintableRuneWidth(str) <= targetWidth {
return tail + str
}
}
return tail
}
func ScrollIntoView(vp *viewport.Model, offset int, edge int) {
currentOffset := vp.YOffset
@ -135,3 +98,4 @@ func ErrorBanner(err error, width int) string {
Foreground(lipgloss.Color("1")).
Render(fmt.Sprintf("%s", err))
}

View File

@ -4,9 +4,7 @@ import (
"time"
"git.mlow.ca/mlow/lmcli/pkg/api"
"git.mlow.ca/mlow/lmcli/pkg/conversation"
"git.mlow.ca/mlow/lmcli/pkg/provider"
"git.mlow.ca/mlow/lmcli/pkg/tui/model"
"git.mlow.ca/mlow/lmcli/pkg/tui/shared"
"github.com/charmbracelet/bubbles/cursor"
"github.com/charmbracelet/bubbles/spinner"
"github.com/charmbracelet/bubbles/textarea"
@ -17,35 +15,39 @@ import (
// custom tea.Msg types
type (
// sent when a conversation is (re)loaded
msgConversationLoaded struct {
conversation *api.Conversation
rootMessages []api.Message
}
// sent when a new conversation title generated
msgConversationTitleGenerated string
// sent when the conversation has been persisted, triggers a reload of contents
msgConversationPersisted conversation.Conversation
msgMessagesPersisted []conversation.Message
msgConversationPersisted struct {
isNew bool
conversation *api.Conversation
messages []api.Message
}
// sent when a conversation's messages are loaded
msgConversationMessagesLoaded struct {
messages []conversation.Message
}
msgMessagesLoaded []api.Message
// a special case of common.MsgError that stops the response waiting animation
msgChatResponseError struct {
Err error
}
msgChatResponseError error
// sent on each chunk received from LLM
msgChatResponseChunk provider.Chunk
msgChatResponseChunk api.Chunk
// sent on each completed reply
msgChatResponse conversation.Message
msgChatResponse *api.Message
// sent when the response is canceled
msgChatResponseCanceled struct{}
// sent when results from a tool call are returned
msgToolResults []api.ToolResult
// sent when the given message is made the new selected reply of its parent
msgSelectedReplyCycled *conversation.Message
msgSelectedReplyCycled *api.Message
// sent when the given message is made the new selected root of the current conversation
msgSelectedRootCycled *conversation.Message
msgSelectedRootCycled *api.Message
// sent when a message's contents are updated and saved
msgMessageUpdated *conversation.Message
msgMessageUpdated *api.Message
// sent when a message is cloned, with the cloned message
msgMessageCloned *conversation.Message
msgMessageCloned *api.Message
)
type focusState int
@ -71,24 +73,25 @@ const (
)
type Model struct {
// App state
App *model.AppModel
Height int
Width int
shared.Shared
shared.Sections
// Chat view state
// app state
state state // current overall status of the view
conversation *api.Conversation
rootMessages []api.Message
messages []api.Message
selectedMessage int
editorTarget editorTarget
stopSignal chan struct{}
replyChan chan conversation.Message
chatReplyChunks chan provider.Chunk
replyChan chan api.Message
chatReplyChunks chan api.Chunk
persistence bool // whether we will save new messages in the conversation
// UI state
// ui state
focus focusState
showDetails bool // whether various details are shown in the UI (e.g. system prompt, tool calls/results, message metadata)
wrap bool // whether message content is wrapped to viewport width
showToolResults bool // whether tool calls and results are shown
messageCache []string // cache of syntax highlighted and wrapped message content
messageOffsets []int
@ -104,51 +107,53 @@ type Model struct {
elapsed time.Duration
}
func getSpinner() spinner.Model {
return spinner.New(spinner.WithSpinner(
spinner.Spinner{
Frames: []string{
"∙∙∙",
"●∙∙",
"●●∙",
"●●●",
"∙●●",
"∙∙●",
"∙∙∙",
"∙∙●",
"∙●●",
"●●●",
"●●∙",
"●∙∙",
},
FPS: 440 * time.Millisecond,
},
))
}
func Chat(app *model.AppModel) *Model {
func Chat(shared shared.Shared) Model {
m := Model{
App: app,
Shared: shared,
state: idle,
conversation: &api.Conversation{},
persistence: true,
stopSignal: make(chan struct{}),
replyChan: make(chan conversation.Message),
chatReplyChunks: make(chan provider.Chunk),
replyChan: make(chan api.Message),
chatReplyChunks: make(chan api.Chunk),
wrap: true,
selectedMessage: -1,
content: viewport.New(0, 0),
input: textarea.New(),
spinner: getSpinner(),
spinner: spinner.New(spinner.WithSpinner(
spinner.Spinner{
Frames: []string{
". ",
".. ",
"...",
".. ",
". ",
" ",
},
FPS: time.Second / 3,
},
)),
replyCursor: cursor.New(),
}
m.replyCursor.SetChar(" ")
m.replyCursor.Focus()
system := shared.Ctx.DefaultSystemPrompt()
agent := shared.Ctx.GetAgent(shared.Ctx.Config.Defaults.Agent)
if agent != nil && agent.SystemPrompt != "" {
system = agent.SystemPrompt
}
if system != "" {
m.messages = api.ApplySystemPrompt(m.messages, system, false)
}
m.input.Focus()
m.input.MaxHeight = 0
m.input.CharLimit = 0
@ -158,10 +163,11 @@ func Chat(app *model.AppModel) *Model {
m.input.FocusedStyle.CursorLine = lipgloss.NewStyle()
m.input.FocusedStyle.Base = inputFocusedStyle
m.input.BlurredStyle.Base = inputBlurredStyle
return &m
return m
}
func (m *Model) Init() tea.Cmd {
func (m Model) Init() tea.Cmd {
return tea.Batch(
m.waitForResponseChunk(),
)

View File

@ -1,136 +0,0 @@
package chat
import (
"time"
"git.mlow.ca/mlow/lmcli/pkg/api"
"git.mlow.ca/mlow/lmcli/pkg/conversation"
"git.mlow.ca/mlow/lmcli/pkg/tui/model"
"git.mlow.ca/mlow/lmcli/pkg/tui/shared"
tea "github.com/charmbracelet/bubbletea"
)
func (m *Model) waitForResponseChunk() tea.Cmd {
return func() tea.Msg {
return msgChatResponseChunk(<-m.chatReplyChunks)
}
}
func (m *Model) loadConversationMessages() tea.Cmd {
return func() tea.Msg {
messages, err := m.App.LoadConversationMessages()
if err != nil {
return shared.AsMsgError(err)
}
return msgConversationMessagesLoaded{messages}
}
}
func (m *Model) generateConversationTitle() tea.Cmd {
return func() tea.Msg {
title, err := m.App.GenerateConversationTitle(m.App.Messages)
if err != nil {
return shared.AsMsgError(err)
}
return msgConversationTitleGenerated(title)
}
}
func (m *Model) cloneMessage(message conversation.Message, selected bool) tea.Cmd {
return func() tea.Msg {
msg, err := m.App.CloneMessage(message, selected)
if err != nil {
return shared.WrapError(err)
}
return msgMessageCloned(msg)
}
}
func (m *Model) updateMessageContent(message *conversation.Message) tea.Cmd {
return func() tea.Msg {
err := m.App.UpdateMessageContent(message)
if err != nil {
return shared.WrapError(err)
}
return msgMessageUpdated(message)
}
}
func (m *Model) cycleSelectedRoot(conv *conversation.Conversation, dir model.MessageCycleDirection) tea.Cmd {
if len(conv.RootMessages) < 2 {
return nil
}
return func() tea.Msg {
nextRoot, err := m.App.CycleSelectedRoot(conv, dir)
if err != nil {
return shared.WrapError(err)
}
return msgSelectedRootCycled(nextRoot)
}
}
func (m *Model) cycleSelectedReply(message *conversation.Message, dir model.MessageCycleDirection) tea.Cmd {
if len(message.Replies) < 2 {
return nil
}
return func() tea.Msg {
nextReply, err := m.App.CycleSelectedReply(message, dir)
if err != nil {
return shared.WrapError(err)
}
return msgSelectedReplyCycled(nextReply)
}
}
func (m *Model) persistConversation() tea.Cmd {
return func() tea.Msg {
conversation, err := m.App.PersistConversation()
if err != nil {
return shared.AsMsgError(err)
}
return msgConversationPersisted(conversation)
}
}
func (m *Model) persistMessages() tea.Cmd {
return func() tea.Msg {
messages, err := m.App.PersistMessages()
if err != nil {
return shared.AsMsgError(err)
}
return msgMessagesPersisted(messages)
}
}
func (m *Model) executeToolCalls(toolCalls []api.ToolCall) tea.Cmd {
return func() tea.Msg {
results, err := m.App.ExecuteToolCalls(toolCalls)
if err != nil {
return shared.AsMsgError(err)
}
return msgToolResults(results)
}
}
func (m *Model) promptLLM() tea.Cmd {
m.state = pendingResponse
m.spinner = getSpinner()
m.replyCursor.Blink = false
m.startTime = time.Now()
m.elapsed = 0
m.tokenCount = 0
return tea.Batch(
m.spinner.Tick,
func() tea.Msg {
resp, err := m.App.Prompt(m.App.Messages, m.chatReplyChunks, m.stopSignal)
if err != nil {
return msgChatResponseError{Err: err}
}
return msgChatResponse(*resp)
},
)
}

View File

@ -0,0 +1,308 @@
package chat
import (
"context"
"errors"
"fmt"
"time"
"git.mlow.ca/mlow/lmcli/pkg/agents"
"git.mlow.ca/mlow/lmcli/pkg/api"
cmdutil "git.mlow.ca/mlow/lmcli/pkg/cmd/util"
"git.mlow.ca/mlow/lmcli/pkg/tui/shared"
tea "github.com/charmbracelet/bubbletea"
)
func (m *Model) setMessage(i int, msg api.Message) {
if i >= len(m.messages) {
panic("i out of range")
}
m.messages[i] = msg
m.messageCache[i] = m.renderMessage(i)
}
func (m *Model) addMessage(msg api.Message) {
m.messages = append(m.messages, msg)
m.messageCache = append(m.messageCache, m.renderMessage(len(m.messages)-1))
}
func (m *Model) setMessageContents(i int, content string) {
if i >= len(m.messages) {
panic("i out of range")
}
m.messages[i].Content = content
m.messageCache[i] = m.renderMessage(i)
}
func (m *Model) rebuildMessageCache() {
m.messageCache = make([]string, len(m.messages))
for i := range m.messages {
m.messageCache[i] = m.renderMessage(i)
}
}
func (m *Model) updateContent() {
atBottom := m.content.AtBottom()
m.content.SetContent(m.conversationMessagesView())
if atBottom {
// if we were at bottom before the update, scroll with the output
m.content.GotoBottom()
}
}
func (m *Model) loadConversation(shortname string) tea.Cmd {
return func() tea.Msg {
if shortname == "" {
return nil
}
c, err := m.Shared.Ctx.Store.ConversationByShortName(shortname)
if err != nil {
return shared.MsgError(fmt.Errorf("Could not lookup conversation: %v", err))
}
if c.ID == 0 {
return shared.MsgError(fmt.Errorf("Conversation not found: %s", shortname))
}
rootMessages, err := m.Shared.Ctx.Store.RootMessages(c.ID)
if err != nil {
return shared.MsgError(fmt.Errorf("Could not load conversation root messages: %v\n", err))
}
return msgConversationLoaded{c, rootMessages}
}
}
func (m *Model) loadConversationMessages() tea.Cmd {
return func() tea.Msg {
messages, err := m.Shared.Ctx.Store.PathToLeaf(m.conversation.SelectedRoot)
if err != nil {
return shared.MsgError(fmt.Errorf("Could not load conversation messages: %v\n", err))
}
return msgMessagesLoaded(messages)
}
}
func (m *Model) generateConversationTitle() tea.Cmd {
return func() tea.Msg {
title, err := cmdutil.GenerateTitle(m.Shared.Ctx, m.messages)
if err != nil {
return shared.MsgError(err)
}
return msgConversationTitleGenerated(title)
}
}
func (m *Model) updateConversationTitle(conversation *api.Conversation) tea.Cmd {
return func() tea.Msg {
err := m.Shared.Ctx.Store.UpdateConversation(conversation)
if err != nil {
return shared.WrapError(err)
}
return nil
}
}
// Clones the given message (and its descendants). If selected is true, updates
// either its parent's SelectedReply or its conversation's SelectedRoot to
// point to the new clone
func (m *Model) cloneMessage(message api.Message, selected bool) tea.Cmd {
return func() tea.Msg {
msg, _, err := m.Ctx.Store.CloneBranch(message)
if err != nil {
return shared.WrapError(fmt.Errorf("Could not clone message: %v", err))
}
if selected {
if msg.Parent == nil {
msg.Conversation.SelectedRoot = msg
err = m.Shared.Ctx.Store.UpdateConversation(msg.Conversation)
} else {
msg.Parent.SelectedReply = msg
err = m.Shared.Ctx.Store.UpdateMessage(msg.Parent)
}
if err != nil {
return shared.WrapError(fmt.Errorf("Could not update selected message: %v", err))
}
}
return msgMessageCloned(msg)
}
}
func (m *Model) updateMessageContent(message *api.Message) tea.Cmd {
return func() tea.Msg {
err := m.Shared.Ctx.Store.UpdateMessage(message)
if err != nil {
return shared.WrapError(fmt.Errorf("Could not update message: %v", err))
}
return msgMessageUpdated(message)
}
}
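// cycleSelectedMessage returns the sibling before or after the selected message within choices, wrapping around at either end.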
func cycleSelectedMessage(selected *api.Message, choices []api.Message, dir MessageCycleDirection) (*api.Message, error) {
currentIndex := -1
for i, reply := range choices {
if reply.ID == selected.ID {
currentIndex = i
break
}
}
if currentIndex < 0 {
// this should probably be an assert
return nil, fmt.Errorf("Selected message %d not found in choices, this is a bug", selected.ID)
}
var next int
if dir == CyclePrev {
// Wrap around to the last reply if at the beginning
next = (currentIndex - 1 + len(choices)) % len(choices)
} else {
// Wrap around to the first reply if at the end
next = (currentIndex + 1) % len(choices)
}
return &choices[next], nil
}
func (m *Model) cycleSelectedRoot(conv *api.Conversation, dir MessageCycleDirection) tea.Cmd {
if len(m.rootMessages) < 2 {
return nil
}
return func() tea.Msg {
nextRoot, err := cycleSelectedMessage(conv.SelectedRoot, m.rootMessages, dir)
if err != nil {
return shared.WrapError(err)
}
conv.SelectedRoot = nextRoot
err = m.Shared.Ctx.Store.UpdateConversation(conv)
if err != nil {
return shared.WrapError(fmt.Errorf("Could not update conversation SelectedRoot: %v", err))
}
return msgSelectedRootCycled(nextRoot)
}
}
func (m *Model) cycleSelectedReply(message *api.Message, dir MessageCycleDirection) tea.Cmd {
if len(message.Replies) < 2 {
return nil
}
return func() tea.Msg {
nextReply, err := cycleSelectedMessage(message.SelectedReply, message.Replies, dir)
if err != nil {
return shared.WrapError(err)
}
message.SelectedReply = nextReply
err = m.Shared.Ctx.Store.UpdateMessage(message)
if err != nil {
return shared.WrapError(fmt.Errorf("Could not update message SelectedReply: %v", err))
}
return msgSelectedReplyCycled(nextReply)
}
}
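// persistConversation saves the conversation and its messages: a conversation with no ID is started from scratch with all messages so far, otherwise existing messages are updated in place and new ones are appended as replies.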
func (m *Model) persistConversation() tea.Cmd {
conversation := m.conversation
messages := m.messages
var err error
if conversation.ID == 0 {
return func() tea.Msg {
// Start a new conversation with all messages so far
conversation, messages, err = m.Shared.Ctx.Store.StartConversation(messages...)
if err != nil {
return shared.MsgError(fmt.Errorf("Could not start new conversation: %v", err))
}
return msgConversationPersisted{true, conversation, messages}
}
}
return func() tea.Msg {
// else, we'll handle updating an existing conversation's messages
for i := range messages {
if messages[i].ID > 0 {
// message has an ID, update it
err := m.Shared.Ctx.Store.UpdateMessage(&messages[i])
if err != nil {
return shared.MsgError(err)
}
} else if i > 0 {
// message is new, so add it as a reply to the previous message
saved, err := m.Shared.Ctx.Store.Reply(&messages[i-1], messages[i])
if err != nil {
return shared.MsgError(err)
}
messages[i] = saved[0]
} else {
// message has no id and no previous messages to add it to
// this shouldn't happen?
return fmt.Errorf("Error: no messages to reply to")
}
}
return msgConversationPersisted{false, conversation, messages}
}
}
func (m *Model) executeToolCalls(toolCalls []api.ToolCall) tea.Cmd {
return func() tea.Msg {
agent := m.Shared.Ctx.GetAgent(m.Shared.Ctx.Config.Defaults.Agent)
if agent == nil {
return shared.MsgError(fmt.Errorf("Attempted to execute tool calls with no agent configured"))
}
results, err := agents.ExecuteToolCalls(toolCalls, agent.Toolbox)
if err != nil {
return shared.MsgError(err)
}
return msgToolResults(results)
}
}
func (m *Model) promptLLM() tea.Cmd {
m.state = pendingResponse
m.replyCursor.Blink = false
m.startTime = time.Now()
m.elapsed = 0
m.tokenCount = 0
return func() tea.Msg {
model, provider, err := m.Shared.Ctx.GetModelProvider(*m.Shared.Ctx.Config.Defaults.Model)
if err != nil {
return shared.MsgError(err)
}
params := api.RequestParameters{
Model: model,
MaxTokens: *m.Shared.Ctx.Config.Defaults.MaxTokens,
Temperature: *m.Shared.Ctx.Config.Defaults.Temperature,
}
agent := m.Shared.Ctx.GetAgent(m.Shared.Ctx.Config.Defaults.Agent)
if agent != nil {
params.Toolbox = agent.Toolbox
}
ctx, cancel := context.WithCancel(context.Background())
go func() {
select {
case <-m.stopSignal:
cancel()
}
}()
resp, err := provider.CreateChatCompletionStream(
ctx, params, m.messages, m.chatReplyChunks,
)
if errors.Is(err, context.Canceled) {
return msgChatResponseCanceled(struct{}{})
}
if err != nil {
return msgChatResponseError(err)
}
return msgChatResponse(resp)
}
}


@ -5,24 +5,29 @@ import (
"strings"
"git.mlow.ca/mlow/lmcli/pkg/api"
"git.mlow.ca/mlow/lmcli/pkg/conversation"
"git.mlow.ca/mlow/lmcli/pkg/tui/model"
"git.mlow.ca/mlow/lmcli/pkg/tui/shared"
tuiutil "git.mlow.ca/mlow/lmcli/pkg/tui/util"
tea "github.com/charmbracelet/bubbletea"
)
func (m *Model) handleInput(msg tea.KeyMsg) tea.Cmd {
type MessageCycleDirection int
const (
CycleNext MessageCycleDirection = 1
CyclePrev MessageCycleDirection = -1
)
func (m *Model) HandleInput(msg tea.KeyMsg) (bool, tea.Cmd) {
switch m.focus {
case focusInput:
cmd := m.handleInputKey(msg)
if cmd != nil {
return cmd
consumed, cmd := m.handleInputKey(msg)
if consumed {
return true, cmd
}
case focusMessages:
cmd := m.handleMessagesKey(msg)
if cmd != nil {
return cmd
consumed, cmd := m.handleMessagesKey(msg)
if consumed {
return true, cmd
}
}
@ -30,152 +35,126 @@ func (m *Model) handleInput(msg tea.KeyMsg) tea.Cmd {
case "esc":
if m.state == pendingResponse {
m.stopSignal <- struct{}{}
return shared.KeyHandled(msg)
return true, nil
}
return func() tea.Msg {
return shared.MsgViewChange(shared.ViewConversations)
return true, func() tea.Msg {
return shared.MsgViewChange(shared.StateConversations)
}
case "ctrl+c":
if m.state == pendingResponse {
m.stopSignal <- struct{}{}
return shared.KeyHandled(msg)
}
case "ctrl+g":
if m.state == pendingResponse {
m.stopSignal <- struct{}{}
return shared.KeyHandled(msg)
}
return func() tea.Msg {
return shared.MsgViewChange(shared.ViewSettings)
return true, nil
}
case "ctrl+p":
m.persistence = !m.persistence
return shared.KeyHandled(msg)
return true, nil
case "ctrl+t":
m.showDetails = !m.showDetails
m.showToolResults = !m.showToolResults
m.rebuildMessageCache()
m.updateContent()
return shared.KeyHandled(msg)
return true, nil
case "ctrl+w":
m.wrap = !m.wrap
m.rebuildMessageCache()
m.updateContent()
return shared.KeyHandled(msg)
case "ctrl+n":
m.App.NewConversation()
m.rebuildMessageCache()
m.updateContent()
return shared.KeyHandled(msg)
return true, nil
}
return nil
}
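// scrollSelection moves the message selection by one visible message in the given direction (skipping system messages unless details are shown) and scrolls it into view.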
func (m *Model) scrollSelection(dir int) {
if m.selectedMessage+dir < 0 || m.selectedMessage+dir >= len(m.App.Messages) {
return
}
newIdx := m.selectedMessage
for i := newIdx + dir; i >= 0 && i < len(m.App.Messages); i += dir {
if !m.showDetails && m.App.Messages[i].Role.IsSystem() {
continue
}
newIdx = i
break
}
if newIdx != m.selectedMessage {
m.selectedMessage = newIdx
m.updateContent()
}
yOffset := m.messageOffsets[m.selectedMessage]
tuiutil.ScrollIntoView(&m.content, yOffset, m.content.Height/2)
return false, nil
}
// handleMessagesKey handles input when the messages pane is focused
func (m *Model) handleMessagesKey(msg tea.KeyMsg) tea.Cmd {
func (m *Model) handleMessagesKey(msg tea.KeyMsg) (bool, tea.Cmd) {
switch msg.String() {
case "tab", "enter":
m.focus = focusInput
m.updateContent()
m.input.Focus()
return shared.KeyHandled(msg)
return true, nil
case "e":
if m.selectedMessage < len(m.App.Messages) {
if m.selectedMessage < len(m.messages) {
m.editorTarget = selectedMessage
return tuiutil.OpenTempfileEditor(
return true, tuiutil.OpenTempfileEditor(
"message.*.md",
m.App.Messages[m.selectedMessage].Content,
m.messages[m.selectedMessage].Content,
"# Edit the message below\n",
)
}
return nil
case "ctrl+k", "ctrl+up":
if m.selectedMessage > 0 {
m.scrollSelection(-1)
return false, nil
case "ctrl+k":
if m.selectedMessage > 0 && len(m.messages) == len(m.messageOffsets) {
m.selectedMessage--
m.updateContent()
offset := m.messageOffsets[m.selectedMessage]
tuiutil.ScrollIntoView(&m.content, offset, m.content.Height/2)
}
return shared.KeyHandled(msg)
case "ctrl+j", "ctrl+down":
if m.selectedMessage < len(m.App.Messages)-1 {
m.scrollSelection(1)
return true, nil
case "ctrl+j":
if m.selectedMessage < len(m.messages)-1 && len(m.messages) == len(m.messageOffsets) {
m.selectedMessage++
m.updateContent()
offset := m.messageOffsets[m.selectedMessage]
tuiutil.ScrollIntoView(&m.content, offset, m.content.Height/2)
}
return shared.KeyHandled(msg)
case "ctrl+h", "ctrl+left", "ctrl+l", "ctrl+right":
dir := model.CyclePrev
if msg.String() == "ctrl+l" || msg.String() == "ctrl+right" {
dir = model.CycleNext
return true, nil
case "ctrl+h", "ctrl+l":
dir := CyclePrev
if msg.String() == "ctrl+l" {
dir = CycleNext
}
var cmd tea.Cmd
if m.selectedMessage == 0 {
cmd = m.cycleSelectedRoot(&m.App.Conversation, dir)
cmd = m.cycleSelectedRoot(m.conversation, dir)
} else if m.selectedMessage > 0 {
cmd = m.cycleSelectedReply(&m.App.Messages[m.selectedMessage-1], dir)
cmd = m.cycleSelectedReply(&m.messages[m.selectedMessage-1], dir)
}
return cmd
return cmd != nil, cmd
case "ctrl+r":
// prompt the model with all messages up to and including the selected message
if m.state == idle && m.selectedMessage < len(m.App.Messages) {
m.App.Messages = m.App.Messages[:m.selectedMessage+1]
// resubmit the conversation with all messages up to and including the selected message
if m.state == idle && m.selectedMessage < len(m.messages) {
m.messages = m.messages[:m.selectedMessage+1]
m.messageCache = m.messageCache[:m.selectedMessage+1]
cmd := m.promptLLM()
m.updateContent()
m.content.GotoBottom()
return cmd
return true, cmd
}
}
return nil
return false, nil
}
// handleInputKey handles input when the input textarea is focused
func (m *Model) handleInputKey(msg tea.KeyMsg) tea.Cmd {
func (m *Model) handleInputKey(msg tea.KeyMsg) (bool, tea.Cmd) {
switch msg.String() {
case "esc":
m.focus = focusMessages
if len(m.App.Messages) > 0 {
if m.selectedMessage < 0 || m.selectedMessage >= len(m.App.Messages) {
m.selectedMessage = len(m.App.Messages) - 1
if len(m.messages) > 0 {
if m.selectedMessage < 0 || m.selectedMessage >= len(m.messages) {
m.selectedMessage = len(m.messages) - 1
}
offset := m.messageOffsets[m.selectedMessage]
tuiutil.ScrollIntoView(&m.content, offset, m.content.Height/2)
}
m.updateContent()
m.input.Blur()
return shared.KeyHandled(msg)
return true, nil
case "ctrl+s":
// TODO: call a "handleSend" function which returns a tea.Cmd
if m.state != idle {
return nil
return false, nil
}
input := strings.TrimSpace(m.input.Value())
if input == "" {
return shared.KeyHandled(msg)
return true, nil
}
if len(m.App.Messages) > 0 && m.App.Messages[len(m.App.Messages)-1].Role.IsUser() {
return shared.WrapError(fmt.Errorf("Can't reply to a user message"))
if len(m.messages) > 0 && m.messages[len(m.messages)-1].Role == api.MessageRoleUser {
return true, shared.WrapError(fmt.Errorf("Can't reply to a user message"))
}
m.addMessage(conversation.Message{
m.addMessage(api.Message{
Role: api.MessageRoleUser,
Content: input,
})
@ -191,11 +170,11 @@ func (m *Model) handleInputKey(msg tea.KeyMsg) tea.Cmd {
m.updateContent()
m.content.GotoBottom()
return tea.Batch(cmds...)
return true, tea.Batch(cmds...)
case "ctrl+e":
cmd := tuiutil.OpenTempfileEditor("message.*.md", m.input.Value(), "# Edit your input below\n")
m.editorTarget = input
return cmd
return true, cmd
}
return nil
return false, nil
}


@ -5,87 +5,57 @@ import (
"time"
"git.mlow.ca/mlow/lmcli/pkg/api"
"git.mlow.ca/mlow/lmcli/pkg/conversation"
"git.mlow.ca/mlow/lmcli/pkg/tui/shared"
tuiutil "git.mlow.ca/mlow/lmcli/pkg/tui/util"
"github.com/charmbracelet/bubbles/cursor"
tea "github.com/charmbracelet/bubbletea"
)
func (m *Model) setMessage(i int, msg conversation.Message) {
if i >= len(m.App.Messages) {
panic("i out of range")
}
m.App.Messages[i] = msg
m.messageCache[i] = m.renderMessage(i)
}
func (m *Model) addMessage(msg conversation.Message) {
m.App.Messages = append(m.App.Messages, msg)
m.messageCache = append(m.messageCache, m.renderMessage(len(m.App.Messages)-1))
}
func (m *Model) setMessageContents(i int, content string) {
if i >= len(m.App.Messages) {
panic("i out of range")
}
m.App.Messages[i].Content = content
m.messageCache[i] = m.renderMessage(i)
}
func (m *Model) rebuildMessageCache() {
m.messageCache = make([]string, len(m.App.Messages))
for i := range m.App.Messages {
m.messageCache[i] = m.renderMessage(i)
}
}
func (m *Model) updateContent() {
atBottom := m.content.AtBottom()
m.content.SetContent(m.conversationMessagesView())
if atBottom {
m.content.GotoBottom()
}
}
func (m *Model) Update(msg tea.Msg) (shared.ViewModel, tea.Cmd) {
inputHandled := false
var cmds []tea.Cmd
switch msg := msg.(type) {
case tea.KeyMsg:
cmd := m.handleInput(msg)
if cmd != nil {
inputHandled = true
cmds = append(cmds, cmd)
}
case tea.WindowSizeMsg:
m.Width, m.Height = msg.Width, msg.Height
m.content.Width = msg.Width
m.input.SetWidth(msg.Width - m.input.FocusedStyle.Base.GetHorizontalFrameSize())
if len(m.App.Messages) > 0 {
func (m *Model) HandleResize(width, height int) {
m.Width, m.Height = width, height
m.content.Width = width
m.input.SetWidth(width - m.input.FocusedStyle.Base.GetHorizontalFrameSize())
if len(m.messages) > 0 {
m.rebuildMessageCache()
m.updateContent()
}
}
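// waitForResponseChunk blocks until the next streamed chunk arrives on chatReplyChunks and forwards it as msgChatResponseChunk.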
func (m *Model) waitForResponseChunk() tea.Cmd {
return func() tea.Msg {
return msgChatResponseChunk(<-m.chatReplyChunks)
}
}
func (m Model) Update(msg tea.Msg) (Model, tea.Cmd) {
var cmds []tea.Cmd
switch msg := msg.(type) {
case tea.WindowSizeMsg:
m.HandleResize(msg.Width, msg.Height)
case shared.MsgViewEnter:
// wake up spinners and cursors
cmds = append(cmds, cursor.Blink, m.spinner.Tick)
// Refresh view
if m.Shared.Values.ConvShortname != "" {
// (re)load conversation contents
cmds = append(cmds, m.loadConversation(m.Shared.Values.ConvShortname))
if m.conversation.ShortName.String != m.Shared.Values.ConvShortname {
// clear existing messages if we're loading a new conversation
m.messages = []api.Message{}
m.selectedMessage = 0
}
}
m.rebuildMessageCache()
m.updateContent()
if m.App.Conversation.ID > 0 {
// (re)load conversation contents
cmds = append(cmds, m.loadConversationMessages())
}
case tuiutil.MsgTempfileEditorClosed:
contents := string(msg)
switch m.editorTarget {
case input:
m.input.SetValue(contents)
case selectedMessage:
toEdit := m.App.Messages[m.selectedMessage]
toEdit := m.messages[m.selectedMessage]
if toEdit.Content != contents {
toEdit.Content = contents
m.setMessage(m.selectedMessage, toEdit)
@ -95,12 +65,19 @@ func (m *Model) Update(msg tea.Msg) (shared.ViewModel, tea.Cmd) {
}
}
}
case msgConversationMessagesLoaded:
m.App.Messages = msg.messages
case msgConversationLoaded:
m.conversation = msg.conversation
m.rootMessages = msg.rootMessages
m.selectedMessage = -1
if len(m.rootMessages) > 0 {
cmds = append(cmds, m.loadConversationMessages())
}
case msgMessagesLoaded:
m.messages = msg
if m.selectedMessage == -1 {
m.selectedMessage = len(msg.messages) - 1
m.selectedMessage = len(msg) - 1
} else {
m.selectedMessage = min(m.selectedMessage, len(m.App.Messages))
m.selectedMessage = min(m.selectedMessage, len(m.messages))
}
m.rebuildMessageCache()
m.updateContent()
@ -111,13 +88,13 @@ func (m *Model) Update(msg tea.Msg) (shared.ViewModel, tea.Cmd) {
break
}
last := len(m.App.Messages) - 1
if last >= 0 && m.App.Messages[last].Role.IsAssistant() {
last := len(m.messages) - 1
if last >= 0 && m.messages[last].Role.IsAssistant() {
// append chunk to existing message
m.setMessageContents(last, m.App.Messages[last].Content+msg.Content)
m.setMessageContents(last, m.messages[last].Content+msg.Content)
} else {
// use chunk in a new message
m.addMessage(conversation.Message{
m.addMessage(api.Message{
Role: api.MessageRoleAssistant,
Content: msg.Content,
})
@ -133,24 +110,25 @@ func (m *Model) Update(msg tea.Msg) (shared.ViewModel, tea.Cmd) {
case msgChatResponse:
m.state = idle
reply := conversation.Message(msg)
reply := (*api.Message)(msg)
reply.Content = strings.TrimSpace(reply.Content)
last := len(m.App.Messages) - 1
last := len(m.messages) - 1
if last < 0 {
panic("Unexpected empty messages handling msgAssistantReply")
}
if m.App.Messages[last].Role.IsAssistant() {
// TODO: handle continuations gracefully - only some models support them
m.setMessage(last, reply)
if m.messages[last].Role.IsAssistant() {
// TODO: handle continuations gracefully - some models support them well, others fail horribly.
m.setMessage(last, *reply)
} else {
m.addMessage(reply)
m.addMessage(*reply)
}
if reply.Role == api.MessageRoleToolCall {
switch reply.Role {
case api.MessageRoleToolCall:
// TODO: user confirmation before execution
// m.state = confirmToolUse
// m.state = waitingForConfirmation
cmds = append(cmds, m.executeToolCalls(reply.ToolCalls))
}
@ -158,29 +136,31 @@ func (m *Model) Update(msg tea.Msg) (shared.ViewModel, tea.Cmd) {
cmds = append(cmds, m.persistConversation())
}
if m.App.Conversation.Title == "" && len(m.App.Messages) > 0 {
if m.conversation.Title == "" {
cmds = append(cmds, m.generateConversationTitle())
}
m.updateContent()
case msgChatResponseCanceled:
m.state = idle
m.updateContent()
case msgChatResponseError:
m.state = idle
m.Shared.Err = error(msg)
m.updateContent()
return m, shared.WrapError(msg.Err)
case msgToolResults:
last := len(m.App.Messages) - 1
last := len(m.messages) - 1
if last < 0 {
panic("Unexpected empty messages handling msgAssistantReply")
}
if m.App.Messages[last].Role != api.MessageRoleToolCall {
if m.messages[last].Role != api.MessageRoleToolCall {
panic("Previous message not a tool call, unexpected")
}
m.addMessage(conversation.Message{
m.addMessage(api.Message{
Role: api.MessageRoleToolResult,
ToolResults: conversation.ToolResults(msg),
ToolResults: api.ToolResults(msg),
})
if m.persistence {
@ -190,25 +170,30 @@ func (m *Model) Update(msg tea.Msg) (shared.ViewModel, tea.Cmd) {
m.updateContent()
case msgConversationTitleGenerated:
title := string(msg)
m.App.Conversation.Title = title
if m.persistence && m.App.Conversation.ID > 0 {
cmds = append(cmds, m.persistConversation())
m.conversation.Title = title
if m.persistence {
cmds = append(cmds, m.updateConversationTitle(m.conversation))
}
case cursor.BlinkMsg:
if m.state == pendingResponse {
// ensure we show the updated "wait for response" cursor blink state
last := len(m.App.Messages) - 1
last := len(m.messages) - 1
m.messageCache[last] = m.renderMessage(last)
m.updateContent()
}
case msgConversationPersisted:
m.App.Conversation = conversation.Conversation(msg)
cmds = append(cmds, m.persistMessages())
case msgMessagesPersisted:
m.App.Messages = msg
m.conversation = msg.conversation
m.messages = msg.messages
if msg.isNew {
m.rootMessages = []api.Message{m.messages[0]}
}
m.rebuildMessageCache()
m.updateContent()
case msgMessageCloned:
if msg.Parent == nil {
m.conversation = msg.Conversation
m.rootMessages = append(m.rootMessages, *msg)
}
cmds = append(cmds, m.loadConversationMessages())
case msgSelectedRootCycled, msgSelectedReplyCycled, msgMessageUpdated:
cmds = append(cmds, m.loadConversationMessages())
@ -225,22 +210,38 @@ func (m *Model) Update(msg tea.Msg) (shared.ViewModel, tea.Cmd) {
}
prevInputLineCnt := m.input.LineCount()
if !inputHandled {
inputCaptured := false
m.input, cmd = m.input.Update(msg)
if cmd != nil {
inputHandled = true
inputCaptured = true
cmds = append(cmds, cmd)
}
}
if !inputHandled {
if !inputCaptured {
m.content, cmd = m.content.Update(msg)
if cmd != nil {
cmds = append(cmds, cmd)
}
}
// update views once window dimensions are known
if m.Width > 0 {
m.Header = m.headerView()
m.Footer = m.footerView()
m.Error = tuiutil.ErrorBanner(m.Err, m.Width)
fixedHeight := tuiutil.Height(m.Header) + tuiutil.Height(m.Error) + tuiutil.Height(m.Footer)
// calculate clamped input height to accommodate the input text
// minimum 4 lines, maximum half of content area
newHeight := max(4, min((m.Height-fixedHeight-1)/2, m.input.LineCount()))
m.input.SetHeight(newHeight)
m.Input = m.input.View()
// remaining height towards content
m.content.Height = m.Height - fixedHeight - tuiutil.Height(m.Input)
m.Content = m.content.View()
}
// this is a pretty nasty hack to ensure the input area viewport doesn't
// scroll below its content, which can happen when the input viewport
// height has grown, or previously entered lines have been deleted
@ -263,8 +264,5 @@ func (m *Model) Update(msg tea.Msg) (shared.ViewModel, tea.Cmd) {
}
}
if len(cmds) > 0 {
return m, tea.Batch(cmds...)
}
return m, nil
}


@ -6,7 +6,6 @@ import (
"strings"
"git.mlow.ca/mlow/lmcli/pkg/api"
"git.mlow.ca/mlow/lmcli/pkg/conversation"
"git.mlow.ca/mlow/lmcli/pkg/tui/styles"
tuiutil "git.mlow.ca/mlow/lmcli/pkg/tui/util"
"github.com/charmbracelet/lipgloss"
@ -17,19 +16,15 @@ import (
// styles
var (
boldStyle = lipgloss.NewStyle().Bold(true)
faintStyle = lipgloss.NewStyle().Faint(true)
boldFaintStyle = lipgloss.NewStyle().Faint(true).Bold(true)
messageHeadingStyle = lipgloss.NewStyle().
MarginTop(1).
MarginBottom(1)
MarginBottom(1).
PaddingLeft(1).
Bold(true)
userStyle = boldFaintStyle.Foreground(lipgloss.Color("10"))
userStyle = lipgloss.NewStyle().Faint(true).Foreground(lipgloss.Color("10"))
assistantStyle = boldFaintStyle.Foreground(lipgloss.Color("12"))
systemStyle = boldStyle.Foreground(lipgloss.Color("8"))
assistantStyle = lipgloss.NewStyle().Faint(true).Foreground(lipgloss.Color("12"))
messageStyle = lipgloss.NewStyle().
PaddingLeft(2).
@ -42,14 +37,40 @@ var (
Faint(true).
Border(lipgloss.RoundedBorder(), true, true, true, false)
footerStyle = lipgloss.NewStyle().Padding(0, 1)
footerStyle = lipgloss.NewStyle()
)
func (m *Model) renderMessageHeading(i int, message *conversation.Message) string {
func (m Model) View() string {
if m.Width == 0 {
return ""
}
sections := make([]string, 0, 6)
if m.Header != "" {
sections = append(sections, m.Header)
}
sections = append(sections, m.Content)
if m.Error != "" {
sections = append(sections, m.Error)
}
sections = append(sections, m.Input)
if m.Footer != "" {
sections = append(sections, m.Footer)
}
return lipgloss.JoinVertical(lipgloss.Left, sections...)
}
func (m *Model) renderMessageHeading(i int, message *api.Message) string {
icon := ""
friendly := message.Role.FriendlyRole()
style := systemStyle
style := lipgloss.NewStyle().Faint(true).Bold(true)
switch message.Role {
case api.MessageRoleSystem:
icon = "⚙️"
case api.MessageRoleUser:
style = userStyle
case api.MessageRoleAssistant:
@ -57,75 +78,70 @@ func (m *Model) renderMessageHeading(i int, message *conversation.Message) strin
case api.MessageRoleToolCall:
style = assistantStyle
friendly = api.MessageRoleAssistant.FriendlyRole()
case api.MessageRoleSystem:
case api.MessageRoleToolResult:
icon = "🔧"
}
user := style.Render(friendly)
user := style.Render(icon + friendly)
var prefix, suffix string
var prefix string
var suffix string
if i == m.selectedMessage && m.focus == focusMessages {
prefix = "> "
} else {
prefix = " "
}
faint := lipgloss.NewStyle().Faint(true)
if i == 0 && m.App.Conversation.SelectedRootID != nil && len(m.App.Conversation.RootMessages) > 1 {
if i == 0 && len(m.rootMessages) > 1 && m.conversation.SelectedRootID != nil {
selectedRootIndex := 0
for j, reply := range m.App.Conversation.RootMessages {
if reply.ID == *m.App.Conversation.SelectedRootID {
for j, reply := range m.rootMessages {
if reply.ID == *m.conversation.SelectedRootID {
selectedRootIndex = j
break
}
}
suffix += faintStyle.Render(fmt.Sprintf(" <%d/%d>", selectedRootIndex+1, len(m.App.Conversation.RootMessages)))
suffix += faint.Render(fmt.Sprintf(" <%d/%d>", selectedRootIndex+1, len(m.rootMessages)))
}
if i > 0 && len(m.App.Messages[i-1].Replies) > 1 {
if i > 0 && len(m.messages[i-1].Replies) > 1 {
// Find the selected reply index
selectedReplyIndex := 0
for j, reply := range m.App.Messages[i-1].Replies {
if reply.ID == *m.App.Messages[i-1].SelectedReplyID {
for j, reply := range m.messages[i-1].Replies {
if reply.ID == *m.messages[i-1].SelectedReplyID {
selectedReplyIndex = j
break
}
}
suffix += faintStyle.Render(fmt.Sprintf(" <%d/%d>", selectedReplyIndex+1, len(m.App.Messages[i-1].Replies)))
suffix += faint.Render(fmt.Sprintf(" <%d/%d>", selectedReplyIndex+1, len(m.messages[i-1].Replies)))
}
if m.focus == focusMessages {
if i == m.selectedMessage {
prefix = "> "
}
}
if message.ID == 0 {
suffix += faintStyle.Render(" (not saved)")
suffix += faint.Render(" (not saved)")
}
heading := prefix + user + suffix
if message.Metadata.GenerationModel != nil && m.showDetails {
heading += faintStyle.Render(
fmt.Sprintf(" | %s", *message.Metadata.GenerationModel),
)
}
return messageHeadingStyle.Render(heading)
return messageHeadingStyle.Render(prefix + user + suffix)
}
// renderMessage renders the message at the given index as it should be shown
// *at this moment* - we render differently depending on the current application
// state (window size, etc).
func (m *Model) renderMessage(i int) string {
msg := &m.App.Messages[i]
msg := &m.messages[i]
// Write message contents
sb := &strings.Builder{}
sb.Grow(len(msg.Content) * 2)
if msg.Content != "" {
err := m.App.Ctx.Chroma.Highlight(sb, msg.Content)
err := m.Shared.Ctx.Chroma.Highlight(sb, msg.Content)
if err != nil {
sb.Reset()
sb.WriteString(msg.Content)
}
}
isLast := i == len(m.App.Messages)-1
isLast := i == len(m.messages)-1
isAssistant := msg.Role == api.MessageRoleAssistant
if m.state == pendingResponse && isLast && isAssistant {
@ -151,7 +167,7 @@ func (m *Model) renderMessage(i int) string {
var toolResults []renderedResult
for _, result := range msg.ToolResults {
if m.showDetails {
if m.showToolResults {
var jsonResult interface{}
err := json.Unmarshal([]byte(result.Result), &jsonResult)
if err != nil {
@ -189,7 +205,7 @@ func (m *Model) renderMessage(i int) string {
if msg.Content != "" {
sb.WriteString("\n\n")
}
_ = m.App.Ctx.Chroma.HighlightLang(sb, toolString, "yaml")
_ = m.Shared.Ctx.Chroma.HighlightLang(sb, toolString, "yaml")
}
content := strings.TrimRight(sb.String(), "\n")
@ -207,16 +223,12 @@ func (m *Model) renderMessage(i int) string {
// render the conversation into a string
func (m *Model) conversationMessagesView() string {
m.messageOffsets = make([]int, len(m.App.Messages))
lineCnt := 1
sb := strings.Builder{}
for i, message := range m.App.Messages {
m.messageOffsets[i] = lineCnt
if !m.showDetails && message.Role.IsSystem() {
continue
}
m.messageOffsets = make([]int, len(m.messages))
lineCnt := 1
for i, message := range m.messages {
m.messageOffsets[i] = lineCnt
heading := m.renderMessageHeading(i, &message)
sb.WriteString(heading)
@ -230,136 +242,80 @@ func (m *Model) conversationMessagesView() string {
}
// Render a placeholder for the incoming assistant reply
if m.state == pendingResponse && m.App.Messages[len(m.App.Messages)-1].Role != api.MessageRoleAssistant {
heading := m.renderMessageHeading(-1, &conversation.Message{
if m.state == pendingResponse && m.messages[len(m.messages)-1].Role != api.MessageRoleAssistant {
heading := m.renderMessageHeading(-1, &api.Message{
Role: api.MessageRoleAssistant,
Metadata: conversation.MessageMeta{
GenerationModel: &m.App.Model,
},
})
sb.WriteString(heading)
sb.WriteString("\n")
sb.WriteString(messageStyle.Width(0).Render(m.replyCursor.View()))
sb.WriteString("\n")
}
return sb.String()
}
func (m *Model) Content(width, height int) string {
// calculate clamped input height to accommodate the input text
// minimum 4 lines, maximum half of content area
inputHeight := max(4, min(height/2, m.input.LineCount()))
m.input.SetHeight(inputHeight)
input := m.input.View()
// remaining height towards content
m.content.Width, m.content.Height = width, height-tuiutil.Height(input)
content := m.content.View()
return lipgloss.JoinVertical(lipgloss.Left, content, input)
}
func (m *Model) Header(width int) string {
func (m *Model) headerView() string {
titleStyle := lipgloss.NewStyle().Bold(true)
var title string
if m.App.Conversation.Title != "" {
title = m.App.Conversation.Title
if m.conversation != nil && m.conversation.Title != "" {
title = m.conversation.Title
} else {
title = "Untitled"
}
title = tuiutil.TruncateRightToCellWidth(title, width-styles.Header.GetHorizontalPadding(), "...")
title = tuiutil.TruncateToCellWidth(title, m.Width-styles.Header.GetHorizontalPadding(), "...")
header := titleStyle.Render(title)
return styles.Header.Width(width).Render(header)
return styles.Header.Width(m.Width).Render(header)
}
func (m *Model) Footer(width int) string {
segmentStyle := lipgloss.NewStyle().Faint(true)
segmentSeparator := segmentStyle.Render(" | ")
func (m *Model) footerView() string {
segmentStyle := lipgloss.NewStyle().PaddingLeft(1).PaddingRight(1).Faint(true)
segmentSeparator := "|"
// Left segments
leftSegments := make([]string, 0, 4)
if m.state == pendingResponse {
leftSegments = append(leftSegments, segmentStyle.Render(m.spinner.View()))
savingStyle := segmentStyle.Copy().Bold(true)
saving := ""
if m.persistence {
saving = savingStyle.Foreground(lipgloss.Color("2")).Render("✅💾")
} else {
leftSegments = append(leftSegments, segmentStyle.Render("∙∙∙"))
saving = savingStyle.Foreground(lipgloss.Color("1")).Render("❌💾")
}
var status string
switch m.state {
case pendingResponse:
status = "Press ctrl+c to cancel" + m.spinner.View()
default:
status = "Press ctrl+s to send"
}
leftSegments := []string{
saving,
segmentStyle.Render(status),
}
rightSegments := []string{}
if m.elapsed > 0 && m.tokenCount > 0 {
throughput := fmt.Sprintf("%.0f t/sec", float64(m.tokenCount)/m.elapsed.Seconds())
leftSegments = append(leftSegments, segmentStyle.Render(throughput))
rightSegments = append(rightSegments, segmentStyle.Render(throughput))
}
// var status string
// switch m.state {
// case pendingResponse:
// status = "Press ctrl+c to cancel"
// default:
// status = "Press ctrl+s to send"
// }
// leftSegments = append(leftSegments, segmentStyle.Render(status))
model := fmt.Sprintf("Model: %s", *m.Shared.Ctx.Config.Defaults.Model)
rightSegments = append(rightSegments, segmentStyle.Render(model))
// Right segments
rightSegments := make([]string, 0, 8)
if m.App.Agent != nil {
rightSegments = append(rightSegments, segmentStyle.Render(m.App.Agent.Name))
}
model := segmentStyle.Render(m.App.ActiveModel(lipgloss.NewStyle()))
rightSegments = append(rightSegments, model)
savingStyle := segmentStyle.Bold(true)
saving := ""
if m.persistence {
saving = savingStyle.Foreground(lipgloss.Color("2")).Render("💾✅")
} else {
saving = savingStyle.Foreground(lipgloss.Color("1")).Render("💾❌")
}
rightSegments = append(rightSegments, saving)
return m.layoutFooter(width, leftSegments, rightSegments, segmentSeparator)
}
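// layoutFooter joins the left and right footer segments, padding the gap to fill the width, or proportionally truncating both sides when they don't fit.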
func (m *Model) layoutFooter(
width int,
leftSegments []string,
rightSegments []string,
segmentSeparator string,
) string {
left := strings.Join(leftSegments, segmentSeparator)
right := strings.Join(rightSegments, segmentSeparator)
leftWidth := tuiutil.Width(left)
rightWidth := tuiutil.Width(right)
sepWidth := tuiutil.Width(segmentSeparator)
frameWidth := footerStyle.GetHorizontalFrameSize()
totalWidth := lipgloss.Width(left) + lipgloss.Width(right)
remaining := m.Width - totalWidth
availableWidth := width - frameWidth - leftWidth - rightWidth
if availableWidth >= sepWidth {
// Everything fits
padding := strings.Repeat(" ", availableWidth)
return footerStyle.Render(left + padding + right)
var padding string
if remaining > 0 {
padding = strings.Repeat(" ", remaining)
}
// Inserted between left and right segments when they're being truncated
div := "..."
totalAvailableWidth := width - frameWidth
availableTruncWidth := totalAvailableWidth - len(div)
minVisibleLength := 3
if availableTruncWidth < 2*minVisibleLength {
minVisibleLength = availableTruncWidth / 2
footer := left + padding + right
if remaining < 0 {
footer = tuiutil.TruncateToCellWidth(footer, m.Width, "...")
}
leftProportion := float64(leftWidth) / float64(leftWidth+rightWidth)
newLeftWidth := int(max(float64(minVisibleLength), leftProportion*float64(availableTruncWidth)))
newRightWidth := totalAvailableWidth - newLeftWidth
truncatedLeft := faintStyle.Render(tuiutil.TruncateRightToCellWidth(left, newLeftWidth, ""))
truncatedRight := faintStyle.Render(tuiutil.TruncateLeftToCellWidth(right, newRightWidth, "..."))
return footerStyle.Width(width).Render(truncatedLeft + truncatedRight)
return footerStyle.Width(m.Width).Render(footer)
}


@ -5,9 +5,8 @@ import (
"strings"
"time"
"git.mlow.ca/mlow/lmcli/pkg/conversation"
"git.mlow.ca/mlow/lmcli/pkg/api"
"git.mlow.ca/mlow/lmcli/pkg/tui/bubbles"
"git.mlow.ca/mlow/lmcli/pkg/tui/model"
"git.mlow.ca/mlow/lmcli/pkg/tui/shared"
"git.mlow.ca/mlow/lmcli/pkg/tui/styles"
tuiutil "git.mlow.ca/mlow/lmcli/pkg/tui/util"
@ -17,56 +16,67 @@ import (
"github.com/charmbracelet/lipgloss"
)
type loadedConversation struct {
conv api.Conversation
lastReply api.Message
}
type (
// sent when conversation list is loaded
msgConversationsLoaded conversation.ConversationList
// sent when a single conversation is loaded
msgConversationLoaded conversation.Conversation
msgConversationsLoaded ([]loadedConversation)
// sent when a conversation is selected
msgConversationSelected api.Conversation
// sent when a conversation is deleted
msgConversationDeleted struct{}
)
type Model struct {
App *model.AppModel
width int
height int
// Prompt payloads
type (
deleteConversationPayload api.Conversation
)
cursor int
itemOffsets []int // conversation y offsets
type Model struct {
shared.Shared
shared.Sections
conversations []loadedConversation
cursor int // index of the currently selected conversation
itemOffsets []int // keeps track of the viewport y offset of each rendered item
content viewport.Model
confirmPrompt bubbles.ConfirmPrompt
}
func Conversations(app *model.AppModel) *Model {
viewport.New(0, 0)
func Conversations(shared shared.Shared) Model {
m := Model{
App: app,
Shared: shared,
content: viewport.New(0, 0),
}
return &m
return m
}
func (m *Model) handleInput(msg tea.KeyMsg) tea.Cmd {
func (m *Model) HandleInput(msg tea.KeyMsg) (bool, tea.Cmd) {
if m.confirmPrompt.Focused() {
var cmd tea.Cmd
m.confirmPrompt, cmd = m.confirmPrompt.Update(msg)
if cmd != nil {
return cmd
return true, cmd
}
}
conversations := m.App.Conversations.Items
switch msg.String() {
case "enter":
if len(conversations) > 0 && m.cursor < len(conversations) {
return m.loadConversation(conversations[m.cursor].ID)
if len(m.conversations) > 0 && m.cursor < len(m.conversations) {
return true, func() tea.Msg {
return msgConversationSelected(m.conversations[m.cursor].conv)
}
}
case "j", "down":
if m.cursor < len(conversations)-1 {
if m.cursor < len(m.conversations)-1 {
m.cursor++
if m.cursor == len(conversations)-1 {
if m.cursor == len(m.conversations)-1 {
// if last conversation, simply scroll to the bottom
m.content.GotoBottom()
} else {
// this hack positions the *next* conversation slightly
@ -76,10 +86,10 @@ func (m *Model) handleInput(msg tea.KeyMsg) tea.Cmd {
}
m.content.SetContent(m.renderConversationList())
} else {
m.cursor = len(conversations) - 1
m.cursor = len(m.conversations) - 1
m.content.GotoBottom()
}
return shared.KeyHandled(msg)
return true, nil
case "k", "up":
if m.cursor > 0 {
m.cursor--
@ -93,24 +103,23 @@ func (m *Model) handleInput(msg tea.KeyMsg) tea.Cmd {
m.cursor = 0
m.content.GotoTop()
}
return shared.KeyHandled(msg)
return true, nil
case "n":
m.App.NewConversation()
return shared.ChangeView(shared.ViewChat)
// new conversation
case "d":
if !m.confirmPrompt.Focused() && len(conversations) > 0 && m.cursor < len(conversations) {
title := conversations[m.cursor].Title
if !m.confirmPrompt.Focused() && len(m.conversations) > 0 && m.cursor < len(m.conversations) {
title := m.conversations[m.cursor].conv.Title
if title == "" {
title = "(untitled)"
}
m.confirmPrompt = bubbles.NewConfirmPrompt(
fmt.Sprintf("Delete '%s'?", title),
conversations[m.cursor],
deleteConversationPayload(m.conversations[m.cursor].conv),
)
m.confirmPrompt.Style = lipgloss.NewStyle().
Bold(true).
Foreground(lipgloss.Color("3"))
return shared.KeyHandled(msg)
return true, nil
}
case "c":
// copy/clone conversation
@ -119,115 +128,122 @@ func (m *Model) handleInput(msg tea.KeyMsg) tea.Cmd {
case "shift+r":
// show prompt to generate name for conversation
}
return false, nil
}
func (m Model) Init() tea.Cmd {
return nil
}
func (m *Model) Init() tea.Cmd {
return nil
func (m *Model) HandleResize(width, height int) {
m.Width, m.Height = width, height
m.content.Width = width
}
func (m *Model) Update(msg tea.Msg) (shared.ViewModel, tea.Cmd) {
isInput := false
inputHandled := false
func (m Model) Update(msg tea.Msg) (Model, tea.Cmd) {
var cmds []tea.Cmd
switch msg := msg.(type) {
case tea.KeyMsg:
isInput = true
cmd := m.handleInput(msg)
if cmd != nil {
cmds = append(cmds, cmd)
inputHandled = true
}
case shared.MsgViewEnter:
cmds = append(cmds, m.loadConversations())
m.content.SetContent(m.renderConversationList())
case tea.WindowSizeMsg:
m.width, m.height = msg.Width, msg.Height
m.HandleResize(msg.Width, msg.Height)
m.content.SetContent(m.renderConversationList())
case msgConversationsLoaded:
m.App.Conversations = conversation.ConversationList(msg)
m.cursor = max(0, min(len(m.App.Conversations.Items), m.cursor))
m.conversations = msg
m.cursor = max(0, min(len(m.conversations), m.cursor))
m.content.SetContent(m.renderConversationList())
case msgConversationLoaded:
m.App.ClearConversation()
m.App.Conversation = conversation.Conversation(msg)
case msgConversationSelected:
m.Values.ConvShortname = msg.ShortName.String
cmds = append(cmds, func() tea.Msg {
return shared.MsgViewChange(shared.ViewChat)
return shared.MsgViewChange(shared.StateChat)
})
case bubbles.MsgConfirmPromptAnswered:
m.confirmPrompt.Blur()
if msg.Value {
conv, ok := msg.Payload.(conversation.ConversationListItem)
if ok {
cmds = append(cmds, m.deleteConversation(conv))
switch payload := msg.Payload.(type) {
case deleteConversationPayload:
cmds = append(cmds, m.deleteConversation(api.Conversation(payload)))
}
}
case msgConversationDeleted:
cmds = append(cmds, m.loadConversations())
}
if !isInput || !inputHandled {
content, cmd := m.content.Update(msg)
m.content = content
var cmd tea.Cmd
m.content, cmd = m.content.Update(msg)
if cmd != nil {
cmds = append(cmds, cmd)
}
}
if len(cmds) > 0 {
if m.Width > 0 {
wrap := lipgloss.NewStyle().Width(m.Width)
m.Header = m.headerView()
m.Footer = "" // TODO: "Press ? for help"
if m.confirmPrompt.Focused() {
m.Footer = wrap.Render(m.confirmPrompt.View())
}
m.Error = tuiutil.ErrorBanner(m.Err, m.Width)
fixedHeight := tuiutil.Height(m.Header) + tuiutil.Height(m.Error) + tuiutil.Height(m.Footer)
m.content.Height = m.Height - fixedHeight
m.Content = m.content.View()
}
return m, tea.Batch(cmds...)
}
return m, nil
}
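// loadConversations loads the conversation list and emits msgConversationsLoaded, or an error message on failure.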
func (m *Model) loadConversations() tea.Cmd {
return func() tea.Msg {
list, err := m.App.Ctx.Conversations.LoadConversationList()
messages, err := m.Ctx.Store.LatestConversationMessages()
if err != nil {
return shared.AsMsgError(fmt.Errorf("Could not load conversations: %v", err))
return shared.MsgError(fmt.Errorf("Could not load conversations: %v", err))
}
return msgConversationsLoaded(list)
loaded := make([]loadedConversation, len(messages))
for i, m := range messages {
loaded[i].lastReply = m
loaded[i].conv = *m.Conversation
}
return msgConversationsLoaded(loaded)
}
}
func (m *Model) loadConversation(conversationID uint) tea.Cmd {
func (m *Model) deleteConversation(conv api.Conversation) tea.Cmd {
return func() tea.Msg {
conversation, err := m.App.Ctx.Conversations.GetConversationByID(conversationID)
err := m.Ctx.Store.DeleteConversation(&conv)
if err != nil {
return shared.AsMsgError(fmt.Errorf("Could not load conversation %d: %v", conversationID, err))
}
return msgConversationLoaded(*conversation)
}
}
func (m *Model) deleteConversation(conv conversation.ConversationListItem) tea.Cmd {
return func() tea.Msg {
err := m.App.Ctx.Conversations.DeleteConversationById(conv.ID)
if err != nil {
return shared.AsMsgError(fmt.Errorf("Could not delete conversation: %v", err))
return shared.MsgError(fmt.Errorf("Could not delete conversation: %v", err))
}
return msgConversationDeleted{}
}
}
func (m *Model) Header(width int) string {
func (m Model) View() string {
if m.Width == 0 {
return ""
}
sections := make([]string, 0, 6)
if m.Header != "" {
sections = append(sections, m.Header)
}
sections = append(sections, m.Content)
if m.Error != "" {
sections = append(sections, m.Error)
}
if m.Footer != "" {
sections = append(sections, m.Footer)
}
return lipgloss.JoinVertical(lipgloss.Left, sections...)
}
func (m *Model) headerView() string {
titleStyle := lipgloss.NewStyle().Bold(true)
header := titleStyle.Render("Conversations")
return styles.Header.Width(width).Render(header)
}
func (m *Model) Content(width int, height int) string {
m.content.Width, m.content.Height = width, height
return m.content.View()
}
func (m *Model) Footer(width int) string {
if m.confirmPrompt.Focused() {
return lipgloss.NewStyle().Width(width).Render(m.confirmPrompt.View())
}
return ""
return styles.Header.Width(m.Width).Render(header)
}
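// renderConversationList renders the conversation list into a single string, recording each item's viewport y offset in itemOffsets.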
func (m *Model) renderConversationList() string {
@ -273,12 +289,12 @@ func (m *Model) renderConversationList() string {
sb strings.Builder
)
m.itemOffsets = make([]int, len(m.App.Conversations.Items))
m.itemOffsets = make([]int, len(m.conversations))
sb.WriteRune('\n')
currentOffset += 1
for i, c := range m.App.Conversations.Items {
lastReplyAge := now.Sub(c.LastMessageAt)
for i, c := range m.conversations {
lastReplyAge := now.Sub(c.lastReply.CreatedAt)
var category string
for _, g := range categories {
@ -297,15 +313,15 @@ func (m *Model) renderConversationList() string {
sb.WriteRune('\n')
}
tStyle := titleStyle
if c.Title == "" {
tStyle := titleStyle.Copy()
if c.conv.Title == "" {
tStyle = tStyle.Inherit(untitledStyle).SetString("(untitled)")
}
if i == m.cursor {
tStyle = tStyle.Inherit(selectedStyle)
}
title := tStyle.Width(m.width - 3).PaddingLeft(2).Render(c.Title)
title := tStyle.Width(m.Width - 3).PaddingLeft(2).Render(c.conv.Title)
if i == m.cursor {
title = ">" + title[1:]
}
@ -318,7 +334,7 @@ func (m *Model) renderConversationList() string {
))
sb.WriteString(item)
currentOffset += tuiutil.Height(item)
if i < len(m.App.Conversations.Items)-1 {
if i < len(m.conversations)-1 {
sb.WriteRune('\n')
}
}


@ -1,137 +0,0 @@
package settings
import (
"strings"
"git.mlow.ca/mlow/lmcli/pkg/tui/bubbles/list"
"git.mlow.ca/mlow/lmcli/pkg/tui/model"
"git.mlow.ca/mlow/lmcli/pkg/tui/shared"
"git.mlow.ca/mlow/lmcli/pkg/tui/styles"
"github.com/charmbracelet/bubbles/viewport"
tea "github.com/charmbracelet/bubbletea"
"github.com/charmbracelet/lipgloss"
)
type Model struct {
App *model.AppModel
prevView shared.View
content viewport.Model
modelList list.Model
width int
height int
}
type modelOpt struct {
provider string
model string
}
const (
modelListId int = iota + 1
)
func Settings(app *model.AppModel) *Model {
m := &Model{
App: app,
content: viewport.New(0, 0),
}
return m
}
func (m *Model) Init() tea.Cmd {
m.modelList = list.NewWithGroups(m.getModelOptions())
m.modelList.ID = modelListId
return nil
}
func (m *Model) Update(msg tea.Msg) (shared.ViewModel, tea.Cmd) {
var cmd tea.Cmd
switch msg := msg.(type) {
case tea.KeyMsg:
m.modelList, cmd = m.modelList.Update(msg)
if cmd != nil {
return m, cmd
}
switch msg.String() {
case "esc":
return m, func() tea.Msg {
return shared.MsgViewChange(m.prevView)
}
}
case shared.MsgViewEnter:
m.prevView = shared.View(msg)
m.modelList.Focus()
m.content.SetContent(m.renderContent())
case tea.WindowSizeMsg:
m.width, m.height = msg.Width, msg.Height
m.content.Width = msg.Width
m.content.Height = msg.Height
m.content.SetContent(m.renderContent())
case list.MsgOptionSelected:
switch msg.ID {
case modelListId:
if modelOpt, ok := msg.Option.Value.(modelOpt); ok {
m.App.Model = modelOpt.model
m.App.ProviderName = modelOpt.provider
}
return m, shared.ChangeView(m.prevView)
}
}
m.modelList, cmd = m.modelList.Update(msg)
if cmd != nil {
return m, cmd
}
m.content.SetContent(m.renderContent())
return m, nil
}
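// getModelOptions builds the grouped model options shown in the model list, one group per configured provider.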
func (m *Model) getModelOptions() []list.OptionGroup {
modelOpts := []list.OptionGroup{}
for _, p := range m.App.Ctx.Config.Providers {
provider := p.Name
if provider == "" {
provider = p.Kind
}
providerLabel := p.Display
if providerLabel == "" {
providerLabel = strings.ToUpper(provider[:1]) + provider[1:]
}
group := list.OptionGroup{
Name: providerLabel,
}
for _, model := range p.Models {
group.Options = append(group.Options, list.Option{
Label: model,
Value: modelOpt{provider, model},
})
}
modelOpts = append(modelOpts, group)
}
return modelOpts
}
func (m *Model) Header(width int) string {
boldStyle := lipgloss.NewStyle().Bold(true)
// TODO: update header depending on active settings mode (model, agent, etc)
header := boldStyle.Render("Model selection")
return styles.Header.Width(width).Render(header)
}
func (m *Model) Content(width, height int) string {
// TODO: see Header()
currentModel := " Active model: " + m.App.ActiveModel(lipgloss.NewStyle())
m.modelList.Width, m.modelList.Height = width, height - 2
return "\n" + currentModel + "\n" + m.modelList.View()
}
func (m *Model) Footer(width int) string {
return ""
}
func (m *Model) renderContent() string {
return m.modelList.View()
}


@ -138,7 +138,7 @@ func SetStructDefaults(data interface{}) bool {
// Get the "default" struct tag
defaultTag, ok := v.Type().Field(i).Tag.Lookup("default")
if !ok {
if (!ok) {
continue
}