Introduce "agents"

An agent is currently a name given to a system prompt and the set of
tools the agent has access to.

This resolves the previous issue of the set of configured tools being
available in *all* contexts, which wasn't always desired. Tools are now
only available when an agent is explicitly requested using the
`-a/--agent` flag.
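
For illustration, an agent is defined under the new `agents` section of
`config.yaml` (the key names follow the updated `Config` struct in the
diff below; the `code-helper` name and `dir_tree` tool come from the
TODO, and the prompt text is only a placeholder):

```yaml
agents:
  - name: code-helper
    # Overrides the default system prompt when this agent is selected
    # (placeholder text, not a suggested prompt).
    systemPrompt: You are a careful pair-programming assistant.
    # Tool names must match entries in agents.AvailableTools.
    tools:
      - dir_tree
```

The agent is then requested explicitly, e.g. `lmcli chat -a code-helper`.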

Agents are expected to be expanded on: task-specialized agents (e.g. for
coding), the ability to define a set of files an agent should always
have access to for RAG purposes, and so on.

Other changes:

- Removes the `tools` top-level config structure (though this is expected
to come back along with the ability to define custom tools); the old
structure is shown after this list for reference.

- Renames `pkg/agent` to `pkg/agents`.
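
For reference, the removed top-level structure (the old `Tools` /
`enabledTools` fields visible in the config diff) looked like the
snippet below; tools are now listed per agent instead:

```yaml
tools:
  enabledTools:
    - dir_tree
```
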
Matt Low 2024-06-23 18:57:08 +00:00
parent cea5118cac
commit 8ddac2f820
15 changed files with 108 additions and 37 deletions


@@ -4,15 +4,15 @@
when calling anthropic?
- [x] `dir_tree` tool
- [x] Implement native Anthropic API tool calling
- [ ] Agents - a name given to a system prompt + set of available tools +
- [x] Agents - a name given to a system prompt + set of available tools +
potentially other relevent data (e.g. external service credentials, files for
RAG, etc), which the user explicitly selects (e.g. `lmcli chat --agent
pair-programmer`, `lmcli chat -a financier`).
- Specialized agents which have integrations beyond basic tool calling,
code-helper`, `lmcli chat -a financier`).
- [ ] Specialized agents which have integrations beyond basic tool calling,
e.g. a coding agent which bakes in efficient code context management
(only the current state of relevant files get shown to the model in the
system prompt, rather than having them in the conversation messages)
- Agents may have some form of long term memory management (key-value?
- [ ] Agents may have some form of long term memory management (key-value?
natural lang?).
- [ ] Support for arbitrary external script tools
- [ ] Search - RAG driven search of existing conversation "hey, remind me of


@@ -7,7 +7,7 @@ import (
"strconv"
"strings"
toolutil "git.mlow.ca/mlow/lmcli/pkg/agent/toolbox/util"
toolutil "git.mlow.ca/mlow/lmcli/pkg/agents/toolbox/util"
"git.mlow.ca/mlow/lmcli/pkg/api"
)


@@ -5,7 +5,7 @@ import (
"os"
"strings"
toolutil "git.mlow.ca/mlow/lmcli/pkg/agent/toolbox/util"
toolutil "git.mlow.ca/mlow/lmcli/pkg/agents/toolbox/util"
"git.mlow.ca/mlow/lmcli/pkg/api"
)


@@ -5,7 +5,7 @@ import (
"os"
"strings"
toolutil "git.mlow.ca/mlow/lmcli/pkg/agent/toolbox/util"
toolutil "git.mlow.ca/mlow/lmcli/pkg/agents/toolbox/util"
"git.mlow.ca/mlow/lmcli/pkg/api"
)


@@ -6,7 +6,7 @@ import (
"path/filepath"
"strings"
toolutil "git.mlow.ca/mlow/lmcli/pkg/agent/toolbox/util"
toolutil "git.mlow.ca/mlow/lmcli/pkg/agents/toolbox/util"
"git.mlow.ca/mlow/lmcli/pkg/api"
)


@@ -5,7 +5,7 @@ import (
"os"
"strings"
toolutil "git.mlow.ca/mlow/lmcli/pkg/agent/toolbox/util"
toolutil "git.mlow.ca/mlow/lmcli/pkg/agents/toolbox/util"
"git.mlow.ca/mlow/lmcli/pkg/api"
)


@@ -4,7 +4,7 @@ import (
"fmt"
"os"
toolutil "git.mlow.ca/mlow/lmcli/pkg/agent/toolbox/util"
toolutil "git.mlow.ca/mlow/lmcli/pkg/agents/toolbox/util"
"git.mlow.ca/mlow/lmcli/pkg/api"
)


@@ -1,9 +1,9 @@
package agent
package agents
import (
"fmt"
"git.mlow.ca/mlow/lmcli/pkg/agent/toolbox"
"git.mlow.ca/mlow/lmcli/pkg/agents/toolbox"
"git.mlow.ca/mlow/lmcli/pkg/api"
)


@@ -51,6 +51,12 @@ func applyGenerationFlags(ctx *lmcli.Context, cmd *cobra.Command) {
return ctx.GetModels(), cobra.ShellCompDirectiveDefault
})
// -a, --agent
f.StringVarP(&ctx.Config.Defaults.Agent, "agent", "a", ctx.Config.Defaults.Agent, "Which agent to interact with")
cmd.RegisterFlagCompletionFunc("agent", func(*cobra.Command, []string, string) ([]string, cobra.ShellCompDirective) {
return ctx.GetAgents(), cobra.ShellCompDirectiveDefault
})
// --max-length
f.IntVar(ctx.Config.Defaults.MaxTokens, "max-length", *ctx.Config.Defaults.MaxTokens, "Maximum response tokens")
// --temperature
@@ -65,14 +71,21 @@ func applyGenerationFlags(ctx *lmcli.Context, cmd *cobra.Command) {
func validateGenerationFlags(ctx *lmcli.Context, cmd *cobra.Command) error {
f := cmd.Flags()
model, err := f.GetString("model")
if err != nil {
return fmt.Errorf("Error parsing --model: %w", err)
}
if !slices.Contains(ctx.GetModels(), model) {
if model != "" && !slices.Contains(ctx.GetModels(), model) {
return fmt.Errorf("Unknown model: %s", model)
}
agent, err := f.GetString("agent")
if err != nil {
return fmt.Errorf("Error parsing --agent: %w", err)
}
if agent != "" && !slices.Contains(ctx.GetAgents(), agent) {
return fmt.Errorf("Unknown agent: %s", agent)
}
return nil
}


@@ -29,6 +29,15 @@ func Prompt(ctx *lmcli.Context, messages []api.Message, callback func(api.Messag
}
system := ctx.DefaultSystemPrompt()
agent := ctx.GetAgent(ctx.Config.Defaults.Agent)
if agent != nil {
if agent.SystemPrompt != "" {
system = agent.SystemPrompt
}
params.ToolBag = agent.Toolbox
}
if system != "" {
messages = api.ApplySystemPrompt(messages, system, false)
}


@@ -15,6 +15,8 @@ type Config struct {
Temperature *float32 `yaml:"temperature" default:"0.2"`
SystemPrompt string `yaml:"systemPrompt,omitempty"`
SystemPromptFile string `yaml:"systemPromptFile,omitempty"`
// CLI only
Agent string `yaml:"-"`
} `yaml:"defaults"`
Conversations *struct {
TitleGenerationModel *string `yaml:"titleGenerationModel" default:"gpt-3.5-turbo"`
@@ -23,9 +25,11 @@ type Config struct {
Style *string `yaml:"style" default:"onedark"`
Formatter *string `yaml:"formatter" default:"terminal16m"`
} `yaml:"chroma"`
Tools *struct {
EnabledTools []string `yaml:"enabledTools"`
} `yaml:"tools"`
Agents []*struct {
Name string `yaml:"name"`
SystemPrompt string `yaml:"systemPrompt"`
Tools []string `yaml:"tools"`
} `yaml:"agents"`
Providers []*struct {
Name string `yaml:"name,omitempty"`
Kind string `yaml:"kind"`


@@ -6,7 +6,7 @@ import (
"path/filepath"
"strings"
"git.mlow.ca/mlow/lmcli/pkg/agent"
"git.mlow.ca/mlow/lmcli/pkg/agents"
"git.mlow.ca/mlow/lmcli/pkg/api"
"git.mlow.ca/mlow/lmcli/pkg/api/provider/anthropic"
"git.mlow.ca/mlow/lmcli/pkg/api/provider/google"
@@ -18,20 +18,24 @@ import (
"gorm.io/gorm"
)
type Agent struct {
Name string
SystemPrompt string
Toolbox []api.ToolSpec
}
type Context struct {
// high level app configuration, may be mutated at runtime
Config Config
Store ConversationStore
Chroma *tty.ChromaHighlighter
EnabledTools []api.ToolSpec
}
func NewContext() (*Context, error) {
configFile := filepath.Join(configDir(), "config.yaml")
config, err := NewConfig(configFile)
if err != nil {
Fatal("%v\n", err)
return nil, err
}
databaseFile := filepath.Join(dataDir(), "conversations.db")
@@ -43,20 +47,12 @@ func NewContext() (*Context, error) {
}
store, err := NewSQLStore(db)
if err != nil {
Fatal("%v\n", err)
return nil, err
}
chroma := tty.NewChromaHighlighter("markdown", *config.Chroma.Formatter, *config.Chroma.Style)
var enabledTools []api.ToolSpec
for _, toolName := range config.Tools.EnabledTools {
tool, ok := agent.AvailableTools[toolName]
if ok {
enabledTools = append(enabledTools, tool)
}
}
return &Context{*config, store, chroma, enabledTools}, nil
return &Context{*config, store, chroma}, nil
}
func (c *Context) GetModels() (models []string) {
@@ -82,6 +78,40 @@ func (c *Context) GetModels() (models []string) {
return
}
func (c *Context) GetAgents() (agents []string) {
for _, p := range c.Config.Agents {
agents = append(agents, p.Name)
}
return
}
func (c *Context) GetAgent(name string) *Agent {
if name == "" {
return nil
}
for _, a := range c.Config.Agents {
if name != a.Name {
continue
}
var enabledTools []api.ToolSpec
for _, toolName := range a.Tools {
tool, ok := agents.AvailableTools[toolName]
if ok {
enabledTools = append(enabledTools, tool)
}
}
return &Agent{
Name: a.Name,
SystemPrompt: a.SystemPrompt,
Toolbox: enabledTools,
}
}
return nil
}
func (c *Context) DefaultSystemPrompt() string {
if c.Config.Defaults.SystemPromptFile != "" {
content, err := util.ReadFileContents(c.Config.Defaults.SystemPromptFile)


@@ -144,6 +144,12 @@ func Chat(shared shared.Shared) Model {
m.replyCursor.Focus()
system := shared.Ctx.DefaultSystemPrompt()
agent := shared.Ctx.GetAgent(shared.Ctx.Config.Defaults.Agent)
if agent != nil && agent.SystemPrompt != "" {
system = agent.SystemPrompt
}
if system != "" {
m.messages = api.ApplySystemPrompt(m.messages, system, false)
}


@@ -6,7 +6,7 @@ import (
"fmt"
"time"
"git.mlow.ca/mlow/lmcli/pkg/agent"
"git.mlow.ca/mlow/lmcli/pkg/agents"
"git.mlow.ca/mlow/lmcli/pkg/api"
cmdutil "git.mlow.ca/mlow/lmcli/pkg/cmd/util"
"git.mlow.ca/mlow/lmcli/pkg/tui/shared"
@@ -244,7 +244,12 @@ func (m *Model) persistConversation() tea.Cmd {
func (m *Model) executeToolCalls(toolCalls []api.ToolCall) tea.Cmd {
return func() tea.Msg {
results, err := agent.ExecuteToolCalls(toolCalls, m.Ctx.EnabledTools)
agent := m.Shared.Ctx.GetAgent(m.Shared.Ctx.Config.Defaults.Agent)
if agent == nil {
return shared.MsgError(fmt.Errorf("Attempted to execute tool calls with no agent configured"))
}
results, err := agents.ExecuteToolCalls(toolCalls, agent.Toolbox)
if err != nil {
return shared.MsgError(err)
}
@@ -266,11 +271,15 @@ func (m *Model) promptLLM() tea.Cmd {
return shared.MsgError(err)
}
requestParams := api.RequestParameters{
params := api.RequestParameters{
Model: model,
MaxTokens: *m.Shared.Ctx.Config.Defaults.MaxTokens,
Temperature: *m.Shared.Ctx.Config.Defaults.Temperature,
ToolBag: m.Shared.Ctx.EnabledTools,
}
agent := m.Shared.Ctx.GetAgent(m.Shared.Ctx.Config.Defaults.Agent)
if agent != nil {
params.ToolBag = agent.Toolbox
}
ctx, cancel := context.WithCancel(context.Background())
@@ -283,7 +292,7 @@
}()
resp, err := provider.CreateChatCompletionStream(
ctx, requestParams, m.messages, m.chatReplyChunks,
ctx, params, m.messages, m.chatReplyChunks,
)
if errors.Is(err, context.Canceled) {