Renamed RequestParameters.ToolBag to Toolbox

Matt Low 2024-06-23 19:10:03 +00:00
parent 8ddac2f820
commit 914d9ac0c1
6 changed files with 9 additions and 9 deletions

@@ -18,7 +18,7 @@ type RequestParameters struct {
 	Temperature float32
 	TopP        float32
-	ToolBag     []ToolSpec
+	Toolbox     []ToolSpec
 }
 type ChatCompletionProvider interface {
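
For orientation, here is a minimal sketch of how the renamed field might sit alongside the other request parameters. Only Temperature, TopP, and Toolbox appear in this hunk; the shape of ToolSpec is an assumption for illustration, not taken from the diff.

```go
package api

// ToolSpec is assumed to describe a callable tool; only the type name
// appears in this commit, so these fields are illustrative.
type ToolSpec struct {
	Name        string
	Description string
}

// RequestParameters carries per-request settings shared across providers.
// Toolbox (formerly ToolBag) lists the tools the model may call.
type RequestParameters struct {
	Temperature float32
	TopP        float32
	Toolbox     []ToolSpec // renamed from ToolBag in this commit
}
```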

@@ -183,8 +183,8 @@ func createChatCompletionRequest(
 		Temperature: params.Temperature,
 	}
-	if len(params.ToolBag) > 0 {
-		request.Tools = convertTools(params.ToolBag)
+	if len(params.Toolbox) > 0 {
+		request.Tools = convertTools(params.Toolbox)
 	}
 	var prefill string
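
The provider hunks only call convertTools; its body is not part of this commit. A hedged sketch of what such a mapping could look like, reusing the illustrative ToolSpec above and a hypothetical provider-side tool type:

```go
// providerTool is a hypothetical provider-specific tool shape; the real
// provider types are not shown in this commit.
type providerTool struct {
	Name        string
	Description string
}

// convertTools-style helper: map the generic Toolbox entries onto the
// provider's tool representation before attaching them to the request.
func convertTools(toolbox []ToolSpec) []providerTool {
	tools := make([]providerTool, 0, len(toolbox))
	for _, t := range toolbox {
		tools = append(tools, providerTool{
			Name:        t.Name,
			Description: t.Description,
		})
	}
	return tools
}
```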

@@ -253,8 +253,8 @@ func createGenerateContentRequest(
 		}
 	}
-	if len(params.ToolBag) > 0 {
-		request.Tools = convertTools(params.ToolBag)
+	if len(params.Toolbox) > 0 {
+		request.Tools = convertTools(params.Toolbox)
 	}
 	return request, nil
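
Each provider guards the conversion behind len(params.Toolbox) > 0, so requests without tools omit the field entirely. A small test sketch of that invariant, using a hypothetical buildTools helper (and the illustrative types above) as a stand-in for the provider code:

```go
package api

import "testing"

// buildTools stands in for the guarded conversion each provider performs.
func buildTools(toolbox []ToolSpec) []providerTool {
	if len(toolbox) == 0 {
		return nil
	}
	return convertTools(toolbox)
}

func TestBuildToolsSkipsEmptyToolbox(t *testing.T) {
	if got := buildTools(nil); got != nil {
		t.Fatalf("expected no tools for an empty toolbox, got %v", got)
	}
	if got := buildTools([]ToolSpec{{Name: "read_file"}}); len(got) != 1 {
		t.Fatalf("expected 1 tool, got %d", len(got))
	}
}
```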

@@ -177,8 +177,8 @@ func createChatCompletionRequest(
 		N: 1, // limit responses to 1 "choice". we use choices[0] to reference it
 	}
-	if len(params.ToolBag) > 0 {
-		request.Tools = convertTools(params.ToolBag)
+	if len(params.Toolbox) > 0 {
+		request.Tools = convertTools(params.Toolbox)
 		request.ToolChoice = "auto"
 	}
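
In the OpenAI-style request, ToolChoice = "auto" is only set inside the same guard, presumably because sending a tool choice without any tools may be rejected. A hedged sketch of that pattern with an assumed request shape, not the real client library's types:

```go
// chatRequest is an assumed stand-in for the provider's request type.
type chatRequest struct {
	Tools      []providerTool
	ToolChoice string
}

// applyToolbox attaches tools and the tool-choice policy together, so a
// tool choice is never sent without an accompanying tools list.
func applyToolbox(req *chatRequest, params RequestParameters) {
	if len(params.Toolbox) > 0 {
		req.Tools = convertTools(params.Toolbox)
		req.ToolChoice = "auto"
	}
}
```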

@@ -35,7 +35,7 @@ func Prompt(ctx *lmcli.Context, messages []api.Message, callback func(api.Messag
 		if agent.SystemPrompt != "" {
 			system = agent.SystemPrompt
 		}
-		params.ToolBag = agent.Toolbox
+		params.Toolbox = agent.Toolbox
 	}
 	if system != "" {
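
The last two hunks copy the agent's Toolbox straight into the request parameters. A hedged sketch of that wiring, with an Agent shape inferred only from the fields referenced in the diff:

```go
// Agent is assumed to expose at least a system prompt and a toolbox,
// inferred from the fields used in the surrounding hunks.
type Agent struct {
	SystemPrompt string
	Toolbox      []ToolSpec
}

// applyAgent copies the agent's settings into the request parameters and
// returns the system prompt (if any) for the caller to apply.
func applyAgent(params *RequestParameters, agent *Agent) (system string) {
	if agent == nil {
		return ""
	}
	if agent.SystemPrompt != "" {
		system = agent.SystemPrompt
	}
	params.Toolbox = agent.Toolbox
	return system
}
```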

@@ -279,7 +279,7 @@ func (m *Model) promptLLM() tea.Cmd {
 	agent := m.Shared.Ctx.GetAgent(m.Shared.Ctx.Config.Defaults.Agent)
 	if agent != nil {
-		params.ToolBag = agent.Toolbox
+		params.Toolbox = agent.Toolbox
 	}
 	ctx, cancel := context.WithCancel(context.Background())