fix: dispatch agent missing tool histories (#1794)

This commit is contained in:
yetone
2025-04-01 23:29:51 +08:00
committed by GitHub
parent 7dc5560909
commit 06757e8bf5
4 changed files with 25 additions and 8 deletions

View File

@@ -155,7 +155,13 @@ end
 ---@param opts AvanteGeneratePromptsOptions
 ---@return AvantePromptOptions
 function M.generate_prompts(opts)
-  if opts.prompt_opts then return opts.prompt_opts end
+  if opts.prompt_opts then
+    local prompt_opts = vim.tbl_deep_extend("force", opts.prompt_opts, {
+      tool_histories = opts.tool_histories,
+    })
+    ---@cast prompt_opts AvantePromptOptions
+    return prompt_opts
+  end
   local provider = opts.provider or Providers[Config.provider]
   local mode = opts.mode or "planning"
   ---@type AvanteProviderFunctor | AvanteBedrockProviderFunctor
@@ -280,6 +286,10 @@ Merge all changes from the <update> snippet into the <code> below.
     table.insert(messages, { role = "user", content = user_prompt })
   end
+  opts.session_ctx = opts.session_ctx or {}
+  opts.session_ctx.system_prompt = system_prompt
+  opts.session_ctx.messages = messages
   ---@type AvantePromptOptions
   return {
     system_prompt = system_prompt,
@@ -522,6 +532,7 @@ function M._stream(opts)
   if LLMToolHelpers then LLMToolHelpers.is_cancelled = false end
   local provider = opts.provider or Providers[Config.provider]
+  opts.session_ctx = opts.session_ctx or {}
   ---@cast provider AvanteProviderFunctor

View File

@@ -239,9 +239,9 @@ function M.func(opts, on_log, on_complete, session_ctx)
   if not on_complete then return false, "on_complete not provided" end
   Helpers.confirm(
     "Are you sure you want to run the command: `" .. opts.command .. "` in the directory: " .. abs_path,
-    function(ok)
+    function(ok, reason)
       if not ok then
-        on_complete(false, "User canceled")
+        on_complete(false, "User declined, reason: " .. (reason and reason or "unknown"))
         return
       end
       Utils.shell_run_async(opts.command, "bash -c", function(output, exit_code)

View File

@@ -66,7 +66,7 @@ local function get_available_tools()
 end
 ---@type AvanteLLMToolFunc<{ prompt: string }>
-function M.func(opts, on_log, on_complete)
+function M.func(opts, on_log, on_complete, session_ctx)
   local Llm = require("avante.llm")
   if not on_complete then return false, "on_complete not provided" end
   local prompt = opts.prompt
@@ -80,18 +80,24 @@ Your task is to help the user with their request: "${prompt}"
 Be thorough and use the tools available to you to find the most relevant information.
 When you're done, provide a clear and concise summary of what you found.]]):gsub("${prompt}", prompt)
+  local messages = session_ctx and session_ctx.messages or {}
+  messages = messages or {}
+  table.insert(messages, { role = "user", content = prompt })
   local total_tokens = 0
   local final_response = ""
   Llm._stream({
     ask = true,
     code_lang = "unknown",
     provider = Providers[Config.provider],
+    on_tool_log = function(tool_name, log)
+      if on_log then on_log(string.format("[%s] %s", tool_name, log)) end
+    end,
+    session_ctx = session_ctx,
     prompt_opts = {
       system_prompt = system_prompt,
       tools = tools,
-      messages = {
-        { role = "user", content = prompt },
-      },
+      messages = messages,
     },
     on_start = function(_) end,
     on_chunk = function(chunk)

View File

@@ -326,6 +326,7 @@ vim.g.avante_login = vim.g.avante_login
 ---@field original_code? string
 ---@field update_snippets? string[]
 ---@field prompt_opts? AvantePromptOptions
+---@field session_ctx? table
 ---
 ---@class AvanteLLMToolHistory
 ---@field tool_result? AvanteLLMToolResult
@@ -334,7 +335,6 @@ vim.g.avante_login = vim.g.avante_login
 ---@alias AvanteLLMMemorySummarizeCallback fun(dropped_history_messages: AvanteLLMMessage[]): nil
 ---
 ---@class AvanteLLMStreamOptions: AvanteGeneratePromptsOptions
----@field session_ctx? table
 ---@field on_start AvanteLLMStartCallback
 ---@field on_chunk AvanteLLMChunkCallback
 ---@field on_stop AvanteLLMStopCallback