diff --git a/lua/avante/llm.lua b/lua/avante/llm.lua index 2f228e7..551c149 100644 --- a/lua/avante/llm.lua +++ b/lua/avante/llm.lua @@ -155,7 +155,13 @@ end ---@param opts AvanteGeneratePromptsOptions ---@return AvantePromptOptions function M.generate_prompts(opts) - if opts.prompt_opts then return opts.prompt_opts end + if opts.prompt_opts then + local prompt_opts = vim.tbl_deep_extend("force", opts.prompt_opts, { + tool_histories = opts.tool_histories, + }) + ---@cast prompt_opts AvantePromptOptions + return prompt_opts + end local provider = opts.provider or Providers[Config.provider] local mode = opts.mode or "planning" ---@type AvanteProviderFunctor | AvanteBedrockProviderFunctor @@ -280,6 +286,10 @@ Merge all changes from the snippet into the below. table.insert(messages, { role = "user", content = user_prompt }) end + opts.session_ctx = opts.session_ctx or {} + opts.session_ctx.system_prompt = system_prompt + opts.session_ctx.messages = messages + ---@type AvantePromptOptions return { system_prompt = system_prompt, @@ -522,6 +532,7 @@ function M._stream(opts) if LLMToolHelpers then LLMToolHelpers.is_cancelled = false end local provider = opts.provider or Providers[Config.provider] + opts.session_ctx = opts.session_ctx or {} ---@cast provider AvanteProviderFunctor diff --git a/lua/avante/llm_tools/bash.lua b/lua/avante/llm_tools/bash.lua index c64a172..971387e 100644 --- a/lua/avante/llm_tools/bash.lua +++ b/lua/avante/llm_tools/bash.lua @@ -239,9 +239,9 @@ function M.func(opts, on_log, on_complete, session_ctx) if not on_complete then return false, "on_complete not provided" end Helpers.confirm( "Are you sure you want to run the command: `" .. opts.command .. "` in the directory: " .. abs_path, - function(ok) + function(ok, reason) if not ok then - on_complete(false, "User canceled") + on_complete(false, "User declined, reason: " .. 
(reason or "unknown")) return end Utils.shell_run_async(opts.command, "bash -c", function(output, exit_code) diff --git a/lua/avante/llm_tools/dispatch_agent.lua b/lua/avante/llm_tools/dispatch_agent.lua index 4d31618..c8d33fb 100644 --- a/lua/avante/llm_tools/dispatch_agent.lua +++ b/lua/avante/llm_tools/dispatch_agent.lua @@ -66,7 +66,7 @@ local function get_available_tools() end ---@type AvanteLLMToolFunc<{ prompt: string }> -function M.func(opts, on_log, on_complete) +function M.func(opts, on_log, on_complete, session_ctx) local Llm = require("avante.llm") if not on_complete then return false, "on_complete not provided" end local prompt = opts.prompt @@ -80,18 +80,24 @@ Your task is to help the user with their request: "${prompt}" Be thorough and use the tools available to you to find the most relevant information. When you're done, provide a clear and concise summary of what you found.]]):gsub("${prompt}", prompt) + local messages = session_ctx and session_ctx.messages or {} + messages = messages or {} + table.insert(messages, { role = "user", content = prompt }) + local total_tokens = 0 local final_response = "" Llm._stream({ ask = true, code_lang = "unknown", provider = Providers[Config.provider], + on_tool_log = function(tool_name, log) + if on_log then on_log(string.format("[%s] %s", tool_name, log)) end + end, + session_ctx = session_ctx, prompt_opts = { system_prompt = system_prompt, tools = tools, - messages = { - { role = "user", content = prompt }, - }, + messages = messages, }, on_start = function(_) end, on_chunk = function(chunk) diff --git a/lua/avante/types.lua b/lua/avante/types.lua index 59a7149..81c6d1a 100644 --- a/lua/avante/types.lua +++ b/lua/avante/types.lua @@ -326,6 +326,7 @@ vim.g.avante_login = vim.g.avante_login ---@field original_code? string ---@field update_snippets? string[] ---@field prompt_opts? AvantePromptOptions +---@field session_ctx? table --- ---@class AvanteLLMToolHistory ---@field tool_result?
AvanteLLMToolResult @@ -334,7 +335,6 @@ vim.g.avante_login = vim.g.avante_login ---@alias AvanteLLMMemorySummarizeCallback fun(dropped_history_messages: AvanteLLMMessage[]): nil --- ---@class AvanteLLMStreamOptions: AvanteGeneratePromptsOptions ----@field session_ctx? table ---@field on_start AvanteLLMStartCallback ---@field on_chunk AvanteLLMChunkCallback ---@field on_stop AvanteLLMStopCallback