feat: streaming attempt completion (#2113)

This commit is contained in:
yetone
2025-06-02 21:21:05 +08:00
committed by GitHub
parent 1c4e062199
commit 647a459a2b
3 changed files with 37 additions and 28 deletions

View File

@@ -47,6 +47,7 @@ local Highlights = {
AVANTE_STATE_SPINNER_SEARCHING = { name = "AvanteStateSpinnerSearching", fg = "#1e222a", bg = "#c678dd" },
AVANTE_STATE_SPINNER_THINKING = { name = "AvanteStateSpinnerThinking", fg = "#1e222a", bg = "#c678dd" },
AVANTE_STATE_SPINNER_COMPACTING = { name = "AvanteStateSpinnerCompacting", fg = "#1e222a", bg = "#c678dd" },
AVANTE_TASK_COMPLETED = { name = "AvanteTaskCompleted", fg = "#98c379", bg_link = "Normal" },
}
Highlights.conflict = {

View File

@@ -834,25 +834,30 @@ function M._stream(opts)
table.insert(tool_results, tool_result)
return handle_next_tool_use(partial_tool_use_list, tool_use_index + 1, tool_results)
end
local is_replace_func_call = Utils.is_replace_func_call_tool_use(partial_tool_use)
if partial_tool_use.state == "generating" and not is_replace_func_call then return end
if is_replace_func_call then
if type(partial_tool_use.input) == "table" then partial_tool_use.input.tool_use_id = partial_tool_use.id end
if partial_tool_use.state == "generating" then
if type(partial_tool_use.input) == "table" then
partial_tool_use.input.streaming = true
LLMTools.process_tool_use(
prompt_opts.tools,
partial_tool_use,
function() end,
function() end,
opts.session_ctx
)
end
return
else
if streaming_tool_use then return end
local is_replace_tool_use = Utils.is_replace_func_call_tool_use(partial_tool_use)
local is_attempt_completion_tool_use = partial_tool_use.name == "attempt_completion"
if
partial_tool_use.state == "generating"
and not is_replace_tool_use
and not is_attempt_completion_tool_use
then
return
end
if type(partial_tool_use.input) == "table" then partial_tool_use.input.tool_use_id = partial_tool_use.id end
if partial_tool_use.state == "generating" then
if type(partial_tool_use.input) == "table" then
partial_tool_use.input.streaming = true
LLMTools.process_tool_use(
prompt_opts.tools,
partial_tool_use,
function() end,
function() end,
opts.session_ctx
)
end
return
else
if streaming_tool_use then return end
end
-- Either on_complete handles the tool result asynchronously or we receive the result and error synchronously when either is not nil
local result, error = LLMTools.process_tool_use(

View File

@@ -1,8 +1,9 @@
local Base = require("avante.llm_tools.base")
local Config = require("avante.config")
local HistoryMessage = require("avante.history_message")
local Highlights = require("avante.highlights")
local Line = require("avante.ui.line")
---@alias AttemptCompletionInput {result: string, command?: string}
---@alias AttemptCompletionInput {result: string, command?: string, streaming?: boolean}
---@class AvanteLLMTool
local M = setmetatable({}, Base)
@@ -54,7 +55,16 @@ M.returns = {
}
---@type AvanteLLMToolOnRender<AttemptCompletionInput>
function M.on_render() return {} end
---Render the attempt_completion result for display in the sidebar:
---a highlighted "Task Completed" header followed by one Line per line
---of the (possibly multi-line) result text. `opts.result` may be nil
---while the tool input is still streaming, in which case only the
---header is rendered.
function M.on_render(opts)
  local result_text = opts.result or ""
  local rendered = {
    Line:new({ { "✓ Task Completed", Highlights.AVANTE_TASK_COMPLETED } }),
  }
  for _, chunk in ipairs(vim.split(result_text, "\n")) do
    rendered[#rendered + 1] = Line:new({ { chunk } })
  end
  return rendered
end
---@type AvanteLLMToolFunc<AttemptCompletionInput>
function M.func(opts, on_log, on_complete, session_ctx)
@@ -62,13 +72,6 @@ function M.func(opts, on_log, on_complete, session_ctx)
local sidebar = require("avante").get()
if not sidebar then return false, "Avante sidebar not found" end
session_ctx.attempt_completion_is_called = true
local message = HistoryMessage:new({
role = "assistant",
content = opts.result,
}, {
just_for_display = true,
})
sidebar:add_history_messages({ message })
if opts.command and opts.command ~= "" and opts.command ~= vim.NIL then
require("avante.llm_tools.bash").func({ command = opts.command }, on_log, on_complete, session_ctx)
else