fix: types (#1344)

This commit is contained in:
yetone
2025-02-22 23:24:20 +08:00
committed by GitHub
parent 0704a5820a
commit b04bffa441
23 changed files with 493 additions and 328 deletions

31
.github/workflows/.luarc.json vendored Normal file
View File

@@ -0,0 +1,31 @@
{
"$schema": "https://raw.githubusercontent.com/sumneko/vscode-lua/master/setting/schema.json",
"runtime": {
"version": "LuaJIT",
"pathStrict": true
},
"workspace": {
"library": [
"$VIMRUNTIME/lua",
"$VIMRUNTIME/lua/vim/lsp",
"$PWD/lua",
"${3rd}/luv/library",
"$DEPS_PATH/luvit-meta/library",
"$DEPS_PATH/lazy.nvim/lua",
"$DEPS_PATH/nvim-treesitter/lua",
"$DEPS_PATH/dressing.nvim/lua",
"$DEPS_PATH/plenary.nvim/lua",
"$DEPS_PATH/nui.nvim/lua",
"$DEPS_PATH/mini.pick/lua",
"$DEPS_PATH/telescope.nvim/lua",
"$DEPS_PATH/nvim-cmp/lua",
"$DEPS_PATH/fzf-lua/lua",
"$DEPS_PATH/nvim-web-devicons/lua",
"$DEPS_PATH/copilot.lua/lua"
],
"ignoreDir": [
"/lua"
],
"checkThirdParty": false
}
}

View File

@@ -74,3 +74,98 @@ jobs:
uses: lunarmodules/luacheck@cc089e3f65acdd1ef8716cc73a3eca24a6b845e4 # ratchet:lunarmodules/luacheck@v1
with:
args: ./lua/
typecheck:
name: Typecheck
runs-on: ubuntu-latest
strategy:
matrix:
nvim_version: [ stable ]
luals_version: [ 3.11.0 ]
steps:
- name: Checkout dependency neodev
uses: actions/checkout@v3
with:
repository: "folke/neodev.nvim"
path: "deps/neodev.nvim"
- name: Checkout dependency nvim-treesitter
uses: actions/checkout@v3
with:
repository: "nvim-treesitter/nvim-treesitter"
path: "deps/nvim-treesitter"
- name: Checkout dependency dressing.nvim
uses: actions/checkout@v3
with:
repository: "stevearc/dressing.nvim"
path: "deps/dressing.nvim"
- name: Checkout dependency plenary.nvim
uses: actions/checkout@v3
with:
repository: "nvim-lua/plenary.nvim"
path: "deps/plenary.nvim"
- name: Checkout dependency nui.nvim
uses: actions/checkout@v3
with:
repository: "MunifTanjim/nui.nvim"
path: "deps/nui.nvim"
- name: Checkout dependency mini.pick
uses: actions/checkout@v3
with:
repository: "echasnovski/mini.pick"
path: "deps/mini.pick"
- name: Checkout dependency telescope.nvim
uses: actions/checkout@v3
with:
repository: "nvim-telescope/telescope.nvim"
path: "deps/telescope.nvim"
- name: Checkout dependency nvim-cmp
uses: actions/checkout@v3
with:
repository: "hrsh7th/nvim-cmp"
path: "deps/nvim-cmp"
- name: Checkout dependency fzf-lua
uses: actions/checkout@v3
with:
repository: "ibhagwan/fzf-lua"
path: "deps/fzf-lua"
- name: Checkout dependency nvim-web-devicons
uses: actions/checkout@v3
with:
repository: "nvim-tree/nvim-web-devicons"
path: "deps/nvim-web-devicons"
- name: Checkout dependency copilot.lua
uses: actions/checkout@v3
with:
repository: "zbirenbaum/copilot.lua"
path: "deps/copilot.lua"
- name: Checkout dependency luvit-meta
uses: actions/checkout@v3
with:
repository: "Bilal2453/luvit-meta"
path: "deps/luvit-meta"
- name: Checkout dependency lazy.nvim
uses: actions/checkout@v3
with:
repository: "folke/lazy.nvim"
path: "deps/lazy.nvim"
- uses: rhysd/action-setup-vim@v1
with:
neovim: true
version: ${{ matrix.nvim_version }}
- name: Checkout Code
uses: actions/checkout@v3
- name: Install luals
run: |
mkdir -p luals
curl -L "https://github.com/LuaLS/lua-language-server/releases/download/${{ matrix.luals_version }}/lua-language-server-${{ matrix.luals_version }}-linux-x64.tar.gz" | tar zx --directory luals
- run: echo "luals/bin" >> "$GITHUB_PATH"
- name: Typecheck
env:
VIMRUNTIME: /home/runner/nvim-${{ matrix.nvim_version }}/share/nvim/runtime
DEPS_PATH: ${{ github.workspace }}/deps/
run: |
make lua-typecheck

View File

@@ -96,3 +96,7 @@ luatest:
.PHONY: lint
lint: luacheck luastylecheck ruststylecheck rustlint
.PHONY: lua-typecheck
lua-typecheck:
bash ./scripts/lua-typecheck.sh

View File

@@ -205,6 +205,7 @@ M.focus = function(opts)
end
else
if sidebar.code.winid then vim.api.nvim_set_current_win(sidebar.code.winid) end
---@cast opts SidebarOpenOptions
sidebar:open(opts)
if sidebar.input_container.winid then vim.api.nvim_set_current_win(sidebar.input_container.winid) end
end

View File

@@ -48,6 +48,7 @@ M.paste_image = function(line)
if vim.fn.has("wsl") > 0 or vim.fn.has("win32") > 0 then opts.use_absolute_path = true end
---@diagnostic disable-next-line: need-check-nil, undefined-field
return ImgClip.paste_image(opts, line)
end

View File

@@ -408,6 +408,7 @@ M._defaults = {
}
---@type avante.Config
---@diagnostic disable-next-line: missing-fields
M._options = {}
---@type Provider[]
@@ -450,7 +451,7 @@ function M.setup(opts)
end
end
---@param opts? avante.Config
---@param opts table<string, any>
function M.override(opts)
vim.validate({ opts = { opts, "table", true } })

View File

@@ -238,6 +238,7 @@ end
function FileSelector:mini_pick_ui(handler)
-- luacheck: globals MiniPick
---@diagnostic disable-next-line: undefined-field
if not _G.MiniPick then
Utils.error("mini.pick is not set up. Please install and set up mini.pick to use it as a file selector.")
return
@@ -245,11 +246,18 @@ function FileSelector:mini_pick_ui(handler)
local choose = function(item) handler(type(item) == "string" and { item } or item) end
local choose_marked = function(items_marked) handler(items_marked) end
local source = { choose = choose, choose_marked = choose_marked }
---@diagnostic disable-next-line: undefined-global
local result = MiniPick.builtin.files(nil, { source = source })
if result == nil then handler(nil) end
end
function FileSelector:snacks_picker_ui(handler)
---@diagnostic disable-next-line: undefined-field
if not _G.Snacks then
Utils.error("Snacks is not set up. Please install and set up Snacks to use it as a file selector.")
return
end
---@diagnostic disable-next-line: undefined-global
Snacks.picker.files({
exclude = self.selected_filepaths,
confirm = function(picker)

View File

@@ -297,6 +297,7 @@ M.toggle_sidebar = function(opts)
local sidebar = M.get()
if not sidebar then
M._init(api.nvim_get_current_tabpage())
---@cast opts SidebarOpenOptions
M.current.sidebar:open(opts)
return true
end
@@ -316,6 +317,7 @@ M.open_sidebar = function(opts)
if opts.ask == nil then opts.ask = true end
local sidebar = M.get()
if not sidebar then M._init(api.nvim_get_current_tabpage()) end
---@cast opts SidebarOpenOptions
M.current.sidebar:open(opts)
end

View File

@@ -19,7 +19,7 @@ M.CANCEL_PATTERN = "AvanteLLMEscape"
local group = api.nvim_create_augroup("avante_llm", { clear = true })
---@param opts GeneratePromptsOptions
---@param opts AvanteGeneratePromptsOptions
---@return AvantePromptOptions
M.generate_prompts = function(opts)
local provider = opts.provider or Providers[Config.provider]
@@ -139,7 +139,7 @@ Merge all changes from the <update> snippet into the <code> below.
}
end
---@param opts GeneratePromptsOptions
---@param opts AvanteGeneratePromptsOptions
---@return integer
M.calculate_tokens = function(opts)
local prompt_opts = M.generate_prompts(opts)
@@ -150,7 +150,7 @@ M.calculate_tokens = function(opts)
return tokens
end
---@param opts StreamOptions
---@param opts AvanteLLMStreamOptions
M._stream = function(opts)
local provider = opts.provider or Providers[Config.provider]
@@ -407,45 +407,7 @@ M._dual_boost_stream = function(opts, Provider1, Provider2)
if not success then Utils.error("Failed to start dual_boost streams: " .. tostring(err)) end
end
---@alias LlmMode "planning" | "editing" | "suggesting" | "cursor-planning" | "cursor-applying"
---
---@class SelectedFiles
---@field path string
---@field content string
---@field file_type string
---
---@class TemplateOptions
---@field use_xml_format boolean
---@field ask boolean
---@field question string
---@field code_lang string
---@field selected_code string | nil
---@field project_context string | nil
---@field selected_files SelectedFiles[] | nil
---@field diagnostics string | nil
---@field history_messages AvanteLLMMessage[]
---
---@class GeneratePromptsOptions: TemplateOptions
---@field ask boolean
---@field instructions? string
---@field mode LlmMode
---@field provider AvanteProviderFunctor | AvanteBedrockProviderFunctor | nil
---@field tools? AvanteLLMTool[]
---@field tool_histories? AvanteLLMToolHistory[]
---@field original_code? string
---@field update_snippets? string[]
---
---@class AvanteLLMToolHistory
---@field tool_result? AvanteLLMToolResult
---@field tool_use? AvanteLLMToolUse
---
---@class StreamOptions: GeneratePromptsOptions
---@field on_start AvanteLLMStartCallback
---@field on_chunk AvanteLLMChunkCallback
---@field on_stop AvanteLLMStopCallback
---@field on_tool_log? function(tool_name: string, log: string): nil
---@param opts StreamOptions
---@param opts AvanteLLMStreamOptions
M.stream = function(opts)
local is_completed = false
if opts.on_tool_log ~= nil then

View File

@@ -554,29 +554,6 @@ function M.python(opts, on_log)
return output, nil
end
---@class AvanteLLMTool
---@field name string
---@field description string
---@field func? fun(input: any): (string | nil, string | nil)
---@field param AvanteLLMToolParam
---@field returns AvanteLLMToolReturn[]
---@class AvanteLLMToolParam
---@field type string
---@field fields AvanteLLMToolParamField[]
---@class AvanteLLMToolParamField
---@field name string
---@field description string
---@field type string
---@field optional? boolean
---@class AvanteLLMToolReturn
---@field name string
---@field description string
---@field type string
---@field optional? boolean
---@type AvanteLLMTool[]
M.tools = {
{

View File

@@ -78,12 +78,12 @@ P.history = History
local Prompt = {}
-- Given a mode, return the file name for the custom prompt.
---@param mode LlmMode
---@param mode AvanteLlmMode
Prompt.get_mode_file = function(mode) return string.format("custom.%s.avanterules", mode) end
---@class AvanteTemplates
---@field initialize fun(directory: string): nil
---@field render fun(template: string, context: TemplateOptions): string
---@field render fun(template: string, context: AvanteTemplateOptions): string
local templates = nil
Prompt.templates = { planning = nil, editing = nil, suggesting = nil }
@@ -133,18 +133,18 @@ Prompt.get = function(project_root)
return cache_prompt_dir:absolute()
end
---@param mode LlmMode
---@param mode AvanteLlmMode
Prompt.get_file = function(mode)
if Prompt.templates[mode] ~= nil then return Prompt.get_mode_file(mode) end
return string.format("%s.avanterules", mode)
end
---@param path string
---@param opts TemplateOptions
---@param opts AvanteTemplateOptions
Prompt.render_file = function(path, opts) return templates.render(path, opts) end
---@param mode LlmMode
---@param opts TemplateOptions
---@param mode AvanteLlmMode
---@param opts AvanteTemplateOptions
Prompt.render_mode = function(mode, opts) return templates.render(Prompt.get_file(mode), opts) end
Prompt.initialize = function(directory) templates.initialize(directory) end

View File

@@ -35,8 +35,10 @@ M.parse_curl_args = function(provider, prompt_opts)
url = Utils.url_join(
provider_conf.endpoint,
"/openai/deployments/"
---@diagnostic disable-next-line: undefined-field
.. provider_conf.deployment
.. "/chat/completions?api-version="
---@diagnostic disable-next-line: undefined-field
.. provider_conf.api_version
),
proxy = provider_conf.proxy,

View File

@@ -1,14 +1,6 @@
local Utils = require("avante.utils")
local P = require("avante.providers")
---@alias AvanteBedrockPayloadBuilder fun(prompt_opts: AvantePromptOptions, body_opts: table<string, any>): table<string, any>
---
---@class AvanteBedrockModelHandler
---@field role_map table<"user" | "assistant", string>
---@field parse_messages AvanteMessagesParser
---@field parse_response AvanteResponseParser
---@field build_bedrock_payload AvanteBedrockPayloadBuilder
---@class AvanteBedrockProviderFunctor
local M = {}

View File

@@ -32,38 +32,42 @@ M.parse_messages = function(opts)
})
end
if opts.tool_use then
local msg = {
role = "assistant",
content = {},
}
if opts.response_content then
msg.content[#msg.content + 1] = {
type = "text",
text = opts.response_content,
}
end
msg.content[#msg.content + 1] = {
type = "tool_use",
id = opts.tool_use.id,
name = opts.tool_use.name,
input = vim.json.decode(opts.tool_use.input_json),
}
messages[#messages + 1] = msg
end
if opts.tool_histories then
for _, tool_history in ipairs(opts.tool_histories) do
if tool_history.tool_use then
local msg = {
role = "assistant",
content = {},
}
if tool_history.tool_use.response_content then
msg.content[#msg.content + 1] = {
type = "text",
text = tool_history.tool_use.response_content,
}
end
msg.content[#msg.content + 1] = {
type = "tool_use",
id = tool_history.tool_use.id,
name = tool_history.tool_use.name,
input = vim.json.decode(tool_history.tool_use.input_json),
}
messages[#messages + 1] = msg
end
if opts.tool_result then
messages[#messages + 1] = {
role = "user",
content = {
{
type = "tool_result",
tool_use_id = opts.tool_result.tool_use_id,
content = opts.tool_result.content,
is_error = opts.tool_result.is_error,
},
},
}
if tool_history.tool_result then
messages[#messages + 1] = {
role = "user",
content = {
{
type = "tool_result",
tool_use_id = tool_history.tool_result.tool_use_id,
content = tool_history.tool_result.content,
is_error = tool_history.tool_result.is_error,
},
},
}
end
end
end
return messages

View File

@@ -2,36 +2,6 @@ local Utils = require("avante.utils")
local Clipboard = require("avante.clipboard")
local P = require("avante.providers")
---@class AvanteClaudeBaseMessage
---@field cache_control {type: "ephemeral"}?
---
---@class AvanteClaudeTextMessage: AvanteClaudeBaseMessage
---@field type "text"
---@field text string
---
---@class AvanteClaudeImageMessage: AvanteClaudeBaseMessage
---@field type "image"
---@field source {type: "base64", media_type: string, data: string}
---
---@class AvanteClaudeMessage
---@field role "user" | "assistant"
---@field content [AvanteClaudeTextMessage | AvanteClaudeImageMessage][]
---@class AvanteClaudeTool
---@field name string
---@field description string
---@field input_schema AvanteClaudeToolInputSchema
---@class AvanteClaudeToolInputSchema
---@field type "object"
---@field properties table<string, AvanteClaudeToolInputSchemaProperty>
---@field required string[]
---@class AvanteClaudeToolInputSchemaProperty
---@field type "string" | "number" | "boolean"
---@field description string
---@field enum? string[]
---@param tool AvanteLLMTool
---@return AvanteClaudeTool
local function transform_tool(tool)

View File

@@ -125,6 +125,7 @@ H.get_oauth_token = function()
return vim
.iter(
---@type table<string, OAuthToken>
---@diagnostic disable-next-line: param-type-mismatch
vim.json.decode(yason:read())
)
:filter(function(k, _) return k:match("github.com") end)
@@ -375,6 +376,7 @@ M.cleanup = function()
-- Cleanup file watcher
if M._file_watcher then
---@diagnostic disable-next-line: param-type-mismatch
M._file_watcher:stop()
M._file_watcher = nil
end

View File

@@ -10,125 +10,6 @@ local DressingConfig = {
}
local DressingState = { winid = nil, input_winid = nil, input_bufnr = nil }
---@class AvanteHandlerOptions: table<[string], string>
---@field on_start AvanteLLMStartCallback
---@field on_chunk AvanteLLMChunkCallback
---@field on_stop AvanteLLMStopCallback
---
---@class AvanteLLMMessage
---@field role "user" | "assistant"
---@field content string
---
---@class AvanteLLMToolResult
---@field tool_name string
---@field tool_use_id string
---@field content string
---@field is_error? boolean
---
---@class AvantePromptOptions: table<[string], string>
---@field system_prompt string
---@field messages AvanteLLMMessage[]
---@field image_paths? string[]
---@field tools? AvanteLLMTool[]
---@field tool_histories? AvanteLLMToolHistory[]
---
---@class AvanteGeminiMessage
---@field role "user"
---@field parts { text: string }[]
---
---@alias AvanteChatMessage AvanteClaudeMessage | OpenAIMessage | AvanteGeminiMessage
---
---@alias AvanteMessagesParser fun(opts: AvantePromptOptions): AvanteChatMessage[]
---
---@class AvanteCurlOutput: {url: string, proxy: string, insecure: boolean, body: table<string, any> | string, headers: table<string, string>, rawArgs: string[] | nil}
---@alias AvanteCurlArgsParser fun(provider: AvanteProvider | AvanteProviderFunctor | AvanteBedrockProviderFunctor, prompt_opts: AvantePromptOptions): AvanteCurlOutput
---
---@class ResponseParser
---@field on_start AvanteLLMStartCallback
---@field on_chunk AvanteLLMChunkCallback
---@field on_stop AvanteLLMStopCallback
---@alias AvanteResponseParser fun(ctx: any, data_stream: string, event_state: string, opts: ResponseParser): nil
---
---@class AvanteDefaultBaseProvider: table<string, any>
---@field endpoint? string
---@field model? string
---@field local? boolean
---@field proxy? string
---@field timeout? integer
---@field allow_insecure? boolean
---@field api_key_name? string
---@field _shellenv? string
---@field disable_tools? boolean
---
---@class AvanteSupportedProvider: AvanteDefaultBaseProvider
---@field __inherited_from? string
---@field temperature? number
---@field max_tokens? number
---@field reasoning_effort? string
---
---@class AvanteLLMUsage
---@field input_tokens number
---@field cache_creation_input_tokens number
---@field cache_read_input_tokens number
---@field output_tokens number
---
---@class AvanteLLMToolUse
---@field name string
---@field id string
---@field input_json string
---@field response_content? string
---
---@class AvanteLLMStartCallbackOptions
---@field usage? AvanteLLMUsage
---
---@class AvanteLLMStopCallbackOptions
---@field reason "complete" | "tool_use" | "error"
---@field error? string | table
---@field usage? AvanteLLMUsage
---@field tool_use_list? AvanteLLMToolUse[]
---
---@alias AvanteStreamParser fun(line: string, handler_opts: AvanteHandlerOptions): nil
---@alias AvanteLLMStartCallback fun(opts: AvanteLLMStartCallbackOptions): nil
---@alias AvanteLLMChunkCallback fun(chunk: string): any
---@alias AvanteLLMStopCallback fun(opts: AvanteLLMStopCallbackOptions): nil
---@alias AvanteLLMConfigHandler fun(opts: AvanteSupportedProvider): AvanteDefaultBaseProvider, table<string, any>
---
---@class AvanteProvider: AvanteSupportedProvider
---@field parse_response_data AvanteResponseParser
---@field parse_curl_args? AvanteCurlArgsParser
---@field parse_stream_data? AvanteStreamParser
---@field parse_api_key? fun(): string | nil
---
---@class AvanteProviderFunctor
---@field role_map table<"user" | "assistant", string>
---@field parse_messages AvanteMessagesParser
---@field parse_response AvanteResponseParser
---@field parse_curl_args AvanteCurlArgsParser
---@field setup fun(): nil
---@field has fun(): boolean
---@field api_key_name string
---@field tokenizer_id string | "gpt-4o"
---@field use_xml_format boolean
---@field model? string
---@field parse_api_key fun(): string | nil
---@field parse_stream_data? AvanteStreamParser
---@field on_error? fun(result: table<string, any>): nil
---
---@class AvanteBedrockProviderFunctor
---@field parse_response AvanteResponseParser
---@field parse_curl_args AvanteCurlArgsParser
---@field setup fun(): nil
---@field has fun(): boolean
---@field api_key_name string
---@field tokenizer_id string | "gpt-4o"
---@field use_xml_format boolean
---@field model? string
---@field parse_api_key fun(): string | nil
---@field parse_stream_data? AvanteStreamParser
---@field on_error? fun(result: table<string, any>): nil
---@field load_model_handler fun(): AvanteBedrockModelHandler
---@field build_bedrock_payload? fun(prompt_opts: AvantePromptOptions, body_opts: table<string, any>): table<string, any>
---
---@class avante.Providers
---@field openai AvanteProviderFunctor
---@field claude AvanteProviderFunctor

View File

@@ -3,64 +3,6 @@ local Config = require("avante.config")
local Clipboard = require("avante.clipboard")
local P = require("avante.providers")
---@class OpenAIChatResponse
---@field id string
---@field object "chat.completion" | "chat.completion.chunk"
---@field created integer
---@field model string
---@field system_fingerprint string
---@field choices? OpenAIResponseChoice[] | OpenAIResponseChoiceComplete[]
---@field usage {prompt_tokens: integer, completion_tokens: integer, total_tokens: integer}
---
---@class OpenAIResponseChoice
---@field index integer
---@field delta OpenAIMessage
---@field logprobs? integer
---@field finish_reason? "stop" | "length"
---
---@class OpenAIResponseChoiceComplete
---@field message OpenAIMessage
---@field finish_reason "stop" | "length" | "eos_token"
---@field index integer
---@field logprobs integer
---
---@class OpenAIMessageToolCallFunction
---@field name string
---@field arguments string
---
---@class OpenAIMessageToolCall
---@field index integer
---@field id string
---@field type "function"
---@field function OpenAIMessageToolCallFunction
---
---@class OpenAIMessage
---@field role? "user" | "system" | "assistant"
---@field content? string
---@field reasoning_content? string
---@field reasoning? string
---@field tool_calls? OpenAIMessageToolCall[]
---
---@class AvanteOpenAITool
---@field type "function"
---@field function AvanteOpenAIToolFunction
---
---@class AvanteOpenAIToolFunction
---@field name string
---@field description string
---@field parameters AvanteOpenAIToolFunctionParameters
---@field strict boolean
---
---@class AvanteOpenAIToolFunctionParameters
---@field type string
---@field properties table<string, AvanteOpenAIToolFunctionParameterProperty>
---@field required string[]
---@field additionalProperties boolean
---
---@class AvanteOpenAIToolFunctionParameterProperty
---@field type string
---@field description string
---@class AvanteProviderFunctor
local M = {}
@@ -201,7 +143,7 @@ M.parse_response = function(ctx, data_stream, _, opts)
return
end
if data_stream:match('"delta":') then
---@type OpenAIChatResponse
---@type AvanteOpenAIChatResponse
local jsn = vim.json.decode(data_stream)
if jsn.choices and jsn.choices[1] then
local choice = jsn.choices[1]
@@ -264,7 +206,7 @@ M.parse_response = function(ctx, data_stream, _, opts)
end
M.parse_response_without_stream = function(data, _, opts)
---@type OpenAIChatResponse
---@type AvanteOpenAIChatResponse
local json = vim.json.decode(data)
if json.choices and json.choices[1] then
local choice = json.choices[1]

View File

@@ -181,6 +181,7 @@ function Sidebar:toggle(opts)
self:close()
return false
else
---@cast opts SidebarOpenOptions
self:open(opts)
return true
end
@@ -2322,7 +2323,7 @@ function Sidebar:create_input_container(opts)
})
---@param request string
---@return GeneratePromptsOptions
---@return AvanteGeneratePromptsOptions
local function get_generate_prompts_options(request)
local filetype = api.nvim_get_option_value("filetype", { buf = self.code.bufnr })
@@ -2574,7 +2575,7 @@ function Sidebar:create_input_container(opts)
end
local generate_prompts_options = get_generate_prompts_options(request)
---@type StreamOptions
---@type AvanteLLMStreamOptions
---@diagnostic disable-next-line: assign-type-mismatch
local stream_options = vim.tbl_deep_extend("force", generate_prompts_options, {
on_start = on_start,

View File

@@ -70,3 +70,278 @@ function vim.api.nvim_create_user_command(name, command, opts) end
---@type boolean
vim.g.avante_login = vim.g.avante_login
---@class AvanteHandlerOptions: table<[string], string>
---@field on_start AvanteLLMStartCallback
---@field on_chunk AvanteLLMChunkCallback
---@field on_stop AvanteLLMStopCallback
---
---@class AvanteLLMMessage
---@field role "user" | "assistant"
---@field content string
---
---@class AvanteLLMToolResult
---@field tool_name string
---@field tool_use_id string
---@field content string
---@field is_error? boolean
---
---@class AvantePromptOptions: table<[string], string>
---@field system_prompt string
---@field messages AvanteLLMMessage[]
---@field image_paths? string[]
---@field tools? AvanteLLMTool[]
---@field tool_histories? AvanteLLMToolHistory[]
---
---@class AvanteGeminiMessage
---@field role "user"
---@field parts { text: string }[]
---
---@class AvanteClaudeBaseMessage
---@field cache_control {type: "ephemeral"}?
---
---@class AvanteClaudeTextMessage: AvanteClaudeBaseMessage
---@field type "text"
---@field text string
---
---@class AvanteClaudeImageMessage: AvanteClaudeBaseMessage
---@field type "image"
---@field source {type: "base64", media_type: string, data: string}
---
---@class AvanteClaudeMessage
---@field role "user" | "assistant"
---@field content [AvanteClaudeTextMessage | AvanteClaudeImageMessage][]
---@class AvanteClaudeTool
---@field name string
---@field description string
---@field input_schema AvanteClaudeToolInputSchema
---@class AvanteClaudeToolInputSchema
---@field type "object"
---@field properties table<string, AvanteClaudeToolInputSchemaProperty>
---@field required string[]
---@class AvanteClaudeToolInputSchemaProperty
---@field type "string" | "number" | "boolean"
---@field description string
---@field enum? string[]
---
---@class AvanteOpenAIChatResponse
---@field id string
---@field object "chat.completion" | "chat.completion.chunk"
---@field created integer
---@field model string
---@field system_fingerprint string
---@field choices? AvanteOpenAIResponseChoice[] | AvanteOpenAIResponseChoiceComplete[]
---@field usage {prompt_tokens: integer, completion_tokens: integer, total_tokens: integer}
---
---@class AvanteOpenAIResponseChoice
---@field index integer
---@field delta AvanteOpenAIMessage
---@field logprobs? integer
---@field finish_reason? "stop" | "length"
---
---@class AvanteOpenAIResponseChoiceComplete
---@field message AvanteOpenAIMessage
---@field finish_reason "stop" | "length" | "eos_token"
---@field index integer
---@field logprobs integer
---
---@class AvanteOpenAIMessageToolCallFunction
---@field name string
---@field arguments string
---
---@class AvanteOpenAIMessageToolCall
---@field index integer
---@field id string
---@field type "function"
---@field function AvanteOpenAIMessageToolCallFunction
---
---@class AvanteOpenAIMessage
---@field role? "user" | "system" | "assistant"
---@field content? string
---@field reasoning_content? string
---@field reasoning? string
---@field tool_calls? AvanteOpenAIMessageToolCall[]
---
---@class AvanteOpenAITool
---@field type "function"
---@field function AvanteOpenAIToolFunction
---
---@class AvanteOpenAIToolFunction
---@field name string
---@field description string
---@field parameters AvanteOpenAIToolFunctionParameters
---@field strict boolean
---
---@class AvanteOpenAIToolFunctionParameters
---@field type string
---@field properties table<string, AvanteOpenAIToolFunctionParameterProperty>
---@field required string[]
---@field additionalProperties boolean
---
---@class AvanteOpenAIToolFunctionParameterProperty
---@field type string
---@field description string
---
---@alias AvanteChatMessage AvanteClaudeMessage | AvanteOpenAIMessage | AvanteGeminiMessage
---
---@alias AvanteMessagesParser fun(opts: AvantePromptOptions): AvanteChatMessage[]
---
---@class AvanteCurlOutput: {url: string, proxy: string, insecure: boolean, body: table<string, any> | string, headers: table<string, string>, rawArgs: string[] | nil}
---@alias AvanteCurlArgsParser fun(provider: AvanteProvider | AvanteProviderFunctor | AvanteBedrockProviderFunctor, prompt_opts: AvantePromptOptions): AvanteCurlOutput
---
---@class AvanteResponseParserOptions
---@field on_start AvanteLLMStartCallback
---@field on_chunk AvanteLLMChunkCallback
---@field on_stop AvanteLLMStopCallback
---@alias AvanteResponseParser fun(ctx: any, data_stream: string, event_state: string, opts: AvanteResponseParserOptions): nil
---
---@class AvanteDefaultBaseProvider: table<string, any>
---@field endpoint? string
---@field model? string
---@field local? boolean
---@field proxy? string
---@field timeout? integer
---@field allow_insecure? boolean
---@field api_key_name? string
---@field _shellenv? string
---@field disable_tools? boolean
---
---@class AvanteSupportedProvider: AvanteDefaultBaseProvider
---@field __inherited_from? string
---@field temperature? number
---@field max_tokens? number
---@field reasoning_effort? string
---
---@class AvanteLLMUsage
---@field input_tokens number
---@field cache_creation_input_tokens number
---@field cache_read_input_tokens number
---@field output_tokens number
---
---@class AvanteLLMToolUse
---@field name string
---@field id string
---@field input_json string
---@field response_content? string
---
---@class AvanteLLMStartCallbackOptions
---@field usage? AvanteLLMUsage
---
---@class AvanteLLMStopCallbackOptions
---@field reason "complete" | "tool_use" | "error"
---@field error? string | table
---@field usage? AvanteLLMUsage
---@field tool_use_list? AvanteLLMToolUse[]
---
---@alias AvanteStreamParser fun(line: string, handler_opts: AvanteHandlerOptions): nil
---@alias AvanteLLMStartCallback fun(opts: AvanteLLMStartCallbackOptions): nil
---@alias AvanteLLMChunkCallback fun(chunk: string): any
---@alias AvanteLLMStopCallback fun(opts: AvanteLLMStopCallbackOptions): nil
---@alias AvanteLLMConfigHandler fun(opts: AvanteSupportedProvider): AvanteDefaultBaseProvider, table<string, any>
---
---@class AvanteProvider: AvanteSupportedProvider
---@field parse_response_data AvanteResponseParser
---@field parse_curl_args? AvanteCurlArgsParser
---@field parse_stream_data? AvanteStreamParser
---@field parse_api_key? fun(): string | nil
---
---@class AvanteProviderFunctor
---@field role_map table<"user" | "assistant", string>
---@field parse_messages AvanteMessagesParser
---@field parse_response AvanteResponseParser
---@field parse_curl_args AvanteCurlArgsParser
---@field setup fun(): nil
---@field has fun(): boolean
---@field api_key_name string
---@field tokenizer_id string | "gpt-4o"
---@field use_xml_format boolean
---@field model? string
---@field parse_api_key fun(): string | nil
---@field parse_stream_data? AvanteStreamParser
---@field on_error? fun(result: table<string, any>): nil
---
---@class AvanteBedrockProviderFunctor
---@field parse_response AvanteResponseParser
---@field parse_curl_args AvanteCurlArgsParser
---@field setup fun(): nil
---@field has fun(): boolean
---@field api_key_name string
---@field tokenizer_id string | "gpt-4o"
---@field use_xml_format boolean
---@field model? string
---@field parse_api_key fun(): string | nil
---@field parse_stream_data? AvanteStreamParser
---@field on_error? fun(result: table<string, any>): nil
---@field load_model_handler fun(): AvanteBedrockModelHandler
---@field build_bedrock_payload? fun(prompt_opts: AvantePromptOptions, body_opts: table<string, any>): table<string, any>
---
---@alias AvanteBedrockPayloadBuilder fun(prompt_opts: AvantePromptOptions, body_opts: table<string, any>): table<string, any>
---
---@class AvanteBedrockModelHandler
---@field role_map table<"user" | "assistant", string>
---@field parse_messages AvanteMessagesParser
---@field parse_response AvanteResponseParser
---@field build_bedrock_payload AvanteBedrockPayloadBuilder
---
---@alias AvanteLlmMode "planning" | "editing" | "suggesting" | "cursor-planning" | "cursor-applying"
---
---@class AvanteSelectedFiles
---@field path string
---@field content string
---@field file_type string
---
---@class AvanteTemplateOptions
---@field use_xml_format boolean | nil
---@field ask boolean
---@field code_lang string
---@field selected_code string | nil
---@field project_context string | nil
---@field selected_files AvanteSelectedFiles[] | nil
---@field diagnostics string | nil
---@field history_messages AvanteLLMMessage[] | nil
---
---@class AvanteGeneratePromptsOptions: AvanteTemplateOptions
---@field ask boolean
---@field instructions? string
---@field mode AvanteLlmMode
---@field provider AvanteProviderFunctor | AvanteBedrockProviderFunctor | nil
---@field tools? AvanteLLMTool[]
---@field tool_histories? AvanteLLMToolHistory[]
---@field original_code? string
---@field update_snippets? string[]
---
---@class AvanteLLMToolHistory
---@field tool_result? AvanteLLMToolResult
---@field tool_use? AvanteLLMToolUse
---
---@class AvanteLLMStreamOptions: AvanteGeneratePromptsOptions
---@field on_start AvanteLLMStartCallback
---@field on_chunk AvanteLLMChunkCallback
---@field on_stop AvanteLLMStopCallback
---@field on_tool_log? fun(tool_name: string, log: string): nil
---
---@class AvanteLLMTool
---@field name string
---@field description string
---@field func? fun(input: any): (string | nil, string | nil)
---@field param AvanteLLMToolParam
---@field returns AvanteLLMToolReturn[]
---@class AvanteLLMToolParam
---@field type string
---@field fields AvanteLLMToolParamField[]
---@class AvanteLLMToolParamField
---@field name string
---@field description string
---@field type string
---@field optional? boolean
---@class AvanteLLMToolReturn
---@field name string
---@field description string
---@field type string
---@field optional? boolean

View File

@@ -137,10 +137,6 @@ end
---@param rhs string|function Right-hand side |{rhs}| of the mapping, can be a Lua function.
---
---@param opts? vim.keymap.set.Opts
---@see |nvim_set_keymap()|
---@see |maparg()|
---@see |mapcheck()|
---@see |mapset()|
M.safe_keymap_set = function(mode, lhs, rhs, opts)
---@type boolean
local ok
@@ -160,6 +156,7 @@ M.safe_keymap_set = function(mode, lhs, rhs, opts)
---@cast modes -string
---@param m string
---@diagnostic disable-next-line: undefined-field
modes = vim.tbl_filter(function(m) return not (Keys and Keys.have and Keys:have(lhs, m)) end, modes)
-- don't create keymap if a lazy keys handler exists
@@ -250,6 +247,7 @@ function M.get_buf_lines(start, end_, buf) return api.nvim_buf_get_lines(buf or
---Get cursor row and column as (1, 0) based
---@param win_id integer?
---@return integer, integer
---@diagnostic disable-next-line: redundant-return-value
function M.get_cursor_pos(win_id) return unpack(api.nvim_win_get_cursor(win_id or 0)) end
---Check if the buffer is likely to have actionable conflict markers

View File

@@ -42,10 +42,11 @@ end
---@param patterns string[]|string
function M.detectors.pattern(buf, patterns)
patterns = type(patterns) == "string" and { patterns } or patterns
local patterns_ = type(patterns) == "string" and { patterns } or patterns
---@cast patterns_ string[]
local path = M.bufpath(buf) or vim.uv.cwd()
local pattern = vim.fs.find(function(name)
for _, p in ipairs(patterns) do
for _, p in ipairs(patterns_) do
if name == p then return true end
if p:sub(1, 1) == "*" and name:find(vim.pesc(p:sub(2)) .. "$") then return true end
end

15
scripts/lua-typecheck.sh Executable file
View File

@@ -0,0 +1,15 @@
#!/usr/bin/env bash
# Static type check of ./lua with lua-language-server.
# VIMRUNTIME and DEPS_PATH may be pre-set (e.g. by CI); otherwise we derive
# local defaults below.
set -euo pipefail

if [ -z "${VIMRUNTIME:-}" ]; then
  # Ask nvim itself where its runtime lives.
  # (Assign, then export: `export VAR=$(cmd)` would mask the command's exit status.)
  VIMRUNTIME=$(nvim --headless --noplugin -u NONE -c "echo \$VIMRUNTIME" +qa 2>&1)
  export VIMRUNTIME
fi
echo "VIMRUNTIME: ${VIMRUNTIME}"

if [ -z "${DEPS_PATH:-}" ]; then
  # Default to lazy.nvim's plugin install location for local runs.
  export DEPS_PATH="${HOME}/.local/share/nvim/lazy/"
fi
echo "DEPS_PATH: ${DEPS_PATH}"

lua-language-server --check="${PWD}/lua" --configpath="${PWD}/.github/workflows/.luarc.json" --checklevel=Information