chore: run stylua [generated] (#460)

* chore: add stylua

Signed-off-by: Aaron Pham <contact@aarnphm.xyz>

* chore: running stylua

Signed-off-by: Aaron Pham <contact@aarnphm.xyz>

---------

Signed-off-by: Aaron Pham <contact@aarnphm.xyz>
This commit is contained in:
Aaron Pham
2024-09-03 04:19:54 -04:00
committed by GitHub
parent 4ad913435c
commit e8c71d931e
28 changed files with 608 additions and 1181 deletions

View File

@@ -1,4 +1,4 @@
local Utils = require("avante.utils")
local Utils = require "avante.utils"
---@class AvanteTokenizer
---@field from_pretrained fun(model: string): nil
@@ -11,19 +11,15 @@ local M = {}
M.setup = function(model)
vim.defer_fn(function()
local ok, core = pcall(require, "avante_tokenizers")
if not ok then
return
end
if not ok then return end
---@cast core AvanteTokenizer
if tokenizers == nil then
tokenizers = core
end
if tokenizers == nil then tokenizers = core end
core.from_pretrained(model)
end, 1000)
local HF_TOKEN = os.getenv("HF_TOKEN")
local HF_TOKEN = os.getenv "HF_TOKEN"
if HF_TOKEN == nil and model ~= "gpt-4o" then
Utils.warn(
"Please set HF_TOKEN environment variable to use HuggingFace tokenizer if " .. model .. " is gated",
@@ -33,35 +29,23 @@ M.setup = function(model)
vim.env.HF_HUB_DISABLE_PROGRESS_BARS = 1
end
---Whether a tokenizer backend has been loaded.
---`tokenizers` is set asynchronously by M.setup (via vim.defer_fn), so this
---may return false for a short window after setup is called.
---@return boolean
M.available = function() return tokenizers ~= nil end
---Encode a prompt with the loaded tokenizer backend.
---Returns nil (rather than erroring) when no backend is loaded yet or the
---prompt is empty, so callers can fall back gracefully.
---@param prompt string
---@return table|nil tokens list of token ids (consumed with `#` by M.count), or nil
M.encode = function(prompt)
  if not tokenizers then return nil end
  if not prompt or prompt == "" then return nil end
  -- level 2 blames the caller's line, per Lua convention for argument validators
  if type(prompt) ~= "string" then error("Prompt is not type string", 2) end
  return tokenizers.encode(prompt)
end
---Count the number of tokens in a prompt.
---When no tokenizer backend is loaded, falls back to a rough byte-based
---estimate (~0.5 tokens per byte, rounded up) instead of returning 0.
---@param prompt string
---@return integer
M.count = function(prompt)
  if not tokenizers then return math.ceil(#prompt * 0.5) end
  local tokens = M.encode(prompt)
  -- M.encode returns nil for empty prompts even with a backend loaded
  if not tokens then return 0 end
  return #tokens
end