feat: supports OpenAI Response API and copilot's gpt-5-codex model (#2802)
* fix: upgrade vscode version
* feat: support openai response api
* refactor: refine todos tools
* fix: trim suffix empty lines
This commit is contained in:
@@ -1960,10 +1960,20 @@ function Sidebar:get_message_lines(ctx, message, messages, ignore_record_prefix)
 elseif type(content) == "string" then
   text_len = #content
 end
-local cache_key = message.uuid .. ":" .. tostring(text_len) .. ":" .. tostring(expanded == true)
+local cache_key = message.uuid
+  .. ":"
+  .. message.state
+  .. ":"
+  .. tostring(text_len)
+  .. ":"
+  .. tostring(expanded == true)
 local cached_lines = _message_to_lines_lru_cache:get(cache_key)
 if cached_lines then return cached_lines end
 local lines = self:_get_message_lines(ctx, message, messages, ignore_record_prefix)
+--- trim suffix empty lines
+while #lines > 0 and tostring(lines[#lines]) == "" do
+  table.remove(lines)
+end
 _message_to_lines_lru_cache:set(cache_key, lines)
 return lines
 end
@@ -2518,7 +2528,7 @@ function Sidebar:get_history_messages_for_api(opts)
 
 if not Config.acp_providers[Config.provider] then
   local tool_limit
-  if Providers[Config.provider].use_ReAct_prompt then
+  if Providers[Config.provider].use_ReAct_prompt or Providers[Config.provider].use_response_api then
     tool_limit = nil
   else
     tool_limit = 25
Reference in New Issue
Block a user