fix: revert max_completion_tokens to max_tokens (#1741)

This commit is contained in:
yetone
2025-03-27 16:53:55 +08:00
committed by GitHub
parent a06bb97db6
commit cd13eeb7d9
7 changed files with 6 additions and 9 deletions

View File

@@ -67,7 +67,7 @@ For building binary if you wish to build from source, then `cargo` is required.
model = "gpt-4o", -- your desired model (e.g. gpt-4o, gpt-4o-mini, etc.)
timeout = 30000, -- Timeout in milliseconds, increase this for reasoning models
temperature = 0,
max_completion_tokens = 8192, -- Increase this to include reasoning tokens (for reasoning models)
max_tokens = 8192, -- Increase this to include reasoning tokens (for reasoning models)
--reasoning_effort = "medium", -- low|medium|high, only used for reasoning models
},
},