feat: add ollama as supported provider (#1543)

* feat: add ollama as supported provider

*This implementation currently only works with `stream = true`*
- Uses the actual Ollama API and allows passing additional options
- Properly passes the system prompt to the API

Use Ollama as the provider in `opts` like this:
opts = {
        debug = true,
        provider = "ollama",
        ollama = {
                api_key_name = "",
                endpoint = "http://127.0.0.1:11434",
                model = "qwen2.5-coder:latest",
                options = {
                        num_ctx = 32768,
                        temperature = 0,
                },
                stream = true,
        },
}
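For illustration, these fields map fairly directly onto Ollama's /api/chat request body. The sketch below is not the plugin's actual code; the function and variable names are made up for this example, but the endpoint path and body fields (model, messages, options, stream) are those of the Ollama chat API:

-- Rough sketch (not the plugin's implementation): build an /api/chat request
-- body from the ollama config table shown above.
local function build_ollama_body(conf, system_prompt, user_prompt)
  return {
    model = conf.model,         -- e.g. "qwen2.5-coder:latest"
    messages = {
      { role = "system", content = system_prompt },
      { role = "user", content = user_prompt },
    },
    options = conf.options,     -- extra options such as num_ctx and temperature
    stream = conf.stream,       -- the current implementation requires true
  }
end
-- The JSON-encoded body is then POSTed to conf.endpoint .. "/api/chat".

Because the provider only supports streaming for now, `stream` must stay `true` in the config.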

* fix: ollama types

---------

Co-authored-by: jtabke <25010496+jtabke@users.noreply.github.com>
Author: yetone
Date: 2025-03-10 02:23:56 +08:00
Committed by: GitHub
Parent: 4976807a33
Commit: 750ee80971
5 changed files with 100 additions and 5 deletions


@@ -67,12 +67,11 @@ function M.is_o_series_model(model) return model and string.match(model, "^o%d+"
 function M:parse_messages(opts)
   local messages = {}
-  local provider = P[Config.provider]
-  local base, _ = P.parse_config(provider)
+  local provider_conf, _ = P.parse_config(self)
   -- NOTE: Handle the case where the selected model is the `o1` model
   -- "o1" models are "smart" enough to understand user prompt as a system prompt in this context
-  if self.is_o_series_model(base.model) then
+  if self.is_o_series_model(provider_conf.model) then
     table.insert(messages, { role = "user", content = opts.system_prompt })
   else
     table.insert(messages, { role = "system", content = opts.system_prompt })