feat: add support for ollama RAG providers (#1427)
* fix: openai env

* feat: add support for multiple RAG providers

- Added provider, model and endpoint configuration options for RAG service
- Updated RAG service to support both OpenAI and Ollama providers
- Added Ollama embedding support and dependencies
- Improved environment variable handling for RAG service configuration

Signed-off-by: wfhtqp@gmail.com <wfhtqp@gmail.com>

* fix: update docker env

* feat: rag server add ollama llm

* fix: pre-commit

* feat: check embed model and clean

* docs: add rag server config docs

* fix: pyright ignore

---------

Signed-off-by: wfhtqp@gmail.com <wfhtqp@gmail.com>
This commit is contained in:
@@ -35,6 +35,10 @@ M._defaults = {
|
||||
tokenizer = "tiktoken",
|
||||
rag_service = {
|
||||
enabled = false, -- Enables the rag service, requires OPENAI_API_KEY to be set
|
||||
provider = "openai", -- The provider to use for RAG service. eg: openai or ollama
|
||||
llm_model = "", -- The LLM model to use for RAG service
|
||||
embed_model = "", -- The embedding model to use for RAG service
|
||||
endpoint = "https://api.openai.com/v1", -- The API endpoint for RAG service
|
||||
},
|
||||
web_search_engine = {
|
||||
provider = "tavily",
|
||||
|
||||
Reference in New Issue
Block a user