feat: Enhanced Model Provider Support and Configuration Flexibility For RAG Service (#2056)
Co-authored-by: doodleEsc <cokie@foxmail.com> Co-authored-by: pre-commit-ci-lite[bot] <117423508+pre-commit-ci-lite[bot]@users.noreply.github.com>
This commit is contained in:
35
py/rag-service/src/providers/openrouter.py
Normal file
35
py/rag-service/src/providers/openrouter.py
Normal file
@@ -0,0 +1,35 @@
|
||||
# src/providers/openrouter.py
|
||||
|
||||
from typing import Any
|
||||
|
||||
from llama_index.core.llms.llm import LLM
|
||||
from llama_index.llms.openrouter import OpenRouter
|
||||
|
||||
|
||||
def initialize_llm_model(
    llm_endpoint: str,
    llm_api_key: str,
    llm_model: str,
    **llm_extra: Any,  # noqa: ANN401
) -> LLM:
    """
    Create OpenRouter LLM model.

    Args:
        llm_endpoint: The API endpoint for the OpenRouter API.
        llm_api_key: The API key for the OpenRouter API.
        llm_model: The name of the LLM model.
        llm_extra: Extra keyword parameters forwarded verbatim to the
            OpenRouter constructor (e.g. temperature, max_tokens —
            TODO confirm which kwargs OpenRouter accepts).

    Returns:
        The initialized llm_model.

    """
    # All connection settings, including the API key, are passed explicitly
    # to the OpenRouter client rather than being read from environment
    # variables.
    return OpenRouter(
        model=llm_model,
        api_base=llm_endpoint,
        api_key=llm_api_key,
        **llm_extra,
    )
|
||||
Reference in New Issue
Block a user