fix: auto-add provider prefix for custom LLM endpoints
LiteLLM requires model names in format "provider/model-name". When LLM_API_BASE is set, automatically prefix model with "openai/" if no provider prefix is present. 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
@@ -44,6 +44,20 @@
|
|||||||
# LLM_API_BASE=http://localhost:8000/v1
|
# LLM_API_BASE=http://localhost:8000/v1
|
||||||
# LLM_API_KEY=your-key
|
# LLM_API_KEY=your-key
|
||||||
# LLM_MODEL=qwen2.5
|
# LLM_MODEL=qwen2.5
|
||||||
|
# Note: When LLM_API_BASE is set, model is auto-prefixed as "openai/qwen2.5"

# =============================================================================
# Model naming convention (LiteLLM requires provider prefix)
# See: https://docs.litellm.ai/docs/providers
# =============================================================================
# Format: provider/model-name
# Examples:
#   openai/gpt-4
#   anthropic/claude-3-haiku-20240307
#   gemini/gemini-pro
#   ollama/llama2
#   huggingface/starcoder
#   azure/your-deployment-name
# =============================================================================
|
# =============================================================================
|
||||||
# Force mock mode (no API calls, uses predefined responses)
|
# Force mock mode (no API calls, uses predefined responses)
|
||||||
|
|||||||
@@ -76,6 +76,13 @@ class LLMService:
|
|||||||
self._mock_mode = os.environ.get("LLM_MOCK_MODE", "").lower() == "true"
|
self._mock_mode = os.environ.get("LLM_MOCK_MODE", "").lower() == "true"
|
||||||
self._acompletion = None
|
self._acompletion = None
|
||||||
|
|
||||||
|
        # Auto-add provider prefix for custom endpoints.
        # LiteLLM requires format: provider/model (e.g., openai/gpt-4).
        # See: https://docs.litellm.ai/docs/providers
        if self._api_base and "/" not in self._model:
            self._model = f"openai/{self._model}"
            logger.info(f"Auto-prefixed model name: {self._model} (custom endpoint detected)")

if self._mock_mode:
|
if self._mock_mode:
|
||||||
logger.info("LLMService running in MOCK mode (forced by LLM_MOCK_MODE)")
|
logger.info("LLMService running in MOCK mode (forced by LLM_MOCK_MODE)")
|
||||||
return
|
return
|
||||||
|
|||||||
Reference in New Issue
Block a user