fix: do not import LiteLLM when LiteLLMProvider is not used

LiteLLM:WARNING: get_model_cost_map.py:213 - LiteLLM: Failed to fetch remote model cost map from https://raw.githubusercontent.com/BerriAI/litellm/main/model_prices_and_context_window.json: The read operation timed out. Falling back to local backup.
This commit is contained in:
hcanyz
2026-03-05 11:33:20 +08:00
parent fb74281434
commit a08aae93e6

View File

@@ -200,8 +200,6 @@ def onboard():
def _make_provider(config: Config):
    """Create the appropriate LLM provider from config."""
from nanobot.providers.custom_provider import CustomProvider
from nanobot.providers.litellm_provider import LiteLLMProvider
    from nanobot.providers.openai_codex_provider import OpenAICodexProvider

    model = config.agents.defaults.model
@@ -213,6 +211,7 @@ def _make_provider(config: Config):
        return OpenAICodexProvider(default_model=model)

    # Custom: direct OpenAI-compatible endpoint, bypasses LiteLLM
from nanobot.providers.custom_provider import CustomProvider
    if provider_name == "custom":
        return CustomProvider(
            api_key=p.api_key if p else "no-key",
@@ -220,6 +219,7 @@ def _make_provider(config: Config):
            default_model=model,
        )
from nanobot.providers.litellm_provider import LiteLLMProvider
    from nanobot.providers.registry import find_by_name

    spec = find_by_name(provider_name)
    if not model.startswith("bedrock/") and not (p and p.api_key) and not (spec and spec.is_oauth):