add OpenRouter prompt caching via cache_control

This commit is contained in:
tercerapersona
2026-02-20 11:34:50 -03:00
committed by GitHub
parent c8089021a5
commit b286457c85

View File

@@ -100,6 +100,7 @@ PROVIDERS: tuple[ProviderSpec, ...] = (
default_api_base="https://openrouter.ai/api/v1",
strip_model_prefix=False,
model_overrides=(),
supports_prompt_caching=True,
),
# AiHubMix: global gateway, OpenAI-compatible interface.