feat: add vLLM/local LLM support

- Add vllm provider configuration in config schema (see config sketch below)
- Auto-detect vLLM endpoints and use hosted_vllm/ prefix for LiteLLM (see routing sketch below)
- Pass api_base directly to acompletion for custom endpoints (see call sketch below)
- Add vLLM status display in CLI status command
- Add vLLM setup documentation in README
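
The config-schema change itself is not in the hunk shown on this page; the following is a minimal sketch of what the vllm provider entry could look like in a pydantic-style schema, inferred from the `config.providers.vllm.api_base` access in the diff below. The class names and defaults are assumptions, not the project's actual code.

```python
# Hypothetical sketch of the provider schema addition; only the
# config.providers.vllm.api_base attribute path is confirmed by the diff.
from pydantic import BaseModel


class VLLMProvider(BaseModel):
    # OpenAI-compatible base URL of the local vLLM server,
    # e.g. "http://localhost:8000/v1"; empty string means "not set".
    api_base: str = ""
    # vLLM accepts any placeholder key when auth is not enforced.
    api_key: str = "dummy"


class Providers(BaseModel):
    vllm: VLLMProvider = VLLMProvider()
```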
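The auto-detection change is also not in the visible hunk. Here is a sketch of the routing idea under the assumption that models without an explicit provider prefix should be sent to a configured vLLM endpoint; `resolve_model` is a hypothetical helper, while the `hosted_vllm/` prefix is LiteLLM's documented route for OpenAI-compatible vLLM servers.

```python
def resolve_model(model: str, config) -> str:
    # When a local vLLM endpoint is configured and the model name carries
    # no hosted_vllm/ prefix yet, route it through LiteLLM's hosted_vllm
    # provider so requests go to the local server.
    if config.providers.vllm.api_base and not model.startswith("hosted_vllm/"):
        return f"hosted_vllm/{model}"
    return model
```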
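For the third bullet, LiteLLM's `acompletion` does accept `api_base` and `api_key` keyword arguments, so the call plausibly looks something like the sketch below; the model name and wrapper function are illustrative, not taken from the commit.

```python
import litellm


async def complete(config, messages: list[dict]) -> litellm.ModelResponse:
    # Passing api_base directly overrides LiteLLM's environment-based
    # routing and sends the request to the local vLLM server.
    return await litellm.acompletion(
        model="hosted_vllm/my-model",  # placeholder model name
        messages=messages,
        api_base=config.providers.vllm.api_base,
        api_key="dummy",  # vLLM ignores the key unless auth is enabled
    )
```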
Author: ZhihaoZhang97
Date:   2026-02-02 11:23:04 +11:00
Parent: 959c4dadf8
Commit: 2b19dcf9fd

4 changed files with 61 additions and 2 deletions


@@ -624,10 +624,13 @@ def status():
     has_openrouter = bool(config.providers.openrouter.api_key)
     has_anthropic = bool(config.providers.anthropic.api_key)
     has_openai = bool(config.providers.openai.api_key)
+    has_vllm = bool(config.providers.vllm.api_base)
     console.print(f"OpenRouter API: {'[green]✓[/green]' if has_openrouter else '[dim]not set[/dim]'}")
     console.print(f"Anthropic API: {'[green]✓[/green]' if has_anthropic else '[dim]not set[/dim]'}")
     console.print(f"OpenAI API: {'[green]✓[/green]' if has_openai else '[dim]not set[/dim]'}")
+    vllm_status = f"[green]✓ {config.providers.vllm.api_base}[/green]" if has_vllm else "[dim]not set[/dim]"
+    console.print(f"vLLM/Local: {vllm_status}")
 if __name__ == "__main__":