feat: add vLLM/local LLM support
- Add vllm provider configuration in config schema
- Auto-detect vLLM endpoints and use hosted_vllm/ prefix for LiteLLM
- Pass api_base directly to acompletion for custom endpoints
- Add vLLM status display in CLI status command
- Add vLLM setup documentation in README
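A minimal sketch of the routing the second and third bullets describe, assuming LiteLLM's `hosted_vllm/` provider prefix and its `api_base` keyword; the `complete()` helper and the detection rule are illustrative, not the commit's exact code:

```python
import litellm

async def complete(config, model: str, messages: list[dict]):
    """Illustrative helper: route requests to a configured vLLM endpoint via LiteLLM."""
    api_base = config.providers.vllm.api_base
    if api_base and not model.startswith("hosted_vllm/"):
        # Auto-detect: a configured vLLM api_base means the request should go
        # through LiteLLM's hosted_vllm provider (an OpenAI-compatible server).
        model = f"hosted_vllm/{model}"
    # api_base is passed straight to acompletion so custom endpoints work
    # without extra environment configuration.
    return await litellm.acompletion(
        model=model,
        messages=messages,
        api_base=api_base or None,
    )
```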
@@ -624,10 +624,13 @@ def status():
     has_openrouter = bool(config.providers.openrouter.api_key)
     has_anthropic = bool(config.providers.anthropic.api_key)
     has_openai = bool(config.providers.openai.api_key)
+    has_vllm = bool(config.providers.vllm.api_base)
 
     console.print(f"OpenRouter API: {'[green]✓[/green]' if has_openrouter else '[dim]not set[/dim]'}")
     console.print(f"Anthropic API: {'[green]✓[/green]' if has_anthropic else '[dim]not set[/dim]'}")
     console.print(f"OpenAI API: {'[green]✓[/green]' if has_openai else '[dim]not set[/dim]'}")
+    vllm_status = f"[green]✓ {config.providers.vllm.api_base}[/green]" if has_vllm else "[dim]not set[/dim]"
+    console.print(f"vLLM/Local: {vllm_status}")
 
 
 if __name__ == "__main__":
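The attribute accesses in the hunk (`config.providers.vllm.api_base`, `config.providers.openai.api_key`, and so on) imply a provider section in the config schema roughly like the sketch below. This assumes Pydantic-style models; the class names are placeholders, not the repository's actual schema:

```python
from pydantic import BaseModel

class ApiKeyProvider(BaseModel):
    api_key: str = ""

class VllmProvider(BaseModel):
    # Base URL of an OpenAI-compatible vLLM server, e.g. "http://localhost:8000/v1".
    # An empty string is falsy, so bool(api_base) doubles as the "configured"
    # check used by the status command above.
    api_base: str = ""

class Providers(BaseModel):
    openrouter: ApiKeyProvider = ApiKeyProvider()
    anthropic: ApiKeyProvider = ApiKeyProvider()
    openai: ApiKeyProvider = ApiKeyProvider()
    vllm: VllmProvider = VllmProvider()
```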