feat: add Gemini provider support
- Update configuration schema to include Gemini provider
- Modify API key retrieval priority to include Gemini
- Enhance CLI status command to display Gemini API status
- Update LiteLLMProvider to support Gemini integration
This commit is contained in:
parent
2049e1a826
commit
ab45185ed8
@ -624,11 +624,13 @@ def status():
|
|||||||
has_openrouter = bool(config.providers.openrouter.api_key)
|
has_openrouter = bool(config.providers.openrouter.api_key)
|
||||||
has_anthropic = bool(config.providers.anthropic.api_key)
|
has_anthropic = bool(config.providers.anthropic.api_key)
|
||||||
has_openai = bool(config.providers.openai.api_key)
|
has_openai = bool(config.providers.openai.api_key)
|
||||||
|
has_gemini = bool(config.providers.gemini.api_key)
|
||||||
has_vllm = bool(config.providers.vllm.api_base)
|
has_vllm = bool(config.providers.vllm.api_base)
|
||||||
|
|
||||||
console.print(f"OpenRouter API: {'[green]✓[/green]' if has_openrouter else '[dim]not set[/dim]'}")
|
console.print(f"OpenRouter API: {'[green]✓[/green]' if has_openrouter else '[dim]not set[/dim]'}")
|
||||||
console.print(f"Anthropic API: {'[green]✓[/green]' if has_anthropic else '[dim]not set[/dim]'}")
|
console.print(f"Anthropic API: {'[green]✓[/green]' if has_anthropic else '[dim]not set[/dim]'}")
|
||||||
console.print(f"OpenAI API: {'[green]✓[/green]' if has_openai else '[dim]not set[/dim]'}")
|
console.print(f"OpenAI API: {'[green]✓[/green]' if has_openai else '[dim]not set[/dim]'}")
|
||||||
|
console.print(f"Gemini API: {'[green]✓[/green]' if has_gemini else '[dim]not set[/dim]'}")
|
||||||
vllm_status = f"[green]✓ {config.providers.vllm.api_base}[/green]" if has_vllm else "[dim]not set[/dim]"
|
vllm_status = f"[green]✓ {config.providers.vllm.api_base}[/green]" if has_vllm else "[dim]not set[/dim]"
|
||||||
console.print(f"vLLM/Local: {vllm_status}")
|
console.print(f"vLLM/Local: {vllm_status}")
|
||||||
|
|
||||||
|
|||||||
@ -51,6 +51,7 @@ class ProvidersConfig(BaseModel):
|
|||||||
openai: ProviderConfig = Field(default_factory=ProviderConfig)
|
openai: ProviderConfig = Field(default_factory=ProviderConfig)
|
||||||
openrouter: ProviderConfig = Field(default_factory=ProviderConfig)
|
openrouter: ProviderConfig = Field(default_factory=ProviderConfig)
|
||||||
vllm: ProviderConfig = Field(default_factory=ProviderConfig)
|
vllm: ProviderConfig = Field(default_factory=ProviderConfig)
|
||||||
|
gemini: ProviderConfig = Field(default_factory=ProviderConfig)
|
||||||
|
|
||||||
|
|
||||||
class GatewayConfig(BaseModel):
|
class GatewayConfig(BaseModel):
|
||||||
@ -89,11 +90,12 @@ class Config(BaseSettings):
|
|||||||
return Path(self.agents.defaults.workspace).expanduser()
|
return Path(self.agents.defaults.workspace).expanduser()
|
||||||
|
|
||||||
def get_api_key(self) -> str | None:
|
def get_api_key(self) -> str | None:
|
||||||
"""Get API key in priority order: OpenRouter > Anthropic > OpenAI > vLLM."""
|
"""Get API key in priority order: OpenRouter > Anthropic > OpenAI > Gemini > vLLM."""
|
||||||
return (
|
return (
|
||||||
self.providers.openrouter.api_key or
|
self.providers.openrouter.api_key or
|
||||||
self.providers.anthropic.api_key or
|
self.providers.anthropic.api_key or
|
||||||
self.providers.openai.api_key or
|
self.providers.openai.api_key or
|
||||||
|
self.providers.gemini.api_key or
|
||||||
self.providers.vllm.api_key or
|
self.providers.vllm.api_key or
|
||||||
None
|
None
|
||||||
)
|
)
|
||||||
|
|||||||
@ -13,7 +13,7 @@ class LiteLLMProvider(LLMProvider):
|
|||||||
"""
|
"""
|
||||||
LLM provider using LiteLLM for multi-provider support.
|
LLM provider using LiteLLM for multi-provider support.
|
||||||
|
|
||||||
Supports OpenRouter, Anthropic, OpenAI, and many other providers through
|
Supports OpenRouter, Anthropic, OpenAI, Gemini, and many other providers through
|
||||||
a unified interface.
|
a unified interface.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
@ -47,6 +47,8 @@ class LiteLLMProvider(LLMProvider):
|
|||||||
os.environ.setdefault("ANTHROPIC_API_KEY", api_key)
|
os.environ.setdefault("ANTHROPIC_API_KEY", api_key)
|
||||||
elif "openai" in default_model or "gpt" in default_model:
|
elif "openai" in default_model or "gpt" in default_model:
|
||||||
os.environ.setdefault("OPENAI_API_KEY", api_key)
|
os.environ.setdefault("OPENAI_API_KEY", api_key)
|
||||||
|
elif "gemini" in default_model.lower():
|
||||||
|
os.environ.setdefault("GEMINI_API_KEY", api_key)
|
||||||
|
|
||||||
if api_base:
|
if api_base:
|
||||||
litellm.api_base = api_base
|
litellm.api_base = api_base
|
||||||
@ -86,6 +88,10 @@ class LiteLLMProvider(LLMProvider):
|
|||||||
if self.is_vllm:
|
if self.is_vllm:
|
||||||
model = f"hosted_vllm/{model}"
|
model = f"hosted_vllm/{model}"
|
||||||
|
|
||||||
|
# For Gemini, ensure gemini/ prefix if not already present
|
||||||
|
if "gemini" in model.lower() and not model.startswith("gemini/"):
|
||||||
|
model = f"gemini/{model}"
|
||||||
|
|
||||||
kwargs: dict[str, Any] = {
|
kwargs: dict[str, Any] = {
|
||||||
"model": model,
|
"model": model,
|
||||||
"messages": messages,
|
"messages": messages,
|
||||||
|
|||||||
Loading…
x
Reference in New Issue
Block a user