diff --git a/.gitignore b/.gitignore index 684a756..9720f3b 100644 --- a/.gitignore +++ b/.gitignore @@ -3,6 +3,7 @@ *.pyc dist/ build/ +docs/ *.egg-info/ *.egg *.pyc diff --git a/nanobot/cli/commands.py b/nanobot/cli/commands.py index 8dcc460..d847710 100644 --- a/nanobot/cli/commands.py +++ b/nanobot/cli/commands.py @@ -624,11 +624,13 @@ def status(): has_openrouter = bool(config.providers.openrouter.api_key) has_anthropic = bool(config.providers.anthropic.api_key) has_openai = bool(config.providers.openai.api_key) + has_gemini = bool(config.providers.gemini.api_key) has_vllm = bool(config.providers.vllm.api_base) console.print(f"OpenRouter API: {'[green]✓[/green]' if has_openrouter else '[dim]not set[/dim]'}") console.print(f"Anthropic API: {'[green]✓[/green]' if has_anthropic else '[dim]not set[/dim]'}") console.print(f"OpenAI API: {'[green]✓[/green]' if has_openai else '[dim]not set[/dim]'}") + console.print(f"Gemini API: {'[green]✓[/green]' if has_gemini else '[dim]not set[/dim]'}") vllm_status = f"[green]✓ {config.providers.vllm.api_base}[/green]" if has_vllm else "[dim]not set[/dim]" console.print(f"vLLM/Local: {vllm_status}") diff --git a/nanobot/config/schema.py b/nanobot/config/schema.py index 5b4ef67..c2109a1 100644 --- a/nanobot/config/schema.py +++ b/nanobot/config/schema.py @@ -52,6 +52,7 @@ class ProvidersConfig(BaseModel): openrouter: ProviderConfig = Field(default_factory=ProviderConfig) zhipu: ProviderConfig = Field(default_factory=ProviderConfig) vllm: ProviderConfig = Field(default_factory=ProviderConfig) + gemini: ProviderConfig = Field(default_factory=ProviderConfig) class GatewayConfig(BaseModel): @@ -90,11 +91,12 @@ class Config(BaseSettings): return Path(self.agents.defaults.workspace).expanduser() def get_api_key(self) -> str | None: - """Get API key in priority order: OpenRouter > Anthropic > OpenAI > Zhipu > vLLM.""" + """Get API key in priority order: OpenRouter > Anthropic > OpenAI > Gemini > Zhipu > vLLM.""" return ( 
self.providers.openrouter.api_key or self.providers.anthropic.api_key or self.providers.openai.api_key or + self.providers.gemini.api_key or self.providers.zhipu.api_key or self.providers.vllm.api_key or None diff --git a/nanobot/providers/litellm_provider.py b/nanobot/providers/litellm_provider.py index 3621a7e..42b4bf5 100644 --- a/nanobot/providers/litellm_provider.py +++ b/nanobot/providers/litellm_provider.py @@ -13,7 +13,7 @@ class LiteLLMProvider(LLMProvider): """ LLM provider using LiteLLM for multi-provider support. - Supports OpenRouter, Anthropic, OpenAI, and many other providers through + Supports OpenRouter, Anthropic, OpenAI, Gemini, and many other providers through a unified interface. """ @@ -47,6 +47,8 @@ class LiteLLMProvider(LLMProvider): os.environ.setdefault("ANTHROPIC_API_KEY", api_key) elif "openai" in default_model or "gpt" in default_model: os.environ.setdefault("OPENAI_API_KEY", api_key) + elif "gemini" in default_model.lower(): + os.environ.setdefault("GEMINI_API_KEY", api_key) elif "zhipu" in default_model or "glm" in default_model or "zai" in default_model: os.environ.setdefault("ZHIPUAI_API_KEY", api_key) @@ -97,6 +99,10 @@ class LiteLLMProvider(LLMProvider): if self.is_vllm: model = f"hosted_vllm/{model}" + # Prefix bare Gemini model names with gemini/; skip models already provider-qualified (e.g. openrouter/..., hosted_vllm/...) + if "gemini" in model.lower() and "/" not in model: + model = f"gemini/{model}" + kwargs: dict[str, Any] = { "model": model, "messages": messages,