diff --git a/.gitignore b/.gitignore
index 684a756..9720f3b 100644
--- a/.gitignore
+++ b/.gitignore
@@ -3,6 +3,7 @@
*.pyc
dist/
build/
+docs/
*.egg-info/
*.egg
*.pyc
diff --git a/README.md b/README.md
index 167ae22..5cb7049 100644
--- a/README.md
+++ b/README.md
@@ -321,7 +321,11 @@ nanobot/
└── cli/ # 🖥️ Commands
```
-## 🗺️ Roadmap
+## 🤝 Contribute & Roadmap
+
+PRs welcome! The codebase is intentionally small and readable. 🤖
+
+**Roadmap** — Pick an item and [open a PR](https://github.com/HKUDS/nanobot/pulls)!
- [ ] **Multi-modal** — See and hear (images, voice, video)
- [ ] **Long-term memory** — Never forget important context
@@ -329,14 +333,16 @@ nanobot/
- [ ] **More integrations** — Discord, Slack, email, calendar
- [ ] **Self-improvement** — Learn from feedback and mistakes
-**Want to help?** Pick an item and [open a PR](https://github.com/HKUDS/nanobot/pulls)!
+### Contributors
+
+
+
+
---
## ⭐ Star History
-*Community Growth Trajectory*
-
Thanks for visiting ✨ nanobot!
diff --git a/nanobot/agent/loop.py b/nanobot/agent/loop.py
index 3925a44..4a96b84 100644
--- a/nanobot/agent/loop.py
+++ b/nanobot/agent/loop.py
@@ -190,7 +190,8 @@ class AgentLoop:
# Execute tools
for tool_call in response.tool_calls:
- logger.debug(f"Executing tool: {tool_call.name}")
+ args_str = json.dumps(tool_call.arguments)
+ logger.debug(f"Executing tool: {tool_call.name} with arguments: {args_str}")
result = await self.tools.execute(tool_call.name, tool_call.arguments)
messages = self.context.add_tool_result(
messages, tool_call.id, tool_call.name, result
@@ -282,7 +283,8 @@ class AgentLoop:
)
for tool_call in response.tool_calls:
- logger.debug(f"Executing tool: {tool_call.name}")
+ args_str = json.dumps(tool_call.arguments)
+ logger.debug(f"Executing tool: {tool_call.name} with arguments: {args_str}")
result = await self.tools.execute(tool_call.name, tool_call.arguments)
messages = self.context.add_tool_result(
messages, tool_call.id, tool_call.name, result
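Both hunks make the same change, so one note covers them: the debug line now includes the serialized tool arguments. A standalone sketch of the behavior, assuming `json` is already imported at the top of `loop.py` (the import block isn't shown in the hunk) and that `tool_call.arguments` is a plain dict parsed from the model's tool call; `default=str` is added here as a defensive extra so the log line cannot raise `TypeError` on a non-serializable value:

```python
# Minimal sketch of the new tool-call logging (names mirror the hunk;
# log_tool_call itself is a hypothetical stand-in for the loop body).
import json
import logging

logger = logging.getLogger("nanobot")

def log_tool_call(name: str, arguments: dict) -> None:
    # default=str keeps the debug line from raising TypeError if a
    # non-JSON-serializable value ever appears in the arguments.
    args_str = json.dumps(arguments, default=str)
    logger.debug(f"Executing tool: {name} with arguments: {args_str}")

log_tool_call("read_file", {"path": "notes.md", "limit": 100})
```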
diff --git a/nanobot/channels/base.py b/nanobot/channels/base.py
index d83367c..8f16399 100644
--- a/nanobot/channels/base.py
+++ b/nanobot/channels/base.py
@@ -72,7 +72,14 @@ class BaseChannel(ABC):
if not allow_list:
return True
- return str(sender_id) in allow_list
+ sender_str = str(sender_id)
+ if sender_str in allow_list:
+ return True
+ if "|" in sender_str:
+ for part in sender_str.split("|"):
+ if part and part in allow_list:
+ return True
+ return False
async def _handle_message(
self,
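For reference, the new allowlist logic extracted as a free function (the enclosing method's name and signature aren't shown in the hunk, so the ones here are illustrative). A compound `id|username` sender passes if either part appears in the allowlist, and an empty allowlist still admits everyone:

```python
# Sketch of the compound-ID allowlist check from the hunk above.
def is_allowed(sender_id: str | int, allow_list: set[str]) -> bool:
    if not allow_list:
        return True  # no allowlist configured: everyone is allowed
    sender_str = str(sender_id)
    if sender_str in allow_list:
        return True
    if "|" in sender_str:
        # Any non-empty part of "id|username" may match on its own.
        return any(part and part in allow_list for part in sender_str.split("|"))
    return False

assert is_allowed("123456789|alice", {"alice"})      # username entry
assert is_allowed("123456789|alice", {"123456789"})  # numeric-id entry
assert not is_allowed("123456789|alice", {"bob"})
```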
diff --git a/nanobot/channels/telegram.py b/nanobot/channels/telegram.py
index 840c250..ac2dba4 100644
--- a/nanobot/channels/telegram.py
+++ b/nanobot/channels/telegram.py
@@ -199,8 +199,10 @@ class TelegramChannel(BaseChannel):
user = update.effective_user
chat_id = message.chat_id
- # Get sender identifier (prefer username, fallback to user_id)
- sender_id = str(user.username or user.id)
+ # Use stable numeric ID, but keep username for allowlist compatibility
+ sender_id = str(user.id)
+ if user.username:
+ sender_id = f"{sender_id}|{user.username}"
# Store chat_id for replies
self._chat_ids[sender_id] = chat_id
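A sketch of how the compound identifier is built, using a minimal stand-in for python-telegram-bot's `update.effective_user`. The stable numeric id always comes first; the username, which can change or be unset, is appended only when present:

```python
# Hypothetical stand-in for telegram's User object, for illustration only.
from dataclasses import dataclass

@dataclass
class User:
    id: int
    username: str | None

def make_sender_id(user: User) -> str:
    sender_id = str(user.id)           # stable numeric id, always present
    if user.username:                  # username is optional and mutable
        sender_id = f"{sender_id}|{user.username}"
    return sender_id

assert make_sender_id(User(123456789, "alice")) == "123456789|alice"
assert make_sender_id(User(987654321, None)) == "987654321"
```

Since `self._chat_ids` is now keyed by the compound string rather than the bare username, any other lookups into that map need the same construction.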
diff --git a/nanobot/cli/commands.py b/nanobot/cli/commands.py
index 8dcc460..d847710 100644
--- a/nanobot/cli/commands.py
+++ b/nanobot/cli/commands.py
@@ -624,11 +624,13 @@ def status():
has_openrouter = bool(config.providers.openrouter.api_key)
has_anthropic = bool(config.providers.anthropic.api_key)
has_openai = bool(config.providers.openai.api_key)
+ has_gemini = bool(config.providers.gemini.api_key)
has_vllm = bool(config.providers.vllm.api_base)
console.print(f"OpenRouter API: {'[green]✓[/green]' if has_openrouter else '[dim]not set[/dim]'}")
console.print(f"Anthropic API: {'[green]✓[/green]' if has_anthropic else '[dim]not set[/dim]'}")
console.print(f"OpenAI API: {'[green]✓[/green]' if has_openai else '[dim]not set[/dim]'}")
+console.print(f"Gemini API: {'[green]✓[/green]' if has_gemini else '[dim]not set[/dim]'}")
vllm_status = f"[green]✓ {config.providers.vllm.api_base}[/green]" if has_vllm else "[dim]not set[/dim]"
console.print(f"vLLM/Local: {vllm_status}")
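The status report gains a Gemini row alongside the existing providers. A minimal sketch of how these rows render, with hypothetical key values standing in for the loaded config:

```python
# Sketch of the provider-status rows; the real command reads the
# values from config.providers.* rather than this stand-in dict.
from rich.console import Console

console = Console()
keys = {"OpenRouter": None, "Anthropic": None, "OpenAI": "sk-...", "Gemini": "AIza..."}
for name, key in keys.items():
    mark = "[green]✓[/green]" if key else "[dim]not set[/dim]"
    console.print(f"{name} API: {mark}")
```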
diff --git a/nanobot/config/schema.py b/nanobot/config/schema.py
index e30fbb2..c2109a1 100644
--- a/nanobot/config/schema.py
+++ b/nanobot/config/schema.py
@@ -50,7 +50,9 @@ class ProvidersConfig(BaseModel):
anthropic: ProviderConfig = Field(default_factory=ProviderConfig)
openai: ProviderConfig = Field(default_factory=ProviderConfig)
openrouter: ProviderConfig = Field(default_factory=ProviderConfig)
+ zhipu: ProviderConfig = Field(default_factory=ProviderConfig)
vllm: ProviderConfig = Field(default_factory=ProviderConfig)
+ gemini: ProviderConfig = Field(default_factory=ProviderConfig)
class GatewayConfig(BaseModel):
@@ -89,19 +91,23 @@ class Config(BaseSettings):
return Path(self.agents.defaults.workspace).expanduser()
def get_api_key(self) -> str | None:
- """Get API key in priority order: OpenRouter > Anthropic > OpenAI > vLLM."""
+ """Get API key in priority order: OpenRouter > Anthropic > OpenAI > Gemini > Zhipu > vLLM."""
return (
self.providers.openrouter.api_key or
self.providers.anthropic.api_key or
self.providers.openai.api_key or
+ self.providers.gemini.api_key or
+ self.providers.zhipu.api_key or
self.providers.vllm.api_key or
None
)
def get_api_base(self) -> str | None:
- """Get API base URL if using OpenRouter or vLLM."""
+ """Get API base URL if using OpenRouter, Zhipu or vLLM."""
if self.providers.openrouter.api_key:
return self.providers.openrouter.api_base or "https://openrouter.ai/api/v1"
+ if self.providers.zhipu.api_key:
+ return self.providers.zhipu.api_base
if self.providers.vllm.api_base:
return self.providers.vllm.api_base
return None
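A standalone sketch of the updated resolution order, with a plain dict standing in for the pydantic `ProvidersConfig`. One behavioral note from the hunk: unlike the OpenRouter branch, the new Zhipu branch of `get_api_base` has no fallback URL, so it returns whatever `providers.zhipu.api_base` holds (possibly `None`) without falling through to vLLM:

```python
# Mirror of get_api_key's priority chain (dict stands in for ProvidersConfig).
def resolve_api_key(keys: dict[str, str | None]) -> str | None:
    for name in ("openrouter", "anthropic", "openai", "gemini", "zhipu", "vllm"):
        if keys.get(name):
            return keys[name]
    return None

# Gemini now wins over Zhipu and vLLM but still loses to OpenAI.
assert resolve_api_key({"gemini": "AIza...", "zhipu": "zk-..."}) == "AIza..."
assert resolve_api_key({"openai": "sk-...", "gemini": "AIza..."}) == "sk-..."
```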
diff --git a/nanobot/providers/litellm_provider.py b/nanobot/providers/litellm_provider.py
index 4e7305b..42b4bf5 100644
--- a/nanobot/providers/litellm_provider.py
+++ b/nanobot/providers/litellm_provider.py
@@ -13,7 +13,7 @@ class LiteLLMProvider(LLMProvider):
"""
LLM provider using LiteLLM for multi-provider support.
- Supports OpenRouter, Anthropic, OpenAI, and many other providers through
+ Supports OpenRouter, Anthropic, OpenAI, Gemini, and many other providers through
a unified interface.
"""
@@ -47,6 +47,10 @@ class LiteLLMProvider(LLMProvider):
os.environ.setdefault("ANTHROPIC_API_KEY", api_key)
elif "openai" in default_model or "gpt" in default_model:
os.environ.setdefault("OPENAI_API_KEY", api_key)
+ elif "gemini" in default_model.lower():
+ os.environ.setdefault("GEMINI_API_KEY", api_key)
+ elif "zhipu" in default_model or "glm" in default_model or "zai" in default_model:
+ os.environ.setdefault("ZHIPUAI_API_KEY", api_key)
if api_base:
litellm.api_base = api_base
@@ -81,11 +85,24 @@ class LiteLLMProvider(LLMProvider):
if self.is_openrouter and not model.startswith("openrouter/"):
model = f"openrouter/{model}"
+ # For Zhipu/Z.ai, ensure prefix is present
+ # Handle cases like "glm-4.7-flash" -> "zhipu/glm-4.7-flash"
+ if ("glm" in model.lower() or "zhipu" in model.lower()) and not (
+ model.startswith("zhipu/") or
+ model.startswith("zai/") or
+ model.startswith("openrouter/")
+ ):
+ model = f"zhipu/{model}"
+
# For vLLM, use hosted_vllm/ prefix per LiteLLM docs
# Convert openai/ prefix to hosted_vllm/ if user specified it
if self.is_vllm:
model = f"hosted_vllm/{model}"
+ # For Gemini, ensure gemini/ prefix, skipping models already routed via openrouter/ or hosted_vllm/
+ if "gemini" in model.lower() and not model.startswith(("gemini/", "openrouter/", "hosted_vllm/")):
+ model = f"gemini/{model}"
+
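The prefixing rules above, condensed into a pure function for illustration (the `is_openrouter` / `is_vllm` instance flags and the hosted_vllm/ rewrite are left out). The `startswith` guards are what keep already-routed models from being double-prefixed:

```python
# Sketch of the model-prefix normalization from this hunk.
def normalize_model(model: str) -> str:
    lower = model.lower()
    # Zhipu/GLM models get a zhipu/ prefix unless already routed.
    if ("glm" in lower or "zhipu" in lower) and not model.startswith(
        ("zhipu/", "zai/", "openrouter/")
    ):
        model = f"zhipu/{model}"
    # Gemini models likewise, skipping anything already routed.
    if "gemini" in model.lower() and not model.startswith(
        ("gemini/", "openrouter/", "hosted_vllm/")
    ):
        model = f"gemini/{model}"
    return model

assert normalize_model("glm-4.7-flash") == "zhipu/glm-4.7-flash"
assert normalize_model("gemini-2.5-flash") == "gemini/gemini-2.5-flash"
assert normalize_model("openrouter/google/gemini-2.5-flash") == "openrouter/google/gemini-2.5-flash"
```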
kwargs: dict[str, Any] = {
"model": model,
"messages": messages,