diff --git a/.gitignore b/.gitignore index 684a756..9720f3b 100644 --- a/.gitignore +++ b/.gitignore @@ -3,6 +3,7 @@ *.pyc dist/ build/ +docs/ *.egg-info/ *.egg *.pyc diff --git a/README.md b/README.md index 9247f21..5f00b3f 100644 --- a/README.md +++ b/README.md @@ -372,7 +372,11 @@ nanobot/ └── cli/ # πŸ–₯️ Commands ``` -## πŸ—ΊοΈ Roadmap +## 🀝 Contribute & Roadmap + +PRs welcome! The codebase is intentionally small and readable. πŸ€— + +**Roadmap** β€” Pick an item and [open a PR](https://github.com/HKUDS/nanobot/pulls)! - [ ] **Multi-modal** β€” See and hear (images, voice, video) - [ ] **Long-term memory** β€” Never forget important context @@ -380,14 +384,16 @@ nanobot/ - [ ] **More integrations** β€” Discord, Slack, email, calendar - [ ] **Self-improvement** β€” Learn from feedback and mistakes -**Want to help?** Pick an item and [open a PR](https://github.com/HKUDS/nanobot/pulls)! +### Contributors + + + + --- ## ⭐ Star History -*Community Growth Trajectory* -
@@ -398,12 +404,6 @@ nanobot/
---- - -## 🀝 Contribute - -PRs welcome! The codebase is intentionally small and readable. πŸ€— -

Thanks for visiting ✨ nanobot!

Views diff --git a/nanobot/agent/loop.py b/nanobot/agent/loop.py index 6fe2cfd..1d2b070 100644 --- a/nanobot/agent/loop.py +++ b/nanobot/agent/loop.py @@ -189,7 +189,8 @@ class AgentLoop: # Execute tools for tool_call in response.tool_calls: - logger.debug(f"Executing tool: {tool_call.name}") + args_str = json.dumps(tool_call.arguments) + logger.debug(f"Executing tool: {tool_call.name} with arguments: {args_str}") result = await self.tools.execute(tool_call.name, tool_call.arguments) messages = self.context.add_tool_result( messages, tool_call.id, tool_call.name, result @@ -281,7 +282,8 @@ class AgentLoop: ) for tool_call in response.tool_calls: - logger.debug(f"Executing tool: {tool_call.name}") + args_str = json.dumps(tool_call.arguments) + logger.debug(f"Executing tool: {tool_call.name} with arguments: {args_str}") result = await self.tools.execute(tool_call.name, tool_call.arguments) messages = self.context.add_tool_result( messages, tool_call.id, tool_call.name, result diff --git a/nanobot/channels/base.py b/nanobot/channels/base.py index d83367c..8f16399 100644 --- a/nanobot/channels/base.py +++ b/nanobot/channels/base.py @@ -72,7 +72,14 @@ class BaseChannel(ABC): if not allow_list: return True - return str(sender_id) in allow_list + sender_str = str(sender_id) + if sender_str in allow_list: + return True + if "|" in sender_str: + for part in sender_str.split("|"): + if part and part in allow_list: + return True + return False async def _handle_message( self, diff --git a/nanobot/channels/telegram.py b/nanobot/channels/telegram.py index 840c250..ac2dba4 100644 --- a/nanobot/channels/telegram.py +++ b/nanobot/channels/telegram.py @@ -199,8 +199,10 @@ class TelegramChannel(BaseChannel): user = update.effective_user chat_id = message.chat_id - # Get sender identifier (prefer username, fallback to user_id) - sender_id = str(user.username or user.id) + # Use stable numeric ID, but keep username for allowlist compatibility + sender_id = str(user.id) + if 
user.username: + sender_id = f"{sender_id}|{user.username}" # Store chat_id for replies self._chat_ids[sender_id] = chat_id diff --git a/nanobot/cli/commands.py b/nanobot/cli/commands.py index 943ab0b..57fd48b 100644 --- a/nanobot/cli/commands.py +++ b/nanobot/cli/commands.py @@ -154,7 +154,7 @@ This file stores important information that should persist across sessions. @app.command() def gateway( - port: int = typer.Option(18789, "--port", "-p", help="Gateway port"), + port: int = typer.Option(18790, "--port", "-p", help="Gateway port"), verbose: bool = typer.Option(False, "--verbose", "-v", help="Verbose output"), ): """Start the nanobot gateway.""" diff --git a/nanobot/config/schema.py b/nanobot/config/schema.py index e73e083..15a2a88 100644 --- a/nanobot/config/schema.py +++ b/nanobot/config/schema.py @@ -60,6 +60,7 @@ class ProvidersConfig(BaseModel): anthropic: ProviderConfig = Field(default_factory=ProviderConfig) openai: ProviderConfig = Field(default_factory=ProviderConfig) openrouter: ProviderConfig = Field(default_factory=ProviderConfig) + zhipu: ProviderConfig = Field(default_factory=ProviderConfig) vllm: ProviderConfig = Field(default_factory=ProviderConfig) gemini: ProviderConfig = Field(default_factory=ProviderConfig) @@ -67,7 +68,7 @@ class GatewayConfig(BaseModel): """Gateway/server configuration.""" host: str = "0.0.0.0" - port: int = 18789 + port: int = 18790 class WebSearchConfig(BaseModel): @@ -101,19 +102,22 @@ class Config(BaseSettings): def get_api_key(self) -> str | None: - """Get API key in priority order: OpenRouter > Anthropic > OpenAI > Gemini > vLLM.""" + """Get API key in priority order: OpenRouter > Anthropic > OpenAI > Gemini > Zhipu > vLLM.""" return ( self.providers.openrouter.api_key or self.providers.anthropic.api_key or self.providers.openai.api_key or self.providers.gemini.api_key or + self.providers.zhipu.api_key or self.providers.vllm.api_key or None ) def get_api_base(self) -> str | None: - 
"""Get API base URL if using OpenRouter or vLLM.""" + """Get API base URL if using OpenRouter, Zhipu or vLLM.""" if self.providers.openrouter.api_key: return self.providers.openrouter.api_base or "https://openrouter.ai/api/v1" + if self.providers.zhipu.api_key: + return self.providers.zhipu.api_base if self.providers.vllm.api_base: return self.providers.vllm.api_base return None diff --git a/nanobot/providers/litellm_provider.py b/nanobot/providers/litellm_provider.py index 4502c8f..42b4bf5 100644 --- a/nanobot/providers/litellm_provider.py +++ b/nanobot/providers/litellm_provider.py @@ -49,6 +49,8 @@ class LiteLLMProvider(LLMProvider): os.environ.setdefault("OPENAI_API_KEY", api_key) elif "gemini" in default_model.lower(): os.environ.setdefault("GEMINI_API_KEY", api_key) + elif "zhipu" in default_model or "glm" in default_model or "zai" in default_model: + os.environ.setdefault("ZHIPUAI_API_KEY", api_key) if api_base: litellm.api_base = api_base @@ -83,6 +85,15 @@ class LiteLLMProvider(LLMProvider): if self.is_openrouter and not model.startswith("openrouter/"): model = f"openrouter/{model}" + # For Zhipu/Z.ai, ensure prefix is present + # Handle cases like "glm-4.7-flash" -> "zhipu/glm-4.7-flash" + if ("glm" in model.lower() or "zhipu" in model.lower()) and not ( + model.startswith("zhipu/") or + model.startswith("zai/") or + model.startswith("openrouter/") + ): + model = f"zhipu/{model}" + # For vLLM, use hosted_vllm/ prefix per LiteLLM docs # Convert openai/ prefix to hosted_vllm/ if user specified it if self.is_vllm: