Merge branch 'main' into feat/discord-support

This commit is contained in:
Anunay Aatipamula 2026-02-02 18:55:16 +05:30 committed by GitHub
commit 226cb5b46b
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
8 changed files with 46 additions and 18 deletions

1
.gitignore vendored
View File

@@ -3,6 +3,7 @@
*.pyc
dist/
build/
docs/
*.egg-info/
*.egg
*.pyc

View File

@@ -372,7 +372,11 @@ nanobot/
└── cli/ # 🖥️ Commands
```
## 🗺️ Roadmap
## 🤝 Contribute & Roadmap
PRs welcome! The codebase is intentionally small and readable. 🤗
**Roadmap** — Pick an item and [open a PR](https://github.com/HKUDS/nanobot/pulls)!
- [ ] **Multi-modal** — See and hear (images, voice, video)
- [ ] **Long-term memory** — Never forget important context
@@ -380,14 +384,16 @@ nanobot/
- [ ] **More integrations** — Discord, Slack, email, calendar
- [ ] **Self-improvement** — Learn from feedback and mistakes
**Want to help?** Pick an item and [open a PR](https://github.com/HKUDS/nanobot/pulls)!
### Contributors
<a href="https://github.com/HKUDS/nanobot/graphs/contributors">
<img src="https://contrib.rocks/image?repo=HKUDS/nanobot" />
</a>
---
## ⭐ Star History
*Community Growth Trajectory*
<div align="center">
<a href="https://star-history.com/#HKUDS/nanobot&Date">
<picture>
@@ -398,12 +404,6 @@ nanobot/
</a>
</div>
---
## 🤝 Contribute
PRs welcome! The codebase is intentionally small and readable. 🤗
<p align="center">
<em> Thanks for visiting ✨ nanobot!</em><br><br>
<img src="https://visitor-badge.laobi.icu/badge?page_id=HKUDS.nanobot&style=for-the-badge&color=00d4ff" alt="Views">

View File

@@ -189,7 +189,8 @@ class AgentLoop:
# Execute tools
for tool_call in response.tool_calls:
logger.debug(f"Executing tool: {tool_call.name}")
args_str = json.dumps(tool_call.arguments)
logger.debug(f"Executing tool: {tool_call.name} with arguments: {args_str}")
result = await self.tools.execute(tool_call.name, tool_call.arguments)
messages = self.context.add_tool_result(
messages, tool_call.id, tool_call.name, result
@@ -281,7 +282,8 @@ class AgentLoop:
)
for tool_call in response.tool_calls:
logger.debug(f"Executing tool: {tool_call.name}")
args_str = json.dumps(tool_call.arguments)
logger.debug(f"Executing tool: {tool_call.name} with arguments: {args_str}")
result = await self.tools.execute(tool_call.name, tool_call.arguments)
messages = self.context.add_tool_result(
messages, tool_call.id, tool_call.name, result

View File

@@ -72,7 +72,14 @@ class BaseChannel(ABC):
if not allow_list:
return True
return str(sender_id) in allow_list
sender_str = str(sender_id)
if sender_str in allow_list:
return True
if "|" in sender_str:
for part in sender_str.split("|"):
if part and part in allow_list:
return True
return False
async def _handle_message(
self,

View File

@@ -199,8 +199,10 @@ class TelegramChannel(BaseChannel):
user = update.effective_user
chat_id = message.chat_id
# Get sender identifier (prefer username, fallback to user_id)
sender_id = str(user.username or user.id)
# Use stable numeric ID, but keep username for allowlist compatibility
sender_id = str(user.id)
if user.username:
sender_id = f"{sender_id}|{user.username}"
# Store chat_id for replies
self._chat_ids[sender_id] = chat_id

View File

@@ -154,7 +154,7 @@ This file stores important information that should persist across sessions.
@app.command()
def gateway(
port: int = typer.Option(18789, "--port", "-p", help="Gateway port"),
port: int = typer.Option(18790, "--port", "-p", help="Gateway port"),
verbose: bool = typer.Option(False, "--verbose", "-v", help="Verbose output"),
):
"""Start the nanobot gateway."""

View File

@@ -60,6 +60,7 @@ class ProvidersConfig(BaseModel):
anthropic: ProviderConfig = Field(default_factory=ProviderConfig)
openai: ProviderConfig = Field(default_factory=ProviderConfig)
openrouter: ProviderConfig = Field(default_factory=ProviderConfig)
zhipu: ProviderConfig = Field(default_factory=ProviderConfig)
vllm: ProviderConfig = Field(default_factory=ProviderConfig)
gemini: ProviderConfig = Field(default_factory=ProviderConfig)
@@ -67,7 +68,7 @@ class ProvidersConfig(BaseModel):
class GatewayConfig(BaseModel):
"""Gateway/server configuration."""
host: str = "0.0.0.0"
port: int = 18789
port: int = 18790
class WebSearchConfig(BaseModel):
@@ -101,19 +102,23 @@ class Config(BaseSettings):
def get_api_key(self) -> str | None:
"""Get API key in priority order: OpenRouter > Anthropic > OpenAI > Gemini > vLLM."""
"""Get API key in priority order: OpenRouter > Anthropic > OpenAI > Gemini > Zhipu > vLLM."""
return (
self.providers.openrouter.api_key or
self.providers.anthropic.api_key or
self.providers.openai.api_key or
self.providers.gemini.api_key or
self.providers.zhipu.api_key or
self.providers.vllm.api_key or
None
)
def get_api_base(self) -> str | None:
"""Get API base URL if using OpenRouter or vLLM."""
"""Get API base URL if using OpenRouter, Zhipu or vLLM."""
if self.providers.openrouter.api_key:
return self.providers.openrouter.api_base or "https://openrouter.ai/api/v1"
if self.providers.zhipu.api_key:
return self.providers.zhipu.api_base
if self.providers.vllm.api_base:
return self.providers.vllm.api_base
return None

View File

@@ -49,6 +49,8 @@ class LiteLLMProvider(LLMProvider):
os.environ.setdefault("OPENAI_API_KEY", api_key)
elif "gemini" in default_model.lower():
os.environ.setdefault("GEMINI_API_KEY", api_key)
elif "zhipu" in default_model or "glm" in default_model or "zai" in default_model:
os.environ.setdefault("ZHIPUAI_API_KEY", api_key)
if api_base:
litellm.api_base = api_base
@@ -83,6 +85,15 @@ class LiteLLMProvider(LLMProvider):
if self.is_openrouter and not model.startswith("openrouter/"):
model = f"openrouter/{model}"
# For Zhipu/Z.ai, ensure prefix is present
# Handle cases like "glm-4.7-flash" -> "zhipu/glm-4.7-flash"
if ("glm" in model.lower() or "zhipu" in model.lower()) and not (
model.startswith("zhipu/") or
model.startswith("zai/") or
model.startswith("openrouter/")
):
model = f"zhipu/{model}"
# For vLLM, use hosted_vllm/ prefix per LiteLLM docs
# Convert openai/ prefix to hosted_vllm/ if user specified it
if self.is_vllm: