Merge PR #202: add Moonshot provider support

commit 9a8e9bf108
Re-bin committed 2026-02-06 08:02:10 +00:00
2 changed files with 67 additions and 23 deletions


@@ -77,6 +77,7 @@ class ProvidersConfig(BaseModel):
     zhipu: ProviderConfig = Field(default_factory=ProviderConfig)
     vllm: ProviderConfig = Field(default_factory=ProviderConfig)
     gemini: ProviderConfig = Field(default_factory=ProviderConfig)
+    moonshot: ProviderConfig = Field(default_factory=ProviderConfig)
 
 
 class GatewayConfig(BaseModel):
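
For context, a minimal sketch of what the new field gives callers under pydantic defaults; ProviderConfig is stubbed here to the two fields visible in this diff, and the key string is a placeholder:

    from pydantic import BaseModel, Field

    class ProviderConfig(BaseModel):  # stub: the real model may carry more fields
        api_key: str | None = None
        api_base: str | None = None

    class ProvidersConfig(BaseModel):
        moonshot: ProviderConfig = Field(default_factory=ProviderConfig)

    cfg = ProvidersConfig()                 # default: field exists, key unset
    assert cfg.moonshot.api_key is None
    cfg = ProvidersConfig(moonshot={"api_key": "sk-..."})  # pydantic coerces the dict
    assert cfg.moonshot.api_key == "sk-..."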
@@ -121,27 +122,57 @@ class Config(BaseSettings):
         """Get expanded workspace path."""
         return Path(self.agents.defaults.workspace).expanduser()
 
-    def get_api_key(self) -> str | None:
-        """Get API key in priority order: OpenRouter > DeepSeek > Anthropic > OpenAI > Gemini > Zhipu > Groq > vLLM."""
-        return (
-            self.providers.openrouter.api_key or
-            self.providers.deepseek.api_key or
-            self.providers.anthropic.api_key or
-            self.providers.openai.api_key or
-            self.providers.gemini.api_key or
-            self.providers.zhipu.api_key or
-            self.providers.groq.api_key or
-            self.providers.vllm.api_key or
-            None
-        )
+    def _match_provider(self, model: str | None = None) -> ProviderConfig | None:
+        """Match a provider based on model name."""
+        model = (model or self.agents.defaults.model).lower()
+        # Map of keywords to provider configs
+        providers = {
+            "openrouter": self.providers.openrouter,
+            "deepseek": self.providers.deepseek,
+            "anthropic": self.providers.anthropic,
+            "claude": self.providers.anthropic,
+            "openai": self.providers.openai,
+            "gpt": self.providers.openai,
+            "gemini": self.providers.gemini,
+            "zhipu": self.providers.zhipu,
+            "glm": self.providers.zhipu,
+            "zai": self.providers.zhipu,
+            "groq": self.providers.groq,
+            "moonshot": self.providers.moonshot,
+            "kimi": self.providers.moonshot,
+            "vllm": self.providers.vllm,
+        }
+        for keyword, provider in providers.items():
+            if keyword in model and provider.api_key:
+                return provider
+        return None
+
+    def get_api_key(self, model: str | None = None) -> str | None:
+        """Get API key for the given model (or default model). Falls back to first available key."""
+        # Try matching by model name first
+        matched = self._match_provider(model)
+        if matched:
+            return matched.api_key
+        # Fallback: return first available key
+        for provider in [
+            self.providers.openrouter, self.providers.deepseek,
+            self.providers.anthropic, self.providers.openai,
+            self.providers.gemini, self.providers.zhipu,
+            self.providers.moonshot, self.providers.vllm,
+            self.providers.groq,
+        ]:
+            if provider.api_key:
+                return provider.api_key
+        return None
 
-    def get_api_base(self) -> str | None:
-        """Get API base URL if using OpenRouter, Zhipu or vLLM."""
-        if self.providers.openrouter.api_key:
+    def get_api_base(self, model: str | None = None) -> str | None:
+        """Get API base URL based on model name."""
+        model = (model or self.agents.defaults.model).lower()
+        if "openrouter" in model:
             return self.providers.openrouter.api_base or "https://openrouter.ai/api/v1"
-        if self.providers.zhipu.api_key:
+        if any(k in model for k in ("zhipu", "glm", "zai")):
             return self.providers.zhipu.api_base
-        if self.providers.vllm.api_base:
+        if "vllm" in model:
             return self.providers.vllm.api_base
         return None
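
A minimal sketch of how the new lookup resolves keys by model name, with ProviderConfig and the keyword table stubbed down to three entries (the full table lives in _match_provider above); the key strings are placeholders:

    from dataclasses import dataclass

    @dataclass
    class ProviderConfig:  # stub of the real config model
        api_key: str | None = None

    PROVIDERS = {  # insertion order doubles as priority order
        "openrouter": ProviderConfig("sk-or"),
        "moonshot": ProviderConfig("sk-moonshot"),
        "kimi": ProviderConfig("sk-moonshot"),
    }

    def get_api_key(model: str) -> str | None:
        model = model.lower()
        # Keyword match first, mirroring _match_provider
        for keyword, provider in PROVIDERS.items():
            if keyword in model and provider.api_key:
                return provider.api_key
        # Fallback: first configured key
        return next((p.api_key for p in PROVIDERS.values() if p.api_key), None)

    assert get_api_key("kimi-k2.5") == "sk-moonshot"   # matched by "kimi"
    assert get_api_key("gpt-4o") == "sk-or"            # no match -> fallback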


@@ -55,6 +55,9 @@ class LiteLLMProvider(LLMProvider):
             os.environ.setdefault("ZHIPUAI_API_KEY", api_key)
         elif "groq" in default_model:
             os.environ.setdefault("GROQ_API_KEY", api_key)
+        elif "moonshot" in default_model or "kimi" in default_model:
+            os.environ.setdefault("MOONSHOT_API_KEY", api_key)
+            os.environ.setdefault("MOONSHOT_API_BASE", api_base or "https://api.moonshot.cn/v1")
 
         if api_base:
             litellm.api_base = api_base
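
The setdefault calls mean a MOONSHOT_API_KEY already exported in the environment wins over the configured key, the same contract as the other providers above. A minimal check of that behavior:

    import os

    # Simulate a key already exported in the shell:
    os.environ["MOONSHOT_API_KEY"] = "sk-from-shell"

    # The provider's setdefault calls never overwrite it:
    os.environ.setdefault("MOONSHOT_API_KEY", "sk-from-config")   # no-op
    os.environ.setdefault("MOONSHOT_API_BASE", "https://api.moonshot.cn/v1")

    assert os.environ["MOONSHOT_API_KEY"] == "sk-from-shell"
    assert os.environ["MOONSHOT_API_BASE"] == "https://api.moonshot.cn/v1"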
@@ -97,16 +100,26 @@ class LiteLLMProvider(LLMProvider):
             model.startswith("openrouter/")
         ):
             model = f"zai/{model}"
+        # For Moonshot/Kimi, ensure moonshot/ prefix (before vLLM check)
+        if ("moonshot" in model.lower() or "kimi" in model.lower()) and not (
+            model.startswith("moonshot/") or model.startswith("openrouter/")
+        ):
+            model = f"moonshot/{model}"
+        # For Gemini, ensure gemini/ prefix if not already present
+        if "gemini" in model.lower() and not model.startswith("gemini/"):
+            model = f"gemini/{model}"
         # For vLLM, use hosted_vllm/ prefix per LiteLLM docs
         # Convert openai/ prefix to hosted_vllm/ if user specified it
         if self.is_vllm:
             model = f"hosted_vllm/{model}"
-        # For Gemini, ensure gemini/ prefix if not already present
-        if "gemini" in model.lower() and not model.startswith("gemini/"):
-            model = f"gemini/{model}"
+        # kimi-k2.5 only supports temperature=1.0
+        if "kimi-k2.5" in model.lower():
+            temperature = 1.0
 
         kwargs: dict[str, Any] = {
             "model": model,
             "messages": messages,