feat: add DeepSeek provider support

This commit is contained in:
Kyya Wang 2026-02-03 03:09:13 +00:00
parent 30d6e4b4b6
commit f23548f296
3 changed files with 12 additions and 2 deletions

4
.gitignore vendored
View File

@@ -12,4 +12,6 @@ docs/
*.pyw
*.pyz
*.pywz
*.pyzz
.venv/
__pycache__/

View File

@@ -50,6 +50,7 @@ class ProvidersConfig(BaseModel):
anthropic: ProviderConfig = Field(default_factory=ProviderConfig)
openai: ProviderConfig = Field(default_factory=ProviderConfig)
openrouter: ProviderConfig = Field(default_factory=ProviderConfig)
deepseek: ProviderConfig = Field(default_factory=ProviderConfig)
zhipu: ProviderConfig = Field(default_factory=ProviderConfig)
vllm: ProviderConfig = Field(default_factory=ProviderConfig)
gemini: ProviderConfig = Field(default_factory=ProviderConfig)
@@ -91,9 +92,10 @@ class Config(BaseSettings):
return Path(self.agents.defaults.workspace).expanduser()
def get_api_key(self) -> str | None:
- """Get API key in priority order: OpenRouter > Anthropic > OpenAI > Gemini > Zhipu > vLLM."""
+ """Get API key in priority order: OpenRouter > DeepSeek > Anthropic > OpenAI > Gemini > Zhipu > vLLM."""
return (
self.providers.openrouter.api_key or
self.providers.deepseek.api_key or
self.providers.anthropic.api_key or
self.providers.openai.api_key or
self.providers.gemini.api_key or

View File

@@ -43,6 +43,8 @@ class LiteLLMProvider(LLMProvider):
elif self.is_vllm:
# vLLM/custom endpoint - uses OpenAI-compatible API
os.environ["OPENAI_API_KEY"] = api_key
elif "deepseek" in default_model:
os.environ.setdefault("DEEPSEEK_API_KEY", api_key)
elif "anthropic" in default_model: elif "anthropic" in default_model:
os.environ.setdefault("ANTHROPIC_API_KEY", api_key) os.environ.setdefault("ANTHROPIC_API_KEY", api_key)
elif "openai" in default_model or "gpt" in default_model: elif "openai" in default_model or "gpt" in default_model:
@@ -103,6 +105,10 @@ class LiteLLMProvider(LLMProvider):
if "gemini" in model.lower() and not model.startswith("gemini/"): if "gemini" in model.lower() and not model.startswith("gemini/"):
model = f"gemini/{model}" model = f"gemini/{model}"
# Force set env vars for the provider based on model
if "deepseek" in model:
os.environ["DEEPSEEK_API_KEY"] = self.api_key
kwargs: dict[str, Any] = {
"model": model,
"messages": messages,