fix: integrate OpenAI Codex provider with new registry system
- Add OpenAI Codex ProviderSpec to registry.py
- Add openai_codex config field to ProvidersConfig in schema.py
- Mark Codex as OAuth-based (no API key required)
- Set appropriate default_api_base for the Codex API

This integrates the Codex OAuth provider with the refactored provider registry system introduced in upstream commit 299d8b3.
commit c1dc8d3f55
parent 6bca38b89d
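For orientation, the ProviderSpec fields added in the registry.py hunk below are enough to sketch how a caller might resolve Codex and pick an auth mode. This is a minimal sketch, not code from this commit: find_provider_for_base is a made-up helper, and it assumes PROVIDERS and ProviderSpec are importable from registry.py as the diff suggests.

    # Minimal sketch, not part of this commit. Assumes registry.py
    # exposes PROVIDERS and ProviderSpec as shown in the diff below;
    # find_provider_for_base is a hypothetical helper.
    from registry import PROVIDERS, ProviderSpec

    def find_provider_for_base(api_base: str) -> ProviderSpec | None:
        # Match a configured api_base against each spec's
        # detect_by_base_keyword (the Codex spec sets this to "codex").
        for spec in PROVIDERS:
            if spec.detect_by_base_keyword and spec.detect_by_base_keyword in api_base:
                return spec
        return None

    spec = find_provider_for_base("https://chatgpt.com/backend-api/codex")
    if spec is not None and not spec.env_key:
        # An empty env_key marks an OAuth provider (no API key to read
        # from the environment), which is how Codex is modeled here.
        print(f"{spec.display_name}: OAuth flow, base {spec.default_api_base}")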
schema.py
@@ -81,6 +81,7 @@ class ProvidersConfig(BaseModel):
     gemini: ProviderConfig = Field(default_factory=ProviderConfig)
     moonshot: ProviderConfig = Field(default_factory=ProviderConfig)
     aihubmix: ProviderConfig = Field(default_factory=ProviderConfig)  # AiHubMix API gateway
+    openai_codex: ProviderConfig = Field(default_factory=ProviderConfig)  # OpenAI Codex (OAuth)
 
 
 class GatewayConfig(BaseModel):
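Because openai_codex uses default_factory=ProviderConfig like its siblings, the field is always present on a freshly constructed config. A quick sketch, assuming ProvidersConfig is importable from schema.py as shown:

    # Sketch only: assumes schema.py's ProvidersConfig is importable.
    from schema import ProvidersConfig

    cfg = ProvidersConfig()
    # Like gemini/moonshot/aihubmix, the new slot defaults to an empty
    # ProviderConfig rather than None, so callers need no existence checks.
    print(type(cfg.openai_codex).__name__)  # -> "ProviderConfig"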
registry.py
@@ -141,6 +141,24 @@ PROVIDERS: tuple[ProviderSpec, ...] = (
         model_overrides=(),
     ),
 
+    # OpenAI Codex: uses OAuth, not API key.
+    ProviderSpec(
+        name="openai_codex",
+        keywords=("openai-codex", "codex"),
+        env_key="",  # OAuth-based, no API key
+        display_name="OpenAI Codex",
+        litellm_prefix="",  # Not routed through LiteLLM
+        skip_prefixes=(),
+        env_extras=(),
+        is_gateway=False,
+        is_local=False,
+        detect_by_key_prefix="",
+        detect_by_base_keyword="codex",
+        default_api_base="https://chatgpt.com/backend-api",
+        strip_model_prefix=False,
+        model_overrides=(),
+    ),
+
     # DeepSeek: needs "deepseek/" prefix for LiteLLM routing.
     ProviderSpec(
         name="deepseek",