Merge pull request #604 to add custom provider and non-destructive onboard

feat: add custom provider and non-destructive onboard
This commit is contained in:
Xubin Ren 2026-02-14 00:08:40 +08:00 committed by GitHub
commit bc045fae1f
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
3 changed files with 24 additions and 9 deletions

View File

@@ -155,21 +155,21 @@ def main(
@app.command()
def onboard():
"""Initialize nanobot configuration and workspace."""
from nanobot.config.loader import get_config_path, save_config
from nanobot.config.loader import get_config_path, load_config, save_config
from nanobot.config.schema import Config
from nanobot.utils.helpers import get_workspace_path
config_path = get_config_path()
if config_path.exists():
console.print(f"[yellow]Config already exists at {config_path}[/yellow]")
if not typer.confirm("Overwrite?"):
raise typer.Exit()
# Create default config
config = Config()
save_config(config)
console.print(f"[green]✓[/green] Created config at {config_path}")
# Load existing config — Pydantic fills in defaults for any new fields
config = load_config()
save_config(config)
console.print(f"[green]✓[/green] Config refreshed at {config_path} (existing values preserved)")
else:
config = Config()
save_config(config)
console.print(f"[green]✓[/green] Created config at {config_path}")
# Create workspace
workspace = get_workspace_path()

View File

@@ -179,6 +179,7 @@ class ProviderConfig(BaseModel):
class ProvidersConfig(BaseModel):
    """Configuration for LLM providers.

    Each field is an independent ``ProviderConfig`` defaulting to an empty
    instance via ``default_factory`` (so providers are optional and no config
    is shared between fields). NOTE(review): field order may be significant
    for Pydantic serialization / provider resolution — confirm before
    reordering.
    """

    # User-supplied OpenAI-compatible endpoint; per the matching ProviderSpec,
    # it has no auto-detection keywords and only activates when explicitly
    # configured under "custom".
    custom: ProviderConfig = Field(default_factory=ProviderConfig)  # Any OpenAI-compatible endpoint
    # Built-in first-party providers.
    anthropic: ProviderConfig = Field(default_factory=ProviderConfig)
    openai: ProviderConfig = Field(default_factory=ProviderConfig)
    # OpenRouter gateway (routes to many upstream models).
    openrouter: ProviderConfig = Field(default_factory=ProviderConfig)

View File

@@ -62,6 +62,20 @@ class ProviderSpec:
PROVIDERS: tuple[ProviderSpec, ...] = (
# === Custom (user-provided OpenAI-compatible endpoint) =================
# No auto-detection — only activates when user explicitly configures "custom".
ProviderSpec(
name="custom",
keywords=(),
env_key="OPENAI_API_KEY",
display_name="Custom",
litellm_prefix="openai",
skip_prefixes=("openai/",),
is_gateway=True,
strip_model_prefix=True,
),
# === Gateways (detected by api_key / api_base, not model name) =========
# Gateways can route any model, so they win in fallback.