This commit improves the setup of the authentication database by adding a new function to create necessary tables for both frontends. It also ensures that environment variables are loaded from a `.env` file before any database operations, enhancing configuration management. Additionally, minor updates are made to related scripts for better clarity and functionality.
843 lines
36 KiB
Python
843 lines
36 KiB
Python
from __future__ import annotations
|
||
|
||
import os
|
||
import subprocess
|
||
import sys
|
||
from contextlib import asynccontextmanager
|
||
from pathlib import Path
|
||
|
||
from fastapi import FastAPI
|
||
from fastapi.middleware.cors import CORSMiddleware
|
||
from sqlalchemy import inspect, text
|
||
|
||
from backend.api.auth import router as auth_router
|
||
from backend.api.faces import router as faces_router
|
||
from backend.api.health import router as health_router
|
||
from backend.api.jobs import router as jobs_router
|
||
from backend.api.metrics import router as metrics_router
|
||
from backend.api.people import router as people_router
|
||
from backend.api.pending_identifications import router as pending_identifications_router
|
||
from backend.api.pending_linkages import router as pending_linkages_router
|
||
from backend.api.photos import router as photos_router
|
||
from backend.api.reported_photos import router as reported_photos_router
|
||
from backend.api.pending_photos import router as pending_photos_router
|
||
from backend.api.tags import router as tags_router
|
||
from backend.api.users import router as users_router
|
||
from backend.api.auth_users import router as auth_users_router
|
||
from backend.api.role_permissions import router as role_permissions_router
|
||
from backend.api.videos import router as videos_router
|
||
from backend.api.version import router as version_router
|
||
from backend.settings import APP_TITLE, APP_VERSION
|
||
from backend.constants.roles import DEFAULT_ADMIN_ROLE, DEFAULT_USER_ROLE, ROLE_VALUES
|
||
from backend.db.base import Base, engine
|
||
from backend.db.session import auth_engine, database_url, get_auth_database_url
|
||
# Import models to ensure they're registered with Base.metadata
|
||
from backend.db import models # noqa: F401
|
||
from backend.db.models import RolePermission
|
||
from backend.utils.password import hash_password
|
||
|
||
# Global worker process (will be set in lifespan)
|
||
_worker_process: subprocess.Popen | None = None
|
||
|
||
|
||
def start_worker() -> None:
    """Start the RQ background worker in a subprocess.

    Verifies Redis is reachable first, then launches ``backend.worker`` as a
    child process so the API server and the worker share one lifecycle.
    Failures are reported but never raised: the API can run without a worker,
    background jobs simply will not be processed.
    """
    global _worker_process

    try:
        from redis import Redis

        # Fail fast if Redis is unreachable -- the worker would be useless.
        redis_conn = Redis(host="localhost", port=6379, db=0, decode_responses=False)
        redis_conn.ping()

        # __file__ is backend/app.py, so parent.parent is the project root (punimtag/)
        project_root = Path(__file__).parent.parent

        # Use explicit Python path to avoid Cursor interception:
        # if sys.executable looks like an editor-managed binary, use /usr/bin/python3.
        python_executable = sys.executable
        if "cursor" in python_executable.lower() or not python_executable.startswith("/usr"):
            python_executable = "/usr/bin/python3"

        # Load .env so DATABASE_URL_AUTH (and friends) land in os.environ
        # before we snapshot it for the child process.
        from dotenv import load_dotenv

        env_file = project_root / ".env"
        if env_file.exists():
            load_dotenv(dotenv_path=env_file)

        # Inherit the full environment; PYTHONPATH must point at the project
        # root so `python -m backend.worker` resolves. DATABASE_URL_AUTH is
        # carried over automatically because .env was loaded into os.environ.
        worker_env = {**os.environ, "PYTHONPATH": str(project_root)}

        _worker_process = subprocess.Popen(
            [
                python_executable,
                "-m",
                "backend.worker",
            ],
            cwd=str(project_root),
            stdout=None,  # Don't capture - let output go to console
            stderr=None,  # Don't capture - let errors go to console
            env=worker_env,
        )

        # Give it a moment to start, then check if it's still running.
        import time

        time.sleep(0.5)
        if _worker_process.poll() is not None:
            # Process already exited - there was an error
            print(f"❌ Worker process exited immediately with code {_worker_process.returncode}")
            print(" Check worker errors above")
        else:
            print(f"✅ RQ worker started in background subprocess (PID: {_worker_process.pid})")
    except Exception as e:
        print(f"⚠️ Failed to start RQ worker: {e}")
        print(" Background jobs will not be processed. Ensure Redis is running.")
|
||
|
||
|
||
def stop_worker() -> None:
    """Stop the RQ worker subprocess gracefully.

    Sends SIGTERM and waits up to 5 seconds; escalates to SIGKILL if the
    worker does not exit in time. Never raises -- shutdown must proceed even
    if the worker is already gone -- but failures are now logged instead of
    silently swallowed, and the stale process handle is cleared.
    """
    global _worker_process

    if _worker_process is None:
        return

    try:
        _worker_process.terminate()
        try:
            _worker_process.wait(timeout=5)
        except subprocess.TimeoutExpired:
            # Worker ignored SIGTERM -- force-kill it.
            _worker_process.kill()
        print("✅ RQ worker stopped")
    except Exception as exc:
        # Don't block shutdown, but leave a trace for the operator.
        print(f"⚠️ Failed to stop RQ worker cleanly: {exc}")
    finally:
        # Drop the handle so a later start_worker() begins from a clean state.
        _worker_process = None
|
||
|
||
|
||
def ensure_user_password_hash_column(inspector) -> None:
    """Ensure users table contains password_hash column.

    Adds the column when missing, backfills empty/NULL values with a hash of
    the placeholder password "changeme", and (on PostgreSQL) tightens the
    column to NOT NULL afterwards.
    """
    if "users" not in inspector.get_table_names():
        print("ℹ️ Users table does not exist yet - will be created with password_hash column")
        return

    existing = {col["name"] for col in inspector.get_columns("users")}
    if "password_hash" in existing:
        print("ℹ️ password_hash column already exists in users table")
        return

    print("🔄 Adding password_hash column to users table...")

    fallback_hash = hash_password("changeme")
    is_postgres = engine.dialect.name == "postgresql"

    # Shared backfill: give every row without a hash the placeholder hash.
    backfill = text(
        "UPDATE users SET password_hash = :default_hash "
        "WHERE password_hash IS NULL OR password_hash = ''"
    )

    with engine.connect() as connection:
        with connection.begin():
            if is_postgres:
                # PostgreSQL: add as nullable, backfill, then set NOT NULL.
                connection.execute(
                    text("ALTER TABLE users ADD COLUMN IF NOT EXISTS password_hash TEXT")
                )
                connection.execute(backfill, {"default_hash": fallback_hash})
                connection.execute(
                    text("ALTER TABLE users ALTER COLUMN password_hash SET NOT NULL")
                )
            else:
                # SQLite: cannot add a NOT NULL column to an existing table.
                connection.execute(
                    text("ALTER TABLE users ADD COLUMN password_hash TEXT")
                )
                connection.execute(backfill, {"default_hash": fallback_hash})
    print("✅ Added password_hash column to users table (default password: changeme)")
|
||
|
||
|
||
def ensure_user_password_change_required_column(inspector) -> None:
    """Ensure users table contains password_change_required column."""
    if "users" not in inspector.get_table_names():
        return

    present = {col["name"] for col in inspector.get_columns("users")}
    if "password_change_required" in present:
        print("ℹ️ password_change_required column already exists in users table")
        return

    print("🔄 Adding password_change_required column to users table...")
    is_postgres = engine.dialect.name == "postgresql"

    with engine.connect() as connection:
        with connection.begin():
            if is_postgres:
                connection.execute(
                    text("ALTER TABLE users ADD COLUMN IF NOT EXISTS password_change_required BOOLEAN NOT NULL DEFAULT true")
                )
            else:
                # SQLite: add with a plain default, then backfill NULLs explicitly.
                connection.execute(
                    text("ALTER TABLE users ADD COLUMN password_change_required BOOLEAN DEFAULT 1")
                )
                connection.execute(
                    text("UPDATE users SET password_change_required = 1 WHERE password_change_required IS NULL")
                )
    print("✅ Added password_change_required column to users table")
|
||
|
||
|
||
def ensure_user_email_unique_constraint(inspector) -> None:
    """Ensure users table email column has a unique constraint.

    On PostgreSQL, adds ``uq_users_email`` when no email-related unique
    constraint exists; tolerates duplicates (API validation prevents new
    ones). On SQLite, existing columns cannot gain a unique constraint, so
    the model definition handles new tables instead.
    """
    if "users" not in inspector.get_table_names():
        return

    column_names = {column["name"] for column in inspector.get_columns("users")}
    if "email" not in column_names:
        print("ℹ️ email column does not exist in users table yet")
        return

    is_postgres = engine.dialect.name == "postgresql"
    with engine.connect() as connection:
        if is_postgres:
            # Skip if an email unique constraint is already in place.
            existing = connection.execute(text("""
                SELECT constraint_name
                FROM information_schema.table_constraints
                WHERE table_name = 'users'
                AND constraint_type = 'UNIQUE'
                AND constraint_name LIKE '%email%'
            """))
            if existing.first():
                print("ℹ️ Unique constraint on email column already exists")
                return

            # Adding the constraint fails if duplicate emails already exist.
            try:
                print("🔄 Adding unique constraint to email column...")
                connection.execute(text("ALTER TABLE users ADD CONSTRAINT uq_users_email UNIQUE (email)"))
                connection.commit()
                print("✅ Added unique constraint to email column")
            except Exception as exc:
                # Pre-existing constraint or duplicate rows are tolerated;
                # API validation will prevent new duplicates.
                message = str(exc).lower()
                if "already exists" in message or "duplicate" in message:
                    print(f"ℹ️ Could not add unique constraint (may have duplicates): {exc}")
                else:
                    print(f"⚠️ Could not add unique constraint: {exc}")
        else:
            # SQLite cannot add unique constraints to existing columns;
            # the model definition enforces it for newly created tables.
            print("ℹ️ SQLite: Unique constraint on email will be enforced by model definition for new tables")
|
||
|
||
|
||
def ensure_face_identified_by_user_id_column(inspector) -> None:
    """Ensure faces table contains identified_by_user_id column."""
    if "faces" not in inspector.get_table_names():
        return

    face_columns = {col["name"] for col in inspector.get_columns("faces")}
    if "identified_by_user_id" in face_columns:
        print("ℹ️ identified_by_user_id column already exists in faces table")
        return

    print("🔄 Adding identified_by_user_id column to faces table...")
    is_postgres = engine.dialect.name == "postgresql"

    with engine.connect() as connection:
        with connection.begin():
            if is_postgres:
                connection.execute(
                    text("ALTER TABLE faces ADD COLUMN IF NOT EXISTS identified_by_user_id INTEGER REFERENCES users(id)")
                )
                index_sql = "CREATE INDEX IF NOT EXISTS idx_faces_identified_by ON faces(identified_by_user_id)"
            else:
                connection.execute(
                    text("ALTER TABLE faces ADD COLUMN identified_by_user_id INTEGER REFERENCES users(id)")
                )
                # SQLite index creation may fail if the index already exists.
                index_sql = "CREATE INDEX idx_faces_identified_by ON faces(identified_by_user_id)"
            try:
                connection.execute(text(index_sql))
            except Exception:
                pass  # Index might already exist
    print("✅ Added identified_by_user_id column to faces table")
|
||
|
||
|
||
def ensure_user_role_column(inspector) -> None:
    """Ensure users table has a role column with valid values.

    Adds the ``role`` column when missing (defaulting to
    ``DEFAULT_USER_ROLE``), backfills NULL/empty/unknown role values based on
    the legacy ``is_admin`` flag, and creates an index on the column.

    Args:
        inspector: SQLAlchemy inspector bound to the main database engine.
    """
    if "users" not in inspector.get_table_names():
        return

    columns = {column["name"] for column in inspector.get_columns("users")}
    dialect = engine.dialect.name
    # Build a bound-parameter NOT IN (...) list over the known-good role
    # values (:role_value_0, :role_value_1, ...). Sorting keeps the
    # parameter order deterministic.
    role_values = sorted(ROLE_VALUES)
    placeholder_parts = ", ".join(
        f":role_value_{index}" for index, _ in enumerate(role_values)
    )
    # With no known role values, only NULL/empty roles get rewritten.
    where_clause = (
        "role IS NULL OR role = ''"
        if not placeholder_parts
        else f"role IS NULL OR role = '' OR role NOT IN ({placeholder_parts})"
    )
    params = {
        f"role_value_{index}": value for index, value in enumerate(role_values)
    }
    params["admin_role"] = DEFAULT_ADMIN_ROLE
    params["default_role"] = DEFAULT_USER_ROLE

    with engine.connect() as connection:
        with connection.begin():
            if "role" not in columns:
                if dialect == "postgresql":
                    connection.execute(
                        text(
                            f"ALTER TABLE users ADD COLUMN IF NOT EXISTS role TEXT "
                            f"NOT NULL DEFAULT '{DEFAULT_USER_ROLE}'"
                        )
                    )
                else:
                    # SQLite cannot add a NOT NULL column to an existing table,
                    # so the column is added with a DEFAULT only.
                    connection.execute(
                        text(
                            f"ALTER TABLE users ADD COLUMN role TEXT "
                            f"DEFAULT '{DEFAULT_USER_ROLE}'"
                        )
                    )
            # Backfill: legacy admins keep the admin role, everyone else
            # gets the default. Runs even when the column already existed,
            # to repair invalid values.
            connection.execute(
                text(
                    f"""
                    UPDATE users
                    SET role = CASE
                        WHEN is_admin THEN :admin_role
                        ELSE :default_role
                    END
                    WHERE {where_clause}
                    """
                ),
                params,
            )
            connection.execute(
                text("CREATE INDEX IF NOT EXISTS idx_users_role ON users(role)")
            )
    print("✅ Ensured users.role column exists and is populated")
|
||
|
||
|
||
def ensure_photo_media_type_column(inspector) -> None:
    """Ensure photos table contains media_type column."""
    if "photos" not in inspector.get_table_names():
        return

    photo_columns = {col["name"] for col in inspector.get_columns("photos")}
    if "media_type" in photo_columns:
        print("ℹ️ media_type column already exists in photos table")
        return

    print("🔄 Adding media_type column to photos table...")
    is_postgres = engine.dialect.name == "postgresql"

    with engine.connect() as connection:
        with connection.begin():
            if is_postgres:
                connection.execute(
                    text("ALTER TABLE photos ADD COLUMN IF NOT EXISTS media_type TEXT NOT NULL DEFAULT 'image'")
                )
                index_sql = "CREATE INDEX IF NOT EXISTS idx_photos_media_type ON photos(media_type)"
            else:
                connection.execute(
                    text("ALTER TABLE photos ADD COLUMN media_type TEXT DEFAULT 'image'")
                )
                # Rows that predate the column need the default set explicitly.
                connection.execute(
                    text("UPDATE photos SET media_type = 'image' WHERE media_type IS NULL")
                )
                index_sql = "CREATE INDEX idx_photos_media_type ON photos(media_type)"
            try:
                connection.execute(text(index_sql))
            except Exception:
                pass  # Index might already exist
    print("✅ Added media_type column to photos table")
|
||
|
||
|
||
def ensure_face_excluded_column(inspector) -> None:
    """Ensure faces table contains excluded column."""
    if "faces" not in inspector.get_table_names():
        print("ℹ️ Faces table does not exist yet - will be created with excluded column")
        return

    if "excluded" in {col["name"] for col in inspector.get_columns("faces")}:
        # Column already present; intentionally silent.
        return

    print("🔄 Adding excluded column to faces table...")

    is_postgres = engine.dialect.name == "postgresql"

    with engine.connect() as connection:
        with connection.begin():
            if is_postgres:
                connection.execute(
                    text("ALTER TABLE faces ADD COLUMN IF NOT EXISTS excluded BOOLEAN DEFAULT FALSE NOT NULL")
                )
                index_sql = "CREATE INDEX IF NOT EXISTS idx_faces_excluded ON faces(excluded)"
            else:
                # SQLite allows NOT NULL here because a DEFAULT is supplied.
                connection.execute(
                    text("ALTER TABLE faces ADD COLUMN excluded BOOLEAN DEFAULT 0 NOT NULL")
                )
                index_sql = "CREATE INDEX idx_faces_excluded ON faces(excluded)"
            try:
                connection.execute(text(index_sql))
            except Exception:
                pass  # Index might already exist
    print("✅ Added excluded column to faces table")
|
||
|
||
|
||
def ensure_photo_person_linkage_table(inspector) -> None:
    """Ensure photo_person_linkage table exists for direct video-person associations."""
    if "photo_person_linkage" in inspector.get_table_names():
        print("ℹ️ photo_person_linkage table already exists")
        return

    print("🔄 Creating photo_person_linkage table...")
    is_postgres = engine.dialect.name == "postgresql"

    with engine.connect() as connection:
        with connection.begin():
            if is_postgres:
                connection.execute(text("""
                CREATE TABLE IF NOT EXISTS photo_person_linkage (
                    id SERIAL PRIMARY KEY,
                    photo_id INTEGER NOT NULL REFERENCES photos(id) ON DELETE CASCADE,
                    person_id INTEGER NOT NULL REFERENCES people(id) ON DELETE CASCADE,
                    identified_by_user_id INTEGER REFERENCES users(id),
                    created_date TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
                    UNIQUE(photo_id, person_id)
                )
                """))
            else:
                # SQLite: AUTOINCREMENT/DATETIME instead of SERIAL/TIMESTAMP.
                connection.execute(text("""
                CREATE TABLE IF NOT EXISTS photo_person_linkage (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    photo_id INTEGER NOT NULL REFERENCES photos(id) ON DELETE CASCADE,
                    person_id INTEGER NOT NULL REFERENCES people(id) ON DELETE CASCADE,
                    identified_by_user_id INTEGER REFERENCES users(id),
                    created_date DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
                    UNIQUE(photo_id, person_id)
                )
                """))

            # One index per lookup column; tolerate pre-existing indexes.
            # PostgreSQL supports IF NOT EXISTS, SQLite relies on the except.
            prefix = "IF NOT EXISTS " if is_postgres else ""
            for index_name, column_name in (
                ("idx_photo_person_photo", "photo_id"),
                ("idx_photo_person_person", "person_id"),
                ("idx_photo_person_user", "identified_by_user_id"),
            ):
                try:
                    connection.execute(
                        text(f"CREATE INDEX {prefix}{index_name} ON photo_person_linkage({column_name})")
                    )
                except Exception:
                    pass  # Index might already exist
    print("✅ Created photo_person_linkage table")
|
||
|
||
|
||
def ensure_auth_user_is_active_column() -> None:
    """Ensure auth database users table contains is_active column.

    NOTE: Auth database is managed by the frontend. This function only checks/updates
    if the database and table already exist. It will not fail if they don't exist.

    Best-effort throughout: every failure path logs and returns instead of
    raising, because the auth database may be absent or read-only for us.
    """
    if auth_engine is None:
        # Auth database not configured
        return

    try:
        from sqlalchemy import inspect as sqlalchemy_inspect

        # Try to get inspector - gracefully handle if database doesn't exist
        try:
            auth_inspector = sqlalchemy_inspect(auth_engine)
        except Exception as inspect_exc:
            error_str = str(inspect_exc).lower()
            if "does not exist" in error_str or "database" in error_str:
                # Database doesn't exist - that's okay, frontend will create it
                return
            # Some other error - log but don't fail
            print(f"ℹ️ Could not inspect auth database: {inspect_exc}")
            return

        if "users" not in auth_inspector.get_table_names():
            # Table doesn't exist - that's okay, frontend will create it
            return

        columns = {column["name"] for column in auth_inspector.get_columns("users")}
        if "is_active" in columns:
            print("ℹ️ is_active column already exists in auth database users table")
            return

        # Column doesn't exist - try to add it
        print("🔄 Adding is_active column to auth database users table...")

        dialect = auth_engine.dialect.name

        try:
            with auth_engine.connect() as connection:
                with connection.begin():
                    if dialect == "postgresql":
                        connection.execute(
                            text("ALTER TABLE users ADD COLUMN IF NOT EXISTS is_active BOOLEAN DEFAULT TRUE")
                        )
                    else:
                        # SQLite
                        connection.execute(
                            text("ALTER TABLE users ADD COLUMN is_active BOOLEAN DEFAULT 1")
                        )
            print("✅ Added is_active column to auth database users table")
        except Exception as alter_exc:
            # Check if it's a permission error; if so, print the exact SQL an
            # administrator must run manually for this dialect.
            error_str = str(alter_exc)
            if "permission" in error_str.lower() or "insufficient" in error_str.lower() or "owner" in error_str.lower():
                print("⚠️ Cannot add is_active column: insufficient database privileges")
                print(" The column will need to be added manually by a database administrator:")
                if dialect == "postgresql":
                    print(" ALTER TABLE users ADD COLUMN is_active BOOLEAN DEFAULT TRUE;")
                else:
                    print(" ALTER TABLE users ADD COLUMN is_active BOOLEAN DEFAULT 1;")
                print(" Until then, users with linked data cannot be deleted.")
            else:
                # Some other error
                print(f"⚠️ Failed to add is_active column to auth database users table: {alter_exc}")
    except Exception as exc:
        print(f"⚠️ Failed to check/add is_active column to auth database users table: {exc}")
        # Don't raise - auth database might not be available or have permission issues
        # The delete endpoint will handle this case gracefully
|
||
|
||
|
||
def ensure_role_permissions_table(inspector) -> None:
    """Ensure the role_permissions table exists for permission matrix."""
    existing_tables = inspector.get_table_names()
    if "role_permissions" in existing_tables:
        return

    print("🔄 Creating role_permissions table...")
    try:
        # checkfirst guards against a concurrent create racing us.
        RolePermission.__table__.create(bind=engine, checkfirst=True)
    except Exception as exc:
        print(f"⚠️ Failed to create role_permissions table: {exc}")
    else:
        print("✅ Created role_permissions table")
|
||
|
||
|
||
def ensure_postgresql_database(db_url: str) -> None:
    """Ensure PostgreSQL database exists, create it if it doesn't.

    Best-effort bootstrap: parses *db_url*, probes the target database, and
    creates it (plus grants) via an admin connection when it is missing. All
    failures are reported to stdout and swallowed, so the caller's own
    connection attempt surfaces the real error with a clearer message.

    Args:
        db_url: SQLAlchemy-style database URL; non-PostgreSQL URLs are ignored.
    """
    if not db_url.startswith("postgresql"):
        return  # Not PostgreSQL, skip

    try:
        from urllib.parse import urlparse, parse_qs
        import os
        import psycopg2
        from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT

        # Parse the database URL. urlparse only understands plain schemes,
        # so strip the SQLAlchemy driver suffix first.
        parsed = urlparse(db_url.replace("postgresql+psycopg2://", "postgresql://"))
        db_name = parsed.path.lstrip("/")
        user = parsed.username
        password = parsed.password
        host = parsed.hostname or "localhost"
        port = parsed.port or 5432

        if not db_name:
            return  # No database name specified

        # Try to connect to the database directly; success means it exists.
        try:
            test_conn = psycopg2.connect(
                host=host,
                port=port,
                user=user,
                password=password,
                database=db_name
            )
            test_conn.close()
            return  # Database exists
        except psycopg2.OperationalError as e:
            if "does not exist" not in str(e):
                # Some other error (permissions, connection, etc.)
                print(f"⚠️ Cannot check if database '{db_name}' exists: {e}")
                return

        # Database doesn't exist - try to create it
        print(f"🔄 Creating PostgreSQL database '{db_name}'...")

        # Connect to the maintenance 'postgres' database to create the new one.
        # Try with the configured user first (they might have CREATEDB privilege).
        admin_conn = None
        try:
            admin_conn = psycopg2.connect(
                host=host,
                port=port,
                user=user,
                password=password,
                database="postgres"
            )
        except psycopg2.OperationalError:
            # Fall back to the postgres superuser (password may come from
            # POSTGRES_PASSWORD in the environment, or ~/.pgpass).
            try:
                import os
                postgres_password = os.getenv("POSTGRES_PASSWORD", "")
                admin_conn = psycopg2.connect(
                    host=host,
                    port=port,
                    user="postgres",
                    password=postgres_password if postgres_password else None,
                    database="postgres"
                )
            except psycopg2.OperationalError as e:
                print(f"⚠️ Cannot create database '{db_name}': insufficient privileges")
                print(f" Error: {e}")
                print(f" Please create it manually:")
                print(f" sudo -u postgres psql -c \"CREATE DATABASE {db_name};\"")
                print(f" sudo -u postgres psql -c \"GRANT ALL PRIVILEGES ON DATABASE {db_name} TO {user};\"")
                return

        if admin_conn is None:
            return

        # CREATE DATABASE cannot run inside a transaction block.
        admin_conn.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
        cursor = admin_conn.cursor()

        # Check pg_database to avoid a spurious CREATE failure.
        cursor.execute(
            "SELECT 1 FROM pg_database WHERE datname = %s",
            (db_name,)
        )
        exists = cursor.fetchone()

        if not exists:
            # Create the database
            try:
                cursor.execute(f'CREATE DATABASE "{db_name}"')
                if user != "postgres" and admin_conn.info.user == "postgres":
                    # Grant privileges to the user if we're connected as postgres
                    try:
                        cursor.execute(f'GRANT ALL PRIVILEGES ON DATABASE "{db_name}" TO "{user}"')
                    except Exception as grant_exc:
                        print(f"⚠️ Created database '{db_name}' but could not grant privileges: {grant_exc}")

                # Grant schema permissions (needed for creating tables)
                if admin_conn.info.user == "postgres":
                    try:
                        # Reconnect into the new database to grant schema
                        # permissions (grants are per-database).
                        # NOTE(review): cursor/admin_conn are closed here and
                        # closed again at the end of the function -- psycopg2
                        # treats the second close as a no-op, so this is safe.
                        cursor.close()
                        admin_conn.close()
                        schema_conn = psycopg2.connect(
                            host=host,
                            port=port,
                            user="postgres",
                            password=os.getenv("POSTGRES_PASSWORD", "") if os.getenv("POSTGRES_PASSWORD") else None,
                            database=db_name
                        )
                        schema_conn.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
                        schema_cursor = schema_conn.cursor()
                        schema_cursor.execute(f'GRANT ALL ON SCHEMA public TO "{user}"')
                        schema_cursor.execute(f'ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT ALL ON TABLES TO "{user}"')
                        schema_cursor.execute(f'ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT ALL ON SEQUENCES TO "{user}"')
                        schema_cursor.close()
                        schema_conn.close()
                        print(f"✅ Granted schema permissions to user '{user}'")
                    except Exception as schema_exc:
                        print(f"⚠️ Created database '{db_name}' but could not grant schema permissions: {schema_exc}")
                        print(f" Please run manually:")
                        print(f" sudo -u postgres psql -d {db_name} -c \"GRANT ALL ON SCHEMA public TO {user};\"")
                        print(f" sudo -u postgres psql -d {db_name} -c \"ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT ALL ON TABLES TO {user};\"")

                print(f"✅ Created database '{db_name}'")
            except Exception as create_exc:
                print(f"⚠️ Failed to create database '{db_name}': {create_exc}")
                print(f" Please create it manually:")
                print(f" sudo -u postgres psql -c \"CREATE DATABASE {db_name};\"")
                if user != "postgres":
                    print(f" sudo -u postgres psql -c \"GRANT ALL PRIVILEGES ON DATABASE {db_name} TO {user};\"")
                cursor.close()
                admin_conn.close()
                return
        else:
            print(f"ℹ️ Database '{db_name}' already exists")

        cursor.close()
        admin_conn.close()
    except Exception as exc:
        print(f"⚠️ Failed to ensure database exists: {exc}")
        import traceback
        print(f" Traceback: {traceback.format_exc()}")
        # Don't raise - let the connection attempt fail naturally with a clearer error
|
||
|
||
|
||
def ensure_auth_database_tables() -> None:
    """Ensure auth database tables exist, create them if they don't.

    NOTE: This function is deprecated. Auth database is now managed by the
    frontend. It is kept only for backward compatibility and intentionally
    performs no work.
    """
    # Auth database is managed by the frontend - do not create tables here.
    return None
|
||
@asynccontextmanager
async def lifespan(app: FastAPI):
    """Lifespan context manager for startup and shutdown events.

    Bootstraps the database (database/tables/columns) on first run, starts the
    background RQ worker, yields for the application's lifetime, then stops
    the worker on shutdown.

    Note: the ``@asynccontextmanager`` decorator is required -- FastAPI's
    ``lifespan=`` parameter expects an async context-manager factory, not a
    bare async generator function. The decorator was imported but missing.
    """
    # Ensure database exists and tables are created on first run
    try:
        # Ensure main PostgreSQL database exists.
        # This must happen BEFORE we try to use the engine.
        ensure_postgresql_database(database_url)

        # Note: Auth database is managed by the frontend, not created here

        if database_url.startswith("sqlite"):
            # For SQLite, make sure the directory holding the DB file exists.
            db_path = database_url.replace("sqlite:///", "")
            db_file = Path(db_path)
            db_file.parent.mkdir(parents=True, exist_ok=True)

        # Only create tables if they don't already exist (safety check)
        inspector = inspect(engine)
        existing_tables = set(inspector.get_table_names())

        # Check if required application tables exist (not just alembic_version)
        required_tables = {"photos", "people", "faces", "tags", "phototaglinkage", "person_encodings", "photo_favorites", "users", "photo_person_linkage"}
        missing_tables = required_tables - existing_tables

        if missing_tables:
            # Some required tables are missing - create all tables.
            # create_all() only creates missing tables, won't drop existing ones
            Base.metadata.create_all(bind=engine)
            if len(missing_tables) == len(required_tables):
                print("✅ Database initialized (first run - tables created)")
            else:
                print(f"✅ Database tables created (missing tables: {', '.join(missing_tables)})")
        else:
            # All required tables exist - don't recreate (prevents data loss)
            print(f"✅ Database already initialized ({len(existing_tables)} tables exist)")

        # Ensure new columns exist (backward compatibility without migrations)
        ensure_user_password_hash_column(inspector)
        ensure_user_password_change_required_column(inspector)
        ensure_user_email_unique_constraint(inspector)
        ensure_face_identified_by_user_id_column(inspector)
        ensure_user_role_column(inspector)
        ensure_photo_media_type_column(inspector)
        ensure_photo_person_linkage_table(inspector)
        ensure_face_excluded_column(inspector)
        ensure_role_permissions_table(inspector)

        # Setup auth database tables for both frontends (viewer and admin)
        if auth_engine is not None:
            try:
                ensure_auth_user_is_active_column()
                # Import and call worker's setup function to create all auth tables
                from backend.worker import setup_auth_database_tables
                setup_auth_database_tables()
            except Exception as auth_exc:
                # Auth database might not exist yet - that's okay, frontend will handle it
                print(f"ℹ️ Auth database not available: {auth_exc}")
                print(" Frontend will manage auth database setup")
    except Exception as exc:
        print(f"❌ Database initialization failed: {exc}")
        raise
    # Startup
    start_worker()
    yield
    # Shutdown
    stop_worker()
|
||
|
||
|
||
def create_app() -> FastAPI:
    """Create and configure the FastAPI application instance.

    Wires up permissive CORS, the unprefixed infrastructure endpoints
    (health/version/metrics), and all versioned API routers under /api/v1.
    """
    application = FastAPI(
        title=APP_TITLE,
        version=APP_VERSION,
        lifespan=lifespan,
    )

    # Wide-open CORS: every origin, method, and header is allowed.
    application.add_middleware(
        CORSMiddleware,
        allow_origins=["*"],
        allow_credentials=True,
        allow_methods=["*"],
        allow_headers=["*"],
    )

    # Infrastructure endpoints live at the root, tagged individually.
    for router, tag in (
        (health_router, "health"),
        (version_router, "meta"),
        (metrics_router, "metrics"),
    ):
        application.include_router(router, tags=[tag])

    # Versioned API routers, registered in the original order.
    for router in (
        auth_router,
        jobs_router,
        photos_router,
        faces_router,
        people_router,
        videos_router,
        pending_identifications_router,
        pending_linkages_router,
        reported_photos_router,
        pending_photos_router,
        tags_router,
        users_router,
        auth_users_router,
        role_permissions_router,
    ):
        application.include_router(router, prefix="/api/v1")

    return application
|
||
|
||
|
||
# Module-level ASGI application instance (e.g. `uvicorn backend.app:app`).
app = create_app()
|
||
|
||
|