punimtag/backend/db/session.py
Tanya 68d280e8f5 feat: Add new analysis documents and update installation scripts for backend integration
This commit introduces several new analysis documents, including Auto-Match Load Performance Analysis, Folder Picker Analysis, Monorepo Migration Summary, and various performance analysis documents. Additionally, the installation scripts are updated to reflect changes in backend service paths, ensuring proper integration with the new backend structure. These enhancements provide better documentation and streamline the setup process for users.
2025-12-30 15:04:32 -05:00

107 lines
2.8 KiB
Python

from __future__ import annotations

import os
from pathlib import Path
from typing import Generator

from dotenv import load_dotenv
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
# Load environment variables from a repo-root .env file, if one exists.
# This module lives at punimtag/backend/db/, so four .parent hops climb to
# the directory above punimtag/ — presumably the repository root (TODO:
# confirm against the checkout layout). load_dotenv is a no-op when the
# file is missing, so this is safe in environments configured purely via
# real environment variables.
env_path = Path(__file__).parent.parent.parent.parent / ".env"
load_dotenv(dotenv_path=env_path)
def get_database_url() -> str:
    """Return the URL for the primary application database.

    Reads ``DATABASE_URL`` from the environment. An unset *or empty*
    value falls back to a local SQLite file suitable for development.

    Returns:
        A SQLAlchemy-compatible database URL string.
    """
    # `or` rather than a getenv default: an empty DATABASE_URL must also
    # fall through to the SQLite default (matches the original truthiness
    # check).
    return os.getenv("DATABASE_URL") or "sqlite:///data/punimtag.db"
def get_auth_database_url() -> str:
    """Return the URL for the authentication database.

    Unlike the primary database, the auth database has no default: it is
    strictly opt-in via the environment.

    Returns:
        The value of ``DATABASE_URL_AUTH``.

    Raises:
        ValueError: If ``DATABASE_URL_AUTH`` is unset or empty.
    """
    db_url = os.getenv("DATABASE_URL_AUTH")
    # Empty string is treated the same as unset.
    if not db_url:
        raise ValueError("DATABASE_URL_AUTH environment variable not set")
    return db_url
# Primary database engine, configured at import time from the environment.
database_url = get_database_url()

# SQLite refuses cross-thread connection sharing by default; disable that
# check so the engine can serve a threaded web server. Other backends need
# no special connect arguments.
connect_args = (
    {"check_same_thread": False} if database_url.startswith("sqlite") else {}
)

# pool_pre_ping validates connections before use on every backend; the
# explicit pool sizing applies only to PostgreSQL.
pool_kwargs = {"pool_pre_ping": True}
if database_url.startswith("postgresql"):
    pool_kwargs["pool_size"] = 10
    pool_kwargs["max_overflow"] = 20
    pool_kwargs["pool_recycle"] = 3600

engine = create_engine(
    database_url,
    future=True,
    connect_args=connect_args,
    **pool_kwargs,
)

# Session factory bound to the primary engine; sessions are explicit about
# flushing and committing.
SessionLocal = sessionmaker(bind=engine, autoflush=False, autocommit=False, future=True)
def get_db() -> Generator:
    """Yield a primary-database session and guarantee it is closed.

    Intended for use as a dependency tied to a request lifecycle: the
    session is handed to the caller and closed when the generator is
    finalized, even if the request raised.
    """
    session = SessionLocal()
    try:
        yield session
    finally:
        session.close()
# Auth database setup — strictly optional. When DATABASE_URL_AUTH is not
# configured, get_auth_database_url raises and both the engine and the
# session factory are left as None; get_auth_db surfaces that at call time.
try:
    auth_database_url = get_auth_database_url()

    # Same backend-specific tuning as the primary engine above.
    auth_connect_args = (
        {"check_same_thread": False}
        if auth_database_url.startswith("sqlite")
        else {}
    )
    auth_pool_kwargs = {"pool_pre_ping": True}
    if auth_database_url.startswith("postgresql"):
        auth_pool_kwargs["pool_size"] = 10
        auth_pool_kwargs["max_overflow"] = 20
        auth_pool_kwargs["pool_recycle"] = 3600

    auth_engine = create_engine(
        auth_database_url,
        future=True,
        connect_args=auth_connect_args,
        **auth_pool_kwargs,
    )
    AuthSessionLocal = sessionmaker(
        bind=auth_engine, autoflush=False, autocommit=False, future=True
    )
except ValueError:
    # DATABASE_URL_AUTH not set — auth database not available.
    auth_engine = None
    AuthSessionLocal = None
def get_auth_db() -> Generator:
    """Yield an auth-database session and guarantee it is closed.

    Raises:
        ValueError: If the auth database was never configured (i.e.
            ``DATABASE_URL_AUTH`` was unset when this module was imported).
    """
    # Guard: the factory is None when DATABASE_URL_AUTH was absent at import.
    if AuthSessionLocal is None:
        raise ValueError("Auth database not configured. Set DATABASE_URL_AUTH environment variable.")
    session = AuthSessionLocal()
    try:
        yield session
    finally:
        session.close()