From ac07932e144aa03dc55b72350ec8950b7dd7e028 Mon Sep 17 00:00:00 2001 From: tanyar09 Date: Mon, 10 Nov 2025 13:36:51 -0500 Subject: [PATCH] chore: Remove Alembic migration files and configuration This commit deletes the Alembic migration files and configuration, including the alembic.ini file, env.py, and various migration scripts. This cleanup is part of the transition to a new database management approach, ensuring that outdated migration artifacts do not interfere with future development. The requirements.txt file has also been updated to remove the Alembic dependency. This commit also contains functional changes beyond the Alembic removal: a developer-mode context and settings toggle in the frontend, reworked job-cancellation handling in the Process page and jobs/face-service backend, and maiden-name support in the people search endpoint. --- alembic.ini | 117 -------------- alembic/env.py | 79 ---------- alembic/script.py.mako | 27 ---- .../20251103_add_processed_to_photos.py | 30 ---- .../20251106_add_landmarks_to_faces.py | 25 --- .../versions/4d53a59b0e41_initial_schema.py | 143 ------------------ frontend/src/App.tsx | 9 +- frontend/src/context/DeveloperModeContext.tsx | 42 +++++ frontend/src/pages/AutoMatch.tsx | 4 + frontend/src/pages/Identify.tsx | 125 +++------------ frontend/src/pages/Modify.tsx | 4 +- frontend/src/pages/Process.tsx | 141 +++++++++-------- frontend/src/pages/Settings.tsx | 32 +++- requirements.txt | 1 - src/web/api/jobs.py | 31 +++- src/web/api/people.py | 12 +- src/web/services/face_service.py | 62 ++++---- src/web/services/tasks.py | 7 +- 18 files changed, 244 insertions(+), 647 deletions(-) delete mode 100644 alembic.ini delete mode 100644 alembic/env.py delete mode 100644 alembic/script.py.mako delete mode 100644 alembic/versions/20251103_add_processed_to_photos.py delete mode 100644 alembic/versions/20251106_add_landmarks_to_faces.py delete mode 100644 alembic/versions/4d53a59b0e41_initial_schema.py create mode 100644 frontend/src/context/DeveloperModeContext.tsx diff --git a/alembic.ini b/alembic.ini deleted file mode 100644 index 0323dd5..0000000 --- a/alembic.ini +++ /dev/null @@ -1,117 +0,0 @@ -# A generic, single database 
configuration. - -[alembic] -# path to migration scripts -script_location = alembic - -# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s -# Uncomment the line below if you want the files to be prepended with date and time -# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s - -# sys.path path, will be prepended to sys.path if present. -# defaults to the current working directory. -prepend_sys_path = . - -# timezone to use when rendering the date within the migration file -# as well as the filename. -# If specified, requires the python-dateutil library that can be -# installed by adding `alembic[tz]` to the pip requirements -# string value is passed to dateutil.tz.gettz() -# leave blank for localtime -# timezone = - -# max length of characters to apply to the -# "slug" field -# truncate_slug_length = 40 - -# set to 'true' to run the environment during -# the 'revision' command, regardless of autogenerate -# revision_environment = false - -# set to 'true' to allow .pyc and .pyo files without -# a source .py file to be detected as revisions in the -# versions/ directory -# sourceless = false - -# version location specification; This defaults -# to alembic/versions. When using multiple version -# directories, initial revisions must be specified with --version-path. -# The path separator used here should be the separator specified by "version_path_separator" below. -# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions - -# version path separator; As mentioned above, this is the character used to split -# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep. -# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas. 
-# Valid values for version_path_separator are: -# -# version_path_separator = : -# version_path_separator = ; -# version_path_separator = space -version_path_separator = os # Use os.pathsep. Default configuration used for new projects. - -# set to 'true' to search source files recursively -# in each "version_locations" directory -# new in Alembic version 1.10 -# recursive_version_locations = false - -# the output encoding used when revision files -# are written from script.py.mako -# output_encoding = utf-8 - -# sqlalchemy.url - will be read from src.web.db.session -# Override with: alembic -x db_url=sqlite:///data/punimtag.db -sqlalchemy.url = sqlite:///data/punimtag.db - - -[post_write_hooks] -# post_write_hooks defines scripts or Python functions that are run -# on newly generated revision scripts. See the documentation for further -# detail and examples - -# format using "black" - use the console_scripts runner, against the "black" entrypoint -# hooks = black -# black.type = console_scripts -# black.entrypoint = black -# black.options = -l 79 REVISION_SCRIPT_FILENAME - -# lint with attempts to fix using "ruff" - use the exec runner, execute a binary -# hooks = ruff -# ruff.type = exec -# ruff.executable = %(here)s/.venv/bin/ruff -# ruff.options = --fix REVISION_SCRIPT_FILENAME - -# Logging configuration -[loggers] -keys = root,sqlalchemy,alembic - -[handlers] -keys = console - -[formatters] -keys = generic - -[logger_root] -level = WARN -handlers = console -qualname = - -[logger_sqlalchemy] -level = WARN -handlers = -qualname = sqlalchemy.engine - -[logger_alembic] -level = INFO -handlers = -qualname = alembic - -[handler_console] -class = StreamHandler -args = (sys.stderr,) -level = NOTSET -formatter = generic - -[formatter_generic] -format = %(levelname)-5.5s [%(name)s] %(message)s -datefmt = %H:%M:%S - diff --git a/alembic/env.py b/alembic/env.py deleted file mode 100644 index eb190b9..0000000 --- a/alembic/env.py +++ /dev/null @@ -1,79 +0,0 @@ -"""Alembic 
environment configuration.""" - -from logging.config import fileConfig - -from sqlalchemy import engine_from_config, pool - -from alembic import context - -# Import models for autogenerate -from src.web.db.models import Base - -# this is the Alembic Config object, which provides -# access to the values within the .ini file in use. -config = context.config - -# Interpret the config file for Python logging. -# This line sets up loggers basically. -if config.config_file_name is not None: - fileConfig(config.config_file_name) - -# add your model's MetaData object here -# for 'autogenerate' support -target_metadata = Base.metadata - -# other values from the config, defined by the needs of env.py, -# can be acquired: -# my_important_option = config.get_main_option("my_important_option") -# ... etc. - - -def run_migrations_offline() -> None: - """Run migrations in 'offline' mode. - - This configures the context with just a URL - and not an Engine, though an Engine is acceptable - here as well. By skipping the Engine creation - we don't even need a DBAPI to be available. - - Calls to context.execute() here emit the given string to the - script output. - - """ - url = config.get_main_option("sqlalchemy.url") - context.configure( - url=url, - target_metadata=target_metadata, - literal_binds=True, - dialect_opts={"paramstyle": "named"}, - ) - - with context.begin_transaction(): - context.run_migrations() - - -def run_migrations_online() -> None: - """Run migrations in 'online' mode. - - In this scenario we need to create an Engine - and associate a connection with the context. 
- - """ - connectable = engine_from_config( - config.get_section(config.config_ini_section, {}), - prefix="sqlalchemy.", - poolclass=pool.NullPool, - ) - - with connectable.connect() as connection: - context.configure(connection=connection, target_metadata=target_metadata) - - with context.begin_transaction(): - context.run_migrations() - - -if context.is_offline_mode(): - run_migrations_offline() -else: - run_migrations_online() - diff --git a/alembic/script.py.mako b/alembic/script.py.mako deleted file mode 100644 index 3c2e787..0000000 --- a/alembic/script.py.mako +++ /dev/null @@ -1,27 +0,0 @@ -"""${message} - -Revision ID: ${up_revision} -Revises: ${down_revision | comma,n} -Create Date: ${create_date} - -""" -from typing import Sequence, Union - -from alembic import op -import sqlalchemy as sa -${imports if imports else ""} - -# revision identifiers, used by Alembic. -revision: str = ${repr(up_revision)} -down_revision: Union[str, None] = ${repr(down_revision)} -branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)} -depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)} - - -def upgrade() -> None: - ${upgrades if upgrades else "pass"} - - -def downgrade() -> None: - ${downgrades if downgrades else "pass"} - diff --git a/alembic/versions/20251103_add_processed_to_photos.py b/alembic/versions/20251103_add_processed_to_photos.py deleted file mode 100644 index 361fec7..0000000 --- a/alembic/versions/20251103_add_processed_to_photos.py +++ /dev/null @@ -1,30 +0,0 @@ -"""add processed column to photos - -Revision ID: add_processed_to_photos_20251103 -Revises: 4d53a59b0e41 -Create Date: 2025-11-03 -""" - -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. 
-revision = 'add_processed_to_photos_20251103' -down_revision = '4d53a59b0e41' -branch_labels = None -depends_on = None - - -def upgrade() -> None: - op.add_column('photos', sa.Column('processed', sa.Boolean(), nullable=False, server_default=sa.false())) - # Drop server default after backfilling default - op.alter_column('photos', 'processed', server_default=None) - op.create_index('ix_photos_processed', 'photos', ['processed'], unique=False) - - -def downgrade() -> None: - op.drop_index('ix_photos_processed', table_name='photos') - op.drop_column('photos', 'processed') - - diff --git a/alembic/versions/20251106_add_landmarks_to_faces.py b/alembic/versions/20251106_add_landmarks_to_faces.py deleted file mode 100644 index f9038bf..0000000 --- a/alembic/versions/20251106_add_landmarks_to_faces.py +++ /dev/null @@ -1,25 +0,0 @@ -"""add landmarks column to faces - -Revision ID: add_landmarks_to_faces_20251106 -Revises: add_processed_to_photos_20251103 -Create Date: 2025-11-06 -""" - -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. -revision = 'add_landmarks_to_faces_20251106' -down_revision = 'add_processed_to_photos_20251103' -branch_labels = None -depends_on = None - - -def upgrade() -> None: - op.add_column('faces', sa.Column('landmarks', sa.Text(), nullable=True)) - - -def downgrade() -> None: - op.drop_column('faces', 'landmarks') - diff --git a/alembic/versions/4d53a59b0e41_initial_schema.py b/alembic/versions/4d53a59b0e41_initial_schema.py deleted file mode 100644 index 2ece1d1..0000000 --- a/alembic/versions/4d53a59b0e41_initial_schema.py +++ /dev/null @@ -1,143 +0,0 @@ -"""Initial schema - -Revision ID: 4d53a59b0e41 -Revises: -Create Date: 2025-10-31 12:03:50.406080 - -""" -from typing import Sequence, Union - -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. 
-revision: str = '4d53a59b0e41' -down_revision: Union[str, None] = None -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None - - -def upgrade() -> None: - # ### commands auto generated by Alembic - please adjust! ### - op.create_table('people', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('display_name', sa.String(length=256), nullable=False), - sa.Column('given_name', sa.String(length=128), nullable=True), - sa.Column('family_name', sa.String(length=128), nullable=True), - sa.Column('notes', sa.Text(), nullable=True), - sa.Column('created_at', sa.DateTime(), nullable=False), - sa.PrimaryKeyConstraint('id') - ) - op.create_index(op.f('ix_people_display_name'), 'people', ['display_name'], unique=False) - op.create_index(op.f('ix_people_id'), 'people', ['id'], unique=False) - op.create_table('photos', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('path', sa.String(length=2048), nullable=False), - sa.Column('filename', sa.String(length=512), nullable=False), - sa.Column('checksum', sa.String(length=64), nullable=True), - sa.Column('date_added', sa.DateTime(), nullable=False), - sa.Column('date_taken', sa.DateTime(), nullable=True), - sa.Column('width', sa.Integer(), nullable=True), - sa.Column('height', sa.Integer(), nullable=True), - sa.Column('mime_type', sa.String(length=128), nullable=True), - sa.PrimaryKeyConstraint('id') - ) - op.create_index(op.f('ix_photos_checksum'), 'photos', ['checksum'], unique=True) - op.create_index(op.f('ix_photos_date_taken'), 'photos', ['date_taken'], unique=False) - op.create_index(op.f('ix_photos_id'), 'photos', ['id'], unique=False) - op.create_index(op.f('ix_photos_path'), 'photos', ['path'], unique=True) - op.create_table('tags', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('tag', sa.String(length=128), nullable=False), - sa.Column('created_at', sa.DateTime(), nullable=False), - sa.PrimaryKeyConstraint('id') - ) - 
op.create_index(op.f('ix_tags_id'), 'tags', ['id'], unique=False) - op.create_index(op.f('ix_tags_tag'), 'tags', ['tag'], unique=True) - op.create_table('faces', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('photo_id', sa.Integer(), nullable=False), - sa.Column('person_id', sa.Integer(), nullable=True), - sa.Column('bbox_x', sa.Integer(), nullable=False), - sa.Column('bbox_y', sa.Integer(), nullable=False), - sa.Column('bbox_w', sa.Integer(), nullable=False), - sa.Column('bbox_h', sa.Integer(), nullable=False), - sa.Column('embedding', sa.LargeBinary(), nullable=False), - sa.Column('confidence', sa.Integer(), nullable=True), - sa.Column('quality', sa.Integer(), nullable=True), - sa.Column('model', sa.String(length=64), nullable=True), - sa.Column('detector', sa.String(length=64), nullable=True), - sa.Column('created_at', sa.DateTime(), nullable=False), - sa.ForeignKeyConstraint(['person_id'], ['people.id'], ), - sa.ForeignKeyConstraint(['photo_id'], ['photos.id'], ), - sa.PrimaryKeyConstraint('id') - ) - op.create_index('idx_faces_quality', 'faces', ['quality'], unique=False) - op.create_index(op.f('ix_faces_id'), 'faces', ['id'], unique=False) - op.create_index(op.f('ix_faces_person_id'), 'faces', ['person_id'], unique=False) - op.create_index(op.f('ix_faces_photo_id'), 'faces', ['photo_id'], unique=False) - op.create_index(op.f('ix_faces_quality'), 'faces', ['quality'], unique=False) - op.create_table('photo_tags', - sa.Column('photo_id', sa.Integer(), nullable=False), - sa.Column('tag_id', sa.Integer(), nullable=False), - sa.Column('created_at', sa.DateTime(), nullable=False), - sa.ForeignKeyConstraint(['photo_id'], ['photos.id'], ), - sa.ForeignKeyConstraint(['tag_id'], ['tags.id'], ), - sa.PrimaryKeyConstraint('photo_id', 'tag_id'), - sa.UniqueConstraint('photo_id', 'tag_id', name='uq_photo_tag') - ) - op.create_index('idx_photo_tags_photo', 'photo_tags', ['photo_id'], unique=False) - op.create_index('idx_photo_tags_tag', 'photo_tags', 
['tag_id'], unique=False) - op.create_table('person_embeddings', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('person_id', sa.Integer(), nullable=False), - sa.Column('face_id', sa.Integer(), nullable=False), - sa.Column('embedding', sa.LargeBinary(), nullable=False), - sa.Column('quality', sa.Integer(), nullable=True), - sa.Column('model', sa.String(length=64), nullable=True), - sa.Column('created_at', sa.DateTime(), nullable=False), - sa.ForeignKeyConstraint(['face_id'], ['faces.id'], ), - sa.ForeignKeyConstraint(['person_id'], ['people.id'], ), - sa.PrimaryKeyConstraint('id') - ) - op.create_index('idx_person_embeddings_person', 'person_embeddings', ['person_id'], unique=False) - op.create_index('idx_person_embeddings_quality', 'person_embeddings', ['quality'], unique=False) - op.create_index(op.f('ix_person_embeddings_face_id'), 'person_embeddings', ['face_id'], unique=False) - op.create_index(op.f('ix_person_embeddings_id'), 'person_embeddings', ['id'], unique=False) - op.create_index(op.f('ix_person_embeddings_person_id'), 'person_embeddings', ['person_id'], unique=False) - op.create_index(op.f('ix_person_embeddings_quality'), 'person_embeddings', ['quality'], unique=False) - # ### end Alembic commands ### - - -def downgrade() -> None: - # ### commands auto generated by Alembic - please adjust! 
### - op.drop_index(op.f('ix_person_embeddings_quality'), table_name='person_embeddings') - op.drop_index(op.f('ix_person_embeddings_person_id'), table_name='person_embeddings') - op.drop_index(op.f('ix_person_embeddings_id'), table_name='person_embeddings') - op.drop_index(op.f('ix_person_embeddings_face_id'), table_name='person_embeddings') - op.drop_index('idx_person_embeddings_quality', table_name='person_embeddings') - op.drop_index('idx_person_embeddings_person', table_name='person_embeddings') - op.drop_table('person_embeddings') - op.drop_index('idx_photo_tags_tag', table_name='photo_tags') - op.drop_index('idx_photo_tags_photo', table_name='photo_tags') - op.drop_table('photo_tags') - op.drop_index(op.f('ix_faces_quality'), table_name='faces') - op.drop_index(op.f('ix_faces_photo_id'), table_name='faces') - op.drop_index(op.f('ix_faces_person_id'), table_name='faces') - op.drop_index(op.f('ix_faces_id'), table_name='faces') - op.drop_index('idx_faces_quality', table_name='faces') - op.drop_table('faces') - op.drop_index(op.f('ix_tags_tag'), table_name='tags') - op.drop_index(op.f('ix_tags_id'), table_name='tags') - op.drop_table('tags') - op.drop_index(op.f('ix_photos_path'), table_name='photos') - op.drop_index(op.f('ix_photos_id'), table_name='photos') - op.drop_index(op.f('ix_photos_date_taken'), table_name='photos') - op.drop_index(op.f('ix_photos_checksum'), table_name='photos') - op.drop_table('photos') - op.drop_index(op.f('ix_people_id'), table_name='people') - op.drop_index(op.f('ix_people_display_name'), table_name='people') - op.drop_table('people') - # ### end Alembic commands ### - diff --git a/frontend/src/App.tsx b/frontend/src/App.tsx index b05b073..084924d 100644 --- a/frontend/src/App.tsx +++ b/frontend/src/App.tsx @@ -1,5 +1,6 @@ import { BrowserRouter, Routes, Route, Navigate } from 'react-router-dom' import { AuthProvider, useAuth } from './context/AuthContext' +import { DeveloperModeProvider } from './context/DeveloperModeContext' 
import Login from './pages/Login' import Dashboard from './pages/Dashboard' import Search from './pages/Search' @@ -49,9 +50,11 @@ function AppRoutes() { function App() { return ( - - - + + + + + ) } diff --git a/frontend/src/context/DeveloperModeContext.tsx b/frontend/src/context/DeveloperModeContext.tsx new file mode 100644 index 0000000..25a1bfa --- /dev/null +++ b/frontend/src/context/DeveloperModeContext.tsx @@ -0,0 +1,42 @@ +import { createContext, useContext, useState, useEffect, ReactNode } from 'react' + +interface DeveloperModeContextType { + isDeveloperMode: boolean + setDeveloperMode: (enabled: boolean) => void +} + +const DeveloperModeContext = createContext(undefined) + +const STORAGE_KEY = 'punimtag_developer_mode' + +export function DeveloperModeProvider({ children }: { children: ReactNode }) { + const [isDeveloperMode, setIsDeveloperMode] = useState(false) + + // Load from localStorage on mount + useEffect(() => { + const stored = localStorage.getItem(STORAGE_KEY) + if (stored !== null) { + setIsDeveloperMode(stored === 'true') + } + }, []) + + const setDeveloperMode = (enabled: boolean) => { + setIsDeveloperMode(enabled) + localStorage.setItem(STORAGE_KEY, enabled.toString()) + } + + return ( + + {children} + + ) +} + +export function useDeveloperMode() { + const context = useContext(DeveloperModeContext) + if (context === undefined) { + throw new Error('useDeveloperMode must be used within a DeveloperModeProvider') + } + return context +} + diff --git a/frontend/src/pages/AutoMatch.tsx b/frontend/src/pages/AutoMatch.tsx index 75d361e..338655a 100644 --- a/frontend/src/pages/AutoMatch.tsx +++ b/frontend/src/pages/AutoMatch.tsx @@ -131,6 +131,10 @@ export default function AutoMatch() { if (summary) { alert(summary) } + + // Reload faces after auto-accept to remove auto-accepted faces from the list + await loadAutoMatch() + return } if (response.people.length === 0) { diff --git a/frontend/src/pages/Identify.tsx b/frontend/src/pages/Identify.tsx 
index f7eaff8..2c93216 100644 --- a/frontend/src/pages/Identify.tsx +++ b/frontend/src/pages/Identify.tsx @@ -35,7 +35,6 @@ export default function Identify() { const [imageLoading, setImageLoading] = useState(false) const [filtersCollapsed, setFiltersCollapsed] = useState(false) const [loadingFaces, setLoadingFaces] = useState(false) - const [loadingProgress, setLoadingProgress] = useState({ current: 0, total: 0, message: '' }) // Store form data per face ID (matching desktop behavior) const [faceFormData, setFaceFormData] = useState { setLoadingFaces(true) - setLoadingProgress({ current: 0, total: 0, message: 'Loading faces...' }) try { const res = await facesApi.getUnidentified({ @@ -73,7 +71,6 @@ export default function Identify() { // Apply unique faces filter if enabled if (uniqueFacesOnly) { - setLoadingProgress({ current: 0, total: res.items.length, message: 'Filtering unique faces...' }) const filtered = await filterUniqueFaces(res.items) setFaces(filtered) setTotal(filtered.length) @@ -84,7 +81,6 @@ export default function Identify() { setCurrentIdx(0) } finally { setLoadingFaces(false) - setLoadingProgress({ current: 0, total: 0, message: '' }) } } @@ -102,40 +98,17 @@ export default function Identify() { similarityMap.set(face.id, new Set()) } - // Update progress - loading all faces once - setLoadingProgress({ - current: 0, - total: faces.length, - message: 'Loading all faces from database...' 
- }) - try { // Get all face IDs const faceIds = faces.map(f => f.id) - // Update progress - calculating similarities - setLoadingProgress({ - current: 0, - total: faces.length, - message: `Calculating similarities for ${faces.length} faces (this may take a while)...` - }) - // Call batch similarity endpoint - loads all faces once from DB // Note: This is where the heavy computation happens (comparing N faces to M faces) - // The progress bar will show 0% during this time as we can't track backend progress const batchRes = await facesApi.batchSimilarity({ face_ids: faceIds, min_confidence: 60.0 }) - // Update progress - calculation complete, now processing results - const totalPairs = batchRes.pairs.length - setLoadingProgress({ - current: 0, - total: totalPairs, - message: `Similarity calculation complete! Processing ${totalPairs} results...` - }) - // Build similarity map from batch results // Note: results include similarities to all faces in DB, but we only care about // similarities between faces in the current list @@ -144,14 +117,8 @@ export default function Identify() { // Only include pairs where both faces are in the current list if (!faceMap.has(pair.face_id_1) || !faceMap.has(pair.face_id_2)) { processedPairs++ - // Update progress every 100 pairs or at the end - if (processedPairs % 100 === 0 || processedPairs === totalPairs) { - setLoadingProgress({ - current: processedPairs, - total: totalPairs, - message: `Processing similarity results... 
(${processedPairs} / ${totalPairs})` - }) - // Allow UI to update + // Allow UI to update periodically + if (processedPairs % 100 === 0 || processedPairs === batchRes.pairs.length) { await new Promise(resolve => setTimeout(resolve, 0)) } continue @@ -167,14 +134,8 @@ export default function Identify() { similarityMap.set(pair.face_id_2, set2) processedPairs++ - // Update progress every 100 pairs or at the end - if (processedPairs % 100 === 0 || processedPairs === totalPairs) { - setLoadingProgress({ - current: processedPairs, - total: totalPairs, - message: `Processing similarity results... (${processedPairs} / ${totalPairs})` - }) - // Allow UI to update + // Allow UI to update periodically + if (processedPairs % 100 === 0 || processedPairs === batchRes.pairs.length) { await new Promise(resolve => setTimeout(resolve, 0)) } } @@ -434,57 +395,25 @@ export default function Identify() {

Identify

- {/* Loading Progress Bar */} - {loadingFaces && ( -
-
- - {loadingProgress.message || 'Loading faces...'} - - {loadingProgress.total > 0 && ( - - {loadingProgress.current} / {loadingProgress.total} - {loadingProgress.total > 0 && ( - - ({Math.round((loadingProgress.current / loadingProgress.total) * 100)}%) - - )} - - )} -
-
- {loadingProgress.total > 0 ? ( -
- ) : ( -
-
- -
- )} -
-
- )} -
{/* Left: Controls and current face */}
+ {/* Unique Faces Checkbox - Outside Filters */} +
+ +

+ Hide duplicates with ≥60% match confidence +

+
+
setFiltersCollapsed(!filtersCollapsed)}>

Filters

@@ -545,20 +474,6 @@ export default function Identify() {
-
- -

- Hide duplicates with ≥60% match confidence -

-
-

Type Last Name

+

Search by Last Name or Maiden Name

{/* People list */} diff --git a/frontend/src/pages/Process.tsx b/frontend/src/pages/Process.tsx index a843f0a..11378af 100644 --- a/frontend/src/pages/Process.tsx +++ b/frontend/src/pages/Process.tsx @@ -1,6 +1,7 @@ import { useState, useRef, useEffect } from 'react' import { facesApi, ProcessFacesRequest } from '../api/faces' import { jobsApi, JobResponse, JobStatus } from '../api/jobs' +import { useDeveloperMode } from '../context/DeveloperModeContext' interface JobProgress { id: string @@ -17,6 +18,7 @@ const DETECTOR_OPTIONS = ['retinaface', 'mtcnn', 'opencv', 'ssd'] const MODEL_OPTIONS = ['ArcFace', 'Facenet', 'Facenet512', 'VGG-Face'] export default function Process() { + const { isDeveloperMode } = useDeveloperMode() const [batchSize, setBatchSize] = useState(undefined) const [detectorBackend, setDetectorBackend] = useState('retinaface') const [modelName, setModelName] = useState('ArcFace') @@ -84,26 +86,22 @@ export default function Process() { try { // Call API to cancel the job const result = await jobsApi.cancelJob(currentJob.id) - console.log('Job cancellation:', result) + console.log('Job cancellation requested:', result) - // Close SSE stream - if (eventSourceRef.current) { - eventSourceRef.current.close() - eventSourceRef.current = null - } + // Update job status to show cancellation is in progress + setCurrentJob({ + ...currentJob, + status: JobStatus.PROGRESS, + message: 'Cancellation requested - finishing current photo...', + }) - // Update UI state - setIsProcessing(false) - setError(`Job cancelled: ${result.message}`) + // Don't close SSE stream yet - keep it open to wait for job to actually stop + // The job will finish the current photo, then stop and send a final status update + // The SSE stream handler will close the stream when job status becomes SUCCESS or FAILURE - // Update job status - if (currentJob) { - setCurrentJob({ - ...currentJob, - status: JobStatus.FAILURE, - message: 'Cancelled by user', - }) - } + // Set a flag to indicate 
cancellation was requested + // This will be checked in the SSE handler + setError(null) // Clear any previous errors } catch (err: any) { console.error('Error cancelling job:', err) setError(err.response?.data?.detail || err.message || 'Failed to cancel job') @@ -155,6 +153,11 @@ export default function Process() { eventSource.close() eventSourceRef.current = null + // Show cancellation message if job was cancelled + if (data.message && (data.message.includes('Cancelled') || data.message.includes('cancelled'))) { + setError(`Job cancelled: ${data.message}`) + } + // Fetch final job result to get processing stats if (jobStatus === JobStatus.SUCCESS) { fetchJobResult(jobId) @@ -255,57 +258,61 @@ export default function Process() {

- {/* Detector Backend */} -
- - -

- RetinaFace recommended for best accuracy -

-
+ {/* Detector Backend - Only visible in developer mode */} + {isDeveloperMode && ( +
+ + +

+ RetinaFace recommended for best accuracy +

+
+ )} - {/* Model Name */} -
- - -

- ArcFace recommended for best accuracy -

-
+ {/* Model Name - Only visible in developer mode */} + {isDeveloperMode && ( +
+ + +

+ ArcFace recommended for best accuracy +

+
+ )} {/* Control Buttons */}
diff --git a/frontend/src/pages/Settings.tsx b/frontend/src/pages/Settings.tsx index c50a77c..92a1f69 100644 --- a/frontend/src/pages/Settings.tsx +++ b/frontend/src/pages/Settings.tsx @@ -1,9 +1,37 @@ +import { useDeveloperMode } from '../context/DeveloperModeContext' + export default function Settings() { + const { isDeveloperMode, setDeveloperMode } = useDeveloperMode() + return (

Settings

-
-

Settings panel coming soon.

+ +
+

Developer Options

+ +
+
+ +

+ Enable developer features. Additional features will be available when enabled. +

+
+
+ +
+
) diff --git a/requirements.txt b/requirements.txt index f7c63de..e4a2058 100644 --- a/requirements.txt +++ b/requirements.txt @@ -3,7 +3,6 @@ uvicorn[standard]==0.30.6 pydantic==2.9.1 SQLAlchemy==2.0.36 psycopg2-binary==2.9.9 -alembic==1.13.2 redis==5.0.8 rq==1.16.2 python-jose[cryptography]==3.3.0 diff --git a/src/web/api/jobs.py b/src/web/api/jobs.py index dfb81c4..b100f19 100644 --- a/src/web/api/jobs.py +++ b/src/web/api/jobs.py @@ -52,6 +52,11 @@ def get_job(job_id: str) -> JobResponse: message = job.meta.get("message", "") if job.meta else "" + # Check if job was cancelled + if job.meta and job.meta.get("cancelled", False): + job_status = JobStatus.FAILURE + message = job.meta.get("message", "Cancelled by user") + # If job failed, include error message if rq_status == "failed" and job.exc_info: # Extract error message from exception info @@ -95,16 +100,28 @@ def stream_job_progress(job_id: str): "failed": JobStatus.FAILURE, } job_status = status_map.get(job.get_status(), JobStatus.PENDING) - - progress = 0 - if job_status == JobStatus.STARTED or job_status == JobStatus.PROGRESS: + + # Check if job was cancelled first + if job.meta and job.meta.get("cancelled", False): + job_status = JobStatus.FAILURE + message = job.meta.get("message", "Cancelled by user") progress = job.meta.get("progress", 0) if job.meta else 0 - elif job_status == JobStatus.SUCCESS: - progress = 100 - elif job_status == JobStatus.FAILURE: + else: progress = 0 + if job_status == JobStatus.STARTED: + # Job is running - show progress if available + progress = job.meta.get("progress", 0) if job.meta else 0 + # Map to PROGRESS status if we have actual progress + if progress > 0: + job_status = JobStatus.PROGRESS + elif job_status == JobStatus.PROGRESS: + progress = job.meta.get("progress", 0) if job.meta else 0 + elif job_status == JobStatus.SUCCESS: + progress = 100 + elif job_status == JobStatus.FAILURE: + progress = 0 - message = job.meta.get("message", "") if job.meta else "" + message = 
job.meta.get("message", "") if job.meta else "" # Only send event if progress or message changed if progress != last_progress or message != last_message: diff --git a/src/web/api/people.py b/src/web/api/people.py index cf70fd1..45bf965 100644 --- a/src/web/api/people.py +++ b/src/web/api/people.py @@ -44,12 +44,12 @@ def list_people( @router.get("/with-faces", response_model=PeopleWithFacesListResponse) def list_people_with_faces( - last_name: str | None = Query(None, description="Filter by last name (case-insensitive)"), + last_name: str | None = Query(None, description="Filter by last name or maiden name (case-insensitive)"), db: Session = Depends(get_db), ) -> PeopleWithFacesListResponse: """List all people with face counts, sorted by last_name, first_name. - Optionally filter by last_name if provided (case-insensitive search). + Optionally filter by last_name or maiden_name if provided (case-insensitive search). Only returns people who have at least one face. """ # Query people with face counts @@ -64,8 +64,12 @@ def list_people_with_faces( ) if last_name: - # Case-insensitive search on last_name - query = query.filter(func.lower(Person.last_name).contains(func.lower(last_name))) + # Case-insensitive search on both last_name and maiden_name + search_term = last_name.lower() + query = query.filter( + (func.lower(Person.last_name).contains(search_term)) | + ((Person.maiden_name.isnot(None)) & (func.lower(Person.maiden_name).contains(search_term))) + ) results = query.order_by(Person.last_name.asc(), Person.first_name.asc()).all() diff --git a/src/web/services/face_service.py b/src/web/services/face_service.py index cb4a71c..84aa8cb 100644 --- a/src/web/services/face_service.py +++ b/src/web/services/face_service.py @@ -338,7 +338,7 @@ def process_photo_faces( try: pose_faces = pose_detector.detect_pose_faces(face_detection_path) if pose_faces: - print(f"[FaceService] Pose detection: found {len(pose_faces)} faces with pose data") + print(f"[FaceService] Pose 
detection for {photo.filename}: found {len(pose_faces)} faces with pose data") except Exception as e: print(f"[FaceService] ⚠️ Pose detection failed for {photo.filename}: {e}, using defaults") pose_faces = [] @@ -348,7 +348,7 @@ def process_photo_faces( pose_detector_local = PoseDetector() pose_faces = pose_detector_local.detect_pose_faces(face_detection_path) if pose_faces: - print(f"[FaceService] Pose detection: found {len(pose_faces)} faces with pose data") + print(f"[FaceService] Pose detection for {photo.filename}: found {len(pose_faces)} faces with pose data") except Exception as e: print(f"[FaceService] ⚠️ Pose detection failed for {photo.filename}: {e}, using defaults") pose_faces = [] @@ -1058,14 +1058,19 @@ def process_unprocessed_photos( if check_cancelled(): print(f"[FaceService] Job cancelled at photo {idx}/{total}") if update_progress: - update_progress( - idx - 1, - total, - "Cancelled by user", - total_faces_detected, - total_faces_stored, - ) - break + try: + update_progress( + idx - 1, + total, + "Cancelled by user", + total_faces_detected, + total_faces_stored, + ) + except KeyboardInterrupt: + # Expected when cancellation is detected + pass + # Raise KeyboardInterrupt to signal cancellation to the task handler + raise KeyboardInterrupt("Job cancelled by user") try: # Update progress before processing each photo @@ -1102,28 +1107,8 @@ def process_unprocessed_photos( first_photo_time = time.time() - first_photo_start print(f"[FaceService] First photo completed in {first_photo_time:.2f}s") - # Check for cancellation AFTER finishing the current photo completely - # This allows the current photo to complete (including pose detection and DB commit), - # then stops before the next one - if check_cancelled(): - print(f"[FaceService] Job cancelled after finishing photo {idx}/{total}") - # Update progress to show cancellation status - if update_progress: - try: - update_progress( - idx, - total, - "Cancelled by user - finished current photo", - 
total_faces_detected, - total_faces_stored, - ) - except KeyboardInterrupt: - # If update_progress raises KeyboardInterrupt, that's expected - # The cancellation check already happened, so we're good - pass - break - - # Update progress only if NOT cancelled (to avoid unnecessary KeyboardInterrupt) + # Update progress to show completion (including pose detection) + # This happens AFTER the entire photo processing is complete if update_progress: try: update_progress( @@ -1134,12 +1119,21 @@ def process_unprocessed_photos( total_faces_stored, ) except KeyboardInterrupt: - # If cancellation was detected during update_progress, check again and break + # If cancellation was detected during update_progress, check again if check_cancelled(): print(f"[FaceService] Job cancelled during progress update after photo {idx}/{total}") - break + # Raise KeyboardInterrupt to signal cancellation to the task handler + raise KeyboardInterrupt("Job cancelled by user after completing current photo") # Re-raise if it wasn't a cancellation raise + + # Check for cancellation AFTER updating progress (photo is fully complete) + # This ensures the entire photo processing is done (including pose detection and DB commit), + # and the progress shows "Completed", then stops before the next one + if check_cancelled(): + print(f"[FaceService] Job cancelled after completing photo {idx}/{total} (including pose detection)") + # Raise KeyboardInterrupt to signal cancellation to the task handler + raise KeyboardInterrupt("Job cancelled by user after completing current photo") except KeyboardInterrupt: # Cancellation was requested - stop processing gracefully print(f"[FaceService] Job cancelled during processing of photo {idx}/{total}") diff --git a/src/web/services/tasks.py b/src/web/services/tasks.py index 0152333..1b0dc43 100644 --- a/src/web/services/tasks.py +++ b/src/web/services/tasks.py @@ -211,8 +211,13 @@ def process_faces_task( try: job.meta = job.meta or {} job.meta.update({ - "message": 
"Cancelled by user", + "progress": job.meta.get("progress", 0), + "message": "Cancelled by user - finished current photo", "cancelled": True, + "processed": job.meta.get("processed", photos_processed), + "total": job.meta.get("total", 0), + "faces_detected": job.meta.get("faces_detected", total_faces_detected), + "faces_stored": job.meta.get("faces_stored", total_faces_stored), }) job.save_meta() except Exception: