diff --git a/.env.example b/.env.example index edc9eca..7642a0a 100644 --- a/.env.example +++ b/.env.example @@ -41,6 +41,6 @@ totp_issuer=MultiAI GOOGLE_CLIENT_ID= GOOGLE_CLIENT_SECRET= -GOOGLE_REDIRECT_URI=http://localhost:8000/staff/drive/callback +GOOGLE_REDIRECT_URI=http://127.0.0.1:8000/staff/drive/callback GOOGLE_OAUTH_SCOPES=https://www.googleapis.com/auth/drive.readonly openid email profile FACE_ENCRYPTION_KEY=hkbribvfirirbvivbibvib \ No newline at end of file diff --git a/.github/workflows/docker-publish.yml b/.github/workflows/docker-publish.yml index 5f14801..d946aae 100644 --- a/.github/workflows/docker-publish.yml +++ b/.github/workflows/docker-publish.yml @@ -38,22 +38,12 @@ jobs: username: ${{ github.actor }} password: ${{ secrets.GITHUB_TOKEN }} - - name: Extract metadata - id: meta - uses: docker/metadata-action@v5 - with: - images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} - tags: | - type=raw,value=latest - type=sha,prefix= - - name: Build and push uses: docker/build-push-action@v5 with: context: . 
push: true platforms: linux/amd64,linux/arm64 - tags: ${{ steps.meta.outputs.tags }} - labels: ${{ steps.meta.outputs.labels }} - cache-from: type=gha - cache-to: type=gha,mode=max + tags: | + ${{ env.IMAGE_NAME }}:latest + ${{ env.IMAGE_NAME }}:${{ github.sha }} diff --git a/.gitignore b/.gitignore index 5cc1fe4..5593ad3 100644 --- a/.gitignore +++ b/.gitignore @@ -7,3 +7,5 @@ db/schema.sql multiai-c9380-firebase-adminsdk-fbsvc-cb6e5ce41b.json db.txt +.venv +multiai-c9380-firebase-adminsdk-fbsvc-cb6e5ce41b.json diff --git a/app/container.py b/app/container.py index fd94a4f..c239142 100644 --- a/app/container.py +++ b/app/container.py @@ -17,11 +17,13 @@ from app.service.users import AuthService from app.service.user_notification import UserNotificationService from db.generated import devices as device_queries +from db.generated import photo_faces as photo_face_queries from db.generated import photos as photo_queries from db.generated import session as session_queries from db.generated import staff_drive_connections as staff_drive_queries from db.generated import staff_notifications as staff_notification_queries from db.generated import stuff_user as staff_user_queries +from db.generated import upload_request_groups as upload_request_group_queries from db.generated import upload_request_photos as upload_request_photo_queries from db.generated import upload_requests as upload_request_queries from db.generated import user as user_queries @@ -51,9 +53,11 @@ def __init__( self.device_querier = device_queries.AsyncQuerier(conn) self.staff_user_querier = staff_user_queries.AsyncQuerier(conn) self.staff_drive_querier = staff_drive_queries.AsyncQuerier(conn) + self.upload_request_group_querier = upload_request_group_queries.AsyncQuerier(conn) self.upload_request_querier = upload_request_queries.AsyncQuerier(conn) self.upload_request_photo_querier = upload_request_photo_queries.AsyncQuerier(conn) self.photo_querier = photo_queries.AsyncQuerier(conn) + 
self.photo_face_querier = photo_face_queries.AsyncQuerier(conn) self.staff_notification_querier = staff_notification_queries.AsyncQuerier(conn) self.notification_querier = notification_queries.AsyncQuerier(conn) self.audit_querier = audit_queries.AsyncQuerier(conn) @@ -94,6 +98,7 @@ def __init__( self.staged_upload_storage_service = StagedUploadStorageService() self.upload_requests_service = UploadRequestsService( + upload_request_group_querier=self.upload_request_group_querier, upload_request_querier=self.upload_request_querier, upload_request_photo_querier=self.upload_request_photo_querier, photo_querier=self.photo_querier, @@ -115,7 +120,6 @@ def __init__( ) self.staff_user_service = StaffUserService() - self.staff_user_service.init( staff_user_querier=self.staff_user_querier,) diff --git a/app/core/config.py b/app/core/config.py index 0fc3d9d..55d0841 100644 --- a/app/core/config.py +++ b/app/core/config.py @@ -1,4 +1,5 @@ -from pydantic_settings import BaseSettings +from pydantic_settings import BaseSettings, SettingsConfigDict +from pydantic import field_validator class Settings(BaseSettings): @@ -16,6 +17,8 @@ class Settings(BaseSettings): NATS_HOST: str NATS_PASSWORD: str NATS_USER: str + NATS_SINGLE_FACE_MATCH_STREAM: str = "single_face_matches" + NATS_SINGLE_FACE_MATCH_DURABLE: str = "single_face_match_worker" # MinIO @@ -23,6 +26,8 @@ class Settings(BaseSettings): MINIO_ROOT_USER: str MINIO_ROOT_PASSWORD: str MINIO_HOST: str + MINIO_RETRY_ATTEMPTS: int = 3 + MINIO_RETRY_BASE_SECONDS: float = 0.5 # PostgreSQL POSTGRES_USER: str @@ -35,13 +40,22 @@ class Settings(BaseSettings): MOBILE_SESSION_LIMIT: int = 3 MOBILE_SESSION_TTL_SECONDS: int = 180 MOBILE_SESSION_DAYS: int = 7 - + # Admin list defaults + ADMIN_USERS_DEFAULT_LIMIT: int = 20 + ADMIN_USERS_MAX_LIMIT: int = 100 # Security jwt_secret: str jwt_algorithm: str = "HS256" encryption_key: str totp_issuer: str = "multAI" + # Face embedding model + FACE_EMBEDDING_MODEL_NAME: str = "buffalo_l" + 
FACE_EMBEDDING_PROVIDERS: str = "CPUExecutionProvider" + FACE_EMBEDDING_CTX_ID: int = -1 + FACE_EMBEDDING_DET_WIDTH: int = 640 + FACE_EMBEDDING_DET_HEIGHT: int = 640 + # Google Drive OAuth GOOGLE_CLIENT_ID: str = "" GOOGLE_CLIENT_SECRET: str = "" @@ -53,9 +67,21 @@ class Settings(BaseSettings): FACE_ENCRYPTION_KEY: str FIREBASE_CREDENTIALS_PATH: str = "multiai-c9380-firebase-adminsdk-fbsvc-cb6e5ce41b.json" - class Config: - env_file = ".env" - extra = "ignore" + model_config = SettingsConfigDict( + env_file=".env", + extra="ignore", + ) + + @field_validator("debug", mode="before") + @classmethod + def _parse_debug(cls, value): # type: ignore[no-untyped-def] + if isinstance(value, str): + lowered = value.strip().lower() + if lowered in {"release", "prod", "production", "false", "0", "no"}: + return False + if lowered in {"true", "1", "yes"}: + return True + return value settings = Settings() # type: ignore diff --git a/app/core/constant.py b/app/core/constant.py index 0cae9dc..0925bbd 100644 --- a/app/core/constant.py +++ b/app/core/constant.py @@ -9,6 +9,9 @@ class RedisKey(str, Enum): NOTIFICATION_EVENT_SUBJECT = "notification_event" AUDIT_EVENT_SUBJECT = "audit.event" +FINAL_BUCKET_CLEANUP_SUBJECT = "ai.final_bucket.completed" +FINAL_BUCKET_CLEANUP_STREAM = "ai-final-bucket-cleanup" +FINAL_BUCKET_CLEANUP_DURABLE_NAME = "ai-final-bucket-cleaner" class AuditEventType(str, Enum): @@ -20,7 +23,6 @@ class AuditEventType(str, Enum): UPLOAD_REQUEST_REJECTED = "upload_request.rejected" - IMAGE_ALLOWED_TYPES = { "image/jpeg", "image/png", @@ -28,6 +30,19 @@ class AuditEventType(str, Enum): "image/heif" } +DEFAULT_CONTENT_TYPE = "application/octet-stream" +DRIVE_ALLOWED_HOSTS = {"drive.google.com", "docs.google.com"} +MINIO_URL_PREFIX = "minio://" + +IMAGES_BUCKET_NAME = "images" +DOCUMENTS_BUCKET_NAME = "documents" +WA_SIM_BUCKET_NAME = "wa-sim" + +GOOGLE_AUTH_URL = "https://accounts.google.com/o/oauth2/v2/auth" +GOOGLE_TOKEN_URL = "https://oauth2.googleapis.com/token" 
+GOOGLE_USERINFO_URL = "https://www.googleapis.com/oauth2/v2/userinfo" +GOOGLE_DRIVE_FILES_URL = "https://www.googleapis.com/drive/v3/files/{file_id}" + MAX_IMAGE_SIZE = 5 * 1024 * 1024 MIN_ENROLL_IMAGES = 3 MAX_ENROLL_IMAGES = 5 diff --git a/app/core/exceptions.py b/app/core/exceptions.py index 5f4a9b5..ddf0d36 100644 --- a/app/core/exceptions.py +++ b/app/core/exceptions.py @@ -75,6 +75,9 @@ def handle_check_violation(exc: Exception) -> HTTPException: def handle(exc: Exception) -> HTTPException: logger.error("Database error: %s", exc) + if isinstance(exc, HTTPException): + return exc + if isinstance(exc, IntegrityError): orig = getattr(exc, "orig", None) sqlstate = getattr(orig, "sqlstate", None) diff --git a/app/deps/token_auth.py b/app/deps/token_auth.py index c5fe522..a7eff17 100644 --- a/app/deps/token_auth.py +++ b/app/deps/token_auth.py @@ -43,6 +43,8 @@ async def get_current_mobile_user( user = await container.auth_service.user_querier.get_user_by_id(id=session.user_id) if not user: raise HTTPException(status_code=401, detail="User not found") + if user.blocked: + raise HTTPException(status_code=403, detail="User is blocked") return MobileUserSchema( user_id=user.id, diff --git a/app/infra/google_drive.py b/app/infra/google_drive.py index 0b32ad6..70e62ae 100644 --- a/app/infra/google_drive.py +++ b/app/infra/google_drive.py @@ -9,12 +9,13 @@ from app.core.exceptions import AppException from app.core.config import settings - - -GOOGLE_AUTH_URL = "https://accounts.google.com/o/oauth2/v2/auth" -GOOGLE_TOKEN_URL = "https://oauth2.googleapis.com/token" -GOOGLE_USERINFO_URL = "https://www.googleapis.com/oauth2/v2/userinfo" -GOOGLE_DRIVE_FILES_URL = "https://www.googleapis.com/drive/v3/files/{file_id}" +from app.core.constant import ( + GOOGLE_AUTH_URL, + GOOGLE_DRIVE_FILES_URL, + GOOGLE_TOKEN_URL, + GOOGLE_USERINFO_URL, +) +GOOGLE_DRIVE_LIST_FILES_URL = "https://www.googleapis.com/drive/v3/files" @dataclass @@ -48,6 +49,8 @@ class GoogleDriveFileDownload: class 
GoogleDriveClient: + _drive_folder_mime_type = "application/vnd.google-apps.folder" + @staticmethod def _require_str(data: dict[str, object], key: str) -> str: value = data.get(key) @@ -181,6 +184,71 @@ async def download_file( ) return GoogleDriveFileDownload(metadata=metadata, content=content) + @staticmethod + async def list_folder_files( + *, + access_token: str, + folder_id: str, + ) -> list[GoogleDriveFileMetadata]: + files: list[GoogleDriveFileMetadata] = [] + next_page_token: str | None = None + + while True: + query_params = { + "q": f"'{folder_id}' in parents and trashed = false", + "fields": "nextPageToken,files(id,name,mimeType,size)", + "supportsAllDrives": "true", + "includeItemsFromAllDrives": "true", + "pageSize": "100", + } + if next_page_token is not None: + query_params["pageToken"] = next_page_token + + data = await GoogleDriveClient._get_json( + GOOGLE_DRIVE_LIST_FILES_URL, + headers={"Authorization": f"Bearer {access_token}"}, + query_params=query_params, + error_context="Google Drive folder listing request", + ) + + raw_files = data.get("files", []) + if not isinstance(raw_files, list): + raise AppException.bad_request("Google Drive folder listing response is invalid") + + for raw_file in raw_files: + if not isinstance(raw_file, dict): + raise AppException.bad_request("Google Drive folder entry is invalid") + metadata = GoogleDriveClient._file_metadata_from_dict(raw_file) + if metadata.mime_type == GoogleDriveClient._drive_folder_mime_type: + continue + files.append(metadata) + + next_page_token_raw = data.get("nextPageToken") + if next_page_token_raw is None: + break + if not isinstance(next_page_token_raw, str) or not next_page_token_raw: + raise AppException.bad_request("Google Drive next page token is invalid") + next_page_token = next_page_token_raw + + return files + + @staticmethod + def _file_metadata_from_dict(data: dict[str, object]) -> GoogleDriveFileMetadata: + size_raw = data.get("size", "0") + if not isinstance(size_raw, (str, 
int)): + raise AppException.bad_request("Google Drive file size is invalid") + try: + size_bytes = int(size_raw) + except (TypeError, ValueError) as exc: + raise AppException.bad_request("Google Drive file size is invalid") from exc + + return GoogleDriveFileMetadata( + id=GoogleDriveClient._require_str(data, "id"), + name=GoogleDriveClient._require_str(data, "name"), + mime_type=GoogleDriveClient._require_str(data, "mimeType"), + size_bytes=size_bytes, + ) + @staticmethod async def _post_form(url: str, payload: dict[str, str]) -> dict[str, object]: encoded = urllib.parse.urlencode(payload).encode("utf-8") diff --git a/app/infra/minio.py b/app/infra/minio.py index 09104ea..e6249da 100644 --- a/app/infra/minio.py +++ b/app/infra/minio.py @@ -9,11 +9,18 @@ from app.core.utils import check_extension from app.core.exceptions import AppException +from app.core.constant import ( + DEFAULT_CONTENT_TYPE, + DOCUMENTS_BUCKET_NAME as CORE_DOCUMENTS_BUCKET_NAME, + IMAGES_BUCKET_NAME as CORE_IMAGES_BUCKET_NAME, + WA_SIM_BUCKET_NAME as CORE_WA_SIM_BUCKET_NAME, +) -IMAGES_BUCKET_NAME = "images" -DOCUMENTS_BUCKET_NAME = "documents" -WA_SIM_BUCKET_NAME = "wa-sim" +# Re-export bucket names for compatibility with existing imports. 
+IMAGES_BUCKET_NAME = CORE_IMAGES_BUCKET_NAME +DOCUMENTS_BUCKET_NAME = CORE_DOCUMENTS_BUCKET_NAME +WA_SIM_BUCKET_NAME = CORE_WA_SIM_BUCKET_NAME async def init_minio_client( minio_host: str, minio_port: int, minio_root_user: str, minio_root_password: str @@ -48,7 +55,7 @@ async def put(self, file: UploadFile, object_name: str | None = None) -> str: object_name = str(uuid.uuid4()) if file.content_type is None: - file.content_type = "application/octet-stream" + file.content_type = DEFAULT_CONTENT_TYPE if file.filename is None: file.filename = object_name @@ -80,7 +87,7 @@ async def get(self, object_name: str) -> tuple[bytes, str, str]: data = await res.read() content_type = ( - res.content_type if res.content_type else "application/octet-stream" + res.content_type if res.content_type else DEFAULT_CONTENT_TYPE ) filename = res.headers.get("x-amz-meta-filename", f"{object_name}") diff --git a/app/infra/nats.py b/app/infra/nats.py index 5a9a101..7696f55 100644 --- a/app/infra/nats.py +++ b/app/infra/nats.py @@ -2,12 +2,17 @@ from typing import Any, Callable, Optional from nats.aio.client import Client as NATS from nats.js.client import JetStreamContext -from nats.js.api import DeliverPolicy, AckPolicy +from nats.js.api import DeliverPolicy, AckPolicy, StreamConfig +from nats.js.errors import NotFoundError from nats.aio.msg import Msg from pydantic import BaseModel from app.core.config import settings -from app.core.constant import NOTIFICATION_EVENT_SUBJECT, AUDIT_EVENT_SUBJECT +from app.core.constant import ( + AUDIT_EVENT_SUBJECT, + FINAL_BUCKET_CLEANUP_SUBJECT, + NOTIFICATION_EVENT_SUBJECT, +) class Message(BaseModel): @@ -20,9 +25,15 @@ class NatsSubjects(Enum): USER_LOGOUT = "user.logout" NOTIFICATION_EVENT = NOTIFICATION_EVENT_SUBJECT AUDIT_EVENT = AUDIT_EVENT_SUBJECT + STAFF_UPLOAD_GROUP_CREATED = "staff.upload_group.created" + STAFF_UPLOAD_GROUP_APPROVED = "staff.upload_group.approved" + STAFF_UPLOAD_GROUP_REJECTED = "staff.upload_group.rejected" + 
FINAL_BUCKET_CLEANUP = FINAL_BUCKET_CLEANUP_SUBJECT STAFF_UPLOAD_REQUEST_CREATED = "staff.upload_request.created" STAFF_UPLOAD_REQUEST_APPROVED = "staff.upload_request.approved" STAFF_UPLOAD_REQUEST_REJECTED = "staff.upload_request.rejected" + SINGLE_FACE_MATCH_REQUESTED = "photo_faces.single.requested" + class NatsClient: _nc: Optional[NATS] = None @@ -44,7 +55,7 @@ async def connect( password=password or settings.NATS_PASSWORD, ) NatsClient._nc = nc - NatsClient._js = nc.jetstream() # type: ignore + NatsClient._js = nc.jetstream() # type: ignore @staticmethod async def close() -> None: @@ -70,11 +81,12 @@ async def subscribe(subject: NatsSubjects | str, callback: Callable[[Any], Any]) await NatsClient.connect() nc = NatsClient._nc assert nc is not None + async def _wrapper(msg: Msg) -> None: await callback(msg.data) subject_name = subject.value if isinstance(subject, NatsSubjects) else subject - await nc.subscribe(subject_name, cb=_wrapper) # type: ignore + await nc.subscribe(subject_name, cb=_wrapper) # type: ignore @staticmethod @@ -83,7 +95,7 @@ async def js_publish(subject: NatsSubjects, message: bytes, stream_name: str) -> await NatsClient.connect() js = NatsClient._js assert js is not None - subject_name = subject.value if isinstance(subject, NatsSubjects) else subject # type: ignore + subject_name = subject.value if isinstance(subject, NatsSubjects) else subject # type: ignore await js.publish(subject_name, message, stream=stream_name) @staticmethod @@ -97,17 +109,35 @@ async def js_subscribe( if NatsClient._js is None: await NatsClient.connect() + await NatsClient.ensure_stream(stream_name=stream_name, subjects=[subject.value]) + async def _wrapper(msg: Msg) -> None: await callback(msg.data) await msg.ack() js = NatsClient._js assert js is not None - subject_name = subject.value await js.subscribe( - subject=subject_name, + subject=subject.value, stream=stream_name, durable=durable_name, cb=_wrapper, deliver_policy=DeliverPolicy.NEW, # 
ack_policy=ack_policy ) + + @staticmethod + async def ensure_stream(*, stream_name: str, subjects: list[str]) -> None: + if NatsClient._js is None: + await NatsClient.connect() + js = NatsClient._js + assert js is not None + try: + await js.stream_info(stream_name) + except NotFoundError: + await js.add_stream( + name=stream_name, + config=StreamConfig( + name=stream_name, + subjects=subjects, + ), + ) diff --git a/app/router/mobile/auth.py b/app/router/mobile/auth.py index 52e34a1..93e9bce 100644 --- a/app/router/mobile/auth.py +++ b/app/router/mobile/auth.py @@ -4,7 +4,6 @@ from uuid import UUID from app.container import get_container, Container -from app.core.exceptions import AppException from app.deps.token_auth import MobileUserSchema, get_current_mobile_user from app.schema.request.mobile.auth import ( @@ -23,7 +22,6 @@ async def mobile_register_login( req: MobileAuthRequest, container: Container = Depends(get_container), ) -> MobileAuthResponse: - return await container.auth_service.mobile_register_login(container.redis, req) @@ -32,20 +30,18 @@ async def refresh_token( req: RefreshTokenRequest, container: Container = Depends(get_container), ) -> MobileAuthResponse: - return await container.auth_service.refresh_token(container.redis, req.refresh_token) @router.post("/logout") async def logout( container: Container = Depends(get_container), - User: MobileUserSchema = Depends(get_current_mobile_user), + current_user: MobileUserSchema = Depends(get_current_mobile_user), ) -> dict[str, str]: - return await container.auth_service.logout( container.redis, - str(User.user_id), - str(User.session_id), + str(current_user.user_id), + str(current_user.session_id), ) @@ -55,7 +51,6 @@ async def revoke_device( container: Container = Depends(get_container), current_user: MobileUserSchema = Depends(get_current_mobile_user), ) -> dict[str, str]: - await container.device_service.revoke_device( device_id=device_id, user_id=current_user.user_id, @@ -99,17 +94,14 @@ async def 
get_me( current_user: MobileUserSchema = Depends(get_current_mobile_user), container: Container = Depends(get_container), ) -> MeResponse: - - user = await container.auth_service.user_querier.get_user_by_id(id=current_user.user_id) - if user is None : - raise AppException.not_found("user not found") + user = await container.auth_service.get_user(user_id=current_user.user_id) devices, _ = await container.device_service.get_all_devices(current_user.user_id) device_list = [ DeviceSchema( id=d.id, - device_name=d.device_name or "uknown ", - device_type=d.device_type or "uknown ", + device_name=d.device_name or "unknown", + device_type=d.device_type or "unknown", totp_secret=d.totp_secret, ) for d in devices @@ -128,8 +120,6 @@ async def get_me( expires_at=sessions_objs.expires_at, ) - - return MeResponse( user=UserSchema(id=user.id, email=user.email), devices=device_list, diff --git a/app/router/mobile/enrollement.py b/app/router/mobile/enrollement.py index 109dfda..1a5f652 100644 --- a/app/router/mobile/enrollement.py +++ b/app/router/mobile/enrollement.py @@ -5,7 +5,13 @@ from app.container import Container, get_container from app.deps.token_auth import MobileUserSchema, get_current_mobile_user from app.core.exceptions import AppException -from app.core.constant import IMAGE_ALLOWED_TYPES, MAX_ENROLL_IMAGES, MAX_IMAGE_SIZE, MIN_ENROLL_IMAGES +from app.core.constant import ( + DEFAULT_CONTENT_TYPE, + IMAGE_ALLOWED_TYPES, + MAX_ENROLL_IMAGES, + MAX_IMAGE_SIZE, + MIN_ENROLL_IMAGES, +) from app.service.face_embedding import FaceImagePayload from db.generated.models import User @@ -57,7 +63,7 @@ async def enroll_face( payload: FaceImagePayload = FaceImagePayload( filename=file.filename or "unknown", - content_type=file.content_type or "application/octet-stream", + content_type=file.content_type or DEFAULT_CONTENT_TYPE, bytes=contents, ) diff --git a/app/router/staff/drive.py b/app/router/staff/drive.py index 6a28049..1537693 100644 --- a/app/router/staff/drive.py +++ 
b/app/router/staff/drive.py @@ -1,4 +1,5 @@ -from fastapi import APIRouter, Depends, Query +from fastapi import APIRouter, Depends, HTTPException, Query +from fastapi.responses import RedirectResponse from app.container import Container, get_container from app.core.exceptions import AppException @@ -17,11 +18,13 @@ @router.get("/connect", response_model=GoogleDriveConnectResponse) async def connect_google_drive( + redirect_url: str | None = Query(default=None), current_staff_user: StaffUser = Depends(get_current_staff_user), container: Container = Depends(get_container), ) -> GoogleDriveConnectResponse: authorization_url, state = await container.staff_drive_service.create_connect_url( - current_staff_user + current_staff_user, + redirect_url=redirect_url, ) return GoogleDriveConnectResponse(authorization_url=authorization_url, state=state) @@ -32,11 +35,40 @@ async def google_drive_callback( state: str = Query(...), error: str | None = Query(default=None), container: Container = Depends(get_container), -) -> GoogleDriveCallbackResponse: +) -> GoogleDriveCallbackResponse | RedirectResponse: + redirect_url = await container.staff_drive_service.get_callback_redirect_url(state) if error is not None: + if redirect_url is not None: + return RedirectResponse( + container.staff_drive_service.build_frontend_callback_url( + redirect_url, + status="error", + error=error, + ) + ) raise AppException.bad_request(f"Google OAuth error: {error}") - connection = await container.staff_drive_service.handle_callback(code, state) + try: + connection, redirect_url = await container.staff_drive_service.handle_callback(code, state) + except HTTPException as exc: + if redirect_url is not None: + return RedirectResponse( + container.staff_drive_service.build_frontend_callback_url( + redirect_url, + status="error", + error=str(exc.detail), + ) + ) + raise + + if redirect_url is not None: + return RedirectResponse( + container.staff_drive_service.build_frontend_callback_url( + redirect_url, + 
status="success", + google_email=connection.google_email, + ) + ) return GoogleDriveCallbackResponse( message="Google Drive connected successfully", google_email=connection.google_email, diff --git a/app/router/staff/uploads.py b/app/router/staff/uploads.py index 702cfbc..8268bf1 100644 --- a/app/router/staff/uploads.py +++ b/app/router/staff/uploads.py @@ -13,29 +13,40 @@ CreateUploadRequestRequest, RejectUploadRequestRequest, ) +from app.schema.response.staff.upload_groups import ( + UploadRequestGroupListResponse, + UploadRequestGroupPhotoListResponse, + UploadRequestGroupSchema, +) from app.schema.response.staff.uploads import ( UploadRequestListResponse, UploadRequestPhotoListResponse, UploadRequestSchema, ) +from app.service.upload_requests import UploadRequestGroupDetails from db.generated.models import StaffUser, UploadRequestStatus -router = APIRouter(prefix="/uploads") +router = APIRouter(prefix="/uploads", tags=["staff-uploads"]) -@router.post("/request", response_model=UploadRequestSchema) +@router.post("/request", response_model=UploadRequestSchema | UploadRequestGroupSchema) async def create_upload_request( req: CreateUploadRequestRequest, current_staff_user: StaffUser = Depends(get_current_staff_user), container: Container = Depends(get_container), -) -> UploadRequestSchema: - upload_request = await container.upload_requests_service.create_request( +) -> UploadRequestSchema | UploadRequestGroupSchema: + upload_result = await container.upload_requests_service.create_upload( event_id=req.event_id, + folder_id=req.folder_id, photos=req.to_inputs(), + visibility=req.visibility, + day_number=req.day_number, requested_by=current_staff_user, ) - return UploadRequestSchema.from_models(upload_request.request, upload_request.photos) + if isinstance(upload_result, UploadRequestGroupDetails): + return UploadRequestGroupSchema.from_details(upload_result) + return UploadRequestSchema.from_models(upload_result.request, upload_result.photos) @router.get("", 
response_model=UploadRequestListResponse) @@ -48,13 +59,82 @@ async def list_upload_requests( requests = await container.upload_requests_service.list_requests( current_staff_user=current_staff_user, scope=scope, - status=status + status=status.value if status is not None else None, ) return UploadRequestListResponse.from_models( [(item.request, item.photos) for item in requests] ) +@router.get("/groups", response_model=UploadRequestGroupListResponse) +async def list_upload_request_groups( + scope: Literal["my", "all"] = Query(default="my"), + status: UploadRequestStatus | None = Query(default=None), + current_staff_user: StaffUser = Depends(get_current_staff_user), + container: Container = Depends(get_container), +) -> UploadRequestGroupListResponse: + groups = await container.upload_requests_service.list_groups( + current_staff_user=current_staff_user, + scope=scope, + status=status.value if status is not None else None, + ) + return UploadRequestGroupListResponse.from_details_list(groups) + + +@router.get("/groups/{group_id}", response_model=UploadRequestGroupSchema) +async def get_upload_request_group( + group_id: UUID, + current_staff_user: StaffUser = Depends(get_current_staff_user), + container: Container = Depends(get_container), +) -> UploadRequestGroupSchema: + group = await container.upload_requests_service.get_group_details( + group_id=group_id, + current_staff_user=current_staff_user, + ) + return UploadRequestGroupSchema.from_details(group) + + +@router.get("/groups/{group_id}/photos", response_model=UploadRequestGroupPhotoListResponse) +async def list_upload_request_group_photos( + group_id: UUID, + current_staff_user: StaffUser = Depends(get_current_staff_user), + container: Container = Depends(get_container), +) -> UploadRequestGroupPhotoListResponse: + photos = await container.upload_requests_service.list_group_photos( + group_id=group_id, + current_staff_user=current_staff_user, + ) + return UploadRequestGroupPhotoListResponse.from_photos(photos) 
+ + +@router.post("/groups/{group_id}/approve", response_model=UploadRequestGroupSchema) +async def approve_upload_request_group( + group_id: UUID, + current_staff_user: StaffUser = Depends(require_multi_team_lead_staff), + container: Container = Depends(get_container), +) -> UploadRequestGroupSchema: + group = await container.upload_requests_service.approve_group( + group_id=group_id, + approved_by=current_staff_user, + ) + return UploadRequestGroupSchema.from_details(group) + + +@router.post("/groups/{group_id}/reject", response_model=UploadRequestGroupSchema) +async def reject_upload_request_group( + group_id: UUID, + req: RejectUploadRequestRequest, + current_staff_user: StaffUser = Depends(require_multi_team_lead_staff), + container: Container = Depends(get_container), +) -> UploadRequestGroupSchema: + group = await container.upload_requests_service.reject_group( + group_id=group_id, + approved_by=current_staff_user, + reason=req.reason, + ) + return UploadRequestGroupSchema.from_details(group) + + @router.get("/{request_id}", response_model=UploadRequestSchema) async def get_upload_request( request_id: UUID, diff --git a/app/router/web/__init__.py b/app/router/web/__init__.py index 9b1e12e..b7939c3 100644 --- a/app/router/web/__init__.py +++ b/app/router/web/__init__.py @@ -3,8 +3,11 @@ from app.router.web.event import router as event_router from app.router.web.auth import router as auth_routes from app.router.web.audit import router as audit_router +from app.router.web.users import router as users_router + router = APIRouter(prefix="/admin", tags=["admin"]) router.include_router(staff_users_router) router.include_router(event_router) router.include_router(auth_routes) router.include_router(audit_router) +router.include_router(users_router) diff --git a/app/router/web/users.py b/app/router/web/users.py new file mode 100644 index 0000000..f167376 --- /dev/null +++ b/app/router/web/users.py @@ -0,0 +1,106 @@ +from uuid import UUID + +from fastapi import 
APIRouter, Depends, Query, status + +from app.container import Container, get_container +from app.core.config import settings +from app.core.logger import logger +from app.deps.cookie_auth import get_current_staff_user +from app.schema.request.web.user import AdminUserCreateRequest, AdminUserUpdateRequest +from app.schema.response.web.user import AdminUserSchema, to_admin_user_schema +from db.generated.models import StaffUser + +router = APIRouter(prefix="/users") + +@router.post("/", response_model=AdminUserSchema, status_code=status.HTTP_201_CREATED) +async def create_user( + req: AdminUserCreateRequest, + current_staff_user: StaffUser = Depends(get_current_staff_user), + container: Container = Depends(get_container), +) -> AdminUserSchema: + user = await container.auth_service.create_user( + email=req.email, + password=req.password, + display_name=req.display_name, + blocked=req.blocked, + ) + logger.info("admin %s created user %s", current_staff_user.id, user.id) + return to_admin_user_schema(user) + +@router.get("/", response_model=list[AdminUserSchema]) +async def list_users( + limit: int = Query( + settings.ADMIN_USERS_DEFAULT_LIMIT, ge=1, le=settings.ADMIN_USERS_MAX_LIMIT + ), + offset: int = Query(0, ge=0), + current_staff_user: StaffUser = Depends(get_current_staff_user), + container: Container = Depends(get_container), +) -> list[AdminUserSchema]: + users = await container.auth_service.list_users(limit=limit, offset=offset) + return [to_admin_user_schema(user) for user in users] + + +@router.get("/{user_id}", response_model=AdminUserSchema) +async def get_user( + user_id: UUID, + current_staff_user: StaffUser = Depends(get_current_staff_user), + container: Container = Depends(get_container), +) -> AdminUserSchema: + user = await container.auth_service.get_user(user_id=user_id) + return to_admin_user_schema(user) + + +@router.put("/{user_id}", response_model=AdminUserSchema) +async def update_user( + user_id: UUID, + req: AdminUserUpdateRequest, + 
current_staff_user: StaffUser = Depends(get_current_staff_user), + container: Container = Depends(get_container), +) -> AdminUserSchema: + user = await container.auth_service.update_user( + user_id=user_id, + email=req.email, + display_name=req.display_name, + blocked=req.blocked, + ) + logger.info("admin %s updated user %s", current_staff_user.id, user_id) + return to_admin_user_schema(user) + + +@router.delete("/{user_id}", response_model=AdminUserSchema) +async def delete_user( + user_id: UUID, + current_staff_user: StaffUser = Depends(get_current_staff_user), + container: Container = Depends(get_container), +) -> AdminUserSchema: + user = await container.auth_service.delete_user( + redis=container.redis, + user_id=user_id, + ) + logger.info("admin %s deleted user %s", current_staff_user.id, user_id) + return to_admin_user_schema(user) + + +@router.post("/{user_id}/block", response_model=AdminUserSchema) +async def block_user( + user_id: UUID, + current_staff_user: StaffUser = Depends(get_current_staff_user), + container: Container = Depends(get_container), +) -> AdminUserSchema: + user = await container.auth_service.block_user( + redis=container.redis, + user_id=user_id, + ) + logger.info("admin %s blocked user %s", current_staff_user.id, user_id) + return to_admin_user_schema(user) + + +@router.post("/{user_id}/unblock", response_model=AdminUserSchema) +async def unblock_user( + user_id: UUID, + current_staff_user: StaffUser = Depends(get_current_staff_user), + container: Container = Depends(get_container), +) -> AdminUserSchema: + user = await container.auth_service.unblock_user(user_id=user_id) + logger.info("admin %s unblocked user %s", current_staff_user.id, user_id) + return to_admin_user_schema(user) diff --git a/app/schema/dto/single_face_match.py b/app/schema/dto/single_face_match.py new file mode 100644 index 0000000..e691808 --- /dev/null +++ b/app/schema/dto/single_face_match.py @@ -0,0 +1,25 @@ +from __future__ import annotations + +from datetime 
import datetime, timezone +from uuid import UUID, uuid4 + +from pydantic import BaseModel, Field + + +class BBoxPayload(BaseModel): + x1: float + y1: float + x2: float + y2: float + + +class SingleFaceMatchJob(BaseModel): + job_id: UUID = Field(default_factory=uuid4) + photo_id: UUID + face_index: int = 0 + image_ref: str + bbox: BBoxPayload | None = None + faces_detected: int | None = None + submitted_at: datetime = Field(default_factory=lambda: datetime.now(timezone.utc)) + + model_config = {"extra": "allow"} diff --git a/app/schema/request/staff/uploads.py b/app/schema/request/staff/uploads.py index ed50627..a0b1c55 100644 --- a/app/schema/request/staff/uploads.py +++ b/app/schema/request/staff/uploads.py @@ -1,7 +1,6 @@ from datetime import datetime -from typing import Literal -from pydantic import BaseModel, Field, field_validator +from pydantic import BaseModel, Field, field_validator, model_validator from uuid import UUID from app.schema.dto.staff.uploads import UploadPhotoInput @@ -14,7 +13,7 @@ class CreateUploadRequestPhotoRequest(BaseModel): drive_file_id: str = Field(min_length=1, max_length=255) taken_at: datetime | None = None day_number: int | None = None - visibility: Literal["private","public"] + visibility: str = "private" @field_validator("drive_file_id", mode="before") @classmethod @@ -42,12 +41,42 @@ def to_input(self) -> UploadPhotoInput: class CreateUploadRequestRequest(BaseModel): event_id: UUID - photos: list[CreateUploadRequestPhotoRequest] = Field( + folder_id: str | None = Field(default=None, min_length=1, max_length=255) + photos: list[CreateUploadRequestPhotoRequest] | None = Field( + default=None, min_length=1, max_length=MAX_UPLOAD_BATCH_SIZE, ) + visibility: str = "private" + day_number: int | None = None + + @field_validator("folder_id", mode="before") + @classmethod + def _strip_optional_text(cls, value: object) -> object: + if isinstance(value, str): + stripped_value = value.strip() + return stripped_value or None + return value 
+ + @field_validator("visibility") + @classmethod + def _validate_request_visibility(cls, value: str) -> str: + normalized_value = value.strip().lower() + if normalized_value not in {"private", "public"}: + raise ValueError("visibility must be either 'private' or 'public'") + return normalized_value + + @model_validator(mode="after") + def _validate_source(self) -> "CreateUploadRequestRequest": + has_folder = self.folder_id is not None + has_photos = self.photos is not None + if has_folder == has_photos: + raise ValueError("Exactly one of folder_id or photos must be provided") + return self def to_inputs(self) -> list[UploadPhotoInput]: + if self.photos is None: + return [] return [photo.to_input() for photo in self.photos] diff --git a/app/schema/request/web/user.py b/app/schema/request/web/user.py new file mode 100644 index 0000000..2b41695 --- /dev/null +++ b/app/schema/request/web/user.py @@ -0,0 +1,15 @@ +from typing import Optional +from pydantic import BaseModel, EmailStr, Field + + +class AdminUserCreateRequest(BaseModel): + email: EmailStr + password: str = Field(..., min_length=8) + display_name: Optional[str] = None + blocked: bool = False + + +class AdminUserUpdateRequest(BaseModel): + email: Optional[EmailStr] = None + display_name: Optional[str] = None + blocked: Optional[bool] = None diff --git a/app/schema/response/staff/upload_groups.py b/app/schema/response/staff/upload_groups.py new file mode 100644 index 0000000..a08e7c2 --- /dev/null +++ b/app/schema/response/staff/upload_groups.py @@ -0,0 +1,67 @@ +from datetime import datetime +from uuid import UUID + +from pydantic import BaseModel + +from app.schema.response.staff.uploads import UploadRequestPhotoListResponse, UploadRequestSchema +from app.service.upload_requests import UploadRequestGroupDetails +from db.generated.models import UploadRequestPhoto + + +class UploadRequestGroupSchema(BaseModel): + id: UUID + event_id: UUID + folder_id: str + requested_by: UUID + approved_by: UUID | None + 
status: str + total_photo_count: int + batch_count: int + created_at: datetime + approved_at: datetime | None + rejection_reason: str | None + requests: list[UploadRequestSchema] + + @classmethod + def from_details( + cls, + details: UploadRequestGroupDetails, + ) -> "UploadRequestGroupSchema": + return cls( + id=details.group.id, + event_id=details.group.event_id, + folder_id=details.group.folder_id, + requested_by=details.group.requested_by, + approved_by=details.group.approved_by, + status=getattr(details.group.status, "value", str(details.group.status)), + total_photo_count=details.group.total_photo_count, + batch_count=details.group.batch_count, + created_at=details.group.created_at, + approved_at=details.group.approved_at, + rejection_reason=details.group.rejection_reason, + requests=[ + UploadRequestSchema.from_models(request_details.request, request_details.photos) + for request_details in details.requests + ], + ) + + +class UploadRequestGroupListResponse(BaseModel): + items: list[UploadRequestGroupSchema] + + @classmethod + def from_details_list( + cls, + details_list: list[UploadRequestGroupDetails], + ) -> "UploadRequestGroupListResponse": + return cls(items=[UploadRequestGroupSchema.from_details(details) for details in details_list]) + + +class UploadRequestGroupPhotoListResponse(UploadRequestPhotoListResponse): + @classmethod + def from_photos( + cls, + photos: list[UploadRequestPhoto], + ) -> "UploadRequestGroupPhotoListResponse": + base_response = UploadRequestPhotoListResponse.from_models(photos) + return cls(items=base_response.items) diff --git a/app/schema/response/staff/uploads.py b/app/schema/response/staff/uploads.py index 74d7b7c..1d29e9a 100644 --- a/app/schema/response/staff/uploads.py +++ b/app/schema/response/staff/uploads.py @@ -38,6 +38,7 @@ def from_model( id: UUID event_id: UUID + group_id: UUID | None drive_file_id: str | None requested_by: UUID approved_by: UUID | None @@ -57,6 +58,7 @@ def from_models( return cls( 
id=upload_request.id, event_id=upload_request.event_id, + group_id=upload_request.group_id, drive_file_id=upload_request.drive_file_id, requested_by=upload_request.requested_by, approved_by=upload_request.approved_by, diff --git a/app/schema/response/web/user.py b/app/schema/response/web/user.py new file mode 100644 index 0000000..bd79627 --- /dev/null +++ b/app/schema/response/web/user.py @@ -0,0 +1,25 @@ +from datetime import datetime +from uuid import UUID + +from pydantic import BaseModel +from db.generated.models import User + + +class AdminUserSchema(BaseModel): + id: UUID + email: str + display_name: str | None + blocked: bool + created_at: datetime + updated_at: datetime + + +def to_admin_user_schema(user: User) -> AdminUserSchema: + return AdminUserSchema( + id=user.id, + email=user.email, + display_name=user.display_name, + blocked=user.blocked, + created_at=user.created_at, + updated_at=user.updated_at, + ) diff --git a/app/service/event.py b/app/service/event.py index 27e8e81..9723a06 100644 --- a/app/service/event.py +++ b/app/service/event.py @@ -12,7 +12,6 @@ UserEventResponse, ParticipantResponse ) -# Ensure these imports match your actual folder structure from db.generated import events as event_queries from db.generated import eventParticipant as participant_queries from db.generated import models diff --git a/app/service/face_embedding.py b/app/service/face_embedding.py index 11a0d81..71295e3 100644 --- a/app/service/face_embedding.py +++ b/app/service/face_embedding.py @@ -1,11 +1,13 @@ from __future__ import annotations import asyncio +from dataclasses import dataclass from typing import List, Literal, Optional, Sequence, Tuple, TypedDict import cv2 # type: ignore import numpy as np from insightface.app import FaceAnalysis # type: ignore[import-untyped] +from app.core.config import settings from app.core.exceptions import AppException @@ -27,18 +29,35 @@ class FaceStub: embedding: Optional[np.ndarray] = None +@dataclass(frozen=True) +class 
DetectedFace: + embedding: list[float] + bbox: Tuple[float, float, float, float] + + class FaceEmbedding: def __init__( self, - model_name: str = "buffalo_l", - providers: Sequence[str] = ("CPUExecutionProvider",), - ctx_id: int = -1, - det_size: Tuple[int, int] = (640, 640), + model_name: str | None = None, + providers: Sequence[str] | None = None, + ctx_id: int | None = None, + det_size: Tuple[int, int] | None = None, ) -> None: self.model: FaceAnalysis | None = None - self.model_name = model_name + self.model_name = model_name or settings.FACE_EMBEDDING_MODEL_NAME + if providers is None: + providers = tuple( + p.strip() + for p in settings.FACE_EMBEDDING_PROVIDERS.split(",") + if p.strip() + ) self.providers = providers - self.ctx_id = ctx_id + self.ctx_id = settings.FACE_EMBEDDING_CTX_ID if ctx_id is None else ctx_id + if det_size is None: + det_size = ( + settings.FACE_EMBEDDING_DET_WIDTH, + settings.FACE_EMBEDDING_DET_HEIGHT, + ) self.det_size = det_size self._initialized = False @@ -151,6 +170,59 @@ async def compute_average_embedding( return averaged.astype(float).tolist() + async def compute_event_embedding( + self, + payloads: Sequence[FaceImagePayload], + ) -> dict[str, list[list[float]]]: + + if not payloads: + raise AppException.bad_request( + "At least one image is required" + ) + + results: dict[str, list[list[float]]] = {} + + for payload in payloads: + try: + image = self._decode_image(payload) + image_rgb = cv2.cvtColor(image, cv2.COLOR_BGR2RGB) + + faces: list[FaceStub] = await asyncio.to_thread( + self.face_embedding.model.get, image_rgb # type: ignore + ) + + results[payload["filename"]] = [ + face.embedding.flatten().tolist() + for face in faces + if face.embedding is not None + ] + + except Exception as e: + print(f"[FaceEmbeddingService] Skipping {payload['filename']}: {e}") + results[payload["filename"]] = [] + + return results + + async def detect_faces( + self, + payload: FaceImagePayload, + ) -> list[DetectedFace]: + image = 
self._decode_image(payload) + image_rgb = cv2.cvtColor(image, cv2.COLOR_BGR2RGB) + + faces: list[FaceStub] = await asyncio.to_thread( # type: ignore + self.face_embedding.model.get, image_rgb # type: ignore + ) + + detected: list[DetectedFace] = [] + for face in faces: + if face.embedding is None: + continue + embedding = face.embedding.astype(float).flatten().tolist() + detected.append(DetectedFace(embedding=embedding, bbox=face.bbox)) + + return detected + def _decode_image(self, payload: FaceImagePayload) -> np.ndarray: buffer = np.frombuffer(payload["bytes"], dtype=np.uint8) diff --git a/app/service/session.py b/app/service/session.py index d0792d7..e441fc9 100644 --- a/app/service/session.py +++ b/app/service/session.py @@ -22,6 +22,8 @@ class SessionService : def init(self, session: session_queries.AsyncQuerier, redis: RedisClient) -> None: self.session_querier = session self.redis = redis + SessionService.session_querier = session + SessionService.redis = redis @staticmethod async def create_session(user_id:uuid.UUID,device_id:uuid.UUID)->UpsertSessionRow: diff --git a/app/service/single_face_match.py b/app/service/single_face_match.py new file mode 100644 index 0000000..30676c1 --- /dev/null +++ b/app/service/single_face_match.py @@ -0,0 +1,301 @@ +from __future__ import annotations + +import asyncio +import json +from dataclasses import dataclass +from uuid import UUID + +import sqlalchemy +import sqlalchemy.ext.asyncio + +from app.core.constant import MINIO_URL_PREFIX +from app.core.config import settings +from app.core.logger import logger +from sqlalchemy.exc import DBAPIError, SQLAlchemyError +from app.infra.minio import Bucket, IMAGES_BUCKET_NAME +from app.service.face_embedding import FaceEmbeddingService, FaceImagePayload +from app.schema.dto.single_face_match import BBoxPayload, SingleFaceMatchJob +from db.generated import photo_faces as photo_face_queries +from db.generated import models + + +@dataclass(frozen=True) +class ClosestUserMatch: + 
user_id: UUID + distance: float + + +PHOTO_EXISTS = """ +SELECT 1 +FROM photos +WHERE id = :photo_id +""" + +GET_CLOSEST_USER = """ +SELECT id, (face_embedding <=> CAST(:embedding AS vector)) AS distance +FROM users +WHERE face_embedding IS NOT NULL +ORDER BY distance ASC +LIMIT 1 +""" + +INSERT_FACE_MATCH = """ +INSERT INTO face_matches (photo_face_id, user_id, confidence) +VALUES (:photo_face_id, :user_id, :confidence) +RETURNING id +""" + +CHECK_MATCH_FOR_PHOTO = """ +SELECT 1 +FROM face_matches fm +JOIN photo_faces pf ON pf.id = fm.photo_face_id +WHERE pf.photo_id = :photo_id +LIMIT 1 +""" + +CHECK_MATCH_FOR_PHOTO_FACE = """ +SELECT 1 +FROM face_matches +WHERE photo_face_id = :photo_face_id +LIMIT 1 +""" + + + +class SingleFaceMatchService: + def __init__( + self, + *, + conn: sqlalchemy.ext.asyncio.AsyncConnection, + face_embedding_service: FaceEmbeddingService, + photo_face_querier: photo_face_queries.AsyncQuerier, + ) -> None: + self.conn = conn + self.face_embedding_service = face_embedding_service + self.photo_face_querier = photo_face_querier + + async def process_job(self, job: SingleFaceMatchJob) -> None: # noqa: C901 + if job.faces_detected is not None and job.faces_detected != 1: + logger.info( + "Skipping photo %s: faces_detected=%s (single-face worker)", + job.photo_id, + job.faces_detected, + ) + return + + if not job.image_ref: + logger.warning("Missing image_ref in event payload for photo %s", job.photo_id) + return + + if not await self._photo_exists(job.photo_id): + logger.warning("Photo not found: %s", job.photo_id) + return + if await self._match_exists_for_photo(job.photo_id): + logger.info("Photo %s already matched; skipping", job.photo_id) + return + + embedding, bbox = await self._resolve_embedding(job) + if embedding is None: + return + + try: + photo_face = await self._upsert_photo_face( + photo_id=job.photo_id, + face_index=job.face_index, + embedding=embedding, + bbox=bbox, + ) + if photo_face is None: + logger.warning("Failed to 
upsert photo_face for photo %s", job.photo_id) + return + await self._commit_best_effort() + except (DBAPIError, SQLAlchemyError) as exc: + await self._rollback_best_effort() + logger.warning("DB write failed for photo %s: %s", job.photo_id, exc) + return + except MemoryError: + logger.error("Out of memory while processing photo %s", job.photo_id) + return + + match = await self._find_closest_user(embedding) + if match is None: + logger.info("No user embeddings available for matching") + return + + if await self._match_exists_for_photo_face(photo_face.id): + logger.info("Match already exists for photo_face %s; skipping", photo_face.id) + return + + try: + await self._insert_face_match( + photo_face_id=photo_face.id, + user_id=match.user_id, + confidence=match.distance, + ) + await self._commit_best_effort() + except (DBAPIError, SQLAlchemyError) as exc: + await self._rollback_best_effort() + logger.warning("Failed to insert face match for photo %s: %s", job.photo_id, exc) + return + except MemoryError: + logger.error("Out of memory while matching photo %s", job.photo_id) + return + + async def _photo_exists(self, photo_id: UUID) -> bool: + row = (await self.conn.execute( + sqlalchemy.text(PHOTO_EXISTS), + {"photo_id": photo_id}, + )).first() + return row is not None + + async def _resolve_embedding( + self, + job: SingleFaceMatchJob, + ) -> tuple[list[float] | None, BBoxPayload | None]: + try: + payload = await self._load_payload(job) + except Exception as exc: + logger.warning("Failed to load image payload for photo %s: %s", job.photo_id, exc) + return None, None + + try: + faces = await self.face_embedding_service.detect_faces(payload) + except Exception as exc: + logger.warning("Face detection failed for photo %s: %s", job.photo_id, exc) + return None, None + + if len(faces) != 1: + logger.info( + "Skipping photo %s: detected %s faces (single-face worker)", + job.photo_id, + len(faces), + ) + return None, None + + face = faces[0] + bbox = BBoxPayload( + 
x1=float(face.bbox[0]), + y1=float(face.bbox[1]), + x2=float(face.bbox[2]), + y2=float(face.bbox[3]), + ) + return face.embedding, bbox + + async def _load_payload(self, job: SingleFaceMatchJob) -> FaceImagePayload: + if not job.image_ref: + raise ValueError("Missing image_ref in event payload") + + bucket_name, object_name = self._parse_minio_ref(job.image_ref) + bucket = Bucket(bucket_name, "") + last_exc: Exception | None = None + for attempt in range(1, settings.MINIO_RETRY_ATTEMPTS + 1): + try: + data, filename, content_type = await bucket.get(object_name) + return FaceImagePayload( + filename=filename, + content_type=content_type, + bytes=data, + ) + except Exception as exc: + last_exc = exc + logger.warning( + "MinIO fetch failed for %s (attempt %s/%s): %s", + object_name, + attempt, + settings.MINIO_RETRY_ATTEMPTS, + exc, + ) + if attempt < settings.MINIO_RETRY_ATTEMPTS: + await asyncio.sleep(settings.MINIO_RETRY_BASE_SECONDS * attempt) + assert last_exc is not None + raise last_exc + + async def _upsert_photo_face( + self, + *, + photo_id: UUID, + face_index: int, + embedding: list[float], + bbox: BBoxPayload | None, + ) -> models.PhotoFace | None: + embedding_literal = self._vector_literal(embedding) + bbox_payload = None + if bbox is not None: + bbox_payload = json.dumps( + {"x1": bbox.x1, "y1": bbox.y1, "x2": bbox.x2, "y2": bbox.y2} + ) + return await self.photo_face_querier.upsert_photo_face( + photo_id=photo_id, + face_index=face_index, + dollar_3=embedding_literal, + bbox=bbox_payload, + ) + + async def _find_closest_user( + self, + embedding: list[float], + ) -> ClosestUserMatch | None: + embedding_literal = self._vector_literal(embedding) + row = (await self.conn.execute( + sqlalchemy.text(GET_CLOSEST_USER), + {"embedding": embedding_literal}, + )).first() + if row is None: + return None + return ClosestUserMatch(user_id=row[0], distance=float(row[1])) + + async def _insert_face_match( + self, + *, + photo_face_id: UUID, + user_id: UUID, + 
confidence: float, + ) -> None: + await self.conn.execute( + sqlalchemy.text(INSERT_FACE_MATCH), + { + "photo_face_id": photo_face_id, + "user_id": user_id, + "confidence": confidence, + }, + ) + + async def _match_exists_for_photo(self, photo_id: UUID) -> bool: + row = (await self.conn.execute( + sqlalchemy.text(CHECK_MATCH_FOR_PHOTO), + {"photo_id": photo_id}, + )).first() + return row is not None + + async def _match_exists_for_photo_face(self, photo_face_id: UUID) -> bool: + row = (await self.conn.execute( + sqlalchemy.text(CHECK_MATCH_FOR_PHOTO_FACE), + {"photo_face_id": photo_face_id}, + )).first() + return row is not None + + async def _commit_best_effort(self) -> None: + try: + await self.conn.commit() + except Exception: + pass + + async def _rollback_best_effort(self) -> None: + try: + await self.conn.rollback() + except Exception: + pass + + @staticmethod + def _vector_literal(embedding: list[float]) -> str: + return "[" + ", ".join(str(x) for x in embedding) + "]" + + @staticmethod + def _parse_minio_ref(image_ref: str) -> tuple[str, str]: + if image_ref.startswith(MINIO_URL_PREFIX): + raw = image_ref[len(MINIO_URL_PREFIX) :] + parts = raw.split("/", 1) + if len(parts) != 2 or not parts[0] or not parts[1]: + raise ValueError("Invalid MinIO image_ref format") + return parts[0], parts[1] + return IMAGES_BUCKET_NAME, image_ref diff --git a/app/service/staff_drive.py b/app/service/staff_drive.py index 4e4019f..d7485c0 100644 --- a/app/service/staff_drive.py +++ b/app/service/staff_drive.py @@ -2,6 +2,7 @@ import hashlib import json import secrets +import urllib.parse import uuid from cryptography.fernet import Fernet, InvalidToken @@ -30,17 +31,42 @@ def __init__( self.drive_connection_querier = drive_connection_querier self.redis = redis - async def create_connect_url(self, staff_user: StaffUser) -> tuple[str, str]: + async def create_connect_url( + self, + staff_user: StaffUser, + redirect_url: str | None = None, + ) -> tuple[str, str]: state = 
secrets.token_urlsafe(32) + state_payload: dict[str, str] = {"staff_user_id": str(staff_user.id)} + if redirect_url is not None: + state_payload["redirect_url"] = self._validate_redirect_url(redirect_url) + await self.redis.set( self.STATE_PREFIX.format(state=state), - json.dumps({"staff_user_id": str(staff_user.id)}), + json.dumps(state_payload), expire=self.STATE_TTL_SECONDS, nx=True, ) return GoogleDriveClient.build_consent_url(state), state - async def handle_callback(self, code: str, state: str) -> StaffDriveConnection: + async def get_callback_redirect_url(self, state: str) -> str | None: + state_payload = await self.redis.get(self.STATE_PREFIX.format(state=state)) + if state_payload is None: + return None + try: + payload = json.loads(state_payload) + except json.JSONDecodeError: + return None + redirect_url = payload.get("redirect_url") + if isinstance(redirect_url, str) and redirect_url: + return redirect_url + return None + + async def handle_callback( + self, + code: str, + state: str, + ) -> tuple[StaffDriveConnection, str | None]: state_key = self.STATE_PREFIX.format(state=state) state_payload = await self.redis.get(state_key) if state_payload is None: @@ -49,10 +75,15 @@ async def handle_callback(self, code: str, state: str) -> StaffDriveConnection: await self.redis.delete(state_key) try: - staff_user_id = uuid.UUID(json.loads(state_payload)["staff_user_id"]) + payload = json.loads(state_payload) + staff_user_id = uuid.UUID(payload["staff_user_id"]) except (KeyError, ValueError, json.JSONDecodeError) as exc: raise AppException.bad_request("Invalid OAuth state payload") from exc + redirect_url = payload.get("redirect_url") + if redirect_url is not None and not isinstance(redirect_url, str): + raise AppException.bad_request("Invalid OAuth redirect URL") + staff_user = await self.staff_user_querier.get_staff_user_by_id(id=staff_user_id) if staff_user is None: raise AppException.not_found("Staff user not found") @@ -67,22 +98,20 @@ async def 
handle_callback(self, code: str, state: str) -> StaffDriveConnection: connection = await self.drive_connection_querier.upsert_staff_drive_connection( arg=drive_queries.UpsertStaffDriveConnectionParams( - staff_user_id=staff_user.id, - provider=self.PROVIDER, - google_email=user_info.email, - google_account_id=user_info.id, - access_token=encrypted_access_token, - refresh_token=encrypted_refresh_token, - token_expires_at=token.expires_at, - scopes=token.scope, - + staff_user_id=staff_user.id, + provider=self.PROVIDER, + google_email=user_info.email, + google_account_id=user_info.id, + access_token=encrypted_access_token, + refresh_token=encrypted_refresh_token, + token_expires_at=token.expires_at, + scopes=token.scope, ) - ) if connection is None: raise AppException.internal_error("Failed to save Google Drive connection") - return connection + return connection, redirect_url async def get_status(self, staff_user_id: uuid.UUID) -> StaffDriveConnection | None: return await self.drive_connection_querier.get_active_staff_drive_connection_by_staff_user_id( @@ -125,3 +154,27 @@ def decrypt(self, encrypted_value: str) -> str: def _fernet(self) -> Fernet: digest = hashlib.sha256(settings.encryption_key.encode("utf-8")).digest() return Fernet(base64.urlsafe_b64encode(digest)) + + @staticmethod + def _validate_redirect_url(redirect_url: str) -> str: + parsed = urllib.parse.urlparse(redirect_url) + if parsed.scheme not in {"http", "https"} or not parsed.netloc: + raise AppException.bad_request("Invalid redirect URL") + return redirect_url + + @staticmethod + def build_frontend_callback_url( + redirect_url: str, + *, + status: str, + google_email: str | None = None, + error: str | None = None, + ) -> str: + parsed = urllib.parse.urlparse(redirect_url) + query = urllib.parse.parse_qsl(parsed.query, keep_blank_values=True) + query.append(("status", status)) + if google_email is not None: + query.append(("google_email", google_email)) + if error is not None: + 
query.append(("error", error)) + return urllib.parse.urlunparse(parsed._replace(query=urllib.parse.urlencode(query))) diff --git a/app/service/staff_user.py b/app/service/staff_user.py index 1da37f5..6241818 100644 --- a/app/service/staff_user.py +++ b/app/service/staff_user.py @@ -110,8 +110,8 @@ async def admin_login( ) -> WebAuthResponse: print("hello") staff: StaffUser | None = await self.staff_user_querier.get_staff_user_by_email(email=email) - if staff is None or not verify_password(password, staff.password): - logger.info(f'user:{staff.email}') # type: ignore + if staff is None or not verify_password(password, staff.password): + logger.info("admin login failed for email %s", email) raise AppException.unauthorized("Invalid email or password") diff --git a/app/service/upload_requests.py b/app/service/upload_requests.py index 13759dd..1a49ac8 100644 --- a/app/service/upload_requests.py +++ b/app/service/upload_requests.py @@ -1,29 +1,34 @@ +from collections import defaultdict from collections.abc import Sequence from dataclasses import dataclass -from collections import defaultdict import json from typing import Literal import uuid +from sqlalchemy.exc import IntegrityError + from app.core.exceptions import AppException from app.core.logger import logger -from app.infra.google_drive import GoogleDriveClient, GoogleDriveFileDownload +from app.infra.google_drive import ( + GoogleDriveClient, + GoogleDriveFileDownload, + GoogleDriveFileMetadata, +) from app.infra.nats import NatsClient, NatsSubjects -from sqlalchemy.exc import IntegrityError - from app.schema.dto.staff.uploads import UploadPhotoInput from app.service.staged_upload_storage import PreviewObject, StagedUploadStorageService from app.service.staff_drive import StaffDriveService from app.service.staff_notifications import StaffNotificationsService from db.generated import photos as photo_queries +from db.generated import upload_request_groups as upload_request_group_queries from db.generated import 
upload_request_photos as upload_request_photo_queries from db.generated import upload_requests as upload_request_queries from db.generated.models import ( StaffRole, StaffUser, UploadRequest, + UploadRequestGroup, UploadRequestPhoto, - UploadRequestStatus, ) @@ -33,12 +38,20 @@ class UploadRequestDetails: photos: list[UploadRequestPhoto] +@dataclass +class UploadRequestGroupDetails: + group: UploadRequestGroup + requests: list[UploadRequestDetails] + + class UploadRequestsService: _allowed_mime_types = {"image/jpeg", "image/png", "image/webp"} _max_photo_size_bytes = 20 * 1024 * 1024 + _max_request_batch_size = 20 def __init__( self, + upload_request_group_querier: upload_request_group_queries.AsyncQuerier, upload_request_querier: upload_request_queries.AsyncQuerier, upload_request_photo_querier: upload_request_photo_queries.AsyncQuerier, photo_querier: photo_queries.AsyncQuerier, @@ -46,6 +59,7 @@ def __init__( staff_drive_service: StaffDriveService, staff_notifications_service: StaffNotificationsService, ): + self.upload_request_group_querier = upload_request_group_querier self.upload_request_querier = upload_request_querier self.upload_request_photo_querier = upload_request_photo_querier self.photo_querier = photo_querier @@ -61,6 +75,16 @@ def _status_value(status: object) -> str: def _role_value(role: object) -> str: return getattr(role, "value", str(role)) + @staticmethod + def _chunk_photo_inputs( + photos: Sequence[UploadPhotoInput], + chunk_size: int, + ) -> list[list[UploadPhotoInput]]: + return [ + list(photos[index : index + chunk_size]) + for index in range(0, len(photos), chunk_size) + ] + @staticmethod def _raise_integrity_error(exc: IntegrityError) -> None: orig = getattr(exc, "orig", None) @@ -80,11 +104,14 @@ def _validate_downloaded_photo(self, downloaded_photo: GoogleDriveFileDownload) if metadata.size_bytes <= 0 or metadata.size_bytes > self._max_photo_size_bytes: raise AppException.bad_request("Google Drive image exceeds maximum allowed size") 
+ def _is_supported_image(self, metadata: GoogleDriveFileMetadata) -> bool: + return metadata.mime_type in self._allowed_mime_types and metadata.size_bytes > 0 + @staticmethod def _validate_create_request_inputs(photos: Sequence[UploadPhotoInput]) -> None: if not photos: raise AppException.bad_request("At least one photo is required") - if len(photos) > 20: + if len(photos) > UploadRequestsService._max_request_batch_size: raise AppException.bad_request("A batch can contain at most 20 photos") drive_file_ids = [photo.drive_file_id for photo in photos] @@ -101,6 +128,31 @@ async def _cleanup_created_photos(self, created_photos: Sequence[UploadRequestPh created_photo.staging_storage_key, ) + async def _cleanup_created_group( + self, + *, + upload_group_id: uuid.UUID, + created_requests: Sequence[UploadRequestDetails], + ) -> None: + for request_details in reversed(created_requests): + try: + await self.upload_request_querier.delete_upload_request(id=request_details.request.id) + except Exception as exc: + logger.warning( + "Failed to delete upload request %s during group cleanup: %s", + request_details.request.id, + exc, + ) + + try: + await self.upload_request_group_querier.delete_upload_request_group(id=upload_group_id) + except Exception as exc: + logger.warning( + "Failed to delete upload request group %s during cleanup: %s", + upload_group_id, + exc, + ) + async def _cleanup_finalized_objects(self, storage_keys: Sequence[str]) -> None: for storage_key in storage_keys: try: @@ -133,8 +185,8 @@ async def _list_request_photos_by_request_ids( if not request_ids: return photos_by_request_id - async for photo in self.upload_request_photo_querier.list_upload_request_photos_by_upload_request_i_ds( - dollar_1=list(request_ids) + async for photo in self.upload_request_photo_querier.list_upload_request_photos_by_upload_request_ids( + upload_request_ids=list(request_ids) ): photos_by_request_id[photo.upload_request_id].append(photo) @@ -163,7 +215,6 @@ async def 
_create_staged_photo( try: created_photo = await self.upload_request_photo_querier.create_upload_request_photo( - arg=upload_request_photo_queries.CreateUploadRequestPhotoParams( upload_request_id=upload_request_id, drive_file_id=photo.drive_file_id, file_name=downloaded_photo.metadata.name, @@ -174,7 +225,6 @@ async def _create_staged_photo( day_number=photo.day_number, visibility=photo.visibility, status="staged", - ) ) except IntegrityError: try: @@ -198,6 +248,150 @@ async def _create_staged_photo( return created_photo + async def _create_request_with_access_token( + self, + *, + event_id: uuid.UUID, + photos: Sequence[UploadPhotoInput], + requested_by: StaffUser, + access_token: str, + group_id: uuid.UUID | None = None, + publish_event: bool = True, + ) -> UploadRequestDetails: + self._validate_create_request_inputs(photos) + + try: + upload_request = await self.upload_request_querier.create_upload_request( + event_id=event_id, + group_id=group_id, + drive_file_id=None, + requested_by=requested_by.id, + photo_count=len(photos), + ) + except IntegrityError as exc: + self._raise_integrity_error(exc) + if upload_request is None: + raise AppException.internal_error("Failed to create upload request") + + created_photos: list[UploadRequestPhoto] = [] + try: + for photo in photos: + created_photos.append( + await self._create_staged_photo( + upload_request_id=upload_request.id, + photo=photo, + access_token=access_token, + ) + ) + except IntegrityError as exc: + await self._cleanup_created_photos(created_photos) + self._raise_integrity_error(exc) + except Exception: + await self._cleanup_created_photos(created_photos) + raise + + if publish_event: + await self._publish_event( + subject=NatsSubjects.STAFF_UPLOAD_REQUEST_CREATED, + payload={ + "upload_request_id": str(upload_request.id), + "event_id": str(upload_request.event_id), + "requested_by": str(requested_by.id), + "photo_count": upload_request.photo_count, + "group_id": str(group_id) if group_id is not None 
else None, + }, + ) + + return UploadRequestDetails(request=upload_request, photos=created_photos) + + async def _approve_request_without_side_effects( + self, + *, + request_id: uuid.UUID, + approved_by: StaffUser, + ) -> tuple[UploadRequest, list[UploadRequestPhoto], list[str]]: + existing = await self.upload_request_querier.get_upload_request_by_id(id=request_id) + if existing is None: + raise AppException.not_found("Upload request not found") + if self._status_value(existing.status) != "pending": + raise AppException.bad_request("Upload request is not pending") + + staged_photos = await self.list_request_photos(request_id) + if not staged_photos: + raise AppException.bad_request("No staged photos found for this upload request") + + finalized_storage_keys: list[str] = [] + try: + for staged_photo in staged_photos: + final_storage_key = await self.staged_upload_storage.promote_to_final( + event_id=existing.event_id, + photo_id=staged_photo.id, + file_name=staged_photo.file_name, + staging_storage_key=staged_photo.staging_storage_key, + ) + finalized_storage_keys.append(final_storage_key) + created_photo = await self.photo_querier.create_photo( + photo_queries.CreatePhotoParams( + event_id=existing.event_id, + storage_key=final_storage_key, + taken_at=staged_photo.taken_at, + day_number=staged_photo.day_number, + visibility=staged_photo.visibility, + ) + ) + if created_photo is None: + raise AppException.internal_error("Failed to finalize staged photo") + updated_photo = await self.upload_request_photo_querier.update_upload_request_photo_approval( + id=staged_photo.id, + status="approved", + final_storage_key=final_storage_key, + ) + if updated_photo is None: + raise AppException.internal_error("Failed to update staged photo approval state") + + upload_request = await self.upload_request_querier.approve_upload_request( + id=request_id, + approved_by=approved_by.id, + ) + if upload_request is None: + raise AppException.internal_error("Failed to approve upload 
request") + except Exception: + await self._cleanup_finalized_objects(finalized_storage_keys) + raise + + return upload_request, staged_photos, finalized_storage_keys + + async def _reject_request_without_side_effects( + self, + *, + request_id: uuid.UUID, + approved_by: StaffUser, + reason: str | None, + ) -> tuple[UploadRequest, list[UploadRequestPhoto], list[UploadRequestPhoto]]: + existing = await self.upload_request_querier.get_upload_request_by_id(id=request_id) + if existing is None: + raise AppException.not_found("Upload request not found") + if self._status_value(existing.status) != "pending": + raise AppException.bad_request("Upload request is not pending") + + upload_request = await self.upload_request_querier.reject_upload_request( + id=request_id, + approved_by=approved_by.id, + rejection_reason=reason, + ) + if upload_request is None: + raise AppException.internal_error("Failed to reject upload request") + + staged_photos = await self.list_request_photos(request_id) + rejected_photos: list[UploadRequestPhoto] = [] + async for staged_photo in self.upload_request_photo_querier.update_upload_request_photo_status_by_upload_request_id( + upload_request_id=request_id, + status="rejected", + ): + rejected_photos.append(staged_photo) + + return upload_request, rejected_photos, staged_photos + def _ensure_request_access( self, *, @@ -210,6 +404,38 @@ def _ensure_request_access( return raise AppException.forbidden("You are not allowed to access this upload request") + def _ensure_group_access( + self, + *, + current_staff_user: StaffUser, + upload_group: UploadRequestGroup, + ) -> None: + if upload_group.requested_by == current_staff_user.id: + return + if self._role_value(current_staff_user.role) == StaffRole.MULTI_TEAM_LEAD.value: + return + raise AppException.forbidden("You are not allowed to access this upload request group") + + def _ensure_group_is_pending( + self, + group: UploadRequestGroup, + ) -> None: + if self._status_value(group.status) != 
"pending": + raise AppException.bad_request("Upload request group is not pending") + + def _ensure_all_requests_are_pending( + self, + requests: Sequence[UploadRequestDetails], + ) -> None: + if not requests: + raise AppException.bad_request("No upload requests found for this group") + + for request_details in requests: + if self._status_value(request_details.request.status) != "pending": + raise AppException.bad_request( + "Upload request group contains non-pending requests" + ) + async def _publish_event( self, *, @@ -221,43 +447,29 @@ async def _publish_event( except Exception as exc: logger.warning("Failed to publish upload request event %s: %s", subject.value, exc) - async def get_request_details( - self, - *, - request_id: uuid.UUID, - current_staff_user: StaffUser, - ) -> UploadRequestDetails: - upload_request = await self.upload_request_querier.get_upload_request_by_id(id=request_id) - if upload_request is None: - raise AppException.not_found("Upload request not found") - self._ensure_request_access( - current_staff_user=current_staff_user, - upload_request=upload_request, - ) - return UploadRequestDetails( - request=upload_request, - photos=await self.list_request_photos(upload_request.id), - ) - - async def get_request_photo_preview( + async def create_upload( self, *, - request_id: uuid.UUID, - photo_id: uuid.UUID, - current_staff_user: StaffUser, - ) -> PreviewObject: - upload_request = await self.upload_request_querier.get_upload_request_by_id(id=request_id) - if upload_request is None: - raise AppException.not_found("Upload request not found") - self._ensure_request_access( - current_staff_user=current_staff_user, - upload_request=upload_request, + event_id: uuid.UUID, + folder_id: str | None, + photos: Sequence[UploadPhotoInput], + visibility: str, + day_number: int | None, + requested_by: StaffUser, + ) -> UploadRequestDetails | UploadRequestGroupDetails: + if folder_id is not None: + return await self.create_group_from_folder( + event_id=event_id, 
+ folder_id=folder_id, + visibility=visibility, + day_number=day_number, + requested_by=requested_by, + ) + return await self.create_request( + event_id=event_id, + photos=photos, + requested_by=requested_by, ) - photo = await self.upload_request_photo_querier.get_upload_request_photo_by_id(id=photo_id) - if photo is None or photo.upload_request_id != request_id: - raise AppException.not_found("Upload request photo not found") - storage_key = photo.final_storage_key or photo.staging_storage_key - return await self.staged_upload_storage.get_preview(storage_key) async def create_request( self, @@ -266,90 +478,178 @@ async def create_request( photos: Sequence[UploadPhotoInput], requested_by: StaffUser, ) -> UploadRequestDetails: - self._validate_create_request_inputs(photos) - access_token = await self.staff_drive_service.get_access_token_for_staff_user( requested_by.id ) - upload_request: UploadRequest | None = None + return await self._create_request_with_access_token( + event_id=event_id, + photos=photos, + requested_by=requested_by, + access_token=access_token, + ) + async def create_group_from_folder( + self, + *, + event_id: uuid.UUID, + folder_id: str, + visibility: str, + day_number: int | None, + requested_by: StaffUser, + ) -> UploadRequestGroupDetails: + access_token = await self.staff_drive_service.get_access_token_for_staff_user( + requested_by.id + ) + folder_files = await GoogleDriveClient.list_folder_files( + access_token=access_token, + folder_id=folder_id, + ) + folder_files = sorted(folder_files, key=lambda file: (file.name.lower(), file.id)) + photo_inputs = [ + UploadPhotoInput( + drive_file_id=file.id, + taken_at=None, + day_number=day_number, + visibility=visibility, + ) + for file in folder_files + if self._is_supported_image(file) + ] + if not photo_inputs: + raise AppException.bad_request( + "Selected Google Drive folder does not contain valid images" + ) + photo_batches = self._chunk_photo_inputs(photo_inputs, self._max_request_batch_size) 
try: - upload_request = await self.upload_request_querier.create_upload_request( + upload_group = await self.upload_request_group_querier.create_upload_request_group( event_id=event_id, - drive_file_id=None, + folder_id=folder_id, requested_by=requested_by.id, - photo_count=len(photos), + total_photo_count=len(photo_inputs), + batch_count=len(photo_batches), ) except IntegrityError as exc: self._raise_integrity_error(exc) - if upload_request is None: - raise AppException.internal_error("Failed to create upload request") + if upload_group is None: + raise AppException.internal_error("Failed to create upload request group") - created_photos: list[UploadRequestPhoto] = [] + created_requests: list[UploadRequestDetails] = [] try: - for photo in photos: - created_photos.append( - await self._create_staged_photo( - upload_request_id=upload_request.id, - photo=photo, + for batch in photo_batches: + created_requests.append( + await self._create_request_with_access_token( + event_id=event_id, + photos=batch, + requested_by=requested_by, access_token=access_token, + group_id=upload_group.id, + publish_event=False, ) ) - except IntegrityError as exc: - await self._cleanup_created_photos(created_photos) - self._raise_integrity_error(exc) except Exception: + created_photos = [ + photo + for request_details in created_requests + for photo in request_details.photos + ] await self._cleanup_created_photos(created_photos) + await self._cleanup_created_group( + upload_group_id=upload_group.id, + created_requests=created_requests, + ) raise + for request_details in created_requests: + await self._publish_event( + subject=NatsSubjects.STAFF_UPLOAD_REQUEST_CREATED, + payload={ + "upload_request_id": str(request_details.request.id), + "event_id": str(request_details.request.event_id), + "requested_by": str(requested_by.id), + "photo_count": request_details.request.photo_count, + "group_id": str(upload_group.id), + }, + ) + await self._publish_event( - 
subject=NatsSubjects.STAFF_UPLOAD_REQUEST_CREATED, + subject=NatsSubjects.STAFF_UPLOAD_GROUP_CREATED, payload={ - "upload_request_id": str(upload_request.id), - "event_id": str(upload_request.event_id), + "group_id": str(upload_group.id), + "event_id": str(upload_group.event_id), "requested_by": str(requested_by.id), - "photo_count": upload_request.photo_count, + "total_photo_count": upload_group.total_photo_count, + "batch_count": upload_group.batch_count, }, ) + return UploadRequestGroupDetails(group=upload_group, requests=created_requests) - return UploadRequestDetails(request=upload_request, photos=created_photos) + async def get_request_details( + self, + *, + request_id: uuid.UUID, + current_staff_user: StaffUser, + ) -> UploadRequestDetails: + upload_request = await self.upload_request_querier.get_upload_request_by_id(id=request_id) + if upload_request is None: + raise AppException.not_found("Upload request not found") + self._ensure_request_access( + current_staff_user=current_staff_user, + upload_request=upload_request, + ) + return UploadRequestDetails( + request=upload_request, + photos=await self.list_request_photos(upload_request.id), + ) + + async def get_request_photo_preview( + self, + *, + request_id: uuid.UUID, + photo_id: uuid.UUID, + current_staff_user: StaffUser, + ) -> PreviewObject: + upload_request = await self.upload_request_querier.get_upload_request_by_id(id=request_id) + if upload_request is None: + raise AppException.not_found("Upload request not found") + self._ensure_request_access( + current_staff_user=current_staff_user, + upload_request=upload_request, + ) + photo = await self.upload_request_photo_querier.get_upload_request_photo_by_id(id=photo_id) + if photo is None or photo.upload_request_id != request_id: + raise AppException.not_found("Upload request photo not found") + storage_key = photo.final_storage_key or photo.staging_storage_key + return await self.staged_upload_storage.get_preview(storage_key) async def list_requests( 
self, *, current_staff_user: StaffUser, scope: Literal["my", "all"], - status: UploadRequestStatus | None, + status: str | None, ) -> list[UploadRequestDetails]: if scope == "all" and self._role_value(current_staff_user.role) != StaffRole.MULTI_TEAM_LEAD.value: raise AppException.forbidden("Multi team lead access required") requested_by = current_staff_user.id if scope == "my" else None - if requested_by is None: - logger.info("hello") - raise AppException.not_found("not requests") - else : - request_rows: list[UploadRequest] = [] - async for upload_request in self.upload_request_querier.list_upload_requests( - dollar_1=requested_by, - p2=status, - ): - request_rows.append(upload_request) - - photos_by_request_id = await self._list_request_photos_by_request_ids( - [upload_request.id for upload_request in request_rows] - ) + request_rows: list[UploadRequest] = [] + async for upload_request in self.upload_request_querier.list_upload_requests( + requested_by=requested_by, + status=status, + ): + request_rows.append(upload_request) - requests: list[UploadRequestDetails] = [] - for upload_request in request_rows: - requests.append( - UploadRequestDetails( - request=upload_request, - photos=photos_by_request_id.get(upload_request.id, []), - ) - ) - return requests + photos_by_request_id = await self._list_request_photos_by_request_ids( + [upload_request.id for upload_request in request_rows] + ) + return [ + UploadRequestDetails( + request=upload_request, + photos=photos_by_request_id.get(upload_request.id, []), + ) + for upload_request in request_rows + ] async def list_request_photos( self, @@ -362,58 +662,97 @@ async def list_request_photos( photos.append(photo) return photos - async def approve_request( + async def get_group_details( self, *, - request_id: uuid.UUID, - approved_by: StaffUser, - ) -> UploadRequestDetails: - existing = await self.upload_request_querier.get_upload_request_by_id(id=request_id) - if existing is None: - raise AppException.not_found("Upload 
request not found") - if self._status_value(existing.status) != "pending": - raise AppException.bad_request("Upload request is not pending") + group_id: uuid.UUID, + current_staff_user: StaffUser, + ) -> UploadRequestGroupDetails: + group = await self.upload_request_group_querier.get_upload_request_group_by_id(id=group_id) + if group is None: + raise AppException.not_found("Upload request group not found") + self._ensure_group_access( + current_staff_user=current_staff_user, + upload_group=group, + ) - staged_photos = await self.list_request_photos(request_id) - if not staged_photos: - raise AppException.bad_request("No staged photos found for this upload request") + requests: list[UploadRequest] = [] + async for upload_request in self.upload_request_querier.list_upload_requests_by_group_id( + group_id=group_id + ): + requests.append(upload_request) - finalized_storage_keys: list[str] = [] - try: - for staged_photo in staged_photos: - final_storage_key = await self.staged_upload_storage.promote_to_final( - event_id=existing.event_id, - photo_id=staged_photo.id, - file_name=staged_photo.file_name, - staging_storage_key=staged_photo.staging_storage_key, + photos_by_request_id = await self._list_request_photos_by_request_ids( + [upload_request.id for upload_request in requests] + ) + return UploadRequestGroupDetails( + group=group, + requests=[ + UploadRequestDetails( + request=upload_request, + photos=photos_by_request_id.get(upload_request.id, []), ) - finalized_storage_keys.append(final_storage_key) - created_photo = await self.photo_querier.create_photo( - arg=photo_queries.CreatePhotoParams( - event_id=existing.event_id, - storage_key=final_storage_key, - taken_at=staged_photo.taken_at, - day_number=staged_photo.day_number, - visibility=staged_photo.visibility, - ) + for upload_request in requests + ], + ) + async def list_groups( + self, + *, + current_staff_user: StaffUser, + scope: Literal["my", "all"], + status: str | None, + ) -> 
list[UploadRequestGroupDetails]: + if scope == "all" and self._role_value(current_staff_user.role) != StaffRole.MULTI_TEAM_LEAD.value: + raise AppException.forbidden("Multi team lead access required") + + requested_by = current_staff_user.id if scope == "my" else None + groups: list[UploadRequestGroup] = [] + async for group in self.upload_request_group_querier.list_upload_request_groups( + requested_by=requested_by, + status=status, + ): + groups.append(group) + + details: list[UploadRequestGroupDetails] = [] + for group in groups: + details.append( + await self.get_group_details( + group_id=group.id, + current_staff_user=current_staff_user, ) - if created_photo is None: - raise AppException.internal_error("Failed to finalize staged photo") - updated_photo = await self.upload_request_photo_querier.update_upload_request_photo_approval( - id=staged_photo.id, - status="approved", - final_storage_key=final_storage_key, - ) - if updated_photo is None: - raise AppException.internal_error("Failed to update staged photo approval state") + ) + return details - upload_request = await self.upload_request_querier.approve_upload_request( - id=request_id, - approved_by=approved_by.id, + async def list_group_photos( + self, + *, + group_id: uuid.UUID, + current_staff_user: StaffUser, + ) -> list[UploadRequestPhoto]: + group_details = await self.get_group_details( + group_id=group_id, + current_staff_user=current_staff_user, + ) + return [ + photo + for request_details in group_details.requests + for photo in request_details.photos + ] + + async def approve_request( + self, + *, + request_id: uuid.UUID, + approved_by: StaffUser, + ) -> UploadRequestDetails: + upload_request, staged_photos, finalized_storage_keys = ( + await self._approve_request_without_side_effects( + request_id=request_id, + approved_by=approved_by, ) - if upload_request is None: - raise AppException.internal_error("Failed to approve upload request") + ) + try: await 
self.staff_notifications_service.create_notification( staff_user_id=upload_request.requested_by, type="upload_request_approved", @@ -425,20 +764,20 @@ async def approve_request( "status": "approved", }, ) + await self._delete_staging_objects_best_effort(staged_photos) + await self._publish_event( + subject=NatsSubjects.STAFF_UPLOAD_REQUEST_APPROVED, + payload={ + "upload_request_id": str(upload_request.id), + "event_id": str(upload_request.event_id), + "approved_by": str(approved_by.id), + "photo_count": upload_request.photo_count, + }, + ) except Exception: await self._cleanup_finalized_objects(finalized_storage_keys) raise - await self._delete_staging_objects_best_effort(staged_photos) - await self._publish_event( - subject=NatsSubjects.STAFF_UPLOAD_REQUEST_APPROVED, - payload={ - "upload_request_id": str(upload_request.id), - "event_id": str(upload_request.event_id), - "approved_by": str(approved_by.id), - "photo_count": upload_request.photo_count, - }, - ) return UploadRequestDetails( request=upload_request, photos=await self.list_request_photos(request_id), @@ -451,28 +790,13 @@ async def reject_request( approved_by: StaffUser, reason: str | None, ) -> UploadRequestDetails: - existing = await self.upload_request_querier.get_upload_request_by_id(id=request_id) - if existing is None: - raise AppException.not_found("Upload request not found") - if self._status_value(existing.status) != "pending": - raise AppException.bad_request("Upload request is not pending") - - upload_request = await self.upload_request_querier.reject_upload_request( - id=request_id, - approved_by=approved_by.id, - rejection_reason=reason, + upload_request, rejected_photos, staged_photos = ( + await self._reject_request_without_side_effects( + request_id=request_id, + approved_by=approved_by, + reason=reason, + ) ) - if upload_request is None: - raise AppException.internal_error("Failed to reject upload request") - - staged_photos = await self.list_request_photos(request_id) - 
rejected_photos: list[UploadRequestPhoto] = [] - async for staged_photo in self.upload_request_photo_querier.update_upload_request_photo_status_by_upload_request_id( - upload_request_id=request_id, - status="rejected", - ): - rejected_photos.append(staged_photo) - await self.staff_notifications_service.create_notification( staff_user_id=upload_request.requested_by, type="upload_request_rejected", @@ -497,3 +821,158 @@ async def reject_request( ) await self._delete_staging_objects_best_effort(staged_photos) return UploadRequestDetails(request=upload_request, photos=rejected_photos) + + async def approve_group( + self, + *, + group_id: uuid.UUID, + approved_by: StaffUser, + ) -> UploadRequestGroupDetails: + group_details = await self.get_group_details( + group_id=group_id, + current_staff_user=approved_by, + ) + self._ensure_group_is_pending(group_details.group) + pending_requests = group_details.requests + self._ensure_all_requests_are_pending(pending_requests) + + approved_requests: list[UploadRequest] = [] + all_staged_photos: list[UploadRequestPhoto] = [] + finalized_storage_keys: list[str] = [] + try: + for request_details in pending_requests: + approved_request, staged_photos, request_storage_keys = ( + await self._approve_request_without_side_effects( + request_id=request_details.request.id, + approved_by=approved_by, + ) + ) + approved_requests.append(approved_request) + all_staged_photos.extend(staged_photos) + finalized_storage_keys.extend(request_storage_keys) + + upload_group = await self.upload_request_group_querier.approve_upload_request_group( + id=group_id, + approved_by=approved_by.id, + ) + if upload_group is None: + raise AppException.internal_error("Failed to approve upload request group") + + for approved_request in approved_requests: + await self.staff_notifications_service.create_notification( + staff_user_id=approved_request.requested_by, + type="upload_request_approved", + payload={ + "upload_request_id": str(approved_request.id), + 
"event_id": str(approved_request.event_id), + "photo_count": approved_request.photo_count, + "approved_by": str(approved_by.id), + "status": "approved", + }, + ) + await self._publish_event( + subject=NatsSubjects.STAFF_UPLOAD_REQUEST_APPROVED, + payload={ + "upload_request_id": str(approved_request.id), + "event_id": str(approved_request.event_id), + "approved_by": str(approved_by.id), + "photo_count": approved_request.photo_count, + }, + ) + + await self._delete_staging_objects_best_effort(all_staged_photos) + await self._publish_event( + subject=NatsSubjects.STAFF_UPLOAD_GROUP_APPROVED, + payload={ + "group_id": str(upload_group.id), + "event_id": str(upload_group.event_id), + "approved_by": str(approved_by.id), + "total_photo_count": upload_group.total_photo_count, + "batch_count": upload_group.batch_count, + }, + ) + except Exception: + await self._cleanup_finalized_objects(finalized_storage_keys) + raise + + return await self.get_group_details( + group_id=group_id, + current_staff_user=approved_by, + ) + + async def reject_group( + self, + *, + group_id: uuid.UUID, + approved_by: StaffUser, + reason: str | None, + ) -> UploadRequestGroupDetails: + group_details = await self.get_group_details( + group_id=group_id, + current_staff_user=approved_by, + ) + self._ensure_group_is_pending(group_details.group) + pending_requests = group_details.requests + self._ensure_all_requests_are_pending(pending_requests) + + rejected_requests: list[UploadRequest] = [] + all_staged_photos: list[UploadRequestPhoto] = [] + for request_details in pending_requests: + rejected_request, _rejected_photos, staged_photos = ( + await self._reject_request_without_side_effects( + request_id=request_details.request.id, + approved_by=approved_by, + reason=reason, + ) + ) + rejected_requests.append(rejected_request) + all_staged_photos.extend(staged_photos) + + upload_group = await self.upload_request_group_querier.reject_upload_request_group( + id=group_id, + approved_by=approved_by.id, + 
rejection_reason=reason, + ) + if upload_group is None: + raise AppException.internal_error("Failed to reject upload request group") + + for rejected_request in rejected_requests: + await self.staff_notifications_service.create_notification( + staff_user_id=rejected_request.requested_by, + type="upload_request_rejected", + payload={ + "upload_request_id": str(rejected_request.id), + "event_id": str(rejected_request.event_id), + "photo_count": rejected_request.photo_count, + "approved_by": str(approved_by.id), + "status": "rejected", + "reason": reason, + }, + ) + await self._publish_event( + subject=NatsSubjects.STAFF_UPLOAD_REQUEST_REJECTED, + payload={ + "upload_request_id": str(rejected_request.id), + "event_id": str(rejected_request.event_id), + "approved_by": str(approved_by.id), + "photo_count": rejected_request.photo_count, + "reason": reason, + }, + ) + + await self._delete_staging_objects_best_effort(all_staged_photos) + await self._publish_event( + subject=NatsSubjects.STAFF_UPLOAD_GROUP_REJECTED, + payload={ + "group_id": str(upload_group.id), + "event_id": str(upload_group.event_id), + "approved_by": str(approved_by.id), + "total_photo_count": upload_group.total_photo_count, + "batch_count": upload_group.batch_count, + "reason": reason, + }, + ) + return await self.get_group_details( + group_id=group_id, + current_staff_user=approved_by, + ) diff --git a/app/service/users.py b/app/service/users.py index 0e54045..8ab3f81 100644 --- a/app/service/users.py +++ b/app/service/users.py @@ -1,10 +1,8 @@ from datetime import datetime, timedelta, timezone import uuid -from app.core import constant -from app.core.exceptions import AppException +from app.core.exceptions import AppException, DBException from app.core.securite import ( - # EmbeddingCrypto, hash_password, verify_password, create_acces_mobile_token, @@ -12,6 +10,7 @@ decode_refresh_mobile_token, Get_expiry_time, ) +from app.core import constant from app.core.config import settings from app.infra.redis 
import RedisClient @@ -85,6 +84,8 @@ async def mobile_register_login( user: User | None = None if existing_user is not None: + if existing_user.blocked: + raise AppException.forbidden("User is blocked") if not verify_password(req.password, existing_user.hashed_password or ""): raise AppException.unauthorized("Invalid credentials") user = existing_user @@ -103,8 +104,6 @@ async def mobile_register_login( user_id: uuid.UUID = user.id session_key = constant.RedisKey.UserSessionByUser.value.format(user_id=user_id) - if await redis.exists(session_key): - raise AppException.forbidden("User already has an active session") session_count = await self.session_querier.count_user_sessions(user_id=user_id) if session_count and session_count >= AuthService.SESSION_LIMIT: @@ -143,7 +142,7 @@ async def mobile_register_login( return MobileAuthResponse( access_token=access_token, refresh_token=refresh_token, - session_id=str(session.id), + session_id=str(session.id), expires_in=expiry, ) @@ -166,15 +165,11 @@ async def refresh_token( if session.expires_at < datetime.now(timezone.utc): raise AppException.unauthorized("Session expired") - session_key = constant.RedisKey.UserSessionByUser.value.format( - user_id=session.user_id - ) - redis_session = await redis.get(session_key) - - if not redis_session or redis_session != session_id: - raise AppException.unauthorized("Session invalidated") - - await redis.expire(session_key, AuthService.REDIS_SESSION_TTL) + user = await self.user_querier.get_user_by_id(id=session.user_id) + if not user: + raise AppException.unauthorized("User not found") + if user.blocked: + raise AppException.forbidden("User is blocked") new_access_token = create_acces_mobile_token(session_id) new_refresh_token = create_refresh_mobile_token(session_id) @@ -201,16 +196,13 @@ async def add_embbed_user( self, user_id: uuid.UUID, image_payloads: list[FaceImagePayload], - ) ->User: + ) -> User: logger.info("Generating face embeddings for user %s", user_id) averaging = 
await self.face_embedding_service.compute_average_embedding( image_payloads ) - # pgvector accepts input like: "[0.1, 0.2, ...]". Convert list to a vector literal. vector_literal = "[" + ", ".join(str(x) for x in averaging) + "]" - #TODO:we encrypt it here we wont store it as plaintext in the db but the porblmem is were lossing the search as trade of in the vestor so i will let it like this until i found somthing tht fit - # encrypted_embedding = EmbeddingCrypto.encrypt(averaging) user = await self.user_querier.set_user_embedding( dollar_1=vector_literal, id=user_id, @@ -232,13 +224,129 @@ async def validate_session( if session.expires_at < datetime.now(timezone.utc): return False - - session_key = constant.RedisKey.UserSessionByUser.value.format( - user_id=session.user_id - ) - redis_session = await redis.get(session_key) - - return redis_session == session_id + return True async def get_user_by_id(self, user_id: uuid.UUID) -> User | None: return await self.user_querier.get_user_by_id(id=user_id) + + async def create_user( + self, + *, + email: str, + password: str, + display_name: str | None = None, + blocked: bool = False, + ) -> User: + try: + hashed = hash_password(password) + user = await self.user_querier.create_user( + email=email, + hashed_password=hashed, + ) + if not user: + raise AppException.internal_error("Failed to create user") + + if display_name is not None or blocked: + updated = await self.user_querier.update_user( + email=user.email, + display_name=display_name, + blocked=blocked, + id=user.id, + ) + if not updated: + raise AppException.internal_error("Failed to update user") + return updated + + return user + except Exception as exc: + logger.error("Failed to create user: %s", exc) + raise DBException.handle(exc) + + async def get_user(self, *, user_id: uuid.UUID) -> User: + user = await self.user_querier.get_user_by_id(id=user_id) + if not user: + raise AppException.not_found("User not found") + return user + + async def list_users(self, *, 
limit: int, offset: int) -> list[User]: + try: + users: list[User] = [] + async for user in self.user_querier.list_users(limit=limit, offset=offset): + users.append(user) + return users + except Exception as exc: + logger.error("Failed to list users: %s", exc) + raise DBException.handle(exc) + + async def update_user( + self, + *, + user_id: uuid.UUID, + email: str | None = None, + display_name: str | None = None, + blocked: bool | None = None, + ) -> User: + try: + existing = await self.user_querier.get_user_by_id(id=user_id) + if not existing: + raise AppException.not_found("User not found") + + new_email = email if email is not None else existing.email + new_display_name = ( + display_name if display_name is not None else existing.display_name + ) + new_blocked = blocked if blocked is not None else existing.blocked + + user = await self.user_querier.update_user( + email=new_email, + display_name=new_display_name, + blocked=new_blocked, + id=user_id, + ) + if not user: + raise AppException.internal_error("Failed to update user") + return user + except Exception as exc: + logger.error("Failed to update user: %s", exc) + raise DBException.handle(exc) + + async def delete_user(self, *, redis: RedisClient, user_id: uuid.UUID) -> User: + try: + existing = await self.user_querier.get_user_by_id(id=user_id) + if not existing: + raise AppException.not_found("User not found") + await self.user_querier.delete_user(id=user_id) + session_key = constant.RedisKey.UserSessionByUser.value.format( + user_id=user_id + ) + await redis.delete(session_key) + return existing + except Exception as exc: + logger.error("Failed to delete user: %s", exc) + raise DBException.handle(exc) + + async def block_user(self, *, redis: RedisClient, user_id: uuid.UUID) -> User: + try: + user = await self.user_querier.set_user_blocked(blocked=True, id=user_id) + if not user: + raise AppException.not_found("User not found") + + session_key = constant.RedisKey.UserSessionByUser.value.format( + 
user_id=user_id + ) + await redis.delete(session_key) + + return user + except Exception as exc: + logger.error("Failed to block user: %s", exc) + raise DBException.handle(exc) + + async def unblock_user(self, *, user_id: uuid.UUID) -> User: + try: + user = await self.user_querier.set_user_blocked(blocked=False, id=user_id) + if not user: + raise AppException.not_found("User not found") + return user + except Exception as exc: + logger.error("Failed to unblock user: %s", exc) + raise DBException.handle(exc) diff --git a/app/worker/notification/firebase.py b/app/worker/notification/firebase.py index ffc2360..c490f3b 100644 --- a/app/worker/notification/firebase.py +++ b/app/worker/notification/firebase.py @@ -2,9 +2,9 @@ from typing import cast # pyright: ignore[reportMissingTypeStubs] -import firebase_admin # type: ignore[import-untyped] +import firebase_admin # type: ignore[import-not-found,import-untyped] # pyright: ignore[reportMissingTypeStubs] -from firebase_admin import credentials, messaging # type: ignore[import-untyped] +from firebase_admin import credentials, messaging # type: ignore[import-not-found,import-untyped] from app.core.config import settings from app.core.logger import logger diff --git a/app/worker/single_face_match/__init__.py b/app/worker/single_face_match/__init__.py new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/app/worker/single_face_match/__init__.py @@ -0,0 +1 @@ + diff --git a/app/worker/single_face_match/worker.py b/app/worker/single_face_match/worker.py new file mode 100644 index 0000000..fa8ef37 --- /dev/null +++ b/app/worker/single_face_match/worker.py @@ -0,0 +1,85 @@ +from __future__ import annotations + +import asyncio + +from app.container import Container +from app.core.config import settings +from app.core.logger import logger +from app.infra.database import engine +from app.infra.minio import Bucket, init_minio_client +from app.infra.nats import NatsClient, NatsSubjects +from app.infra.redis import 
RedisClient +from app.schema.dto.single_face_match import SingleFaceMatchJob +from app.service.single_face_match import SingleFaceMatchService + + +class SingleFaceMatchWorker: + def __init__(self, service: SingleFaceMatchService) -> None: + self.service = service + + async def handle_message(self, data: bytes) -> None: + try: + job = SingleFaceMatchJob.model_validate_json(data) + except Exception as exc: + logger.warning("Failed to parse single face match job: %s", exc) + return + + try: + await self.service.process_job(job) + except Exception as exc: + logger.exception("Failed to process single face match job: %s", exc) + return + + +async def run_worker() -> None: + await init_minio_client( + minio_host=settings.MINIO_HOST, + minio_port=settings.MINIO_API_PORT, + minio_root_user=settings.MINIO_ROOT_USER, + minio_root_password=settings.MINIO_ROOT_PASSWORD, + ) + RedisClient( + host=settings.REDIS_HOST, + port=settings.REDIS_PORT, + password=settings.REDIS_PASSWORD, + ) + + async with engine.connect() as conn: + container = Container(conn) + service = SingleFaceMatchService( + conn=conn, + face_embedding_service=container.face_embedding_service, + photo_face_querier=container.photo_face_querier, + ) + worker = SingleFaceMatchWorker(service) + + await NatsClient.js_subscribe( + subject=NatsSubjects.SINGLE_FACE_MATCH_REQUESTED, + callback=worker.handle_message, + stream_name=settings.NATS_SINGLE_FACE_MATCH_STREAM, + durable_name=settings.NATS_SINGLE_FACE_MATCH_DURABLE, + ) + + logger.info("SingleFaceMatchWorker subscribed; waiting for jobs") + try: + await asyncio.Event().wait() + finally: + await _close_minio() + await NatsClient.close() + + +async def _close_minio() -> None: + client = getattr(Bucket, "client", None) + if client is None: + return + close_session = getattr(client, "close_session", None) + if close_session is None: + return + try: + await close_session() + except Exception: + pass + + +if __name__ == "__main__": + asyncio.run(run_worker()) diff 
--git a/app/worker/storage_cleaner/main.py b/app/worker/storage_cleaner/main.py index e69de29..344049c 100644 --- a/app/worker/storage_cleaner/main.py +++ b/app/worker/storage_cleaner/main.py @@ -0,0 +1,158 @@ +from __future__ import annotations + +import asyncio +import json +import uuid +from typing import Iterable, Optional, Set, Tuple + +import sqlalchemy.ext.asyncio +from fastapi import HTTPException +from pydantic import BaseModel, ValidationError + +from app.core.logger import logger +from app.infra.database import engine +from app.infra.nats import NatsClient +from app.service.staged_upload_storage import StagedUploadStorageService +from db.generated import upload_request_photos as upload_request_photo_queries +from app.worker.storage_cleaner.settings import settings + + +class FinalBucketCleanupPayload(BaseModel): + storage_keys: list[str] = [] + photo_ids: list[str] | None = None + ids: list[str] | None = None + + +storage_service = StagedUploadStorageService() + + +async def create_photo_querier() -> Tuple[ + sqlalchemy.ext.asyncio.AsyncConnection, + upload_request_photo_queries.AsyncQuerier, +]: + conn = await engine.connect() + querier = upload_request_photo_queries.AsyncQuerier(conn) + return conn, querier + + +async def close_connection(conn: sqlalchemy.ext.asyncio.AsyncConnection) -> None: + await conn.close() + + +def _parse_payload(raw_data: bytes | str) -> Optional[FinalBucketCleanupPayload]: + if isinstance(raw_data, bytes): + try: + raw_data = raw_data.decode("utf-8") + except UnicodeDecodeError as exc: + logger.warning("Final bucket cleanup payload failed to decode: %s", exc) + return None + + try: + parsed = json.loads(raw_data) + except (json.JSONDecodeError, TypeError) as exc: + logger.warning("Final bucket cleanup payload is invalid JSON: %s", exc) + return None + + if not isinstance(parsed, dict): + return None + + try: + return FinalBucketCleanupPayload.model_validate(parsed) + except ValidationError as exc: + logger.warning("Final 
bucket cleanup payload validation failed: %s", exc) + return None + + +async def resolve_final_storage_keys( + payload: FinalBucketCleanupPayload, + querier: upload_request_photo_queries.AsyncQuerier, +) -> Set[str]: + storage_keys: Set[str] = set(payload.storage_keys) + photo_ids = payload.photo_ids or payload.ids + if photo_ids: + storage_keys.update(await _fetch_keys_for_ids(photo_ids, querier)) + return storage_keys + + +async def _fetch_keys_for_ids( + photo_ids: Iterable[str], + querier: upload_request_photo_queries.AsyncQuerier, +) -> Set[str]: + keys: Set[str] = set() + for raw_id in photo_ids: + try: + photo_id = uuid.UUID(raw_id) + except ValueError: + logger.warning("Skipping invalid photo id %s", raw_id) + continue + photo = await querier.get_upload_request_photo_by_id(id=photo_id) + if photo is None: + logger.warning("No upload request photo found for %s", raw_id) + continue + if photo.final_storage_key is None: + logger.warning("Upload request photo %s has no final storage key", raw_id) + continue + keys.add(photo.final_storage_key) + return keys + + +async def _delete_storage_key(storage_key: str) -> None: + try: + await storage_service.delete_storage_key(storage_key) + logger.info("Removed finalized storage key %s", storage_key) + except HTTPException as exc: + detail = getattr(exc, "detail", exc) + logger.warning("Skipping cleanup for %s: %s", storage_key, detail) + except Exception: + logger.exception("Failed to delete %s, worker will retry", storage_key) + raise + + +async def _handle_cleanup_event( + raw_payload: bytes | str, + querier: upload_request_photo_queries.AsyncQuerier, +) -> None: + payload = _parse_payload(raw_payload) + if payload is None: + return + + storage_keys = await resolve_final_storage_keys(payload, querier) + if not storage_keys: + logger.info("Final bucket cleanup event contained no storage keys") + return + + logger.info( + "Cleaning %d finalized storage objects from JetStream schedule", + len(storage_keys), + ) + + for 
storage_key in storage_keys: + await _delete_storage_key(storage_key) + + +async def main() -> None: + conn, querier = await create_photo_querier() + await NatsClient.connect() + try: + async def _jetstream_handler(data: bytes | str) -> None: + await _handle_cleanup_event(data, querier) + + await NatsClient.js_subscribe( + subject=settings.subject_enum, + callback=_jetstream_handler, + stream_name=settings.stream_name, + durable_name=settings.durable_name, + ) + logger.info( + "Storage cleaner listening on %s for %d-day window", + settings.subject, + settings.WINDOW_DAYS, + ) + await asyncio.Event().wait() + finally: + await close_connection(conn) + await NatsClient.close() + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/app/worker/storage_cleaner/settings.py b/app/worker/storage_cleaner/settings.py index e69de29..23a56f6 100644 --- a/app/worker/storage_cleaner/settings.py +++ b/app/worker/storage_cleaner/settings.py @@ -0,0 +1,31 @@ +from __future__ import annotations + +from pydantic import Field +from pydantic_settings import BaseSettings + +from app.core.constant import ( + FINAL_BUCKET_CLEANUP_DURABLE_NAME, + FINAL_BUCKET_CLEANUP_STREAM, + FINAL_BUCKET_CLEANUP_SUBJECT, +) +from app.infra.nats import NatsSubjects + + +class StorageCleanerSettings(BaseSettings): + subject: str = Field(FINAL_BUCKET_CLEANUP_SUBJECT) + stream_name: str = Field(FINAL_BUCKET_CLEANUP_STREAM) + durable_name: str = Field(FINAL_BUCKET_CLEANUP_DURABLE_NAME) + WINDOW_DAYS = 7 + + class Config: + env_prefix = "STORAGE_CLEANER_" + + @property + def subject_enum(self) -> NatsSubjects: + try: + return NatsSubjects(self.subject) + except ValueError: + return NatsSubjects.FINAL_BUCKET_CLEANUP + + +settings = StorageCleanerSettings() # type: ignore diff --git a/db/generated/devices.py b/db/generated/devices.py index 2514ff9..4e1af31 100644 --- a/db/generated/devices.py +++ b/db/generated/devices.py @@ -21,7 +21,7 @@ COUNT__USER__DEVICES = """-- name: count__user__devices \\:one 
-SELECT COUNT(*) +SELECT COUNT(*) FROM user_devices WHERE user_id = :p1 """ diff --git a/db/generated/models.py b/db/generated/models.py index db5740d..fd0891f 100644 --- a/db/generated/models.py +++ b/db/generated/models.py @@ -185,6 +185,7 @@ class StaffUser: class UploadRequest: id: uuid.UUID event_id: uuid.UUID + group_id: Optional[uuid.UUID] drive_file_id: Optional[str] requested_by: uuid.UUID approved_by: Optional[uuid.UUID] @@ -195,6 +196,21 @@ class UploadRequest: rejection_reason: Optional[str] +@dataclasses.dataclass() +class UploadRequestGroup: + id: uuid.UUID + event_id: uuid.UUID + folder_id: str + requested_by: uuid.UUID + approved_by: Optional[uuid.UUID] + status: Any + total_photo_count: int + batch_count: int + created_at: datetime.datetime + approved_at: Optional[datetime.datetime] + rejection_reason: Optional[str] + + @dataclasses.dataclass() class UploadRequestPhoto: id: uuid.UUID @@ -221,7 +237,9 @@ class User: updated_at: datetime.datetime display_name: Optional[str] face_embedding: Optional[Any] + blocked: bool deleted_at: Optional[datetime.datetime] + blocked: bool @dataclasses.dataclass() diff --git a/db/generated/photo_faces.py b/db/generated/photo_faces.py new file mode 100644 index 0000000..09d76f1 --- /dev/null +++ b/db/generated/photo_faces.py @@ -0,0 +1,50 @@ +# Code generated by sqlc. DO NOT EDIT. 
+# versions: +# sqlc v1.30.0 +# source: photo_faces.sql +from typing import Any, Optional +import uuid + +import sqlalchemy +import sqlalchemy.ext.asyncio + +from db.generated import models + + +UPSERT_PHOTO_FACE = """-- name: upsert_photo_face \\:one +INSERT INTO photo_faces ( + photo_id, + face_index, + embedding, + bbox +) VALUES ( + :p1, :p2, :p3\\:\\:vector, :p4 +) +ON CONFLICT (photo_id, face_index) +DO UPDATE SET embedding = EXCLUDED.embedding, + bbox = EXCLUDED.bbox +RETURNING id, photo_id, face_index, embedding, bbox, created_at +""" + + +class AsyncQuerier: + def __init__(self, conn: sqlalchemy.ext.asyncio.AsyncConnection): + self._conn = conn + + async def upsert_photo_face(self, *, photo_id: uuid.UUID, face_index: int, dollar_3: Any, bbox: Optional[str]) -> Optional[models.PhotoFace]: + row = (await self._conn.execute(sqlalchemy.text(UPSERT_PHOTO_FACE), { + "p1": photo_id, + "p2": face_index, + "p3": dollar_3, + "p4": bbox, + })).first() + if row is None: + return None + return models.PhotoFace( + id=row[0], + photo_id=row[1], + face_index=row[2], + embedding=row[3], + bbox=row[4], + created_at=row[5], + ) diff --git a/db/generated/session.py b/db/generated/session.py index 1b8e026..bc7b427 100644 --- a/db/generated/session.py +++ b/db/generated/session.py @@ -4,7 +4,7 @@ # source: session.sql import dataclasses import datetime -from typing import Optional +from typing import AsyncIterator, Optional import uuid import sqlalchemy @@ -51,6 +51,13 @@ """ +LIST_SESSIONS_BY_USER = """-- name: list_sessions_by_user \\:many +SELECT id, user_id, device_id, created_at, last_active, expires_at +FROM user_sessions +WHERE user_id = :p1 +""" + + UPDATE_SESSION_ACTIVITY = """-- name: update_session_activity \\:exec UPDATE user_sessions SET last_active = NOW() @@ -135,6 +142,18 @@ async def get_session_by_id(self, *, id: uuid.UUID) -> Optional[models.UserSessi expires_at=row[5], ) + async def list_sessions_by_user(self, *, user_id: uuid.UUID) -> 
AsyncIterator[models.UserSession]: + result = await self._conn.stream(sqlalchemy.text(LIST_SESSIONS_BY_USER), {"p1": user_id}) + async for row in result: + yield models.UserSession( + id=row[0], + user_id=row[1], + device_id=row[2], + created_at=row[3], + last_active=row[4], + expires_at=row[5], + ) + async def update_session_activity(self, *, id: uuid.UUID) -> None: await self._conn.execute(sqlalchemy.text(UPDATE_SESSION_ACTIVITY), {"p1": id}) diff --git a/db/generated/upload_request_groups.py b/db/generated/upload_request_groups.py new file mode 100644 index 0000000..736c260 --- /dev/null +++ b/db/generated/upload_request_groups.py @@ -0,0 +1,310 @@ +# Code generated manually to match the sqlc async querier style used in the repo. +from typing import AsyncIterator, Optional +import uuid + +import sqlalchemy +import sqlalchemy.ext.asyncio + +from . import models + + +CREATE_UPLOAD_REQUEST_GROUP = """-- name: create_upload_request_group \\:one +INSERT INTO upload_request_groups ( + event_id, + folder_id, + requested_by, + total_photo_count, + batch_count +) VALUES ( + :p1, :p2, :p3, :p4, :p5 +) +RETURNING + id, + event_id, + folder_id, + requested_by, + approved_by, + status, + total_photo_count, + batch_count, + created_at, + approved_at, + rejection_reason +""" + + +GET_UPLOAD_REQUEST_GROUP_BY_ID = """-- name: get_upload_request_group_by_id \\:one +SELECT + id, + event_id, + folder_id, + requested_by, + approved_by, + status, + total_photo_count, + batch_count, + created_at, + approved_at, + rejection_reason +FROM upload_request_groups +WHERE id = :p1 +""" + + +LIST_UPLOAD_REQUEST_GROUPS = """-- name: list_upload_request_groups \\:many +SELECT + id, + event_id, + folder_id, + requested_by, + approved_by, + status, + total_photo_count, + batch_count, + created_at, + approved_at, + rejection_reason +FROM upload_request_groups +ORDER BY created_at DESC +""" + + +LIST_UPLOAD_REQUEST_GROUPS_BY_STATUS = """-- name: list_upload_request_groups_by_status \\:many +SELECT + 
id, + event_id, + folder_id, + requested_by, + approved_by, + status, + total_photo_count, + batch_count, + created_at, + approved_at, + rejection_reason +FROM upload_request_groups +WHERE status = :p1 +ORDER BY created_at DESC +""" + + +LIST_UPLOAD_REQUEST_GROUPS_BY_REQUESTER = """-- name: list_upload_request_groups_by_requester \\:many +SELECT + id, + event_id, + folder_id, + requested_by, + approved_by, + status, + total_photo_count, + batch_count, + created_at, + approved_at, + rejection_reason +FROM upload_request_groups +WHERE requested_by = :p1 +ORDER BY created_at DESC +""" + + +LIST_UPLOAD_REQUEST_GROUPS_BY_REQUESTER_AND_STATUS = """-- name: list_upload_request_groups_by_requester_and_status \\:many +SELECT + id, + event_id, + folder_id, + requested_by, + approved_by, + status, + total_photo_count, + batch_count, + created_at, + approved_at, + rejection_reason +FROM upload_request_groups +WHERE requested_by = :p1 + AND status = :p2 +ORDER BY created_at DESC +""" + + +APPROVE_UPLOAD_REQUEST_GROUP = """-- name: approve_upload_request_group \\:one +UPDATE upload_request_groups +SET status = 'approved', + approved_by = :p2, + approved_at = NOW(), + rejection_reason = NULL +WHERE id = :p1 + AND status = 'pending' +RETURNING + id, + event_id, + folder_id, + requested_by, + approved_by, + status, + total_photo_count, + batch_count, + created_at, + approved_at, + rejection_reason +""" + + +REJECT_UPLOAD_REQUEST_GROUP = """-- name: reject_upload_request_group \\:one +UPDATE upload_request_groups +SET status = 'rejected', + approved_by = :p2, + approved_at = NOW(), + rejection_reason = :p3 +WHERE id = :p1 + AND status = 'pending' +RETURNING + id, + event_id, + folder_id, + requested_by, + approved_by, + status, + total_photo_count, + batch_count, + created_at, + approved_at, + rejection_reason +""" + + +DELETE_UPLOAD_REQUEST_GROUP = """-- name: delete_upload_request_group \\:exec +DELETE FROM upload_request_groups +WHERE id = :p1 +""" + + +class AsyncQuerier: + def 
__init__(self, conn: sqlalchemy.ext.asyncio.AsyncConnection): + self._conn = conn + + async def create_upload_request_group( + self, + *, + event_id: uuid.UUID, + folder_id: str, + requested_by: uuid.UUID, + total_photo_count: int, + batch_count: int, + ) -> Optional[models.UploadRequestGroup]: + row = ( + await self._conn.execute( + sqlalchemy.text(CREATE_UPLOAD_REQUEST_GROUP), + { + "p1": event_id, + "p2": folder_id, + "p3": requested_by, + "p4": total_photo_count, + "p5": batch_count, + }, + ) + ).first() + if row is None: + return None + return _row_to_upload_request_group(row) + + async def get_upload_request_group_by_id( + self, + *, + id: uuid.UUID, + ) -> Optional[models.UploadRequestGroup]: + row = ( + await self._conn.execute( + sqlalchemy.text(GET_UPLOAD_REQUEST_GROUP_BY_ID), + {"p1": id}, + ) + ).first() + if row is None: + return None + return _row_to_upload_request_group(row) + + async def list_upload_request_groups( + self, + *, + requested_by: uuid.UUID | None, + status: str | None, + ) -> AsyncIterator[models.UploadRequestGroup]: + if requested_by is None and status is None: + statement = LIST_UPLOAD_REQUEST_GROUPS + params: dict[str, object] = {} + elif requested_by is None: + statement = LIST_UPLOAD_REQUEST_GROUPS_BY_STATUS + params = {"p1": status} + elif status is None: + statement = LIST_UPLOAD_REQUEST_GROUPS_BY_REQUESTER + params = {"p1": requested_by} + else: + statement = LIST_UPLOAD_REQUEST_GROUPS_BY_REQUESTER_AND_STATUS + params = {"p1": requested_by, "p2": status} + result = await self._conn.stream( + sqlalchemy.text(statement), + params, + ) + async for row in result: + yield _row_to_upload_request_group(row) + + async def approve_upload_request_group( + self, + *, + id: uuid.UUID, + approved_by: uuid.UUID, + ) -> Optional[models.UploadRequestGroup]: + row = ( + await self._conn.execute( + sqlalchemy.text(APPROVE_UPLOAD_REQUEST_GROUP), + {"p1": id, "p2": approved_by}, + ) + ).first() + if row is None: + return None + return 
_row_to_upload_request_group(row) + + async def reject_upload_request_group( + self, + *, + id: uuid.UUID, + approved_by: uuid.UUID, + rejection_reason: str | None, + ) -> Optional[models.UploadRequestGroup]: + row = ( + await self._conn.execute( + sqlalchemy.text(REJECT_UPLOAD_REQUEST_GROUP), + {"p1": id, "p2": approved_by, "p3": rejection_reason}, + ) + ).first() + if row is None: + return None + return _row_to_upload_request_group(row) + + async def delete_upload_request_group( + self, + *, + id: uuid.UUID, + ) -> None: + await self._conn.execute( + sqlalchemy.text(DELETE_UPLOAD_REQUEST_GROUP), + {"p1": id}, + ) + + +def _row_to_upload_request_group( + row: sqlalchemy.Row[tuple[object, ...]], +) -> models.UploadRequestGroup: + return models.UploadRequestGroup( + id=row[0], + event_id=row[1], + folder_id=row[2], + requested_by=row[3], + approved_by=row[4], + status=row[5], + total_photo_count=row[6], + batch_count=row[7], + created_at=row[8], + approved_at=row[9], + rejection_reason=row[10], + ) diff --git a/db/generated/upload_request_photos.py b/db/generated/upload_request_photos.py index 3dd7732..a329598 100644 --- a/db/generated/upload_request_photos.py +++ b/db/generated/upload_request_photos.py @@ -1,16 +1,12 @@ -# Code generated by sqlc. DO NOT EDIT. -# versions: -# sqlc v1.30.0 -# source: upload_request_photos.sql -import dataclasses +# Code generated manually to match the sqlc async querier style used in the repo. +from typing import AsyncIterator, Optional import datetime -from typing import AsyncIterator, List, Optional import uuid import sqlalchemy import sqlalchemy.ext.asyncio -from db.generated import models +from . 
import models CREATE_UPLOAD_REQUEST_PHOTO = """-- name: create_upload_request_photo \\:one @@ -28,50 +24,82 @@ ) VALUES ( :p1, :p2, :p3, :p4, :p5, :p6, :p7, :p8, :p9, :p10 ) -RETURNING id, upload_request_id, drive_file_id, file_name, mime_type, size_bytes, staging_storage_key, final_storage_key, taken_at, day_number, visibility, status, created_at -""" - - -@dataclasses.dataclass() -class CreateUploadRequestPhotoParams: - upload_request_id: uuid.UUID - drive_file_id: str - file_name: str - mime_type: str - size_bytes: int - staging_storage_key: str - taken_at: Optional[datetime.datetime] - day_number: Optional[int] - visibility: str - status: str - - -DELETE_UPLOAD_REQUEST_PHOTOS_BY_UPLOAD_REQUEST_ID = """-- name: delete_upload_request_photos_by_upload_request_id \\:exec -DELETE FROM upload_request_photos -WHERE upload_request_id = :p1 +RETURNING + id, + upload_request_id, + drive_file_id, + file_name, + mime_type, + size_bytes, + staging_storage_key, + final_storage_key, + taken_at, + day_number, + visibility, + status, + created_at """ -GET_UPLOAD_REQUEST_PHOTO_BY_ID = """-- name: get_upload_request_photo_by_id \\:one -SELECT id, upload_request_id, drive_file_id, file_name, mime_type, size_bytes, staging_storage_key, final_storage_key, taken_at, day_number, visibility, status, created_at +LIST_UPLOAD_REQUEST_PHOTOS_BY_UPLOAD_REQUEST_ID = """-- name: list_upload_request_photos_by_upload_request_id \\:many +SELECT + id, + upload_request_id, + drive_file_id, + file_name, + mime_type, + size_bytes, + staging_storage_key, + final_storage_key, + taken_at, + day_number, + visibility, + status, + created_at FROM upload_request_photos -WHERE id = :p1 +WHERE upload_request_id = :p1 +ORDER BY created_at ASC """ -LIST_UPLOAD_REQUEST_PHOTOS_BY_UPLOAD_REQUEST_I_DS = """-- name: list_upload_request_photos_by_upload_request_i_ds \\:many -SELECT id, upload_request_id, drive_file_id, file_name, mime_type, size_bytes, staging_storage_key, final_storage_key, taken_at, day_number, 
visibility, status, created_at +LIST_UPLOAD_REQUEST_PHOTOS_BY_UPLOAD_REQUEST_IDS = """-- name: list_upload_request_photos_by_upload_request_ids \\:many +SELECT + id, + upload_request_id, + drive_file_id, + file_name, + mime_type, + size_bytes, + staging_storage_key, + final_storage_key, + taken_at, + day_number, + visibility, + status, + created_at FROM upload_request_photos -WHERE upload_request_id = ANY(:p1\\:\\:uuid[]) +WHERE upload_request_id = ANY(:p1) ORDER BY created_at ASC """ -LIST_UPLOAD_REQUEST_PHOTOS_BY_UPLOAD_REQUEST_ID = """-- name: list_upload_request_photos_by_upload_request_id \\:many -SELECT id, upload_request_id, drive_file_id, file_name, mime_type, size_bytes, staging_storage_key, final_storage_key, taken_at, day_number, visibility, status, created_at +GET_UPLOAD_REQUEST_PHOTO_BY_ID = """-- name: get_upload_request_photo_by_id \\:one +SELECT + id, + upload_request_id, + drive_file_id, + file_name, + mime_type, + size_bytes, + staging_storage_key, + final_storage_key, + taken_at, + day_number, + visibility, + status, + created_at FROM upload_request_photos -WHERE upload_request_id = :p1 -ORDER BY created_at ASC +WHERE id = :p1 """ @@ -80,7 +108,20 @@ class CreateUploadRequestPhotoParams: SET status = :p2, final_storage_key = :p3 WHERE id = :p1 -RETURNING id, upload_request_id, drive_file_id, file_name, mime_type, size_bytes, staging_storage_key, final_storage_key, taken_at, day_number, visibility, status, created_at +RETURNING + id, + upload_request_id, + drive_file_id, + file_name, + mime_type, + size_bytes, + staging_storage_key, + final_storage_key, + taken_at, + day_number, + visibility, + status, + created_at """ @@ -88,7 +129,26 @@ class CreateUploadRequestPhotoParams: UPDATE upload_request_photos SET status = :p2 WHERE upload_request_id = :p1 -RETURNING id, upload_request_id, drive_file_id, file_name, mime_type, size_bytes, staging_storage_key, final_storage_key, taken_at, day_number, visibility, status, created_at +RETURNING + id, + 
upload_request_id, + drive_file_id, + file_name, + mime_type, + size_bytes, + staging_storage_key, + final_storage_key, + taken_at, + day_number, + visibility, + status, + created_at +""" + + +DELETE_UPLOAD_REQUEST_PHOTOS_BY_UPLOAD_REQUEST_ID = """-- name: delete_upload_request_photos_by_upload_request_id \\:exec +DELETE FROM upload_request_photos +WHERE upload_request_id = :p1 """ @@ -96,133 +156,136 @@ class AsyncQuerier: def __init__(self, conn: sqlalchemy.ext.asyncio.AsyncConnection): self._conn = conn - async def create_upload_request_photo(self, arg: CreateUploadRequestPhotoParams) -> Optional[models.UploadRequestPhoto]: - row = (await self._conn.execute(sqlalchemy.text(CREATE_UPLOAD_REQUEST_PHOTO), { - "p1": arg.upload_request_id, - "p2": arg.drive_file_id, - "p3": arg.file_name, - "p4": arg.mime_type, - "p5": arg.size_bytes, - "p6": arg.staging_storage_key, - "p7": arg.taken_at, - "p8": arg.day_number, - "p9": arg.visibility, - "p10": arg.status, - })).first() + async def create_upload_request_photo( + self, + *, + upload_request_id: uuid.UUID, + drive_file_id: str, + file_name: str, + mime_type: str, + size_bytes: int, + staging_storage_key: str, + taken_at: datetime.datetime | None, + day_number: int | None, + visibility: str, + status: str, + ) -> Optional[models.UploadRequestPhoto]: + row = ( + await self._conn.execute( + sqlalchemy.text(CREATE_UPLOAD_REQUEST_PHOTO), + { + "p1": upload_request_id, + "p2": drive_file_id, + "p3": file_name, + "p4": mime_type, + "p5": size_bytes, + "p6": staging_storage_key, + "p7": taken_at, + "p8": day_number, + "p9": visibility, + "p10": status, + }, + ) + ).first() if row is None: return None - return models.UploadRequestPhoto( - id=row[0], - upload_request_id=row[1], - drive_file_id=row[2], - file_name=row[3], - mime_type=row[4], - size_bytes=row[5], - staging_storage_key=row[6], - final_storage_key=row[7], - taken_at=row[8], - day_number=row[9], - visibility=row[10], - status=row[11], - created_at=row[12], + return 
_row_to_upload_request_photo(row) + + async def list_upload_request_photos_by_upload_request_id( + self, + *, + upload_request_id: uuid.UUID, + ) -> AsyncIterator[models.UploadRequestPhoto]: + result = await self._conn.stream( + sqlalchemy.text(LIST_UPLOAD_REQUEST_PHOTOS_BY_UPLOAD_REQUEST_ID), + {"p1": upload_request_id}, ) + async for row in result: + yield _row_to_upload_request_photo(row) - async def delete_upload_request_photos_by_upload_request_id(self, *, upload_request_id: uuid.UUID) -> None: - await self._conn.execute(sqlalchemy.text(DELETE_UPLOAD_REQUEST_PHOTOS_BY_UPLOAD_REQUEST_ID), {"p1": upload_request_id}) + async def list_upload_request_photos_by_upload_request_ids( + self, + *, + upload_request_ids: list[uuid.UUID], + ) -> AsyncIterator[models.UploadRequestPhoto]: + statement = sqlalchemy.text(LIST_UPLOAD_REQUEST_PHOTOS_BY_UPLOAD_REQUEST_IDS).bindparams( + sqlalchemy.bindparam("p1", type_=sqlalchemy.ARRAY(sqlalchemy.Uuid)) + ) + result = await self._conn.stream(statement, {"p1": upload_request_ids}) + async for row in result: + yield _row_to_upload_request_photo(row) - async def get_upload_request_photo_by_id(self, *, id: uuid.UUID) -> Optional[models.UploadRequestPhoto]: - row = (await self._conn.execute(sqlalchemy.text(GET_UPLOAD_REQUEST_PHOTO_BY_ID), {"p1": id})).first() + async def get_upload_request_photo_by_id( + self, + *, + id: uuid.UUID, + ) -> Optional[models.UploadRequestPhoto]: + row = ( + await self._conn.execute( + sqlalchemy.text(GET_UPLOAD_REQUEST_PHOTO_BY_ID), + {"p1": id}, + ) + ).first() if row is None: return None - return models.UploadRequestPhoto( - id=row[0], - upload_request_id=row[1], - drive_file_id=row[2], - file_name=row[3], - mime_type=row[4], - size_bytes=row[5], - staging_storage_key=row[6], - final_storage_key=row[7], - taken_at=row[8], - day_number=row[9], - visibility=row[10], - status=row[11], - created_at=row[12], - ) + return _row_to_upload_request_photo(row) - async def 
list_upload_request_photos_by_upload_request_i_ds(self, *, dollar_1: List[uuid.UUID]) -> AsyncIterator[models.UploadRequestPhoto]: - result = await self._conn.stream(sqlalchemy.text(LIST_UPLOAD_REQUEST_PHOTOS_BY_UPLOAD_REQUEST_I_DS), {"p1": dollar_1}) - async for row in result: - yield models.UploadRequestPhoto( - id=row[0], - upload_request_id=row[1], - drive_file_id=row[2], - file_name=row[3], - mime_type=row[4], - size_bytes=row[5], - staging_storage_key=row[6], - final_storage_key=row[7], - taken_at=row[8], - day_number=row[9], - visibility=row[10], - status=row[11], - created_at=row[12], + async def update_upload_request_photo_approval( + self, + *, + id: uuid.UUID, + status: str, + final_storage_key: str | None, + ) -> Optional[models.UploadRequestPhoto]: + row = ( + await self._conn.execute( + sqlalchemy.text(UPDATE_UPLOAD_REQUEST_PHOTO_APPROVAL), + {"p1": id, "p2": status, "p3": final_storage_key}, ) + ).first() + if row is None: + return None + return _row_to_upload_request_photo(row) - async def list_upload_request_photos_by_upload_request_id(self, *, upload_request_id: uuid.UUID) -> AsyncIterator[models.UploadRequestPhoto]: - result = await self._conn.stream(sqlalchemy.text(LIST_UPLOAD_REQUEST_PHOTOS_BY_UPLOAD_REQUEST_ID), {"p1": upload_request_id}) + async def update_upload_request_photo_status_by_upload_request_id( + self, + *, + upload_request_id: uuid.UUID, + status: str, + ) -> AsyncIterator[models.UploadRequestPhoto]: + result = await self._conn.stream( + sqlalchemy.text(UPDATE_UPLOAD_REQUEST_PHOTO_STATUS_BY_UPLOAD_REQUEST_ID), + {"p1": upload_request_id, "p2": status}, + ) async for row in result: - yield models.UploadRequestPhoto( - id=row[0], - upload_request_id=row[1], - drive_file_id=row[2], - file_name=row[3], - mime_type=row[4], - size_bytes=row[5], - staging_storage_key=row[6], - final_storage_key=row[7], - taken_at=row[8], - day_number=row[9], - visibility=row[10], - status=row[11], - created_at=row[12], - ) + yield 
_row_to_upload_request_photo(row) - async def update_upload_request_photo_approval(self, *, id: uuid.UUID, status: str, final_storage_key: Optional[str]) -> Optional[models.UploadRequestPhoto]: - row = (await self._conn.execute(sqlalchemy.text(UPDATE_UPLOAD_REQUEST_PHOTO_APPROVAL), {"p1": id, "p2": status, "p3": final_storage_key})).first() - if row is None: - return None - return models.UploadRequestPhoto( - id=row[0], - upload_request_id=row[1], - drive_file_id=row[2], - file_name=row[3], - mime_type=row[4], - size_bytes=row[5], - staging_storage_key=row[6], - final_storage_key=row[7], - taken_at=row[8], - day_number=row[9], - visibility=row[10], - status=row[11], - created_at=row[12], + async def delete_upload_request_photos_by_upload_request_id( + self, + *, + upload_request_id: uuid.UUID, + ) -> None: + await self._conn.execute( + sqlalchemy.text(DELETE_UPLOAD_REQUEST_PHOTOS_BY_UPLOAD_REQUEST_ID), + {"p1": upload_request_id}, ) - async def update_upload_request_photo_status_by_upload_request_id(self, *, upload_request_id: uuid.UUID, status: str) -> AsyncIterator[models.UploadRequestPhoto]: - result = await self._conn.stream(sqlalchemy.text(UPDATE_UPLOAD_REQUEST_PHOTO_STATUS_BY_UPLOAD_REQUEST_ID), {"p1": upload_request_id, "p2": status}) - async for row in result: - yield models.UploadRequestPhoto( - id=row[0], - upload_request_id=row[1], - drive_file_id=row[2], - file_name=row[3], - mime_type=row[4], - size_bytes=row[5], - staging_storage_key=row[6], - final_storage_key=row[7], - taken_at=row[8], - day_number=row[9], - visibility=row[10], - status=row[11], - created_at=row[12], - ) + +def _row_to_upload_request_photo( + row: sqlalchemy.Row[tuple[object, ...]], +) -> models.UploadRequestPhoto: + return models.UploadRequestPhoto( + id=row[0], + upload_request_id=row[1], + drive_file_id=row[2], + file_name=row[3], + mime_type=row[4], + size_bytes=row[5], + staging_storage_key=row[6], + final_storage_key=row[7], + taken_at=row[8], + day_number=row[9], + 
visibility=row[10], + status=row[11], + created_at=row[12], + ) diff --git a/db/generated/upload_requests.py b/db/generated/upload_requests.py index 0008eca..7c40b5f 100644 --- a/db/generated/upload_requests.py +++ b/db/generated/upload_requests.py @@ -1,57 +1,174 @@ -# Code generated by sqlc. DO NOT EDIT. -# versions: -# sqlc v1.30.0 -# source: upload_requests.sql +# Code generated manually to match the sqlc async querier style used in the repo. from typing import AsyncIterator, Optional import uuid import sqlalchemy import sqlalchemy.ext.asyncio -from db.generated import models - - -APPROVE_UPLOAD_REQUEST = """-- name: approve_upload_request \\:one -UPDATE upload_requests -SET status = 'approved', - approved_by = :p2, - approved_at = NOW(), - rejection_reason = NULL -WHERE id = :p1 - AND status = 'pending' -RETURNING id, event_id, drive_file_id, requested_by, approved_by, status, created_at, approved_at, photo_count, rejection_reason -""" +from . import models CREATE_UPLOAD_REQUEST = """-- name: create_upload_request \\:one INSERT INTO upload_requests ( event_id, + group_id, drive_file_id, requested_by, photo_count ) VALUES ( - :p1, :p2, :p3, :p4 + :p1, :p2, :p3, :p4, :p5 ) -RETURNING id, event_id, drive_file_id, requested_by, approved_by, status, created_at, approved_at, photo_count, rejection_reason +RETURNING + id, + event_id, + group_id, + drive_file_id, + requested_by, + approved_by, + status, + photo_count, + created_at, + approved_at, + rejection_reason """ GET_UPLOAD_REQUEST_BY_ID = """-- name: get_upload_request_by_id \\:one -SELECT id, event_id, drive_file_id, requested_by, approved_by, status, created_at, approved_at, photo_count, rejection_reason +SELECT + id, + event_id, + group_id, + drive_file_id, + requested_by, + approved_by, + status, + photo_count, + created_at, + approved_at, + rejection_reason FROM upload_requests WHERE id = :p1 """ +LIST_UPLOAD_REQUESTS_BY_GROUP_ID = """-- name: list_upload_requests_by_group_id \\:many +SELECT + id, + 
event_id, + group_id, + drive_file_id, + requested_by, + approved_by, + status, + photo_count, + created_at, + approved_at, + rejection_reason +FROM upload_requests +WHERE group_id = :p1 +ORDER BY created_at ASC +""" + + LIST_UPLOAD_REQUESTS = """-- name: list_upload_requests \\:many -SELECT id, event_id, drive_file_id, requested_by, approved_by, status, created_at, approved_at, photo_count, rejection_reason +SELECT + id, + event_id, + group_id, + drive_file_id, + requested_by, + approved_by, + status, + photo_count, + created_at, + approved_at, + rejection_reason +FROM upload_requests +ORDER BY created_at DESC +""" + + +LIST_UPLOAD_REQUESTS_BY_STATUS = """-- name: list_upload_requests_by_status \\:many +SELECT + id, + event_id, + group_id, + drive_file_id, + requested_by, + approved_by, + status, + photo_count, + created_at, + approved_at, + rejection_reason +FROM upload_requests +WHERE status = :p1 +ORDER BY created_at DESC +""" + + +LIST_UPLOAD_REQUESTS_BY_REQUESTER_AND_STATUS = """-- name: list_upload_requests_by_requester_and_status \\:many +SELECT + id, + event_id, + group_id, + drive_file_id, + requested_by, + approved_by, + status, + photo_count, + created_at, + approved_at, + rejection_reason FROM upload_requests -WHERE requested_by = :p1\\:\\:uuid - AND status = COALESCE(:p2\\:\\:upload_request_status, status) +WHERE requested_by = :p1 + AND status = :p2 ORDER BY created_at DESC """ +LIST_UPLOAD_REQUESTS_BY_REQUESTER = """-- name: list_upload_requests_by_requester \\:many +SELECT + id, + event_id, + group_id, + drive_file_id, + requested_by, + approved_by, + status, + photo_count, + created_at, + approved_at, + rejection_reason +FROM upload_requests +WHERE requested_by = :p1 +ORDER BY created_at DESC +""" + + +APPROVE_UPLOAD_REQUEST = """-- name: approve_upload_request \\:one +UPDATE upload_requests +SET status = 'approved', + approved_by = :p2, + approved_at = NOW(), + rejection_reason = NULL +WHERE id = :p1 + AND status = 'pending' +RETURNING + id, + 
event_id, + group_id, + drive_file_id, + requested_by, + approved_by, + status, + photo_count, + created_at, + approved_at, + rejection_reason +""" + + REJECT_UPLOAD_REQUEST = """-- name: reject_upload_request \\:one UPDATE upload_requests SET status = 'rejected', @@ -60,7 +177,24 @@ rejection_reason = :p3 WHERE id = :p1 AND status = 'pending' -RETURNING id, event_id, drive_file_id, requested_by, approved_by, status, created_at, approved_at, photo_count, rejection_reason +RETURNING + id, + event_id, + group_id, + drive_file_id, + requested_by, + approved_by, + status, + photo_count, + created_at, + approved_at, + rejection_reason +""" + + +DELETE_UPLOAD_REQUEST = """-- name: delete_upload_request \\:exec +DELETE FROM upload_requests +WHERE id = :p1 """ @@ -68,91 +202,138 @@ class AsyncQuerier: def __init__(self, conn: sqlalchemy.ext.asyncio.AsyncConnection): self._conn = conn - async def approve_upload_request(self, *, id: uuid.UUID, approved_by: Optional[uuid.UUID]) -> Optional[models.UploadRequest]: - row = (await self._conn.execute(sqlalchemy.text(APPROVE_UPLOAD_REQUEST), {"p1": id, "p2": approved_by})).first() + async def create_upload_request( + self, + *, + event_id: uuid.UUID, + group_id: uuid.UUID | None, + drive_file_id: str | None, + requested_by: uuid.UUID, + photo_count: int, + ) -> Optional[models.UploadRequest]: + row = ( + await self._conn.execute( + sqlalchemy.text(CREATE_UPLOAD_REQUEST), + { + "p1": event_id, + "p2": group_id, + "p3": drive_file_id, + "p4": requested_by, + "p5": photo_count, + }, + ) + ).first() if row is None: return None - return models.UploadRequest( - id=row[0], - event_id=row[1], - drive_file_id=row[2], - requested_by=row[3], - approved_by=row[4], - status=row[5], - created_at=row[6], - approved_at=row[7], - photo_count=row[8], - rejection_reason=row[9], + return _row_to_upload_request(row) + + async def list_upload_requests_by_group_id( + self, + *, + group_id: uuid.UUID, + ) -> AsyncIterator[models.UploadRequest]: + result = 
await self._conn.stream( + sqlalchemy.text(LIST_UPLOAD_REQUESTS_BY_GROUP_ID), + {"p1": group_id}, ) + async for row in result: + yield _row_to_upload_request(row) - async def create_upload_request(self, *, event_id: uuid.UUID, drive_file_id: Optional[str], requested_by: uuid.UUID, photo_count: int) -> Optional[models.UploadRequest]: - row = (await self._conn.execute(sqlalchemy.text(CREATE_UPLOAD_REQUEST), { - "p1": event_id, - "p2": drive_file_id, - "p3": requested_by, - "p4": photo_count, - })).first() + async def get_upload_request_by_id( + self, + *, + id: uuid.UUID, + ) -> Optional[models.UploadRequest]: + row = ( + await self._conn.execute( + sqlalchemy.text(GET_UPLOAD_REQUEST_BY_ID), + {"p1": id}, + ) + ).first() if row is None: return None - return models.UploadRequest( - id=row[0], - event_id=row[1], - drive_file_id=row[2], - requested_by=row[3], - approved_by=row[4], - status=row[5], - created_at=row[6], - approved_at=row[7], - photo_count=row[8], - rejection_reason=row[9], + return _row_to_upload_request(row) + + async def list_upload_requests( + self, + *, + requested_by: uuid.UUID | None, + status: str | None, + ) -> AsyncIterator[models.UploadRequest]: + if requested_by is None and status is None: + statement = LIST_UPLOAD_REQUESTS + params: dict[str, object] = {} + elif requested_by is None: + statement = LIST_UPLOAD_REQUESTS_BY_STATUS + params = {"p1": status} + elif status is None: + statement = LIST_UPLOAD_REQUESTS_BY_REQUESTER + params = {"p1": requested_by} + else: + statement = LIST_UPLOAD_REQUESTS_BY_REQUESTER_AND_STATUS + params = {"p1": requested_by, "p2": status} + result = await self._conn.stream( + sqlalchemy.text(statement), + params, ) + async for row in result: + yield _row_to_upload_request(row) - async def get_upload_request_by_id(self, *, id: uuid.UUID) -> Optional[models.UploadRequest]: - row = (await self._conn.execute(sqlalchemy.text(GET_UPLOAD_REQUEST_BY_ID), {"p1": id})).first() + async def approve_upload_request( + self, + *, + 
id: uuid.UUID, + approved_by: uuid.UUID, + ) -> Optional[models.UploadRequest]: + row = ( + await self._conn.execute( + sqlalchemy.text(APPROVE_UPLOAD_REQUEST), + {"p1": id, "p2": approved_by}, + ) + ).first() if row is None: return None - return models.UploadRequest( - id=row[0], - event_id=row[1], - drive_file_id=row[2], - requested_by=row[3], - approved_by=row[4], - status=row[5], - created_at=row[6], - approved_at=row[7], - photo_count=row[8], - rejection_reason=row[9], - ) + return _row_to_upload_request(row) - async def list_upload_requests(self, *, dollar_1: uuid.UUID, p2: Optional[models.UploadRequestStatus]) -> AsyncIterator[models.UploadRequest]: - result = await self._conn.stream(sqlalchemy.text(LIST_UPLOAD_REQUESTS), {"p1": dollar_1, "p2": p2}) - async for row in result: - yield models.UploadRequest( - id=row[0], - event_id=row[1], - drive_file_id=row[2], - requested_by=row[3], - approved_by=row[4], - status=row[5], - created_at=row[6], - approved_at=row[7], - photo_count=row[8], - rejection_reason=row[9], + async def reject_upload_request( + self, + *, + id: uuid.UUID, + approved_by: uuid.UUID, + rejection_reason: str | None, + ) -> Optional[models.UploadRequest]: + row = ( + await self._conn.execute( + sqlalchemy.text(REJECT_UPLOAD_REQUEST), + {"p1": id, "p2": approved_by, "p3": rejection_reason}, ) - - async def reject_upload_request(self, *, id: uuid.UUID, approved_by: Optional[uuid.UUID], rejection_reason: Optional[str]) -> Optional[models.UploadRequest]: - row = (await self._conn.execute(sqlalchemy.text(REJECT_UPLOAD_REQUEST), {"p1": id, "p2": approved_by, "p3": rejection_reason})).first() + ).first() if row is None: return None - return models.UploadRequest( - id=row[0], - event_id=row[1], - drive_file_id=row[2], - requested_by=row[3], - approved_by=row[4], - status=row[5], - created_at=row[6], - approved_at=row[7], - photo_count=row[8], - rejection_reason=row[9], + return _row_to_upload_request(row) + + async def delete_upload_request( + self, + 
*, + id: uuid.UUID, + ) -> None: + await self._conn.execute( + sqlalchemy.text(DELETE_UPLOAD_REQUEST), + {"p1": id}, ) + + +def _row_to_upload_request(row: sqlalchemy.Row[tuple[object, ...]]) -> models.UploadRequest: + return models.UploadRequest( + id=row[0], + event_id=row[1], + group_id=row[2], + drive_file_id=row[3], + requested_by=row[4], + approved_by=row[5], + status=row[6], + photo_count=row[7], + created_at=row[8], + approved_at=row[9], + rejection_reason=row[10], + ) diff --git a/db/generated/user.py b/db/generated/user.py index 2599d3a..e812192 100644 --- a/db/generated/user.py +++ b/db/generated/user.py @@ -14,7 +14,7 @@ CREATE_USER = """-- name: create_user \\:one INSERT INTO users (email, hashed_password) VALUES (:p1, :p2) -RETURNING id, email, hashed_password, created_at, updated_at, display_name, face_embedding, deleted_at +RETURNING id, email, hashed_password, created_at, updated_at, display_name, face_embedding, blocked, deleted_at """ @@ -25,33 +25,53 @@ GET_USER_BY_EMAIL = """-- name: get_user_by_email \\:one -SELECT id, email, hashed_password, created_at, updated_at, display_name, face_embedding, deleted_at +SELECT id, email, hashed_password, created_at, updated_at, display_name, face_embedding, blocked, deleted_at FROM users WHERE email = :p1 """ GET_USER_BY_ID = """-- name: get_user_by_id \\:one -SELECT id, email, hashed_password, created_at, updated_at, display_name, face_embedding, deleted_at +SELECT id, email, hashed_password, created_at, updated_at, display_name, face_embedding, blocked, deleted_at FROM users WHERE id = :p1 """ LIST_USERS = """-- name: list_users \\:many -SELECT id, email, hashed_password, created_at, updated_at, display_name, face_embedding, deleted_at +SELECT id, email, hashed_password, created_at, updated_at, display_name, face_embedding, blocked, deleted_at FROM users ORDER BY created_at DESC LIMIT :p1 OFFSET :p2 """ +SET_USER_BLOCKED = """-- name: set_user_blocked \\:one +UPDATE users +SET blocked = :p1, + updated_at 
= NOW() +WHERE id = :p2 +RETURNING id, email, hashed_password, created_at, updated_at, display_name, face_embedding, blocked, deleted_at +""" + + SET_USER_EMBEDDING = """-- name: set_user_embedding \\:one UPDATE users SET face_embedding = :p1\\:\\:vector, updated_at = NOW() WHERE id = :p2 -RETURNING id, email, hashed_password, created_at, updated_at, display_name, face_embedding, deleted_at +RETURNING id, email, hashed_password, created_at, updated_at, display_name, face_embedding, blocked, deleted_at +""" + + +UPDATE_USER = """-- name: update_user \\:one +UPDATE users +SET email = COALESCE(:p1, email), + display_name = COALESCE(:p2, display_name), + blocked = COALESCE(:p3, blocked), + updated_at = NOW() +WHERE id = :p4 +RETURNING id, email, hashed_password, created_at, updated_at, display_name, face_embedding, blocked, deleted_at """ @@ -60,7 +80,7 @@ SET hashed_password = :p1, updated_at = NOW() WHERE id = :p2 -RETURNING id, email, hashed_password, created_at, updated_at, display_name, face_embedding, deleted_at +RETURNING id, email, hashed_password, created_at, updated_at, display_name, face_embedding, blocked, deleted_at """ @@ -68,8 +88,14 @@ class AsyncQuerier: def __init__(self, conn: sqlalchemy.ext.asyncio.AsyncConnection): self._conn = conn - async def create_user(self, *, email: str, hashed_password: Optional[str]) -> Optional[models.User]: - row = (await self._conn.execute(sqlalchemy.text(CREATE_USER), {"p1": email, "p2": hashed_password})).first() + async def create_user( + self, *, email: str, hashed_password: Optional[str] + ) -> Optional[models.User]: + row = ( + await self._conn.execute( + sqlalchemy.text(CREATE_USER), {"p1": email, "p2": hashed_password} + ) + ).first() if row is None: return None return models.User( @@ -80,14 +106,19 @@ async def create_user(self, *, email: str, hashed_password: Optional[str]) -> Op updated_at=row[4], display_name=row[5], face_embedding=row[6], - deleted_at=row[7], + blocked=row[7], + deleted_at=row[8], ) async 
def delete_user(self, *, id: uuid.UUID) -> None: await self._conn.execute(sqlalchemy.text(DELETE_USER), {"p1": id}) async def get_user_by_email(self, *, email: str) -> Optional[models.User]: - row = (await self._conn.execute(sqlalchemy.text(GET_USER_BY_EMAIL), {"p1": email})).first() + row = ( + await self._conn.execute( + sqlalchemy.text(GET_USER_BY_EMAIL), {"p1": email} + ) + ).first() if row is None: return None return models.User( @@ -98,11 +129,14 @@ async def get_user_by_email(self, *, email: str) -> Optional[models.User]: updated_at=row[4], display_name=row[5], face_embedding=row[6], - deleted_at=row[7], + blocked=row[7], + deleted_at=row[8], ) async def get_user_by_id(self, *, id: uuid.UUID) -> Optional[models.User]: - row = (await self._conn.execute(sqlalchemy.text(GET_USER_BY_ID), {"p1": id})).first() + row = ( + await self._conn.execute(sqlalchemy.text(GET_USER_BY_ID), {"p1": id}) + ).first() if row is None: return None return models.User( @@ -113,11 +147,16 @@ async def get_user_by_id(self, *, id: uuid.UUID) -> Optional[models.User]: updated_at=row[4], display_name=row[5], face_embedding=row[6], - deleted_at=row[7], + blocked=row[7], + deleted_at=row[8], ) - async def list_users(self, *, limit: int, offset: int) -> AsyncIterator[models.User]: - result = await self._conn.stream(sqlalchemy.text(LIST_USERS), {"p1": limit, "p2": offset}) + async def list_users( + self, *, limit: int, offset: int + ) -> AsyncIterator[models.User]: + result = await self._conn.stream( + sqlalchemy.text(LIST_USERS), {"p1": limit, "p2": offset} + ) async for row in result: yield models.User( id=row[0], @@ -127,11 +166,36 @@ async def list_users(self, *, limit: int, offset: int) -> AsyncIterator[models.U updated_at=row[4], display_name=row[5], face_embedding=row[6], - deleted_at=row[7], + blocked=row[7], + deleted_at=row[8], + ) + + async def set_user_blocked(self, *, blocked: bool, id: uuid.UUID) -> Optional[models.User]: + row = ( + await self._conn.execute( + 
sqlalchemy.text(SET_USER_BLOCKED), {"p1": blocked, "p2": id} ) + ).first() + if row is None: + return None + return models.User( + id=row[0], + email=row[1], + hashed_password=row[2], + created_at=row[3], + updated_at=row[4], + display_name=row[5], + face_embedding=row[6], + blocked=row[7], + deleted_at=row[8], + ) async def set_user_embedding(self, *, dollar_1: Any, id: uuid.UUID) -> Optional[models.User]: - row = (await self._conn.execute(sqlalchemy.text(SET_USER_EMBEDDING), {"p1": dollar_1, "p2": id})).first() + row = ( + await self._conn.execute( + sqlalchemy.text(SET_USER_EMBEDDING), {"p1": dollar_1, "p2": id} + ) + ).first() if row is None: return None return models.User( @@ -142,11 +206,52 @@ async def set_user_embedding(self, *, dollar_1: Any, id: uuid.UUID) -> Optional[ updated_at=row[4], display_name=row[5], face_embedding=row[6], - deleted_at=row[7], + blocked=row[7], + deleted_at=row[8], ) - async def update_user_password(self, *, hashed_password: Optional[str], id: uuid.UUID) -> Optional[models.User]: - row = (await self._conn.execute(sqlalchemy.text(UPDATE_USER_PASSWORD), {"p1": hashed_password, "p2": id})).first() + async def update_user( + self, + *, + email: str, + display_name: Optional[str], + blocked: bool, + id: uuid.UUID, + ) -> Optional[models.User]: + row = ( + await self._conn.execute( + sqlalchemy.text(UPDATE_USER), + { + "p1": email, + "p2": display_name, + "p3": blocked, + "p4": id, + }, + ) + ).first() + if row is None: + return None + return models.User( + id=row[0], + email=row[1], + hashed_password=row[2], + created_at=row[3], + updated_at=row[4], + display_name=row[5], + face_embedding=row[6], + blocked=row[7], + deleted_at=row[8], + ) + + async def update_user_password( + self, *, hashed_password: Optional[str], id: uuid.UUID + ) -> Optional[models.User]: + row = ( + await self._conn.execute( + sqlalchemy.text(UPDATE_USER_PASSWORD), + {"p1": hashed_password, "p2": id}, + ) + ).first() if row is None: return None return models.User( 
@@ -157,5 +262,6 @@ async def update_user_password(self, *, hashed_password: Optional[str], id: uuid updated_at=row[4], display_name=row[5], face_embedding=row[6], - deleted_at=row[7], + blocked=row[7], + deleted_at=row[8], ) diff --git a/db/queries/photo_faces.sql b/db/queries/photo_faces.sql new file mode 100644 index 0000000..de3ffbb --- /dev/null +++ b/db/queries/photo_faces.sql @@ -0,0 +1,13 @@ +-- name: UpsertPhotoFace :one +INSERT INTO photo_faces ( + photo_id, + face_index, + embedding, + bbox +) VALUES ( + $1, $2, $3::vector, $4 +) +ON CONFLICT (photo_id, face_index) +DO UPDATE SET embedding = EXCLUDED.embedding, + bbox = EXCLUDED.bbox +RETURNING *; diff --git a/db/queries/session.sql b/db/queries/session.sql index 2a5b859..b22911e 100644 --- a/db/queries/session.sql +++ b/db/queries/session.sql @@ -28,6 +28,11 @@ SELECT * FROM user_sessions WHERE id = $1; +-- name: ListSessionsByUser :many +SELECT * +FROM user_sessions +WHERE user_id = $1; + -- name: UpdateSessionActivity :exec UPDATE user_sessions SET last_active = NOW() diff --git a/db/queries/upload_request_groups.sql b/db/queries/upload_request_groups.sql new file mode 100644 index 0000000..7dd1aeb --- /dev/null +++ b/db/queries/upload_request_groups.sql @@ -0,0 +1,64 @@ +-- name: CreateUploadRequestGroup :one +INSERT INTO upload_request_groups ( + event_id, + folder_id, + requested_by, + total_photo_count, + batch_count +) VALUES ( + $1, $2, $3, $4, $5 +) +RETURNING *; + +-- name: GetUploadRequestGroupByID :one +SELECT * +FROM upload_request_groups +WHERE id = $1; + +-- name: ListUploadRequestGroups :many +SELECT * +FROM upload_request_groups +ORDER BY created_at DESC; + +-- name: ListUploadRequestGroupsByStatus :many +SELECT * +FROM upload_request_groups +WHERE status = $1 +ORDER BY created_at DESC; + +-- name: ListUploadRequestGroupsByRequester :many +SELECT * +FROM upload_request_groups +WHERE requested_by = $1 +ORDER BY created_at DESC; + +-- name: ListUploadRequestGroupsByRequesterAndStatus 
:many +SELECT * +FROM upload_request_groups +WHERE requested_by = $1 + AND status = $2 +ORDER BY created_at DESC; + +-- name: ApproveUploadRequestGroup :one +UPDATE upload_request_groups +SET status = 'approved', + approved_by = $2, + approved_at = NOW(), + rejection_reason = NULL +WHERE id = $1 + AND status = 'pending' +RETURNING *; + +-- name: RejectUploadRequestGroup :one +UPDATE upload_request_groups +SET status = 'rejected', + approved_by = $2, + approved_at = NOW(), + rejection_reason = $3 +WHERE id = $1 + AND status = 'pending' +RETURNING *; + +-- name: DeleteUploadRequestGroup :exec +DELETE FROM upload_request_groups +WHERE id = $1; diff --git a/db/queries/upload_requests.sql b/db/queries/upload_requests.sql index c95fcad..043f641 100644 --- a/db/queries/upload_requests.sql +++ b/db/queries/upload_requests.sql @@ -1,11 +1,12 @@ -- name: CreateUploadRequest :one INSERT INTO upload_requests ( event_id, + group_id, drive_file_id, requested_by, photo_count ) VALUES ( - $1, $2, $3, $4 + $1, $2, $3, $4, $5 ) RETURNING *; @@ -14,11 +15,34 @@ SELECT * FROM upload_requests WHERE id = $1; +-- name: ListUploadRequestsByGroupID :many +SELECT * +FROM upload_requests +WHERE group_id = $1 +ORDER BY created_at ASC; + -- name: ListUploadRequests :many SELECT * FROM upload_requests -WHERE requested_by = $1::uuid - AND status = COALESCE(sqlc.narg('p2')::upload_request_status, status) +ORDER BY created_at DESC; + +-- name: ListUploadRequestsByStatus :many +SELECT * +FROM upload_requests +WHERE status = $1 +ORDER BY created_at DESC; + +-- name: ListUploadRequestsByRequester :many +SELECT * +FROM upload_requests +WHERE requested_by = $1 +ORDER BY created_at DESC; + +-- name: ListUploadRequestsByRequesterAndStatus :many +SELECT * +FROM upload_requests +WHERE requested_by = $1 + AND status = $2 ORDER BY created_at DESC; -- name: ApproveUploadRequest :one @@ -40,3 +64,7 @@ SET status = 'rejected', WHERE id = $1 AND status = 'pending' RETURNING *; + +-- name: DeleteUploadRequest 
:exec +DELETE FROM upload_requests +WHERE id = $1; diff --git a/db/queries/user.sql b/db/queries/user.sql index b9e984e..bc3fdd8 100644 --- a/db/queries/user.sql +++ b/db/queries/user.sql @@ -20,6 +20,22 @@ SET hashed_password = $1, WHERE id = $2 RETURNING *; +-- name: UpdateUser :one +UPDATE users +SET email = COALESCE($1, email), + display_name = COALESCE($2, display_name), + blocked = COALESCE($3, blocked), + updated_at = NOW() +WHERE id = $4 +RETURNING *; + +-- name: SetUserBlocked :one +UPDATE users +SET blocked = $1, + updated_at = NOW() +WHERE id = $2 +RETURNING *; + -- name: DeleteUser :exec DELETE FROM users WHERE id = $1; diff --git a/ersLenovoOneDriveDesktopMultiAImultAI-back.venvScriptsActivate.ps1 b/ersLenovoOneDriveDesktopMultiAImultAI-back.venvScriptsActivate.ps1 new file mode 100644 index 0000000..d27c36e --- /dev/null +++ b/ersLenovoOneDriveDesktopMultiAImultAI-back.venvScriptsActivate.ps1 @@ -0,0 +1,7 @@ + Maya/2-ai-face-detection + Maya/3-ai-face-embedding + feat/ai_pipline +* feat/event-face-embeddings + fix/optimize-face-detection + fix/remove-redundant-face-detection + main diff --git a/migrations/sql/down/add-blocked-to-users.sql b/migrations/sql/down/add-blocked-to-users.sql new file mode 100644 index 0000000..d9bcfd4 --- /dev/null +++ b/migrations/sql/down/add-blocked-to-users.sql @@ -0,0 +1,2 @@ +ALTER TABLE users +DROP COLUMN blocked; diff --git a/migrations/sql/down/add-upload-request-groups.sql b/migrations/sql/down/add-upload-request-groups.sql new file mode 100644 index 0000000..53c93f9 --- /dev/null +++ b/migrations/sql/down/add-upload-request-groups.sql @@ -0,0 +1,10 @@ +DROP INDEX IF EXISTS idx_upload_requests_group_id; + +ALTER TABLE upload_requests + DROP COLUMN IF EXISTS group_id; + +DROP INDEX IF EXISTS idx_upload_request_groups_status; +DROP INDEX IF EXISTS idx_upload_request_groups_requested_by; +DROP INDEX IF EXISTS idx_upload_request_groups_event_id; + +DROP TABLE IF EXISTS upload_request_groups; diff --git 
a/migrations/sql/down/alter-photo-faces-embedding-dim.sql b/migrations/sql/down/alter-photo-faces-embedding-dim.sql new file mode 100644 index 0000000..f3be603 --- /dev/null +++ b/migrations/sql/down/alter-photo-faces-embedding-dim.sql @@ -0,0 +1,2 @@ +ALTER TABLE photo_faces +ALTER COLUMN embedding TYPE vector(1536); diff --git a/migrations/sql/up/add-blocked-to-users.sql b/migrations/sql/up/add-blocked-to-users.sql new file mode 100644 index 0000000..c35e6fd --- /dev/null +++ b/migrations/sql/up/add-blocked-to-users.sql @@ -0,0 +1,2 @@ +ALTER TABLE users +ADD COLUMN blocked BOOLEAN NOT NULL DEFAULT FALSE; diff --git a/migrations/sql/up/add-upload-request-groups.sql b/migrations/sql/up/add-upload-request-groups.sql new file mode 100644 index 0000000..03ae017 --- /dev/null +++ b/migrations/sql/up/add-upload-request-groups.sql @@ -0,0 +1,28 @@ +CREATE TABLE IF NOT EXISTS upload_request_groups ( + id UUID PRIMARY KEY DEFAULT uuid_generate_v4(), + event_id UUID NOT NULL REFERENCES events(id) ON DELETE CASCADE, + folder_id TEXT NOT NULL, + requested_by UUID NOT NULL REFERENCES staff_users(id) ON DELETE RESTRICT, + approved_by UUID REFERENCES staff_users(id) ON DELETE SET NULL, + status upload_request_status NOT NULL DEFAULT 'pending', + total_photo_count INT NOT NULL DEFAULT 0, + batch_count INT NOT NULL DEFAULT 0, + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + approved_at TIMESTAMPTZ, + rejection_reason TEXT +); + +CREATE INDEX IF NOT EXISTS idx_upload_request_groups_event_id +ON upload_request_groups(event_id); + +CREATE INDEX IF NOT EXISTS idx_upload_request_groups_requested_by +ON upload_request_groups(requested_by); + +CREATE INDEX IF NOT EXISTS idx_upload_request_groups_status +ON upload_request_groups(status); + +ALTER TABLE upload_requests + ADD COLUMN IF NOT EXISTS group_id UUID REFERENCES upload_request_groups(id) ON DELETE CASCADE; + +CREATE INDEX IF NOT EXISTS idx_upload_requests_group_id +ON upload_requests(group_id); diff --git 
a/migrations/sql/up/alter-photo-faces-embedding-dim.sql b/migrations/sql/up/alter-photo-faces-embedding-dim.sql new file mode 100644 index 0000000..6538447 --- /dev/null +++ b/migrations/sql/up/alter-photo-faces-embedding-dim.sql @@ -0,0 +1,2 @@ +ALTER TABLE photo_faces +ALTER COLUMN embedding TYPE vector(512); diff --git a/migrations/versions/4dd6658b9f83_merge_heads.py b/migrations/versions/4dd6658b9f83_merge_heads.py new file mode 100644 index 0000000..b63cff0 --- /dev/null +++ b/migrations/versions/4dd6658b9f83_merge_heads.py @@ -0,0 +1,28 @@ +"""merge heads + +Revision ID: 4dd6658b9f83 +Revises: 9f6c1b4a3d21, c3b8d0f1e2a4 +Create Date: 2026-03-21 23:29:09.967007 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision: str = '4dd6658b9f83' +down_revision: Union[str, Sequence[str], None] = ('9f6c1b4a3d21', 'c3b8d0f1e2a4') +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + """Upgrade schema.""" + pass + + +def downgrade() -> None: + """Downgrade schema.""" + pass diff --git a/migrations/versions/5b6615c9ab1d_merge_heads.py b/migrations/versions/5b6615c9ab1d_merge_heads.py new file mode 100644 index 0000000..cea5228 --- /dev/null +++ b/migrations/versions/5b6615c9ab1d_merge_heads.py @@ -0,0 +1,28 @@ +"""merge_heads + +Revision ID: 5b6615c9ab1d +Revises: 9f1c3c6e9c1a, c3b8d0f1e2a4 +Create Date: 2026-03-20 02:33:56.591359 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. 
+revision: str = '5b6615c9ab1d' +down_revision: Union[str, Sequence[str], None] = ('9f1c3c6e9c1a', 'c3b8d0f1e2a4') +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + """Upgrade schema.""" + pass + + +def downgrade() -> None: + """Downgrade schema.""" + pass diff --git a/migrations/versions/9f1c3c6e9c1a_add_blocked_to_users.py b/migrations/versions/9f1c3c6e9c1a_add_blocked_to_users.py new file mode 100644 index 0000000..21b14d1 --- /dev/null +++ b/migrations/versions/9f1c3c6e9c1a_add_blocked_to_users.py @@ -0,0 +1,25 @@ +"""add-blocked-to-users + +Revision ID: 9f1c3c6e9c1a +Revises: 5ead72a95638 +Create Date: 2026-03-20 12:50:00.000000 + +""" +from typing import Sequence, Union + +from migrations.helper import run_sql_down, run_sql_up + + +# revision identifiers, used by Alembic. +revision: str = "9f1c3c6e9c1a" +down_revision: Union[str, Sequence[str], None] = "5ead72a95638" +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + run_sql_up("add-blocked-to-users") + + +def downgrade() -> None: + run_sql_down("add-blocked-to-users") diff --git a/migrations/versions/9f6c1b4a3d21_alter_photo_faces_embedding_dim.py b/migrations/versions/9f6c1b4a3d21_alter_photo_faces_embedding_dim.py new file mode 100644 index 0000000..86df9cc --- /dev/null +++ b/migrations/versions/9f6c1b4a3d21_alter_photo_faces_embedding_dim.py @@ -0,0 +1,24 @@ +"""alter photo_faces embedding dimension to 512 + +Revision ID: 9f6c1b4a3d21 +Revises: 5ead72a95638 +Create Date: 2026-03-21 23:23:00.000000 + +""" +from typing import Sequence, Union + +from alembic import op + +# revision identifiers, used by Alembic. 
+revision: str = "9f6c1b4a3d21" +down_revision: Union[str, Sequence[str], None] = "5ead72a95638" +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + op.execute("ALTER TABLE photo_faces ALTER COLUMN embedding TYPE vector(512);") + + +def downgrade() -> None: + op.execute("ALTER TABLE photo_faces ALTER COLUMN embedding TYPE vector(1536);") diff --git a/migrations/versions/a7b4c2d1e9f0_add_upload_request_groups.py b/migrations/versions/a7b4c2d1e9f0_add_upload_request_groups.py new file mode 100644 index 0000000..12f0008 --- /dev/null +++ b/migrations/versions/a7b4c2d1e9f0_add_upload_request_groups.py @@ -0,0 +1,25 @@ +"""add_upload_request_groups + +Revision ID: a7b4c2d1e9f0 +Revises: c3b8d0f1e2a4 +Create Date: 2026-03-25 00:10:00.000000 + +""" + +from typing import Sequence, Union + +from migrations.helper import run_sql_down, run_sql_up + + +revision: str = "a7b4c2d1e9f0" +down_revision: Union[str, Sequence[str], None] = "c3b8d0f1e2a4" +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + run_sql_up("add-upload-request-groups") + + +def downgrade() -> None: + run_sql_down("add-upload-request-groups")