diff --git a/compose.yaml b/compose.yaml
index bda973f64..01abb4648 100644
--- a/compose.yaml
+++ b/compose.yaml
@@ -134,6 +134,8 @@ services:
     depends_on:
       - redis
     env_file: .env
+    extra_hosts:
+      - "localhost:host-gateway"
     networks:
       - dtm-network
     volumes:
diff --git a/src/backend/app/arq/tasks.py b/src/backend/app/arq/tasks.py
index c50b55b74..5318f030f 100644
--- a/src/backend/app/arq/tasks.py
+++ b/src/backend/app/arq/tasks.py
@@ -1,12 +1,13 @@
 import asyncio
-from typing import Any, Dict, Optional
+import io
+from typing import Any, Dict, Optional, Tuple
 from uuid import UUID

 from arq import ArqRedis, create_pool
 from arq.connections import RedisSettings, log_redis_info
 from fastapi import HTTPException
 from loguru import logger as log
-from psycopg.rows import dict_row
+from PIL import Image

 from app.config import settings
 from app.db.database import get_db_connection_pool
@@ -14,12 +15,17 @@
     calculate_file_hash,
     check_duplicate_image,
     create_project_image,
     extract_exif_data,
+    mark_image_as_duplicate,
 )
 from app.images.image_schemas import ProjectImageCreate, ProjectImageOut
 from app.models.enums import HTTPStatus, ImageStatus
 from app.projects.project_logic import process_all_drone_images, process_drone_images
-from app.s3 import get_obj_from_bucket
+from app.s3 import async_get_obj_from_bucket, s3_client
+from app.projects.image_classification import ImageClassifier
+
+
+THUMBNAIL_SIZE = (200, 200)


 async def startup(ctx: Dict[Any, Any]) -> None:
@@ -50,6 +55,51 @@ async def shutdown(ctx: Dict[Any, Any]) -> None:
     log.info("Database connection pool closed")


+def generate_thumbnail(
+    image_bytes: bytes, size: Tuple[int, int] = THUMBNAIL_SIZE
+) -> bytes:
+    """Generate thumbnail from image bytes.
+
+    Args:
+        image_bytes: Original image bytes
+        size: Thumbnail size (width, height), defaults to 200x200
+
+    Returns:
+        Thumbnail image bytes in JPEG format
+
+    Raises:
+        ValueError: If image cannot be decoded
+    """
+    try:
+        # Open image from bytes
+        image = Image.open(io.BytesIO(image_bytes))
+
+        # Convert RGBA to RGB if necessary (for PNG with transparency)
+        if image.mode in ("RGBA", "LA", "P"):
+            background = Image.new("RGB", image.size, (255, 255, 255))
+            if image.mode == "P":
+                image = image.convert("RGBA")
+            background.paste(
+                image,
+                mask=image.split()[-1] if image.mode in ("RGBA", "LA") else None,
+            )
+            image = background
+
+        # Generate thumbnail maintaining aspect ratio
+        image.thumbnail(size, Image.Resampling.LANCZOS)
+
+        # Save to bytes
+        output = io.BytesIO()
+        image.save(output, format="JPEG", quality=85, optimize=True)
+        output.seek(0)
+
+        return output.getvalue()
+
+    except Exception as e:
+        log.error(f"Error generating thumbnail: {e}")
+        raise ValueError(f"Failed to generate thumbnail: {e}") from e
+
+
 async def sleep_task(ctx: Dict[Any, Any]) -> Dict[str, str]:
     """Test task to sleep for 1 minute"""
     job_id = ctx.get("job_id", "unknown")
@@ -95,6 +145,8 @@ async def _save_image_record(
     exif_dict: Optional[dict] = None,
     location: Optional[dict] = None,
     status: ImageStatus = ImageStatus.STAGED,
+    batch_id: Optional[str] = None,
+    thumbnail_url: Optional[str] = None,
 ) -> ProjectImageOut:
     """Save image record to database.

@@ -108,6 +160,8 @@ async def _save_image_record(
         exif_dict: EXIF data (optional)
         location: GPS location (optional)
         status: Image status (STAGED, INVALID_EXIF, etc.)
+ batch_id: Batch UUID for grouping uploads (optional) + thumbnail_url: S3 key for thumbnail (optional) Returns: ProjectImageOut: Saved image record @@ -121,6 +175,8 @@ async def _save_image_record( exif=exif_dict, uploaded_by=uploaded_by, status=status, + batch_id=UUID(batch_id) if batch_id else None, + thumbnail_url=thumbnail_url, ) image_record = await create_project_image(db, image_data) @@ -128,7 +184,8 @@ async def _save_image_record( log.info( f"Saved: {filename} | Status: {status} | " - f"GPS: {location is not None} | EXIF: {exif_dict is not None}" + f"GPS: {location is not None} | EXIF: {exif_dict is not None} | " + f"BatchID: {batch_id}" ) return image_record @@ -140,6 +197,7 @@ async def process_uploaded_image( file_key: str, filename: str, uploaded_by: str, + batch_id: Optional[str] = None, ) -> Dict[str, Any]: """Background task to process uploaded image: extract EXIF, calculate hash, save to DB. @@ -157,7 +215,10 @@ async def process_uploaded_image( dict: Processing result with image_id and status """ job_id = ctx.get("job_id", "unknown") - log.info(f"Starting process_uploaded_image (Job ID: {job_id}): {filename}") + log.info( + f"Starting process_uploaded_image (Job ID: {job_id}): {filename} | " + f"BatchID received: {batch_id}" + ) try: # Get database connection from pool @@ -167,27 +228,43 @@ async def process_uploaded_image( async with db_pool.connection() as db: log.info(f"Downloading file from S3: {file_key}") - file_obj = get_obj_from_bucket(settings.S3_BUCKET_NAME, file_key) + file_obj = await async_get_obj_from_bucket( + settings.S3_BUCKET_NAME, file_key + ) file_content = file_obj.read() log.info(f"Calculating hash for: {filename}") file_hash = calculate_file_hash(file_content) - # Step 2: Check for duplicates (idempotent behavior) - duplicate_id = await check_duplicate_image(db, UUID(project_id), file_hash) - if duplicate_id: - log.info(f"Duplicate detected: {file_hash} -> {duplicate_id}") - sql = "SELECT * FROM project_images WHERE id = %(id)s" - async with db.cursor(row_factory=dict_row) as cur: - await cur.execute(sql, {"id": str(duplicate_id)}) - existing_record = await cur.fetchone() + # Step 2: Check for duplicates + duplicate_of_id = await check_duplicate_image( + db, UUID(project_id), file_hash + ) + if duplicate_of_id: + log.info(f"Duplicate detected: {file_hash} -> {duplicate_of_id}") + # Create a new record marked as duplicate (so it shows in batch) + image_record = await _save_image_record( + db=db, + project_id=project_id, + filename=filename, + file_key=file_key, + file_hash=file_hash, + uploaded_by=uploaded_by, + exif_dict=None, + location=None, + status=ImageStatus.DUPLICATE, + batch_id=batch_id, + ) + # Mark with reference to original + await mark_image_as_duplicate(db, image_record.id, duplicate_of_id) return { - "image_id": str(duplicate_id), - "status": existing_record["status"], - "has_gps": existing_record["location"] is not None, + "image_id": str(image_record.id), + "status": ImageStatus.DUPLICATE.value, + "has_gps": False, "is_duplicate": True, - "message": "Duplicate image (idempotent)", + "duplicate_of": str(duplicate_of_id), + "message": "Duplicate image detected", } # Step 3: Extract EXIF (try-catch to handle failures gracefully) @@ -200,19 +277,60 @@ async def process_uploaded_image( if exif_dict: log.info( - f"✓ EXIF: {len(exif_dict)} tags | GPS: {location is not None}" + f" EXIF: {len(exif_dict)} tags | GPS: {location is not None}" ) log.debug(f"EXIF tags: {list(exif_dict.keys())[:10]}") else: - log.warning(f"✗ No EXIF data in: 
{filename}") + log.warning(f"No EXIF data in: {filename}") except Exception as exif_error: - log.error(f"✗ EXIF extraction failed for {filename}: {exif_error}") + log.error(f"EXIF extraction failed for {filename}: {exif_error}") + + # Step 4: Generate and upload thumbnail + thumbnail_s3_key = None + try: + log.info(f"Generating thumbnail for: {filename}") + # Generate thumbnail (run in threadpool since PIL is CPU-bound) + thumbnail_bytes = await asyncio.to_thread( + generate_thumbnail, file_content + ) + + # Create thumbnail S3 key (store in thumbnails/ subdirectory) + thumbnail_s3_key = file_key.replace("/images/", "/thumbnails/", 1) + if "/images/" not in file_key: + # Fallback: add thumb_ prefix + parts = file_key.rsplit("/", 1) + thumbnail_s3_key = ( + f"{parts[0]}/thumb_{parts[1]}" + if len(parts) > 1 + else f"thumb_{file_key}" + ) + + # Upload thumbnail to S3 + log.info(f"Uploading thumbnail to S3: {thumbnail_s3_key}") + client = s3_client() + thumbnail_s3_key = thumbnail_s3_key.lstrip("/") + client.put_object( + settings.S3_BUCKET_NAME, + thumbnail_s3_key, + io.BytesIO(thumbnail_bytes), + len(thumbnail_bytes), + content_type="image/jpeg", + ) + + log.info(f"Thumbnail generated and uploaded: {thumbnail_s3_key}") - # Step 4: Determine status + except Exception as thumb_error: + log.warning( + f"Failed to generate/upload thumbnail for {filename}: {thumb_error}" + ) + # Continue even if thumbnail generation fails + thumbnail_s3_key = None + + # Step 5: Determine status status = ImageStatus.STAGED if exif_dict else ImageStatus.INVALID_EXIF - # Step 5: Save image record (ALWAYS save, even if EXIF failed) + # Step 6: Save image record (ALWAYS save, even if EXIF/thumbnail failed) image_record = await _save_image_record( db=db, project_id=project_id, @@ -223,10 +341,12 @@ async def process_uploaded_image( exif_dict=exif_dict, location=location, status=status, + batch_id=batch_id, + thumbnail_url=thumbnail_s3_key, ) log.info( - f"✓ Completed (Job: {job_id}): " + f"Completed (Job: {job_id}): " f"ID={image_record.id} | Status={status} | " f"EXIF={'Yes' if exif_dict else 'No'} | GPS={'Yes' if location else 'No'}" ) @@ -239,7 +359,140 @@ async def process_uploaded_image( } except Exception as e: - log.error(f"✗ Failed (Job: {job_id}): {str(e)}") + log.error(f"Failed (Job: {job_id}): {str(e)}") + raise + + +async def classify_image_batch( + ctx: Dict[Any, Any], + project_id: str, + batch_id: str, +) -> Dict: + job_id = ctx.get("job_id", "unknown") + log.info(f"Starting batch classification job {job_id} for batch {batch_id}") + + db_pool = ctx.get("db_pool") + if not db_pool: + raise RuntimeError("Database pool not initialized in ARQ context") + + try: + # Pass the pool directly so classify_batch can get separate connections + # for each parallel worker + result = await ImageClassifier.classify_batch( + db_pool, + UUID(batch_id), + UUID(project_id), + ) + + log.info( + f"Batch classification complete: " + f"Total={result['total']}, Assigned={result['assigned']}, " + f"Rejected={result['rejected']}, Unmatched={result['unmatched']}" + ) + + return result + + except Exception as e: + log.error(f"Batch classification failed: {str(e)}") + raise + + +async def delete_batch_images( + ctx: Dict[Any, Any], + project_id: str, + batch_id: str, +) -> Dict[str, Any]: + """Background task to delete all images in a batch from both database and S3. 
+ + Args: + ctx: ARQ context + project_id: UUID of the project + batch_id: UUID of the batch to delete + + Returns: + dict: Deletion result with counts + """ + job_id = ctx.get("job_id", "unknown") + log.info(f"Starting delete_batch_images (Job ID: {job_id}): batch={batch_id}") + + db_pool = ctx.get("db_pool") + if not db_pool: + raise RuntimeError("Database pool not initialized in ARQ context") + + try: + async with db_pool.connection() as conn: + # Get all S3 keys for images and thumbnails in this batch + query = """ + SELECT s3_key, thumbnail_url + FROM project_images + WHERE batch_id = %(batch_id)s + AND project_id = %(project_id)s + """ + + async with conn.cursor() as cur: + await cur.execute( + query, + {"batch_id": batch_id, "project_id": project_id}, + ) + rows = await cur.fetchall() + + # Collect all S3 keys to delete + s3_keys_to_delete = [] + for row in rows: + s3_key, thumbnail_url = row + if s3_key: + s3_keys_to_delete.append(s3_key) + if thumbnail_url: + s3_keys_to_delete.append(thumbnail_url) + + image_count = len(rows) + log.info( + f"Found {image_count} images and {len(s3_keys_to_delete)} S3 objects to delete" + ) + + # Delete from S3 + deleted_s3_count = 0 + if s3_keys_to_delete: + client = s3_client() + for key in s3_keys_to_delete: + try: + key = key.lstrip("/") + client.remove_object(settings.S3_BUCKET_NAME, key) + deleted_s3_count += 1 + except Exception as e: + log.warning(f"Failed to delete S3 object {key}: {e}") + + log.info(f"Deleted {deleted_s3_count} objects from S3") + + # Delete from database + delete_query = """ + DELETE FROM project_images + WHERE batch_id = %(batch_id)s + AND project_id = %(project_id)s + """ + + async with conn.cursor() as cur: + await cur.execute( + delete_query, + {"batch_id": batch_id, "project_id": project_id}, + ) + + await conn.commit() + + log.info( + f"Batch deletion complete: {image_count} images, " + f"{deleted_s3_count} S3 objects deleted" + ) + + return { + "message": "Batch deleted successfully", + "batch_id": batch_id, + "deleted_images": image_count, + "deleted_s3_objects": deleted_s3_count, + } + + except Exception as e: + log.error(f"Failed to delete batch (Job: {job_id}): {str(e)}") raise @@ -253,6 +506,8 @@ class WorkerSettings: process_drone_images, process_all_drone_images, process_uploaded_image, + classify_image_batch, + delete_batch_images, ] queue_name = "default_queue" diff --git a/src/backend/app/db/db_models.py b/src/backend/app/db/db_models.py index 446036b3b..3b7243d50 100644 --- a/src/backend/app/db/db_models.py +++ b/src/backend/app/db/db_models.py @@ -260,6 +260,7 @@ class DbProjectImage(Base): filename = cast(str, Column(Text, nullable=False)) s3_key = cast(str, Column(Text, nullable=False)) hash_md5 = cast(str, Column(CHAR(32), nullable=False)) + batch_id = cast(str, Column(UUID(as_uuid=True), nullable=True)) location = cast(WKBElement, Column(Geometry("POINT", srid=4326), nullable=True)) exif = cast(dict, Column(JSONB, nullable=True)) uploaded_by = cast( @@ -269,8 +270,10 @@ class DbProjectImage(Base): classified_at = cast(datetime, Column(DateTime, nullable=True)) status = cast( ImageStatus, - Column(Enum(ImageStatus), default=ImageStatus.STAGED, nullable=False), + Column(Enum(ImageStatus), default=ImageStatus.UPLOADED, nullable=False), ) + rejection_reason = cast(str, Column(Text, nullable=True)) + sharpness_score = cast(float, Column(Float, nullable=True)) duplicate_of = cast( str, Column( @@ -291,9 +294,11 @@ class DbProjectImage(Base): Index("idx_project_images_project_id", "project_id"), 
Index("idx_project_images_task_id", "task_id"), Index("idx_project_images_status", "status"), + Index("idx_project_images_batch_id", "batch_id"), Index("idx_project_images_hash_md5", "hash_md5"), Index("idx_project_images_uploaded_by", "uploaded_by"), Index("idx_project_images_location", location, postgresql_using="gist"), + Index("idx_project_images_batch_status", "batch_id", "status"), {}, ) diff --git a/src/backend/app/images/image_logic.py b/src/backend/app/images/image_logic.py index ec385977b..826f42a46 100644 --- a/src/backend/app/images/image_logic.py +++ b/src/backend/app/images/image_logic.py @@ -2,13 +2,12 @@ import hashlib import json -from io import BytesIO +import tempfile from typing import Any, Optional from uuid import UUID +import exiftool from loguru import logger as log -from PIL import Image -from PIL.ExifTags import TAGS, GPSTAGS from psycopg import Connection from psycopg.rows import dict_row from psycopg.types.json import Json @@ -17,43 +16,39 @@ from app.models.enums import ImageStatus -def _convert_exif_value(value: Any) -> Any: - """Convert EXIF values to JSON-serializable types. +def _sanitize_string(s: str) -> str: + """Remove null characters and other problematic characters for PostgreSQL JSONB. - PIL's EXIF data contains special types like IFDRational, TiffImagePlugin.IFDRational - which need to be converted to standard Python types for JSON serialization. + PostgreSQL's JSONB type cannot store null characters (\\u0000). """ - # Handle IFDRational (PIL's rational number type) - if hasattr(value, "numerator") and hasattr(value, "denominator"): - # Convert rational to float - return ( - float(value.numerator) / float(value.denominator) - if value.denominator != 0 - else 0.0 - ) + # Remove null characters which PostgreSQL JSONB cannot handle + return s.replace("\x00", "").replace("\u0000", "") + + +def _sanitize_exif_value(value: Any) -> Any: + """Recursively sanitize EXIF values for PostgreSQL JSONB storage. - # Handle bytes + Removes null characters from strings and handles nested structures. + """ + if isinstance(value, str): + return _sanitize_string(value) if isinstance(value, bytes): try: return value.decode("utf-8", errors="ignore") - except UnicodeDecodeError: + except Exception: return str(value) # Handle tuples (convert to list for JSON) if isinstance(value, tuple): - return [_convert_exif_value(item) for item in value] + return [_sanitize_exif_value(item) for item in value] # Handle lists if isinstance(value, list): - return [_convert_exif_value(item) for item in value] + return [_sanitize_exif_value(item) for item in value] # Handle dicts if isinstance(value, dict): - return {k: _convert_exif_value(v) for k, v in value.items()} - - # Handle other non-serializable types - if not isinstance(value, (str, int, float, bool, type(None))): - return str(value) + return {k: _sanitize_exif_value(v) for k, v in value.items()} return value @@ -61,67 +56,70 @@ def _convert_exif_value(value: Any) -> Any: def extract_exif_data( image_bytes: bytes, ) -> tuple[Optional[dict[str, Any]], Optional[dict[str, float]]]: - """Extract EXIF data and GPS coordinates from image bytes. + """Extract EXIF data and GPS coordinates from image bytes using exiftool. + + This uses pyexiftool which provides comprehensive metadata extraction, + including DJI drone-specific XMP data (yaw, pitch, roll, gimbal angles, etc.). 
Args: image_bytes: Image file content as bytes Returns: Tuple of (exif_dict, location_dict) - - exif_dict: All EXIF data as a dictionary + - exif_dict: All EXIF/XMP data as a dictionary - location_dict: GPS coordinates as {"lat": float, "lon": float} or None """ try: - image = Image.open(BytesIO(image_bytes)) - exif_data = image._getexif() + # Write bytes to a temp file since exiftool works with files + with tempfile.NamedTemporaryFile(suffix=".jpg", delete=True) as tmp_file: + tmp_file.write(image_bytes) + tmp_file.flush() - if not exif_data: - log.warning("No EXIF data found in image") - return None, None + # Extract metadata using exiftool + with exiftool.ExifToolHelper() as et: + metadata_list = et.get_metadata(tmp_file.name) - # Convert EXIF data to readable format + if not metadata_list: + log.warning("No EXIF data found in image") + return None, None + + # exiftool returns a list, get first item + raw_metadata = metadata_list[0] + + # Clean up the metadata - remove SourceFile and sanitize values exif_dict = {} - gps_info = {} + for key, value in raw_metadata.items(): + # Skip internal exiftool fields + if key in ("SourceFile", "ExifTool:ExifToolVersion"): + continue - for tag_id, value in exif_data.items(): - tag_name = TAGS.get(tag_id, tag_id) + # Simplify key names by removing group prefix if desired + # e.g., "EXIF:Make" -> "Make" or keep full name for clarity + # We'll keep the simplified name for common fields + simple_key = key.split(":")[-1] if ":" in key else key - # Handle GPS data specially - if tag_name == "GPSInfo": - for gps_tag_id, gps_value in value.items(): - gps_tag_name = GPSTAGS.get(gps_tag_id, gps_tag_id) - # Convert GPS values to JSON-serializable types - gps_info[gps_tag_name] = _convert_exif_value(gps_value) - else: - # Convert all EXIF values to JSON-serializable types - exif_dict[tag_name] = _convert_exif_value(value) + # Sanitize the value for PostgreSQL JSONB + exif_dict[simple_key] = _sanitize_exif_value(value) # Extract GPS coordinates - location = None - if gps_info: - location = _parse_gps_coordinates(gps_info) - - # Add GPS info to EXIF dict - if gps_info: - exif_dict["GPSInfo"] = gps_info + location = _extract_gps_from_exif(exif_dict) # Log EXIF data for debugging log.debug(f"Extracted EXIF data with {len(exif_dict)} tags") - log.debug(f"EXIF sample: {list(exif_dict.keys())[:5]}") + log.debug(f"EXIF sample: {list(exif_dict.keys())[:10]}") # Verify EXIF data is JSON-serializable try: json.dumps(exif_dict) except TypeError as e: log.error(f"EXIF data contains non-serializable types: {e}") - log.error(f"Problematic EXIF keys: {exif_dict.keys()}") - # Find the problematic field - for key, value in exif_dict.items(): + # Find and fix problematic fields + for key, value in list(exif_dict.items()): try: json.dumps({key: value}) except TypeError: - log.error(f"Non-serializable field: {key} = {type(value)} {value}") - raise + log.warning(f"Removing non-serializable field: {key}") + del exif_dict[key] return exif_dict, location @@ -130,58 +128,102 @@ def extract_exif_data( return None, None -def _parse_gps_coordinates(gps_info: dict) -> Optional[dict[str, float]]: - """Parse GPS coordinates from EXIF GPS info. +def _extract_gps_from_exif(exif_dict: dict) -> Optional[dict[str, float]]: + """Extract GPS coordinates from exiftool metadata. + + Exiftool provides GPS coordinates in multiple formats. This function + handles the most common ones. 
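+
+    With pyexiftool's default arguments (which include -n), GPSLatitude and
+    GPSLongitude are typically already signed decimal degrees; the string-parsing
+    branches below are fallbacks for DMS-formatted values.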
Args: - gps_info: GPS info dictionary from EXIF + exif_dict: Exiftool metadata dictionary Returns: Dictionary with lat/lon or None """ try: - # Get latitude - lat_ref = gps_info.get("GPSLatitudeRef") - lat_data = gps_info.get("GPSLatitude") - - # Get longitude - lon_ref = gps_info.get("GPSLongitudeRef") - lon_data = gps_info.get("GPSLongitude") - - if not (lat_data and lon_data): - return None + # Try direct decimal coordinates first (exiftool often provides these) + lat = exif_dict.get("GPSLatitude") + lon = exif_dict.get("GPSLongitude") - # Convert to decimal degrees - lat = _convert_to_degrees(lat_data) - lon = _convert_to_degrees(lon_data) - - # Apply reference (N/S, E/W) - if lat_ref == "S": - lat = -lat - if lon_ref == "W": - lon = -lon + if lat is not None and lon is not None: + # Handle string format like "9 deg 16' 31.05\" N" + if isinstance(lat, str): + lat = _parse_gps_string(lat) + if isinstance(lon, str): + lon = _parse_gps_string(lon) + + if lat is not None and lon is not None: + return {"lat": float(lat), "lon": float(lon)} + + # Try composite GPS position + gps_position = exif_dict.get("GPSPosition") + if gps_position and isinstance(gps_position, str): + # Format: "lat, lon" or "lat lon" + parts = gps_position.replace(",", " ").split() + if len(parts) >= 2: + try: + lat = float(parts[0]) + lon = float(parts[1]) + return {"lat": lat, "lon": lon} + except ValueError: + pass - return {"lat": lat, "lon": lon} + return None except Exception as e: log.error(f"Error parsing GPS coordinates: {e}") return None -def _convert_to_degrees(value: tuple) -> float: - """Convert GPS coordinates from degrees/minutes/seconds to decimal degrees. +def _parse_gps_string(gps_str: str) -> Optional[float]: + """Parse GPS coordinate string to decimal degrees. 
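+
+    For example, "9 deg 16' 31.05\" N" converts to roughly 9.2753 decimal degrees.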
+ + Handles formats like: + - "9 deg 16' 31.05\" N" + - "9.123456" + - "-8.299743916666667" Args: - value: Tuple of (degrees, minutes, seconds) + gps_str: GPS coordinate string Returns: - Decimal degrees as float + Decimal degrees as float or None """ - degrees = float(value[0]) - minutes = float(value[1]) - seconds = float(value[2]) + try: + # Try direct float conversion first + return float(gps_str) + except ValueError: + pass + + try: + # Parse DMS format: "9 deg 16' 31.05\" N" + import re + + # Remove directional suffix and note it + direction = 1 + gps_str = gps_str.strip() + if gps_str.endswith(("S", "W")): + direction = -1 + gps_str = gps_str[:-1].strip() + elif gps_str.endswith(("N", "E")): + gps_str = gps_str[:-1].strip() + + # Extract degrees, minutes, seconds + match = re.match( + r"(\d+(?:\.\d+)?)\s*(?:deg|°)?\s*(\d+(?:\.\d+)?)?['\s]*(\d+(?:\.\d+)?)?", + gps_str, + ) + if match: + degrees = float(match.group(1)) + minutes = float(match.group(2)) if match.group(2) else 0 + seconds = float(match.group(3)) if match.group(3) else 0 + decimal = degrees + (minutes / 60.0) + (seconds / 3600.0) + return decimal * direction + + except Exception as e: + log.debug(f"Could not parse GPS string '{gps_str}': {e}") - return degrees + (minutes / 60.0) + (seconds / 3600.0) + return None def calculate_file_hash(file_content: bytes) -> str: @@ -219,14 +261,14 @@ async def create_project_image( sql = f""" INSERT INTO project_images ( project_id, task_id, filename, s3_key, hash_md5, - location, exif, uploaded_by, status + location, exif, uploaded_by, status, batch_id, thumbnail_url ) VALUES ( %(project_id)s, %(task_id)s, %(filename)s, %(s3_key)s, %(hash_md5)s, - {location_sql}, %(exif)s, %(uploaded_by)s, %(status)s + {location_sql}, %(exif)s, %(uploaded_by)s, %(status)s, %(batch_id)s, %(thumbnail_url)s ) RETURNING id, project_id, task_id, filename, s3_key, hash_md5, ST_AsGeoJSON(location)::json as location, exif, uploaded_by, - uploaded_at, classified_at, status, duplicate_of + uploaded_at, classified_at, status, duplicate_of, batch_id, rejection_reason, thumbnail_url """ async with db.cursor(row_factory=dict_row) as cur: @@ -241,6 +283,8 @@ async def create_project_image( "exif": Json(image_data.exif) if image_data.exif else None, "uploaded_by": str(image_data.uploaded_by), "status": image_data.status.value, + "batch_id": str(image_data.batch_id) if image_data.batch_id else None, + "thumbnail_url": image_data.thumbnail_url, }, ) result = await cur.fetchone() diff --git a/src/backend/app/images/image_schemas.py b/src/backend/app/images/image_schemas.py index 3eb2bd7ab..89d4677aa 100644 --- a/src/backend/app/images/image_schemas.py +++ b/src/backend/app/images/image_schemas.py @@ -19,6 +19,7 @@ class ProjectImageBase(BaseModel): None # Supports both {"lat": float, "lon": float} and GeoJSON ) exif: Optional[dict[str, Any]] = None + thumbnail_url: Optional[str] = None # S3 key for 200x200 thumbnail @field_validator("location", mode="before") @classmethod @@ -51,6 +52,7 @@ class ProjectImageCreate(ProjectImageBase): task_id: Optional[UUID] = None uploaded_by: str # User ID is a string (Google OAuth ID), not UUID status: ImageStatus = ImageStatus.STAGED + batch_id: Optional[UUID] = None # For grouping uploaded images together class ProjectImageUpdate(BaseModel): @@ -73,6 +75,8 @@ class ProjectImageOut(ProjectImageBase): classified_at: Optional[datetime] status: ImageStatus duplicate_of: Optional[UUID] + batch_id: Optional[UUID] + rejection_reason: Optional[str] = None class Config: """Pydantic 
config.""" diff --git a/src/backend/app/main.py b/src/backend/app/main.py index 88f7e7e7d..e326b2afe 100644 --- a/src/backend/app/main.py +++ b/src/backend/app/main.py @@ -18,7 +18,7 @@ from app.drones import drone_routes from app.gcp import gcp_routes from app.models.enums import HTTPStatus -from app.projects import project_routes +from app.projects import classification_routes, project_routes from app.tasks import task_routes from app.users import user_routes from app.waypoints import waypoint_routes @@ -107,6 +107,7 @@ def get_application() -> FastAPI: ) _app.include_router(drone_routes.router) _app.include_router(project_routes.router) + _app.include_router(classification_routes.router) _app.include_router(waypoint_routes.router) _app.include_router(user_routes.router) _app.include_router(task_routes.router) diff --git a/src/backend/app/migrations/versions/001_add_project_images_table.py b/src/backend/app/migrations/versions/001_add_project_images_table.py new file mode 100644 index 000000000..a2be21fbe --- /dev/null +++ b/src/backend/app/migrations/versions/001_add_project_images_table.py @@ -0,0 +1,152 @@ +"""add_project_images_table + +Revision ID: 001_project_images +Revises: fa5c74996273 +Create Date: 2025-10-26 00:00:00.000000 + +""" + +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql +from geoalchemy2 import Geometry + +# revision identifiers, used by Alembic. +revision: str = "001_project_images" +down_revision: Union[str, None] = "fa5c74996273" +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + # Create enum type for image status (only if it doesn't exist) + connection = op.get_bind() + result = connection.execute( + sa.text("SELECT EXISTS (SELECT 1 FROM pg_type WHERE typname = 'image_status')") + ).scalar() + + if not result: + image_status_enum = postgresql.ENUM( + "staged", + "classified", + "invalid_exif", + "unmatched", + "duplicate", + name="image_status", + create_type=False, + ) + image_status_enum.create(op.get_bind(), checkfirst=False) + + # Create project_images table + op.create_table( + "project_images", + sa.Column( + "id", + postgresql.UUID(as_uuid=True), + primary_key=True, + server_default=sa.text("gen_random_uuid()"), + ), + sa.Column( + "project_id", + postgresql.UUID(as_uuid=True), + sa.ForeignKey("projects.id", ondelete="CASCADE"), + nullable=False, + ), + sa.Column( + "task_id", + postgresql.UUID(as_uuid=True), + sa.ForeignKey("tasks.id", ondelete="SET NULL"), + nullable=True, + ), + sa.Column("filename", sa.Text(), nullable=False), + sa.Column("s3_key", sa.Text(), nullable=False), + sa.Column("hash_md5", sa.CHAR(32), nullable=False), + sa.Column("location", Geometry("POINT", srid=4326), nullable=True), + sa.Column("exif", postgresql.JSONB(), nullable=True), + sa.Column( + "uploaded_by", + sa.String(), + sa.ForeignKey("users.id", ondelete="SET NULL"), + nullable=True, + ), + sa.Column( + "uploaded_at", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("now()"), + nullable=False, + ), + sa.Column("classified_at", sa.TIMESTAMP(timezone=True), nullable=True), + sa.Column( + "status", + postgresql.ENUM( + "staged", + "classified", + "invalid_exif", + "unmatched", + "duplicate", + name="image_status", + create_type=False, + ), + server_default="staged", + nullable=False, + ), + sa.Column( + "duplicate_of", + postgresql.UUID(as_uuid=True), + sa.ForeignKey("project_images.id", ondelete="SET 
NULL"), + nullable=True, + ), + ) + + # Create indexes for better query performance (if they don't exist) + op.create_index( + "idx_project_images_project_id", + "project_images", + ["project_id"], + unique=False, + if_not_exists=True, + ) + op.create_index( + "idx_project_images_task_id", + "project_images", + ["task_id"], + unique=False, + if_not_exists=True, + ) + op.create_index( + "idx_project_images_status", + "project_images", + ["status"], + unique=False, + if_not_exists=True, + ) + op.create_index( + "idx_project_images_hash_md5", + "project_images", + ["hash_md5"], + unique=False, + if_not_exists=True, + ) + op.create_index( + "idx_project_images_uploaded_by", + "project_images", + ["uploaded_by"], + unique=False, + if_not_exists=True, + ) + + # Create spatial index on location (if it doesn't exist) + op.execute( + "CREATE INDEX IF NOT EXISTS idx_project_images_location ON project_images USING GIST (location);" + ) + + +def downgrade() -> None: + # Drop table and enum + op.drop_table("project_images") + + # Drop enum type + image_status_enum = postgresql.ENUM(name="image_status") + image_status_enum.drop(op.get_bind(), checkfirst=True) diff --git a/src/backend/app/migrations/versions/add_image_classification_fields.py b/src/backend/app/migrations/versions/add_image_classification_fields.py new file mode 100644 index 000000000..460178197 --- /dev/null +++ b/src/backend/app/migrations/versions/add_image_classification_fields.py @@ -0,0 +1,196 @@ +"""add image classification fields + +Revision ID: add_image_classification +Revises: 001_project_images +Create Date: 2025-01-06 + +""" + +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +revision = "add_image_classification" +down_revision = ("001_project_images", "7389d0d528c3") +branch_labels = None +depends_on = None + + +def upgrade(): + connection = op.get_bind() + + # Check if batch_id column exists + batch_id_exists = connection.execute( + sa.text(""" + SELECT EXISTS ( + SELECT 1 FROM information_schema.columns + WHERE table_name = 'project_images' + AND column_name = 'batch_id' + ) + """) + ).scalar() + + if not batch_id_exists: + op.add_column( + "project_images", + sa.Column("batch_id", postgresql.UUID(as_uuid=True), nullable=True), + ) + + # Check if rejection_reason column exists + rejection_reason_exists = connection.execute( + sa.text(""" + SELECT EXISTS ( + SELECT 1 FROM information_schema.columns + WHERE table_name = 'project_images' + AND column_name = 'rejection_reason' + ) + """) + ).scalar() + + if not rejection_reason_exists: + op.add_column( + "project_images", sa.Column("rejection_reason", sa.Text(), nullable=True) + ) + + # Check if sharpness_score column exists + sharpness_score_exists = connection.execute( + sa.text(""" + SELECT EXISTS ( + SELECT 1 FROM information_schema.columns + WHERE table_name = 'project_images' + AND column_name = 'sharpness_score' + ) + """) + ).scalar() + + if not sharpness_score_exists: + op.add_column( + "project_images", sa.Column("sharpness_score", sa.Float(), nullable=True) + ) + + # Create indexes if they don't exist + op.execute( + "CREATE INDEX IF NOT EXISTS idx_project_images_batch_id ON project_images (batch_id)" + ) + op.execute( + "CREATE INDEX IF NOT EXISTS idx_project_images_batch_status ON project_images (batch_id, status)" + ) + + # Check if we need to update the enum + # Get current enum type name + enum_type_name = connection.execute( + sa.text(""" + SELECT t.typname + FROM pg_type t + JOIN pg_class c ON c.reltype = t.oid + 
WHERE c.relname = 'project_images' + AND EXISTS ( + SELECT 1 FROM pg_attribute a + WHERE a.attrelid = c.oid + AND a.attname = 'status' + AND a.atttypid = t.oid + ) + UNION + SELECT t.typname + FROM pg_type t + JOIN pg_attribute a ON a.atttypid = t.oid + JOIN pg_class c ON a.attrelid = c.oid + WHERE c.relname = 'project_images' + AND a.attname = 'status' + LIMIT 1 + """) + ).scalar() + + # Check if 'uploaded' value exists in the enum + uploaded_exists = connection.execute( + sa.text(""" + SELECT EXISTS ( + SELECT 1 FROM pg_enum e + JOIN pg_type t ON e.enumtypid = t.oid + WHERE t.typname IN ('imagestatus', 'image_status') + AND e.enumlabel = 'uploaded' + ) + """) + ).scalar() + + if not uploaded_exists: + # Need to recreate the enum with new values + # First, drop the default + op.execute("ALTER TABLE project_images ALTER COLUMN status DROP DEFAULT") + + # Rename old enum and create new one + op.execute(f"ALTER TYPE {enum_type_name} RENAME TO {enum_type_name}_old") + op.execute(""" + CREATE TYPE imagestatus AS ENUM ( + 'staged', + 'uploaded', + 'classifying', + 'assigned', + 'rejected', + 'unmatched', + 'invalid_exif', + 'duplicate' + ) + """) + + # Convert column to new type + op.execute(""" + ALTER TABLE project_images + ALTER COLUMN status TYPE imagestatus + USING CASE status::text + WHEN 'classified' THEN 'assigned'::imagestatus + ELSE status::text::imagestatus + END + """) + + # Drop old enum and restore default + op.execute(f"DROP TYPE {enum_type_name}_old") + op.execute( + "ALTER TABLE project_images ALTER COLUMN status SET DEFAULT 'staged'::imagestatus" + ) + + +def downgrade(): + op.drop_index("idx_project_images_batch_status", table_name="project_images") + op.drop_index("idx_project_images_batch_id", table_name="project_images") + op.drop_column("project_images", "sharpness_score") + op.drop_column("project_images", "rejection_reason") + op.drop_column("project_images", "batch_id") + + op.execute("ALTER TYPE imagestatus RENAME TO imagestatus_new") + op.execute(""" + CREATE TYPE imagestatus AS ENUM ( + 'staged', + 'classified', + 'invalid_exif', + 'unmatched', + 'duplicate' + ) + """) + op.execute(""" + ALTER TABLE project_images + ALTER COLUMN status TYPE imagestatus + USING CASE status::text + WHEN 'uploaded' THEN 'staged'::imagestatus + WHEN 'assigned' THEN 'classified'::imagestatus + WHEN 'classifying' THEN 'staged'::imagestatus + WHEN 'rejected' THEN 'staged'::imagestatus + ELSE status::text::imagestatus + END + """) + op.execute("DROP TYPE imagestatus_new") + + op.alter_column( + "project_images", + "status", + existing_type=postgresql.ENUM( + "staged", + "classified", + "invalid_exif", + "unmatched", + "duplicate", + name="imagestatus", + ), + nullable=False, + server_default="staged", + ) diff --git a/src/backend/app/migrations/versions/add_thumbnail_url.py b/src/backend/app/migrations/versions/add_thumbnail_url.py new file mode 100644 index 000000000..4f2d659ac --- /dev/null +++ b/src/backend/app/migrations/versions/add_thumbnail_url.py @@ -0,0 +1,48 @@ +"""add thumbnail_url to project_images + +Revision ID: add_thumbnail_url +Revises: add_image_classification +Create Date: 2025-01-08 + +""" + +from alembic import op +import sqlalchemy as sa + +revision = "add_thumbnail_url" +down_revision = "add_image_classification" +branch_labels = None +depends_on = None + + +def upgrade(): + connection = op.get_bind() + + # Check if thumbnail_url column exists + thumbnail_url_exists = connection.execute( + sa.text(""" + SELECT EXISTS ( + SELECT 1 FROM information_schema.columns + 
WHERE table_name = 'project_images' + AND column_name = 'thumbnail_url' + ) + """) + ).scalar() + + if not thumbnail_url_exists: + op.add_column( + "project_images", + sa.Column("thumbnail_url", sa.Text(), nullable=True), + ) + + # Create index for efficient querying + op.execute( + "CREATE INDEX IF NOT EXISTS idx_project_images_thumbnail_url ON project_images (thumbnail_url) WHERE thumbnail_url IS NOT NULL" + ) + + +def downgrade(): + op.drop_index( + "idx_project_images_thumbnail_url", table_name="project_images", if_exists=True + ) + op.drop_column("project_images", "thumbnail_url") diff --git a/src/backend/app/models/enums.py b/src/backend/app/models/enums.py index 9c7deaa84..f1ac204b2 100644 --- a/src/backend/app/models/enums.py +++ b/src/backend/app/models/enums.py @@ -224,8 +224,13 @@ class OAMUploadStatus(StrEnum): class ImageStatus(StrEnum): """Enum to describe the status of uploaded project images.""" - STAGED = "staged" # uploaded but not yet classified - CLASSIFIED = "classified" # successfully classified and moved - INVALID_EXIF = "invalid_exif" # EXIF unreadable or missing - UNMATCHED = "unmatched" # no task intersects this photo - DUPLICATE = "duplicate" # hash collision within project + STAGED = ( + "staged" # Files uploaded but not yet committed (multipart upload in progress) + ) + UPLOADED = "uploaded" # Successfully uploaded to S3, pending classification + CLASSIFYING = "classifying" # Currently being classified + ASSIGNED = "assigned" # Assigned to a task after successful classification + REJECTED = "rejected" # Failed quality checks (blur, gimbal angle, etc.) + UNMATCHED = "unmatched" # GPS coordinates don't match any task boundary + INVALID_EXIF = "invalid_exif" # EXIF data is missing or unreadable + DUPLICATE = "duplicate" # Duplicate image (same hash as existing image) diff --git a/src/backend/app/projects/classification_routes.py b/src/backend/app/projects/classification_routes.py new file mode 100644 index 000000000..6e98df33d --- /dev/null +++ b/src/backend/app/projects/classification_routes.py @@ -0,0 +1,271 @@ +from datetime import datetime +from typing import Annotated, Optional +from uuid import UUID + +from arq import ArqRedis +from fastapi import APIRouter, Depends, HTTPException, Query +from loguru import logger as log +from psycopg import Connection +from pydantic import BaseModel + +from app.arq.tasks import get_redis_pool +from app.config import settings +from app.db import database +from app.models.enums import HTTPStatus +from app.projects.image_classification import ImageClassifier +from app.users.user_deps import login_required +from app.users.user_schemas import AuthUser + + +router = APIRouter( + prefix=f"{settings.API_PREFIX}/projects", + responses={404: {"description": "Not found"}}, +) + + +class StartClassificationRequest(BaseModel): + batch_id: UUID + project_id: UUID + + +class ClassificationStatusResponse(BaseModel): + batch_id: str + total: int + assigned: int + rejected: int + unmatched: int + invalid: int + images: list[dict] + + +@router.post("/{project_id}/classify-batch/", tags=["Image Classification"]) +async def start_batch_classification( + project_id: UUID, + batch_id: UUID, + db: Annotated[Connection, Depends(database.get_db)], + redis: Annotated[ArqRedis, Depends(get_redis_pool)], + user: Annotated[AuthUser, Depends(login_required)], +): + log.info( + f"Received classification request: project_id={project_id}, batch_id={batch_id}" + ) + + # First check if there are any images in the batch with status 'staged' + async with 
db.cursor() as cur: + await cur.execute( + """ + SELECT COUNT(*) as count + FROM project_images + WHERE batch_id = %(batch_id)s + AND project_id = %(project_id)s + AND status = 'staged' + """, + {"batch_id": str(batch_id), "project_id": str(project_id)}, + ) + result = await cur.fetchone() + image_count = result[0] if result else 0 + + log.info( + f"Found {image_count} staged images for project_id={project_id}, batch_id={batch_id}" + ) + + # If no images to classify, return early without creating a job + if image_count == 0: + log.warning( + f"No images to classify for batch: {batch_id}, project: {project_id}" + ) + return { + "message": "No images available for classification", + "batch_id": str(batch_id), + "image_count": 0, + } + + # Enqueue the classification job + job = await redis.enqueue_job( + "classify_image_batch", + str(project_id), + str(batch_id), + _queue_name="default_queue", + ) + + log.info( + f"Queued batch classification job: {job.job_id} for batch: {batch_id} ({image_count} images)" + ) + + return { + "message": "Batch classification started", + "job_id": job.job_id, + "batch_id": str(batch_id), + "image_count": image_count, + } + + +@router.get("/{project_id}/batch/{batch_id}/images/", tags=["Image Classification"]) +async def get_batch_images( + project_id: UUID, + batch_id: UUID, + db: Annotated[Connection, Depends(database.get_db)], + user: Annotated[AuthUser, Depends(login_required)], + last_timestamp: Optional[str] = Query( + None, description="ISO 8601 timestamp to get updates since" + ), +): + try: + timestamp = datetime.fromisoformat(last_timestamp) if last_timestamp else None + + images = await ImageClassifier.get_batch_images( + db, batch_id, project_id, timestamp + ) + + return {"batch_id": str(batch_id), "images": images, "count": len(images)} + + except Exception as e: + log.error(f"Failed to get batch images: {e}") + raise HTTPException( + status_code=HTTPStatus.BAD_REQUEST, + detail=f"Failed to retrieve batch images: {e}", + ) + + +@router.get("/{project_id}/batch/{batch_id}/status/", tags=["Image Classification"]) +async def get_batch_status( + project_id: UUID, + batch_id: UUID, + db: Annotated[Connection, Depends(database.get_db)], + user: Annotated[AuthUser, Depends(login_required)], +): + try: + query = """ + SELECT + status, + COUNT(*) as count + FROM project_images + WHERE batch_id = %(batch_id)s + AND project_id = %(project_id)s + GROUP BY status + """ + + async with db.cursor() as cur: + await cur.execute( + query, {"batch_id": str(batch_id), "project_id": str(project_id)} + ) + results = await cur.fetchall() + + status_counts = {status: count for status, count in results} + + return { + "batch_id": str(batch_id), + "total": sum(status_counts.values()), + "staged": status_counts.get("staged", 0), + "uploaded": status_counts.get("uploaded", 0), + "classifying": status_counts.get("classifying", 0), + "assigned": status_counts.get("assigned", 0), + "rejected": status_counts.get("rejected", 0), + "unmatched": status_counts.get("unmatched", 0), + "invalid_exif": status_counts.get("invalid_exif", 0), + "duplicate": status_counts.get("duplicate", 0), + } + + except Exception as e: + log.error(f"Failed to get batch status: {e}") + raise HTTPException( + status_code=HTTPStatus.BAD_REQUEST, + detail=f"Failed to retrieve batch status: {e}", + ) + + +@router.get("/{project_id}/batch/{batch_id}/review/", tags=["Image Classification"]) +async def get_batch_review( + project_id: UUID, + batch_id: UUID, + db: Annotated[Connection, Depends(database.get_db)], + 
user: Annotated[AuthUser, Depends(login_required)], +): + try: + review_data = await ImageClassifier.get_batch_review_data( + db, batch_id, project_id + ) + return review_data + + except Exception as e: + log.error(f"Failed to get batch review data: {e}") + raise HTTPException( + status_code=HTTPStatus.BAD_REQUEST, + detail=f"Failed to retrieve batch review data: {e}", + ) + + +@router.post("/{project_id}/images/{image_id}/accept/", tags=["Image Classification"]) +async def accept_image( + project_id: UUID, + image_id: UUID, + db: Annotated[Connection, Depends(database.get_db)], + user: Annotated[AuthUser, Depends(login_required)], +): + try: + result = await ImageClassifier.accept_image(db, image_id, project_id) + return result + + except ValueError as e: + raise HTTPException( + status_code=HTTPStatus.BAD_REQUEST, + detail=str(e), + ) + except Exception as e: + log.error(f"Failed to accept image: {e}") + raise HTTPException( + status_code=HTTPStatus.BAD_REQUEST, + detail=f"Failed to accept image: {e}", + ) + + +@router.get("/{project_id}/batch/{batch_id}/map-data/", tags=["Image Classification"]) +async def get_batch_map_data( + project_id: UUID, + batch_id: UUID, + db: Annotated[Connection, Depends(database.get_db)], + user: Annotated[AuthUser, Depends(login_required)], +): + """Get map data for batch review: task geometries and image point locations.""" + try: + map_data = await ImageClassifier.get_batch_map_data(db, batch_id, project_id) + return map_data + + except Exception as e: + log.error(f"Failed to get batch map data: {e}") + raise HTTPException( + status_code=HTTPStatus.BAD_REQUEST, + detail=f"Failed to retrieve batch map data: {e}", + ) + + +@router.delete("/{project_id}/batch/{batch_id}/", tags=["Image Classification"]) +async def delete_batch( + project_id: UUID, + batch_id: UUID, + redis: Annotated[ArqRedis, Depends(get_redis_pool)], + user: Annotated[AuthUser, Depends(login_required)], +): + try: + # Enqueue the deletion job to run in background + job = await redis.enqueue_job( + "delete_batch_images", + str(project_id), + str(batch_id), + _queue_name="default_queue", + ) + + log.info(f"Queued batch deletion job: {job.job_id} for batch: {batch_id}") + + return { + "message": "Batch deletion started", + "job_id": job.job_id, + "batch_id": str(batch_id), + } + + except Exception as e: + log.error(f"Failed to queue batch deletion: {e}") + raise HTTPException( + status_code=HTTPStatus.BAD_REQUEST, + detail=f"Failed to delete batch: {e}", + ) diff --git a/src/backend/app/projects/image_classification.py b/src/backend/app/projects/image_classification.py new file mode 100644 index 000000000..26b8d767f --- /dev/null +++ b/src/backend/app/projects/image_classification.py @@ -0,0 +1,867 @@ +import asyncio +import json +import uuid +from datetime import datetime, timedelta +from typing import Optional + +import cv2 +import numpy as np +from fastapi.concurrency import run_in_threadpool +from loguru import logger as log +from psycopg import Connection +from psycopg.rows import dict_row +from psycopg_pool import AsyncConnectionPool + +from app.config import settings +from app.models.enums import ImageStatus +from app.s3 import get_obj_from_bucket, s3_client +from app.utils import strip_presigned_url_for_local_dev + +# Number of concurrent workers for parallel classification +CLASSIFICATION_CONCURRENCY = 10 + + +MIN_GIMBAL_ANGLE = 10.0 +MIN_SHARPNESS_SCORE = 100.0 + + +class ImageClassifier: + @staticmethod + def calculate_sharpness(image_bytes: bytes) -> float: + """Calculate image 
sharpness using Laplacian variance method. + + The Laplacian variance method detects blur by computing the variance + of the Laplacian (second derivative) of the image. A low variance + indicates a blurry image, while a high variance indicates a sharp image. + + Args: + image_bytes: Raw image file bytes + + Returns: + float: Sharpness score (Laplacian variance). Higher = sharper. + Typical values: + - < 50: Very blurry + - 50-100: Moderately blurry + - 100-500: Acceptable sharpness + - > 500: Very sharp + + Raises: + ValueError: If image cannot be decoded + """ + try: + # Convert bytes to numpy array + nparr = np.frombuffer(image_bytes, np.uint8) + + # Decode image + img = cv2.imdecode(nparr, cv2.IMREAD_COLOR) + + if img is None: + raise ValueError("Failed to decode image") + + # Convert to grayscale for better edge detection + gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY) + + # Calculate Laplacian variance + laplacian = cv2.Laplacian(gray, cv2.CV_64F) + variance = laplacian.var() + + log.debug(f"Calculated sharpness score: {variance:.2f}") + + return float(variance) + + except Exception as e: + log.error(f"Error calculating sharpness: {e}") + raise ValueError(f"Failed to calculate sharpness: {e}") from e + + @staticmethod + async def find_matching_task( + db: Connection, project_id: uuid.UUID, latitude: float, longitude: float + ) -> Optional[uuid.UUID]: + query = """ + SELECT id + FROM tasks + WHERE project_id = %(project_id)s + AND ST_Intersects( + outline, + ST_SetSRID(ST_MakePoint(%(longitude)s, %(latitude)s), 4326) + ) + LIMIT 1; + """ + + async with db.cursor(row_factory=dict_row) as cur: + await cur.execute( + query, + { + "project_id": str(project_id), + "latitude": latitude, + "longitude": longitude, + }, + ) + result = await cur.fetchone() + if not result: + return None + task_id = result["id"] + return task_id if isinstance(task_id, uuid.UUID) else uuid.UUID(task_id) + + @staticmethod + async def classify_single_image( + db: Connection, + image_id: uuid.UUID, + project_id: uuid.UUID, + ) -> dict: + async with db.cursor(row_factory=dict_row) as cur: + await cur.execute( + """ + SELECT id, exif, location, status, s3_key + FROM project_images + WHERE id = %(image_id)s AND project_id = %(project_id)s + """, + {"image_id": str(image_id), "project_id": str(project_id)}, + ) + image = await cur.fetchone() + + if not image: + return { + "image_id": str(image_id), + "status": "error", + "message": "Image not found", + } + + logs = [] + issues = [] + exif_data = image.get("exif") or {} + s3_key = image.get("s3_key") + sharpness_score = None + + # Download image first for quality analysis + image_bytes = None + try: + log.info(f"Downloading image from S3: {s3_key}") + file_obj = await run_in_threadpool( + get_obj_from_bucket, settings.S3_BUCKET_NAME, s3_key + ) + image_bytes = file_obj.read() + logs.append( + { + "action": "Image Download", + "details": f"Downloaded {len(image_bytes) / 1024 / 1024:.2f} MB", + "status": "success", + } + ) + except Exception as e: + log.error(f"Failed to download image from S3: {e}") + logs.append( + { + "action": "Image Download", + "details": f"Failed to download: {str(e)}", + "status": "warning", + } + ) + + # Check sharpness first (if image bytes available) + if image_bytes: + try: + sharpness_score = ImageClassifier.calculate_sharpness(image_bytes) + if sharpness_score < MIN_SHARPNESS_SCORE: + issues.append( + f"Blurry (sharpness: {sharpness_score:.1f}, min: {MIN_SHARPNESS_SCORE})" + ) + logs.append( + { + "action": "Sharpness Check", + "details": 
f"Score: {sharpness_score:.1f} - FAILED", + "status": "error", + } + ) + else: + logs.append( + { + "action": "Sharpness Check", + "details": f"Score: {sharpness_score:.1f} - Passed", + "status": "success", + } + ) + except Exception as e: + log.warning(f"Could not calculate sharpness: {e}") + logs.append( + { + "action": "Sharpness Check", + "details": f"Could not analyze: {str(e)}", + "status": "warning", + } + ) + + # Check EXIF data + if not exif_data: + issues.append("Image is missing camera information (EXIF data)") + logs.append( + {"action": "EXIF Check", "details": "No EXIF data", "status": "error"} + ) + else: + logs.append( + { + "action": "EXIF Check", + "details": "EXIF data present", + "status": "success", + } + ) + + # Extract GPS coordinates from stored EXIF data + latitude = exif_data.get("GPSLatitude") + longitude = exif_data.get("GPSLongitude") + + if latitude is None or longitude is None: + issues.append("Image is missing GPS location data") + logs.append( + { + "action": "GPS Check", + "details": "No coordinates found", + "status": "error", + } + ) + else: + logs.append( + { + "action": "GPS Check", + "details": f"Location: {latitude:.4f}, {longitude:.4f}", + "status": "success", + } + ) + + # Parse UserComment for drone metadata (pitch, yaw, etc.) + user_comment = exif_data.get("UserComment") + drone_metadata = {} + if isinstance(user_comment, str): + try: + drone_metadata = json.loads(user_comment) + except (json.JSONDecodeError, TypeError): + log.debug("Could not parse UserComment as JSON") + + # Merge drone metadata for quality checks + quality_check_data = {**exif_data, **drone_metadata} + + # Check gimbal angle - look for DJI XMP field or UserComment pitch + gimbal_angle = ( + quality_check_data.get("GimbalPitchDegree") + or quality_check_data.get("FlightPitchDegree") + or quality_check_data.get("pitch") + ) + if gimbal_angle is not None and gimbal_angle > MIN_GIMBAL_ANGLE: + issues.append( + f"Camera angle is too tilted ({gimbal_angle:.0f}°). Please capture images pointing straight down." 
+ ) + logs.append( + { + "action": "Gimbal Check", + "details": f"Angle: {gimbal_angle:.1f}° - FAILED", + "status": "error", + } + ) + elif gimbal_angle is not None: + logs.append( + { + "action": "Gimbal Check", + "details": f"Angle: {gimbal_angle:.1f}° - Passed", + "status": "success", + } + ) + + # If there are any issues, reject the image with all reasons + if issues: + # Determine the primary status based on issue types + has_exif_issue = any( + "camera information" in issue.lower() or "gps" in issue.lower() + for issue in issues + ) + rejection_reason = "; ".join(issues) + + status = ( + ImageStatus.INVALID_EXIF if has_exif_issue else ImageStatus.REJECTED + ) + + await ImageClassifier._update_image_status( + db, image_id, status, rejection_reason, sharpness_score + ) + logs.append( + {"action": "REJECTED", "details": rejection_reason, "status": "error"} + ) + return { + "image_id": str(image_id), + "status": status, + "reason": rejection_reason, + "sharpness_score": sharpness_score, + "logs": logs, + } + + # All checks passed + quality_details = ( + f"Gimbal: {gimbal_angle:.1f}°" if gimbal_angle else "Gimbal: N/A" + ) + if sharpness_score is not None: + quality_details += f", Sharpness: {sharpness_score:.1f}" + quality_details += " - All checks passed" + + logs.append( + {"action": "Quality Check", "details": quality_details, "status": "success"} + ) + + task_id = await ImageClassifier.find_matching_task( + db, project_id, latitude, longitude + ) + + if not task_id: + await ImageClassifier._update_image_status( + db, + image_id, + ImageStatus.UNMATCHED, + "Image location is outside of all task areas", + ) + logs.append( + { + "action": "UNMATCHED", + "details": "Image location is outside of all task areas", + "status": "warning", + } + ) + return { + "image_id": str(image_id), + "status": ImageStatus.UNMATCHED, + "logs": logs, + } + + await ImageClassifier._assign_image_to_task( + db, image_id, task_id, sharpness_score + ) + + logs.append( + { + "action": "ASSIGNED", + "details": f"Matched to task {str(task_id)[:8]}...", + "status": "success", + } + ) + + return { + "image_id": str(image_id), + "status": ImageStatus.ASSIGNED, + "task_id": str(task_id), + "sharpness_score": sharpness_score, + "logs": logs, + } + + @staticmethod + async def _update_image_status( + db: Connection, + image_id: uuid.UUID, + status: ImageStatus, + rejection_reason: Optional[str] = None, + sharpness_score: Optional[float] = None, + ): + query = """ + UPDATE project_images + SET status = %(status)s, + rejection_reason = %(rejection_reason)s, + sharpness_score = %(sharpness_score)s, + classified_at = %(classified_at)s + WHERE id = %(image_id)s + """ + + async with db.cursor() as cur: + await cur.execute( + query, + { + "image_id": str(image_id), + "status": status.value, + "rejection_reason": rejection_reason, + "sharpness_score": sharpness_score, + "classified_at": datetime.utcnow(), + }, + ) + + @staticmethod + async def _assign_image_to_task( + db: Connection, + image_id: uuid.UUID, + task_id: uuid.UUID, + sharpness_score: Optional[float] = None, + ): + query = """ + UPDATE project_images + SET status = %(status)s, + task_id = %(task_id)s, + sharpness_score = %(sharpness_score)s, + classified_at = %(classified_at)s + WHERE id = %(image_id)s + """ + + async with db.cursor() as cur: + await cur.execute( + query, + { + "image_id": str(image_id), + "status": ImageStatus.ASSIGNED.value, + "task_id": str(task_id), + "sharpness_score": sharpness_score, + "classified_at": datetime.utcnow(), + }, + ) + + 
@staticmethod + async def classify_batch( + db_pool: AsyncConnectionPool, batch_id: uuid.UUID, project_id: uuid.UUID + ) -> dict: + # Use a connection just to fetch the list of images + async with db_pool.connection() as db: + async with db.cursor(row_factory=dict_row) as cur: + await cur.execute( + """ + SELECT id + FROM project_images + WHERE batch_id = %(batch_id)s + AND project_id = %(project_id)s + AND status = %(status)s + ORDER BY uploaded_at + """, + { + "batch_id": str(batch_id), + "project_id": str(project_id), + "status": ImageStatus.STAGED.value, + }, + ) + images = await cur.fetchall() + + if not images: + return { + "batch_id": str(batch_id), + "message": "No images to classify", + "total": 0, + "assigned": 0, + "rejected": 0, + "unmatched": 0, + "invalid": 0, + "images": [], + } + + results = { + "batch_id": str(batch_id), + "total": len(images), + "assigned": 0, + "rejected": 0, + "unmatched": 0, + "invalid": 0, + "images": [], + } + + # Use a semaphore to limit concurrency + semaphore = asyncio.Semaphore(CLASSIFICATION_CONCURRENCY) + # Lock for thread-safe counter updates + results_lock = asyncio.Lock() + + async def classify_with_commit(image_record: dict) -> dict: + """Classify a single image with its own connection for proper isolation.""" + async with semaphore: + image_id = ( + image_record["id"] + if isinstance(image_record["id"], uuid.UUID) + else uuid.UUID(image_record["id"]) + ) + + # Each worker gets its own connection from the pool + async with db_pool.connection() as conn: + # Update status to classifying + async with conn.cursor() as cur: + await cur.execute( + "UPDATE project_images SET status = %(status)s WHERE id = %(image_id)s", + { + "status": ImageStatus.CLASSIFYING.value, + "image_id": str(image_id), + }, + ) + # Commit the classifying status so frontend can see progress + await conn.commit() + + # Perform classification + result = await ImageClassifier.classify_single_image( + conn, image_id, project_id + ) + + # Commit the classification result immediately + await conn.commit() + + # Update counters thread-safely + async with results_lock: + if result["status"] == ImageStatus.ASSIGNED: + results["assigned"] += 1 + elif result["status"] == ImageStatus.REJECTED: + results["rejected"] += 1 + elif result["status"] == ImageStatus.UNMATCHED: + results["unmatched"] += 1 + elif result["status"] == ImageStatus.INVALID_EXIF: + results["invalid"] += 1 + results["images"].append(result) + + return result + + # Process all images in parallel with controlled concurrency + tasks = [classify_with_commit(image) for image in images] + await asyncio.gather(*tasks, return_exceptions=True) + + log.info( + f"Parallel classification complete for batch {batch_id}: " + f"{results['assigned']} assigned, {results['rejected']} rejected, " + f"{results['unmatched']} unmatched, {results['invalid']} invalid" + ) + + return results + + @staticmethod + async def get_batch_images( + db: Connection, + batch_id: uuid.UUID, + project_id: uuid.UUID, + last_timestamp: Optional[datetime] = None, + ) -> list[dict]: + query = """ + SELECT + id, + filename, + s3_key, + thumbnail_url, + status, + rejection_reason, + task_id, + classified_at, + uploaded_at, + exif, + ST_X(location::geometry) as longitude, + ST_Y(location::geometry) as latitude + FROM project_images + WHERE batch_id = %(batch_id)s + AND project_id = %(project_id)s + """ + + params = {"batch_id": str(batch_id), "project_id": str(project_id)} + + if last_timestamp: + query += " AND classified_at > %(last_timestamp)s" + 
params["last_timestamp"] = last_timestamp + + query += " ORDER BY uploaded_at" + + async with db.cursor(row_factory=dict_row) as cur: + await cur.execute(query, params) + images = await cur.fetchall() + + # Generate presigned URLs for each image (keep signature for authentication) + for image in images: + if image.get("s3_key"): + client = s3_client() + url = client.presigned_get_object( + settings.S3_BUCKET_NAME, image["s3_key"], expires=timedelta(hours=1) + ) + # Keep presigned params (strip_presign=False) so signature is preserved + image["url"] = strip_presigned_url_for_local_dev( + url, strip_presign=False + ) + + # Generate presigned URL for thumbnail if available + if image.get("thumbnail_url"): + client = s3_client() + thumbnail_presigned = client.presigned_get_object( + settings.S3_BUCKET_NAME, + image["thumbnail_url"], + expires=timedelta(hours=1), + ) + image["thumbnail_url"] = strip_presigned_url_for_local_dev( + thumbnail_presigned, strip_presign=False + ) + + # Add has_gps field for frontend display + image["has_gps"] = ( + image.get("latitude") is not None and image.get("longitude") is not None + ) + + return images + + @staticmethod + async def get_batch_review_data( + db: Connection, + batch_id: uuid.UUID, + project_id: uuid.UUID, + ) -> dict: + query = """ + SELECT + pi.task_id, + t.project_task_index, + COUNT(*) as image_count, + json_agg( + json_build_object( + 'id', pi.id, + 'filename', pi.filename, + 's3_key', pi.s3_key, + 'thumbnail_url', pi.thumbnail_url, + 'status', pi.status, + 'rejection_reason', pi.rejection_reason, + 'uploaded_at', pi.uploaded_at + ) ORDER BY pi.uploaded_at + ) as images + FROM project_images pi + LEFT JOIN tasks t ON pi.task_id = t.id + WHERE pi.batch_id = %(batch_id)s + AND pi.project_id = %(project_id)s + AND pi.status IN ('assigned', 'rejected', 'invalid_exif') + GROUP BY pi.task_id, t.project_task_index + ORDER BY t.project_task_index NULLS LAST + """ + + async with db.cursor(row_factory=dict_row) as cur: + await cur.execute( + query, {"batch_id": str(batch_id), "project_id": str(project_id)} + ) + task_groups = await cur.fetchall() + + # Generate presigned URLs for thumbnails + for group in task_groups: + for image in group["images"]: + if image.get("thumbnail_url"): + client = s3_client() + thumbnail_presigned = client.presigned_get_object( + settings.S3_BUCKET_NAME, + image["thumbnail_url"], + expires=timedelta(hours=1), + ) + image["thumbnail_url"] = strip_presigned_url_for_local_dev( + thumbnail_presigned, strip_presign=False + ) + + # Generate presigned URL for full image + if image.get("s3_key"): + client = s3_client() + url = client.presigned_get_object( + settings.S3_BUCKET_NAME, + image["s3_key"], + expires=timedelta(hours=1), + ) + image["url"] = strip_presigned_url_for_local_dev( + url, strip_presign=False + ) + + return { + "batch_id": str(batch_id), + "task_groups": task_groups, + "total_tasks": len(task_groups), + "total_images": sum(group["image_count"] for group in task_groups), + } + + @staticmethod + async def accept_image( + db: Connection, + image_id: uuid.UUID, + project_id: uuid.UUID, + ) -> dict: + # Get image location + query = """ + SELECT + ST_X(location::geometry) as longitude, + ST_Y(location::geometry) as latitude + FROM project_images + WHERE id = %(image_id)s + AND project_id = %(project_id)s + """ + + async with db.cursor(row_factory=dict_row) as cur: + await cur.execute( + query, {"image_id": str(image_id), "project_id": str(project_id)} + ) + result = await cur.fetchone() + + if not result: + raise 
ValueError("Image not found") + + latitude = result.get("latitude") + longitude = result.get("longitude") + + if latitude is None or longitude is None: + raise ValueError("Image has no GPS coordinates") + + # Find matching task + task_id = await ImageClassifier.find_matching_task( + db, project_id, latitude, longitude + ) + + if not task_id: + # Update status to unmatched instead of throwing an error + await ImageClassifier._update_image_status( + db, + image_id, + ImageStatus.UNMATCHED, + "Image location is outside of all task areas", + ) + return { + "message": "Image does not fall within any task boundary", + "image_id": str(image_id), + "status": "unmatched", + "task_id": None, + } + + # Update image status to assigned + await ImageClassifier._assign_image_to_task(db, image_id, task_id) + + return { + "message": "Image accepted successfully", + "image_id": str(image_id), + "status": "assigned", + "task_id": str(task_id), + } + + @staticmethod + async def delete_batch( + db: Connection, + batch_id: uuid.UUID, + project_id: uuid.UUID, + ) -> dict: + # Get count of images to be deleted + count_query = """ + SELECT COUNT(*) as count + FROM project_images + WHERE batch_id = %(batch_id)s + AND project_id = %(project_id)s + """ + + async with db.cursor(row_factory=dict_row) as cur: + await cur.execute( + count_query, + {"batch_id": str(batch_id), "project_id": str(project_id)}, + ) + result = await cur.fetchone() + image_count = result["count"] if result else 0 + + # Delete all images in the batch + delete_query = """ + DELETE FROM project_images + WHERE batch_id = %(batch_id)s + AND project_id = %(project_id)s + """ + + async with db.cursor() as cur: + await cur.execute( + delete_query, + {"batch_id": str(batch_id), "project_id": str(project_id)}, + ) + + log.info( + f"Deleted {image_count} images from batch {batch_id} in project {project_id}" + ) + + return { + "message": "Batch deleted successfully", + "batch_id": str(batch_id), + "deleted_count": image_count, + } + + @staticmethod + async def get_batch_map_data( + db: Connection, + batch_id: uuid.UUID, + project_id: uuid.UUID, + ) -> dict: + """Get map data for batch review visualization. + + Returns task geometries and image point locations as GeoJSON. 
+ """ + # Get all task IDs that have images in this batch + task_ids_query = """ + SELECT DISTINCT task_id + FROM project_images + WHERE batch_id = %(batch_id)s + AND project_id = %(project_id)s + AND task_id IS NOT NULL + """ + + async with db.cursor() as cur: + await cur.execute( + task_ids_query, + {"batch_id": str(batch_id), "project_id": str(project_id)}, + ) + task_rows = await cur.fetchall() + + task_ids = [str(row[0]) for row in task_rows if row[0]] + + # Get task geometries as GeoJSON + tasks_geojson = {"type": "FeatureCollection", "features": []} + if task_ids: + tasks_query = """ + SELECT + id, + project_task_index, + ST_AsGeoJSON(outline)::json as geometry + FROM tasks + WHERE id = ANY(%(task_ids)s::uuid[]) + """ + + async with db.cursor(row_factory=dict_row) as cur: + await cur.execute(tasks_query, {"task_ids": task_ids}) + tasks = await cur.fetchall() + + for task in tasks: + tasks_geojson["features"].append( + { + "type": "Feature", + "geometry": task["geometry"], + "properties": { + "id": str(task["id"]), + "task_index": task["project_task_index"], + }, + } + ) + + # Get image locations as GeoJSON points + images_query = """ + SELECT + id, + filename, + status, + task_id, + ST_X(location::geometry) as longitude, + ST_Y(location::geometry) as latitude + FROM project_images + WHERE batch_id = %(batch_id)s + AND project_id = %(project_id)s + AND location IS NOT NULL + """ + + async with db.cursor(row_factory=dict_row) as cur: + await cur.execute( + images_query, + {"batch_id": str(batch_id), "project_id": str(project_id)}, + ) + images = await cur.fetchall() + + images_geojson = { + "type": "FeatureCollection", + "features": [ + { + "type": "Feature", + "geometry": { + "type": "Point", + "coordinates": [img["longitude"], img["latitude"]], + }, + "properties": { + "id": str(img["id"]), + "filename": img["filename"], + "status": img["status"], + "task_id": str(img["task_id"]) if img["task_id"] else None, + }, + } + for img in images + if img["longitude"] is not None and img["latitude"] is not None + ], + } + + return { + "batch_id": str(batch_id), + "tasks": tasks_geojson, + "images": images_geojson, + "total_tasks": len(tasks_geojson["features"]), + "total_images": len(images_geojson["features"]), + } diff --git a/src/backend/app/projects/project_routes.py b/src/backend/app/projects/project_routes.py index 1fb2fb5e9..a4a6457e8 100644 --- a/src/backend/app/projects/project_routes.py +++ b/src/backend/app/projects/project_routes.py @@ -867,13 +867,17 @@ async def complete_upload( # NOTE: Each image is queued individually (not batched) to isolate failures. # If one image has corrupt EXIF data, others aren't affected. Redis/ARQ should # handle thousands of jobs, but monitor performance if queue length grows significantly. + # NOTE: _defer_by delays job execution by 2 seconds to allow S3/MinIO eventual + # consistency - the file may not be immediately readable after multipart upload completes. 
job = await redis.enqueue_job( "process_uploaded_image", str(data.project_id), data.file_key, data.filename, str(user.id), + str(data.batch_id) if data.batch_id else None, _queue_name="default_queue", + _defer_by=timedelta(seconds=2), ) log.info(f"Queued image processing job: {job.job_id} for file: {data.filename}") diff --git a/src/backend/app/projects/project_schemas.py b/src/backend/app/projects/project_schemas.py index a005e80d4..6680e81b1 100644 --- a/src/backend/app/projects/project_schemas.py +++ b/src/backend/app/projects/project_schemas.py @@ -743,6 +743,7 @@ class CompleteMultipartUploadRequest(BaseModel): parts: List[dict] # List of {"PartNumber": int, "ETag": str} project_id: uuid.UUID filename: str + batch_id: Optional[uuid.UUID] = None # Optional batch ID for grouping uploads class AbortMultipartUploadRequest(BaseModel): diff --git a/src/backend/app/s3.py b/src/backend/app/s3.py index 920078aeb..32e2bcf8f 100644 --- a/src/backend/app/s3.py +++ b/src/backend/app/s3.py @@ -3,6 +3,7 @@ from typing import Any from urllib.parse import urljoin +from fastapi.concurrency import run_in_threadpool from loguru import logger as log from minio import Minio from minio.commonconfig import CopySource @@ -15,7 +16,7 @@ def s3_client(): """Return the initialised MinIO client with credentials.""" - endpoint = settings.S3_ENDPOINT + endpoint = settings.S3_DOWNLOAD_ROOT minio_url, is_secure = is_connection_secure(endpoint) log.debug(f"Connecting to MinIO server at {minio_url} (secure={is_secure})") @@ -169,6 +170,23 @@ def get_obj_from_bucket(bucket_name: str, s3_path: str) -> BytesIO: response.release_conn() +async def async_get_obj_from_bucket(bucket_name: str, s3_path: str) -> BytesIO: + """Download an S3 object from a bucket and return it as a BytesIO object. + + This async wrapper uses run_in_threadpool to handle the synchronous MinIO client + without blocking the event loop. + + Args: + bucket_name (str): The name of the S3 bucket. + s3_path (str): The path to the S3 object in the bucket. + + Returns: + BytesIO: A BytesIO object containing the content of the downloaded S3 object. + """ + # Use run_in_threadpool to handle the synchronous operation + return await run_in_threadpool(get_obj_from_bucket, bucket_name, s3_path) + + def get_image_dir_url(bucket_name: str, image_dir: str): """Generate the full URL for the image directory in an S3 bucket. 
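The dependency changes that follow add `opencv-python-headless` and `numpy` to the backend (alongside `pyexiftool`). The diff never shows how the `sharpness_score` stored during classification is computed; one common approach for this kind of blur check is the variance of the Laplacian, sketched here under that assumption (the function name is illustrative only):

```python
import cv2
import numpy as np


def estimate_sharpness(image_bytes: bytes) -> float:
    """Illustrative sketch: variance of the Laplacian as a blur metric (higher = sharper)."""
    buffer = np.frombuffer(image_bytes, dtype=np.uint8)
    gray = cv2.imdecode(buffer, cv2.IMREAD_GRAYSCALE)
    if gray is None:
        raise ValueError("Could not decode image for sharpness check")
    return float(cv2.Laplacian(gray, cv2.CV_64F).var())
```

Whatever metric is actually used, it only needs to produce the float that `_update_image_status` and `_assign_image_to_task` persist in `project_images.sharpness_score`.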
diff --git a/src/backend/pyproject.toml b/src/backend/pyproject.toml index e970e65a7..9967efbae 100644 --- a/src/backend/pyproject.toml +++ b/src/backend/pyproject.toml @@ -22,6 +22,7 @@ dependencies = [ "loguru==0.7.2", "minio==7.2.0", "pillow==10.0.0", + "pyexiftool==0.5.6", "pyjwt==2.8.0", "alembic==1.13.1", "itsdangerous==2.2.0", @@ -37,6 +38,8 @@ dependencies = [ "asgi-lifespan==2.1.0", "arq==0.26.3", "redis==5.2.1", + "opencv-python-headless==4.10.0.84", + "numpy>=1.26.0,<2.0.0", "drone-flightplan", ] requires-python = ">=3.11" @@ -68,6 +71,9 @@ docs = [ "mdx-truly-sane-lists==1.3", ] monitoring = [ + "sentry-sdk[opentelemetry]==2.17.0", + #"opentelemetry-sdk==1.27.0", + #"opentelemetry-api==1.27.0", "sentry-sdk[opentelemetry-otlp]>=2.46.0", "opentelemetry-instrumentation-fastapi>=0.59b0", "opentelemetry-instrumentation-logging>=0.59b0", diff --git a/src/backend/uv.lock b/src/backend/uv.lock index 839809b60..2c5fe7f83 100644 --- a/src/backend/uv.lock +++ b/src/backend/uv.lock @@ -2,9 +2,15 @@ version = 1 revision = 3 requires-python = ">=3.11" resolution-markers = [ - "python_full_version >= '3.14'", - "python_full_version == '3.13.*'", - "python_full_version < '3.13'", + "python_full_version >= '3.13' and sys_platform == 'darwin'", + "python_full_version == '3.12.*' and sys_platform == 'darwin'", + "python_full_version >= '3.13' and platform_machine == 'aarch64' and sys_platform == 'linux'", + "python_full_version == '3.12.*' and platform_machine == 'aarch64' and sys_platform == 'linux'", + "(python_full_version >= '3.13' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.13' and sys_platform != 'darwin' and sys_platform != 'linux')", + "(python_full_version == '3.12.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.12.*' and sys_platform != 'darwin' and sys_platform != 'linux')", + "python_full_version < '3.12' and sys_platform == 'darwin'", + "python_full_version < '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux'", + "(python_full_version < '3.12' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.12' and sys_platform != 'darwin' and sys_platform != 'linux')", ] [manifest] @@ -424,7 +430,7 @@ wheels = [ [[package]] name = "commitizen" -version = "4.9.1" +version = "4.10.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "argcomplete" }, @@ -440,9 +446,9 @@ dependencies = [ { name = "termcolor" }, { name = "tomlkit" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/77/19/927ac5b0eabb9451e2d5bb45b30813915c9a1260713b5b68eeb31358ea23/commitizen-4.9.1.tar.gz", hash = "sha256:b076b24657718f7a35b1068f2083bd39b4065d250164a1398d1dac235c51753b", size = 56610, upload-time = "2025-09-10T14:19:33.746Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ab/b3/cc29794fc2ecd7aa7353105773ca18ecd761c3ba5b38879bd106b3fc8840/commitizen-4.10.0.tar.gz", hash = "sha256:cc58067403b9eff21d0423b3d9a29bda05254bd51ad5bdd1fd0594bff31277e1", size = 56820, upload-time = "2025-11-10T14:08:49.365Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/cf/49/577035b841442fe031b017027c3d99278b46104d227f0353c69dbbe55148/commitizen-4.9.1-py3-none-any.whl", hash = "sha256:4241b2ecae97b8109af8e587c36bc3b805a09b9a311084d159098e12d6ead497", size = 80624, upload-time = "2025-09-10T14:19:32.102Z" }, + { url = 
"https://files.pythonhosted.org/packages/b3/5d/2bd8881737d6a5652ae3ebc37736893b9a7425f0eb16e605d1ff2957267e/commitizen-4.10.0-py3-none-any.whl", hash = "sha256:3fe56c168b30b30b84b8329cba6b132e77b4eb304a5460bbe2186aad0f78c966", size = 81269, upload-time = "2025-11-10T14:08:48.001Z" }, ] [[package]] @@ -665,10 +671,13 @@ dependencies = [ { name = "jinja2" }, { name = "loguru" }, { name = "minio" }, + { name = "numpy" }, + { name = "opencv-python-headless" }, { name = "pillow" }, { name = "psycopg", extra = ["c", "pool"] }, { name = "pydantic", extra = ["email"] }, { name = "pydantic-settings" }, + { name = "pyexiftool" }, { name = "pyjwt" }, { name = "pyodm" }, { name = "python-multipart" }, @@ -734,10 +743,13 @@ requires-dist = [ { name = "jinja2", specifier = "==3.1.4" }, { name = "loguru", specifier = "==0.7.2" }, { name = "minio", specifier = "==7.2.0" }, + { name = "numpy", specifier = ">=1.26.0,<2.0.0" }, + { name = "opencv-python-headless", specifier = "==4.10.0.84" }, { name = "pillow", specifier = "==10.0.0" }, { name = "psycopg", extras = ["c", "pool"], specifier = "==3.2.1" }, { name = "pydantic", extras = ["email"], specifier = "==2.8.2" }, { name = "pydantic-settings", specifier = "==2.4.0" }, + { name = "pyexiftool", specifier = "==0.5.6" }, { name = "pyjwt", specifier = "==2.8.0" }, { name = "pyodm", specifier = "==1.5.11" }, { name = "python-multipart", specifier = "==0.0.9" }, @@ -767,11 +779,11 @@ docs = [ { name = "python-dotenv", specifier = "==1.0.1" }, ] monitoring = [ - { name = "opentelemetry-instrumentation-fastapi", specifier = ">=0.59b0" }, - { name = "opentelemetry-instrumentation-logging", specifier = ">=0.59b0" }, - { name = "opentelemetry-instrumentation-psycopg", specifier = ">=0.59b0" }, - { name = "opentelemetry-instrumentation-requests", specifier = ">=0.59b0" }, - { name = "sentry-sdk", extras = ["opentelemetry-otlp"], specifier = ">=2.46.0" }, + { name = "opentelemetry-instrumentation-fastapi", specifier = "==0.48b0" }, + { name = "opentelemetry-instrumentation-logging", specifier = "==0.48b0" }, + { name = "opentelemetry-instrumentation-psycopg", specifier = "==0.48b0" }, + { name = "opentelemetry-instrumentation-requests", specifier = "==0.48b0" }, + { name = "sentry-sdk", extras = ["opentelemetry"], specifier = "==2.17.0" }, ] test = [ { name = "coverage", specifier = "==7.6.4" }, @@ -1686,116 +1698,66 @@ wheels = [ ] [[package]] -name = "opentelemetry-api" -version = "1.38.0" +name = "opencv-python-headless" +version = "4.10.0.84" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "importlib-metadata" }, - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/08/d8/0f354c375628e048bd0570645b310797299754730079853095bf000fba69/opentelemetry_api-1.38.0.tar.gz", hash = "sha256:f4c193b5e8acb0912b06ac5b16321908dd0843d75049c091487322284a3eea12", size = 65242, upload-time = "2025-10-16T08:35:50.25Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/ae/a2/d86e01c28300bd41bab8f18afd613676e2bd63515417b77636fc1add426f/opentelemetry_api-1.38.0-py3-none-any.whl", hash = "sha256:2891b0197f47124454ab9f0cf58f3be33faca394457ac3e09daba13ff50aa582", size = 65947, upload-time = "2025-10-16T08:35:30.23Z" }, -] - -[[package]] -name = "opentelemetry-distro" -version = "0.59b0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "opentelemetry-api" }, - { name = "opentelemetry-instrumentation" }, - { name = "opentelemetry-sdk" }, -] -sdist = { url = 
"https://files.pythonhosted.org/packages/ef/73/909d18e3d609c9f72fdfc441dbf2f33d26d29126088de5b3df30f4867f8a/opentelemetry_distro-0.59b0.tar.gz", hash = "sha256:a72703a514e1773d35d1ec01489a5fd1f1e7ce92e93cf459ba60f85b880d0099", size = 2583, upload-time = "2025-10-16T08:39:28.111Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e3/a5/71d78732d30616b0b57cce416fa49e7f25ce57492eaf66d0b6864c1df35f/opentelemetry_distro-0.59b0-py3-none-any.whl", hash = "sha256:bbe568d84d801d7e1ead320c4521fc37a4c24b3b2cd49a64f6d8a3c10676cea4", size = 3346, upload-time = "2025-10-16T08:38:27.63Z" }, -] - -[package.optional-dependencies] -otlp = [ - { name = "opentelemetry-exporter-otlp" }, -] - -[[package]] -name = "opentelemetry-exporter-otlp" -version = "1.38.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "opentelemetry-exporter-otlp-proto-grpc" }, - { name = "opentelemetry-exporter-otlp-proto-http" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/c2/2d/16e3487ddde2dee702bd746dd41950a8789b846d22a1c7e64824aac5ebea/opentelemetry_exporter_otlp-1.38.0.tar.gz", hash = "sha256:2f55acdd475e4136117eff20fbf1b9488b1b0b665ab64407516e1ac06f9c3f9d", size = 6147, upload-time = "2025-10-16T08:35:52.53Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/fd/8a/81cd252b16b7d95ec1147982b6af81c7932d23918b4c3b15372531242ddd/opentelemetry_exporter_otlp-1.38.0-py3-none-any.whl", hash = "sha256:bc6562cef229fac8887ed7109fc5abc52315f39d9c03fd487bb8b4ef8fbbc231", size = 7018, upload-time = "2025-10-16T08:35:32.995Z" }, -] - -[[package]] -name = "opentelemetry-exporter-otlp-proto-common" -version = "1.38.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "opentelemetry-proto" }, + { name = "numpy" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/19/83/dd4660f2956ff88ed071e9e0e36e830df14b8c5dc06722dbde1841accbe8/opentelemetry_exporter_otlp_proto_common-1.38.0.tar.gz", hash = "sha256:e333278afab4695aa8114eeb7bf4e44e65c6607d54968271a249c180b2cb605c", size = 20431, upload-time = "2025-10-16T08:35:53.285Z" } +sdist = { url = "https://files.pythonhosted.org/packages/2f/7e/d20f68a5f1487adf19d74378d349932a386b1ece3be9be9915e5986db468/opencv-python-headless-4.10.0.84.tar.gz", hash = "sha256:f2017c6101d7c2ef8d7bc3b414c37ff7f54d64413a1847d89970b6b7069b4e1a", size = 95117755, upload-time = "2024-06-17T18:32:15.606Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a7/9e/55a41c9601191e8cd8eb626b54ee6827b9c9d4a46d736f32abc80d8039fc/opentelemetry_exporter_otlp_proto_common-1.38.0-py3-none-any.whl", hash = "sha256:03cb76ab213300fe4f4c62b7d8f17d97fcfd21b89f0b5ce38ea156327ddda74a", size = 18359, upload-time = "2025-10-16T08:35:34.099Z" }, + { url = "https://files.pythonhosted.org/packages/1c/9b/583c8d9259f6fc19413f83fd18dd8e6cbc8eefb0b4dc6da52dd151fe3272/opencv_python_headless-4.10.0.84-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:a4f4bcb07d8f8a7704d9c8564c224c8b064c63f430e95b61ac0bffaa374d330e", size = 54835657, upload-time = "2024-06-18T04:58:12.904Z" }, + { url = "https://files.pythonhosted.org/packages/c0/7b/b4c67f5dad7a9a61c47f7a39e4050e8a4628bd64b3c3daaeb755d759f928/opencv_python_headless-4.10.0.84-cp37-abi3-macosx_12_0_x86_64.whl", hash = "sha256:5ae454ebac0eb0a0b932e3406370aaf4212e6a3fdb5038cc86c7aea15a6851da", size = 56475470, upload-time = "2024-06-17T19:34:39.604Z" }, + { url = 
"https://files.pythonhosted.org/packages/91/61/f838ce2046f3ec3591ea59ea3549085e399525d3b4558c4ed60b55ed88c0/opencv_python_headless-4.10.0.84-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:46071015ff9ab40fccd8a163da0ee14ce9846349f06c6c8c0f2870856ffa45db", size = 29329705, upload-time = "2024-06-17T20:00:49.406Z" }, + { url = "https://files.pythonhosted.org/packages/d1/09/248f86a404567303cdf120e4a301f389b68e3b18e5c0cc428de327da609c/opencv_python_headless-4.10.0.84-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:377d08a7e48a1405b5e84afcbe4798464ce7ee17081c1c23619c8b398ff18295", size = 49858781, upload-time = "2024-06-17T18:31:49.495Z" }, + { url = "https://files.pythonhosted.org/packages/30/c0/66f88d58500e990a9a0a5c06f98862edf1d0a3a430781218a8c193948438/opencv_python_headless-4.10.0.84-cp37-abi3-win32.whl", hash = "sha256:9092404b65458ed87ce932f613ffbb1106ed2c843577501e5768912360fc50ec", size = 28675298, upload-time = "2024-06-17T18:28:56.897Z" }, + { url = "https://files.pythonhosted.org/packages/26/d0/22f68eb23eea053a31655960f133c0be9726c6a881547e6e9e7e2a946c4f/opencv_python_headless-4.10.0.84-cp37-abi3-win_amd64.whl", hash = "sha256:afcf28bd1209dd58810d33defb622b325d3cbe49dcd7a43a902982c33e5fad05", size = 38754031, upload-time = "2024-06-17T18:29:04.871Z" }, ] [[package]] -name = "opentelemetry-exporter-otlp-proto-grpc" -version = "1.38.0" +name = "opentelemetry-api" +version = "1.27.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "googleapis-common-protos" }, - { name = "grpcio" }, - { name = "opentelemetry-api" }, - { name = "opentelemetry-exporter-otlp-proto-common" }, - { name = "opentelemetry-proto" }, - { name = "opentelemetry-sdk" }, + { name = "importlib-metadata" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a2/c0/43222f5b97dc10812bc4f0abc5dc7cd0a2525a91b5151d26c9e2e958f52e/opentelemetry_exporter_otlp_proto_grpc-1.38.0.tar.gz", hash = "sha256:2473935e9eac71f401de6101d37d6f3f0f1831db92b953c7dcc912536158ebd6", size = 24676, upload-time = "2025-10-16T08:35:53.83Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c9/83/93114b6de85a98963aec218a51509a52ed3f8de918fe91eb0f7299805c3f/opentelemetry_api-1.27.0.tar.gz", hash = "sha256:ed673583eaa5f81b5ce5e86ef7cdaf622f88ef65f0b9aab40b843dcae5bef342", size = 62693, upload-time = "2024-08-28T21:35:31.445Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/28/f0/bd831afbdba74ca2ce3982142a2fad707f8c487e8a3b6fef01f1d5945d1b/opentelemetry_exporter_otlp_proto_grpc-1.38.0-py3-none-any.whl", hash = "sha256:7c49fd9b4bd0dbe9ba13d91f764c2d20b0025649a6e4ac35792fb8d84d764bc7", size = 19695, upload-time = "2025-10-16T08:35:35.053Z" }, + { url = "https://files.pythonhosted.org/packages/fb/1f/737dcdbc9fea2fa96c1b392ae47275165a7c641663fbb08a8d252968eed2/opentelemetry_api-1.27.0-py3-none-any.whl", hash = "sha256:953d5871815e7c30c81b56d910c707588000fff7a3ca1c73e6531911d53065e7", size = 63970, upload-time = "2024-08-28T21:35:00.598Z" }, ] [[package]] -name = "opentelemetry-exporter-otlp-proto-http" -version = "1.38.0" +name = "opentelemetry-distro" +version = "0.48b0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "googleapis-common-protos" }, { name = "opentelemetry-api" }, - { name = "opentelemetry-exporter-otlp-proto-common" }, - { name = "opentelemetry-proto" }, + { name = "opentelemetry-instrumentation" }, { name = "opentelemetry-sdk" }, - { name = "requests" }, - { 
name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/81/0a/debcdfb029fbd1ccd1563f7c287b89a6f7bef3b2902ade56797bfd020854/opentelemetry_exporter_otlp_proto_http-1.38.0.tar.gz", hash = "sha256:f16bd44baf15cbe07633c5112ffc68229d0edbeac7b37610be0b2def4e21e90b", size = 17282, upload-time = "2025-10-16T08:35:54.422Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f4/09/423e17c439ed24c45110affe84aad886a536b7871a42637d2ad14a179b47/opentelemetry_distro-0.48b0.tar.gz", hash = "sha256:5cb15915780ac4972583286a56683d43bd4ca95371d72f5f3f179c8b0b2ddc91", size = 2556, upload-time = "2024-08-28T21:27:40.455Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e5/77/154004c99fb9f291f74aa0822a2f5bbf565a72d8126b3a1b63ed8e5f83c7/opentelemetry_exporter_otlp_proto_http-1.38.0-py3-none-any.whl", hash = "sha256:84b937305edfc563f08ec69b9cb2298be8188371217e867c1854d77198d0825b", size = 19579, upload-time = "2025-10-16T08:35:36.269Z" }, + { url = "https://files.pythonhosted.org/packages/82/cf/fa9a5fe954f1942e03b319ae0e319ebc93d9f984b548bcd9b3f232a1434d/opentelemetry_distro-0.48b0-py3-none-any.whl", hash = "sha256:b2f8fce114325b020769af3b9bf503efb8af07efc190bd1b9deac7843171664a", size = 3321, upload-time = "2024-08-28T21:26:26.584Z" }, ] [[package]] name = "opentelemetry-instrumentation" -version = "0.59b0" +version = "0.48b0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, - { name = "opentelemetry-semantic-conventions" }, - { name = "packaging" }, + { name = "setuptools" }, { name = "wrapt" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/04/ed/9c65cd209407fd807fa05be03ee30f159bdac8d59e7ea16a8fe5a1601222/opentelemetry_instrumentation-0.59b0.tar.gz", hash = "sha256:6010f0faaacdaf7c4dff8aac84e226d23437b331dcda7e70367f6d73a7db1adc", size = 31544, upload-time = "2025-10-16T08:39:31.959Z" } +sdist = { url = "https://files.pythonhosted.org/packages/04/0e/d9394839af5d55c8feb3b22cd11138b953b49739b20678ca96289e30f904/opentelemetry_instrumentation-0.48b0.tar.gz", hash = "sha256:94929685d906380743a71c3970f76b5f07476eea1834abd5dd9d17abfe23cc35", size = 24724, upload-time = "2024-08-28T21:27:42.82Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/10/f5/7a40ff3f62bfe715dad2f633d7f1174ba1a7dd74254c15b2558b3401262a/opentelemetry_instrumentation-0.59b0-py3-none-any.whl", hash = "sha256:44082cc8fe56b0186e87ee8f7c17c327c4c2ce93bdbe86496e600985d74368ee", size = 33020, upload-time = "2025-10-16T08:38:31.463Z" }, + { url = "https://files.pythonhosted.org/packages/0a/7f/405c41d4f359121376c9d5117dcf68149b8122d3f6c718996d037bd4d800/opentelemetry_instrumentation-0.48b0-py3-none-any.whl", hash = "sha256:a69750dc4ba6a5c3eb67986a337185a25b739966d80479befe37b546fc870b44", size = 29449, upload-time = "2024-08-28T21:26:31.288Z" }, ] [[package]] name = "opentelemetry-instrumentation-asgi" -version = "0.59b0" +version = "0.48b0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "asgiref" }, @@ -1804,14 +1766,14 @@ dependencies = [ { name = "opentelemetry-semantic-conventions" }, { name = "opentelemetry-util-http" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/b7/a4/cfbb6fc1ec0aa9bf5a93f548e6a11ab3ac1956272f17e0d399aa2c1f85bc/opentelemetry_instrumentation_asgi-0.59b0.tar.gz", hash = "sha256:2509d6fe9fd829399ce3536e3a00426c7e3aa359fc1ed9ceee1628b56da40e7a", size = 25116, upload-time = "2025-10-16T08:39:36.092Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/44/ac/fd3d40bab3234ec3f5c052a815100676baaae1832fa1067935f11e5c59c6/opentelemetry_instrumentation_asgi-0.48b0.tar.gz", hash = "sha256:04c32174b23c7fa72ddfe192dad874954968a6a924608079af9952964ecdf785", size = 23435, upload-time = "2024-08-28T21:27:47.276Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f3/88/fe02d809963b182aafbf5588685d7a05af8861379b0ec203d48e360d4502/opentelemetry_instrumentation_asgi-0.59b0-py3-none-any.whl", hash = "sha256:ba9703e09d2c33c52fa798171f344c8123488fcd45017887981df088452d3c53", size = 16797, upload-time = "2025-10-16T08:38:37.214Z" }, + { url = "https://files.pythonhosted.org/packages/db/74/a0e0d38622856597dd8e630f2bd793760485eb165708e11b8be1696bbb5a/opentelemetry_instrumentation_asgi-0.48b0-py3-none-any.whl", hash = "sha256:ddb1b5fc800ae66e85a4e2eca4d9ecd66367a8c7b556169d9e7b57e10676e44d", size = 15958, upload-time = "2024-08-28T21:26:38.139Z" }, ] [[package]] name = "opentelemetry-instrumentation-dbapi" -version = "0.59b0" +version = "0.48b0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, @@ -1819,14 +1781,14 @@ dependencies = [ { name = "opentelemetry-semantic-conventions" }, { name = "wrapt" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/60/aa/36a09652c98c65b42408d40f222fba031a3a281f1b6682e1b141b20b508d/opentelemetry_instrumentation_dbapi-0.59b0.tar.gz", hash = "sha256:c50112ae1cdb7f55bddcf57eca96aaa0f2dd78732be2b00953183439a4740493", size = 16308, upload-time = "2025-10-16T08:39:43.192Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a1/9a/468bc52079522db225158523aaedc24bfed13fe9c3775da638fc726d21fb/opentelemetry_instrumentation_dbapi-0.48b0.tar.gz", hash = "sha256:89821288199f4f5225e74543bf14addf9b1824b8b5f1e83ad0d9dafa844f33b0", size = 11033, upload-time = "2024-08-28T21:27:58.066Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e5/9b/1739b5b7926cbae342880d7a56d59a847313e6568a96ba7d4873ce0c0996/opentelemetry_instrumentation_dbapi-0.59b0-py3-none-any.whl", hash = "sha256:672d59caa06754b42d4e722644d9fcd00a1f9f862e9ea5cef6d4da454515ac67", size = 13970, upload-time = "2025-10-16T08:38:48.342Z" }, + { url = "https://files.pythonhosted.org/packages/8c/a7/ad9dc41c8358f4e39a8ea44273a59e7ac536b17d7c7456836ab683617eb9/opentelemetry_instrumentation_dbapi-0.48b0-py3-none-any.whl", hash = "sha256:0d11a73ecbf55b11e8fbc93e9e97366958b98ccb4b691c776b32e4b20b3ce8bb", size = 11003, upload-time = "2024-08-28T21:26:52.149Z" }, ] [[package]] name = "opentelemetry-instrumentation-fastapi" -version = "0.59b0" +version = "0.48b0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, @@ -1835,41 +1797,41 @@ dependencies = [ { name = "opentelemetry-semantic-conventions" }, { name = "opentelemetry-util-http" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ab/a7/7a6ce5009584ce97dbfd5ce77d4f9d9570147507363349d2cb705c402bcf/opentelemetry_instrumentation_fastapi-0.59b0.tar.gz", hash = "sha256:e8fe620cfcca96a7d634003df1bc36a42369dedcdd6893e13fb5903aeeb89b2b", size = 24967, upload-time = "2025-10-16T08:39:46.056Z" } +sdist = { url = "https://files.pythonhosted.org/packages/58/20/43477da5850ef2cd3792715d442aecd051e885e0603b6ee5783b2104ba8f/opentelemetry_instrumentation_fastapi-0.48b0.tar.gz", hash = "sha256:21a72563ea412c0b535815aeed75fc580240f1f02ebc72381cfab672648637a2", size = 18497, upload-time = "2024-08-28T21:28:01.14Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/35/27/5914c8bf140ffc70eff153077e225997c7b054f0bf28e11b9ab91b63b18f/opentelemetry_instrumentation_fastapi-0.59b0-py3-none-any.whl", hash = "sha256:0d8d00ff7d25cca40a4b2356d1d40a8f001e0668f60c102f5aa6bb721d660c4f", size = 13492, upload-time = "2025-10-16T08:38:52.312Z" }, + { url = "https://files.pythonhosted.org/packages/ee/50/745ab075a3041b7a5f29a579d2c28eaad54f64b4589d8f9fd364c62cf0f3/opentelemetry_instrumentation_fastapi-0.48b0-py3-none-any.whl", hash = "sha256:afeb820a59e139d3e5d96619600f11ce0187658b8ae9e3480857dd790bc024f2", size = 11777, upload-time = "2024-08-28T21:26:57.457Z" }, ] [[package]] name = "opentelemetry-instrumentation-logging" -version = "0.59b0" +version = "0.48b0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, { name = "opentelemetry-instrumentation" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/be/88/9c5f70fa8b8d96d30be378fc6eb1776e13aea456db15009f4eaef4928847/opentelemetry_instrumentation_logging-0.59b0.tar.gz", hash = "sha256:1b51116444edc74f699daf9002ded61529397100c9bc903c8b9aaa75a5218c76", size = 9969, upload-time = "2025-10-16T08:39:51.653Z" } +sdist = { url = "https://files.pythonhosted.org/packages/22/3c/0d329da676f17ad3b527cdb8accf63681d99a6a458d355e4559fa31cd4a7/opentelemetry_instrumentation_logging-0.48b0.tar.gz", hash = "sha256:529eb13eedf57d6b2f94e20e996271db2957b817b9457fe4796365d6d4238dec", size = 9729, upload-time = "2024-08-28T21:28:06.63Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/2c/a0/340cc45d71437c2f7e27f13c1d2e335b18bbc7a24fd7d174018500b3c7ba/opentelemetry_instrumentation_logging-0.59b0-py3-none-any.whl", hash = "sha256:fdd4eddbd093fc421df8f7d356ecb15b320a1f3396b56bce5543048a5c457eea", size = 12577, upload-time = "2025-10-16T08:38:58.064Z" }, + { url = "https://files.pythonhosted.org/packages/e0/c5/119676ba0bae61887b812d5777a521327a2335ff350cbbc8864e4200fdd7/opentelemetry_instrumentation_logging-0.48b0-py3-none-any.whl", hash = "sha256:75e5357d9b8c12071a19e1fef664dc1f430ef45874445c324ba4439a00972dc0", size = 12145, upload-time = "2024-08-28T21:27:05.915Z" }, ] [[package]] name = "opentelemetry-instrumentation-psycopg" -version = "0.59b0" +version = "0.48b0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, { name = "opentelemetry-instrumentation" }, { name = "opentelemetry-instrumentation-dbapi" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/1b/19/4f612a40bd68461bbef2fdcbb28ec2aab64cf4a9f3217c159f3e0f4bfe50/opentelemetry_instrumentation_psycopg-0.59b0.tar.gz", hash = "sha256:6d0295463a66c5aed6e076c7d801946e60eb020f0d4bb06e17b6b800c73dfbd6", size = 10929, upload-time = "2025-10-16T08:39:54.506Z" } +sdist = { url = "https://files.pythonhosted.org/packages/39/56/1fc35575c4609d97db56a36f3c4c6af743250898de383a353b2ad3bd2427/opentelemetry_instrumentation_psycopg-0.48b0.tar.gz", hash = "sha256:5c6d47ee45a557b517353b3a75f710e87736c874f0996049f8ad93a430324aaa", size = 9989, upload-time = "2024-08-28T21:28:09.752Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/57/eb/616c0bc3612918d79f56eac834d1a4f8ba9c086022b0a3cd6b42a784ec31/opentelemetry_instrumentation_psycopg-0.59b0-py3-none-any.whl", hash = "sha256:3a3dfec0b57f37f16643bc50231a696e6831af556b3196e445b5c11bd29eec0c", size = 11071, upload-time = "2025-10-16T08:39:01.205Z" }, + { url = 
"https://files.pythonhosted.org/packages/cd/e2/5db967541dc1968860cf75f387e5c2fcf483cc8cc7564c988f6b928d1535/opentelemetry_instrumentation_psycopg-0.48b0-py3-none-any.whl", hash = "sha256:337410b434f23cb57ec934de15a3a88796a0b2f091e28b6fcfdf1241a58efb6c", size = 10299, upload-time = "2024-08-28T21:27:10.379Z" }, ] [[package]] name = "opentelemetry-instrumentation-requests" -version = "0.59b0" +version = "0.48b0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, @@ -1877,57 +1839,45 @@ dependencies = [ { name = "opentelemetry-semantic-conventions" }, { name = "opentelemetry-util-http" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/49/01/31282a46b09684dfc636bc066deb090bae6973e71e85e253a8c74e727b1f/opentelemetry_instrumentation_requests-0.59b0.tar.gz", hash = "sha256:9af2ffe3317f03074d7f865919139e89170b6763a0251b68c25e8e64e04b3400", size = 15186, upload-time = "2025-10-16T08:40:00.558Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e5/ea/c282ba418b2669e4f730cb3f68b02a0ca65f4baf801e971169a4cc449ffb/opentelemetry_instrumentation_requests-0.59b0-py3-none-any.whl", hash = "sha256:d43121532877e31a46c48649279cec2504ee1e0ceb3c87b80fe5ccd7eafc14c1", size = 12966, upload-time = "2025-10-16T08:39:09.919Z" }, -] - -[[package]] -name = "opentelemetry-proto" -version = "1.38.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "protobuf" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/51/14/f0c4f0f6371b9cb7f9fa9ee8918bfd59ac7040c7791f1e6da32a1839780d/opentelemetry_proto-1.38.0.tar.gz", hash = "sha256:88b161e89d9d372ce723da289b7da74c3a8354a8e5359992be813942969ed468", size = 46152, upload-time = "2025-10-16T08:36:01.612Z" } +sdist = { url = "https://files.pythonhosted.org/packages/52/ac/5eb78efde21ff21d0ad5dc8c6cc6a0f8ae482ce8a46293c2f45a628b6166/opentelemetry_instrumentation_requests-0.48b0.tar.gz", hash = "sha256:67ab9bd877a0352ee0db4616c8b4ae59736ddd700c598ed907482d44f4c9a2b3", size = 14120, upload-time = "2024-08-28T21:28:16.933Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b6/6a/82b68b14efca5150b2632f3692d627afa76b77378c4999f2648979409528/opentelemetry_proto-1.38.0-py3-none-any.whl", hash = "sha256:b6ebe54d3217c42e45462e2a1ae28c3e2bf2ec5a5645236a490f55f45f1a0a18", size = 72535, upload-time = "2025-10-16T08:35:45.749Z" }, + { url = "https://files.pythonhosted.org/packages/43/df/0df9226d1b14f29d23c07e6194b9fd5ad50e7d987b7fd13df7dcf718aeb1/opentelemetry_instrumentation_requests-0.48b0-py3-none-any.whl", hash = "sha256:d4f01852121d0bd4c22f14f429654a735611d4f7bf3cf93f244bdf1489b2233d", size = 12366, upload-time = "2024-08-28T21:27:20.771Z" }, ] [[package]] name = "opentelemetry-sdk" -version = "1.38.0" +version = "1.27.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, { name = "opentelemetry-semantic-conventions" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/85/cb/f0eee1445161faf4c9af3ba7b848cc22a50a3d3e2515051ad8628c35ff80/opentelemetry_sdk-1.38.0.tar.gz", hash = "sha256:93df5d4d871ed09cb4272305be4d996236eedb232253e3ab864c8620f051cebe", size = 171942, upload-time = "2025-10-16T08:36:02.257Z" } +sdist = { url = "https://files.pythonhosted.org/packages/0d/9a/82a6ac0f06590f3d72241a587cb8b0b751bd98728e896cc4cbd4847248e6/opentelemetry_sdk-1.27.0.tar.gz", hash = "sha256:d525017dea0ccce9ba4e0245100ec46ecdc043f2d7b8315d56b19aff0904fa6f", size = 145019, upload-time = 
"2024-08-28T21:35:46.708Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/2f/2e/e93777a95d7d9c40d270a371392b6d6f1ff170c2a3cb32d6176741b5b723/opentelemetry_sdk-1.38.0-py3-none-any.whl", hash = "sha256:1c66af6564ecc1553d72d811a01df063ff097cdc82ce188da9951f93b8d10f6b", size = 132349, upload-time = "2025-10-16T08:35:46.995Z" }, + { url = "https://files.pythonhosted.org/packages/c1/bd/a6602e71e315055d63b2ff07172bd2d012b4cba2d4e00735d74ba42fc4d6/opentelemetry_sdk-1.27.0-py3-none-any.whl", hash = "sha256:365f5e32f920faf0fd9e14fdfd92c086e317eaa5f860edba9cdc17a380d9197d", size = 110505, upload-time = "2024-08-28T21:35:24.769Z" }, ] [[package]] name = "opentelemetry-semantic-conventions" -version = "0.59b0" +version = "0.48b0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/40/bc/8b9ad3802cd8ac6583a4eb7de7e5d7db004e89cb7efe7008f9c8a537ee75/opentelemetry_semantic_conventions-0.59b0.tar.gz", hash = "sha256:7a6db3f30d70202d5bf9fa4b69bc866ca6a30437287de6c510fb594878aed6b0", size = 129861, upload-time = "2025-10-16T08:36:03.346Z" } +sdist = { url = "https://files.pythonhosted.org/packages/0a/89/1724ad69f7411772446067cdfa73b598694c8c91f7f8c922e344d96d81f9/opentelemetry_semantic_conventions-0.48b0.tar.gz", hash = "sha256:12d74983783b6878162208be57c9effcb89dc88691c64992d70bb89dc00daa1a", size = 89445, upload-time = "2024-08-28T21:35:47.673Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/24/7d/c88d7b15ba8fe5c6b8f93be50fc11795e9fc05386c44afaf6b76fe191f9b/opentelemetry_semantic_conventions-0.59b0-py3-none-any.whl", hash = "sha256:35d3b8833ef97d614136e253c1da9342b4c3c083bbaf29ce31d572a1c3825eed", size = 207954, upload-time = "2025-10-16T08:35:48.054Z" }, + { url = "https://files.pythonhosted.org/packages/b7/7a/4f0063dbb0b6c971568291a8bc19a4ca70d3c185db2d956230dd67429dfc/opentelemetry_semantic_conventions-0.48b0-py3-none-any.whl", hash = "sha256:a0de9f45c413a8669788a38569c7e0a11ce6ce97861a628cca785deecdc32a1f", size = 149685, upload-time = "2024-08-28T21:35:25.983Z" }, ] [[package]] name = "opentelemetry-util-http" -version = "0.59b0" +version = "0.48b0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/34/f7/13cd081e7851c42520ab0e96efb17ffbd901111a50b8252ec1e240664020/opentelemetry_util_http-0.59b0.tar.gz", hash = "sha256:ae66ee91be31938d832f3b4bc4eb8a911f6eddd38969c4a871b1230db2a0a560", size = 9412, upload-time = "2025-10-16T08:40:11.335Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d6/d7/185c494754340e0a3928fd39fde2616ee78f2c9d66253affaad62d5b7935/opentelemetry_util_http-0.48b0.tar.gz", hash = "sha256:60312015153580cc20f322e5cdc3d3ecad80a71743235bdb77716e742814623c", size = 7863, upload-time = "2024-08-28T21:28:27.266Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/20/56/62282d1d4482061360449dacc990c89cad0fc810a2ed937b636300f55023/opentelemetry_util_http-0.59b0-py3-none-any.whl", hash = "sha256:6d036a07563bce87bf521839c0671b507a02a0d39d7ea61b88efa14c6e25355d", size = 7648, upload-time = "2025-10-16T08:39:25.706Z" }, + { url = "https://files.pythonhosted.org/packages/ad/2e/36097c0a4d0115b8c7e377c90bab7783ac183bc5cb4071308f8959454311/opentelemetry_util_http-0.48b0-py3-none-any.whl", hash = "sha256:76f598af93aab50328d2a69c786beaedc8b6a7770f7a818cc307eb353debfffb", size = 6946, upload-time = "2024-08-28T21:27:37.975Z" }, ] [[package]] @@ -2314,6 
+2264,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/66/0e/9ee7bc0b48ec45d93b302fa2d787830dca4dc454d31a237faa5815995988/PyDispatcher-2.0.7-py3-none-any.whl", hash = "sha256:96543bea04115ffde08f851e1d45cacbfd1ee866ac42127d9b476dc5aefa7de0", size = 12040, upload-time = "2023-02-17T20:11:11.991Z" }, ] +[[package]] +name = "pyexiftool" +version = "0.5.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/5e/48/406da6691d15abf3c8d399bce8bc588709a5b54e857fd7c22dad2f90c33c/PyExifTool-0.5.6.tar.gz", hash = "sha256:22a972c1c212d1ad5f61916fded5057333dcc48fb8e42eed12d2ff9665b367ae", size = 56365, upload-time = "2023-10-22T23:18:18.819Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fc/b9/175e9a1f8f3f94b22f622f0fcac853ae2c43cb4ac6034f849269c6086dac/PyExifTool-0.5.6-py3-none-any.whl", hash = "sha256:ac7d7836d2bf373f20aa558528f6b2222c4c0d896ed28c951a3ff8e6cec05a87", size = 51243, upload-time = "2023-10-22T23:18:16.614Z" }, +] + [[package]] name = "pygments" version = "2.18.0" @@ -2779,15 +2738,15 @@ wheels = [ [[package]] name = "sentry-sdk" -version = "2.46.0" +version = "2.17.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "certifi" }, { name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/7c/d7/c140a5837649e2bf2ec758494fde1d9a016c76777eab64e75ef38d685bbb/sentry_sdk-2.46.0.tar.gz", hash = "sha256:91821a23460725734b7741523021601593f35731808afc0bb2ba46c27b8acd91", size = 374761, upload-time = "2025-11-24T09:34:13.932Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b1/83/7d0956a71ac894717099be3669ca7b8f164bccbcfb570b2f02817d0a0068/sentry_sdk-2.17.0.tar.gz", hash = "sha256:dd0a05352b78ffeacced73a94e86f38b32e2eae15fff5f30ca5abb568a72eacf", size = 290959, upload-time = "2024-10-17T08:48:21.236Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/4b/b6/ce7c502a366f4835b1f9c057753f6989a92d3c70cbadb168193f5fb7499b/sentry_sdk-2.46.0-py2.py3-none-any.whl", hash = "sha256:4eeeb60198074dff8d066ea153fa6f241fef1668c10900ea53a4200abc8da9b1", size = 406266, upload-time = "2025-11-24T09:34:12.114Z" }, + { url = "https://files.pythonhosted.org/packages/10/63/8e80fff3aa15488bc332ede44165a397a29bb13ec4a4b2236299e3b66067/sentry_sdk-2.17.0-py2.py3-none-any.whl", hash = "sha256:625955884b862cc58748920f9e21efdfb8e0d4f98cca4ab0d3918576d5b606ad", size = 314520, upload-time = "2024-10-17T08:48:17.979Z" }, ] [package.optional-dependencies] diff --git a/src/frontend/package.json b/src/frontend/package.json index 931eb1103..7001ca6b5 100644 --- a/src/frontend/package.json +++ b/src/frontend/package.json @@ -27,6 +27,7 @@ "@tanstack/react-query": "^5.66.0", "@tanstack/react-query-devtools": "^5.66.0", "@tanstack/react-table": "^8.20.6", + "@tanstack/react-virtual": "^3.13.12", "@turf/area": "^7.2.0", "@turf/bbox": "^7.2.0", "@turf/boolean-point-in-polygon": "^7.2.0", diff --git a/src/frontend/pnpm-lock.yaml b/src/frontend/pnpm-lock.yaml index 2425b26a3..82eca5d5e 100644 --- a/src/frontend/pnpm-lock.yaml +++ b/src/frontend/pnpm-lock.yaml @@ -56,6 +56,9 @@ importers: '@tanstack/react-table': specifier: ^8.20.6 version: 8.20.6(react-dom@19.0.0(react@19.0.0))(react@19.0.0) + '@tanstack/react-virtual': + specifier: ^3.13.12 + version: 3.13.12(react-dom@19.0.0(react@19.0.0))(react@19.0.0) '@turf/area': specifier: ^7.2.0 version: 7.2.0 @@ -1731,10 +1734,19 @@ packages: react: '>=16.8' react-dom: '>=16.8' + '@tanstack/react-virtual@3.13.12': + resolution: 
{integrity: sha512-Gd13QdxPSukP8ZrkbgS2RwoZseTTbQPLnQEn7HY/rqtM+8Zt95f7xKC7N0EsKs7aoz0WzZ+fditZux+F8EzYxA==} + peerDependencies: + react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 + react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 + '@tanstack/table-core@8.20.5': resolution: {integrity: sha512-P9dF7XbibHph2PFRz8gfBKEXEY/HJPOhym8CHmjF8y3q5mWpKx9xtZapXQUWCgkqvsK0R46Azuz+VaxD4Xl+Tg==} engines: {node: '>=12'} + '@tanstack/virtual-core@3.13.12': + resolution: {integrity: sha512-1YBOJfRHV4sXUmWsFSf5rQor4Ss82G8dQWLRbnk3GA4jeP8hQt1hxXh0tmflpC0dz3VgEv/1+qwPyLeWkQuPFA==} + '@transloadit/prettier-bytes@0.3.5': resolution: {integrity: sha512-xF4A3d/ZyX2LJWeQZREZQw+qFX4TGQ8bGVP97OLRt6sPO6T0TNHBFTuRHOJh7RNmYOBmQ9MHxpolD9bXihpuVA==} @@ -6108,8 +6120,16 @@ snapshots: react: 19.0.0 react-dom: 19.0.0(react@19.0.0) + '@tanstack/react-virtual@3.13.12(react-dom@19.0.0(react@19.0.0))(react@19.0.0)': + dependencies: + '@tanstack/virtual-core': 3.13.12 + react: 19.0.0 + react-dom: 19.0.0(react@19.0.0) + '@tanstack/table-core@8.20.5': {} + '@tanstack/virtual-core@3.13.12': {} + '@transloadit/prettier-bytes@0.3.5': {} '@turf/area@7.2.0': diff --git a/src/frontend/src/api/projects.ts b/src/frontend/src/api/projects.ts index ec9f0b831..2073320e4 100644 --- a/src/frontend/src/api/projects.ts +++ b/src/frontend/src/api/projects.ts @@ -1,5 +1,5 @@ /* eslint-disable import/prefer-default-export */ -import { UseQueryOptions, useQuery } from '@tanstack/react-query'; +import { UseQueryOptions, useQuery, useMutation, UseMutationOptions } from '@tanstack/react-query'; import { getProjectsList, getProjectDetail, @@ -7,6 +7,13 @@ import { } from '@Services/createproject'; import { getTaskStates } from '@Services/project'; import { getUserProfileInfo } from '@Services/common'; +import { + startClassification, + getBatchStatus, + getBatchImages, + BatchStatusSummary, + ImageClassificationResult, +} from '@Services/classification'; export const useGetProjectsListQuery = ( queryOptions?: Partial, @@ -96,3 +103,45 @@ export const useGetProjectCentroidQuery = ( ...queryOptions, }); }; + +// Classification hooks +export const useStartClassificationMutation = ( + mutationOptions?: UseMutationOptions< + { job_id: string; message: string }, + Error, + { projectId: string; batchId: string } + >, +) => { + return useMutation({ + mutationFn: ({ projectId, batchId }) => + startClassification(projectId, batchId), + ...mutationOptions, + }); +}; + +export const useGetBatchStatusQuery = ( + projectId: string, + batchId: string, + queryOptions?: Partial>, +) => { + return useQuery({ + queryKey: ['batch-status', projectId, batchId], + queryFn: async () => getBatchStatus(projectId, batchId), + enabled: !!projectId && !!batchId, + ...queryOptions, + }); +}; + +export const useGetBatchImagesQuery = ( + projectId: string, + batchId: string, + since?: string, + queryOptions?: Partial>, +) => { + return useQuery({ + queryKey: ['batch-images', projectId, batchId, since], + queryFn: () => getBatchImages(projectId, batchId, since), + enabled: !!projectId && !!batchId, + ...queryOptions, + }); +}; diff --git a/src/frontend/src/components/DroneOperatorTask/DescriptionSection/DroneImageProcessingWorkflow/ImageClassification.tsx b/src/frontend/src/components/DroneOperatorTask/DescriptionSection/DroneImageProcessingWorkflow/ImageClassification.tsx index a212f85e4..2309e69e7 100644 --- a/src/frontend/src/components/DroneOperatorTask/DescriptionSection/DroneImageProcessingWorkflow/ImageClassification.tsx +++ 
b/src/frontend/src/components/DroneOperatorTask/DescriptionSection/DroneImageProcessingWorkflow/ImageClassification.tsx @@ -1,14 +1,639 @@ -const ImageClassification = () => { +import { useEffect, useState, useCallback, useRef, useMemo } from 'react'; +import { toast } from 'react-toastify'; +import { useMutation, useQueryClient } from '@tanstack/react-query'; +import { useVirtualizer } from '@tanstack/react-virtual'; +import { useTypedDispatch, useTypedSelector } from '@Store/hooks'; +import { + startClassification as startClassificationAction, + completeClassification, +} from '@Store/slices/imageProcessingWorkflow'; +import { + useStartClassificationMutation, + useGetBatchStatusQuery, + useGetBatchImagesQuery, +} from '@Api/projects'; +import { acceptImage } from '@Services/classification'; +import type { ImageClassificationResult } from '@Services/classification'; +import { Button } from '@Components/RadixComponents/Button'; + +// Grid configuration +const COLUMNS = 8; // Number of columns in the grid +const ROW_HEIGHT = 120; // Height of each row in pixels (including gap) + +interface ImageClassificationProps { + projectId: string; + batchId: string; +} + +const ImageClassification = ({ + projectId, + batchId, +}: ImageClassificationProps) => { + const dispatch = useTypedDispatch(); + const queryClient = useQueryClient(); + const { jobId } = useTypedSelector( + (state) => state.imageProcessingWorkflow + ); + const [images, setImages] = useState>({}); + const [lastUpdateTime, setLastUpdateTime] = useState(); + const [isPolling, setIsPolling] = useState(false); + const [selectedImage, setSelectedImage] = useState(null); + + // Mutation to accept a rejected image + const acceptImageMutation = useMutation({ + mutationFn: (imageId: string) => acceptImage(projectId, imageId), + onSuccess: (data: { message: string; image_id: string; status: string; task_id: string | null }) => { + // Handle based on the returned status + if (data.status === 'unmatched') { + toast.warning(data.message); + // Update local state to reflect unmatched status + if (selectedImage) { + setImages(prev => ({ + ...prev, + [selectedImage.id]: { + ...prev[selectedImage.id], + status: 'unmatched' as const, + task_id: undefined, + }, + })); + } + } else { + toast.success('Image accepted successfully'); + // Update the local state to reflect the change + if (selectedImage) { + setImages(prev => ({ + ...prev, + [selectedImage.id]: { + ...prev[selectedImage.id], + status: 'assigned' as const, + task_id: data.task_id || undefined, + }, + })); + } + } + setSelectedImage(null); + // Invalidate queries to refresh the data + queryClient.invalidateQueries({ queryKey: ['batchStatus', projectId, batchId] }); + }, + onError: (error: any) => { + const message = error?.response?.data?.detail || error.message || 'Failed to accept image'; + toast.error(message); + }, + }); + + // Mutation to start classification + const startClassificationMutation = useStartClassificationMutation({ + onSuccess: (data) => { + dispatch(startClassificationAction(data.job_id)); + setIsPolling(true); + toast.info('Classification started. 
Processing images...'); + }, + onError: (error: any) => { + toast.error(`Failed to start classification: ${error?.message || 'Unknown error'}`); + }, + }); + + // Query for batch status (summary counts) - poll every 10 seconds to handle race conditions + const { data: batchStatus, isLoading: isLoadingStatus } = useGetBatchStatusQuery( + projectId, + batchId, + { + enabled: !!projectId && !!batchId, + refetchInterval: 10000, + }, + ); + + // Query for batch images - fetch immediately and poll every 10 seconds to handle race conditions + const { data: newImages } = useGetBatchImagesQuery( + projectId, + batchId, + lastUpdateTime, + { + enabled: !!projectId && !!batchId, + refetchInterval: 10000, + }, + ); + + // Update images state when new data arrives + useEffect(() => { + if (newImages && newImages.length > 0) { + setImages((prev) => { + const updated = { ...prev }; + newImages.forEach((img) => { + updated[img.id] = img; + }); + return updated; + }); + + const latestTime = newImages.reduce((latest, img) => { + return img.uploaded_at > latest ? img.uploaded_at : latest; + }, lastUpdateTime || ''); + setLastUpdateTime(latestTime); + } + }, [newImages, lastUpdateTime]); + + // Stop polling when classification is complete + useEffect(() => { + if (batchStatus && isPolling) { + const classified = (batchStatus.assigned ?? 0) + (batchStatus.rejected ?? 0) + + (batchStatus.unmatched ?? 0) + (batchStatus.invalid_exif ?? 0) + + (batchStatus.duplicate ?? 0); + const toClassify = (batchStatus.uploaded ?? 0) + (batchStatus.classifying ?? 0); + + if (classified > 0 && toClassify === 0) { + setIsPolling(false); + dispatch(completeClassification()); + toast.success('Classification complete! Review the results below.'); + } + } + }, [batchStatus, isPolling, dispatch]); + + const handleStartClassification = useCallback(() => { + startClassificationMutation.mutate({ projectId, batchId }); + }, [projectId, batchId, startClassificationMutation]); + + // Get status badge styles + const getStatusBadgeClass = (status: string): string => { + const baseClass = 'naxatw-inline-flex naxatw-items-center naxatw-rounded-full naxatw-px-2 naxatw-py-0.5 naxatw-text-xs naxatw-font-medium naxatw-shadow-sm'; + switch (status) { + case 'assigned': + return `${baseClass} naxatw-bg-green-500 naxatw-text-white`; + case 'rejected': + return `${baseClass} naxatw-bg-red naxatw-text-white`; + case 'unmatched': + return `${baseClass} naxatw-bg-yellow-500 naxatw-text-white`; + case 'invalid_exif': + return `${baseClass} naxatw-bg-orange-500 naxatw-text-white`; + case 'duplicate': + return `${baseClass} naxatw-bg-gray-500 naxatw-text-white`; + case 'classifying': + return `${baseClass} naxatw-bg-blue-500 naxatw-text-white naxatw-animate-pulse`; + case 'uploaded': + return `${baseClass} naxatw-bg-gray-400 naxatw-text-white`; + case 'staged': + default: + return `${baseClass} naxatw-bg-gray-300 naxatw-text-gray-700`; + } + }; + + const getStatusLabel = (status: string): string => { + switch (status) { + case 'assigned': + return 'Assigned'; + case 'rejected': + return 'Rejected'; + case 'unmatched': + return 'No Match'; + case 'invalid_exif': + return 'Invalid EXIF'; + case 'duplicate': + return 'Duplicate'; + case 'classifying': + return 'Processing...'; + case 'uploaded': + return 'Ready'; + case 'staged': + default: + return 'Staged'; + } + }; + + const getBorderColor = (status: string): string => { + switch (status) { + case 'assigned': + return 'naxatw-border-green-400'; + case 'rejected': + return 'naxatw-border-red-400'; + case 
'unmatched': + return 'naxatw-border-yellow-400'; + case 'invalid_exif': + return 'naxatw-border-orange-400'; + case 'duplicate': + return 'naxatw-border-gray-400'; + case 'classifying': + return 'naxatw-border-blue-400'; + default: + return 'naxatw-border-gray-300'; + } + }; + + const imagesList = Object.values(images); + const isClassifying = (batchStatus?.classifying ?? 0) > 0 || isPolling; + + // Compute simplified stats for user-friendly display + const computedStats = useMemo(() => { + if (!batchStatus) return null; + + const uploaded = (batchStatus.staged ?? 0) + (batchStatus.uploaded ?? 0); + const processing = batchStatus.classifying ?? 0; + const complete = batchStatus.assigned ?? 0; + const issues = (batchStatus.rejected ?? 0) + (batchStatus.unmatched ?? 0) + (batchStatus.invalid_exif ?? 0); + const duplicates = batchStatus.duplicate ?? 0; + const totalClassified = complete + issues + duplicates; + const issuePercentage = totalClassified > 0 ? (issues / totalClassified) * 100 : 0; + + return { + uploaded, + processing, + complete, + issues, + duplicates, + totalClassified, + issuePercentage, + }; + }, [batchStatus]); + + // Check if classification is complete and has high issue rate + const isClassificationComplete = computedStats && computedStats.processing === 0 && computedStats.uploaded === 0 && computedStats.totalClassified > 0; + const hasHighIssueRate = isClassificationComplete && computedStats.issuePercentage >= 50; + + // Virtualization setup + const parentRef = useRef(null); + + // Split images into rows for virtualization + const rows = useMemo(() => { + const result: ImageClassificationResult[][] = []; + for (let i = 0; i < imagesList.length; i += COLUMNS) { + result.push(imagesList.slice(i, i + COLUMNS)); + } + return result; + }, [imagesList]); + + const rowVirtualizer = useVirtualizer({ + count: rows.length, + getScrollElement: () => parentRef.current, + estimateSize: () => ROW_HEIGHT, + overscan: 3, // Render 3 extra rows above and below viewport + }); + + // Helper to render value with spinner for pending stages + const renderValue = (value: number, showSpinner: boolean, colorClass: string = '') => { + if (value === 0 && showSpinner) { + return ( +
+
+
+ ); + } + return
{value}
; + }; + return ( -
-

- Classify the uploaded images based on their content or purpose. -

-
-

- Classification interface will go here -

+
+ {/* Header Section */} +
+ {!jobId && ( +
+ + {isLoadingStatus && ( +
+
+ Fetching images... +
+ )} +
+ )}
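The header above hosts the Start Classification button, which drives a React Query mutation built on the `startClassification` service added later in this diff. The `useStartClassificationMutation` wrapper used at the top of this component is not part of this patch; a minimal sketch of how it could be shaped (module location and option plumbing are assumptions) is:

```ts
// Hypothetical wrapper around the startClassification service from
// src/frontend/src/services/classification.ts; the real hook may differ.
import { useMutation, type UseMutationOptions } from '@tanstack/react-query';
import { startClassification } from '@Services/classification';

type StartClassificationVars = { projectId: string; batchId: string };
type StartClassificationResult = { job_id: string; message: string };

export const useStartClassificationMutation = (
  options?: UseMutationOptions<StartClassificationResult, Error, StartClassificationVars>,
) =>
  useMutation({
    // mutate({ projectId, batchId }) matches handleStartClassification above
    mutationFn: ({ projectId, batchId }: StartClassificationVars) =>
      startClassification(projectId, batchId),
    ...options,
  });
```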
+ + {/* Status Summary */} + {computedStats && ( +
+
+ {renderValue(computedStats.uploaded, isClassifying, 'naxatw-text-gray-500')} +
Uploaded
+
+
+ {renderValue(computedStats.processing, isClassifying, 'naxatw-text-blue-600')} +
Processing
+
+
+ {renderValue(computedStats.complete, isClassifying, 'naxatw-text-green-600')} +
Complete
+
+
+ {renderValue(computedStats.issues, isClassifying, 'naxatw-text-orange-600')} +
Issues
+
+
+ {renderValue(computedStats.duplicates, isClassifying, 'naxatw-text-gray-600')} +
Duplicates
+
+
+ )} + + {/* Warning for high issue rate */} + {hasHighIssueRate && computedStats && ( +
+ warning +
+

+ Dataset Quality Warning +

+

+ {computedStats.issuePercentage.toFixed(0)}% of your images ({computedStats.issues} out of {computedStats.totalClassified}) have issues. + This may indicate problems with your drone settings or capture conditions. + Please review the images with issues and consider re-capturing if necessary. +

+
+
+ )} + + {/* Virtualized Image Grid */} + {imagesList.length > 0 && ( +
+
+ {rowVirtualizer.getVirtualItems().map((virtualRow) => { + const rowImages = rows[virtualRow.index]; + return ( +
+ {rowImages.map((image) => ( +
setSelectedImage(image)} + className={`naxatw-group naxatw-relative naxatw-h-[108px] naxatw-w-[calc(12.5%-10px)] naxatw-cursor-pointer naxatw-overflow-hidden naxatw-rounded-lg naxatw-border-2 ${getBorderColor(image.status)} naxatw-transition-all hover:naxatw-scale-105 hover:naxatw-shadow-lg`} + > + {/* Image thumbnail - use thumbnail_url if available, otherwise fall back to full url */} + {(image.thumbnail_url || image.url) ? ( + <> + {/* Loading placeholder - shown before image loads */} +
+
+
+ {/* Actual image - will overlay the placeholder once loaded */} + {image.filename} + + ) : ( +
+ + + +
+ )} + + {/* Status badge */} +
+ + {getStatusLabel(image.status)} + +
+ + {/* Filename on hover */} +
+
{image.filename}
+
+
+ ))} +
+ ); + })} +
+
+ )} + + {/* Loading state - show when no images yet */} + {imagesList.length === 0 && ( +
+
+
+

+ Loading images... +

+
+
+ )} + + {/* Image Detail Modal */} + {selectedImage && ( +
setSelectedImage(null)} + > +
e.stopPropagation()} + > + {/* Modal Header */} +
+
+

+ {selectedImage.filename} +

+ + {getStatusLabel(selectedImage.status)} + +
+ +
+ + {/* Modal Content */} +
+ {/* Image */} +
+ {selectedImage.url ? ( + {selectedImage.filename} + ) : ( +
+ + + +
+ )} +
+ + {/* Details */} +
+

Image Details

+ +
+
+ Status +
+ + {getStatusLabel(selectedImage.status)} + +
+
+ +
+ Filename +

+ {selectedImage.filename} +

+
+ +
+ GPS Data +

+ {selectedImage.has_gps ? ( + Available + ) : ( + Not Available + )} +

+
+ + {selectedImage.task_id && ( +
+ Assigned Task +

+ {selectedImage.task_id} +

+
+ )} + +
+ Uploaded +

+ {new Date(selectedImage.uploaded_at).toLocaleString()} +

+
+
+ + {/* Rejection/Classification Reason */} + {selectedImage.rejection_reason && ( +
+
+ + + + Classification Issue +
+

+ {selectedImage.rejection_reason} +

+
+ )} + + {/* Status-specific messages */} + {selectedImage.status === 'unmatched' && !selectedImage.rejection_reason && ( +
+
+ + + + No Task Match +
+

+ The image coordinates do not fall within any task boundary in this project. +

+
+ )} + + {selectedImage.status === 'invalid_exif' && !selectedImage.rejection_reason && ( +
+
+ + + + Missing GPS Data +
+

+ The image does not contain valid GPS coordinates in its EXIF metadata. +

+
+ )} + + {selectedImage.status === 'duplicate' && !selectedImage.rejection_reason && ( +
+
+ + + + + Duplicate Image +
+

+ This image has already been uploaded to the project. +

+
+ )} + + {selectedImage.status === 'assigned' && ( +
+
+ + + + Successfully Assigned +
+

+ This image has been matched to a task and is ready for processing. +

+
+ )} + + {/* Mark as Good button for rejected/invalid images */} + {(selectedImage.status === 'rejected' || selectedImage.status === 'invalid_exif') && ( +
+ +

+ If this image is valid and should be included, click to override the rejection. +

+
+ )} +
+
+
+
+ )}
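For reference, the row-based virtualizer configured earlier in this component typically renders along these lines. This is a generic sketch of the @tanstack/react-virtual pattern, not the exact markup of this PR; the container height, column count, row height, and tile rendering are illustrative:

```tsx
// Generic virtualized-grid sketch: the outer div scrolls, the inner div takes
// the virtualizer's total height, and each virtual row is absolutely
// positioned with translateY(virtualRow.start).
import { useMemo, useRef } from 'react';
import { useVirtualizer } from '@tanstack/react-virtual';

const COLUMNS = 8;      // assumed column count (tiles are w-[calc(12.5%-10px)])
const ROW_HEIGHT = 120; // assumed row height in px

function VirtualGrid<T extends { id: string }>({
  items,
  renderItem,
}: {
  items: T[];
  renderItem: (item: T) => JSX.Element;
}) {
  const parentRef = useRef<HTMLDivElement>(null);

  // Split the flat list into rows of COLUMNS items, as done above
  const rows = useMemo(() => {
    const out: T[][] = [];
    for (let i = 0; i < items.length; i += COLUMNS) out.push(items.slice(i, i + COLUMNS));
    return out;
  }, [items]);

  const rowVirtualizer = useVirtualizer({
    count: rows.length,
    getScrollElement: () => parentRef.current,
    estimateSize: () => ROW_HEIGHT,
    overscan: 3,
  });

  return (
    <div ref={parentRef} style={{ height: 600, overflow: 'auto' }}>
      <div style={{ height: rowVirtualizer.getTotalSize(), position: 'relative' }}>
        {rowVirtualizer.getVirtualItems().map((virtualRow) => (
          <div
            key={virtualRow.index}
            style={{
              position: 'absolute',
              top: 0,
              left: 0,
              width: '100%',
              height: virtualRow.size,
              display: 'flex',
              gap: 8,
              transform: `translateY(${virtualRow.start}px)`,
            }}
          >
            {rows[virtualRow.index].map((item) => (
              <div key={item.id}>{renderItem(item)}</div>
            ))}
          </div>
        ))}
      </div>
    </div>
  );
}
```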
); }; diff --git a/src/frontend/src/components/DroneOperatorTask/DescriptionSection/DroneImageProcessingWorkflow/ImageReview.tsx b/src/frontend/src/components/DroneOperatorTask/DescriptionSection/DroneImageProcessingWorkflow/ImageReview.tsx index 633ce006b..7588b52b5 100644 --- a/src/frontend/src/components/DroneOperatorTask/DescriptionSection/DroneImageProcessingWorkflow/ImageReview.tsx +++ b/src/frontend/src/components/DroneOperatorTask/DescriptionSection/DroneImageProcessingWorkflow/ImageReview.tsx @@ -1,13 +1,405 @@ -const ImageReview = () => { +import { useState, useEffect, useCallback, useRef } from 'react'; +import { useQuery, useMutation, useQueryClient } from '@tanstack/react-query'; +import { Map as MapLibreMap, NavigationControl, AttributionControl } from 'maplibre-gl'; +import { getBatchReview, getBatchMapData, acceptImage, BatchReviewData, BatchMapData, TaskGroup, TaskGroupImage } from '@Services/classification'; +import { FlexColumn, FlexRow } from '@Components/common/Layouts'; +import Accordion from '@Components/common/Accordion'; +import { Button } from '@Components/RadixComponents/Button'; +import { toast } from 'react-toastify'; +import MapContainer from '@Components/common/MapLibreComponents/MapContainer'; +import VectorLayer from '@Components/common/MapLibreComponents/Layers/VectorLayer'; +import BaseLayerSwitcherUI from '@Components/common/BaseLayerSwitcher'; +import { GeojsonType } from '@Components/common/MapLibreComponents/types'; +import AsyncPopup from '@Components/common/MapLibreComponents/NewAsyncPopup'; + +interface ImageReviewProps { + projectId: string; + batchId: string; +} + +const ImageReview = ({ projectId, batchId }: ImageReviewProps) => { + const queryClient = useQueryClient(); + const [selectedImage, setSelectedImage] = useState<{ + id: string; + url: string; + filename: string; + status: string; + rejection_reason?: string; + } | null>(null); + const [popupData, setPopupData] = useState>(); + const [map, setMap] = useState(null); + const [isMapLoaded, setIsMapLoaded] = useState(false); + const mapContainerRef = useRef(null); + + const { data: reviewData, isLoading, error } = useQuery({ + queryKey: ['batchReview', projectId, batchId], + queryFn: () => getBatchReview(projectId, batchId), + enabled: !!projectId && !!batchId, + }); + + const { data: mapData } = useQuery({ + queryKey: ['batchMapData', projectId, batchId], + queryFn: () => getBatchMapData(projectId, batchId), + enabled: !!projectId && !!batchId, + }); + + // Initialize map only after container is mounted + useEffect(() => { + if (!mapContainerRef.current || map) return; + + const mapInstance = new MapLibreMap({ + container: mapContainerRef.current, + style: { version: 8, sources: {}, layers: [] }, + center: [0, 0], + zoom: 2, + maxZoom: 22, + attributionControl: false, + renderWorldCopies: false, + }); + + mapInstance.on('load', () => { + setIsMapLoaded(true); + }); + + // Disable rotation + mapInstance.dragRotate.disable(); + mapInstance.touchZoomRotate.disableRotation(); + + setMap(mapInstance); + + return () => { + mapInstance.remove(); + }; + }, [reviewData]); // Only run when reviewData is available (meaning component is rendered) + + // Add map controls when loaded + useEffect(() => { + if (isMapLoaded && map) { + map.addControl(new NavigationControl(), 'top-right'); + map.addControl( + new AttributionControl({ + compact: true, + }), + 'bottom-right', + ); + } + }, [isMapLoaded, map]); + + const acceptMutation = useMutation({ + mutationFn: (imageId: string) => acceptImage(projectId, 
imageId), + onSuccess: (data) => { + if (data.status === 'unmatched') { + toast.warning(data.message); + } else { + toast.success('Image accepted successfully'); + } + queryClient.invalidateQueries({ queryKey: ['batchReview', projectId, batchId] }); + queryClient.invalidateQueries({ queryKey: ['batchMapData', projectId, batchId] }); + setSelectedImage(null); + }, + onError: (error: any) => { + const message = error?.response?.data?.detail || error.message || 'Failed to accept image'; + toast.error(message); + }, + }); + + const handleImageClick = (image: TaskGroupImage) => { + setSelectedImage({ + id: image.id, + url: image.url || image.thumbnail_url || '', + filename: image.filename, + status: image.status, + rejection_reason: image.rejection_reason, + }); + }; + + const closeModal = () => { + setSelectedImage(null); + }; + + const handleAcceptImage = () => { + if (selectedImage) { + acceptMutation.mutate(selectedImage.id); + } + }; + + const getPopupUI = useCallback(() => { + return ( +
+

{popupData?.filename || 'Unknown'}

+

Status: {popupData?.status?.replace('_', ' ') || 'Unknown'}

+
+ ); + }, [popupData]); + + if (isLoading) { + return ( +
+

Loading review data...

+
+ ); + } + + if (error) { + return ( +
+

+ Error loading review data: {error instanceof Error ? error.message : 'Unknown error'} +

+
+ ); + } + + if (!reviewData || reviewData.task_groups.length === 0) { + return ( +
+

+ No classified images available for review. +

+
+ ); + } + + const isRejectedImage = selectedImage && (selectedImage.status === 'rejected' || selectedImage.status === 'invalid_exif'); + return ( -
-

- Review the classified images and EXIF data before processing. -

-
-

Review interface will go here

-
-
+ + {/* Map and List Split View */} +
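The map panel below is fed by the `getBatchMapData` service added at the end of this diff. An illustrative `BatchMapData` payload is sketched here; the coordinates, ids, and task-feature properties are assumed values, and only `filename` and `status` on the image points are actually read by the map popup in this component:

```ts
import type { BatchMapData } from '@Services/classification';

// Illustrative payload; geometry and property values are made up.
const sampleMapData: BatchMapData = {
  batch_id: 'f47ac10b-58cc-4372-a567-0e02b2c3d479',
  total_tasks: 1,
  total_images: 1,
  tasks: {
    type: 'FeatureCollection',
    features: [
      {
        type: 'Feature',
        geometry: {
          type: 'Polygon',
          coordinates: [
            [
              [85.31, 27.7],
              [85.32, 27.7],
              [85.32, 27.71],
              [85.31, 27.71],
              [85.31, 27.7],
            ],
          ],
        },
        // properties are assumed; the task layers only need the geometry
        properties: { task_id: 'task-uuid-1' },
      },
    ],
  },
  images: {
    type: 'FeatureCollection',
    features: [
      {
        type: 'Feature',
        geometry: { type: 'Point', coordinates: [85.315, 27.705] },
        // filename and status are read by the image-point popup
        properties: { id: 'image-uuid-1', filename: 'DJI_0001.JPG', status: 'assigned' },
      },
    ],
  },
};
```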
+ {/* Map Section */} +
+ + + + {/* Task polygons */} + {map && isMapLoaded && mapData?.tasks && ( + + )} + + {/* Task polygon outlines for better visibility */} + {map && isMapLoaded && mapData?.tasks && ( + + )} + + {/* Image point markers */} + {map && isMapLoaded && mapData?.images && ( + + )} + + {/* Popup for image points */} + ) => + feature?.source === 'review-image-points' + } + popupUI={getPopupUI} + fetchPopupData={(properties: Record) => { + setPopupData(properties); + }} + title="Image Details" + hideButton + /> + + + {/* Legend */} +
+

Image Status

+
+
+
+ Assigned +
+
+
+ Rejected +
+
+
+ Unmatched +
+
+
+ Invalid EXIF +
+
+
+
+ + {/* List Section */} +
+ +

+ Review the classified images grouped by task. +

+ + + {reviewData.total_tasks} Tasks + + + {reviewData.total_images} Images + + +
+ + {/* Task Accordions */} +
+ {reviewData.task_groups.map((group: TaskGroup, index: number) => ( + +

+ {group.task_id ? `Task #${group.project_task_index}` : 'Rejected Images'} +

+ + {group.image_count} {group.image_count === 1 ? 'image' : 'images'} + + + } + > + {/* Image Grid - Only loaded when accordion is open */} +
+ {group.images.map((image) => ( +
handleImageClick(image)} + title={image.filename} + > + {image.filename} + {(image.status === 'rejected' || image.status === 'invalid_exif') && ( +
+ Rejected +
+ )} +
+
+ ))} +
+ + ))} +
+
+
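The grouped list above renders whatever `getBatchReview` returns. For reference, an illustrative `BatchReviewData` payload is sketched below; the ids, keys, URLs, and timestamps are made-up values, and the S3 key layout is an assumption:

```ts
import type { BatchReviewData } from '@Services/classification';

// Illustrative review payload: one task group plus the null-task group that
// collects rejected / invalid_exif images (shown as "Rejected Images" above).
const sampleReview: BatchReviewData = {
  batch_id: 'f47ac10b-58cc-4372-a567-0e02b2c3d479',
  total_tasks: 2,
  total_images: 2,
  task_groups: [
    {
      task_id: 'task-uuid-1',
      project_task_index: 3, // rendered as "Task #3"
      image_count: 1,
      images: [
        {
          id: 'image-uuid-1',
          filename: 'DJI_0001.JPG',
          s3_key: 'user-uploads/DJI_0001.JPG', // assumed key layout
          thumbnail_url: 'https://s3.example.com/thumbnails/DJI_0001.jpg',
          status: 'assigned',
          uploaded_at: '2024-01-01T10:00:00Z',
        },
      ],
    },
    {
      task_id: null,
      project_task_index: null,
      image_count: 1,
      images: [
        {
          id: 'image-uuid-2',
          filename: 'DJI_0002.JPG',
          s3_key: 'user-uploads/DJI_0002.JPG',
          status: 'invalid_exif',
          rejection_reason: 'No valid GPS coordinates in EXIF metadata',
          uploaded_at: '2024-01-01T10:00:05Z',
        },
      ],
    },
  ],
};
```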
+ + {/* Full Image Modal */} + {selectedImage && ( +
+
e.stopPropagation()}> + + {selectedImage.filename} +
+
+

{selectedImage.filename}

+ {isRejectedImage && selectedImage.rejection_reason && ( +

+ Reason: {selectedImage.rejection_reason} +

+ )} +
+ {isRejectedImage && ( + + )} +
+
+
+ )} + ); }; diff --git a/src/frontend/src/components/DroneOperatorTask/DescriptionSection/DroneImageProcessingWorkflow/ImageUpload.tsx b/src/frontend/src/components/DroneOperatorTask/DescriptionSection/DroneImageProcessingWorkflow/ImageUpload.tsx index 6f2b374e5..67c92a939 100644 --- a/src/frontend/src/components/DroneOperatorTask/DescriptionSection/DroneImageProcessingWorkflow/ImageUpload.tsx +++ b/src/frontend/src/components/DroneOperatorTask/DescriptionSection/DroneImageProcessingWorkflow/ImageUpload.tsx @@ -2,12 +2,12 @@ import UppyFileUploader from '../UppyFileUploader'; interface ImageUploadProps { projectId: string; - onUploadComplete?: (result: any) => void; + onUploadComplete?: (result: any, batchId?: string) => void; } const ImageUpload = ({ projectId, onUploadComplete }: ImageUploadProps) => { return ( -
+
{projectId ? ( { staging={true} /> ) : ( -
+

Project ID not found

)} diff --git a/src/frontend/src/components/DroneOperatorTask/DescriptionSection/DroneImageProcessingWorkflow/index.tsx b/src/frontend/src/components/DroneOperatorTask/DescriptionSection/DroneImageProcessingWorkflow/index.tsx index 95197c0fa..4944c3482 100644 --- a/src/frontend/src/components/DroneOperatorTask/DescriptionSection/DroneImageProcessingWorkflow/index.tsx +++ b/src/frontend/src/components/DroneOperatorTask/DescriptionSection/DroneImageProcessingWorkflow/index.tsx @@ -1,7 +1,17 @@ -import { useState } from 'react'; +import { useEffect, useCallback, useState } from 'react'; +import { useMutation } from '@tanstack/react-query'; +import { toast } from 'react-toastify'; +import { useTypedDispatch, useTypedSelector } from '@Store/hooks'; +import { + setCurrentStep, + setBatchId, + setProjectId, + resetWorkflow, +} from '@Store/slices/imageProcessingWorkflow'; import Modal from '@Components/common/Modal'; import { Button } from '@Components/RadixComponents/Button'; import StepSwitcher from '@Components/common/StepSwitcher'; +import { deleteBatch } from '@Services/classification'; import ImageUpload from './ImageUpload'; import ImageClassification from './ImageClassification'; import ImageReview from './ImageReview'; @@ -18,7 +28,39 @@ const DroneImageProcessingWorkflow = ({ onClose, projectId, }: IDroneImageProcessingWorkflowProps) => { - const [currentStep, setCurrentStep] = useState(1); + const dispatch = useTypedDispatch(); + const { currentStep, batchId, isClassifying } = useTypedSelector( + (state) => state.imageProcessingWorkflow + ); + const [showAbortConfirmation, setShowAbortConfirmation] = useState(false); + + // Mutation to delete batch + const deleteBatchMutation = useMutation({ + mutationFn: () => deleteBatch(projectId, batchId!), + onSuccess: () => { + toast.success('Batch deleted successfully'); + setShowAbortConfirmation(false); + dispatch(resetWorkflow()); + onClose(); + }, + onError: (error: Error) => { + toast.error(error.message || 'Failed to delete batch'); + }, + }); + + // Set project ID when component mounts or projectId changes + useEffect(() => { + if (projectId) { + dispatch(setProjectId(projectId)); + } + }, [projectId, dispatch]); + + // Reset abort confirmation state when modal opens + useEffect(() => { + if (isOpen) { + setShowAbortConfirmation(false); + } + }, [isOpen]); const steps = [ { url: '', step: 1, label: '01', name: 'Image Upload', title: 'Upload' }, @@ -29,29 +71,92 @@ const DroneImageProcessingWorkflow = ({ const handleNext = () => { if (currentStep < steps.length) { - setCurrentStep(currentStep + 1); + dispatch(setCurrentStep(currentStep + 1)); } }; const handlePrevious = () => { if (currentStep > 1) { - setCurrentStep(currentStep - 1); + dispatch(setCurrentStep(currentStep - 1)); } }; const handleClose = () => { - setCurrentStep(1); + // If there's a batch in progress (after upload step), show confirmation + if (batchId && currentStep > 1) { + setShowAbortConfirmation(true); + return; + } + dispatch(resetWorkflow()); onClose(); }; + const handleConfirmAbort = () => { + if (batchId) { + deleteBatchMutation.mutate(); + } else { + dispatch(resetWorkflow()); + onClose(); + } + }; + + const handleCancelAbort = () => { + setShowAbortConfirmation(false); + }; + + // Handle upload complete - store batch ID and move to classification + const handleUploadComplete = useCallback((result: any, uploadedBatchId?: string) => { + if (uploadedBatchId) { + dispatch(setBatchId(uploadedBatchId)); + // Automatically move to classification step + 
dispatch(setCurrentStep(2)); + } + }, [dispatch]); + + // Returns true when the Next button should be disabled for the current step + const handleNextButton = () => { + // Disable Next button on step 1 if no batch ID + if (currentStep === 1 && !batchId) { + return true; + } + // Disable Next button if currently classifying + if (isClassifying) { + return true; + } + return false; + }; + const renderStepContent = () => { switch (currentStep) { case 1: - return ; + return ( + ); case 2: - return ; + return batchId ? ( + ) : ( +
+ No batch ID available. Please upload images first. +
+ ); case 3: - return ; + return batchId ? ( + + ) : ( +
+ No batch ID available. Please complete classification first. +
+ ); case 4: return ; default: @@ -60,6 +165,7 @@ const DroneImageProcessingWorkflow = ({ }; return ( + <> - {/* Step Title */} -
-

- {steps[currentStep - 1].title} -

-
- {/* Content */}
{renderStepContent()} @@ -84,15 +183,27 @@ const DroneImageProcessingWorkflow = ({ {/* Footer */}
- +
+ + {batchId && currentStep > 1 && ( + + )} +
+
+ + {/* Abort Confirmation Dialog */} + {showAbortConfirmation && ( +
+
+
+ warning +

+ Abort Process? +

+
+

+ Are you sure you want to abort? This will permanently delete all images + in this batch from both the database and storage. This action cannot be undone. +

+
+ + +
+
+
+ )} + ); }; diff --git a/src/frontend/src/components/DroneOperatorTask/DescriptionSection/UppyFileUploader/index.tsx b/src/frontend/src/components/DroneOperatorTask/DescriptionSection/UppyFileUploader/index.tsx index a61f5ad59..34d283832 100644 --- a/src/frontend/src/components/DroneOperatorTask/DescriptionSection/UppyFileUploader/index.tsx +++ b/src/frontend/src/components/DroneOperatorTask/DescriptionSection/UppyFileUploader/index.tsx @@ -1,12 +1,10 @@ -import { useEffect, useCallback, useContext } from 'react'; +import { useEffect, useCallback, useContext, useRef } from 'react'; import AwsS3 from '@uppy/aws-s3'; import { Dashboard } from '@uppy/react'; import { UppyContext } from '@uppy/react'; import { toast } from 'react-toastify'; import { authenticated, api } from '@Services/index'; import { useTypedDispatch } from '@Store/hooks'; -import { setFilesExifData } from '@Store/actions/droneOperatorTask'; -import getExifData from '@Utils/getExifData'; import '@uppy/core/css/style.min.css'; import '@uppy/dashboard/css/style.min.css'; @@ -16,7 +14,7 @@ interface UppyFileUploaderProps { projectId: string; taskId?: string; label?: string; - onUploadComplete?: (result: any) => void; + onUploadComplete?: (result: any, batchId?: string) => void; allowedFileTypes?: string[]; note?: string; staging?: boolean; // If true, uploads to user-uploads staging directory @@ -44,6 +42,10 @@ const UppyFileUploader = ({ staging = false, }: UppyFileUploaderProps) => { const dispatch = useTypedDispatch(); + // Generate a batch ID when upload starts (for staging uploads only) + const batchIdRef = useRef(null); + // Track if we've shown the success notification to prevent duplicates + const notificationShownRef = useRef(false); // Get the shared Uppy instance from context const { uppy } = useContext(UppyContext); @@ -62,10 +64,13 @@ const UppyFileUploader = ({ }, }); - // Check if AwsS3 plugin is already added, if not add it + // Remove existing AwsS3 plugin and re-add with fresh configuration const pluginId = 'AwsS3'; - if (!uppy.getPlugin(pluginId)) { - uppy.use(AwsS3, { + const existingPlugin = uppy.getPlugin(pluginId); + if (existingPlugin) { + uppy.removePlugin(existingPlugin); + } + uppy.use(AwsS3, { id: pluginId, limit: 4, // Upload 4 parts simultaneously retryDelays: [0, 1000, 3000, 5000], @@ -131,15 +136,22 @@ const UppyFileUploader = ({ }, completeMultipartUpload: async (file, data) => { try { + const requestBody: any = { + upload_id: data.uploadId, + file_key: data.key, + parts: data.parts, + project_id: projectId, + filename: file.name, + }; + + // Include batch_id for staging uploads + if (staging && batchIdRef.current) { + requestBody.batch_id = batchIdRef.current; + } + await authenticated(api).post( '/projects/complete-multipart-upload/', - { - upload_id: data.uploadId, - file_key: data.key, - parts: data.parts, - project_id: projectId, - filename: file.name, - }, + requestBody, { headers: { 'Content-Type': 'application/json', @@ -186,7 +198,6 @@ const UppyFileUploader = ({ } }, }); - } // Cleanup function return () => { @@ -195,59 +206,61 @@ const UppyFileUploader = ({ }; }, [uppy, projectId, taskId, allowedFileTypes, staging]); - // Extract EXIF data when files are added - const handleFilesAdded = useCallback( - async (addedFiles: any[]) => { - try { - // Extract EXIF data from image files - const imageFiles = addedFiles.filter(file => - file.type?.startsWith('image/'), - ); - if (imageFiles.length > 0) { - const exifDataPromises = imageFiles.map(async file => { - const exifData = await 
getExifData(file.data); - return exifData; - }); + useEffect(() => { + // Reset batch ID when component mounts to ensure fresh state + batchIdRef.current = null; + notificationShownRef.current = false; - const exifData = await Promise.all(exifDataPromises); - dispatch(setFilesExifData(exifData)); - } - } catch (error) { - console.error('Error extracting EXIF data:', error); - toast.error('Error reading file metadata'); + // Generate batch ID when upload starts (for staging uploads only) + const handleUpload = () => { + if (staging && !batchIdRef.current) { + // Generate a UUID v4 for the batch + batchIdRef.current = crypto.randomUUID(); + console.log('Generated batch ID:', batchIdRef.current); } - }, - [dispatch], - ); - - useEffect(() => { - uppy.on('files-added', handleFilesAdded); + // Reset notification flag when new upload starts + notificationShownRef.current = false; + }; - uppy.on('upload-error', (file, error) => { + const handleUploadError = (file: any, error: Error) => { toast.error(`Upload failed for ${file?.name}: ${error.message}`); - }); + }; - uppy.on('complete', result => { + const handleComplete = (result: any) => { const successfulUploads = result.successful?.length || 0; const failedUploads = result.failed?.length || 0; - if (successfulUploads > 0) { + // Only show notification once per upload batch + if (successfulUploads > 0 && !notificationShownRef.current) { toast.success(`${successfulUploads} file(s) uploaded successfully`); + notificationShownRef.current = true; + if (onUploadComplete) { - onUploadComplete(result); + onUploadComplete(result, staging ? batchIdRef.current || undefined : undefined); + } + + // Reset batch ID after successful upload + if (staging) { + batchIdRef.current = null; } } if (failedUploads > 0) { toast.error(`${failedUploads} file(s) failed to upload`); } - }); + }; + + uppy.on('upload', handleUpload); + uppy.on('upload-error', handleUploadError); + uppy.on('complete', handleComplete); return () => { - // Event listeners are automatically cleaned up when component unmounts + uppy.off('upload', handleUpload); + uppy.off('upload-error', handleUploadError); + uppy.off('complete', handleComplete); }; - }, [uppy, handleFilesAdded, dispatch, onUploadComplete]); + }, [uppy, dispatch, onUploadComplete, staging]); return (
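For staging uploads, the body sent to `/projects/complete-multipart-upload/` now carries the client-generated batch id alongside the multipart bookkeeping. An example payload for a single finished file is sketched below; the values and the exact staging key layout are illustrative:

```ts
// Example completeMultipartUpload request body for a staging upload.
const requestBody = {
  upload_id: 'aws-multipart-upload-id',
  file_key: 'user-uploads/DJI_0001.JPG', // staging directory per the `staging` prop; layout assumed
  parts: [
    { ETag: '"9b2cf535f27731c974343645a3985328"', PartNumber: 1 },
    { ETag: '"6f5902ac237024bdd0c176cb93063dc4"', PartNumber: 2 },
  ],
  project_id: 'project-uuid',
  filename: 'DJI_0001.JPG',
  // generated once per upload run via crypto.randomUUID() on the 'upload' event
  batch_id: 'f47ac10b-58cc-4372-a567-0e02b2c3d479',
};
```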
diff --git a/src/frontend/src/components/common/Accordion/index.tsx b/src/frontend/src/components/common/Accordion/index.tsx index 0ef572290..c7ea20482 100644 --- a/src/frontend/src/components/common/Accordion/index.tsx +++ b/src/frontend/src/components/common/Accordion/index.tsx @@ -1,26 +1,49 @@ -import { useState } from 'react'; +import { useState, ReactNode } from 'react'; import { FlexRow } from '../Layouts'; import Icon from '../Icon'; interface IAccordionProps { - open: boolean; - title: string; - description: string; + open?: boolean; + title: string | ReactNode; + description?: string; + children?: ReactNode; + className?: string; + headerClassName?: string; + contentClassName?: string; + onToggle?: (isOpen: boolean) => void; } export default function Accordion({ open = false, title, description, + children, + className = '', + headerClassName = '', + contentClassName = '', + onToggle, }: IAccordionProps) { const [isOpen, setIsOpen] = useState(open); + + const handleToggle = () => { + const newState = !isOpen; + setIsOpen(newState); + onToggle?.(newState); + }; + return ( -
- -

- {title} -

-
{isOpen && ( -

- {description} -

+
+ {children || ( +

+ {description} +

+ )} +
)}
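Since `Accordion` now accepts a ReactNode title, arbitrary children, and an onToggle callback, consumers such as ImageReview can render their own content inside it. A minimal usage sketch (labels and class names are illustrative) is:

```tsx
import Accordion from '@Components/common/Accordion';

const ExampleTaskAccordion = () => (
  <Accordion
    title={<span className="naxatw-font-semibold">Task #3 · 12 images</span>}
    onToggle={(isOpen) => console.log('accordion open:', isOpen)}
  >
    <div className="naxatw-grid naxatw-grid-cols-6 naxatw-gap-2">
      {/* thumbnails are rendered only while the accordion is open */}
    </div>
  </Accordion>
);
```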
); diff --git a/src/frontend/src/services/classification.ts b/src/frontend/src/services/classification.ts new file mode 100644 index 000000000..79a3e3d2f --- /dev/null +++ b/src/frontend/src/services/classification.ts @@ -0,0 +1,159 @@ +import { authenticated, api } from './index'; + +export interface ImageClassificationResult { + id: string; + filename: string; + status: 'staged' | 'uploaded' | 'classifying' | 'assigned' | 'rejected' | 'unmatched' | 'invalid_exif' | 'duplicate'; + task_id?: string; + rejection_reason?: string; + has_gps: boolean; + s3_key: string; + url?: string; + thumbnail_url?: string; // 200x200 thumbnail for grid display + uploaded_at: string; +} + +export interface BatchStatusSummary { + total: number; + staged: number; + uploaded: number; + classifying: number; + assigned: number; + rejected: number; + unmatched: number; + invalid_exif: number; + duplicate: number; +} + +export interface TaskGroupImage { + id: string; + filename: string; + s3_key: string; + thumbnail_url?: string; + url?: string; + status: 'assigned' | 'rejected' | 'invalid_exif'; + rejection_reason?: string; + uploaded_at: string; +} + +export interface TaskGroup { + task_id: string | null; + project_task_index: number | null; + image_count: number; + images: TaskGroupImage[]; +} + +export interface BatchReviewData { + batch_id: string; + task_groups: TaskGroup[]; + total_tasks: number; + total_images: number; +} + +/** + * Start classification job for a batch of uploaded images + */ +export const startClassification = async ( + projectId: string, + batchId: string, +): Promise<{ job_id: string; message: string }> => { + const response = await authenticated(api).post( + `/projects/${projectId}/classify-batch/?batch_id=${batchId}`, + {}, + { + headers: { + 'Content-Type': 'application/json', + }, + }, + ); + return response.data; +}; + +/** + * Get batch status summary (counts by status) + */ +export const getBatchStatus = async ( + projectId: string, + batchId: string, +): Promise => { + const response = await authenticated(api).get( + `/projects/${projectId}/batch/${batchId}/status/`, + ); + return response.data; +}; + +/** + * Poll for image updates in a batch (incremental updates) + */ +export const getBatchImages = async ( + projectId: string, + batchId: string, + since?: string, +): Promise => { + const params = since ? 
{ last_timestamp: since } : {}; + const response = await authenticated(api).get( + `/projects/${projectId}/batch/${batchId}/images/`, + { params }, + ); + return response.data.images || response.data; +}; + +/** + * Get batch review data grouped by tasks + */ +export const getBatchReview = async ( + projectId: string, + batchId: string, +): Promise => { + const response = await authenticated(api).get( + `/projects/${projectId}/batch/${batchId}/review/`, + ); + return response.data; +}; + +/** + * Accept a rejected image - assigns to task if within boundary, otherwise marks as unmatched + */ +export const acceptImage = async ( + projectId: string, + imageId: string, +): Promise<{ message: string; image_id: string; status: string; task_id: string | null }> => { + const response = await authenticated(api).post( + `/projects/${projectId}/images/${imageId}/accept/`, + ); + return response.data; +}; + +/** + * Delete a batch and all its images from database and S3 storage + */ +export const deleteBatch = async ( + projectId: string, + batchId: string, +): Promise<{ message: string; batch_id: string; job_id: string }> => { + const response = await authenticated(api).delete( + `/projects/${projectId}/batch/${batchId}/`, + ); + return response.data; +}; + +export interface BatchMapData { + batch_id: string; + tasks: GeoJSON.FeatureCollection; + images: GeoJSON.FeatureCollection; + total_tasks: number; + total_images: number; +} + +/** + * Get map data for batch review (task geometries and image point locations) + */ +export const getBatchMapData = async ( + projectId: string, + batchId: string, +): Promise => { + const response = await authenticated(api).get( + `/projects/${projectId}/batch/${batchId}/map-data/`, + ); + return response.data; +}; diff --git a/src/frontend/src/store/reducers/index.ts b/src/frontend/src/store/reducers/index.ts index 7e1ad6e56..fe2467d10 100644 --- a/src/frontend/src/store/reducers/index.ts +++ b/src/frontend/src/store/reducers/index.ts @@ -1,6 +1,7 @@ import { combineReducers } from '@reduxjs/toolkit'; import createproject from '@Store/slices/createproject'; import droneOperatorTask from '@Store/slices/droneOperartorTask'; +import imageProcessingWorkflow from '@Store/slices/imageProcessingWorkflow'; import common from '../slices/common'; import loader from '../slices/loader'; import project from '../slices/project'; @@ -11,6 +12,7 @@ const rootReducer = combineReducers({ createproject, project, droneOperatorTask, + imageProcessingWorkflow, }); export default rootReducer; diff --git a/src/frontend/src/store/slices/imageProcessingWorkflow.ts b/src/frontend/src/store/slices/imageProcessingWorkflow.ts new file mode 100644 index 000000000..0a67d8e07 --- /dev/null +++ b/src/frontend/src/store/slices/imageProcessingWorkflow.ts @@ -0,0 +1,66 @@ +/* eslint-disable no-param-reassign */ +import { createSlice, PayloadAction } from '@reduxjs/toolkit'; + +export interface IImageProcessingWorkflowState { + currentStep: number; + batchId: string | null; + projectId: string | null; + isClassifying: boolean; + jobId: string | null; +} + +const initialState: IImageProcessingWorkflowState = { + currentStep: 1, + batchId: null, + projectId: null, + isClassifying: false, + jobId: null, +}; + +export const imageProcessingWorkflowSlice = createSlice({ + name: 'imageProcessingWorkflow', + initialState, + reducers: { + setCurrentStep: (state, action: PayloadAction) => { + state.currentStep = action.payload; + }, + setBatchId: (state, action: PayloadAction) => { + state.batchId = 
action.payload; + }, + setProjectId: (state, action: PayloadAction) => { + state.projectId = action.payload; + }, + setIsClassifying: (state, action: PayloadAction) => { + state.isClassifying = action.payload; + }, + setJobId: (state, action: PayloadAction) => { + state.jobId = action.payload; + }, + startClassification: (state, action: PayloadAction) => { + state.isClassifying = true; + state.jobId = action.payload; + }, + completeClassification: (state) => { + state.isClassifying = false; + }, + resetWorkflow: (state) => { + state.currentStep = 1; + state.batchId = null; + state.isClassifying = false; + state.jobId = null; + }, + }, +}); + +export const { + setCurrentStep, + setBatchId, + setProjectId, + setIsClassifying, + setJobId, + startClassification, + completeClassification, + resetWorkflow, +} = imageProcessingWorkflowSlice.actions; + +export default imageProcessingWorkflowSlice.reducer;
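Taken together, the slice models the whole modal lifecycle. A sketch of the intended dispatch sequence follows; the actual dispatch sites live in the workflow components above, and the `Dispatch` typing and ids here are only for the example:

```ts
import type { Dispatch } from '@reduxjs/toolkit';
import {
  setProjectId,
  setBatchId,
  setCurrentStep,
  startClassification,
  completeClassification,
  resetWorkflow,
} from '@Store/slices/imageProcessingWorkflow';

// Illustrative end-to-end sequence; ids are made-up values.
export const exampleWorkflowLifecycle = (dispatch: Dispatch, projectId: string) => {
  dispatch(setProjectId(projectId));            // modal opened for a project
  dispatch(setBatchId(crypto.randomUUID()));    // uploader reports its batch id
  dispatch(setCurrentStep(2));                  // jump to the classification step
  dispatch(startClassification('arq-job-id'));  // backend job queued; isClassifying = true
  dispatch(completeClassification());           // polling saw every image classified
  dispatch(resetWorkflow());                    // modal closed or batch aborted
};
```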