add operation ids to all endpoints, for cleaner generated ts client
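
Context: when no operation_id is given, FastAPI auto-derives the OpenAPI operationId from the handler name, route path, and HTTP method, which is why the generated TS client previously exposed names like listHostDirectoriesSystemLsGet. A minimal sketch of the pattern this commit applies everywhere (standalone app and route shown purely for illustration):

    from fastapi import FastAPI

    app = FastAPI()

    # With an explicit operation_id, the generated TS client exposes
    # listDirectories() instead of the auto-derived
    # listHostDirectoriesSystemLsGet().
    @app.get("/system/ls", operation_id="list_directories")
    def list_directories(path: str = "/"):
        return {"path": path}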
Continuous Integration / backend-tests (push) Successful in 27s
Continuous Integration / frontend-check (push) Successful in 15s
Continuous Integration / e2e-tests (push) Failing after 23m48s

2026-05-04 18:07:18 -04:00
parent 9064d3b7ea
commit 89de081a6c
22 changed files with 562 additions and 524 deletions
+5 -5
@@ -43,7 +43,7 @@ class JobSchema(BaseModel):
 # --- Endpoints ---
 
 
-@router.post("/trigger/auto")
+@router.post("/trigger/auto", operation_id="trigger_auto_backup")
 def trigger_auto_backup(
     background_tasks: BackgroundTasks,
     db_session: Session = Depends(get_db),
@@ -95,8 +95,8 @@ def trigger_auto_backup(
     }
 
 
-@router.post("/trigger/{media_id}")
-def trigger_backup_job(
+@router.post("/trigger/{media_id}", operation_id="trigger_backup")
+def trigger_backup(
     media_id: int,
     background_tasks: BackgroundTasks,
     db_session: Session = Depends(get_db),
@@ -132,8 +132,8 @@ def trigger_backup_job(
     }
 
 
-@router.get("/", response_model=List[JobSchema])
-def list_archival_history(db_session: Session = Depends(get_db)):
+@router.get("/", response_model=List[JobSchema], operation_id="list_backups")
+def list_backups(db_session: Session = Depends(get_db)):
     """Retrieves a history of archival jobs, sorted by most recent."""
     # Note: Using the generic Job model for consistency across the UI
     return (
+26 -20
@@ -31,8 +31,12 @@ class ReorderMediaRequest(BaseModel):
 # --- Core Logic ---
 
 
-@router.get("/providers", response_model=List[StorageProviderSchema])
-def list_storage_providers():
+@router.get(
+    "/providers",
+    response_model=List[StorageProviderSchema],
+    operation_id="list_providers",
+)
+def list_providers():
     """Returns a registry of all available storage providers and their configurations."""
     from app.providers.cloud import CloudStorageProvider
     from app.providers.hdd import OfflineHDDProvider
@@ -58,8 +62,8 @@ def list_storage_providers():
     ]
 
 
-@router.get("/media", response_model=List[MediaSchema])
-def list_storage_fleet(refresh: bool = False, db_session: Session = Depends(get_db)):
+@router.get("/media", response_model=List[MediaSchema], operation_id="list_media")
+def list_media(refresh: bool = False, db_session: Session = Depends(get_db)):
     """Returns all registered media assets with real-time hardware status."""
     from app.services.archiver import archiver_manager
@@ -144,8 +148,8 @@ def list_storage_fleet(refresh: bool = False, db_session: Session = Depends(get_db)):
     return results
 
 
-@router.post("/media/reorder")
-def reorder_archival_priority(
+@router.post("/media/reorder", operation_id="reorder_media")
+def reorder_media(
     request_data: ReorderMediaRequest, db_session: Session = Depends(get_db)
 ):
     """Updates the global archival priority order for the media fleet."""
@@ -158,8 +162,8 @@ def reorder_archival_priority(
     return {"message": "Archival priority synchronized."}
 
 
-@router.post("/media", response_model=MediaSchema)
-def register_new_media(
+@router.post("/media", response_model=MediaSchema, operation_id="create_media")
+def create_media(
     request_data: MediaCreateSchema, db_session: Session = Depends(get_db)
 ):
     """Adds a new physical storage medium to the inventory."""
@@ -197,8 +201,10 @@ def register_new_media(
     )
 
 
-@router.patch("/media/{media_id}", response_model=MediaSchema)
-def update_media_asset(
+@router.patch(
+    "/media/{media_id}", response_model=MediaSchema, operation_id="update_media"
+)
+def update_media(
     media_id: int,
     request_data: MediaUpdateSchema,
     db_session: Session = Depends(get_db),
@@ -252,8 +258,8 @@ def update_media_asset(
     )
 
 
-@router.delete("/media/{media_id}")
-def delete_media_asset(media_id: int, db_session: Session = Depends(get_db)):
+@router.delete("/media/{media_id}", operation_id="delete_media")
+def delete_media(media_id: int, db_session: Session = Depends(get_db)):
     """Removes a media asset and all associated version history from the index."""
     media_record = db_session.get(models.StorageMedia, media_id)
     if not media_record:
@@ -269,8 +275,8 @@ def delete_media_asset(media_id: int, db_session: Session = Depends(get_db)):
     return {"message": "Media and associated history successfully purged."}
 
 
-@router.post("/media/{media_id}/initialize")
-def initialize_storage_hardware(
+@router.post("/media/{media_id}/initialize", operation_id="initialize_media")
+def initialize_media(
     media_id: int, force: bool = False, db_session: Session = Depends(get_db)
 ):
     """Prepares hardware for use by the system (wipes and labels media)."""
@@ -320,8 +326,8 @@ def initialize_storage_hardware(
 # --- Browsing & Analytics (Optimized) ---
 
 
-@router.get("/insights")
-def get_system_analytics(db_session: Session = Depends(get_db)):
+@router.get("/insights", operation_id="get_analytics")
+def get_analytics(db_session: Session = Depends(get_db)):
     """Computes high-signal system metrics with optimized single-pass queries."""
     # 1. Deduplication & Scale (Only counting unignored files)
@@ -546,8 +552,8 @@ def get_system_analytics(db_session: Session = Depends(get_db)):
     }
 
 
-@router.get("/directories")
-def get_directory_treemap(db_session: Session = Depends(get_db)):
+@router.get("/directories", operation_id="get_treemap")
+def get_treemap(db_session: Session = Depends(get_db)):
     """Returns directory tree data for treemap visualization."""
     # Directory aggregation - same as insights but only directories
     directory_aggregation_sql = text("""
@@ -621,8 +627,8 @@ def get_directory_treemap(db_session: Session = Depends(get_db)):
     return convert_tree_to_list(nested_dir_map, 10)
 
 
-@router.get("/detect")
-def detect_unregistered_media(db_session: Session = Depends(get_db)):
+@router.get("/detect", operation_id="detect_media")
+def detect_media(db_session: Session = Depends(get_db)):
     """Scans all configured hardware providers for newly inserted, unregistered media."""
     from app.services.archiver import archiver_manager
+34 -20
@@ -85,8 +85,10 @@ class BatchCartRequest(BaseModel):
 # --- Endpoints ---
 
 
-@router.get("/queue", response_model=List[CartItemSchema])
-def list_recovery_queue(db_session: Session = Depends(get_db)):
+@router.get(
+    "/queue", response_model=List[CartItemSchema], operation_id="get_restore_queue"
+)
+def get_restore_queue(db_session: Session = Depends(get_db)):
     """Returns all items currently queued for data recovery."""
     queue_items = (
         db_session.query(models.RestoreCart)
@@ -104,16 +106,16 @@ def list_recovery_queue(db_session: Session = Depends(get_db)):
     ]
 
 
-@router.post("/queue/clear")
-def clear_recovery_queue(db_session: Session = Depends(get_db)):
+@router.post("/queue/clear", operation_id="clear_restore_queue")
+def clear_restore_queue(db_session: Session = Depends(get_db)):
     """Removes all items from the data recovery queue."""
     db_session.query(models.RestoreCart).delete()
     db_session.commit()
     return {"message": "Recovery queue cleared."}
 
 
-@router.post("/queue/directory")
-def add_directory_to_recovery_queue(
+@router.post("/queue/directory", operation_id="add_directory_to_restore_queue")
+def add_directory_to_restore_queue(
     request_data: DirectoryCartRequest, db_session: Session = Depends(get_db)
 ):
     """Recursively adds all restorable files within a directory to the recovery queue."""
@@ -144,8 +146,8 @@ def add_directory_to_recovery_queue(
     return {"message": f"Added restorable items from {target_directory} to queue."}
 
 
-@router.post("/queue/file/{file_id}")
-def add_file_to_recovery_queue(file_id: int, db_session: Session = Depends(get_db)):
+@router.post("/queue/file/{file_id}", operation_id="add_file_to_restore_queue")
+def add_file_to_restore_queue(file_id: int, db_session: Session = Depends(get_db)):
     """Adds a specific file to the recovery queue if it has valid backups."""
     existing_item = (
         db_session.query(models.RestoreCart)
@@ -178,8 +180,8 @@ def add_file_to_recovery_queue(file_id: int, db_session: Session = Depends(get_db)):
     return {"message": "Added to recovery queue."}
 
 
-@router.post("/queue/batch")
-def batch_add_to_recovery_queue(
+@router.post("/queue/batch", operation_id="batch_add_to_restore_queue")
+def batch_add_to_restore_queue(
     request: BatchCartRequest, db_session: Session = Depends(get_db)
 ):
     """Adds multiple files to the recovery queue if they have valid backups."""
@@ -214,8 +216,8 @@ def batch_add_to_recovery_queue(
     }
 
 
-@router.delete("/queue/item/{item_id}")
-def remove_from_recovery_queue(item_id: int, db_session: Session = Depends(get_db)):
+@router.delete("/queue/item/{item_id}", operation_id="remove_from_restore_queue")
+def remove_from_restore_queue(item_id: int, db_session: Session = Depends(get_db)):
     """Removes a specific item from the data recovery queue."""
     queue_item = db_session.get(models.RestoreCart, item_id)
     if queue_item:
@@ -224,8 +226,12 @@ def remove_from_recovery_queue(item_id: int, db_session: Session = Depends(get_db)):
     return {"message": "Removed from recovery queue."}
 
 
-@router.get("/manifest", response_model=RestoreManifestSchema)
-def calculate_recovery_manifest(db_session: Session = Depends(get_db)):
+@router.get(
+    "/manifest",
+    response_model=RestoreManifestSchema,
+    operation_id="get_restore_manifest",
+)
+def get_restore_manifest(db_session: Session = Depends(get_db)):
     """Generates an optimized physical media manifest for the current recovery queue."""
     manifest_sql = text("""
         SELECT
@@ -264,8 +270,8 @@ def calculate_recovery_manifest(db_session: Session = Depends(get_db)):
     )
 
 
-@router.post("/trigger")
-def trigger_recovery_job(
+@router.post("/trigger", operation_id="trigger_restore")
+def trigger_restore(
     request_data: RestoreTriggerRequest,
     background_tasks: BackgroundTasks,
     db_session: Session = Depends(get_db),
@@ -315,8 +321,12 @@ def trigger_recovery_job(
     return {"message": "Recovery job initiated.", "job_id": job_record.id}
 
 
-@router.get("/queue/browse", response_model=List[CartFileItemSchema])
-def browse_recovery_queue_virtual_fs(
+@router.get(
+    "/queue/browse",
+    response_model=List[CartFileItemSchema],
+    operation_id="browse_restore_queue",
+)
+def browse_restore_queue(
     path: Optional[str] = None, db_session: Session = Depends(get_db)
 ):
     """Provides a virtual browsable view of the recovery queue."""
@@ -395,8 +405,12 @@ def browse_recovery_queue_virtual_fs(
     return results
 
 
-@router.get("/queue/tree", response_model=List[CartTreeNodeSchema])
-def get_recovery_queue_tree(
+@router.get(
+    "/queue/tree",
+    response_model=List[CartTreeNodeSchema],
+    operation_id="get_restore_queue_tree",
+)
+def get_restore_queue_tree(
     path: Optional[str] = None, db_session: Session = Depends(get_db)
 ):
     """Returns a recursive tree view of the recovery queue's virtual filesystem."""
+72 -54
@@ -203,7 +203,7 @@ def _validate_path_within_roots(path: str, roots: List[str]) -> bool:
 # --- Endpoints ---
 
 
-@router.post("/test/reset")
+@router.post("/test/reset", operation_id="reset_test_environment")
 def reset_test_environment(db_session: Session = Depends(get_db)):
     """Wipes the database and resets state for E2E testing."""
     import os
@@ -229,7 +229,11 @@ def reset_test_environment(db_session: Session = Depends(get_db)):
     return {"message": "Test environment reset"}
 
 
-@router.get("/dashboard/stats", response_model=DashboardStatsSchema)
+@router.get(
+    "/dashboard/stats",
+    response_model=DashboardStatsSchema,
+    operation_id="get_dashboard_stats",
+)
 def get_dashboard_stats(db_session: Session = Depends(get_db)):
     """Computes high-level system statistics for the overview dashboard."""
     aggregation_sql = text("""
@@ -324,7 +328,7 @@ def get_dashboard_stats(db_session: Session = Depends(get_db)):
     )
 
 
-@router.get("/jobs", response_model=List[JobSchema])
+@router.get("/jobs", response_model=List[JobSchema], operation_id="list_jobs")
 def list_jobs(limit: int = 10, offset: int = 0, db_session: Session = Depends(get_db)):
     """Returns a paginated list of background archival and discovery jobs."""
     jobs = (
@@ -373,14 +377,14 @@ def list_jobs(limit: int = 10, offset: int = 0, db_session: Session = Depends(get_db)):
     return result
 
 
-@router.get("/jobs/count")
-def get_jobs_count(db_session: Session = Depends(get_db)):
+@router.get("/jobs/count", operation_id="get_job_count")
+def get_job_count(db_session: Session = Depends(get_db)):
     """Returns the total number of jobs recorded in the system."""
     return {"count": db_session.query(models.Job).count()}
 
 
-@router.get("/jobs/stats")
-def get_jobs_stats(db_session: Session = Depends(get_db)):
+@router.get("/jobs/stats", operation_id="get_job_stats")
+def get_job_stats(db_session: Session = Depends(get_db)):
     """Returns summary statistics for all jobs."""
     total = db_session.query(models.Job).count()
     completed = (
@@ -429,8 +433,8 @@ def get_jobs_stats(db_session: Session = Depends(get_db)):
     }
 
 
-@router.get("/jobs/{job_id}", response_model=JobSchema)
-def get_job_detail(job_id: int, db_session: Session = Depends(get_db)):
+@router.get("/jobs/{job_id}", response_model=JobSchema, operation_id="get_job")
+def get_job(job_id: int, db_session: Session = Depends(get_db)):
     """Retrieves detailed metadata for a specific job."""
     job_record = db_session.get(models.Job, job_id)
     if not job_record:
@@ -457,7 +461,11 @@ def get_job_detail(job_id: int, db_session: Session = Depends(get_db)):
     )
 
 
-@router.get("/jobs/{job_id}/logs", response_model=List[JobLogSchema])
+@router.get(
+    "/jobs/{job_id}/logs",
+    response_model=List[JobLogSchema],
+    operation_id="get_job_logs",
+)
 def get_job_logs(job_id: int, db_session: Session = Depends(get_db)):
     """Retrieves the full execution log for a specific job."""
     job_record = db_session.get(models.Job, job_id)
@@ -476,14 +484,14 @@ def get_job_logs(job_id: int, db_session: Session = Depends(get_db)):
     ]
 
 
-@router.post("/jobs/{job_id}/cancel")
+@router.post("/jobs/{job_id}/cancel", operation_id="cancel_job")
 def cancel_job(job_id: int):
     """Submits a cancellation request for an active job."""
     JobManager.cancel_job(job_id)
     return {"message": "Cancellation request submitted"}
 
 
-@router.post("/jobs/{job_id}/retry")
+@router.post("/jobs/{job_id}/retry", operation_id="retry_job")
 def retry_job(
     job_id: int,
     background_tasks: BackgroundTasks,
@@ -520,7 +528,7 @@ def retry_job(
     }
 
 
-@router.get("/jobs/stream")
+@router.get("/jobs/stream", operation_id="stream_jobs")
 async def stream_jobs():
     """Server-Sent Events (SSE) endpoint for real-time job status updates."""
@@ -582,7 +590,7 @@ async def stream_jobs():
     return StreamingResponse(event_generator(), media_type="text/event-stream")
 
 
-@router.post("/scan")
+@router.post("/scan", operation_id="trigger_scan")
 def trigger_scan(
     background_tasks: BackgroundTasks, db_session: Session = Depends(get_db)
 ):
@@ -599,7 +607,7 @@ def trigger_scan(
     return {"message": "Scan started", "job_id": job_record.id}
 
 
-@router.post("/index/hash")
+@router.post("/index/hash", operation_id="trigger_indexing")
 def trigger_indexing(
     background_tasks: BackgroundTasks, db_session: Session = Depends(get_db)
 ):
@@ -611,7 +619,9 @@ def trigger_indexing(
     return {"message": "Background hashing task initiated"}
 
 
-@router.get("/scan/status", response_model=ScanStatusSchema)
+@router.get(
+    "/scan/status", response_model=ScanStatusSchema, operation_id="get_scan_status"
+)
 def get_scan_status():
     """Returns the real-time operational status of the scanner and hashing engines."""
     return ScanStatusSchema(
@@ -837,10 +847,8 @@ def search_system_index(
     return results
 
 
-@router.post("/track/batch")
-def batch_update_tracking(
-    request_data: BatchTrackRequest, db_session: Session = Depends(get_db)
-):
+@router.post("/track/batch", operation_id="batch_track")
+def batch_track(request_data: BatchTrackRequest, db_session: Session = Depends(get_db)):
     """Applies bulk inclusion and exclusion rules and synchronizes is_ignored flags."""
     all_paths = list(request_data.tracks) + list(request_data.untracks)
     # Batch-fetch existing TrackedSource records (MEDIUM #15)
@@ -887,17 +895,15 @@ def batch_update_tracking(
     return {"message": "Tracking policy synchronized with filesystem index."}
 
 
-@router.get("/settings", response_model=Dict[str, str])
-def get_system_settings(db_session: Session = Depends(get_db)):
+@router.get("/settings", response_model=Dict[str, str], operation_id="get_settings")
+def get_settings(db_session: Session = Depends(get_db)):
     """Retrieves all global system configuration key-value pairs."""
     settings_records = db_session.query(models.SystemSetting).all()
     return {record.key: record.value for record in settings_records}
 
 
-@router.post("/settings")
-def update_system_setting(
-    setting_data: SettingSchema, db_session: Session = Depends(get_db)
-):
+@router.post("/settings", operation_id="update_settings")
+def update_settings(setting_data: SettingSchema, db_session: Session = Depends(get_db)):
     """Updates or creates a global system configuration setting."""
     existing_record = (
         db_session.query(models.SystemSetting)
@@ -921,8 +927,8 @@ def update_system_setting(
     return {"message": "Setting committed."}
 
 
-@router.post("/notifications/test")
-def test_notification_dispatch(request_data: TestNotificationRequest):
+@router.post("/notifications/test", operation_id="test_notification")
+def test_notification(request_data: TestNotificationRequest):
     """Dispatches a test alert to the provided Apprise URL."""
     from app.services.notifications import notification_manager
@@ -932,8 +938,8 @@ def test_notification_dispatch(request_data: TestNotificationRequest):
         raise HTTPException(status_code=500, detail="Failed to dispatch test alert.")
 
 
-@router.get("/ls")
-def list_host_directories(path: str = "/"):
+@router.get("/ls", operation_id="list_directories")
+def list_directories(path: str = "/"):
     """Lists subdirectories on the host system for UI path selection."""
     if ".." in path:
         raise HTTPException(status_code=403, detail="Path traversal not allowed")
@@ -956,8 +962,8 @@ def list_host_directories(path: str = "/"):
         raise HTTPException(status_code=500, detail=str(directory_error))
 
 
-@router.get("/hardware/discover")
-def discover_hardware_nodes(db_session: Session = Depends(get_db)):
+@router.get("/hardware/discover", operation_id="discover_hardware")
+def discover_hardware(db_session: Session = Depends(get_db)):
     """Polls host hardware and mount points to discover unregistered storage media."""
     discovered_nodes = []
@@ -1112,8 +1118,8 @@ def discover_hardware_nodes(db_session: Session = Depends(get_db)):
     return discovered_nodes
 
 
-@router.post("/hardware/ignore")
-def ignore_hardware_node(
+@router.post("/hardware/ignore", operation_id="ignore_hardware")
+def ignore_hardware(
    request_data: IgnoreHardwareRequest, db_session: Session = Depends(get_db)
 ):
     """Appends a hardware identifier to the global ignore list."""
@@ -1135,8 +1141,8 @@ def ignore_hardware_node(
     return {"message": "Hardware node ignored."}
 
 
-@router.get("/database/export")
-def export_database_index():
+@router.get("/database/export", operation_id="export_database")
+def export_database():
     """Generates a clean backup of the active SQLite database."""
     database_url = os.getenv("DATABASE_URL", "sqlite:///tapehoard.db")
     database_path = database_url.replace("sqlite:///", "")
@@ -1174,8 +1180,8 @@ def export_database_index():
     )
 
 
-@router.post("/database/import")
-async def import_database_index(file: Any, db_session: Session = Depends(get_db)):
+@router.post("/database/import", operation_id="import_database")
+async def import_database(file: Any, db_session: Session = Depends(get_db)):
     """Overwrites the current system state with an imported index file."""
     # Implementation pending - requires careful session termination
     return {"message": "Import logic restricted for safety."}
@@ -1218,7 +1224,11 @@ def get_system_tree(path: Optional[str] = None, db_session: Session = Depends(get_db)):
 # --- Discrepancy Endpoints ---
 
 
-@router.get("/discrepancies", response_model=List[DiscrepancySchema])
+@router.get(
+    "/discrepancies",
+    response_model=List[DiscrepancySchema],
+    operation_id="list_discrepancies",
+)
 def list_discrepancies(db_session: Session = Depends(get_db)):
     """Lists files with discrepancies: confirmed deleted or unhashed and missing from disk."""
     deleted_records = (
@@ -1327,8 +1337,8 @@ def _resolve_ids_from_action(
     return []
 
 
-@router.post("/discrepancies/batch/confirm")
-def batch_confirm_deleted(
+@router.post("/discrepancies/batch/confirm", operation_id="batch_confirm_discrepancies")
+def batch_confirm_discrepancies(
     action: BatchDiscrepancyAction, db_session: Session = Depends(get_db)
 ):
     ids = _resolve_ids_from_action(action, db_session)
@@ -1344,8 +1354,8 @@ def batch_confirm_deleted(
     }
 
 
-@router.post("/discrepancies/batch/dismiss")
-def batch_dismiss(
+@router.post("/discrepancies/batch/dismiss", operation_id="batch_dismiss_discrepancies")
+def batch_dismiss_discrepancies(
     action: BatchDiscrepancyAction, db_session: Session = Depends(get_db)
 ):
     ids = _resolve_ids_from_action(action, db_session)
@@ -1363,8 +1373,8 @@ def batch_dismiss(
     return {"message": f"{len(ids)} discrepancy(ies) dismissed", "count": len(ids)}
 
 
-@router.post("/discrepancies/batch/delete")
-def batch_hard_delete(
+@router.post("/discrepancies/batch/delete", operation_id="batch_delete_discrepancies")
+def batch_delete_discrepancies(
     action: BatchDiscrepancyAction, db_session: Session = Depends(get_db)
 ):
     ids = _resolve_ids_from_action(action, db_session)
@@ -1383,8 +1393,8 @@ def batch_hard_delete(
     return {"message": f"{len(ids)} record(s) permanently deleted", "count": len(ids)}
 
 
-@router.post("/discrepancies/{file_id}/confirm")
-def confirm_file_deleted(file_id: int, db_session: Session = Depends(get_db)):
+@router.post("/discrepancies/{file_id}/confirm", operation_id="confirm_discrepancy")
+def confirm_discrepancy(file_id: int, db_session: Session = Depends(get_db)):
     """Marks a file as confirmed deleted (soft delete)."""
     record = db_session.get(models.FilesystemState, file_id)
     if not record:
@@ -1394,7 +1404,7 @@ def confirm_file_deleted(file_id: int, db_session: Session = Depends(get_db)):
     return {"message": f"File '{record.file_path}' marked as deleted"}
 
 
-@router.post("/discrepancies/{file_id}/dismiss")
+@router.post("/discrepancies/{file_id}/dismiss", operation_id="dismiss_discrepancy")
 def dismiss_discrepancy(file_id: int, db_session: Session = Depends(get_db)):
     """Acknowledges a missing file — hides it from discrepancies."""
     record = db_session.get(models.FilesystemState, file_id)
@@ -1405,7 +1415,9 @@ def dismiss_discrepancy(file_id: int, db_session: Session = Depends(get_db)):
     return {"message": f"File '{record.file_path}' discrepancy dismissed"}
 
 
-@router.post("/discrepancies/{file_id}/undo-dismiss")
+@router.post(
+    "/discrepancies/{file_id}/undo-dismiss", operation_id="undo_dismiss_discrepancy"
+)
 def undo_dismiss_discrepancy(file_id: int, db_session: Session = Depends(get_db)):
     """Clears the acknowledged state so the file reappears in discrepancies (MEDIUM #22)."""
     record = db_session.get(models.FilesystemState, file_id)
@@ -1418,8 +1430,8 @@ def undo_dismiss_discrepancy(file_id: int, db_session: Session = Depends(get_db)):
     }
 
 
-@router.delete("/discrepancies/{file_id}")
-def delete_file_record(file_id: int, db_session: Session = Depends(get_db)):
+@router.delete("/discrepancies/{file_id}", operation_id="delete_discrepancy")
+def delete_discrepancy(file_id: int, db_session: Session = Depends(get_db)):
     """Hard-deletes a file record and all associated versions/cart entries."""
     record = db_session.get(models.FilesystemState, file_id)
     if not record:
@@ -1439,8 +1451,12 @@ def delete_file_record(file_id: int, db_session: Session = Depends(get_db)):
 # --- Discrepancy Tree & Browse Endpoints ---
 
 
-@router.get("/discrepancies/tree", response_model=List[TreeNodeSchema])
-def get_discrepancies_tree(
+@router.get(
+    "/discrepancies/tree",
+    response_model=List[TreeNodeSchema],
+    operation_id="get_discrepancy_tree",
+)
+def get_discrepancy_tree(
     path: Optional[str] = Query(
         default="ROOT", description="Root path to get tree for"
     ),
@@ -1557,7 +1573,9 @@ def get_discrepancies_tree(
     return result
 
 
-@router.get("/discrepancies/browse", response_model=dict)
+@router.get(
+    "/discrepancies/browse", response_model=dict, operation_id="browse_discrepancies"
+)
 def browse_discrepancies(
     path: Optional[str] = Query(default="ROOT", description="Directory path to browse"),
     db_session: Session = Depends(get_db),
+2 -2
@@ -71,7 +71,7 @@ if os.path.exists(static_assets_path):
         return FileResponse(os.path.join(static_assets_path, "index.html"))
 
 
-@app.get("/health")
-def health_heartbeat():
+@app.get("/health", operation_id="check_health")
+def check_health():
     """Simple health check endpoint for monitoring."""
     return {"status": "healthy", "service": "tapehoard-backend"}
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because it is too large
@@ -1,7 +1,7 @@
 <script lang="ts">
   import { Folder, ChevronRight, Home, ArrowLeft, Check, X, RotateCw } from 'lucide-svelte';
   import { Button } from './ui/button';
-  import { listHostDirectoriesSystemLsGet } from '$lib/api';
+  import { listDirectories } from '$lib/api';
   import { cn } from '$lib/utils';
   import { toast } from 'svelte-sonner';
@@ -17,7 +17,7 @@
   async function loadDirectories(path: string) {
     loading = true;
     try {
-      const response = await listHostDirectoriesSystemLsGet({
+      const response = await listDirectories({
         query: { path }
       });
       if (response.data) {
@@ -4,7 +4,7 @@
   import { Button } from './ui/button';
   import { Card } from './ui/card';
   import Dialog from './ui/Dialog.svelte';
-  import { getJobDetailSystemJobsJobIdGet, getJobLogsSystemJobsJobIdLogsGet, type AppApiSystemJobSchema } from '$lib/api';
+  import { getJob, getJobLogs, type AppApiSystemJobSchema } from '$lib/api';
   import { cn, formatLocalTime, formatLocalDateTime, parseUTCDate } from '$lib/utils';
   import { POLL_FAST } from '$lib/config';
@@ -22,8 +22,8 @@
     loading = true;
     try {
       const [jobRes, logsRes] = await Promise.all([
-        getJobDetailSystemJobsJobIdGet({ path: { job_id: jobId } }),
-        getJobLogsSystemJobsJobIdLogsGet({ path: { job_id: jobId } })
+        getJob({ path: { job_id: jobId } }),
+        getJobLogs({ path: { job_id: jobId } })
       ]);
       if (jobRes.data) job = jobRes.data;
       if (logsRes.data) logs = logsRes.data;
@@ -39,8 +39,8 @@
     try {
       const [jobRes, logsRes] = await Promise.all([
-        getJobDetailSystemJobsJobIdGet({ path: { job_id: jobId } }),
-        getJobLogsSystemJobsJobIdLogsGet({ path: { job_id: jobId } })
+        getJob({ path: { job_id: jobId } }),
+        getJobLogs({ path: { job_id: jobId } })
      ]);
       if (jobRes.data) {
         const wasRunning = job.status === 'RUNNING' || job.status === 'PENDING';
@@ -2,7 +2,7 @@
   import { onMount, onDestroy } from 'svelte';
   import { RotateCw, Activity, CheckCircle2 } from 'lucide-svelte';
   import { Card } from '$lib/components/ui/card';
-  import { getScanStatusSystemScanStatusGet, type ScanStatusSchema } from '$lib/api';
+  import { getScanStatus, type ScanStatusSchema } from '$lib/api';
   import { POLL_FAST } from '$lib/config';
   import { toast } from 'svelte-sonner';
@@ -13,7 +13,7 @@
   async function updateScanStatus() {
     try {
-      const response = await getScanStatusSystemScanStatusGet();
+      const response = await getScanStatus();
       if (response.data) {
         const wasRunning = scanStatus?.is_running;
         scanStatus = response.data;
@@ -24,8 +24,8 @@
     archiveTree,
     filesystemBrowse,
     archiveBrowse,
-    getDiscrepanciesTreeSystemDiscrepanciesTreeGet,
-    browseDiscrepanciesSystemDiscrepanciesBrowseGet,
+    getDiscrepancyTree,
+    browseDiscrepancies,
   } from "$lib/api";
 
   let {
@@ -199,7 +199,7 @@
   onMount(async () => {
     if (mode === "discrepancies") {
       try {
-        const response = await getDiscrepanciesTreeSystemDiscrepanciesTreeGet({ query: { path: "ROOT" } });
+        const response = await getDiscrepancyTree({ query: { path: "ROOT" } });
         if (response.data && Array.isArray(response.data)) {
           discrepancyRoot.children = response.data.map((d: any) => ({
             name: d.name,
@@ -5,7 +5,7 @@
   import type { TreeNode } from "$lib/types";
   import { cn } from "$lib/utils";
   import FileBrowserTreeItem from "./FileBrowserTreeItem.svelte";
-  import { filesystemTree, archiveTree, getDiscrepanciesTreeSystemDiscrepanciesTreeGet } from "$lib/api";
+  import { filesystemTree, archiveTree, getDiscrepancyTree } from "$lib/api";
 
   let {
     node,
@@ -62,7 +62,7 @@
     try {
       let response;
       if (mode === "discrepancies") {
-        response = await getDiscrepanciesTreeSystemDiscrepanciesTreeGet({
+        response = await getDiscrepancyTree({
           query: { path: node.path }
         });
       } else {
+4 -4
@@ -23,7 +23,7 @@
   import SectionHeader from '$lib/components/ui/SectionHeader.svelte';
   import StatCard from '$lib/components/ui/StatCard.svelte';
   import ProgressBar from '$lib/components/ui/ProgressBar.svelte';
-  import { getDashboardStatsSystemDashboardStatsGet, triggerScanSystemScanPost, triggerIndexingSystemIndexHashPost, type DashboardStatsSchema } from '$lib/api';
+  import { getDashboardStats, triggerScan, triggerIndexing, type DashboardStatsSchema } from '$lib/api';
   import { cn, formatLocalDate, formatLocalTime, formatSize } from '$lib/utils';
   import { toast } from 'svelte-sonner';
@@ -35,7 +35,7 @@
   async function loadStats() {
     loading = true;
     try {
-      const response = await getDashboardStatsSystemDashboardStatsGet();
+      const response = await getDashboardStats();
       if (response.data) {
         stats = response.data;
       }
@@ -49,7 +49,7 @@
   async function startIndexing() {
     indexing = true;
     try {
-      await triggerIndexingSystemIndexHashPost();
+      await triggerIndexing();
       toast.success("Background indexing job initiated");
     } catch (error: any) {
       toast.error(error.body?.detail || "Failed to start indexing");
@@ -61,7 +61,7 @@
   async function startScan() {
     scanning = true;
     try {
-      await triggerScanSystemScanPost();
+      await triggerScan();
       toast.success("Scan job initiated successfully");
     } catch (error: any) {
       toast.error(error.body?.detail || "Failed to start scan");
+15 -15
@@ -8,14 +8,14 @@
   import FileBrowser from '$lib/components/file-browser/FileBrowser.svelte';
   import { toast } from 'svelte-sonner';
   import {
-    listDiscrepanciesSystemDiscrepanciesGet,
-    dismissDiscrepancySystemDiscrepanciesFileIdDismissPost,
-    batchDismissSystemDiscrepanciesBatchDismissPost,
-    batchHardDeleteSystemDiscrepanciesBatchDeletePost,
-    addFileToRecoveryQueueRestoresQueueFileFileIdPost,
-    batchAddToRecoveryQueueRestoresQueueBatchPost,
-    browseDiscrepanciesSystemDiscrepanciesBrowseGet,
-    getDiscrepanciesTreeSystemDiscrepanciesTreeGet,
+    listDiscrepancies,
+    dismissDiscrepancy,
+    batchDismissDiscrepancies,
+    batchDeleteDiscrepancies,
+    addFileToRestoreQueue,
+    batchAddToRestoreQueue,
+    browseDiscrepancies,
+    getDiscrepancyTree,
     type DiscrepancySchema,
   } from '$lib/api';
   import { type FileItem } from '$lib/types';
@@ -31,7 +31,7 @@
   async function loadDiscrepancies() {
     loading = true;
     try {
-      const response = await listDiscrepanciesSystemDiscrepanciesGet();
+      const response = await listDiscrepancies();
       if (response.data) {
         discrepancies = response.data;
       }
@@ -46,7 +46,7 @@
   async function loadFiles(path: string) {
     try {
-      const response = await browseDiscrepanciesSystemDiscrepanciesBrowseGet({ query: { path } });
+      const response = await browseDiscrepancies({ query: { path } });
       if (response.data && (response.data as any).files) {
         files = (response.data as any).files.map((d: any) => {
           // Check if it's a directory (has "type" property) or a file (has "id")
@@ -82,7 +82,7 @@
   async function addToCart(item: FileItem) {
     if (!item.discrepancy_id) return;
     try {
-      await addFileToRecoveryQueueRestoresQueueFileFileIdPost({
+      await addFileToRestoreQueue({
         path: { file_id: item.discrepancy_id }
       });
       toast.success("Added to restore cart");
@@ -94,7 +94,7 @@
   async function deletePermanently(item: FileItem) {
     if (!item.discrepancy_id) return;
     try {
-      await batchHardDeleteSystemDiscrepanciesBatchDeletePost({
+      await batchDeleteDiscrepancies({
         body: { ids: [item.discrepancy_id] }
       });
       toast.success("File record deleted permanently");
@@ -112,7 +112,7 @@
     }
     batchLoading = true;
     try {
-      await batchDismissSystemDiscrepanciesBatchDismissPost({
+      await batchDismissDiscrepancies({
         body: { ids }
       });
       toast.success(`Dismissed ${ids.length} files`);
@@ -133,7 +133,7 @@
     }
     batchLoading = true;
     try {
-      await batchHardDeleteSystemDiscrepanciesBatchDeletePost({
+      await batchDeleteDiscrepancies({
         body: { ids }
       });
       toast.success(`Deleted ${ids.length} file records`);
@@ -154,7 +154,7 @@
     }
     batchLoading = true;
     try {
-      await batchAddToRecoveryQueueRestoresQueueBatchPost({
+      await batchAddToRestoreQueue({
         body: { ids }
       });
       toast.success(`Added ${ids.length} files to restore cart`);
+6 -6
@@ -8,9 +8,9 @@
   import type { FileItem } from '$lib/types';
   import {
     filesystemBrowse,
-    batchUpdateTrackingSystemTrackBatchPost,
-    triggerScanSystemScanPost,
-    getScanStatusSystemScanStatusGet,
+    batchTrack,
+    triggerScan,
+    getScanStatus,
     filesystemSearch,
     type ScanStatusSchema
   } from '$lib/api';
@@ -108,7 +108,7 @@
   async function updateScanStatus() {
     try {
-      const response = await getScanStatusSystemScanStatusGet();
+      const response = await getScanStatus();
       if (response.data) {
         const wasRunning = scanRunning;
         scanRunning = response.data.is_running;
@@ -124,7 +124,7 @@
   async function startScan() {
     try {
-      await triggerScanSystemScanPost();
+      await triggerScan();
       updateScanStatus();
     } catch (error: any) {
       toast.error(error.body?.detail || "Failed to start scan");
@@ -181,7 +181,7 @@
         .filter(([_, ignoredState]) => ignoredState)
         .map(([path, _]) => path);
 
-      await batchUpdateTrackingSystemTrackBatchPost({
+      await batchTrack({
         body: { tracks, untracks }
       });
       pendingChanges.clear();
@@ -23,10 +23,10 @@
   import {
     archiveBrowse,
     archiveMetadata,
-    listRecoveryQueueRestoresQueueGet,
-    addFileToRecoveryQueueRestoresQueueFileFileIdPost,
-    removeFromRecoveryQueueRestoresQueueItemItemIdDelete,
-    addDirectoryToRecoveryQueueRestoresQueueDirectoryPost,
+    getRestoreQueue,
+    addFileToRestoreQueue,
+    removeFromRestoreQueue,
+    addDirectoryToRestoreQueue,
     archiveSearch,
     type ItemMetadataSchema,
     type CartItemSchema
@@ -51,7 +51,7 @@
   async function loadCart() {
     try {
-      const response = await listRecoveryQueueRestoresQueueGet();
+      const response = await getRestoreQueue();
       if (response.data) {
         restoreCartItems = response.data;
       }
@@ -177,7 +177,7 @@
       if (item.type === 'file') {
         const cartItem = restoreCartItems.find(i => i.file_path === item.path);
         if (cartItem) {
-          await removeFromRecoveryQueueRestoresQueueItemItemIdDelete({
+          await removeFromRestoreQueue({
            path: { item_id: cartItem.id }
           });
         }
@@ -193,13 +193,13 @@
         });
 
         if (metaResponse.data?.id) {
-          await addFileToRecoveryQueueRestoresQueueFileFileIdPost({
+          await addFileToRestoreQueue({
             path: { file_id: metaResponse.data.id }
           });
         }
       } else {
         // It's a directory
-        await addDirectoryToRecoveryQueueRestoresQueueDirectoryPost({
+        await addDirectoryToRestoreQueue({
           body: { path: item.path }
         });
       }
@@ -222,7 +222,7 @@
   async function handleToggleDirectoryCart(itemPath: string) {
     try {
-      await addDirectoryToRecoveryQueueRestoresQueueDirectoryPost({
+      await addDirectoryToRestoreQueue({
         body: { path: itemPath }
       });
+3 -3
@@ -18,7 +18,7 @@
   import StatCard from '$lib/components/ui/StatCard.svelte';
   import ProgressBar from '$lib/components/ui/ProgressBar.svelte';
   import Treemap from '$lib/components/Treemap.svelte';
-  import { getSystemAnalyticsInventoryInsightsGet, getDirectoryTreemapInventoryDirectoriesGet } from '$lib/api';
+  import { getAnalytics, getTreemap } from '$lib/api';
   import { cn, formatSize } from '$lib/utils';
   import { toast } from 'svelte-sonner';
   import { goto } from '$app/navigation';
@@ -32,7 +32,7 @@
   async function loadInsights() {
     loading = true;
     try {
-      const response = await getSystemAnalyticsInventoryInsightsGet();
+      const response = await getAnalytics();
       if (response.data) insights = response.data;
     } catch (error) {
       toast.error("Failed to generate analytics");
@@ -45,7 +45,7 @@
     if (dirTreemapLoaded) return;
     dirTreemapLoading = true;
     try {
-      const response = await getDirectoryTreemapInventoryDirectoriesGet();
+      const response = await getTreemap();
       if (response.data) {
         dirTreemapData = mapDirectoryTree(response.data as any[]);
         dirTreemapLoaded = true;
+22 -22
@@ -39,17 +39,17 @@
   import { cn, formatSize } from '$lib/utils';
   import { POLL_SLOW } from '$lib/config';
   import {
-    listStorageFleetInventoryMediaGet,
-    registerNewMediaInventoryMediaPost,
-    deleteMediaAssetInventoryMediaMediaIdDelete,
-    triggerBackupJobBackupsTriggerMediaIdPost,
-    triggerAutoBackupBackupsTriggerAutoPost,
-    initializeStorageHardwareInventoryMediaMediaIdInitializePost,
-    reorderArchivalPriorityInventoryMediaReorderPost,
-    updateMediaAssetInventoryMediaMediaIdPatch,
-    discoverHardwareNodesSystemHardwareDiscoverGet,
-    ignoreHardwareNodeSystemHardwareIgnorePost,
-    listStorageProvidersInventoryProvidersGet,
+    listMedia,
+    createMedia,
+    deleteMedia,
+    triggerBackup,
+    triggerAutoBackup,
+    initializeMedia,
+    reorderMedia,
+    updateMedia,
+    discoverHardware,
+    ignoreHardware,
+    listProviders,
     type MediaSchema,
     type StorageProviderSchema
   } from '$lib/api';
@@ -149,8 +149,8 @@
     if (!silent) loading = true;
     try {
       const [mediaRes, hardwareRes] = await Promise.all([
-        listStorageFleetInventoryMediaGet({ query: { refresh } }),
-        discoverHardwareNodesSystemHardwareDiscoverGet()
+        listMedia({ query: { refresh } }),
+        discoverHardware()
       ]);
       if (mediaRes.data) {
         // Implement client-side Last Known Good (LKG) caching for hardware status
@@ -207,7 +207,7 @@
     loadMedia(false, true);
     try {
-      const res = await listStorageProvidersInventoryProvidersGet();
+      const res = await listProviders();
       if (res.data) providersList = res.data;
     } catch (error) {
       console.error("Failed to load storage providers:", error);
@@ -248,7 +248,7 @@
     mediaList = [...activeItems, ...inactiveItems];
 
     try {
-      await reorderArchivalPriorityInventoryMediaReorderPost({
+      await reorderMedia({
         body: { media_ids: mediaList.map(m => m.id) },
         throwOnError: true
       });
@@ -264,7 +264,7 @@
     try {
       toast.info(`Initializing ${identifier}...`);
 
-      await initializeStorageHardwareInventoryMediaMediaIdInitializePost({
+      await initializeMedia({
         path: { media_id: mediaId },
         query: { force },
         throwOnError: true
@@ -284,7 +284,7 @@
   async function handleStartBackup(mediaId: number, identifier: string) {
     try {
-      await triggerBackupJobBackupsTriggerMediaIdPost({
+      await triggerBackup({
         path: { media_id: mediaId },
         throwOnError: true
       });
@@ -296,7 +296,7 @@
   async function handleAutoArchive() {
     try {
-      await triggerAutoBackupBackupsTriggerAutoPost({
+      await triggerAutoBackup({
         throwOnError: true
       });
       toast.success("Auto-archival job initiated for all active media");
@@ -312,7 +312,7 @@
     }
     try {
-      await registerNewMediaInventoryMediaPost({
+      await createMedia({
         body: {
           media_type: newMedia.media_type,
           identifier: newMedia.identifier,
@@ -338,7 +338,7 @@
   async function handleUpdate() {
     if (!editingMedia) return;
     try {
-      await updateMediaAssetInventoryMediaMediaIdPatch({
+      await updateMedia({
         path: { media_id: editingMedia.id },
         body: {
           location: editingMedia.location,
@@ -357,7 +357,7 @@
   async function handleIgnoreAsset(identifier: string) {
     try {
-      await ignoreHardwareNodeSystemHardwareIgnorePost({
+      await ignoreHardware({
         body: { identifier }
       });
       loadMedia();
@@ -369,7 +369,7 @@
   async function handleDelete(mediaId: number) {
     if (!confirm("Remove this media from inventory? Data on the physical media will remain, but TapeHoard will lose its index association.")) return;
     try {
-      await deleteMediaAssetInventoryMediaMediaIdDelete({
+      await deleteMedia({
         path: { media_id: mediaId }
       });
       toast.success("Media removed from inventory");
+13 -13
@@ -24,11 +24,11 @@
 import EmptyState from '$lib/components/ui/EmptyState.svelte';
 import JobDetailModal from '$lib/components/JobDetailModal.svelte';
 import {
-    listJobsSystemJobsGet,
-    getJobsCountSystemJobsCountGet,
-    getJobsStatsSystemJobsStatsGet,
-    cancelJobSystemJobsJobIdCancelPost,
-    retryJobSystemJobsJobIdRetryPost,
+    listJobs,
+    getJobCount,
+    getJobStats,
+    cancelJob as cancelJobApi,
+    retryJob as retryJobApi,
     type AppApiSystemJobSchema
 } from '$lib/api';
 import { cn, formatLocalTime, parseUTCDate } from '$lib/utils';
@@ -65,7 +65,7 @@
 async function loadStats() {
     statsLoading = true;
     try {
-        const res = await getJobsStatsSystemJobsStatsGet();
+        const res = await getJobStats();
         if (res.data) stats = res.data as typeof stats;
     } catch (error) {
         console.error("Failed to load stats:", error);
@@ -78,8 +78,8 @@
     loading = true;
     try {
         const [jobsRes, countRes] = await Promise.all([
-            listJobsSystemJobsGet({ query: { limit: LIMIT, offset: 0 } }),
-            getJobsCountSystemJobsCountGet()
+            listJobs({ query: { limit: LIMIT, offset: 0 } }),
+            getJobCount()
         ]);
         if (jobsRes.data) jobs = jobsRes.data;
@@ -97,7 +97,7 @@
     loadingMore = true;
     const newOffset = offset + LIMIT;
     try {
-        const response = await listJobsSystemJobsGet({
+        const response = await listJobs({
             query: { limit: LIMIT, offset: newOffset }
         });
         if (response.data) {
@@ -113,7 +113,7 @@
 async function pollActiveJobs() {
     try {
-        const response = await listJobsSystemJobsGet({
+        const response = await listJobs({
             query: { limit: LIMIT, offset: 0 }
         });
         if (response.data) {
@@ -122,7 +122,7 @@
             jobs = [...updated, ...rest];
         }
-        const countRes = await getJobsCountSystemJobsCountGet();
+        const countRes = await getJobCount();
         if (countRes.data) totalJobs = (countRes.data as any).count;
     } catch (error) {
         // Silently fail polling
@@ -131,7 +131,7 @@
 async function cancelJob(jobId: number) {
     try {
-        await cancelJobSystemJobsJobIdCancelPost({
+        await cancelJobApi({
             path: { job_id: jobId },
             throwOnError: true
         });
@@ -144,7 +144,7 @@
 async function retryJob(jobId: number) {
     try {
-        const res = await retryJobSystemJobsJobIdRetryPost({
+        const res = await retryJobApi({
             path: { job_id: jobId },
             throwOnError: true
         });
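Note the `cancelJob as cancelJobApi` and `retryJob as retryJobApi` aliases: the cleaned-up operation IDs now collide with this component's local handler names, so the generated functions are imported under a suffix. A minimal sketch of why (the handler name comes from the diff; the rest is illustrative):

```ts
// An unaliased `import { cancelJob } from '$lib/api'` would clash with the
// local declaration below and fail to compile; the `Api` suffix avoids that.
import { cancelJob as cancelJobApi } from '$lib/api';

async function cancelJob(jobId: number) {
    // Calls the generated SDK function, not this handler
    await cancelJobApi({ path: { job_id: jobId }, throwOnError: true });
}
```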
+12 -12
View File
@@ -24,13 +24,13 @@
 import FileBrowser from '$lib/components/file-browser/FileBrowser.svelte';
 import type { FileItem } from '$lib/types';
 import {
-    listRecoveryQueueRestoresQueueGet,
-    calculateRecoveryManifestRestoresManifestGet,
-    removeFromRecoveryQueueRestoresQueueItemItemIdDelete,
-    clearRecoveryQueueRestoresQueueClearPost,
-    getSystemSettingsSystemSettingsGet,
-    triggerRecoveryJobRestoresTriggerPost,
-    browseRecoveryQueueVirtualFsRestoresQueueBrowseGet,
+    getRestoreQueue,
+    getRestoreManifest,
+    removeFromRestoreQueue,
+    clearRestoreQueue,
+    getSettings,
+    triggerRestore,
+    browseRestoreQueue,
     type CartItemSchema,
     type RestoreManifestSchema,
     type CartFileItemSchema
@@ -49,7 +49,7 @@
 async function loadData() {
     loading = true;
     try {
-        const settingsRes = await getSystemSettingsSystemSettingsGet();
+        const settingsRes = await getSettings();
         if (settingsRes.data?.restore_destinations) {
             restoreDests = JSON.parse(settingsRes.data.restore_destinations);
             if (restoreDests.length > 0 && !selectedDest) selectedDest = restoreDests[0];
@@ -70,7 +70,7 @@
 async function refreshManifest() {
     try {
-        const manifestRes = await calculateRecoveryManifestRestoresManifestGet();
+        const manifestRes = await getRestoreManifest();
         if (manifestRes.data) manifest = manifestRes.data;
     } catch (err) {
         console.error("Failed to load manifest:", err);
@@ -80,7 +80,7 @@
 async function loadCartFiles(path: string) {
     loading = true;
     try {
-        const response = await browseRecoveryQueueVirtualFsRestoresQueueBrowseGet({
+        const response = await browseRestoreQueue({
             query: { path }
         });
         if (response.data) {
@@ -113,7 +113,7 @@
     restoring = true;
     try {
-        await triggerRecoveryJobRestoresTriggerPost({
+        await triggerRestore({
             body: { destination_path: selectedDest }
         });
         toast.success("Recovery job initiated! Check System Activity for progress.");
@@ -139,7 +139,7 @@
 async function clearCart() {
     if (!confirm("Are you sure you want to clear the entire recovery queue?")) return;
     try {
-        await clearRecoveryQueueRestoresQueueClearPost();
+        await clearRestoreQueue();
         cartFiles = [];
         manifest = null;
         await loadData();
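One thing worth flagging in `loadData`: settings values such as `restore_destinations` come back as JSON-encoded strings and go straight through `JSON.parse`, which throws on malformed input. A hypothetical guard (this helper does not exist in the codebase) would keep one bad setting from aborting the whole load:

```ts
// Hypothetical helper, not part of this commit: parse a JSON-string setting
// and fall back to a default instead of throwing on malformed input.
function parseJsonSetting<T>(raw: string | null | undefined, fallback: T): T {
    if (!raw) return fallback;
    try {
        return JSON.parse(raw) as T;
    } catch {
        return fallback;
    }
}

// Usage, mirroring loadData above:
// restoreDests = parseJsonSetting<string[]>(settingsRes.data?.restore_destinations, []);
```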
+15 -15
View File
@@ -26,11 +26,11 @@
 import { Card } from "$lib/components/ui/card";
 import { Input } from "$lib/components/ui/input";
 import {
-    getSystemSettingsSystemSettingsGet,
-    updateSystemSettingSystemSettingsPost,
-    testNotificationDispatchSystemNotificationsTestPost,
-    exportDatabaseIndexSystemDatabaseExportGet,
-    importDatabaseIndexSystemDatabaseImportPost
+    getSettings,
+    updateSettings,
+    testNotification,
+    exportDatabase,
+    importDatabase
 } from "$lib/api";
 import { toast } from "svelte-sonner";
 import { cn } from "$lib/utils";
@@ -115,7 +115,7 @@
 async function loadSettings() {
     loading = true;
     try {
-        const response = await getSystemSettingsSystemSettingsGet();
+        const response = await getSettings();
         if (response.data) {
             const data = response.data;
             if (data.source_roots) sourceRoots = JSON.parse(data.source_roots);
@@ -148,13 +148,13 @@
     saving = true;
     try {
         await Promise.all([
-            updateSystemSettingSystemSettingsPost({ body: { key: "source_roots", value: JSON.stringify(sourceRoots) } }),
-            updateSystemSettingSystemSettingsPost({ body: { key: "restore_destinations", value: JSON.stringify(restoreDestinations) } }),
-            updateSystemSettingSystemSettingsPost({ body: { key: "tape_drives", value: JSON.stringify(tapeDrives) } }),
-            updateSystemSettingSystemSettingsPost({ body: { key: "global_exclusions", value: globalExclusions } }),
-            updateSystemSettingSystemSettingsPost({ body: { key: "schedule_scan", value: scanSchedule } }),
-            updateSystemSettingSystemSettingsPost({ body: { key: "schedule_archival", value: archivalSchedule } }),
-            updateSystemSettingSystemSettingsPost({ body: { key: "notification_urls", value: JSON.stringify(notificationUrls) } })
+            updateSettings({ body: { key: "source_roots", value: JSON.stringify(sourceRoots) } }),
+            updateSettings({ body: { key: "restore_destinations", value: JSON.stringify(restoreDestinations) } }),
+            updateSettings({ body: { key: "tape_drives", value: JSON.stringify(tapeDrives) } }),
+            updateSettings({ body: { key: "global_exclusions", value: globalExclusions } }),
+            updateSettings({ body: { key: "schedule_scan", value: scanSchedule } }),
+            updateSettings({ body: { key: "schedule_archival", value: archivalSchedule } }),
+            updateSettings({ body: { key: "notification_urls", value: JSON.stringify(notificationUrls) } })
         ]);
         // Snapshot saved state
@@ -179,7 +179,7 @@
 async function handleExport() {
     exporting = true;
     try {
-        const response = await exportDatabaseIndexSystemDatabaseExportGet();
+        const response = await exportDatabase();
         if (response.data) {
             const blob = await (response.data as any).blob();
             const url = window.URL.createObjectURL(blob);
@@ -200,7 +200,7 @@
 async function testNotify(url: string) {
     try {
-        await testNotificationDispatchSystemNotificationsTestPost({ body: { url } });
+        await testNotification({ body: { url } });
         toast.success("Test notification dispatched");
     } catch (error) {
         toast.error("Notification test failed");
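`handleExport` keeps the `(response.data as any).blob()` cast because the generated client types the export response loosely; the download itself is the standard object-URL pattern. A self-contained sketch of that pattern (helper name and filename are illustrative, not from this commit):

```ts
// Minimal sketch of the object-URL download used in handleExport.
function downloadBlob(blob: Blob, filename: string) {
    const url = window.URL.createObjectURL(blob);
    const a = document.createElement('a');
    a.href = url;
    a.download = filename;
    document.body.appendChild(a);
    a.click();
    a.remove();
    window.URL.revokeObjectURL(url); // release the blob reference
}

// e.g. downloadBlob(await (response.data as any).blob(), 'tapehoard-index.db');
```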