job logs + better status popup
@@ -0,0 +1,42 @@
+"""add_job_logs_table
+
+Revision ID: d1271a4ba29d
+Revises: ffdd68ee8ee3
+Create Date: 2026-04-30 00:36:23.634265
+
+"""
+
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision: str = "d1271a4ba29d"
+down_revision: Union[str, Sequence[str], None] = "ffdd68ee8ee3"
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+    """Upgrade schema."""
+    op.create_table(
+        "job_logs",
+        sa.Column("id", sa.Integer, primary_key=True),
+        sa.Column("job_id", sa.Integer, sa.ForeignKey("jobs.id"), nullable=False),
+        sa.Column("message", sa.String, nullable=False),
+        sa.Column(
+            "timestamp",
+            sa.DateTime,
+            nullable=False,
+            server_default=sa.func.datetime("now"),
+        ),
+    )
+    op.create_index("ix_job_logs_job_id", "job_logs", ["job_id"])
+
+
+def downgrade() -> None:
+    """Downgrade schema."""
+    op.drop_index("ix_job_logs_job_id", "job_logs")
+    op.drop_table("job_logs")
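
Note: a quick local check of this revision can go through Alembic's Python API (a minimal sketch, assuming the project's alembic.ini sits at the repository root — that path is an assumption, not something this commit specifies):

    # Hypothetical smoke test for the new revision; the config path is assumed.
    from alembic import command
    from alembic.config import Config

    cfg = Config("alembic.ini")
    command.upgrade(cfg, "d1271a4ba29d")    # creates job_logs and its index
    command.downgrade(cfg, "ffdd68ee8ee3")  # drops them again

The equivalent CLI calls are `alembic upgrade head` and `alembic downgrade -1`.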
+222 -92
@@ -48,6 +48,15 @@ class JobSchema(BaseModel):
     started_at: Optional[datetime] = None
     completed_at: Optional[datetime] = None
     created_at: datetime
+    latest_log: Optional[str] = None
+
+
+class JobLogSchema(BaseModel):
+    model_config = ConfigDict(from_attributes=True)
+
+    id: int
+    message: str
+    timestamp: datetime
 
 
 class FileItemSchema(BaseModel):
@@ -60,6 +69,11 @@ class FileItemSchema(BaseModel):
     sha256_hash: Optional[str] = None
 
 
+class BrowseResponseSchema(BaseModel):
+    files: List[FileItemSchema]
+    last_scan_time: Optional[datetime] = None
+
+
 class TrackToggleRequest(BaseModel):
     path: str
     is_directory: bool = True
@@ -196,11 +210,23 @@ def get_dashboard_stats(db_session: Session = Depends(get_db)):
         SUM(size) as total_size,
         SUM(CASE WHEN is_ignored = 1 THEN 1 ELSE 0 END) as ignored_count,
         SUM(CASE WHEN is_ignored = 1 THEN size ELSE 0 END) as ignored_size,
-        SUM(CASE WHEN is_ignored = 0 AND id NOT IN (SELECT filesystem_state_id FROM file_versions) THEN 1 ELSE 0 END) as unprotected_count,
-        SUM(CASE WHEN is_ignored = 0 AND id NOT IN (SELECT filesystem_state_id FROM file_versions) THEN size ELSE 0 END) as unprotected_size,
+        SUM(CASE WHEN is_ignored = 0 AND id NOT IN (
+            SELECT fv.filesystem_state_id FROM file_versions fv
+            JOIN storage_media sm ON sm.id = fv.media_id
+            WHERE sm.status IN ('active', 'full')
+        ) THEN 1 ELSE 0 END) as unprotected_count,
+        SUM(CASE WHEN is_ignored = 0 AND id NOT IN (
+            SELECT fv.filesystem_state_id FROM file_versions fv
+            JOIN storage_media sm ON sm.id = fv.media_id
+            WHERE sm.status IN ('active', 'full')
+        ) THEN size ELSE 0 END) as unprotected_size,
         SUM(CASE WHEN sha256_hash IS NOT NULL AND is_ignored = 0 THEN 1 ELSE 0 END) as hashed_count,
         SUM(CASE WHEN is_ignored = 0 THEN 1 ELSE 0 END) as eligible_count,
-        SUM(CASE WHEN id IN (SELECT filesystem_state_id FROM file_versions) THEN size ELSE 0 END) as archived_size
+        SUM(CASE WHEN id IN (
+            SELECT fv.filesystem_state_id FROM file_versions fv
+            JOIN storage_media sm ON sm.id = fv.media_id
+            WHERE sm.status IN ('active', 'full')
+        ) THEN size ELSE 0 END) as archived_size
         FROM filesystem_state
     """)
 
@@ -236,7 +262,15 @@ def get_dashboard_stats(db_session: Session = Depends(get_db)):
         .first()
     )
 
-    total_versions = db_session.query(func.count(models.FileVersion.id)).scalar() or 0
+    total_versions = (
+        db_session.query(func.count(models.FileVersion.id))
+        .join(
+            models.StorageMedia, models.StorageMedia.id == models.FileVersion.media_id
+        )
+        .filter(models.StorageMedia.status.in_(["active", "full"]))
+        .scalar()
+        or 0
+    )
     eligible_redundancy_count = max(total_count - ignored_count, 1)
     redundancy_percentage = (total_versions / eligible_redundancy_count) * 100
 
@@ -258,7 +292,7 @@ def get_dashboard_stats(db_session: Session = Depends(get_db)):
 @router.get("/jobs", response_model=List[JobSchema])
 def list_jobs(limit: int = 10, offset: int = 0, db_session: Session = Depends(get_db)):
     """Returns a paginated list of background archival and discovery jobs."""
-    return (
+    jobs = (
         db_session.query(models.Job)
         .order_by(models.Job.created_at.desc())
         .limit(limit)
@@ -266,6 +300,43 @@ def list_jobs(limit: int = 10, offset: int = 0, db_session: Session = Depends(get_db)):
         .all()
     )
+
+    job_ids = [job.id for job in jobs]
+    if job_ids:
+        placeholders = ", ".join([f":id{i}" for i in range(len(job_ids))])
+        params = {f"id{i}": jid for i, jid in enumerate(job_ids)}
+        subquery = text(f"""
+            SELECT jl.job_id, jl.message
+            FROM job_logs jl
+            INNER JOIN (
+                SELECT job_id, MAX(id) as max_id
+                FROM job_logs
+                WHERE job_id IN ({placeholders})
+                GROUP BY job_id
+            ) latest ON jl.id = latest.max_id
+        """)
+        latest_logs = {
+            row[0]: row[1] for row in db_session.execute(subquery, params).fetchall()
+        }
+    else:
+        latest_logs = {}
+
+    result = []
+    for job in jobs:
+        job_dict = {
+            "id": job.id,
+            "job_type": job.job_type,
+            "status": job.status,
+            "progress": job.progress,
+            "current_task": job.current_task,
+            "error_message": job.error_message,
+            "started_at": job.started_at,
+            "completed_at": job.completed_at,
+            "created_at": job.created_at,
+            "latest_log": latest_logs.get(job.id),
+        }
+        result.append(JobSchema(**job_dict))
+    return result
 
 
 @router.get("/jobs/count")
 def get_jobs_count(db_session: Session = Depends(get_db)):
@@ -279,7 +350,45 @@ def get_job_detail(job_id: int, db_session: Session = Depends(get_db)):
     job_record = db_session.get(models.Job, job_id)
     if not job_record:
         raise HTTPException(status_code=404, detail="Job not found")
-    return job_record
+
+    latest_log = (
+        db_session.query(models.JobLog)
+        .filter(models.JobLog.job_id == job_id)
+        .order_by(models.JobLog.id.desc())
+        .first()
+    )
+
+    return JobSchema(
+        id=job_record.id,
+        job_type=job_record.job_type,
+        status=job_record.status,
+        progress=job_record.progress,
+        current_task=job_record.current_task,
+        error_message=job_record.error_message,
+        started_at=job_record.started_at,
+        completed_at=job_record.completed_at,
+        created_at=job_record.created_at,
+        latest_log=latest_log.message if latest_log else None,
+    )
+
+
+@router.get("/jobs/{job_id}/logs", response_model=List[JobLogSchema])
+def get_job_logs(job_id: int, db_session: Session = Depends(get_db)):
+    """Retrieves the full execution log for a specific job."""
+    job_record = db_session.get(models.Job, job_id)
+    if not job_record:
+        raise HTTPException(status_code=404, detail="Job not found")
+
+    logs = (
+        db_session.query(models.JobLog)
+        .filter(models.JobLog.job_id == job_id)
+        .order_by(models.JobLog.id.asc())
+        .all()
+    )
+    return [
+        JobLogSchema(id=log.id, message=log.message, timestamp=log.timestamp)
+        for log in logs
+    ]
 
 
 @router.post("/jobs/{job_id}/cancel")
@@ -301,6 +410,27 @@ async def stream_jobs():
             .filter(models.Job.status.in_(["RUNNING", "PENDING"]))
             .all()
         )
+        job_ids = [job.id for job in active_jobs]
+        if job_ids:
+            placeholders = ", ".join([f":id{i}" for i in range(len(job_ids))])
+            params = {f"id{i}": jid for i, jid in enumerate(job_ids)}
+            subquery = text(f"""
+                SELECT jl.job_id, jl.message
+                FROM job_logs jl
+                INNER JOIN (
+                    SELECT job_id, MAX(id) as max_id
+                    FROM job_logs
+                    WHERE job_id IN ({placeholders})
+                    GROUP BY job_id
+                ) latest ON jl.id = latest.max_id
+            """)
+            latest_logs = {
+                row[0]: row[1]
+                for row in db_session.execute(subquery, params).fetchall()
+            }
+        else:
+            latest_logs = {}
+
         serialized_data = []
         for job in active_jobs:
             job_dict = {
@@ -312,6 +442,7 @@ async def stream_jobs():
                 "error_message": job.error_message,
                 "started_at": job.started_at,
                 "created_at": job.created_at,
+                "latest_log": latest_logs.get(job.id),
             }
             for date_field in ["started_at", "created_at"]:
                 from datetime import datetime
@@ -374,110 +505,98 @@ def get_scan_status():
     )
 
 
-@router.get("/browse", response_model=List[FileItemSchema])
+def _get_last_scan_time(db_session: Session) -> Optional[datetime]:
+    """Returns the completion time of the most recent successful SCAN job."""
+    last_scan = (
+        db_session.query(models.Job)
+        .filter(models.Job.job_type == "SCAN", models.Job.status == "COMPLETED")
+        .order_by(models.Job.completed_at.desc())
+        .first()
+    )
+    return last_scan.completed_at if last_scan else None
+
+
+@router.get("/browse", response_model=BrowseResponseSchema)
 def browse_system_path(
     path: Optional[str] = None, db_session: Session = Depends(get_db)
 ):
-    """Provides a browsable view of the host filesystem for rule configuration."""
+    """Provides a browsable view of the indexed filesystem from the database."""
     roots = get_source_roots(db_session)
     tracking_rules = db_session.query(models.TrackedSource).all()
     tracking_map = {rule.path: rule.action for rule in tracking_rules}
     exclusion_spec = get_exclusion_spec(db_session)
+    last_scan_time = _get_last_scan_time(db_session)
 
     if path is None or path == "ROOT":
         results = []
         for root_path in roots:
             if not os.path.exists(root_path):
                 continue
-            stats = os.stat(root_path)
-            # Source roots themselves follow policy
-            is_ignored = get_ignored_status(root_path, tracking_map, exclusion_spec)
-            results.append(
-                FileItemSchema(
-                    name=root_path,
-                    path=root_path,
-                    type="directory",
-                    size=stats.st_size,
-                    mtime=stats.st_mtime,
-                    ignored=is_ignored,
-                )
+            count_sql = text("""
+                SELECT COUNT(*) FROM filesystem_state
+                WHERE file_path LIKE :prefix
+            """)
+            count = (
+                db_session.execute(count_sql, {"prefix": f"{root_path}%"}).scalar() or 0
             )
-        return results
+            if count > 0:
+                is_ignored = get_ignored_status(root_path, tracking_map, exclusion_spec)
+                results.append(
+                    FileItemSchema(
+                        name=root_path,
+                        path=root_path,
+                        type="directory",
+                        ignored=is_ignored,
+                    )
+                )
+        return BrowseResponseSchema(files=results, last_scan_time=last_scan_time)
 
     if not os.path.exists(path):
         raise HTTPException(status_code=404, detail="Path not found")
 
-    results = []
-    try:
-        entries = []
-        immediate_file_paths = []
-        with os.scandir(path) as directory_iterator:
-            for entry in directory_iterator:
-                entries.append(entry)
-                if not entry.is_dir(follow_symlinks=False):
-                    immediate_file_paths.append(entry.path)
-
-        # Fetch existing indexed files for ONLY the immediate files in this directory
-        indexed_info = {}  # path -> (sha256_hash, is_ignored)
-        if immediate_file_paths:
-            for i in range(0, len(immediate_file_paths), 900):
-                chunk = immediate_file_paths[i : i + 900]
-                for f_path, sha256_hash, db_ignored in (
-                    db_session.query(
-                        models.FilesystemState.file_path,
-                        models.FilesystemState.sha256_hash,
-                        models.FilesystemState.is_ignored,
-                    )
-                    .filter(models.FilesystemState.file_path.in_(chunk))
-                    .all()
-                ):
-                    indexed_info[f_path] = (sha256_hash, db_ignored)
-
-        for entry in entries:
-            try:
-                # Explicitly don't follow symlinks during browsing to show raw state
-                file_stats = entry.stat(follow_symlinks=False)
-
-                if entry.path in indexed_info:
-                    # If in DB, the DB flag is the source of truth for archival intent
-                    is_ignored = indexed_info[entry.path][1]
-                    item_hash = indexed_info[entry.path][0]
-                else:
-                    # If not in DB, calculate intended state based on policy
-                    is_ignored = get_ignored_status(
-                        entry.path, tracking_map, exclusion_spec
-                    )
-                    item_hash = None
-
-                results.append(
-                    FileItemSchema(
-                        name=entry.name,
-                        path=entry.path,
-                        type="directory"
-                        if entry.is_dir(follow_symlinks=False)
-                        else "file",
-                        size=file_stats.st_size,
-                        mtime=file_stats.st_mtime,
-                        ignored=is_ignored,
-                        sha256_hash=item_hash,
-                    )
-                )
-            except (OSError, FileNotFoundError):
-                continue
-    except PermissionError:
-        raise HTTPException(status_code=403, detail="Permission denied")
-
-    # Deduplicate by path to prevent frontend keyed each block errors
-    seen_paths: set[str] = set()
-    deduped_results: list[FileItemSchema] = []
-    for r in results:
-        if r.path not in seen_paths:
-            seen_paths.add(r.path)
-            deduped_results.append(r)
-    results = deduped_results
+    target_prefix = path if path.endswith("/") else path + "/"
+
+    files_sql = text("""
+        SELECT file_path, size, mtime, sha256_hash, is_ignored
+        FROM filesystem_state
+        WHERE file_path LIKE :prefix
+          AND file_path != :prefix
+    """)
+    rows = db_session.execute(files_sql, {"prefix": f"{target_prefix}%"}).fetchall()
+
+    results = []
+    seen = set()
+
+    for file_path, size, mtime, sha256_hash, is_ignored in rows:
+        relative = file_path[len(target_prefix) :]
+        if "/" in relative:
+            immediate_name = relative.split("/")[0]
+            child_path = target_prefix + immediate_name
+            if child_path not in seen:
+                seen.add(child_path)
+                dir_ignored = get_ignored_status(
+                    child_path, tracking_map, exclusion_spec
+                )
+                results.append(
+                    FileItemSchema(
+                        name=immediate_name,
+                        path=child_path,
+                        type="directory",
+                        ignored=dir_ignored,
+                    )
+                )
+        else:
+            if file_path not in seen:
+                seen.add(file_path)
+                results.append(
+                    FileItemSchema(
+                        name=relative,
+                        path=file_path,
+                        type="file",
+                        size=size,
+                        mtime=mtime,
+                        ignored=is_ignored,
+                        sha256_hash=sha256_hash,
+                    )
+                )
 
     results.sort(key=lambda x: (x.type != "directory", x.name.lower()))
-    return results
+    return BrowseResponseSchema(files=results, last_scan_time=last_scan_time)
 
 
 @router.get("/search", response_model=List[FileItemSchema])
@@ -932,6 +1051,17 @@ def list_discrepancies(db_session: Session = Depends(get_db)):
         if record.id in seen_ids:
             continue
         seen_ids.add(record.id)
 
+        has_valid_versions = (
+            db_session.query(models.FileVersion)
+            .join(models.StorageMedia)
+            .filter(
+                models.FileVersion.filesystem_state_id == record.id,
+                models.StorageMedia.status.in_(["active", "full"]),
+            )
+            .first()
+        ) is not None
+
         if record.is_deleted:
             results.append(
                 DiscrepancySchema(
@@ -942,7 +1072,7 @@ def list_discrepancies(db_session: Session = Depends(get_db)):
                     last_seen_timestamp=record.last_seen_timestamp,
                     sha256_hash=record.sha256_hash,
                     is_deleted=True,
-                    has_versions=len(record.versions) > 0,
+                    has_versions=has_valid_versions,
                 )
             )
         elif not os.path.exists(record.file_path):
@@ -955,7 +1085,7 @@ def list_discrepancies(db_session: Session = Depends(get_db)):
                     last_seen_timestamp=record.last_seen_timestamp,
                     sha256_hash=record.sha256_hash,
                     is_deleted=False,
-                    has_versions=len(record.versions) > 0,
+                    has_versions=has_valid_versions,
                 )
            )
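
Note: both list_jobs and stream_jobs above resolve each job's newest log row with a single MAX(id)-per-group join instead of one query per job. A self-contained sketch of that pattern (the table name matches the migration; the data is made up):

    import sqlite3

    conn = sqlite3.connect(":memory:")
    conn.execute("CREATE TABLE job_logs (id INTEGER PRIMARY KEY, job_id INTEGER, message TEXT)")
    conn.executemany(
        "INSERT INTO job_logs (job_id, message) VALUES (?, ?)",
        [(1, "started"), (1, "finished"), (2, "started")],
    )
    # Join each row against the max id within its job_id group to get the latest message.
    rows = conn.execute("""
        SELECT jl.job_id, jl.message
        FROM job_logs jl
        INNER JOIN (
            SELECT job_id, MAX(id) AS max_id FROM job_logs GROUP BY job_id
        ) latest ON jl.id = latest.max_id
    """).fetchall()
    print(dict(rows))  # {1: 'finished', 2: 'started'}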
@@ -126,6 +126,23 @@ class Job(Base):
         DateTime, default=lambda: datetime.now(timezone.utc)
     )
 
+    logs: Mapped[List["JobLog"]] = relationship(
+        back_populates="job", cascade="all, delete-orphan"
+    )
+
+
+class JobLog(Base):
+    __tablename__ = "job_logs"
+
+    id: Mapped[int] = mapped_column(primary_key=True)
+    job_id: Mapped[int] = mapped_column(ForeignKey("jobs.id"))
+    message: Mapped[str] = mapped_column(String)
+    timestamp: Mapped[datetime] = mapped_column(
+        DateTime, default=lambda: datetime.now(timezone.utc)
+    )
+
+    job: Mapped["Job"] = relationship(back_populates="logs")
+
 
 class SystemSetting(Base):
     __tablename__ = "system_settings"
 
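
Note: because Job.logs is declared with back_populates and cascade="all, delete-orphan", log rows follow their parent job's lifecycle. A hedged usage sketch (the import paths and the Job constructor fields are assumptions about the project layout, not taken from this diff):

    from app.db import SessionLocal   # assumed location of the session factory
    from app import models            # assumed location of the models module

    with SessionLocal() as session:
        job = models.Job(job_type="SCAN", status="PENDING", progress=0.0)  # illustrative fields
        job.logs.append(models.JobLog(message="queued"))  # back_populates also sets JobLog.job
        session.add(job)
        session.commit()
        session.delete(job)  # the delete-orphan cascade removes the JobLog rows as well
        session.commit()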
@@ -261,12 +261,16 @@ class ArchiverService:
         JobManager.update_job(
             job_id, 5.0, f"Calculating backup set for {media_record.identifier}..."
         )
+        JobManager.add_job_log(job_id, f"Starting backup to {media_record.identifier}")
 
         workload_batch = self.assemble_backup_batch(db_session, media_id)
         if not workload_batch:
+            JobManager.add_job_log(job_id, "No files require backup")
             JobManager.complete_job(job_id)
             return
 
+        JobManager.add_job_log(job_id, f"{len(workload_batch)} files queued for backup")
+
         # --- Tar Chunking Logic ---
         # Ensure at least 100 archives per tape to improve restoration granularity.
         # Max chunk size = capacity / 100.
@@ -326,10 +330,18 @@ class ArchiverService:
         if current_chunk:
             chunks.append(current_chunk)
 
+        JobManager.add_job_log(job_id, f"Packed into {len(chunks)} archive(s)")
+
         for chunk_index, chunk_items in enumerate(chunks):
             if JobManager.is_cancelled(job_id):
                 break
 
+            chunk_num = chunk_index + 1
+            JobManager.add_job_log(
+                job_id,
+                f"Processing archive {chunk_num}/{len(chunks)} ({len(chunk_items)} files)",
+            )
+
             archive_filename = f"backup_{datetime.now(timezone.utc).strftime('%Y%m%d_%H%M%S')}_{chunk_index}.tar"
             staging_full_path = os.path.join(
                 self.staging_directory, archive_filename
@@ -613,12 +625,16 @@ class ArchiverService:
             else 0
         )
 
-        if utilization_ratio >= 0.98:
+        if utilization_ratio >= 0.98 and media_record.status == "active":
             logger.info(
                 f"MEDIA SATURATED: {media_record.identifier} ({utilization_ratio*100:.1f}%)"
             )
             media_record.status = "full"
 
+            JobManager.add_job_log(
+                job_id, f"Media {media_record.identifier} marked as full"
+            )
+
             # Automate priority ceding: Move this media to the end of the list
             max_priority = (
                 db_session.query(func.max(models.StorageMedia.priority_index))
@@ -636,6 +652,9 @@ class ArchiverService:
                 f"Media record {media_record.id} was modified or deleted by another process; skipping final commit"
             )
 
+        JobManager.add_job_log(
+            job_id, f"Backup complete. Utilization: {utilization_ratio*100:.1f}%"
+        )
         JobManager.complete_job(job_id)
         from app.services.notifications import notification_manager
 
@@ -661,6 +680,7 @@ class ArchiverService:
         """Orchestrates the retrieval and reassembly of data from storage providers."""
         JobManager.start_job(job_id)
         JobManager.update_job(job_id, 2.0, "Building recovery manifest...")
+        JobManager.add_job_log(job_id, "Starting restore")
 
         active_cart = (
             db_session.query(models.RestoreCart)
@@ -672,9 +692,12 @@ class ArchiverService:
             .all()
         )
         if not active_cart:
+            JobManager.add_job_log(job_id, "Restore queue is empty, nothing to do")
             JobManager.complete_job(job_id)
             return
 
+        JobManager.add_job_log(job_id, f"{len(active_cart)} items in restore queue")
+
         os.makedirs(destination_root, exist_ok=True)
 
         media_workload: Dict[int, Dict[str, List[models.FileVersion]]] = {}
@@ -709,6 +732,10 @@ class ArchiverService:
             media_record = db_session.get(models.StorageMedia, media_id)
             if not media_record:
                 continue
+            JobManager.add_job_log(
+                job_id,
+                f"Reading from {media_record.identifier} ({len(archive_groups)} archive(s))",
+            )
             provider = self._get_storage_provider(media_record)
             if not provider:
                 continue
@@ -875,6 +902,7 @@ class ArchiverService:
             if not JobManager.is_cancelled(job_id):
                 db_session.query(models.RestoreCart).delete()
                 db_session.commit()
+                JobManager.add_job_log(job_id, "Restore complete, queue cleared")
             JobManager.complete_job(job_id)
 
         except Exception as e:
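
Note: the "Tar Chunking Logic" comment above pins the archive size to capacity / 100 so every tape yields at least 100 restorable tar files. The arithmetic as a standalone sketch (all numbers are invented):

    capacity_bytes = 12_000_000_000_000      # e.g. a 12 TB tape
    max_chunk_size = capacity_bytes // 100   # size ceiling per archive => >= 100 archives

    files = [3_500_000_000, 80_000_000_000, 1_200_000_000]  # pretend workload sizes
    chunks, current, current_size = [], [], 0
    for size in files:
        if current and current_size + size > max_chunk_size:
            chunks.append(current)
            current, current_size = [], 0
        current.append(size)
        current_size += size
    if current:
        chunks.append(current)
    print(f"{len(chunks)} archive(s)")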
@@ -383,6 +383,18 @@ class JobManager:
             db_session.rollback()
             logger.debug(f"JobManager.fail_job failed for {job_id}: {e}")
 
+    @staticmethod
+    def add_job_log(job_id: int, message: str):
+        """Appends a log entry to a job's log history."""
+        with SessionLocal() as db_session:
+            try:
+                log_entry = models.JobLog(job_id=job_id, message=message)
+                db_session.add(log_entry)
+                db_session.commit()
+            except (StaleDataError, Exception) as e:
+                db_session.rollback()
+                logger.debug(f"JobManager.add_job_log failed for {job_id}: {e}")
+
     @staticmethod
     def cancel_job(job_id: int):
         """Submits a cancellation request for a pending or running job."""
@@ -514,6 +526,7 @@ class ScannerService:
         if job_id is not None:
             JobManager.start_job(job_id)
             JobManager.update_job(job_id, 0.0, "Starting system scan...")
+            JobManager.add_job_log(job_id, "Starting system scan")
 
         self._set_process_priority("normal")
         with self._metrics_lock:
@@ -661,6 +674,10 @@ class ScannerService:
             )
 
         if job_id is not None and not JobManager.is_cancelled(job_id):
+            JobManager.add_job_log(
+                job_id,
+                f"Scan complete: {self.files_new} new, {self.files_modified} modified, {self.files_missing} missing",
+            )
             JobManager.complete_job(job_id)
         self.last_run_time = current_timestamp
 
@@ -901,6 +918,10 @@ class ScannerService:
                     break
 
             if not JobManager.is_cancelled(hashing_job.id) and self.is_hashing:
+                JobManager.add_job_log(
+                    hashing_job.id,
+                    f"Hashing complete: {self.files_hashed} files indexed",
+                )
                 JobManager.complete_job(hashing_job.id)
 
         except Exception as e:
@@ -223,3 +223,159 @@ def test_delete_file_record(client, db_session):
         .first()
         is None
     )
+
+
+def test_dashboard_stats_excludes_failed_media(client, db_session):
+    """Tests that dashboard stats do not count versions on failed or retired media."""
+    active_media = models.StorageMedia(
+        media_type="hdd", identifier="M1", capacity=5000, status="active"
+    )
+    failed_media = models.StorageMedia(
+        media_type="tape", identifier="TAPE_01", capacity=5000, status="failed"
+    )
+    retired_media = models.StorageMedia(
+        media_type="tape", identifier="TAPE_02", capacity=5000, status="retired"
+    )
+    db_session.add_all([active_media, failed_media, retired_media])
+    db_session.flush()
+
+    file1 = models.FilesystemState(
+        file_path="/source/only_active.txt",
+        size=2048,
+        mtime=1000,
+        is_ignored=False,
+    )
+    file2 = models.FilesystemState(
+        file_path="/source/only_failed.txt",
+        size=4096,
+        mtime=1000,
+        is_ignored=False,
+    )
+    file3 = models.FilesystemState(
+        file_path="/source/only_retired.txt",
+        size=8192,
+        mtime=1000,
+        is_ignored=False,
+    )
+    db_session.add_all([file1, file2, file3])
+    db_session.flush()
+
+    db_session.add(
+        models.FileVersion(
+            filesystem_state_id=file1.id,
+            media_id=active_media.id,
+            file_number="1",
+            offset_start=0,
+            offset_end=2048,
+        )
+    )
+    db_session.add(
+        models.FileVersion(
+            filesystem_state_id=file2.id,
+            media_id=failed_media.id,
+            file_number="1",
+            offset_start=0,
+            offset_end=4096,
+        )
+    )
+    db_session.add(
+        models.FileVersion(
+            filesystem_state_id=file3.id,
+            media_id=retired_media.id,
+            file_number="1",
+            offset_start=0,
+            offset_end=8192,
+        )
+    )
+    db_session.commit()
+
+    response = client.get("/system/dashboard/stats")
+    assert response.status_code == 200
+    data = response.json()
+
+    assert data["unprotected_files_count"] == 2
+    assert data["unprotected_data_size"] == 12288
+    assert data["archived_data_size"] == 2048
+
+
+def test_discrepancies_excludes_versions_on_unavailable_media(client, db_session):
+    """Tests that discrepancy has_versions is False when only backed up on failed/retired media."""
+    failed_media = models.StorageMedia(
+        media_type="tape", identifier="TAPE_BAD", capacity=5000, status="failed"
+    )
+    retired_media = models.StorageMedia(
+        media_type="tape", identifier="TAPE_OLD", capacity=5000, status="retired"
+    )
+    active_media = models.StorageMedia(
+        media_type="hdd", identifier="M_OK", capacity=5000, status="active"
+    )
+    db_session.add_all([failed_media, retired_media, active_media])
+    db_session.flush()
+
+    file_failed = models.FilesystemState(
+        file_path="/data/gone_failed.txt",
+        size=500,
+        mtime=1000,
+        is_deleted=True,
+        is_ignored=False,
+    )
+    file_retired = models.FilesystemState(
+        file_path="/data/gone_retired.txt",
+        size=600,
+        mtime=1000,
+        is_deleted=True,
+        is_ignored=False,
+    )
+    file_good = models.FilesystemState(
+        file_path="/data/exists_on_good.txt",
+        size=700,
+        mtime=1000,
+        is_deleted=True,
+        is_ignored=False,
+    )
+    db_session.add_all([file_failed, file_retired, file_good])
+    db_session.flush()
+
+    db_session.add(
+        models.FileVersion(
+            filesystem_state_id=file_failed.id,
+            media_id=failed_media.id,
+            file_number="1",
+            offset_start=0,
+            offset_end=500,
+        )
+    )
+    db_session.add(
+        models.FileVersion(
+            filesystem_state_id=file_retired.id,
+            media_id=retired_media.id,
+            file_number="1",
+            offset_start=0,
+            offset_end=600,
+        )
+    )
+    db_session.add(
+        models.FileVersion(
+            filesystem_state_id=file_good.id,
+            media_id=active_media.id,
+            file_number="1",
+            offset_start=0,
+            offset_end=700,
+        )
+    )
+    db_session.commit()
+
+    response = client.get("/system/discrepancies")
+    assert response.status_code == 200
+    data = response.json()
+
+    assert len(data) == 3
+
+    failed_backed = next(d for d in data if d["path"] == "/data/gone_failed.txt")
+    assert failed_backed["has_versions"] is False
+
+    retired_backed = next(d for d in data if d["path"] == "/data/gone_retired.txt")
+    assert retired_backed["has_versions"] is False
+
+    good_backed = next(d for d in data if d["path"] == "/data/exists_on_good.txt")
+    assert good_backed["has_versions"] is True
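
Note: the two regression tests above can be run on their own via pytest's keyword filter; this sketch uses pytest's Python entry point, but the plain CLI (pytest -k "...") works the same way:

    import pytest

    pytest.main([
        "-q",
        "-k",
        "test_dashboard_stats_excludes_failed_media or "
        "test_discrepancies_excludes_versions_on_unavailable_media",
    ])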
@@ -13,4 +13,4 @@ import type { ClientOptions as ClientOptions2 } from './types.gen';
  */
 export type CreateClientConfig<T extends ClientOptions = ClientOptions2> = (override?: Config<ClientOptions & T>) => Config<Required<ClientOptions> & T>;
 
-export const client = createClient(createConfig<ClientOptions2>({ baseUrl: 'http://localhost:8000' }));
+export const client = createClient(createConfig<ClientOptions2>());
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@@ -1,33 +1,7 @@
|
||||
// This file is auto-generated by @hey-api/openapi-ts
|
||||
|
||||
export type ClientOptions = {
|
||||
baseUrl: 'http://localhost:8000' | (string & {});
|
||||
};
|
||||
|
||||
/**
|
||||
* BackupJobSchema
|
||||
*/
|
||||
export type BackupJobSchema = {
|
||||
/**
|
||||
* Id
|
||||
*/
|
||||
id: number;
|
||||
/**
|
||||
* Job Type
|
||||
*/
|
||||
job_type: string;
|
||||
/**
|
||||
* Status
|
||||
*/
|
||||
status: string;
|
||||
/**
|
||||
* Started At
|
||||
*/
|
||||
started_at?: string | null;
|
||||
/**
|
||||
* Completed At
|
||||
*/
|
||||
completed_at?: string | null;
|
||||
baseUrl: `${string}://${string}` | (string & {});
|
||||
};
|
||||
|
||||
/**
|
||||
@@ -44,6 +18,20 @@ export type BatchTrackRequest = {
|
||||
untracks?: Array<string>;
|
||||
};
|
||||
|
||||
/**
|
||||
* BrowseResponseSchema
|
||||
*/
|
||||
export type BrowseResponseSchema = {
|
||||
/**
|
||||
* Files
|
||||
*/
|
||||
files: Array<FileItemSchema>;
|
||||
/**
|
||||
* Last Scan Time
|
||||
*/
|
||||
last_scan_time?: string | null;
|
||||
};
|
||||
|
||||
/**
|
||||
* CartFileItemSchema
|
||||
*/
|
||||
@@ -184,6 +172,44 @@ export type DirectoryCartRequest = {
|
||||
path: string;
|
||||
};
|
||||
|
||||
/**
|
||||
* DiscrepancySchema
|
||||
*/
|
||||
export type DiscrepancySchema = {
|
||||
/**
|
||||
* Id
|
||||
*/
|
||||
id: number;
|
||||
/**
|
||||
* Path
|
||||
*/
|
||||
path: string;
|
||||
/**
|
||||
* Size
|
||||
*/
|
||||
size: number;
|
||||
/**
|
||||
* Mtime
|
||||
*/
|
||||
mtime: string;
|
||||
/**
|
||||
* Last Seen Timestamp
|
||||
*/
|
||||
last_seen_timestamp?: string | null;
|
||||
/**
|
||||
* Sha256 Hash
|
||||
*/
|
||||
sha256_hash?: string | null;
|
||||
/**
|
||||
* Is Deleted
|
||||
*/
|
||||
is_deleted: boolean;
|
||||
/**
|
||||
* Has Versions
|
||||
*/
|
||||
has_versions?: boolean;
|
||||
};
|
||||
|
||||
/**
|
||||
* FileItemSchema
|
||||
*/
|
||||
@@ -270,11 +296,18 @@ export type ItemMetadataSchema = {
|
||||
* Sha256 Hash
|
||||
*/
|
||||
sha256_hash?: string | null;
|
||||
|
||||
/**
|
||||
* Is Ignored
|
||||
*/
|
||||
is_ignored?: boolean;
|
||||
/**
|
||||
* Is Deleted
|
||||
*/
|
||||
is_deleted?: boolean;
|
||||
/**
|
||||
* Exists On Disk
|
||||
*/
|
||||
exists_on_disk?: boolean | null;
|
||||
/**
|
||||
* Child Count
|
||||
*/
|
||||
@@ -292,45 +325,21 @@ export type ItemMetadataSchema = {
|
||||
};
|
||||
|
||||
/**
|
||||
* JobSchema
|
||||
* JobLogSchema
|
||||
*/
|
||||
export type JobSchema = {
|
||||
export type JobLogSchema = {
|
||||
/**
|
||||
* Id
|
||||
*/
|
||||
id: number;
|
||||
/**
|
||||
* Job Type
|
||||
* Message
|
||||
*/
|
||||
job_type: string;
|
||||
message: string;
|
||||
/**
|
||||
* Status
|
||||
* Timestamp
|
||||
*/
|
||||
status: string;
|
||||
/**
|
||||
* Progress
|
||||
*/
|
||||
progress: number;
|
||||
/**
|
||||
* Current Task
|
||||
*/
|
||||
current_task?: string | null;
|
||||
/**
|
||||
* Error Message
|
||||
*/
|
||||
error_message?: string | null;
|
||||
/**
|
||||
* Started At
|
||||
*/
|
||||
started_at?: string | null;
|
||||
/**
|
||||
* Completed At
|
||||
*/
|
||||
completed_at?: string | null;
|
||||
/**
|
||||
* Created At
|
||||
*/
|
||||
created_at: string;
|
||||
timestamp: string;
|
||||
};
|
||||
|
||||
/**
|
||||
@@ -555,6 +564,10 @@ export type ScanStatusSchema = {
|
||||
* Files Modified
|
||||
*/
|
||||
files_modified: number;
|
||||
/**
|
||||
* Files Missing
|
||||
*/
|
||||
files_missing: number;
|
||||
/**
|
||||
* Total Files Found
|
||||
*/
|
||||
@@ -677,6 +690,92 @@ export type ValidationError = {
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* JobSchema
|
||||
*/
|
||||
export type AppApiBackupsJobSchema = {
|
||||
/**
|
||||
* Id
|
||||
*/
|
||||
id: number;
|
||||
/**
|
||||
* Job Type
|
||||
*/
|
||||
job_type: string;
|
||||
/**
|
||||
* Status
|
||||
*/
|
||||
status: string;
|
||||
/**
|
||||
* Started At
|
||||
*/
|
||||
started_at?: string | null;
|
||||
/**
|
||||
* Completed At
|
||||
*/
|
||||
completed_at?: string | null;
|
||||
};
|
||||
|
||||
/**
|
||||
* JobSchema
|
||||
*/
|
||||
export type AppApiSystemJobSchema = {
|
||||
/**
|
||||
* Id
|
||||
*/
|
||||
id: number;
|
||||
/**
|
||||
* Job Type
|
||||
*/
|
||||
job_type: string;
|
||||
/**
|
||||
* Status
|
||||
*/
|
||||
status: string;
|
||||
/**
|
||||
* Progress
|
||||
*/
|
||||
progress: number;
|
||||
/**
|
||||
* Current Task
|
||||
*/
|
||||
current_task?: string | null;
|
||||
/**
|
||||
* Error Message
|
||||
*/
|
||||
error_message?: string | null;
|
||||
/**
|
||||
* Started At
|
||||
*/
|
||||
started_at?: string | null;
|
||||
/**
|
||||
* Completed At
|
||||
*/
|
||||
completed_at?: string | null;
|
||||
/**
|
||||
* Created At
|
||||
*/
|
||||
created_at: string;
|
||||
/**
|
||||
* Latest Log
|
||||
*/
|
||||
latest_log?: string | null;
|
||||
};
|
||||
|
||||
export type ResetTestEnvironmentSystemTestResetPostData = {
|
||||
body?: never;
|
||||
path?: never;
|
||||
query?: never;
|
||||
url: '/system/test/reset';
|
||||
};
|
||||
|
||||
export type ResetTestEnvironmentSystemTestResetPostResponses = {
|
||||
/**
|
||||
* Successful Response
|
||||
*/
|
||||
200: unknown;
|
||||
};
|
||||
|
||||
export type GetDashboardStatsSystemDashboardStatsGetData = {
|
||||
body?: never;
|
||||
path?: never;
|
||||
@@ -724,7 +823,7 @@ export type ListJobsSystemJobsGetResponses = {
|
||||
*
|
||||
* Successful Response
|
||||
*/
|
||||
200: Array<JobSchema>;
|
||||
200: Array<AppApiSystemJobSchema>;
|
||||
};
|
||||
|
||||
export type ListJobsSystemJobsGetResponse = ListJobsSystemJobsGetResponses[keyof ListJobsSystemJobsGetResponses];
|
||||
@@ -768,11 +867,43 @@ export type GetJobDetailSystemJobsJobIdGetResponses = {
|
||||
/**
|
||||
* Successful Response
|
||||
*/
|
||||
200: JobSchema;
|
||||
200: AppApiSystemJobSchema;
|
||||
};
|
||||
|
||||
export type GetJobDetailSystemJobsJobIdGetResponse = GetJobDetailSystemJobsJobIdGetResponses[keyof GetJobDetailSystemJobsJobIdGetResponses];
|
||||
|
||||
export type GetJobLogsSystemJobsJobIdLogsGetData = {
|
||||
body?: never;
|
||||
path: {
|
||||
/**
|
||||
* Job Id
|
||||
*/
|
||||
job_id: number;
|
||||
};
|
||||
query?: never;
|
||||
url: '/system/jobs/{job_id}/logs';
|
||||
};
|
||||
|
||||
export type GetJobLogsSystemJobsJobIdLogsGetErrors = {
|
||||
/**
|
||||
* Validation Error
|
||||
*/
|
||||
422: HttpValidationError;
|
||||
};
|
||||
|
||||
export type GetJobLogsSystemJobsJobIdLogsGetError = GetJobLogsSystemJobsJobIdLogsGetErrors[keyof GetJobLogsSystemJobsJobIdLogsGetErrors];
|
||||
|
||||
export type GetJobLogsSystemJobsJobIdLogsGetResponses = {
|
||||
/**
|
||||
* Response Get Job Logs System Jobs Job Id Logs Get
|
||||
*
|
||||
* Successful Response
|
||||
*/
|
||||
200: Array<JobLogSchema>;
|
||||
};
|
||||
|
||||
export type GetJobLogsSystemJobsJobIdLogsGetResponse = GetJobLogsSystemJobsJobIdLogsGetResponses[keyof GetJobLogsSystemJobsJobIdLogsGetResponses];
|
||||
|
||||
export type CancelJobSystemJobsJobIdCancelPostData = {
|
||||
body?: never;
|
||||
path: {
|
||||
@@ -882,11 +1013,9 @@ export type BrowseSystemPathSystemBrowseGetError = BrowseSystemPathSystemBrowseG
|
||||
|
||||
export type BrowseSystemPathSystemBrowseGetResponses = {
|
||||
/**
|
||||
* Response Browse System Path System Browse Get
|
||||
*
|
||||
* Successful Response
|
||||
*/
|
||||
200: Array<FileItemSchema>;
|
||||
200: BrowseResponseSchema;
|
||||
};
|
||||
|
||||
export type BrowseSystemPathSystemBrowseGetResponse = BrowseSystemPathSystemBrowseGetResponses[keyof BrowseSystemPathSystemBrowseGetResponses];
|
||||
@@ -1159,6 +1288,108 @@ export type GetSystemTreeSystemTreeGetResponses = {
|
||||
|
||||
export type GetSystemTreeSystemTreeGetResponse = GetSystemTreeSystemTreeGetResponses[keyof GetSystemTreeSystemTreeGetResponses];
|
||||
|
||||
export type ListDiscrepanciesSystemDiscrepanciesGetData = {
|
||||
body?: never;
|
||||
path?: never;
|
||||
query?: never;
|
||||
url: '/system/discrepancies';
|
||||
};
|
||||
|
||||
export type ListDiscrepanciesSystemDiscrepanciesGetResponses = {
|
||||
/**
|
||||
* Response List Discrepancies System Discrepancies Get
|
||||
*
|
||||
* Successful Response
|
||||
*/
|
||||
200: Array<DiscrepancySchema>;
|
||||
};
|
||||
|
||||
export type ListDiscrepanciesSystemDiscrepanciesGetResponse = ListDiscrepanciesSystemDiscrepanciesGetResponses[keyof ListDiscrepanciesSystemDiscrepanciesGetResponses];
|
||||
|
||||
export type ConfirmFileDeletedSystemDiscrepanciesFileIdConfirmPostData = {
|
||||
body?: never;
|
||||
path: {
|
||||
/**
|
||||
* File Id
|
||||
*/
|
||||
file_id: number;
|
||||
};
|
||||
query?: never;
|
||||
url: '/system/discrepancies/{file_id}/confirm';
|
||||
};
|
||||
|
||||
export type ConfirmFileDeletedSystemDiscrepanciesFileIdConfirmPostErrors = {
|
||||
/**
|
||||
* Validation Error
|
||||
*/
|
||||
422: HttpValidationError;
|
||||
};
|
||||
|
||||
export type ConfirmFileDeletedSystemDiscrepanciesFileIdConfirmPostError = ConfirmFileDeletedSystemDiscrepanciesFileIdConfirmPostErrors[keyof ConfirmFileDeletedSystemDiscrepanciesFileIdConfirmPostErrors];
|
||||
|
||||
export type ConfirmFileDeletedSystemDiscrepanciesFileIdConfirmPostResponses = {
|
||||
/**
|
||||
* Successful Response
|
||||
*/
|
||||
200: unknown;
|
||||
};
|
||||
|
||||
export type DismissDiscrepancySystemDiscrepanciesFileIdDismissPostData = {
|
||||
body?: never;
|
||||
path: {
|
||||
/**
|
||||
* File Id
|
||||
*/
|
||||
file_id: number;
|
||||
};
|
||||
query?: never;
|
||||
url: '/system/discrepancies/{file_id}/dismiss';
|
||||
};
|
||||
|
||||
export type DismissDiscrepancySystemDiscrepanciesFileIdDismissPostErrors = {
|
||||
/**
|
||||
* Validation Error
|
||||
*/
|
||||
422: HttpValidationError;
|
||||
};
|
||||
|
||||
export type DismissDiscrepancySystemDiscrepanciesFileIdDismissPostError = DismissDiscrepancySystemDiscrepanciesFileIdDismissPostErrors[keyof DismissDiscrepancySystemDiscrepanciesFileIdDismissPostErrors];
|
||||
|
||||
export type DismissDiscrepancySystemDiscrepanciesFileIdDismissPostResponses = {
|
||||
/**
|
||||
* Successful Response
|
||||
*/
|
||||
200: unknown;
|
||||
};
|
||||
|
||||
export type DeleteFileRecordSystemDiscrepanciesFileIdDeleteData = {
|
||||
body?: never;
|
||||
path: {
|
||||
/**
|
||||
* File Id
|
||||
*/
|
||||
file_id: number;
|
||||
};
|
||||
query?: never;
|
||||
url: '/system/discrepancies/{file_id}';
|
||||
};
|
||||
|
||||
export type DeleteFileRecordSystemDiscrepanciesFileIdDeleteErrors = {
|
||||
/**
|
||||
* Validation Error
|
||||
*/
|
||||
422: HttpValidationError;
|
||||
};
|
||||
|
||||
export type DeleteFileRecordSystemDiscrepanciesFileIdDeleteError = DeleteFileRecordSystemDiscrepanciesFileIdDeleteErrors[keyof DeleteFileRecordSystemDiscrepanciesFileIdDeleteErrors];
|
||||
|
||||
export type DeleteFileRecordSystemDiscrepanciesFileIdDeleteResponses = {
|
||||
/**
|
||||
* Successful Response
|
||||
*/
|
||||
200: unknown;
|
||||
};
|
||||
|
||||
export type ListStorageProvidersInventoryProvidersGetData = {
|
||||
body?: never;
|
||||
path?: never;
|
||||
@@ -1362,6 +1593,20 @@ export type GetSystemAnalyticsInventoryInsightsGetResponses = {
|
||||
200: unknown;
|
||||
};
|
||||
|
||||
export type DetectUnregisteredMediaInventoryDetectGetData = {
|
||||
body?: never;
|
||||
path?: never;
|
||||
query?: never;
|
||||
url: '/inventory/detect';
|
||||
};
|
||||
|
||||
export type DetectUnregisteredMediaInventoryDetectGetResponses = {
|
||||
/**
|
||||
* Successful Response
|
||||
*/
|
||||
200: unknown;
|
||||
};
|
||||
|
||||
export type BrowseArchiveIndexInventoryBrowseGetData = {
|
||||
body?: never;
|
||||
path?: never;
|
||||
@@ -1539,7 +1784,7 @@ export type ListArchivalHistoryBackupsGetResponses = {
|
||||
*
|
||||
* Successful Response
|
||||
*/
|
||||
200: Array<BackupJobSchema>;
|
||||
200: Array<AppApiBackupsJobSchema>;
|
||||
};
|
||||
|
||||
export type ListArchivalHistoryBackupsGetResponse = ListArchivalHistoryBackupsGetResponses[keyof ListArchivalHistoryBackupsGetResponses];
|
||||
|
||||
@@ -1,9 +1,9 @@
|
||||
<script lang="ts">
|
||||
import { X, Activity, Search, Play, RotateCw, Clock, CheckCircle2, AlertCircle, FileText, Database, HardDrive, MapPin, ExternalLink, ArrowRight } from 'lucide-svelte';
|
||||
import { X, Activity, Search, Play, RotateCw, Clock, CheckCircle2, AlertCircle, FileText, Database, HardDrive, MapPin, ExternalLink, ArrowRight, Terminal } from 'lucide-svelte';
|
||||
import { Button } from './ui/button';
|
||||
import { Card } from './ui/card';
|
||||
import Dialog from './ui/Dialog.svelte';
|
||||
import { getJobDetailSystemJobsJobIdGet, type JobSchema } from '$lib/api';
|
||||
import { getJobDetailSystemJobsJobIdGet, getJobLogsSystemJobsJobIdLogsGet, type AppApiSystemJobSchema } from '$lib/api';
|
||||
import { cn, formatLocalTime, formatLocalDateTime, parseUTCDate } from '$lib/utils';
|
||||
import { onMount } from 'svelte';
|
||||
|
||||
@@ -12,16 +12,19 @@
|
||||
onClear: () => void;
|
||||
}>();
|
||||
|
||||
let job = $state<JobSchema | null>(null);
|
||||
let job = $state<AppApiSystemJobSchema | null>(null);
|
||||
let logs = $state<{ id: number; message: string; timestamp: string }[]>([]);
|
||||
let loading = $state(true);
|
||||
|
||||
async function loadJob() {
|
||||
loading = true;
|
||||
try {
|
||||
const response = await getJobDetailSystemJobsJobIdGet({
|
||||
path: { job_id: jobId }
|
||||
});
|
||||
if (response.data) job = response.data;
|
||||
const [jobRes, logsRes] = await Promise.all([
|
||||
getJobDetailSystemJobsJobIdGet({ path: { job_id: jobId } }),
|
||||
getJobLogsSystemJobsJobIdLogsGet({ path: { job_id: jobId } })
|
||||
]);
|
||||
if (jobRes.data) job = jobRes.data;
|
||||
if (logsRes.data) logs = logsRes.data;
|
||||
} catch (error) {
|
||||
console.error("Failed to load job details:", error);
|
||||
} finally {
|
||||
@@ -42,6 +45,12 @@
|
||||
return `${minutes}m ${remSeconds}s`;
|
||||
}
|
||||
|
||||
function formatLogTime(timestamp: string) {
|
||||
const date = parseUTCDate(timestamp);
|
||||
if (!date) return '--';
|
||||
return date.toLocaleTimeString();
|
||||
}
|
||||
|
||||
onMount(loadJob);
|
||||
</script>
|
||||
|
||||
@@ -99,18 +108,26 @@
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Final Status / Logs -->
|
||||
<!-- Execution Log -->
|
||||
<div class="space-y-4">
|
||||
<div class="flex items-center gap-2 px-1">
|
||||
<FileText size={14} class="text-text-secondary opacity-50" />
|
||||
<Terminal size={14} class="text-text-secondary opacity-50" />
|
||||
<h3 class="text-[10px] font-medium text-text-secondary uppercase tracking-wider">Execution log</h3>
|
||||
</div>
|
||||
|
||||
<div class={cn(
|
||||
"p-5 rounded-xl border mono text-xs leading-relaxed",
|
||||
job.status === 'FAILED' ? "bg-error-color/5 border-error-color/20 text-error-color/90" : "bg-bg-primary border-border-color/60 text-text-primary/80"
|
||||
)}>
|
||||
{#if job.error_message}
|
||||
{#if logs.length > 0}
|
||||
<div class="bg-bg-primary border border-border-color/60 rounded-xl overflow-hidden">
|
||||
<div class="max-h-[300px] overflow-y-auto font-mono text-xs p-4 space-y-1">
|
||||
{#each logs as log (log.id)}
|
||||
<div class="flex gap-3 leading-relaxed">
|
||||
<span class="text-text-secondary/40 shrink-0 select-none">{formatLogTime(log.timestamp)}</span>
|
||||
<span class="text-text-primary/80 whitespace-pre-wrap break-words">{log.message}</span>
|
||||
</div>
|
||||
{/each}
|
||||
</div>
|
||||
</div>
|
||||
{:else if job.error_message}
|
||||
<div class="p-5 rounded-xl border mono text-xs leading-relaxed bg-error-color/5 border-error-color/20 text-error-color/90">
|
||||
<div class="flex gap-3 items-start">
|
||||
<AlertCircle size={16} class="shrink-0 mt-0.5" />
|
||||
<div>
|
||||
@@ -118,7 +135,9 @@
|
||||
{job.error_message}
|
||||
</div>
|
||||
</div>
|
||||
{:else}
|
||||
</div>
|
||||
{:else}
|
||||
<div class="p-5 rounded-xl border mono text-xs leading-relaxed bg-bg-primary border-border-color/60 text-text-primary/80">
|
||||
<div class="flex gap-3 items-start text-success-color">
|
||||
<CheckCircle2 size={16} class="shrink-0 mt-0.5" />
|
||||
<div>
|
||||
@@ -126,8 +145,8 @@
|
||||
{job.current_task || 'Process completed successfully with zero hardware interrupts.'}
|
||||
</div>
|
||||
</div>
|
||||
{/if}
|
||||
</div>
|
||||
</div>
|
||||
{/if}
|
||||
</div>
|
||||
|
||||
<!-- Next Steps / Metadata -->
|
||||
|
||||
@@ -1,11 +1,14 @@
|
||||
<script lang="ts">
|
||||
import { onMount, onDestroy } from 'svelte';
|
||||
import { RotateCw, Activity } from 'lucide-svelte';
|
||||
import { RotateCw, Activity, CheckCircle2 } from 'lucide-svelte';
|
||||
import { Card } from '$lib/components/ui/card';
|
||||
import { getScanStatusSystemScanStatusGet, type ScanStatusSchema } from '$lib/api';
|
||||
import { toast } from 'svelte-sonner';
|
||||
|
||||
let scanStatus = $state<ScanStatusSchema | null>(null);
|
||||
let pollInterval: any;
|
||||
let showCompleted = $state(false);
|
||||
let completedTimeout: any;
|
||||
|
||||
async function updateScanStatus() {
|
||||
try {
|
||||
@@ -16,6 +19,9 @@
|
||||
|
||||
if (wasRunning && !scanStatus.is_running) {
|
||||
toast.success("Filesystem scan completed");
|
||||
showCompleted = true;
|
||||
if (completedTimeout) clearTimeout(completedTimeout);
|
||||
completedTimeout = setTimeout(() => { showCompleted = false; }, 5000);
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
@@ -30,6 +36,7 @@
|
||||
|
||||
onDestroy(() => {
|
||||
if (pollInterval) clearInterval(pollInterval);
|
||||
if (completedTimeout) clearTimeout(completedTimeout);
|
||||
});
|
||||
|
||||
const scanProgress = $derived(
|
||||
@@ -40,59 +47,82 @@
|
||||
</script>
|
||||
|
||||
{#if scanStatus?.is_running}
|
||||
<div class="fixed bottom-8 right-8 z-[100] bg-bg-secondary border border-blue-500/30 rounded-xl p-6 shadow-[0_25px_60px_rgba(0,0,0,0.6)] w-[450px] animate-in fade-in slide-in-from-bottom-8 border-l-4 border-l-blue-500 overflow-hidden group">
|
||||
<div class="absolute inset-0 bg-gradient-to-br from-blue-500/5 to-transparent pointer-events-none"></div>
|
||||
|
||||
<div class="relative z-10">
|
||||
<div class="flex items-center gap-4 mb-6">
|
||||
<div class="p-3 bg-blue-500/10 rounded-xl border border-blue-500/20 group-hover:scale-110 transition-transform duration-500">
|
||||
<RotateCw size={24} class="animate-spin text-blue-500" />
|
||||
</div>
|
||||
<div class="flex-1">
|
||||
<div class="flex justify-between items-center mb-1">
|
||||
<span class="text-xs font-black uppercase tracking-widest text-text-primary">System Scanner Active</span>
|
||||
<div class="flex items-center gap-2">
|
||||
<div class="fixed bottom-6 right-6 z-[100] w-[420px] animate-in fade-in slide-in-from-bottom-4" data-testid="scan-status-overlay">
|
||||
<Card class="bg-bg-secondary border-border-color shadow-2xl overflow-hidden">
|
||||
<!-- Header -->
|
||||
<header class="px-5 py-4 border-b border-border-color bg-bg-tertiary/30 relative overflow-hidden">
|
||||
<div class="absolute inset-0 bg-gradient-to-r from-blue-500/5 to-transparent pointer-events-none"></div>
|
||||
<div class="flex items-center gap-4 relative z-10">
|
||||
<div class="p-2.5 bg-blue-500/10 rounded-xl text-blue-500 border border-blue-500/20">
|
||||
<RotateCw size={20} class="animate-spin" />
|
||||
</div>
|
||||
<div class="flex-1 min-w-0">
|
||||
<div class="flex items-center justify-between">
|
||||
<h3 class="text-sm font-semibold text-text-primary">Filesystem scan in progress</h3>
|
||||
{#if scanStatus.is_throttled}
|
||||
<span class="text-4xs font-black bg-orange-500/10 text-orange-500 px-2 py-0.5 rounded border border-orange-500/20 animate-pulse">THROTTLED</span>
|
||||
<span class="text-[10px] font-medium bg-orange-500/10 text-orange-500 px-2 py-0.5 rounded border border-orange-500/20 ml-2 shrink-0">THROTTLED</span>
|
||||
{/if}
|
||||
<span class="text-sm font-black mono text-blue-400">{scanStatus.hashing_speed}</span>
|
||||
</div>
|
||||
</div>
|
||||
<div class="flex items-center gap-2">
|
||||
<div class="w-1.5 h-1.5 rounded-full bg-blue-500 animate-pulse"></div>
|
||||
<p class="text-3xs font-bold uppercase tracking-[0.2em] text-text-secondary opacity-60">
|
||||
New: {scanStatus.files_new} • Mod: {scanStatus.files_modified}
|
||||
<p class="text-xs text-text-secondary mt-0.5">
|
||||
{scanStatus.files_processed.toLocaleString()} of {scanStatus.total_files_found.toLocaleString()} files
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
</header>
|
||||
|
||||
<!-- Progress -->
|
||||
<div class="p-5 space-y-4">
|
||||
<div class="w-full bg-bg-primary h-2 rounded-full overflow-hidden">
|
||||
<div
|
||||
class="bg-blue-500 h-full transition-all duration-1000"
|
||||
style="width: {scanProgress}%"
|
||||
></div>
|
||||
</div>
|
||||
|
||||
<div class="grid grid-cols-2 gap-3">
|
||||
<div class="bg-bg-primary/50 rounded-lg px-3 py-2.5 border border-border-color/50">
|
||||
<p class="text-[10px] font-medium text-text-secondary uppercase tracking-wide">New files</p>
|
||||
<p class="text-sm font-semibold mono text-text-primary mt-0.5">{scanStatus.files_new.toLocaleString()}</p>
|
||||
</div>
|
||||
<div class="bg-bg-primary/50 rounded-lg px-3 py-2.5 border border-border-color/50">
|
||||
<p class="text-[10px] font-medium text-text-secondary uppercase tracking-wide">Modified</p>
|
||||
<p class="text-sm font-semibold mono text-text-primary mt-0.5">{scanStatus.files_modified.toLocaleString()}</p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{#if scanStatus.current_path}
|
||||
<div class="bg-bg-primary/50 rounded-lg px-3 py-2.5 border border-border-color/50">
|
||||
<p class="text-[10px] font-medium text-text-secondary uppercase tracking-wide mb-1">Current file</p>
|
||||
<p class="text-xs text-text-primary mono truncate">{scanStatus.current_path}</p>
|
||||
</div>
|
||||
{/if}
|
||||
|
||||
{#if scanStatus.hashing_speed}
|
||||
<div class="flex items-center gap-2 text-xs text-text-secondary">
|
||||
<Activity size={12} />
|
||||
<span class="mono">{scanStatus.hashing_speed}</span>
|
||||
</div>
|
||||
{/if}
|
||||
</div>
|
||||
|
||||
<div class="space-y-4">
|
||||
<div class="flex flex-col gap-3">
|
||||
<div class="flex justify-between items-center text-3xs font-black uppercase tracking-widest text-text-secondary">
|
||||
<span class="flex items-center gap-2">
|
||||
<Activity size={12} class="opacity-50" />
|
||||
Indexing Data
|
||||
</span>
|
||||
<span class="mono text-text-primary">
|
||||
{scanStatus.files_processed.toLocaleString()} / {scanStatus.total_files_found.toLocaleString()}
|
||||
</span>
|
||||
</Card>
|
||||
</div>
|
||||
{:else if showCompleted}
|
||||
<div class="fixed bottom-6 right-6 z-[100] w-[420px] animate-in fade-in slide-in-from-bottom-4">
|
||||
<Card class="bg-bg-secondary border-border-color shadow-2xl overflow-hidden">
|
||||
<header class="px-5 py-4 border-b border-border-color bg-bg-tertiary/30 relative overflow-hidden">
|
||||
<div class="absolute inset-0 bg-gradient-to-r from-green-500/5 to-transparent pointer-events-none"></div>
|
||||
<div class="flex items-center gap-4 relative z-10">
|
||||
<div class="p-2.5 bg-green-500/10 rounded-xl text-green-500 border border-green-500/20">
|
||||
<CheckCircle2 size={20} />
|
||||
</div>
|
||||
|
||||
<div class="w-full bg-bg-primary h-1.5 rounded-full border border-white/5 overflow-hidden">
|
||||
<div
|
||||
class="bg-blue-500 h-full transition-all duration-1000 shadow-[0_0_10px_rgba(59,130,246,0.4)]"
|
||||
style="width: {scanProgress}%"
|
||||
></div>
|
||||
</div>
|
||||
|
||||
<div class="bg-bg-primary/80 px-4 py-2.5 rounded-lg border border-white/5 shadow-inner">
|
||||
<p class="text-3xs text-blue-300/80 truncate mono italic leading-relaxed">
|
||||
{scanStatus.current_path || 'Starting scan...'}
|
||||
<div class="flex-1">
|
||||
<h3 class="text-sm font-semibold text-text-primary">Scan completed</h3>
|
||||
<p class="text-xs text-text-secondary mt-0.5">
|
||||
{scanStatus!.files_processed.toLocaleString()} files indexed
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</header>
|
||||
</Card>
|
||||
</div>
|
||||
{/if}
|
||||
|
||||
@@ -113,10 +113,23 @@
|
||||
return `${size.toFixed(1)} ${units[unitIndex]}`;
|
||||
}
|
||||
|
||||
function formatPath(path: string, maxLength = 70) {
|
||||
if (path.length <= maxLength) return { head: path, tail: null };
|
||||
const parts = path.split('/');
|
||||
if (parts.length <= 3) return { head: path, tail: null };
|
||||
|
||||
const headParts = parts.slice(0, -2);
|
||||
const tailParts = parts.slice(-2);
|
||||
const head = headParts.join('/');
|
||||
const tail = tailParts.join('/');
|
||||
|
||||
return { head, tail };
|
||||
}
|
||||
|
||||
onMount(loadDiscrepancies);
|
||||
|
||||
const deletedItems = $derived(discrepancies.filter(d => d.is_deleted));
|
||||
const missingItems = $derived(discrepancies.filter(d => !d.is_deleted));
|
||||
const missingItems = $derived(discrepancies.filter(d => d.is_deleted));
|
||||
const pendingItems = $derived(discrepancies.filter(d => !d.is_deleted));
|
||||
</script>

<svelte:head>
@@ -161,9 +174,9 @@
<FileX size={20} />
</div>
<div class="flex-1">
<span class="text-xs text-text-secondary opacity-60 block mb-1">Confirmed deleted</span>
<h4 class="text-2xl font-bold text-error-color mono tabular-nums">{deletedItems.length}</h4>
<p class="text-[10px] font-medium text-text-secondary uppercase opacity-40 mt-1">Files marked as removed from disk</p>
<span class="text-xs text-text-secondary opacity-60 block mb-1">Missing from disk</span>
<h4 class="text-2xl font-bold text-error-color mono tabular-nums">{missingItems.length}</h4>
<p class="text-[10px] font-medium text-text-secondary uppercase opacity-40 mt-1">Files the scanner did not find</p>
</div>
</div>
</Card>
@@ -174,9 +187,9 @@
<FileQuestion size={20} />
</div>
<div class="flex-1">
<span class="text-xs text-text-secondary opacity-60 block mb-1">Missing from disk</span>
<h4 class="text-2xl font-bold text-yellow-500 mono tabular-nums">{missingItems.length}</h4>
<p class="text-[10px] font-medium text-text-secondary uppercase opacity-40 mt-1">Tracked files not found during scan</p>
<span class="text-xs text-text-secondary opacity-60 block mb-1">Pending confirmation</span>
<h4 class="text-2xl font-bold text-yellow-500 mono tabular-nums">{pendingItems.length}</h4>
<p class="text-[10px] font-medium text-text-secondary uppercase opacity-40 mt-1">Tracked files not yet confirmed</p>
</div>
</div>
</Card>
@@ -198,28 +211,37 @@
</thead>
<tbody>
{#each discrepancies as item (item.id)}
{@const path = formatPath(item.path)}
<tr class="border-b border-border-color/10 hover:bg-white/[0.02] transition-colors group">
<td class="px-6 py-4">
<td class="px-6 py-4 align-top">
{#if item.is_deleted}
<StatusBadge variant="error">Deleted</StatusBadge>
<StatusBadge variant="error">Missing</StatusBadge>
{:else}
<StatusBadge variant="warning">Missing</StatusBadge>
<StatusBadge variant="warning">Pending</StatusBadge>
{/if}
</td>
<td class="px-6 py-4">
<div class="max-w-md">
<span class="text-sm font-medium text-text-primary mono truncate block" title={item.path}>
{item.path.split('/').pop()}
</span>
<span class="text-[10px] text-text-secondary opacity-40 mono truncate block" title={item.path}>
{item.path}
</span>
<td class="px-6 py-4 align-top">
<div class="max-w-[500px]">
{#if path.tail}
<div class="flex flex-col gap-0.5">
<span class="text-xs font-medium text-text-secondary mono leading-tight" title={item.path}>
{path.head}
</span>
<span class="text-sm font-medium text-text-primary mono leading-tight" title={item.path}>
{path.tail}
</span>
</div>
{:else}
<span class="text-sm font-medium text-text-primary mono truncate block" title={item.path}>
{path.head}
</span>
{/if}
</div>
</td>
<td class="px-6 py-4 text-right">
<td class="px-6 py-4 text-right align-top">
<span class="text-xs text-text-secondary mono">{formatSize(item.size)}</span>
</td>
<td class="px-6 py-4">
<td class="px-6 py-4 align-top">
<span class="text-xs text-text-secondary mono">
{#if item.last_seen_timestamp}
{formatLocalDate(item.last_seen_timestamp)}
@@ -228,7 +250,7 @@
{/if}
</span>
</td>
<td class="px-6 py-4 text-center">
<td class="px-6 py-4 text-center align-top">
{#if item.has_versions}
<div class="inline-flex items-center gap-1.5 text-success-color">
<ShieldCheck size={14} />
@@ -241,8 +263,8 @@
</div>
{/if}
</td>
<td class="px-6 py-4">
<div class="flex items-center justify-end gap-2">
<td class="px-6 py-4 align-top">
<div class="flex items-center justify-end gap-2 pt-1">
{#if item.is_deleted}
<Button
variant="ghost"

@@ -15,13 +15,14 @@
type ScanStatusSchema
} from '$lib/api';
import { toast } from "svelte-sonner";
import { cn } from "$lib/utils";
import { cn, formatLocalTime } from "$lib/utils";
import { page } from '$app/state';

// Current directory state
let currentPath = $state('ROOT');
let searchQuery = $state('');
let files = $state<FileItem[]>([]);
let lastScanTime = $state<string | null>(null);
let loading = $state(false);
let searchLoading = $state(false);
let committing = $state(false);
@@ -35,14 +36,14 @@
let pendingChanges = $state<Map<string, boolean>>(new Map());

async function loadFiles(path: string) {
  if (searchQuery.trim().length >= 3) return; // Prevent loading path if searching
  if (searchQuery.trim().length >= 3) return;
  loading = true;
  try {
    const response = await browseSystemPathSystemBrowseGet({
      query: { path }
    });
    if (response.data) {
      files = response.data.map((f: any) => ({
      files = response.data.files.map((f: any) => ({
        name: f.name,
        path: f.path,
        type: f.type as 'file' | 'directory' | 'link',
@@ -51,6 +52,7 @@
        ignored: f.ignored ?? false,
        sha256_hash: f.sha256_hash ?? null
      }));
      lastScanTime = response.data.last_scan_time ?? null;
    }
  } catch (error) {
    console.error("Failed to load files:", error);
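
The change from response.data.map(...) to response.data.files.map(...) means the browse endpoint now returns an envelope instead of a bare list. A sketch of the shape this client code assumes (names inferred from usage here, not from the generated client):

interface BrowseResponse {
  files: Array<{
    name: string;
    path: string;
    type: 'file' | 'directory' | 'link';
    ignored?: boolean;
    sha256_hash?: string | null;
    // per-entry size and similar fields are elided in this hunk
  }>;
  last_scan_time?: string | null; // feeds the "Last scanned" page description
}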
@@ -193,6 +195,9 @@
}

const hasChanges = $derived(pendingChanges.size > 0);
const lastScanDisplay = $derived(
  lastScanTime ? `Last scanned: ${formatLocalTime(lastScanTime)}` : 'Never scanned'
);
</script>

<svelte:head>
@@ -201,8 +206,8 @@

<div class="flex flex-col gap-6 h-full animate-in fade-in duration-700">
<PageHeader
title="Live filesystem"
description="Define backup rules & browse physical storage"
title="Indexed filesystem"
description={lastScanDisplay}
icon={FolderTree}
>
{#snippet actions()}

@@ -63,7 +63,9 @@
let showRegisterDialog = $state(false);
let editingMedia = $state<MediaSchema | null>(null);

let activeMedia = $derived(mediaList.filter(m => m.status === 'active' && (m.capacity === 0 || (m.bytes_used / m.capacity) < 0.98)));
let activeMedia = $derived(mediaList.filter(m => m.status === 'active'));
let fullMedia = $derived(mediaList.filter(m => m.status === 'full'));
let unavailableMedia = $derived(mediaList.filter(m => ['failed', 'retired', 'offline'].includes(m.status)));
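
With the 98%-capacity heuristic dropped, the three buckets now partition purely on status. A quick sketch with hypothetical data showing how a list falls out:

// Hypothetical media list — demonstrates the status-only partition.
const sample = [
  { identifier: 'TAPE_A', status: 'active' },
  { identifier: 'TAPE_B', status: 'full' },
  { identifier: 'TAPE_C', status: 'failed' },
  { identifier: 'TAPE_D', status: 'retired' },
];
// activeMedia      -> TAPE_A
// fullMedia        -> TAPE_B
// unavailableMedia -> TAPE_C, TAPE_D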

// New Media Form State
let newMedia = $state({
@@ -734,7 +736,7 @@
{/if}

<!-- Active Media -->
<div class="space-y-4">
<div class="space-y-4" data-testid="active-media-section">
<SectionHeader title="Active archive media" icon={Database} iconColor="text-blue-500" />

<Card class="bg-bg-secondary border-border-color shadow-2xl overflow-hidden flex flex-col">
@@ -790,8 +792,8 @@
</div>

<!-- Fully Utilized Media -->
{#if mediaList.some(m => m.status === 'active' && m.capacity > 0 && (m.bytes_used / m.capacity) >= 0.98)}
<div class="space-y-4">
{#if fullMedia.length > 0}
<div class="space-y-4" data-testid="full-media-section">
<SectionHeader title="Fully utilized media" icon={ShieldCheck} iconColor="text-success-color" />

<Card class="bg-bg-secondary/80 border border-border-color/80 rounded-xl overflow-hidden shadow-xl">
@@ -809,7 +811,7 @@
</tr>
</thead>
<tbody class="divide-y divide-border-color/30">
{#each mediaList.filter(m => m.status === 'active' && m.capacity > 0 && (m.bytes_used / m.capacity) >= 0.98) as media (media.id)}
{#each fullMedia as media (media.id)}
<tr class="hover:bg-bg-primary/20 transition-colors">
<td class="px-6 py-4 text-center opacity-30">
<Minus size={16} />
@@ -824,8 +826,8 @@
{/if}

<!-- Retired & Failed Media -->
{#if mediaList.some(m => m.status !== 'active')}
<div class="space-y-4">
{#if unavailableMedia.length > 0}
<div class="space-y-4" data-testid="unavailable-media-section">
<SectionHeader title="Retired & failed media" icon={ShieldAlert} iconColor="text-error-color" />

<Card class="bg-bg-secondary/60 border border-border-color/60 rounded-xl overflow-hidden shadow-xl grayscale-[0.5] opacity-80">
@@ -843,7 +845,7 @@
</tr>
</thead>
<tbody class="divide-y divide-border-color/20">
{#each mediaList.filter(m => m.status !== 'active') as media (media.id)}
{#each unavailableMedia as media (media.id)}
<tr class="hover:bg-bg-primary/20 transition-colors">
<td class="px-6 py-4 text-center opacity-20">
<Minus size={16} />

@@ -24,12 +24,12 @@
listJobsSystemJobsGet,
getJobsCountSystemJobsCountGet,
cancelJobSystemJobsJobIdCancelPost,
type JobSchema
type AppApiSystemJobSchema
} from '$lib/api';
import { cn, formatLocalTime, parseUTCDate } from '$lib/utils';
import { toast } from 'svelte-sonner';

let jobs = $state<JobSchema[]>([]);
let jobs = $state<AppApiSystemJobSchema[]>([]);
let totalJobs = $state(0);
let loading = $state(true);
let loadingMore = $state(false);
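
The JobSchema -> AppApiSystemJobSchema rename looks like the fully qualified name an OpenAPI client generator emits once two modules export a schema with the same short name. If the long name grates, a local alias keeps call sites terse (a sketch, assuming this is the only job shape the page needs):

// Optional readability alias — purely local to this component.
type Job = AppApiSystemJobSchema;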
@@ -197,7 +197,7 @@
<div class="flex-1 space-y-2.5">
<div class="flex justify-between items-end">
<span class="text-xs font-medium text-text-secondary truncate max-w-[400px]">
{job.current_task || 'Starting task...'}
{job.latest_log || job.current_task || 'Starting task...'}
</span>
<span class="text-xs font-semibold mono text-text-primary">{job.progress.toFixed(1)}%</span>
</div>
@@ -241,7 +241,7 @@
<span class="text-sm font-semibold text-text-primary uppercase tracking-tight">{job.job_type} JOB #{job.id}</span>
<StatusBadge variant={getStatusVariant(job.status)}>{job.status}</StatusBadge>
</div>
<p class="text-xs text-text-secondary mt-1 opacity-60 truncate">{job.error_message || job.current_task || 'Finished successfully'}</p>
<p class="text-xs text-text-secondary mt-1 opacity-60 truncate">{job.latest_log || job.error_message || job.current_task || 'Finished successfully'}</p>
</div>

<div class="grid grid-cols-3 gap-12 shrink-0">

@@ -128,4 +128,60 @@ test.describe('Media Lifecycle', () => {

    await requestContext.dispose();
  });

  test('inventory categorizes media by status correctly', async ({ page }) => {
    const requestContext = await setupRequestContext();

    const activeMedia = await requestContext.post(`${API_URL}/inventory/media`, {
      data: { identifier: 'CAT_ACTIVE', media_type: 'mock_lto', generation_tier: 'LTO-8', capacity: 12000, config: {} }
    }).then(r => r.json());

    const fullMedia = await requestContext.post(`${API_URL}/inventory/media`, {
      data: { identifier: 'CAT_FULL', media_type: 'mock_lto', generation_tier: 'LTO-8', capacity: 12000, config: {} }
    }).then(r => r.json());
    await requestContext.patch(`${API_URL}/inventory/media/${fullMedia.id}`, { data: { status: 'full' } });

    const failedMedia = await requestContext.post(`${API_URL}/inventory/media`, {
      data: { identifier: 'CAT_FAILED', media_type: 'mock_lto', generation_tier: 'LTO-8', capacity: 12000, config: {} }
    }).then(r => r.json());
    await requestContext.patch(`${API_URL}/inventory/media/${failedMedia.id}`, { data: { status: 'failed' } });

    const retiredMedia = await requestContext.post(`${API_URL}/inventory/media`, {
      data: { identifier: 'CAT_RETIRED', media_type: 'mock_lto', generation_tier: 'LTO-8', capacity: 12000, config: {} }
    }).then(r => r.json());
    await requestContext.patch(`${API_URL}/inventory/media/${retiredMedia.id}`, { data: { status: 'retired' } });

    await page.goto('/inventory');
    await page.waitForLoadState('networkidle');

    const activeSection = page.getByTestId('active-media-section');
    const fullSection = page.getByTestId('full-media-section');
    const unavailableSection = page.getByTestId('unavailable-media-section');

    await expect(activeSection).toBeVisible();
    await expect(activeSection.getByText('CAT_ACTIVE')).toBeVisible();
    await expect(activeSection.getByText('CAT_FULL')).not.toBeVisible();
    await expect(activeSection.getByText('CAT_FAILED')).not.toBeVisible();
    await expect(activeSection.getByText('CAT_RETIRED')).not.toBeVisible();

    await expect(fullSection).toBeVisible();
    await expect(fullSection.getByText('CAT_FULL')).toBeVisible();
    await expect(fullSection.getByText('CAT_ACTIVE')).not.toBeVisible();
    await expect(fullSection.getByText('CAT_FAILED')).not.toBeVisible();
    await expect(fullSection.getByText('CAT_RETIRED')).not.toBeVisible();

    await expect(unavailableSection).toBeVisible();
    await expect(unavailableSection.getByText('CAT_FAILED')).toBeVisible();
    await expect(unavailableSection.getByText('CAT_RETIRED')).toBeVisible();

    await requestContext.delete(`${API_URL}/inventory/media/${activeMedia.id}`);
    await requestContext.delete(`${API_URL}/inventory/media/${fullMedia.id}`);
    await requestContext.delete(`${API_URL}/inventory/media/${failedMedia.id}`);
    await requestContext.delete(`${API_URL}/inventory/media/${retiredMedia.id}`);
    await requestContext.dispose();
  });
});
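
For context, setupRequestContext and API_URL are defined earlier in this spec file. A minimal sketch of what such a helper typically looks like in Playwright (hypothetical — the real one may add auth headers or other defaults):

import { request, type APIRequestContext } from '@playwright/test';

const API_URL = process.env.API_URL ?? 'http://localhost:8000'; // assumed default

async function setupRequestContext(): Promise<APIRequestContext> {
  // Isolated API client used to seed and tear down test fixtures.
  return await request.newContext({ baseURL: API_URL });
}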