bunch of stuff
This commit is contained in:
@@ -0,0 +1,18 @@
|
||||
node_modules
|
||||
.venv
|
||||
__pycache__
|
||||
*.pyc
|
||||
*.pyo
|
||||
*.pyd
|
||||
.db
|
||||
.sqlite
|
||||
.git
|
||||
.gitignore
|
||||
.ruff_cache
|
||||
.svelte-kit
|
||||
build
|
||||
static
|
||||
staging_area
|
||||
data
|
||||
tmp
|
||||
source_data
|
||||
@@ -1,3 +1,4 @@
|
||||
import os
|
||||
import sys
|
||||
from os.path import dirname, abspath
|
||||
from logging.config import fileConfig
|
||||
@@ -16,6 +17,11 @@ from app.db.models import Base # noqa: E402
|
||||
# access to the values within the .ini file in use.
|
||||
config = context.config
|
||||
|
||||
# Override sqlalchemy.url if environment variable is set
|
||||
database_url = os.getenv("DATABASE_URL")
|
||||
if database_url:
|
||||
config.set_main_option("sqlalchemy.url", database_url)
|
||||
|
||||
# Interpret the config file for Python logging.
|
||||
# This line sets up loggers basically.
|
||||
if config.config_file_name is not None:
|
||||
|
||||
@@ -0,0 +1,55 @@
|
||||
"""add_fts_table
|
||||
|
||||
Revision ID: 193bb204c677
|
||||
Revises: 9a6e70fabf7b
|
||||
Create Date: 2026-04-23 19:04:51.158603
|
||||
|
||||
"""
|
||||
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = "193bb204c677"
|
||||
down_revision: Union[str, Sequence[str], None] = "9a6e70fabf7b"
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
    """Upgrade schema: add a trigram FTS5 index over file paths.

    Creates the ``filesystem_fts`` virtual table and three triggers that
    mirror ``filesystem_state.file_path`` changes into it.
    """
    # NOTE(review): the delete/update triggers use the FTS5 'delete' command,
    # which is only valid for contentless/external-content FTS5 tables; a
    # later migration (38cb9df7a18c) drops and replaces them. Kept as-is
    # because migration history must not be rewritten.
    statements = (
        """
        CREATE VIRTUAL TABLE filesystem_fts USING fts5(
            file_path,
            tokenize='trigram'
        )
        """,
        """
        CREATE TRIGGER fts_ai AFTER INSERT ON filesystem_state BEGIN
            INSERT INTO filesystem_fts(rowid, file_path) VALUES (new.id, new.file_path);
        END;
        """,
        """
        CREATE TRIGGER fts_ad AFTER DELETE ON filesystem_state BEGIN
            INSERT INTO filesystem_fts(filesystem_fts, rowid, file_path) VALUES('delete', old.id, old.file_path);
        END;
        """,
        """
        CREATE TRIGGER fts_au AFTER UPDATE ON filesystem_state BEGIN
            INSERT INTO filesystem_fts(filesystem_fts, rowid, file_path) VALUES('delete', old.id, old.file_path);
            INSERT INTO filesystem_fts(rowid, file_path) VALUES (new.id, new.file_path);
        END;
        """,
    )
    for statement in statements:
        op.execute(statement)
|
||||
|
||||
|
||||
def downgrade() -> None:
    """Downgrade schema: remove the FTS triggers and the FTS table."""
    # Triggers are dropped before the table they write to; IF EXISTS keeps
    # the downgrade idempotent if a previous run was interrupted.
    for trigger in ("fts_ai", "fts_ad", "fts_au"):
        op.execute(f"DROP TRIGGER IF EXISTS {trigger}")
    op.execute("DROP TABLE IF EXISTS filesystem_fts")
|
||||
@@ -0,0 +1,65 @@
|
||||
"""add_splitting_support
|
||||
|
||||
Revision ID: 33d682d2c089
|
||||
Revises: 5e867c5b4930
|
||||
Create Date: 2026-04-23 20:47:53.406493
|
||||
|
||||
"""
|
||||
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = "33d682d2c089"
|
||||
down_revision: Union[str, Sequence[str], None] = "5e867c5b4930"
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
    """Upgrade schema: add file-splitting columns to ``file_versions``.

    Each column is added only if absent, because a previously failed run of
    this migration may have left some of them behind.
    """
    conn = op.get_bind()
    existing = {c["name"] for c in sa.inspect(conn).get_columns("file_versions")}

    # Columns in the same order the original migration added them.
    wanted = (
        sa.Column("is_split", sa.Boolean(), nullable=False, server_default="0"),
        sa.Column("split_id", sa.String(), nullable=True),
        sa.Column("offset_start", sa.BigInteger(), nullable=False, server_default="0"),
        sa.Column("offset_end", sa.BigInteger(), nullable=False, server_default="0"),
        sa.Column("created_at", sa.DateTime(), nullable=True),
    )
    for column in wanted:
        if column.name not in existing:
            op.add_column("file_versions", column)
|
||||
|
||||
|
||||
def downgrade() -> None:
    """Downgrade schema: drop the splitting columns, newest first."""
    # ### commands auto generated by Alembic - please adjust! ###
    for column in ("created_at", "offset_end", "offset_start", "split_id", "is_split"):
        op.drop_column("file_versions", column)
    # ### end Alembic commands ###
|
||||
@@ -0,0 +1,70 @@
|
||||
"""fix_fts_triggers
|
||||
|
||||
Revision ID: 38cb9df7a18c
|
||||
Revises: 33d682d2c089
|
||||
Create Date: 2026-04-23 21:50:00.000000
|
||||
|
||||
"""
|
||||
|
||||
from typing import Sequence, Union
|
||||
from alembic import op
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = "38cb9df7a18c"
|
||||
down_revision: Union[str, Sequence[str], None] = "33d682d2c089"
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
    """Replace the invalid FTS triggers with ones valid for a standalone table."""
    # The triggers from 193bb204c677 used the FTS5 'delete' command, which
    # does not apply to a standalone FTS5 table — drop them first.
    for trigger in ("fts_ai", "fts_ad", "fts_au"):
        op.execute(f"DROP TRIGGER IF EXISTS {trigger}")

    # Recreate them using plain INSERT/DELETE/UPDATE statements. The update
    # trigger fires only when file_path actually changes.
    replacements = (
        """
        CREATE TRIGGER fts_ai AFTER INSERT ON filesystem_state BEGIN
            INSERT INTO filesystem_fts(rowid, file_path) VALUES (new.id, new.file_path);
        END;
        """,
        """
        CREATE TRIGGER fts_ad AFTER DELETE ON filesystem_state BEGIN
            DELETE FROM filesystem_fts WHERE rowid = old.id;
        END;
        """,
        """
        CREATE TRIGGER fts_au AFTER UPDATE OF file_path ON filesystem_state BEGIN
            UPDATE filesystem_fts SET file_path = new.file_path WHERE rowid = old.id;
        END;
        """,
    )
    for statement in replacements:
        op.execute(statement)
|
||||
|
||||
|
||||
def downgrade() -> None:
    """Restore the previous (invalid, but historical) trigger definitions."""
    for trigger in ("fts_ai", "fts_ad", "fts_au"):
        op.execute(f"DROP TRIGGER IF EXISTS {trigger}")

    # Recreate the old triggers exactly as 193bb204c677 defined them so a
    # rollback lands on the prior schema state.
    originals = (
        """
        CREATE TRIGGER fts_ai AFTER INSERT ON filesystem_state BEGIN
            INSERT INTO filesystem_fts(rowid, file_path) VALUES (new.id, new.file_path);
        END;
        """,
        """
        CREATE TRIGGER fts_ad AFTER DELETE ON filesystem_state BEGIN
            INSERT INTO filesystem_fts(filesystem_fts, rowid, file_path) VALUES('delete', old.id, old.file_path);
        END;
        """,
        """
        CREATE TRIGGER fts_au AFTER UPDATE ON filesystem_state BEGIN
            INSERT INTO filesystem_fts(filesystem_fts, rowid, file_path) VALUES('delete', old.id, old.file_path);
            INSERT INTO filesystem_fts(rowid, file_path) VALUES (new.id, new.file_path);
        END;
        """,
    )
    for statement in originals:
        op.execute(statement)
|
||||
@@ -0,0 +1,96 @@
|
||||
"""sync_models
|
||||
|
||||
Revision ID: 5e867c5b4930
|
||||
Revises: 193bb204c677
|
||||
Create Date: 2026-04-23 19:10:26.093824
|
||||
|
||||
"""
|
||||
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = "5e867c5b4930"
|
||||
down_revision: Union[str, Sequence[str], None] = "193bb204c677"
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
    """Upgrade schema: sync the database with the current ORM models."""
    # ### commands auto generated by Alembic - please adjust! ###
    # New boolean flags on filesystem_state; server defaults keep existing
    # rows valid under the NOT NULL constraint.
    for flag in ("is_indexed", "is_ignored"):
        op.add_column(
            "filesystem_state",
            sa.Column(flag, sa.Boolean(), nullable=False, server_default="0"),
        )
    op.add_column(
        "storage_media", sa.Column("extra_config", sa.String(), nullable=True)
    )

    # Tables present in the models but never migrated before:
    # jobs, restore_cart, system_settings, tracked_sources.
    op.create_table(
        "jobs",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("job_type", sa.String(), nullable=False),
        sa.Column("status", sa.String(), nullable=False),
        sa.Column("progress", sa.Float(), nullable=False),
        sa.Column("current_task", sa.String(), nullable=True),
        sa.Column("error_message", sa.String(), nullable=True),
        sa.Column("started_at", sa.DateTime(), nullable=True),
        sa.Column("completed_at", sa.DateTime(), nullable=True),
        sa.Column("created_at", sa.DateTime(), nullable=False),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_table(
        "restore_cart",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("filesystem_state_id", sa.Integer(), nullable=False),
        sa.Column("created_at", sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(
            ["filesystem_state_id"],
            ["filesystem_state.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    # Simple key/value store for application settings.
    op.create_table(
        "system_settings",
        sa.Column("key", sa.String(), nullable=False),
        sa.Column("value", sa.String(), nullable=False),
        sa.Column("updated_at", sa.DateTime(), nullable=False),
        sa.PrimaryKeyConstraint("key"),
    )
    op.create_table(
        "tracked_sources",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("path", sa.String(), nullable=False),
        sa.Column("is_directory", sa.Boolean(), nullable=False),
        sa.Column("action", sa.String(), nullable=False),
        sa.Column("created_at", sa.DateTime(), nullable=False),
        sa.PrimaryKeyConstraint("id"),
    )
    # Paths are looked up directly, so the index is unique.
    op.create_index(
        op.f("ix_tracked_sources_path"), "tracked_sources", ["path"], unique=True
    )
    # ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade() -> None:
    """Downgrade schema: drop the synced tables and columns."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f("ix_tracked_sources_path"), table_name="tracked_sources")
    for table in ("tracked_sources", "system_settings", "restore_cart", "jobs"):
        op.drop_table(table)

    op.drop_column("storage_media", "extra_config")
    for column in ("is_ignored", "is_indexed"):
        op.drop_column("filesystem_state", column)
    # ### end Alembic commands ###
|
||||
@@ -0,0 +1,31 @@
|
||||
"""add_missing_indexes
|
||||
|
||||
Revision ID: ac51f5e25832
|
||||
Revises: 38cb9df7a18c
|
||||
Create Date: 2026-04-23 23:00:00.000000
|
||||
|
||||
"""
|
||||
|
||||
from typing import Sequence, Union
|
||||
from alembic import op
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = "ac51f5e25832"
|
||||
down_revision: Union[str, Sequence[str], None] = "38cb9df7a18c"
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
    """Create FK-lookup indexes used by the browse/search join queries."""
    # Both indexes cover the filesystem_state_id foreign key column.
    for table in ("file_versions", "restore_cart"):
        op.create_index(
            f"ix_{table}_filesystem_state_id", table, ["filesystem_state_id"]
        )
|
||||
|
||||
|
||||
def downgrade() -> None:
    """Drop the FK-lookup indexes, in reverse creation order."""
    for table in ("restore_cart", "file_versions"):
        op.drop_index(f"ix_{table}_filesystem_state_id", table_name=table)
|
||||
+259
-77
@@ -50,6 +50,9 @@ class ItemMetadataSchema(BaseModel):
|
||||
sha256_hash: Optional[str] = None
|
||||
versions: List[FileVersionSchema] = []
|
||||
child_count: Optional[int] = None
|
||||
vulnerable: bool = False
|
||||
selected: bool = False
|
||||
indeterminate: bool = False
|
||||
|
||||
|
||||
class FileItemSchema(BaseModel):
|
||||
@@ -59,6 +62,9 @@ class FileItemSchema(BaseModel):
|
||||
size: Optional[int] = None
|
||||
mtime: Optional[float] = None
|
||||
media: List[str] = []
|
||||
vulnerable: bool = False
|
||||
selected: bool = False
|
||||
indeterminate: bool = False
|
||||
|
||||
|
||||
class TreeNodeSchema(BaseModel):
|
||||
@@ -68,10 +74,10 @@ class TreeNodeSchema(BaseModel):
|
||||
|
||||
|
||||
class MediaCreateSchema(BaseModel):
|
||||
media_type: str # tape, hdd, cloud
|
||||
media_type: str
|
||||
identifier: str
|
||||
generation_tier: Optional[str] = None
|
||||
capacity: int # in bytes
|
||||
capacity: int
|
||||
location: Optional[str] = None
|
||||
config: Dict[str, Any] = {}
|
||||
|
||||
@@ -93,34 +99,38 @@ class MediaSchema(BaseModel):
|
||||
status: str
|
||||
config: Dict[str, Any]
|
||||
|
||||
@classmethod
|
||||
def from_orm_custom(cls, obj: models.StorageMedia):
|
||||
config_data = {}
|
||||
if obj.extra_config:
|
||||
try:
|
||||
config_data = json.loads(obj.extra_config)
|
||||
except Exception:
|
||||
pass
|
||||
return cls(
|
||||
id=obj.id,
|
||||
media_type=obj.media_type,
|
||||
identifier=obj.identifier,
|
||||
generation_tier=obj.generation_tier,
|
||||
capacity=obj.capacity,
|
||||
bytes_used=obj.bytes_used,
|
||||
location=obj.location,
|
||||
status=obj.status,
|
||||
config=config_data,
|
||||
)
|
||||
class Config:
|
||||
from_attributes = True
|
||||
|
||||
|
||||
# --- Media Endpoints ---
|
||||
# --- Media Management ---
|
||||
|
||||
|
||||
@router.get("/media", response_model=List[MediaSchema])
|
||||
def list_media(db: Session = Depends(get_db)):
|
||||
all_media = db.query(models.StorageMedia).all()
|
||||
return [MediaSchema.from_orm_custom(m) for m in all_media]
|
||||
media = db.query(models.StorageMedia).all()
|
||||
results = []
|
||||
for m in media:
|
||||
config = {}
|
||||
if m.extra_config:
|
||||
try:
|
||||
config = json.loads(m.extra_config)
|
||||
except Exception:
|
||||
pass
|
||||
results.append(
|
||||
MediaSchema(
|
||||
id=m.id,
|
||||
media_type=m.media_type,
|
||||
identifier=m.identifier,
|
||||
generation_tier=m.generation_tier,
|
||||
capacity=m.capacity,
|
||||
bytes_used=m.bytes_used,
|
||||
location=m.location,
|
||||
status=m.status,
|
||||
config=config,
|
||||
)
|
||||
)
|
||||
return results
|
||||
|
||||
|
||||
@router.post("/media", response_model=MediaSchema)
|
||||
@@ -131,22 +141,35 @@ def register_media(req: MediaCreateSchema, db: Session = Depends(get_db)):
|
||||
.first()
|
||||
)
|
||||
if existing:
|
||||
raise HTTPException(
|
||||
status_code=400, detail="Media with this identifier already exists"
|
||||
)
|
||||
raise HTTPException(status_code=400, detail="Media already exists")
|
||||
|
||||
new_media = models.StorageMedia(
|
||||
media_type=req.media_type,
|
||||
identifier=req.identifier,
|
||||
generation_tier=req.generation_tier,
|
||||
capacity=req.capacity,
|
||||
location=req.location,
|
||||
status="active",
|
||||
extra_config=json.dumps(req.config),
|
||||
)
|
||||
db.add(new_media)
|
||||
db.commit()
|
||||
db.refresh(new_media)
|
||||
return MediaSchema.from_orm_custom(new_media)
|
||||
|
||||
config = {}
|
||||
if new_media.extra_config:
|
||||
config = json.loads(new_media.extra_config)
|
||||
|
||||
return MediaSchema(
|
||||
id=new_media.id,
|
||||
media_type=new_media.media_type,
|
||||
identifier=new_media.identifier,
|
||||
generation_tier=new_media.generation_tier,
|
||||
capacity=new_media.capacity,
|
||||
bytes_used=new_media.bytes_used,
|
||||
location=new_media.location,
|
||||
status=new_media.status,
|
||||
config=config,
|
||||
)
|
||||
|
||||
|
||||
@router.patch("/media/{media_id}", response_model=MediaSchema)
|
||||
@@ -154,22 +177,31 @@ def update_media(media_id: int, req: MediaUpdateSchema, db: Session = Depends(ge
|
||||
media = db.query(models.StorageMedia).get(media_id)
|
||||
if not media:
|
||||
raise HTTPException(status_code=404, detail="Media not found")
|
||||
|
||||
if req.status:
|
||||
media.status = req.status
|
||||
if req.location:
|
||||
media.location = req.location
|
||||
if req.config is not None:
|
||||
current_config = {}
|
||||
if media.extra_config:
|
||||
try:
|
||||
current_config = json.loads(media.extra_config)
|
||||
except Exception:
|
||||
pass
|
||||
current_config.update(req.config)
|
||||
media.extra_config = json.dumps(current_config)
|
||||
if req.config:
|
||||
media.extra_config = json.dumps(req.config)
|
||||
|
||||
db.commit()
|
||||
db.refresh(media)
|
||||
return MediaSchema.from_orm_custom(media)
|
||||
config = {}
|
||||
if media.extra_config:
|
||||
config = json.loads(media.extra_config)
|
||||
|
||||
return MediaSchema(
|
||||
id=media.id,
|
||||
media_type=media.media_type,
|
||||
identifier=media.identifier,
|
||||
generation_tier=media.generation_tier,
|
||||
capacity=media.capacity,
|
||||
bytes_used=media.bytes_used,
|
||||
location=media.location,
|
||||
status=media.status,
|
||||
config=config,
|
||||
)
|
||||
|
||||
|
||||
@router.delete("/media/{media_id}")
|
||||
@@ -184,7 +216,25 @@ def delete_media(media_id: int, db: Session = Depends(get_db)):
|
||||
return {"message": "Media deleted"}
|
||||
|
||||
|
||||
# --- Browsing Endpoints (Optimized) ---
|
||||
@router.post("/media/{media_id}/initialize")
def initialize_media(media_id: int, db: Session = Depends(get_db)):
    """Initialize (prepare/format) a storage medium via its archiver provider.

    Raises 404 if the medium is unknown, 400 if no provider handles its
    type, and 500 if the provider reports a failure.
    """
    # Imported inside the handler — presumably to avoid a circular import
    # at module load time; TODO confirm.
    from app.services.archiver import archiver_manager

    media = db.query(models.StorageMedia).get(media_id)
    if media is None:
        raise HTTPException(status_code=404, detail="Media not found")

    # NOTE(review): this reaches into a private helper of the manager.
    provider = archiver_manager._get_provider(media)
    if not provider:
        raise HTTPException(status_code=400, detail="Unsupported media type")

    succeeded = provider.initialize_media(media.identifier)
    if not succeeded:
        raise HTTPException(status_code=500, detail="Failed to initialize media")
    return {"message": "Media initialized successfully"}
|
||||
|
||||
|
||||
# --- Browsing Endpoints (Highly Optimized) ---
|
||||
|
||||
|
||||
@router.get("/browse", response_model=List[FileItemSchema])
|
||||
@@ -193,74 +243,124 @@ def browse_index(
|
||||
include_ignored: bool = False,
|
||||
db: Session = Depends(get_db),
|
||||
):
|
||||
if path is None or path == "ROOT":
|
||||
roots = get_source_roots(db)
|
||||
if path is None or path == "ROOT":
|
||||
# OPTIMIZED: Fetch all root stats in a single complex SQL aggregate
|
||||
results = []
|
||||
for root in roots:
|
||||
sql = text(
|
||||
"SELECT COUNT(*), SUM(size), MAX(mtime) FROM filesystem_state WHERE file_path LIKE :prefix"
|
||||
+ (" AND is_ignored = 0" if not include_ignored else "")
|
||||
)
|
||||
row = db.execute(sql, {"prefix": f"{root}%"}).fetchone()
|
||||
if row and row[0] > 0:
|
||||
prefix = root if root.endswith("/") else root + "/"
|
||||
stats_sql = text("""
|
||||
SELECT
|
||||
MAX(CASE WHEN fv.id IS NULL AND fs.is_ignored = 0 THEN 1 ELSE 0 END) as is_vulnerable,
|
||||
COUNT(DISTINCT CASE WHEN fv.id IS NOT NULL THEN fs.id END) as restorable_count,
|
||||
COUNT(DISTINCT CASE WHEN fv.id IS NOT NULL AND rc.id IS NOT NULL THEN fs.id END) as queued_count
|
||||
FROM filesystem_state fs
|
||||
LEFT JOIN file_versions fv ON fv.filesystem_state_id = fs.id
|
||||
LEFT JOIN restore_cart rc ON rc.filesystem_state_id = fs.id
|
||||
WHERE fs.file_path LIKE :prefix
|
||||
""")
|
||||
stats = db.execute(stats_sql, {"prefix": f"{prefix}%"}).fetchone()
|
||||
|
||||
is_vuln = stats[0] if stats else 0
|
||||
restorable = stats[1] if stats else 0
|
||||
queued = stats[2] if stats else 0
|
||||
|
||||
is_selected = restorable > 0 and queued == restorable
|
||||
is_indeterminate = 0 < queued < restorable
|
||||
|
||||
results.append(
|
||||
FileItemSchema(
|
||||
name=root,
|
||||
path=root,
|
||||
type="directory",
|
||||
size=row[1] or 0,
|
||||
mtime=row[2] or 0,
|
||||
size=0,
|
||||
mtime=0,
|
||||
vulnerable=bool(is_vuln),
|
||||
selected=is_selected,
|
||||
indeterminate=is_indeterminate,
|
||||
)
|
||||
)
|
||||
return results
|
||||
|
||||
prefix = path if path.endswith("/") else path + "/"
|
||||
ignore_filter = " AND is_ignored = 0" if not include_ignored else ""
|
||||
ignore_filter = " AND fs.is_ignored = 0" if not include_ignored else ""
|
||||
results = []
|
||||
|
||||
# Subdirectories
|
||||
subdir_sql = text(
|
||||
f"""
|
||||
SELECT DISTINCT SUBSTR(file_path, LENGTH(:prefix) + 1, INSTR(SUBSTR(file_path, LENGTH(:prefix) + 1), '/') - 1) as dirname
|
||||
FROM filesystem_state WHERE file_path LIKE :search_prefix AND SUBSTR(file_path, LENGTH(:prefix) + 1) LIKE '%/%' {ignore_filter}
|
||||
"""
|
||||
)
|
||||
# OPTIMIZED: Fetch ALL subdirectory metadata in a SINGLE aggregate query
|
||||
# This replaces the N+1 query pattern that was killing performance
|
||||
subdir_agg_sql = text(f"""
|
||||
SELECT
|
||||
SUBSTR(fs.file_path, LENGTH(:prefix) + 1, INSTR(SUBSTR(fs.file_path, LENGTH(:prefix) + 1), '/') - 1) as dirname,
|
||||
MAX(CASE WHEN fv.id IS NULL AND fs.is_ignored = 0 THEN 1 ELSE 0 END) as is_vulnerable,
|
||||
COUNT(DISTINCT CASE WHEN fv.id IS NOT NULL THEN fs.id END) as restorable_count,
|
||||
COUNT(DISTINCT CASE WHEN fv.id IS NOT NULL AND rc.id IS NOT NULL THEN fs.id END) as queued_count
|
||||
FROM filesystem_state fs
|
||||
LEFT JOIN file_versions fv ON fv.filesystem_state_id = fs.id
|
||||
LEFT JOIN restore_cart rc ON rc.filesystem_state_id = fs.id
|
||||
WHERE fs.file_path LIKE :search_prefix
|
||||
AND SUBSTR(fs.file_path, LENGTH(:prefix) + 1) LIKE '%/%' {ignore_filter}
|
||||
GROUP BY dirname
|
||||
""")
|
||||
|
||||
subdirs = db.execute(
|
||||
subdir_sql, {"prefix": prefix, "search_prefix": f"{prefix}%"}
|
||||
subdir_agg_sql, {"prefix": prefix, "search_prefix": f"{prefix}%"}
|
||||
).fetchall()
|
||||
|
||||
for sd in subdirs:
|
||||
if sd[0]:
|
||||
name = sd[0]
|
||||
is_vuln = sd[1]
|
||||
restorable = sd[2]
|
||||
queued = sd[3]
|
||||
|
||||
is_selected = restorable > 0 and queued == restorable
|
||||
is_indeterminate = 0 < queued < restorable
|
||||
|
||||
results.append(
|
||||
FileItemSchema(
|
||||
name=sd[0], path=prefix + sd[0], type="directory", size=0, mtime=0
|
||||
name=name,
|
||||
path=prefix + name,
|
||||
type="directory",
|
||||
size=0,
|
||||
mtime=0,
|
||||
vulnerable=bool(is_vuln),
|
||||
selected=is_selected,
|
||||
indeterminate=is_indeterminate,
|
||||
)
|
||||
)
|
||||
|
||||
# Files
|
||||
file_sql = text(
|
||||
f"""
|
||||
SELECT name, file_path, size, mtime, id FROM (
|
||||
SELECT SUBSTR(file_path, LENGTH(:prefix) + 1) as name, file_path, size, mtime, id
|
||||
FROM filesystem_state WHERE file_path LIKE :search_prefix {ignore_filter}
|
||||
) WHERE name NOT LIKE '%/%'
|
||||
"""
|
||||
)
|
||||
# OPTIMIZED: Fetch files with version/cart status in a single joined query
|
||||
file_sql = text(f"""
|
||||
SELECT
|
||||
fs.file_path, fs.size, fs.mtime, fs.id,
|
||||
MAX(CASE WHEN fv.id IS NOT NULL THEN 1 ELSE 0 END) as has_version,
|
||||
MAX(CASE WHEN rc.id IS NOT NULL THEN 1 ELSE 0 END) as is_selected,
|
||||
GROUP_CONCAT(DISTINCT sm.identifier) as media_list
|
||||
FROM filesystem_state fs
|
||||
LEFT JOIN file_versions fv ON fv.filesystem_state_id = fs.id
|
||||
LEFT JOIN storage_media sm ON sm.id = fv.media_id
|
||||
LEFT JOIN restore_cart rc ON rc.filesystem_state_id = fs.id
|
||||
WHERE fs.file_path LIKE :search_prefix
|
||||
AND SUBSTR(fs.file_path, LENGTH(:prefix) + 1) NOT LIKE '%/%' {ignore_filter}
|
||||
GROUP BY fs.id
|
||||
""")
|
||||
|
||||
files = db.execute(
|
||||
file_sql, {"prefix": prefix, "search_prefix": f"{prefix}%"}
|
||||
).fetchall()
|
||||
|
||||
for f in files:
|
||||
media_sql = text(
|
||||
"SELECT m.identifier FROM storage_media m JOIN file_versions v ON v.media_id = m.id WHERE v.filesystem_state_id = :fid"
|
||||
)
|
||||
media_list = [m[0] for m in db.execute(media_sql, {"fid": f[4]}).fetchall()]
|
||||
media_list = f[6].split(",") if f[6] else []
|
||||
name = f[0].split("/")[-1]
|
||||
results.append(
|
||||
FileItemSchema(
|
||||
name=f[0],
|
||||
path=f[1],
|
||||
name=name,
|
||||
path=f[0],
|
||||
type="file",
|
||||
size=f[2],
|
||||
mtime=f[3],
|
||||
size=f[1],
|
||||
mtime=f[2],
|
||||
media=media_list,
|
||||
vulnerable=not bool(f[4]),
|
||||
selected=bool(f[5]),
|
||||
)
|
||||
)
|
||||
|
||||
@@ -268,6 +368,55 @@ def browse_index(
|
||||
return results
|
||||
|
||||
|
||||
@router.get("/search", response_model=List[FileItemSchema])
def search_index(q: str, include_ignored: bool = False, db: Session = Depends(get_db)):
    """Full-text search over indexed file paths using the FTS5 trigram table.

    Returns up to 200 matching files sorted case-insensitively by basename.
    Each result carries version/cart/media status from a single joined
    aggregate query.
    """
    # The trigram tokenizer cannot match terms shorter than 3 characters.
    if not q or len(q) < 3:
        return []

    ignore_filter = " AND fs.is_ignored = 0" if not include_ignored else ""

    # Use FTS5 for instantaneous full-text search
    sql = text(f"""
        SELECT
            fs.file_path, fs.size, fs.mtime, fs.id,
            MAX(CASE WHEN fv.id IS NOT NULL THEN 1 ELSE 0 END) as has_version,
            MAX(CASE WHEN rc.id IS NOT NULL THEN 1 ELSE 0 END) as is_selected,
            GROUP_CONCAT(DISTINCT sm.identifier) as media_list
        FROM filesystem_fts
        JOIN filesystem_state fs ON fs.id = filesystem_fts.rowid
        LEFT JOIN file_versions fv ON fv.filesystem_state_id = fs.id
        LEFT JOIN storage_media sm ON sm.id = fv.media_id
        LEFT JOIN restore_cart rc ON rc.filesystem_state_id = fs.id
        WHERE filesystem_fts MATCH :query {ignore_filter}
        GROUP BY fs.id
        LIMIT 200
    """)

    # BUGFIX: the MATCH argument is FTS5 *query syntax*, not a plain string.
    # Wrap the input in double quotes to force a phrase match, and double any
    # embedded quotes (FTS5 string escaping) so user input containing '"'
    # cannot break the query or raise an OperationalError.
    safe_query = '"{}"'.format(q.replace('"', '""'))
    files = db.execute(sql, {"query": safe_query}).fetchall()

    results = []
    for f in files:
        # Columns: 0=file_path, 1=size, 2=mtime, 3=id, 4=has_version,
        # 5=is_selected, 6=comma-joined media identifiers (or NULL).
        name = f[0].split("/")[-1]
        media_list = f[6].split(",") if f[6] else []

        results.append(
            FileItemSchema(
                name=name,
                path=f[0],
                type="file",
                size=f[1],
                mtime=f[2],
                media=media_list,
                # A file with no stored version is vulnerable (unrestorable).
                vulnerable=not bool(f[4]),
                selected=bool(f[5]),
            )
        )

    results.sort(key=lambda x: x.name.lower())
    return results
|
||||
|
||||
|
||||
@router.get("/tree", response_model=List[TreeNodeSchema])
|
||||
def get_index_tree(
|
||||
path: Optional[str] = None,
|
||||
@@ -311,6 +460,14 @@ def get_item_metadata(path: str, db: Session = Depends(get_db)):
|
||||
)
|
||||
for v in file_state.versions
|
||||
]
|
||||
|
||||
is_selected = (
|
||||
db.query(models.RestoreCart)
|
||||
.filter(models.RestoreCart.filesystem_state_id == file_state.id)
|
||||
.first()
|
||||
is not None
|
||||
)
|
||||
|
||||
return ItemMetadataSchema(
|
||||
id=file_state.id, # Now included
|
||||
file_path=file_state.file_path,
|
||||
@@ -320,8 +477,30 @@ def get_item_metadata(path: str, db: Session = Depends(get_db)):
|
||||
last_seen_timestamp=file_state.last_seen_timestamp,
|
||||
sha256_hash=file_state.sha256_hash,
|
||||
versions=versions,
|
||||
vulnerable=not bool(file_state.versions),
|
||||
selected=is_selected,
|
||||
)
|
||||
prefix = path if path.endswith("/") else path + "/"
|
||||
# Check recursive vulnerability and selection
|
||||
stats_sql = text("""
|
||||
SELECT
|
||||
MAX(CASE WHEN fv.id IS NULL AND fs.is_ignored = 0 THEN 1 ELSE 0 END) as is_vulnerable,
|
||||
COUNT(DISTINCT CASE WHEN fv.id IS NOT NULL THEN fs.id END) as restorable_count,
|
||||
COUNT(DISTINCT CASE WHEN fv.id IS NOT NULL AND rc.id IS NOT NULL THEN fs.id END) as queued_count
|
||||
FROM filesystem_state fs
|
||||
LEFT JOIN file_versions fv ON fv.filesystem_state_id = fs.id
|
||||
LEFT JOIN restore_cart rc ON rc.filesystem_state_id = fs.id
|
||||
WHERE fs.file_path LIKE :prefix
|
||||
""")
|
||||
stats = db.execute(stats_sql, {"prefix": f"{prefix}%"}).fetchone()
|
||||
|
||||
is_vuln = stats[0] if stats else 0
|
||||
restorable = stats[1] if stats else 0
|
||||
queued = stats[2] if stats else 0
|
||||
|
||||
is_selected = restorable > 0 and queued == restorable
|
||||
is_indeterminate = 0 < queued < restorable
|
||||
|
||||
sql = text(
|
||||
"SELECT COUNT(*), SUM(size), MAX(mtime), MAX(last_seen_timestamp) FROM filesystem_state WHERE file_path LIKE :prefix AND is_ignored = 0"
|
||||
)
|
||||
@@ -334,6 +513,9 @@ def get_item_metadata(path: str, db: Session = Depends(get_db)):
|
||||
mtime=row[2] or 0,
|
||||
last_seen_timestamp=row[3] or datetime.now(timezone.utc),
|
||||
child_count=row[0],
|
||||
vulnerable=bool(is_vuln),
|
||||
selected=is_selected,
|
||||
indeterminate=is_indeterminate,
|
||||
)
|
||||
raise HTTPException(status_code=404, detail="Item not found")
|
||||
|
||||
|
||||
+269
-71
@@ -1,9 +1,13 @@
|
||||
from fastapi import APIRouter, HTTPException, Depends
|
||||
from typing import List
|
||||
from fastapi import APIRouter, HTTPException, Depends, BackgroundTasks
|
||||
from typing import List, Optional
|
||||
from pydantic import BaseModel
|
||||
from sqlalchemy.orm import Session
|
||||
from app.db.database import get_db
|
||||
from sqlalchemy.orm import Session, joinedload
|
||||
from sqlalchemy import text
|
||||
from app.db.database import get_db, SessionLocal
|
||||
from app.db import models
|
||||
from datetime import datetime, timezone
|
||||
from app.services.archiver import archiver_manager
|
||||
from app.services.scanner import JobManager
|
||||
|
||||
router = APIRouter(prefix="/restores", tags=["Restores"])
|
||||
|
||||
@@ -33,16 +37,185 @@ class RestoreManifestSchema(BaseModel):
|
||||
media_required: List[ManifestMediaRequirement]
|
||||
|
||||
|
||||
class RestoreRequest(BaseModel):
    """Request body for POST /restores/trigger."""

    # Filesystem path the restored files are written to.
    destination: str
|
||||
|
||||
|
||||
class DirectoryCartRequest(BaseModel):
    """Request body for adding a whole directory to the restore cart."""

    # Directory path whose contained files should be queued.
    path: str
|
||||
|
||||
|
||||
class CartFileItemSchema(BaseModel):
    """One entry returned by the cart browse endpoint."""

    # Display name (basename for files, root/dir name for directories).
    name: str
    # Full path of the item.
    path: str
    # Either "file" or "directory" — drives client rendering.
    type: str
    # File size in bytes; None for directories.
    size: Optional[int] = None
    # Identifiers of the storage media holding versions of this file.
    media: List[str] = []
|
||||
|
||||
|
||||
class CartTreeNodeSchema(BaseModel):
    """A directory node in the cart tree view."""

    # Node display name.
    name: str
    # Full path of the node.
    path: str
    # Whether the node has child directories (lets the UI show an expander).
    has_children: bool = False
|
||||
|
||||
|
||||
# --- Endpoints ---
|
||||
|
||||
|
||||
@router.post("/trigger")
def trigger_restore(
    req: RestoreRequest,
    background_tasks: BackgroundTasks,
    db: Session = Depends(get_db),
):
    """Start a background restore of everything in the recovery cart.

    Returns the id of the created RESTORE job; raises 400 when the cart
    is empty.
    """
    # PERF: only an existence check is needed — .first() fetches at most
    # one row instead of materializing the entire cart like .all() did.
    if db.query(models.RestoreCart).first() is None:
        raise HTTPException(status_code=400, detail="Recovery queue is empty")

    job = JobManager.create_job(db, "RESTORE")

    def run_restore_task():
        # The request-scoped session is closed by the time this background
        # task runs, so open a dedicated session and always close it.
        db_inner = SessionLocal()
        try:
            archiver_manager.run_restore(
                db_inner, destination=req.destination, job_id=job.id
            )
        finally:
            db_inner.close()

    background_tasks.add_task(run_restore_task)
    return {"message": "Restore job initiated", "job_id": job.id}
|
||||
|
||||
|
||||
@router.get("/cart/browse", response_model=List[CartFileItemSchema])
def browse_cart(path: Optional[str] = None, db: Session = Depends(get_db)):
    """Browse the recovery queue as a virtual filesystem.

    With no path (or "ROOT"), lists the configured source roots that
    contain at least one queued file. Otherwise lists the queued
    subdirectories and files directly under *path*, directories first.
    """
    # Local import of the shared source-roots helper from the inventory router.
    from app.api.inventory import get_source_roots

    roots = get_source_roots(db)

    if path is None or path == "ROOT":
        results = []
        for root in roots:
            # Check if any file in the cart is under this root
            prefix = root if root.endswith("/") else root + "/"
            sql = text("""
                SELECT EXISTS (
                    SELECT 1 FROM filesystem_state fs
                    JOIN restore_cart rc ON rc.filesystem_state_id = fs.id
                    WHERE fs.file_path LIKE :prefix
                )
            """)
            if db.execute(sql, {"prefix": f"{prefix}%"}).scalar():
                results.append(
                    CartFileItemSchema(name=root, path=root, type="directory")
                )
        return results

    prefix = path if path.endswith("/") else path + "/"
    results = []

    # Subdirectories in cart: the first path segment after the prefix, for
    # every queued file that lives deeper than this directory.
    subdir_sql = text("""
        SELECT DISTINCT SUBSTR(fs.file_path, LENGTH(:prefix) + 1, INSTR(SUBSTR(fs.file_path, LENGTH(:prefix) + 1), '/') - 1) as dirname
        FROM filesystem_state fs
        JOIN restore_cart rc ON rc.filesystem_state_id = fs.id
        WHERE fs.file_path LIKE :search_prefix
        AND SUBSTR(fs.file_path, LENGTH(:prefix) + 1) LIKE '%/%'
    """)
    subdirs = db.execute(
        subdir_sql, {"prefix": prefix, "search_prefix": f"{prefix}%"}
    ).fetchall()
    for sd in subdirs:
        if sd[0]:
            results.append(
                CartFileItemSchema(name=sd[0], path=prefix + sd[0], type="directory")
            )

    # Files in cart: queued files directly inside this directory, each with
    # the aggregated list of media identifiers that hold a version of it.
    file_sql = text("""
        SELECT fs.file_path, fs.size, fs.id, GROUP_CONCAT(sm.identifier) as media_list
        FROM filesystem_state fs
        JOIN restore_cart rc ON rc.filesystem_state_id = fs.id
        JOIN file_versions fv ON fv.filesystem_state_id = fs.id
        JOIN storage_media sm ON sm.id = fv.media_id
        WHERE fs.file_path LIKE :search_prefix
        AND SUBSTR(fs.file_path, LENGTH(:prefix) + 1) NOT LIKE '%/%'
        GROUP BY fs.id
    """)
    files = db.execute(
        file_sql, {"prefix": prefix, "search_prefix": f"{prefix}%"}
    ).fetchall()
    for f in files:
        results.append(
            CartFileItemSchema(
                name=f[0].split("/")[-1],
                path=f[0],
                type="file",
                size=f[1],
                media=f[3].split(",") if f[3] else [],
            )
        )

    # Directories first, then case-insensitive alphabetical order.
    results.sort(key=lambda x: (x.type != "directory", x.name.lower()))
    return results
|
||||
|
||||
|
||||
@router.get("/cart/tree", response_model=List[CartTreeNodeSchema])
def get_cart_tree(path: Optional[str] = None, db: Session = Depends(get_db)):
    """Directory-only view of the recovery queue, for the tree widget.

    Mirrors browse_cart but returns only subdirectory nodes (no files).
    """
    # Local import of the shared source-roots helper from the inventory router.
    from app.api.inventory import get_source_roots

    roots = get_source_roots(db)

    if path is None or path == "ROOT":
        results = []
        for root in roots:
            # Only surface roots that actually contain queued files.
            prefix = root if root.endswith("/") else root + "/"
            sql = text("""
                SELECT EXISTS (
                    SELECT 1 FROM filesystem_state fs
                    JOIN restore_cart rc ON rc.filesystem_state_id = fs.id
                    WHERE fs.file_path LIKE :prefix
                )
            """)
            if db.execute(sql, {"prefix": f"{prefix}%"}).scalar():
                results.append(
                    CartTreeNodeSchema(name=root, path=root, has_children=True)
                )
        return results

    prefix = path if path.endswith("/") else path + "/"
    # First path segment below the prefix for every queued file deeper than
    # this directory -> the set of immediate subdirectories.
    subdir_sql = text("""
        SELECT DISTINCT SUBSTR(fs.file_path, LENGTH(:prefix) + 1, INSTR(SUBSTR(fs.file_path, LENGTH(:prefix) + 1), '/') - 1) as dirname
        FROM filesystem_state fs
        JOIN restore_cart rc ON rc.filesystem_state_id = fs.id
        WHERE fs.file_path LIKE :search_prefix
        AND SUBSTR(fs.file_path, LENGTH(:prefix) + 1) LIKE '%/%'
    """)
    subdirs = db.execute(
        subdir_sql, {"prefix": prefix, "search_prefix": f"{prefix}%"}
    ).fetchall()
    # NOTE(review): has_children is always reported True for these nodes —
    # a deeper query would be needed to know for sure; confirm the UI copes.
    results = [
        CartTreeNodeSchema(name=sd[0], path=prefix + sd[0], has_children=True)
        for sd in subdirs
        if sd[0]
    ]
    results.sort(key=lambda x: x.name.lower())
    return results
|
||||
|
||||
|
||||
@router.get("/cart", response_model=List[CartItemSchema])
|
||||
def list_cart(db: Session = Depends(get_db)):
|
||||
items = db.query(models.RestoreCart).all()
|
||||
# OPTIMIZED: Use joinedload to fetch all versions in a single query
|
||||
items = (
|
||||
db.query(models.RestoreCart)
|
||||
.options(
|
||||
joinedload(models.RestoreCart.file_state)
|
||||
.joinedload(models.FilesystemState.versions)
|
||||
.joinedload(models.FileVersion.media)
|
||||
)
|
||||
.all()
|
||||
)
|
||||
|
||||
results = []
|
||||
for item in items:
|
||||
media_ids = [v.media.identifier for v in item.file_state.versions]
|
||||
@@ -57,6 +230,57 @@ def list_cart(db: Session = Depends(get_db)):
|
||||
return results
|
||||
|
||||
|
||||
# NOTE: Static routes MUST come before parameterized ones like /cart/{file_id}
|
||||
|
||||
|
||||
@router.post("/cart/clear")
def clear_cart(db: Session = Depends(get_db)):
    """Empty the recovery queue with a single bulk DELETE."""
    # synchronize_session=False: no cart objects are loaded in this
    # session, so skipping the in-session sync is safe and faster.
    db.query(models.RestoreCart).delete(synchronize_session=False)
    db.commit()
    return {"message": "Recovery queue cleared"}
|
||||
|
||||
|
||||
@router.post("/cart/directory")
def add_directory_to_cart(req: DirectoryCartRequest, db: Session = Depends(get_db)):
    """Queue every restorable file under a directory ("ROOT" = everything).

    A file is restorable when it has at least one archived version; files
    already queued are skipped. The whole operation is one INSERT ... SELECT
    so large directories do not round-trip to the database per file.
    """
    from loguru import logger

    path = req.path
    if path == "ROOT":
        # Match every indexed file.
        prefix_query = "%"
        exact_path = "ROOT"
    else:
        prefix_query = f"{prefix}%"
        exact_path = path

    logger.info(f"Adding directory to queue: {path} (prefix: {prefix_query})")

    insert_sql = text("""
        INSERT INTO restore_cart (filesystem_state_id, created_at)
        SELECT DISTINCT fs.id, :now
        FROM filesystem_state fs
        WHERE (fs.file_path = :path OR fs.file_path LIKE :prefix)
        AND EXISTS (SELECT 1 FROM file_versions fv WHERE fv.filesystem_state_id = fs.id)
        AND fs.id NOT IN (SELECT filesystem_state_id FROM restore_cart)
    """)

    db.execute(
        insert_sql,
        {
            "path": exact_path,
            "prefix": prefix_query,
            "now": datetime.now(timezone.utc).isoformat(),
        },
    )

    db.commit()

    total_in_cart = db.query(models.RestoreCart).count()
    logger.info(f"Directory add complete. Total in cart: {total_in_cart}")

    return {"message": f"Added restorable items from {path} to recovery queue"}
|
||||
|
||||
|
||||
@router.post("/cart/{file_id}")
|
||||
def add_to_cart(file_id: int, db: Session = Depends(get_db)):
|
||||
existing = (
|
||||
@@ -65,7 +289,7 @@ def add_to_cart(file_id: int, db: Session = Depends(get_db)):
|
||||
.first()
|
||||
)
|
||||
if existing:
|
||||
return {"message": "Already in cart"}
|
||||
return {"message": "Already in recovery queue"}
|
||||
|
||||
file_state = db.query(models.FilesystemState).get(file_id)
|
||||
if not file_state or not file_state.versions:
|
||||
@@ -74,39 +298,7 @@ def add_to_cart(file_id: int, db: Session = Depends(get_db)):
|
||||
new_item = models.RestoreCart(filesystem_state_id=file_id)
|
||||
db.add(new_item)
|
||||
db.commit()
|
||||
return {"message": "Added to cart"}
|
||||
|
||||
|
||||
@router.post("/cart/directory")
|
||||
def add_directory_to_cart(req: DirectoryCartRequest, db: Session = Depends(get_db)):
|
||||
prefix = req.path if req.path.endswith("/") else req.path + "/"
|
||||
|
||||
# Find all files under this path that have at least one version
|
||||
eligible_files = (
|
||||
db.query(models.FilesystemState)
|
||||
.filter(
|
||||
models.FilesystemState.file_path.like(f"{prefix}%"),
|
||||
models.FilesystemState.versions.any(),
|
||||
)
|
||||
.all()
|
||||
)
|
||||
|
||||
if not eligible_files:
|
||||
raise HTTPException(
|
||||
status_code=404, detail="No restorable files found in this directory"
|
||||
)
|
||||
|
||||
# Get current cart to avoid duplicates
|
||||
in_cart = {c.filesystem_state_id for c in db.query(models.RestoreCart).all()}
|
||||
|
||||
added_count = 0
|
||||
for f in eligible_files:
|
||||
if f.id not in in_cart:
|
||||
db.add(models.RestoreCart(filesystem_state_id=f.id))
|
||||
added_count += 1
|
||||
|
||||
db.commit()
|
||||
return {"message": f"Added {added_count} files from {req.path} to cart"}
|
||||
return {"message": "Added to recovery queue"}
|
||||
|
||||
|
||||
@router.delete("/cart/{item_id}")
|
||||
@@ -115,43 +307,49 @@ def remove_from_cart(item_id: int, db: Session = Depends(get_db)):
|
||||
if item:
|
||||
db.delete(item)
|
||||
db.commit()
|
||||
return {"message": "Removed from cart"}
|
||||
|
||||
|
||||
@router.post("/cart/clear")
|
||||
def clear_cart(db: Session = Depends(get_db)):
|
||||
db.query(models.RestoreCart).delete()
|
||||
db.commit()
|
||||
return {"message": "Cart cleared"}
|
||||
return {"message": "Removed from recovery queue"}
|
||||
|
||||
|
||||
@router.get("/manifest", response_model=RestoreManifestSchema)
|
||||
def get_manifest(db: Session = Depends(get_db)):
|
||||
cart_items = db.query(models.RestoreCart).all()
|
||||
if not cart_items:
|
||||
return RestoreManifestSchema(total_files=0, total_size=0, media_required=[])
|
||||
# OPTIMIZED: Use a single raw SQL query to calculate the entire manifest
|
||||
# This completely avoids loading thousands of ORM objects into memory
|
||||
sql = text("""
|
||||
SELECT
|
||||
sm.identifier,
|
||||
sm.media_type,
|
||||
COUNT(DISTINCT fs.id) as file_count,
|
||||
SUM(fv.offset_end - fv.offset_start) as total_size
|
||||
FROM filesystem_state fs
|
||||
JOIN restore_cart rc ON rc.filesystem_state_id = fs.id
|
||||
JOIN file_versions fv ON fv.filesystem_state_id = fs.id
|
||||
JOIN storage_media sm ON sm.id = fv.media_id
|
||||
GROUP BY sm.id
|
||||
""")
|
||||
|
||||
total_size = sum(item.file_state.size for item in cart_items)
|
||||
media_map = {}
|
||||
rows = db.execute(sql).fetchall()
|
||||
|
||||
for item in cart_items:
|
||||
if not item.file_state.versions:
|
||||
continue
|
||||
primary_v = item.file_state.versions[0]
|
||||
ident = primary_v.media.identifier
|
||||
m_type = primary_v.media.media_type
|
||||
if ident not in media_map:
|
||||
media_map[ident] = {
|
||||
"identifier": ident,
|
||||
"media_type": m_type,
|
||||
"file_count": 0,
|
||||
"total_size": 0,
|
||||
}
|
||||
media_map[ident]["file_count"] += 1
|
||||
media_map[ident]["total_size"] += item.file_state.size
|
||||
requirements = []
|
||||
total_size = 0
|
||||
|
||||
requirements = [ManifestMediaRequirement(**m) for m in media_map.values()]
|
||||
requirements.sort(key=lambda x: x.identifier)
|
||||
return RestoreManifestSchema(
|
||||
total_files=len(cart_items), total_size=total_size, media_required=requirements
|
||||
# We also need the total unique files in the cart (a file might be on multiple media)
|
||||
total_unique_files = db.query(models.RestoreCart).count()
|
||||
|
||||
for row in rows:
|
||||
requirements.append(
|
||||
ManifestMediaRequirement(
|
||||
identifier=row[0],
|
||||
media_type=row[1],
|
||||
file_count=row[2],
|
||||
total_size=row[3],
|
||||
)
|
||||
)
|
||||
total_size += row[3]
|
||||
|
||||
requirements.sort(key=lambda x: x.identifier)
|
||||
|
||||
return RestoreManifestSchema(
|
||||
total_files=total_unique_files,
|
||||
total_size=total_size,
|
||||
media_required=requirements,
|
||||
)
|
||||
|
||||
+196
-4
@@ -1,6 +1,8 @@
|
||||
from fastapi import APIRouter, HTTPException, Depends, BackgroundTasks
|
||||
from fastapi.responses import StreamingResponse
|
||||
from fastapi import APIRouter, HTTPException, Depends, BackgroundTasks, File, UploadFile
|
||||
from fastapi.responses import StreamingResponse, FileResponse
|
||||
import os
|
||||
import shutil
|
||||
import sqlite3
|
||||
import json
|
||||
import asyncio
|
||||
from datetime import datetime
|
||||
@@ -79,6 +81,10 @@ class SettingSchema(BaseModel):
|
||||
value: str
|
||||
|
||||
|
||||
class TestNotificationRequest(BaseModel):
|
||||
url: str
|
||||
|
||||
|
||||
# --- Helpers ---
|
||||
def get_source_roots(db: Session) -> List[str]:
|
||||
setting = (
|
||||
@@ -176,6 +182,15 @@ def get_dashboard_stats(db: Session = Depends(get_db)):
|
||||
for mtype, count in media_counts:
|
||||
media_dist[mtype.upper()] = count
|
||||
|
||||
# Get last successful scan time from jobs history
|
||||
last_scan = (
|
||||
db.query(models.Job)
|
||||
.filter(models.Job.job_type == "SCAN", models.Job.status == "COMPLETED")
|
||||
.order_by(models.Job.completed_at.desc())
|
||||
.first()
|
||||
)
|
||||
last_scan_time = last_scan.completed_at if last_scan else None
|
||||
|
||||
return DashboardStatsSchema(
|
||||
total_files_indexed=total_count,
|
||||
total_data_size=total_size,
|
||||
@@ -185,7 +200,7 @@ def get_dashboard_stats(db: Session = Depends(get_db)):
|
||||
ignored_files_count=ignored_count,
|
||||
ignored_data_size=ignored_size,
|
||||
redundancy_ratio=round(redundancy, 2),
|
||||
last_scan_time=scanner_manager.last_run_time,
|
||||
last_scan_time=last_scan_time,
|
||||
media_distribution=media_dist,
|
||||
)
|
||||
|
||||
@@ -224,7 +239,15 @@ async def stream_jobs():
|
||||
db.close()
|
||||
await asyncio.sleep(1)
|
||||
|
||||
return StreamingResponse(event_generator(), media_type="text/event-stream")
|
||||
return StreamingResponse(
|
||||
event_generator(),
|
||||
media_type="text/event-stream",
|
||||
headers={
|
||||
"Cache-Control": "no-cache",
|
||||
"Connection": "keep-alive",
|
||||
"X-Accel-Buffering": "no", # Critical for Nginx
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
@router.post("/scan")
|
||||
@@ -305,6 +328,50 @@ def browse_path(path: Optional[str] = None, db: Session = Depends(get_db)):
|
||||
return results
|
||||
|
||||
|
||||
@router.get("/search", response_model=List[FileItemSchema])
def search_system(q: str, include_ignored: bool = False, db: Session = Depends(get_db)):
    """Full-text search over indexed file paths via the FTS5 trigram table.

    Args:
        q: Raw search string. Queries shorter than 3 characters (the
           trigram minimum) return an empty list.
        include_ignored: When False, ignored files are filtered out.

    Returns:
        Up to 200 matching files, sorted case-insensitively by name.
    """
    if not q or len(q) < 3:
        return []

    ignore_filter = " AND fs.is_ignored = 0" if not include_ignored else ""

    # Use FTS5 for instantaneous full-text search
    sql = text(f"""
        SELECT fs.file_path, fs.size, fs.mtime, fs.id, fs.is_ignored
        FROM filesystem_fts
        JOIN filesystem_state fs ON fs.id = filesystem_fts.rowid
        WHERE filesystem_fts MATCH :query {ignore_filter}
        LIMIT 200
    """)

    # BUG FIX: the input is wrapped as an FTS5 phrase, but an embedded
    # double quote previously broke out of the phrase and produced an FTS5
    # syntax error (or altered the MATCH expression). FTS5 escapes a quote
    # inside a quoted string by doubling it.
    safe_query = '"' + q.replace('"', '""') + '"'
    files = db.execute(sql, {"query": safe_query}).fetchall()

    tracking_map = {s.path: s.action for s in db.query(models.TrackedSource).all()}
    spec = get_exclusion_spec(db)

    results = []
    for f in files:
        path = f[0]
        name = path.split("/")[-1]
        tracked, _ = get_tracking_status(path, tracking_map, spec)

        results.append(
            FileItemSchema(
                name=name,
                path=path,
                type="file",
                size=f[1],
                mtime=f[2],
                tracked=tracked,
                ignored=f[4],
            )
        )

    results.sort(key=lambda x: x.name.lower())
    return results
|
||||
|
||||
|
||||
@router.post("/track/batch")
|
||||
def track_batch(req: BatchTrackRequest, db: Session = Depends(get_db)):
|
||||
for path in req.tracks:
|
||||
@@ -339,6 +406,8 @@ def get_settings(db: Session = Depends(get_db)):
|
||||
|
||||
@router.post("/settings")
|
||||
def update_setting(req: SettingSchema, db: Session = Depends(get_db)):
|
||||
from app.services.scheduler import scheduler_manager
|
||||
|
||||
setting = (
|
||||
db.query(models.SystemSetting)
|
||||
.filter(models.SystemSetting.key == req.key)
|
||||
@@ -349,12 +418,135 @@ def update_setting(req: SettingSchema, db: Session = Depends(get_db)):
|
||||
else:
|
||||
db.add(models.SystemSetting(key=req.key, value=req.value))
|
||||
db.commit()
|
||||
|
||||
# Update scheduler if it's a schedule setting
|
||||
if req.key == "schedule_scan":
|
||||
scheduler_manager.add_job(
|
||||
"system_scan", scheduler_manager.run_system_scan, req.value
|
||||
)
|
||||
elif req.key == "schedule_archival":
|
||||
scheduler_manager.add_job(
|
||||
"system_archival", scheduler_manager.run_system_archival, req.value
|
||||
)
|
||||
|
||||
return {"message": "Updated"}
|
||||
|
||||
|
||||
@router.post("/notifications/test")
def test_notification(req: TestNotificationRequest):
    """Send a test message to the given Apprise URL and report the outcome."""
    from app.services.notifications import notification_manager

    if notification_manager.test_notification(req.url):
        return {"message": "Test notification sent successfully"}
    raise HTTPException(
        status_code=500,
        detail="Failed to send test notification. Check your Apprise URL.",
    )
|
||||
|
||||
|
||||
@router.get("/database/export")
def export_database():
    """Stream a consistent snapshot of the live SQLite database.

    Uses the sqlite3 backup API so the copy is coherent even while the app
    is writing; the temporary copy is removed after the response is sent.
    """
    db_path = "tapehoard.db"
    if not os.path.exists(db_path):
        raise HTTPException(status_code=404, detail="Database file not found")

    # We create a temporary copy to ensure we don't return a partially locked file
    export_path = "tapehoard_export.db"
    try:
        # Use sqlite3 backup API for a clean copy of the live DB
        src = sqlite3.connect(db_path)
        dest = sqlite3.connect(export_path)
        with dest:
            src.backup(dest)
        src.close()
        dest.close()

        # BUG FIX: previously `background=BackgroundTasks().add_task(...)`
        # was passed, but add_task() returns None — the cleanup never ran
        # and every export leaked a file. Pass the task collection itself.
        cleanup = BackgroundTasks()
        cleanup.add_task(
            lambda: os.remove(export_path) if os.path.exists(export_path) else None
        )
        return FileResponse(
            export_path,
            filename=f"tapehoard_{datetime.now().strftime('%Y%m%d_%H%M%S')}.db",
            background=cleanup,
        )
    except Exception as e:
        if os.path.exists(export_path):
            os.remove(export_path)
        raise HTTPException(status_code=500, detail=f"Export failed: {str(e)}")
|
||||
|
||||
|
||||
@router.post("/database/import")
async def import_database(file: UploadFile = File(...), db: Session = Depends(get_db)):
    """Replace the live database with an uploaded SQLite snapshot.

    The upload is staged to a temp file, sanity-checked as SQLite, then
    copied over the live DB via the sqlite3 backup API so the overwrite is
    safe while the application is running.
    """
    # Validate it's a sqlite file
    filename = file.filename or ""
    if not filename.endswith(".db"):
        raise HTTPException(
            status_code=400, detail="Invalid file type. Must be a .db file."
        )

    temp_path = "tapehoard_import.db"
    try:
        with open(temp_path, "wb") as buffer:
            shutil.copyfileobj(file.file, buffer)

        # Verify it's a valid SQLite DB — the query fails if the file isn't one
        conn = sqlite3.connect(temp_path)
        conn.execute("SELECT name FROM sqlite_master WHERE type='table'")
        conn.close()

        # Replace the live DB
        # To do this safely while running, we use the backup API to overwrite our own live DB
        db_path = "tapehoard.db"
        src = sqlite3.connect(temp_path)
        dest = sqlite3.connect(db_path)
        with dest:
            src.backup(dest)
        src.close()
        dest.close()

        return {
            "message": "Database restored successfully. Application state has been updated."
        }
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Import failed: {str(e)}")
    finally:
        # Always remove the staged upload, success or failure.
        if os.path.exists(temp_path):
            os.remove(temp_path)
|
||||
|
||||
|
||||
@router.get("/tree")
def get_tree(path: Optional[str] = None, db: Session = Depends(get_db)):
    """Return the immediate subdirectories of *path* for the tree browser.

    With no path (or "ROOT"), the configured source roots are returned.
    Unreadable or non-directory paths yield an empty list.
    """
    roots = get_source_roots(db)
    if path is None or path == "ROOT":
        return [{"name": r, "path": r, "has_children": True} for r in roots]

    if not (os.path.exists(path) and os.path.isdir(path)):
        return []

    nodes = []
    try:
        with os.scandir(path) as entries:
            for entry in entries:
                if not entry.is_dir():
                    continue
                # A node "has children" only when it contains at least one
                # subdirectory of its own; permission errors count as none.
                child_dirs = False
                try:
                    with os.scandir(entry.path) as children:
                        child_dirs = any(c.is_dir() for c in children)
                except Exception:
                    pass
                nodes.append(
                    {
                        "name": entry.name,
                        "path": entry.path,
                        "has_children": child_dirs,
                    }
                )
    except Exception:
        return []

    nodes.sort(key=lambda n: n["name"].lower())
    return nodes
|
||||
|
||||
@@ -1,13 +1,18 @@
|
||||
import os
|
||||
from sqlalchemy import create_engine
|
||||
from sqlalchemy.orm import sessionmaker
|
||||
|
||||
# Dependency mapping for FastAPI
|
||||
# Using standard relative path, but easily overridden with env vars later
|
||||
SQLALCHEMY_DATABASE_URL = "sqlite:///tapehoard.db"
|
||||
SQLALCHEMY_DATABASE_URL = os.getenv("DATABASE_URL", "sqlite:///tapehoard.db")
|
||||
|
||||
# connect_args={"check_same_thread": False} is required for SQLite in FastAPI
|
||||
engine = create_engine(
|
||||
SQLALCHEMY_DATABASE_URL, connect_args={"check_same_thread": False}
|
||||
SQLALCHEMY_DATABASE_URL,
|
||||
connect_args={"check_same_thread": False},
|
||||
pool_size=20,
|
||||
max_overflow=10,
|
||||
pool_timeout=30,
|
||||
)
|
||||
|
||||
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
|
||||
|
||||
@@ -76,6 +76,17 @@ class FileVersion(Base):
|
||||
file_number: Mapped[str] = mapped_column(String) # Tape position or object path
|
||||
offset_in_tar: Mapped[Optional[int]] = mapped_column(Integer)
|
||||
|
||||
# Split File Support
|
||||
is_split: Mapped[bool] = mapped_column(Boolean, default=False)
|
||||
split_id: Mapped[Optional[str]] = mapped_column(
|
||||
String, nullable=True
|
||||
) # UUID grouping parts
|
||||
offset_start: Mapped[int] = mapped_column(BigInteger, default=0)
|
||||
offset_end: Mapped[int] = mapped_column(BigInteger, default=0)
|
||||
created_at: Mapped[datetime] = mapped_column(
|
||||
DateTime, default=lambda: datetime.now(timezone.utc)
|
||||
)
|
||||
|
||||
file_state: Mapped["FilesystemState"] = relationship(back_populates="versions")
|
||||
media: Mapped["StorageMedia"] = relationship(back_populates="versions")
|
||||
|
||||
|
||||
+37
-4
@@ -1,16 +1,32 @@
|
||||
import os
|
||||
from contextlib import asynccontextmanager
|
||||
from fastapi import FastAPI
|
||||
from fastapi.middleware.cors import CORSMiddleware
|
||||
from fastapi.staticfiles import StaticFiles
|
||||
from fastapi.responses import FileResponse, JSONResponse
|
||||
from app.api import system, inventory, backups, restores
|
||||
from app.db.database import engine
|
||||
from app.db import models
|
||||
from app.services.scheduler import scheduler_manager
|
||||
|
||||
# Create tables
|
||||
# Create standard tables
|
||||
models.Base.metadata.create_all(bind=engine)
|
||||
|
||||
|
||||
@asynccontextmanager
async def lifespan(app: FastAPI):
    """FastAPI lifespan hook: run the job scheduler alongside the app.

    The scheduler starts before the server accepts requests and is stopped
    cleanly when the server shuts down.
    """
    # Startup
    scheduler_manager.start()
    yield
    # Shutdown
    scheduler_manager.stop()
|
||||
|
||||
|
||||
app = FastAPI(
|
||||
title="TapeHoard API",
|
||||
description="A robust, index-driven Tape Backup Manager",
|
||||
version="0.1.0",
|
||||
lifespan=lifespan,
|
||||
)
|
||||
|
||||
# Configure CORS
|
||||
@@ -28,7 +44,24 @@ app.include_router(inventory.router)
|
||||
app.include_router(backups.router)
|
||||
app.include_router(restores.router)
|
||||
|
||||
# Mount frontend static files
|
||||
# We expect the 'build' directory to exist at the root level of the app
|
||||
static_path = "static"
|
||||
if os.path.exists(static_path):
|
||||
app.mount("/", StaticFiles(directory=static_path, html=True), name="static")
|
||||
|
||||
@app.get("/")
|
||||
def read_root():
|
||||
return {"message": "Welcome to TapeHoard API"}
|
||||
# Add catch-all route for SPA (SvelteKit)
@app.exception_handler(404)
async def spa_catch_all(request, exc):
    """Serve the SPA entry point for any unknown non-API path."""
    # If the request is for an API endpoint, return 404 normally
    if request.url.path.startswith(
        ("/system", "/inventory", "/backups", "/restores")
    ):
        return JSONResponse(status_code=404, content={"detail": "Not Found"})
    # Otherwise, serve the SPA index. If the frontend build is missing
    # (e.g. dev setups without a `static/` directory), fall back to a
    # plain 404 instead of crashing with FileNotFoundError -> 500.
    index_file = os.path.join(static_path, "index.html")
    if not os.path.isfile(index_file):
        return JSONResponse(status_code=404, content={"detail": "Not Found"})
    return FileResponse(index_file)
|
||||
|
||||
|
||||
@app.get("/health")
def health_check():
    """Liveness probe endpoint for container orchestrators."""
    return {"status": "healthy"}
|
||||
|
||||
@@ -16,6 +16,13 @@ class AbstractStorageProvider(ABC):
|
||||
"""
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def initialize_media(self, media_id: str) -> bool:
|
||||
"""
|
||||
Initializes raw media by writing the tapehoard identifier/label.
|
||||
"""
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def prepare_for_write(self, media_id: str) -> bool:
|
||||
"""
|
||||
|
||||
@@ -28,6 +28,21 @@ class CloudStorageProvider(AbstractStorageProvider):
|
||||
logger.error(f"Failed to identify cloud bucket {self.bucket_name}: {e}")
|
||||
return None
|
||||
|
||||
    def initialize_media(self, media_id: str) -> bool:
        """Initializes cloud media by writing a dummy object to verify access"""
        try:
            # head_bucket confirms the bucket exists and is reachable with
            # the configured credentials before we attempt a write.
            self.s3.head_bucket(Bucket=self.bucket_name)
            # The marker object doubles as the media identifier label.
            self.s3.put_object(
                Bucket=self.bucket_name,
                Key=".tapehoard_id",
                Body=media_id.encode("utf-8"),
            )
            logger.info(f"Initialized Cloud bucket {media_id}")
            return True
        except Exception as e:
            logger.error(f"Failed to initialize cloud bucket {self.bucket_name}: {e}")
            return False
|
||||
|
||||
def prepare_for_write(self, media_id: str) -> bool:
|
||||
return self.identify_media() == media_id
|
||||
|
||||
|
||||
@@ -45,6 +45,21 @@ class OfflineHDDProvider(AbstractStorageProvider):
|
||||
|
||||
return None
|
||||
|
||||
    def initialize_media(self, media_id: str) -> bool:
        """Initializes HDD by writing the .tapehoard_id file"""
        try:
            # Ensure the mount point exists and label the disk with its id.
            os.makedirs(self.mount_base, exist_ok=True)
            id_file = os.path.join(self.mount_base, ".tapehoard_id")
            with open(id_file, "w") as f:
                f.write(media_id)
            # Pre-create the directory layout archives are written into.
            archive_dir = os.path.join(self.mount_base, "tapehoard_backups", "archives")
            os.makedirs(archive_dir, exist_ok=True)
            logger.info(f"Initialized HDD media {media_id} at {self.mount_base}")
            return True
        except Exception as e:
            logger.error(f"HDD Provider: Failed to initialize media: {e}")
            return False
||||
|
||||
def prepare_for_write(self, media_id: str) -> bool:
|
||||
"""Verifies the disk is mounted and the identifier matches"""
|
||||
current_id = self.identify_media()
|
||||
|
||||
@@ -5,8 +5,11 @@ from loguru import logger
|
||||
|
||||
|
||||
class LTOProvider(AbstractStorageProvider):
|
||||
def __init__(self, device_path: str = "/dev/nst0"):
|
||||
def __init__(
|
||||
self, device_path: str = "/dev/nst0", encryption_key: Optional[str] = None
|
||||
):
|
||||
self.device_path = device_path
|
||||
self.encryption_key = encryption_key
|
||||
|
||||
def get_name(self) -> str:
|
||||
return "LTO Tape"
|
||||
@@ -18,9 +21,48 @@ class LTOProvider(AbstractStorageProvider):
|
||||
logger.error(f"Tape command 'mt {command}' failed: {e}")
|
||||
raise
|
||||
|
||||
    def _setup_encryption(self):
        """Configures hardware encryption on the drive using stenc.

        With no key configured, encryption is explicitly turned off
        (best-effort). With a key, it is imported via stdin and the drive
        is verified to be in the encrypted state; any failure raises.
        """
        if not self.encryption_key:
            # Explicitly disable encryption if no key provided
            try:
                subprocess.run(
                    ["stenc", "-f", self.device_path, "--off"], capture_output=True
                )
            except Exception:
                # Best-effort: stenc may be absent or the drive may not
                # support encryption; proceed unencrypted either way.
                pass
            return

        try:
            logger.info(f"Setting LTO hardware encryption key for {self.device_path}")
            # stenc expects a 32-byte hex key (256-bit)
            # We use a pipe to avoid leaving the key in the process list
            proc = subprocess.Popen(
                ["stenc", "-f", self.device_path, "--import", "-k", "-"],
                stdin=subprocess.PIPE,
                stderr=subprocess.PIPE,
                text=True,
            )
            _, stderr = proc.communicate(input=self.encryption_key)

            if proc.returncode != 0:
                logger.error(f"Failed to load encryption key: {stderr}")
                raise RuntimeError(f"LTO Encryption Setup Failed: {stderr}")

            # Verify encryption is on
            subprocess.run(["stenc", "-f", self.device_path, "--on"], check=True)
            logger.info("LTO Hardware Encryption ENABLED and LOCKED")

        except Exception as e:
            logger.error(f"Hardware encryption error: {e}")
            raise
|
||||
|
||||
def identify_media(self) -> Optional[str]:
|
||||
"""Reads the label from the beginning of the tape (File Mark 0)"""
|
||||
try:
|
||||
# We must set up encryption BEFORE trying to read the label if it's an encrypted tape
|
||||
self._setup_encryption()
|
||||
|
||||
self._run_mt("rewind")
|
||||
# Try to read the label file
|
||||
result = subprocess.run(
|
||||
@@ -35,6 +77,43 @@ class LTOProvider(AbstractStorageProvider):
|
||||
logger.error(f"Failed to identify tape: {e}")
|
||||
return None
|
||||
|
||||
    def initialize_media(self, media_id: str) -> bool:
        """Writes the identifier to File Mark 0 on the tape.

        The label is packed into a one-entry tar (`.tapehoard_label`) and
        streamed to the device with dd, then terminated with an EOF mark.
        The head is left rewound to the start of the tape.
        """
        try:
            self._run_mt("rewind")
            self._run_mt("weof")  # Ensure we are starting clean
            self._run_mt("rewind")

            import tempfile
            import tarfile

            # Build the label tar on disk, then stream it to the device.
            with tempfile.NamedTemporaryFile("w") as tmp_lbl:
                tmp_lbl.write(media_id)
                tmp_lbl.flush()

                with tempfile.NamedTemporaryFile("wb") as tmp_tar:
                    with tarfile.open(tmp_tar.name, "w") as tar:
                        tar.add(tmp_lbl.name, arcname=".tapehoard_label")

                    # Write to tape
                    with open(tmp_tar.name, "rb") as f:
                        proc = subprocess.Popen(
                            ["dd", f"of={self.device_path}", "bs=256k"],
                            stdin=subprocess.PIPE,
                        )
                        if proc.stdin:
                            proc.stdin.write(f.read())
                            proc.stdin.close()
                        proc.wait()

            # Mark the end of the label file and rewind for the caller.
            self._run_mt("weof")
            self._run_mt("rewind")
            logger.info(f"Initialized LTO tape with label {media_id}")
            return True
        except Exception as e:
            logger.error(f"Failed to initialize tape: {e}")
            return False
|
||||
|
||||
def prepare_for_write(self, media_id: str) -> bool:
|
||||
"""Fast-forwards to the end of the data to prepare for appending"""
|
||||
current_id = self.identify_media()
|
||||
@@ -46,10 +125,42 @@ class LTOProvider(AbstractStorageProvider):
|
||||
self._run_mt("eod")
|
||||
return True
|
||||
|
||||
def _get_current_file_number(self) -> str:
|
||||
"""Parses 'mt status' to get the current tape file position"""
|
||||
try:
|
||||
result = subprocess.run(
|
||||
["mt", "-f", self.device_path, "status"],
|
||||
capture_output=True,
|
||||
text=True,
|
||||
check=True,
|
||||
)
|
||||
# mt status output varies by OS/Driver, but usually contains 'File number=X'
|
||||
# We look for a line like 'File number=2, block number=0'
|
||||
import re
|
||||
|
||||
match = re.search(r"File number=(\d+)", result.stdout)
|
||||
if match:
|
||||
return match.group(1)
|
||||
|
||||
# Alternative format
|
||||
match = re.search(r"file number (\d+)", result.stdout)
|
||||
if match:
|
||||
return match.group(1)
|
||||
|
||||
logger.warning(
|
||||
f"Could not parse file number from mt status: {result.stdout}"
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to get tape status: {e}")
|
||||
return "0"
|
||||
|
||||
def write_archive(self, media_id: str, stream: BinaryIO) -> str:
|
||||
"""Writes the stream to tape and returns the file number index"""
|
||||
logger.info(f"Streaming archive to LTO {media_id} at current head position")
|
||||
|
||||
# Get position BEFORE writing
|
||||
file_num = self._get_current_file_number()
|
||||
|
||||
proc = subprocess.Popen(
|
||||
["dd", f"of={self.device_path}", "bs=256k"], stdin=subprocess.PIPE
|
||||
)
|
||||
@@ -66,7 +177,10 @@ class LTOProvider(AbstractStorageProvider):
|
||||
|
||||
proc.wait()
|
||||
|
||||
return "unknown" # To be refined with 'mt status' parsing
|
||||
# After writing, we should be at the NEXT file mark.
|
||||
# But tar/dd usually leaves us at the end of the written data.
|
||||
# We'll return the position we started at as the 'location_id'
|
||||
return file_num
|
||||
|
||||
    def finalize_media(self, media_id: str):
        """Finish a session on this tape: rewind it and unload the drive."""
        self._run_mt("offline")  # Rewind and eject
|
||||
|
||||
@@ -1,11 +1,12 @@
|
||||
import os
|
||||
import tarfile
|
||||
import json
|
||||
import uuid
|
||||
from datetime import datetime, timezone
|
||||
from typing import List, Optional, Dict, Any
|
||||
from loguru import logger
|
||||
from sqlalchemy.orm import Session
|
||||
from sqlalchemy import not_
|
||||
from sqlalchemy import not_, func
|
||||
from app.db import models
|
||||
from app.services.scanner import JobManager
|
||||
from app.providers.hdd import OfflineHDDProvider
|
||||
@@ -13,6 +14,39 @@ from app.providers.tape import LTOProvider
|
||||
from app.providers.cloud import CloudStorageProvider
|
||||
|
||||
|
||||
class RangeFile:
|
||||
"""A file-like object that only reads a specific range of a file."""
|
||||
|
||||
def __init__(self, file_path: str, offset_start: int, length: int):
|
||||
self.file_path = file_path
|
||||
self.offset_start = offset_start
|
||||
self.length = length
|
||||
self.remaining = length
|
||||
self.file = open(file_path, "rb")
|
||||
self.file.seek(offset_start)
|
||||
|
||||
def read(self, size: int = -1) -> bytes:
|
||||
if self.remaining <= 0:
|
||||
return b""
|
||||
|
||||
to_read = self.remaining
|
||||
if size > 0:
|
||||
to_read = min(size, self.remaining)
|
||||
|
||||
data = self.file.read(to_read)
|
||||
self.remaining -= len(data)
|
||||
return data
|
||||
|
||||
def close(self):
|
||||
self.file.close()
|
||||
|
||||
def __enter__(self):
|
||||
return self
|
||||
|
||||
def __exit__(self, exc_type, exc_val, exc_tb):
|
||||
self.close()
|
||||
|
||||
|
||||
class ArchiverService:
|
||||
def __init__(self, staging_dir: str = "/staging"):
|
||||
self.staging_dir = staging_dir
|
||||
@@ -33,7 +67,10 @@ class ArchiverService:
|
||||
pass
|
||||
|
||||
if media.media_type == "tape":
|
||||
return LTOProvider(device_path=config.get("device_path", "/dev/nst0"))
|
||||
return LTOProvider(
|
||||
device_path=config.get("device_path", "/dev/nst0"),
|
||||
encryption_key=config.get("encryption_key"),
|
||||
)
|
||||
elif media.media_type == "hdd":
|
||||
return OfflineHDDProvider(
|
||||
mount_base=config.get("mount_path", "/mnt/backup")
|
||||
@@ -42,22 +79,40 @@ class ArchiverService:
|
||||
return CloudStorageProvider(config=config)
|
||||
return None
|
||||
|
||||
def get_eligible_files(self, db: Session) -> List[models.FilesystemState]:
|
||||
"""Returns files that are indexed but have no version on any media"""
|
||||
def get_eligible_files(self, db: Session):
|
||||
"""Returns files that are indexed but have no version, or are partially backed up"""
|
||||
# Optimized query to find files that are not fully covered by their versions
|
||||
# A file is eligible if sum(offset_end - offset_start) < size
|
||||
|
||||
subquery = (
|
||||
db.query(
|
||||
models.FileVersion.filesystem_state_id,
|
||||
func.sum(
|
||||
models.FileVersion.offset_end - models.FileVersion.offset_start
|
||||
).label("covered_size"),
|
||||
)
|
||||
.group_by(models.FileVersion.filesystem_state_id)
|
||||
.subquery()
|
||||
)
|
||||
|
||||
return (
|
||||
db.query(models.FilesystemState)
|
||||
.outerjoin(
|
||||
subquery, models.FilesystemState.id == subquery.c.filesystem_state_id
|
||||
)
|
||||
.filter(
|
||||
models.FilesystemState.is_indexed,
|
||||
not_(models.FilesystemState.is_ignored),
|
||||
not_(models.FilesystemState.versions.any()),
|
||||
(subquery.c.covered_size.is_(None))
|
||||
| (subquery.c.covered_size < models.FilesystemState.size),
|
||||
)
|
||||
.all()
|
||||
.yield_per(1000)
|
||||
)
|
||||
|
||||
def create_backup_set(
|
||||
self, db: Session, media_id: int, max_bytes: Optional[int] = None
|
||||
) -> List[models.FilesystemState]:
|
||||
"""Selects a batch of files that fit on the media's remaining capacity"""
|
||||
) -> List[Dict[str, Any]]:
|
||||
"""Selects a batch of files/chunks that fit on the media's remaining capacity"""
|
||||
media = db.query(models.StorageMedia).get(media_id)
|
||||
if not media:
|
||||
return []
|
||||
@@ -68,14 +123,72 @@ class ArchiverService:
|
||||
|
||||
eligible = self.get_eligible_files(db)
|
||||
|
||||
# Simple Greedy Bin-Packing
|
||||
backup_set = []
|
||||
current_size = 0
|
||||
|
||||
# We need at least some space to make it worthwhile
|
||||
MIN_CHUNK_SIZE = 100 * 1024 * 1024 # 100MB
|
||||
|
||||
for f in eligible:
|
||||
if current_size + f.size <= remaining_capacity:
|
||||
backup_set.append(f)
|
||||
current_size += f.size
|
||||
if current_size >= remaining_capacity:
|
||||
break
|
||||
|
||||
# Calculate how much of this file is already backed up
|
||||
# For simplicity, we assume we always backup from the end of the last chunk
|
||||
covered_size = (
|
||||
db.query(
|
||||
func.sum(
|
||||
models.FileVersion.offset_end - models.FileVersion.offset_start
|
||||
)
|
||||
)
|
||||
.filter(models.FileVersion.filesystem_state_id == f.id)
|
||||
.scalar()
|
||||
or 0
|
||||
)
|
||||
|
||||
remaining_file_size = f.size - covered_size
|
||||
|
||||
# Allow 0-byte files if they have no versions yet
|
||||
if remaining_file_size <= 0 and f.size > 0:
|
||||
continue
|
||||
if f.size == 0:
|
||||
# Check if it already has a version to avoid infinite loop
|
||||
has_version = (
|
||||
db.query(models.FileVersion)
|
||||
.filter(models.FileVersion.filesystem_state_id == f.id)
|
||||
.first()
|
||||
is not None
|
||||
)
|
||||
if has_version:
|
||||
continue
|
||||
|
||||
space_left = remaining_capacity - current_size
|
||||
|
||||
if remaining_file_size <= space_left:
|
||||
# Entire remaining file fits
|
||||
backup_set.append(
|
||||
{
|
||||
"file_state": f,
|
||||
"offset_start": covered_size,
|
||||
"offset_end": f.size,
|
||||
"is_split": covered_size
|
||||
> 0, # It's a split if we already had parts
|
||||
}
|
||||
)
|
||||
current_size += remaining_file_size
|
||||
elif space_left >= MIN_CHUNK_SIZE:
|
||||
# Only part of it fits
|
||||
backup_set.append(
|
||||
{
|
||||
"file_state": f,
|
||||
"offset_start": covered_size,
|
||||
"offset_end": covered_size + space_left,
|
||||
"is_split": True,
|
||||
}
|
||||
)
|
||||
current_size += space_left
|
||||
# Once we split a file to fill the media, we are done with this set
|
||||
break
|
||||
|
||||
return backup_set
|
||||
|
||||
@@ -96,11 +209,15 @@ class ArchiverService:
|
||||
logger.info("No eligible files for backup")
|
||||
return
|
||||
|
||||
total_bytes = sum(f.size for f in backup_set)
|
||||
total_bytes = sum(
|
||||
item["offset_end"] - item["offset_start"] for item in backup_set
|
||||
)
|
||||
divisor = max(total_bytes, 1)
|
||||
|
||||
JobManager.update_job(
|
||||
job_id,
|
||||
10.0,
|
||||
f"Backing up {len(backup_set)} files ({total_bytes / 1e9:.2f} GB)...",
|
||||
f"Backing up {len(backup_set)} items ({total_bytes / 1e9:.2f} GB)...",
|
||||
)
|
||||
|
||||
provider = self._get_provider(media)
|
||||
@@ -122,7 +239,6 @@ class ArchiverService:
|
||||
return
|
||||
|
||||
# 2. Create Archive in Staging
|
||||
# For now, we package everything into one tar for this job
|
||||
archive_name = (
|
||||
f"backup_{datetime.now(timezone.utc).strftime('%Y%m%d_%H%M%S')}.tar"
|
||||
)
|
||||
@@ -130,60 +246,310 @@ class ArchiverService:
|
||||
|
||||
try:
|
||||
processed_bytes = 0
|
||||
# Identify files that can be deduplicated (hash already exists on media)
|
||||
deduped_items = []
|
||||
remaining_backup_set = []
|
||||
|
||||
for item in backup_set:
|
||||
f_state = item["file_state"]
|
||||
# Look for an existing version with the same hash
|
||||
if f_state.sha256_hash:
|
||||
existing_v = (
|
||||
db.query(models.FileVersion)
|
||||
.join(models.FilesystemState)
|
||||
.filter(
|
||||
models.FilesystemState.sha256_hash == f_state.sha256_hash,
|
||||
models.FileVersion.offset_start == item["offset_start"],
|
||||
models.FileVersion.offset_end == item["offset_end"],
|
||||
)
|
||||
.first()
|
||||
)
|
||||
|
||||
if existing_v:
|
||||
logger.info(
|
||||
f"Deduplicating {f_state.file_path} -> existing version on {existing_v.media_id}"
|
||||
)
|
||||
deduped_items.append(
|
||||
{
|
||||
"file_state": f_state,
|
||||
"existing_v": existing_v,
|
||||
"item": item,
|
||||
}
|
||||
)
|
||||
continue
|
||||
|
||||
remaining_backup_set.append(item)
|
||||
|
||||
if remaining_backup_set:
|
||||
with tarfile.open(staging_path, "w") as tar:
|
||||
for f_state in backup_set:
|
||||
for item in remaining_backup_set:
|
||||
if JobManager.is_cancelled(job_id):
|
||||
break
|
||||
|
||||
f_state = item["file_state"]
|
||||
start = item["offset_start"]
|
||||
end = item["offset_end"]
|
||||
chunk_size = end - start
|
||||
|
||||
JobManager.update_job(
|
||||
job_id,
|
||||
15.0 + (70.0 * (processed_bytes / total_bytes)),
|
||||
f"Archiving: {os.path.basename(f_state.file_path)}",
|
||||
15.0 + (70.0 * (processed_bytes / divisor)),
|
||||
f"Archiving: {os.path.basename(f_state.file_path)} (Part {start}-{end})",
|
||||
)
|
||||
|
||||
if os.path.exists(f_state.file_path):
|
||||
tar.add(
|
||||
f_state.file_path, arcname=f_state.file_path.lstrip("/")
|
||||
)
|
||||
# Use RangeFile to stream only the requested part
|
||||
arcname = f_state.file_path.lstrip("/")
|
||||
if item["is_split"]:
|
||||
# Append part info to arcname if it's split
|
||||
arcname = f"{arcname}.part_{start}_{end}"
|
||||
|
||||
processed_bytes += f_state.size
|
||||
tarinfo = tar.gettarinfo(f_state.file_path, arcname=arcname)
|
||||
tarinfo.size = chunk_size # Override size
|
||||
|
||||
with RangeFile(f_state.file_path, start, chunk_size) as rf:
|
||||
tar.addfile(tarinfo, rf)
|
||||
|
||||
processed_bytes += chunk_size
|
||||
|
||||
if JobManager.is_cancelled(job_id):
|
||||
if os.path.exists(staging_path):
|
||||
os.remove(staging_path)
|
||||
return
|
||||
|
||||
# 3. Stream to Provider
|
||||
# 3. Stream to Provider (if there's anything to stream)
|
||||
location_id = "DEDUPLICATED"
|
||||
if remaining_backup_set:
|
||||
JobManager.update_job(
|
||||
job_id, 85.0, f"Streaming archive to {media.media_type}..."
|
||||
)
|
||||
with open(staging_path, "rb") as archive_stream:
|
||||
location_id = provider.write_archive(media.identifier, archive_stream)
|
||||
location_id = provider.write_archive(
|
||||
media.identifier, archive_stream
|
||||
)
|
||||
media.bytes_used += os.path.getsize(staging_path)
|
||||
|
||||
# 4. Finalize & Record
|
||||
provider.finalize_media(media.identifier)
|
||||
|
||||
# Update database records
|
||||
for f_state in backup_set:
|
||||
# Update database records for written files
|
||||
split_id = str(uuid.uuid4())
|
||||
for item in remaining_backup_set:
|
||||
f_state = item["file_state"]
|
||||
version = models.FileVersion(
|
||||
filesystem_state_id=f_state.id,
|
||||
media_id=media.id,
|
||||
file_number=location_id,
|
||||
is_split=item["is_split"],
|
||||
split_id=split_id if item["is_split"] else None,
|
||||
offset_start=item["offset_start"],
|
||||
offset_end=item["offset_end"],
|
||||
)
|
||||
db.add(version)
|
||||
|
||||
# Update database records for deduped files
|
||||
for dedup in deduped_items:
|
||||
f_state = dedup["file_state"]
|
||||
existing_v = dedup["existing_v"]
|
||||
item = dedup["item"]
|
||||
|
||||
version = models.FileVersion(
|
||||
filesystem_state_id=f_state.id,
|
||||
media_id=existing_v.media_id,
|
||||
file_number=existing_v.file_number,
|
||||
is_split=existing_v.is_split,
|
||||
split_id=existing_v.split_id,
|
||||
offset_start=item["offset_start"],
|
||||
offset_end=item["offset_end"],
|
||||
)
|
||||
db.add(version)
|
||||
|
||||
media.bytes_used += os.path.getsize(staging_path)
|
||||
db.commit()
|
||||
|
||||
JobManager.complete_job(job_id)
|
||||
logger.info(f"Backup job {job_id} completed successfully")
|
||||
from app.services.notifications import notification_manager
|
||||
|
||||
notification_manager.notify(
|
||||
"Archival Completed",
|
||||
f"Archival job to {media.identifier} finished. {len(backup_set)} items written.",
|
||||
"success",
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.exception(f"Backup failed: {e}")
|
||||
JobManager.fail_job(job_id, str(e))
|
||||
from app.services.notifications import notification_manager
|
||||
|
||||
notification_manager.notify(
|
||||
"Archival Failed",
|
||||
f"Archival job to {media.identifier} failed: {str(e)}",
|
||||
"failure",
|
||||
)
|
||||
finally:
|
||||
if os.path.exists(staging_path):
|
||||
os.remove(staging_path)
|
||||
|
||||
def run_restore(self, db: Session, destination: str, job_id: int):
|
||||
JobManager.start_job(job_id)
|
||||
JobManager.update_job(job_id, 2.0, "Preparing restore manifest...")
|
||||
|
||||
cart_items = db.query(models.RestoreCart).all()
|
||||
if not cart_items:
|
||||
JobManager.complete_job(job_id)
|
||||
logger.info("No items in restore cart")
|
||||
return
|
||||
|
||||
total_bytes = sum(item.file_state.size for item in cart_items)
|
||||
divisor = max(total_bytes, 1)
|
||||
|
||||
JobManager.update_job(
|
||||
job_id, 5.0, f"Restoring {len(cart_items)} files to {destination}..."
|
||||
)
|
||||
|
||||
# Ensure destination exists
|
||||
os.makedirs(destination, exist_ok=True)
|
||||
|
||||
# Group by media -> location_id -> [FileVersion]
|
||||
# We need FileVersion objects to know the offsets
|
||||
media_tasks = {} # media_id -> {location_id: [FileVersion]}
|
||||
|
||||
for item in cart_items:
|
||||
if not item.file_state.versions:
|
||||
continue
|
||||
|
||||
# Find the most recent "full" version or set of parts
|
||||
# For now, we'll just pick all parts of the FIRST version group we find
|
||||
# Actually, let's just get ALL versions and filter the logic
|
||||
# Simpler: Get the latest versions for this file
|
||||
versions = (
|
||||
db.query(models.FileVersion)
|
||||
.filter(models.FileVersion.filesystem_state_id == item.file_state.id)
|
||||
.order_by(models.FileVersion.created_at.desc())
|
||||
.all()
|
||||
)
|
||||
|
||||
if not versions:
|
||||
continue
|
||||
|
||||
# If the latest one is split, we might need multiple.
|
||||
# For now, let's just restore WHATEVER we have versions for.
|
||||
for v in versions:
|
||||
if v.media_id not in media_tasks:
|
||||
media_tasks[v.media_id] = {}
|
||||
if v.file_number not in media_tasks[v.media_id]:
|
||||
media_tasks[v.media_id][v.file_number] = []
|
||||
media_tasks[v.media_id][v.file_number].append(v)
|
||||
|
||||
processed_bytes = 0
|
||||
|
||||
try:
|
||||
for media_id, locations in media_tasks.items():
|
||||
if JobManager.is_cancelled(job_id):
|
||||
break
|
||||
|
||||
media = db.query(models.StorageMedia).get(media_id)
|
||||
if not media:
|
||||
continue
|
||||
|
||||
provider = self._get_provider(media)
|
||||
if not provider:
|
||||
continue
|
||||
|
||||
# Check media
|
||||
JobManager.update_job(
|
||||
job_id,
|
||||
10.0 + (80.0 * (processed_bytes / divisor)),
|
||||
f"Waiting for {media.identifier}...",
|
||||
)
|
||||
current_id = provider.identify_media()
|
||||
if current_id != media.identifier:
|
||||
raise Exception(
|
||||
f"Media mismatch! Insert {media.identifier} (Found: {current_id})"
|
||||
)
|
||||
|
||||
# Sort location IDs for sequential access
|
||||
for loc_id in sorted(locations.keys()):
|
||||
v_list = locations[loc_id]
|
||||
if JobManager.is_cancelled(job_id):
|
||||
break
|
||||
|
||||
JobManager.update_job(
|
||||
job_id,
|
||||
10.0 + (80.0 * (processed_bytes / divisor)),
|
||||
f"Extracting from {media.identifier} (Archive {loc_id})...",
|
||||
)
|
||||
|
||||
archive_stream = provider.read_archive(media.identifier, loc_id)
|
||||
|
||||
# Extract using tarfile
|
||||
with tarfile.open(fileobj=archive_stream, mode="r|*") as tar:
|
||||
# Build a map of what's in this tar that we want
|
||||
# We use part names for split files
|
||||
wanted_map = {} # arcname -> FileVersion
|
||||
for v in v_list:
|
||||
arcname = v.file_state.file_path.lstrip("/")
|
||||
if v.is_split:
|
||||
arcname = (
|
||||
f"{arcname}.part_{v.offset_start}_{v.offset_end}"
|
||||
)
|
||||
wanted_map[arcname] = v
|
||||
|
||||
for member in tar:
|
||||
if JobManager.is_cancelled(job_id):
|
||||
break
|
||||
|
||||
if member.name in wanted_map:
|
||||
v = wanted_map[member.name]
|
||||
final_path = os.path.join(
|
||||
destination, v.file_state.file_path.lstrip("/")
|
||||
)
|
||||
|
||||
# Ensure dir exists
|
||||
os.makedirs(os.path.dirname(final_path), exist_ok=True)
|
||||
|
||||
if v.is_split:
|
||||
# Atomic reassembly: Write to specific offset
|
||||
# Use 'r+b' to allow seeking if file exists, 'wb' if not
|
||||
mode = "r+b" if os.path.exists(final_path) else "wb"
|
||||
with open(final_path, mode) as f:
|
||||
if mode == "wb":
|
||||
# Pre-allocate if possible or just seek
|
||||
f.truncate(v.file_state.size)
|
||||
f.seek(v.offset_start)
|
||||
# Extract the member bytes
|
||||
f_in = tar.extractfile(member)
|
||||
if f_in:
|
||||
f.write(f_in.read())
|
||||
else:
|
||||
# Standard extraction
|
||||
tar.extract(member, path=destination)
|
||||
|
||||
processed_bytes += v.offset_end - v.offset_start
|
||||
|
||||
if not JobManager.is_cancelled(job_id):
|
||||
JobManager.complete_job(job_id)
|
||||
# Clear cart
|
||||
db.query(models.RestoreCart).delete()
|
||||
db.commit()
|
||||
logger.info(f"Restore job {job_id} completed successfully")
|
||||
from app.services.notifications import notification_manager
|
||||
|
||||
notification_manager.notify(
|
||||
"Recovery Completed",
|
||||
f"Data recovery to {destination} finished successfully.",
|
||||
"success",
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.exception(f"Restore failed: {e}")
|
||||
JobManager.fail_job(job_id, str(e))
|
||||
from app.services.notifications import notification_manager
|
||||
|
||||
notification_manager.notify(
|
||||
"Recovery Failed",
|
||||
f"Data recovery to {destination} failed: {str(e)}",
|
||||
"failure",
|
||||
)
|
||||
|
||||
|
||||
archiver_manager = ArchiverService()
|
||||
|
||||
@@ -0,0 +1,64 @@
|
||||
import apprise
|
||||
from loguru import logger
|
||||
from sqlalchemy.orm import Session
|
||||
from app.db.database import SessionLocal
|
||||
from app.db import models
|
||||
import json
|
||||
|
||||
|
||||
class NotificationService:
|
||||
def __init__(self):
|
||||
self.apobj = apprise.Apprise()
|
||||
|
||||
def _load_urls(self, db: Session):
|
||||
"""Loads notification URLs from settings"""
|
||||
self.apobj.clear()
|
||||
setting = (
|
||||
db.query(models.SystemSetting)
|
||||
.filter(models.SystemSetting.key == "notification_urls")
|
||||
.first()
|
||||
)
|
||||
if setting and setting.value:
|
||||
try:
|
||||
urls = json.loads(setting.value)
|
||||
for url in urls:
|
||||
if url.strip():
|
||||
self.apobj.add(url.strip())
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to parse notification URLs: {e}")
|
||||
|
||||
def notify(self, title: str, body: str, notify_type: str = "info"):
|
||||
"""Sends a notification to all configured services"""
|
||||
db = SessionLocal()
|
||||
try:
|
||||
self._load_urls(db)
|
||||
if len(self.apobj) == 0:
|
||||
logger.debug("No notification services configured, skipping.")
|
||||
return
|
||||
|
||||
self.apobj.notify(
|
||||
title=f"[TapeHoard] {title}", body=body, notify_type=notify_type
|
||||
)
|
||||
logger.info(f"Sent notification: {title}")
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to send notification: {e}")
|
||||
finally:
|
||||
db.close()
|
||||
|
||||
def test_notification(self, url: str) -> bool:
|
||||
"""Tests a single Apprise URL"""
|
||||
try:
|
||||
ap = apprise.Apprise()
|
||||
ap.add(url)
|
||||
result = ap.notify(
|
||||
title="[TapeHoard] Test Notification",
|
||||
body="This is a test notification from your TapeHoard instance. If you see this, your configuration is correct!",
|
||||
notify_type="info",
|
||||
)
|
||||
return bool(result)
|
||||
except Exception as e:
|
||||
logger.error(f"Test notification failed for {url}: {e}")
|
||||
return False
|
||||
|
||||
|
||||
notification_manager = NotificationService()
|
||||
@@ -323,7 +323,8 @@ class ScannerService:
|
||||
self.files_processed += 1
|
||||
if self.files_processed % 50 == 0:
|
||||
db.commit()
|
||||
if job_id is not None and self.total_files_found > 0:
|
||||
if job_id is not None:
|
||||
if self.total_files_found > 0:
|
||||
prog = 10.0 + (
|
||||
90.0
|
||||
* (
|
||||
@@ -331,10 +332,15 @@ class ScannerService:
|
||||
/ self.total_files_found
|
||||
)
|
||||
)
|
||||
status_text = f"Indexing: {self.files_processed}/{self.total_files_found} items"
|
||||
else:
|
||||
prog = (
|
||||
10.0 # Keep it at a steady "working" phase
|
||||
)
|
||||
status_text = f"Scanning: {self.files_processed} items discovered..."
|
||||
|
||||
JobManager.update_job(
|
||||
job_id,
|
||||
round(prog, 1),
|
||||
f"Hashing & Indexing: {self.files_processed}/{self.total_files_found}...",
|
||||
job_id, round(prog, 1), status_text
|
||||
)
|
||||
|
||||
if job_id is not None and JobManager.is_cancelled(job_id):
|
||||
@@ -344,12 +350,24 @@ class ScannerService:
|
||||
self.last_run_time = datetime.now(timezone.utc)
|
||||
if job_id is not None:
|
||||
JobManager.complete_job(job_id)
|
||||
from app.services.notifications import notification_manager
|
||||
|
||||
notification_manager.notify(
|
||||
"Scan Completed",
|
||||
f"System scan finished. {self.files_processed} files processed, {self.files_hashed} new hashes computed.",
|
||||
"success",
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.exception(f"Scan failed: {e}")
|
||||
db.rollback()
|
||||
if job_id is not None:
|
||||
JobManager.fail_job(job_id, str(e))
|
||||
from app.services.notifications import notification_manager
|
||||
|
||||
notification_manager.notify(
|
||||
"Scan Failed", f"System scan failed: {str(e)}", "failure"
|
||||
)
|
||||
finally:
|
||||
self.is_running = False
|
||||
self.current_path = ""
|
||||
|
||||
@@ -0,0 +1,119 @@
|
||||
from apscheduler.schedulers.background import BackgroundScheduler
|
||||
from apscheduler.triggers.cron import CronTrigger
|
||||
from loguru import logger
|
||||
from sqlalchemy.orm import Session
|
||||
from app.db.database import SessionLocal
|
||||
from app.db import models
|
||||
from app.services.scanner import scanner_manager, JobManager
|
||||
from app.services.archiver import archiver_manager
|
||||
|
||||
|
||||
class SchedulerService:
|
||||
def __init__(self):
|
||||
self.scheduler = BackgroundScheduler()
|
||||
self.jobs = {}
|
||||
|
||||
def start(self):
|
||||
if not self.scheduler.running:
|
||||
self.scheduler.start()
|
||||
logger.info("Scheduler service started")
|
||||
self.load_schedules()
|
||||
|
||||
def stop(self):
|
||||
if self.scheduler.running:
|
||||
self.scheduler.shutdown()
|
||||
logger.info("Scheduler service stopped")
|
||||
|
||||
def load_schedules(self):
|
||||
"""Loads and schedules jobs from database settings"""
|
||||
db = SessionLocal()
|
||||
try:
|
||||
# 1. Scan Schedule
|
||||
scan_cron = self._get_setting(db, "schedule_scan")
|
||||
if scan_cron:
|
||||
self.add_job("system_scan", self.run_system_scan, scan_cron)
|
||||
|
||||
# 2. Archival Schedule
|
||||
# Note: This would typically pick the first active media or a designated 'auto' media
|
||||
archival_cron = self._get_setting(db, "schedule_archival")
|
||||
if archival_cron:
|
||||
self.add_job("system_archival", self.run_system_archival, archival_cron)
|
||||
finally:
|
||||
db.close()
|
||||
|
||||
def _get_setting(self, db: Session, key: str) -> str:
|
||||
setting = (
|
||||
db.query(models.SystemSetting)
|
||||
.filter(models.SystemSetting.key == key)
|
||||
.first()
|
||||
)
|
||||
return setting.value if setting else ""
|
||||
|
||||
def add_job(self, job_id, func, cron_expression):
|
||||
"""Adds or updates a job with a cron expression"""
|
||||
try:
|
||||
# Remove existing if it exists
|
||||
if self.scheduler.get_job(job_id):
|
||||
self.scheduler.remove_job(job_id)
|
||||
|
||||
if cron_expression.strip():
|
||||
self.scheduler.add_job(
|
||||
func,
|
||||
CronTrigger.from_crontab(cron_expression),
|
||||
id=job_id,
|
||||
replace_existing=True,
|
||||
)
|
||||
logger.info(f"Scheduled job {job_id} with cron: {cron_expression}")
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to schedule job {job_id}: {e}")
|
||||
|
||||
def run_system_scan(self):
|
||||
logger.info("Starting scheduled system scan...")
|
||||
db = SessionLocal()
|
||||
try:
|
||||
if not scanner_manager.is_running:
|
||||
job = JobManager.create_job(db, "SCAN")
|
||||
scanner_manager.scan_sources(db, job_id=job.id)
|
||||
except Exception as e:
|
||||
logger.error(f"Scheduled scan failed: {e}")
|
||||
finally:
|
||||
db.close()
|
||||
|
||||
def run_system_archival(self):
|
||||
logger.info("Starting scheduled archival job...")
|
||||
db = SessionLocal()
|
||||
try:
|
||||
# Look for a designated primary target
|
||||
primary_id = self._get_setting(db, "primary_archival_target")
|
||||
|
||||
media = None
|
||||
if primary_id:
|
||||
media = (
|
||||
db.query(models.StorageMedia)
|
||||
.filter(
|
||||
models.StorageMedia.id == int(primary_id),
|
||||
models.StorageMedia.status == "active",
|
||||
)
|
||||
.first()
|
||||
)
|
||||
|
||||
if not media:
|
||||
# Fallback: pick first available 'active' media if no primary set
|
||||
media = (
|
||||
db.query(models.StorageMedia)
|
||||
.filter(models.StorageMedia.status == "active")
|
||||
.first()
|
||||
)
|
||||
|
||||
if media:
|
||||
job = JobManager.create_job(db, "BACKUP")
|
||||
archiver_manager.run_backup(db, media.id, job_id=job.id)
|
||||
else:
|
||||
logger.warning("No suitable media found for scheduled archival")
|
||||
except Exception as e:
|
||||
logger.error(f"Scheduled archival failed: {e}")
|
||||
finally:
|
||||
db.close()
|
||||
|
||||
|
||||
scheduler_manager = SchedulerService()
|
||||
|
||||
@@ -16,6 +16,7 @@ dependencies = [
|
||||
"pydantic-settings>=2.14.0",
|
||||
"pathspec>=1.1.0",
|
||||
"boto3>=1.42.94",
|
||||
"python-multipart>=0.0.26",
|
||||
]
|
||||
|
||||
[build-system]
|
||||
|
||||
Generated
+11
@@ -855,6 +855,15 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/0b/d7/1959b9648791274998a9c3526f6d0ec8fd2233e4d4acce81bbae76b44b2a/python_dotenv-1.2.2-py3-none-any.whl", hash = "sha256:1d8214789a24de455a8b8bd8ae6fe3c6b69a5e3d64aa8a8e5d68e694bbcb285a", size = 22101, upload-time = "2026-03-01T16:00:25.09Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "python-multipart"
|
||||
version = "0.0.26"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/88/71/b145a380824a960ebd60e1014256dbb7d2253f2316ff2d73dfd8928ec2c3/python_multipart-0.0.26.tar.gz", hash = "sha256:08fadc45918cd615e26846437f50c5d6d23304da32c341f289a617127b081f17", size = 43501, upload-time = "2026-04-10T14:09:59.473Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/9a/22/f1925cdda983ab66fc8ec6ec8014b959262747e58bdca26a4e3d1da29d56/python_multipart-0.0.26-py3-none-any.whl", hash = "sha256:c0b169f8c4484c13b0dcf2ef0ec3a4adb255c4b7d18d8e420477d2b1dd03f185", size = 28847, upload-time = "2026-04-10T14:09:58.131Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pyyaml"
|
||||
version = "6.0.3"
|
||||
@@ -1080,6 +1089,7 @@ dependencies = [
|
||||
{ name = "pathspec" },
|
||||
{ name = "prometheus-client" },
|
||||
{ name = "pydantic-settings" },
|
||||
{ name = "python-multipart" },
|
||||
{ name = "sqlalchemy" },
|
||||
{ name = "uvicorn", extra = ["standard"] },
|
||||
]
|
||||
@@ -1103,6 +1113,7 @@ requires-dist = [
|
||||
{ name = "pathspec", specifier = ">=1.1.0" },
|
||||
{ name = "prometheus-client" },
|
||||
{ name = "pydantic-settings", specifier = ">=2.14.0" },
|
||||
{ name = "python-multipart", specifier = ">=0.0.26" },
|
||||
{ name = "sqlalchemy" },
|
||||
{ name = "uvicorn", extras = ["standard"] },
|
||||
]
|
||||
|
||||
@@ -0,0 +1,55 @@
|
||||
# Stage 1: Build Frontend
|
||||
FROM node:20-slim AS frontend-builder
|
||||
WORKDIR /app/frontend
|
||||
COPY frontend/package*.json ./
|
||||
RUN npm install
|
||||
COPY frontend/ ./
|
||||
RUN npm run build
|
||||
|
||||
# Stage 2: Backend & Runtime
|
||||
FROM ghcr.io/astral-sh/uv:python3.13-bookworm-slim AS runtime
|
||||
WORKDIR /app/backend
|
||||
|
||||
# Install system dependencies and build stenc from source
|
||||
RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||
mt-st \
|
||||
tar \
|
||||
sqlite3 \
|
||||
build-essential \
|
||||
autoconf \
|
||||
automake \
|
||||
libtool \
|
||||
pkg-config \
|
||||
pandoc \
|
||||
git \
|
||||
liblzo2-dev \
|
||||
zlib1g-dev \
|
||||
&& git clone https://github.com/scsitape/stenc.git /tmp/stenc \
|
||||
&& cd /tmp/stenc \
|
||||
&& ./autogen.sh \
|
||||
&& ./configure \
|
||||
&& make && make install \
|
||||
&& apt-get purge -y build-essential autoconf automake libtool pkg-config pandoc git \
|
||||
&& apt-get autoremove -y \
|
||||
&& rm -rf /var/lib/apt/lists/* /tmp/stenc
|
||||
|
||||
# Copy backend files
|
||||
COPY README.md /app/
|
||||
COPY backend/pyproject.toml backend/uv.lock ./
|
||||
RUN uv sync --frozen --no-dev
|
||||
|
||||
COPY backend/ ./
|
||||
|
||||
# Copy built frontend assets
|
||||
COPY --from=frontend-builder /app/frontend/build /app/backend/static
|
||||
|
||||
# Setup volumes and permissions
|
||||
RUN mkdir -p /app/data /staging /source_data /restores
|
||||
ENV DATABASE_URL=sqlite:////app/data/tapehoard.db
|
||||
|
||||
# Entrypoint
|
||||
COPY docker/entrypoint.sh /app/entrypoint.sh
|
||||
RUN chmod +x /app/entrypoint.sh
|
||||
|
||||
EXPOSE 8000
|
||||
ENTRYPOINT ["/app/entrypoint.sh"]
|
||||
|
||||
@@ -0,0 +1,23 @@
|
||||
services:
|
||||
tapehoard:
|
||||
build:
|
||||
context: ..
|
||||
dockerfile: docker/Dockerfile
|
||||
container_name: tapehoard
|
||||
ports:
|
||||
- "8000:8000"
|
||||
volumes:
|
||||
# - ./data:/app/data
|
||||
- ./staging:/staging
|
||||
- /Users/alamers/git/:/source_data:ro
|
||||
- /Users/alamers/restore/:/restores
|
||||
- /Users/alamers/backup/:/mnt/HDD-001
|
||||
# - /mnt/storage:/source_data:ro
|
||||
# - /mnt/restores:/restores
|
||||
# devices:
|
||||
# - /dev/nst0:/dev/nst0
|
||||
environment:
|
||||
- PUID=1000
|
||||
- PGID=1000
|
||||
- TZ=UTC
|
||||
restart: unless-stopped
|
||||
|
||||
@@ -0,0 +1,15 @@
|
||||
#!/bin/bash
|
||||
set -e
|
||||
|
||||
echo "Starting TapeHoard..."
|
||||
|
||||
# Change to backend directory
|
||||
cd /app/backend
|
||||
|
||||
# Run database migrations
|
||||
echo "Running database migrations..."
|
||||
uv run alembic upgrade head
|
||||
|
||||
# Start the application
|
||||
echo "Starting application server..."
|
||||
exec uv run uvicorn app.main:app --host 0.0.0.0 --port 8000
|
||||
|
||||
File diff suppressed because one or more lines are too long
@@ -1,8 +1,8 @@
|
||||
// This file is auto-generated by @hey-api/openapi-ts
|
||||
|
||||
import type { Client, Options as Options2, TDataShape } from './client';
|
||||
import { type Client, formDataBodySerializer, type Options as Options2, type TDataShape } from './client';
|
||||
import { client } from './client.gen';
|
||||
import type { AddDirectoryToCartRestoresCartDirectoryPostData, AddDirectoryToCartRestoresCartDirectoryPostErrors, AddDirectoryToCartRestoresCartDirectoryPostResponses, AddToCartRestoresCartFileIdPostData, AddToCartRestoresCartFileIdPostErrors, AddToCartRestoresCartFileIdPostResponses, BrowseIndexInventoryBrowseGetData, BrowseIndexInventoryBrowseGetErrors, BrowseIndexInventoryBrowseGetResponses, BrowsePathSystemBrowseGetData, BrowsePathSystemBrowseGetErrors, BrowsePathSystemBrowseGetResponses, CancelJobSystemJobsJobIdCancelPostData, CancelJobSystemJobsJobIdCancelPostErrors, CancelJobSystemJobsJobIdCancelPostResponses, ClearCartRestoresCartClearPostData, ClearCartRestoresCartClearPostResponses, DeleteMediaInventoryMediaMediaIdDeleteData, DeleteMediaInventoryMediaMediaIdDeleteErrors, DeleteMediaInventoryMediaMediaIdDeleteResponses, GetDashboardStatsSystemDashboardStatsGetData, GetDashboardStatsSystemDashboardStatsGetResponses, GetIndexTreeInventoryTreeGetData, GetIndexTreeInventoryTreeGetErrors, GetIndexTreeInventoryTreeGetResponses, GetItemMetadataInventoryMetadataGetData, GetItemMetadataInventoryMetadataGetErrors, GetItemMetadataInventoryMetadataGetResponses, GetManifestRestoresManifestGetData, GetManifestRestoresManifestGetResponses, GetScanStatusSystemScanStatusGetData, GetScanStatusSystemScanStatusGetResponses, GetSettingsSystemSettingsGetData, GetSettingsSystemSettingsGetResponses, GetTreeSystemTreeGetData, GetTreeSystemTreeGetErrors, GetTreeSystemTreeGetResponses, ListBackupsBackupsGetData, ListBackupsBackupsGetResponses, ListCartRestoresCartGetData, ListCartRestoresCartGetResponses, ListInventoryInventoryGetData, ListInventoryInventoryGetResponses, ListJobsSystemJobsGetData, ListJobsSystemJobsGetErrors, ListJobsSystemJobsGetResponses, ListMediaInventoryMediaGetData, ListMediaInventoryMediaGetResponses, ReadRootGetData, ReadRootGetResponses, RegisterMediaInventoryMediaPostData, RegisterMediaInventoryMediaPostErrors, RegisterMediaInventoryMediaPostResponses, 
RemoveFromCartRestoresCartItemIdDeleteData, RemoveFromCartRestoresCartItemIdDeleteErrors, RemoveFromCartRestoresCartItemIdDeleteResponses, StreamJobsSystemJobsStreamGetData, StreamJobsSystemJobsStreamGetResponses, TrackBatchSystemTrackBatchPostData, TrackBatchSystemTrackBatchPostErrors, TrackBatchSystemTrackBatchPostResponses, TriggerBackupBackupsTriggerMediaIdPostData, TriggerBackupBackupsTriggerMediaIdPostErrors, TriggerBackupBackupsTriggerMediaIdPostResponses, TriggerScanSystemScanPostData, TriggerScanSystemScanPostResponses, UpdateMediaInventoryMediaMediaIdPatchData, UpdateMediaInventoryMediaMediaIdPatchErrors, UpdateMediaInventoryMediaMediaIdPatchResponses, UpdateSettingSystemSettingsPostData, UpdateSettingSystemSettingsPostErrors, UpdateSettingSystemSettingsPostResponses } from './types.gen';
|
||||
import type { AddDirectoryToCartRestoresCartDirectoryPostData, AddDirectoryToCartRestoresCartDirectoryPostErrors, AddDirectoryToCartRestoresCartDirectoryPostResponses, AddToCartRestoresCartFileIdPostData, AddToCartRestoresCartFileIdPostErrors, AddToCartRestoresCartFileIdPostResponses, BrowseCartRestoresCartBrowseGetData, BrowseCartRestoresCartBrowseGetErrors, BrowseCartRestoresCartBrowseGetResponses, BrowseIndexInventoryBrowseGetData, BrowseIndexInventoryBrowseGetErrors, BrowseIndexInventoryBrowseGetResponses, BrowsePathSystemBrowseGetData, BrowsePathSystemBrowseGetErrors, BrowsePathSystemBrowseGetResponses, CancelJobSystemJobsJobIdCancelPostData, CancelJobSystemJobsJobIdCancelPostErrors, CancelJobSystemJobsJobIdCancelPostResponses, ClearCartRestoresCartClearPostData, ClearCartRestoresCartClearPostResponses, DeleteMediaInventoryMediaMediaIdDeleteData, DeleteMediaInventoryMediaMediaIdDeleteErrors, DeleteMediaInventoryMediaMediaIdDeleteResponses, ExportDatabaseSystemDatabaseExportGetData, ExportDatabaseSystemDatabaseExportGetResponses, GetCartTreeRestoresCartTreeGetData, GetCartTreeRestoresCartTreeGetErrors, GetCartTreeRestoresCartTreeGetResponses, GetDashboardStatsSystemDashboardStatsGetData, GetDashboardStatsSystemDashboardStatsGetResponses, GetIndexTreeInventoryTreeGetData, GetIndexTreeInventoryTreeGetErrors, GetIndexTreeInventoryTreeGetResponses, GetItemMetadataInventoryMetadataGetData, GetItemMetadataInventoryMetadataGetErrors, GetItemMetadataInventoryMetadataGetResponses, GetManifestRestoresManifestGetData, GetManifestRestoresManifestGetResponses, GetScanStatusSystemScanStatusGetData, GetScanStatusSystemScanStatusGetResponses, GetSettingsSystemSettingsGetData, GetSettingsSystemSettingsGetResponses, GetTreeSystemTreeGetData, GetTreeSystemTreeGetErrors, GetTreeSystemTreeGetResponses, HealthCheckHealthGetData, HealthCheckHealthGetResponses, ImportDatabaseSystemDatabaseImportPostData, ImportDatabaseSystemDatabaseImportPostErrors, 
ImportDatabaseSystemDatabaseImportPostResponses, InitializeMediaInventoryMediaMediaIdInitializePostData, InitializeMediaInventoryMediaMediaIdInitializePostErrors, InitializeMediaInventoryMediaMediaIdInitializePostResponses, ListBackupsBackupsGetData, ListBackupsBackupsGetResponses, ListCartRestoresCartGetData, ListCartRestoresCartGetResponses, ListInventoryInventoryGetData, ListInventoryInventoryGetResponses, ListJobsSystemJobsGetData, ListJobsSystemJobsGetErrors, ListJobsSystemJobsGetResponses, ListMediaInventoryMediaGetData, ListMediaInventoryMediaGetResponses, RegisterMediaInventoryMediaPostData, RegisterMediaInventoryMediaPostErrors, RegisterMediaInventoryMediaPostResponses, RemoveFromCartRestoresCartItemIdDeleteData, RemoveFromCartRestoresCartItemIdDeleteErrors, RemoveFromCartRestoresCartItemIdDeleteResponses, SearchIndexInventorySearchGetData, SearchIndexInventorySearchGetErrors, SearchIndexInventorySearchGetResponses, SearchSystemSystemSearchGetData, SearchSystemSystemSearchGetErrors, SearchSystemSystemSearchGetResponses, StreamJobsSystemJobsStreamGetData, StreamJobsSystemJobsStreamGetResponses, TestNotificationSystemNotificationsTestPostData, TestNotificationSystemNotificationsTestPostErrors, TestNotificationSystemNotificationsTestPostResponses, TrackBatchSystemTrackBatchPostData, TrackBatchSystemTrackBatchPostErrors, TrackBatchSystemTrackBatchPostResponses, TriggerBackupBackupsTriggerMediaIdPostData, TriggerBackupBackupsTriggerMediaIdPostErrors, TriggerBackupBackupsTriggerMediaIdPostResponses, TriggerRestoreRestoresTriggerPostData, TriggerRestoreRestoresTriggerPostErrors, TriggerRestoreRestoresTriggerPostResponses, TriggerScanSystemScanPostData, TriggerScanSystemScanPostResponses, UpdateMediaInventoryMediaMediaIdPatchData, UpdateMediaInventoryMediaMediaIdPatchErrors, UpdateMediaInventoryMediaMediaIdPatchResponses, UpdateSettingSystemSettingsPostData, UpdateSettingSystemSettingsPostErrors, UpdateSettingSystemSettingsPostResponses } from './types.gen';
|
||||
|
||||
export type Options<TData extends TDataShape = TDataShape, ThrowOnError extends boolean = boolean, TResponse = unknown> = Options2<TData, ThrowOnError, TResponse> & {
|
||||
/**
|
||||
@@ -53,6 +53,11 @@ export const getScanStatusSystemScanStatusGet = <ThrowOnError extends boolean =
|
||||
*/
|
||||
export const browsePathSystemBrowseGet = <ThrowOnError extends boolean = false>(options?: Options<BrowsePathSystemBrowseGetData, ThrowOnError>) => (options?.client ?? client).get<BrowsePathSystemBrowseGetResponses, BrowsePathSystemBrowseGetErrors, ThrowOnError>({ url: '/system/browse', ...options });
|
||||
|
||||
/**
|
||||
* Search System
|
||||
*/
|
||||
export const searchSystemSystemSearchGet = <ThrowOnError extends boolean = false>(options: Options<SearchSystemSystemSearchGetData, ThrowOnError>) => (options.client ?? client).get<SearchSystemSystemSearchGetResponses, SearchSystemSystemSearchGetErrors, ThrowOnError>({ url: '/system/search', ...options });
|
||||
|
||||
/**
|
||||
* Track Batch
|
||||
*/
|
||||
@@ -82,6 +87,36 @@ export const updateSettingSystemSettingsPost = <ThrowOnError extends boolean = f
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* Test Notification
|
||||
*/
|
||||
export const testNotificationSystemNotificationsTestPost = <ThrowOnError extends boolean = false>(options: Options<TestNotificationSystemNotificationsTestPostData, ThrowOnError>) => (options.client ?? client).post<TestNotificationSystemNotificationsTestPostResponses, TestNotificationSystemNotificationsTestPostErrors, ThrowOnError>({
|
||||
url: '/system/notifications/test',
|
||||
...options,
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
...options.headers
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* Export Database
|
||||
*/
|
||||
export const exportDatabaseSystemDatabaseExportGet = <ThrowOnError extends boolean = false>(options?: Options<ExportDatabaseSystemDatabaseExportGetData, ThrowOnError>) => (options?.client ?? client).get<ExportDatabaseSystemDatabaseExportGetResponses, unknown, ThrowOnError>({ url: '/system/database/export', ...options });
|
||||
|
||||
/**
|
||||
* Import Database
|
||||
*/
|
||||
export const importDatabaseSystemDatabaseImportPost = <ThrowOnError extends boolean = false>(options: Options<ImportDatabaseSystemDatabaseImportPostData, ThrowOnError>) => (options.client ?? client).post<ImportDatabaseSystemDatabaseImportPostResponses, ImportDatabaseSystemDatabaseImportPostErrors, ThrowOnError>({
|
||||
...formDataBodySerializer,
|
||||
url: '/system/database/import',
|
||||
...options,
|
||||
headers: {
|
||||
'Content-Type': null,
|
||||
...options.headers
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* Get Tree
|
||||
*/
|
||||
@@ -121,11 +156,21 @@ export const updateMediaInventoryMediaMediaIdPatch = <ThrowOnError extends boole
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* Initialize Media
|
||||
*/
|
||||
export const initializeMediaInventoryMediaMediaIdInitializePost = <ThrowOnError extends boolean = false>(options: Options<InitializeMediaInventoryMediaMediaIdInitializePostData, ThrowOnError>) => (options.client ?? client).post<InitializeMediaInventoryMediaMediaIdInitializePostResponses, InitializeMediaInventoryMediaMediaIdInitializePostErrors, ThrowOnError>({ url: '/inventory/media/{media_id}/initialize', ...options });
|
||||
|
||||
/**
|
||||
* Browse Index
|
||||
*/
|
||||
export const browseIndexInventoryBrowseGet = <ThrowOnError extends boolean = false>(options?: Options<BrowseIndexInventoryBrowseGetData, ThrowOnError>) => (options?.client ?? client).get<BrowseIndexInventoryBrowseGetResponses, BrowseIndexInventoryBrowseGetErrors, ThrowOnError>({ url: '/inventory/browse', ...options });
|
||||
|
||||
/**
|
||||
* Search Index
|
||||
*/
|
||||
export const searchIndexInventorySearchGet = <ThrowOnError extends boolean = false>(options: Options<SearchIndexInventorySearchGetData, ThrowOnError>) => (options.client ?? client).get<SearchIndexInventorySearchGetResponses, SearchIndexInventorySearchGetErrors, ThrowOnError>({ url: '/inventory/search', ...options });
|
||||
|
||||
/**
|
||||
* Get Index Tree
|
||||
*/
|
||||
@@ -151,15 +196,37 @@ export const triggerBackupBackupsTriggerMediaIdPost = <ThrowOnError extends bool
|
||||
*/
|
||||
export const listBackupsBackupsGet = <ThrowOnError extends boolean = false>(options?: Options<ListBackupsBackupsGetData, ThrowOnError>) => (options?.client ?? client).get<ListBackupsBackupsGetResponses, unknown, ThrowOnError>({ url: '/backups/', ...options });
|
||||
|
||||
/**
|
||||
* Trigger Restore
|
||||
*/
|
||||
export const triggerRestoreRestoresTriggerPost = <ThrowOnError extends boolean = false>(options: Options<TriggerRestoreRestoresTriggerPostData, ThrowOnError>) => (options.client ?? client).post<TriggerRestoreRestoresTriggerPostResponses, TriggerRestoreRestoresTriggerPostErrors, ThrowOnError>({
|
||||
url: '/restores/trigger',
|
||||
...options,
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
...options.headers
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* Browse Cart
|
||||
*/
|
||||
export const browseCartRestoresCartBrowseGet = <ThrowOnError extends boolean = false>(options?: Options<BrowseCartRestoresCartBrowseGetData, ThrowOnError>) => (options?.client ?? client).get<BrowseCartRestoresCartBrowseGetResponses, BrowseCartRestoresCartBrowseGetErrors, ThrowOnError>({ url: '/restores/cart/browse', ...options });
|
||||
|
||||
/**
|
||||
* Get Cart Tree
|
||||
*/
|
||||
export const getCartTreeRestoresCartTreeGet = <ThrowOnError extends boolean = false>(options?: Options<GetCartTreeRestoresCartTreeGetData, ThrowOnError>) => (options?.client ?? client).get<GetCartTreeRestoresCartTreeGetResponses, GetCartTreeRestoresCartTreeGetErrors, ThrowOnError>({ url: '/restores/cart/tree', ...options });
|
||||
|
||||
/**
|
||||
* List Cart
|
||||
*/
|
||||
export const listCartRestoresCartGet = <ThrowOnError extends boolean = false>(options?: Options<ListCartRestoresCartGetData, ThrowOnError>) => (options?.client ?? client).get<ListCartRestoresCartGetResponses, unknown, ThrowOnError>({ url: '/restores/cart', ...options });
|
||||
|
||||
/**
|
||||
* Add To Cart
|
||||
* Clear Cart
|
||||
*/
|
||||
export const addToCartRestoresCartFileIdPost = <ThrowOnError extends boolean = false>(options: Options<AddToCartRestoresCartFileIdPostData, ThrowOnError>) => (options.client ?? client).post<AddToCartRestoresCartFileIdPostResponses, AddToCartRestoresCartFileIdPostErrors, ThrowOnError>({ url: '/restores/cart/{file_id}', ...options });
|
||||
export const clearCartRestoresCartClearPost = <ThrowOnError extends boolean = false>(options?: Options<ClearCartRestoresCartClearPostData, ThrowOnError>) => (options?.client ?? client).post<ClearCartRestoresCartClearPostResponses, unknown, ThrowOnError>({ url: '/restores/cart/clear', ...options });
|
||||
|
||||
/**
|
||||
* Add Directory To Cart
|
||||
@@ -173,22 +240,22 @@ export const addDirectoryToCartRestoresCartDirectoryPost = <ThrowOnError extends
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* Add To Cart
|
||||
*/
|
||||
export const addToCartRestoresCartFileIdPost = <ThrowOnError extends boolean = false>(options: Options<AddToCartRestoresCartFileIdPostData, ThrowOnError>) => (options.client ?? client).post<AddToCartRestoresCartFileIdPostResponses, AddToCartRestoresCartFileIdPostErrors, ThrowOnError>({ url: '/restores/cart/{file_id}', ...options });
|
||||
|
||||
/**
|
||||
* Remove From Cart
|
||||
*/
|
||||
export const removeFromCartRestoresCartItemIdDelete = <ThrowOnError extends boolean = false>(options: Options<RemoveFromCartRestoresCartItemIdDeleteData, ThrowOnError>) => (options.client ?? client).delete<RemoveFromCartRestoresCartItemIdDeleteResponses, RemoveFromCartRestoresCartItemIdDeleteErrors, ThrowOnError>({ url: '/restores/cart/{item_id}', ...options });
|
||||
|
||||
/**
|
||||
* Clear Cart
|
||||
*/
|
||||
export const clearCartRestoresCartClearPost = <ThrowOnError extends boolean = false>(options?: Options<ClearCartRestoresCartClearPostData, ThrowOnError>) => (options?.client ?? client).post<ClearCartRestoresCartClearPostResponses, unknown, ThrowOnError>({ url: '/restores/cart/clear', ...options });
|
||||
|
||||
/**
|
||||
* Get Manifest
|
||||
*/
|
||||
export const getManifestRestoresManifestGet = <ThrowOnError extends boolean = false>(options?: Options<GetManifestRestoresManifestGetData, ThrowOnError>) => (options?.client ?? client).get<GetManifestRestoresManifestGetResponses, unknown, ThrowOnError>({ url: '/restores/manifest', ...options });
|
||||
|
||||
/**
|
||||
* Read Root
|
||||
* Health Check
|
||||
*/
|
||||
export const readRootGet = <ThrowOnError extends boolean = false>(options?: Options<ReadRootGetData, ThrowOnError>) => (options?.client ?? client).get<ReadRootGetResponses, unknown, ThrowOnError>({ url: '/', ...options });
|
||||
export const healthCheckHealthGet = <ThrowOnError extends boolean = false>(options?: Options<HealthCheckHealthGetData, ThrowOnError>) => (options?.client ?? client).get<HealthCheckHealthGetResponses, unknown, ThrowOnError>({ url: '/health', ...options });
|
||||
|
||||
@@ -18,6 +18,42 @@ export type BatchTrackRequest = {
|
||||
untracks?: Array<string>;
|
||||
};
|
||||
|
||||
/**
|
||||
* Body_import_database_system_database_import_post
|
||||
*/
|
||||
export type BodyImportDatabaseSystemDatabaseImportPost = {
|
||||
/**
|
||||
* File
|
||||
*/
|
||||
file: Blob | File;
|
||||
};
|
||||
|
||||
/**
|
||||
* CartFileItemSchema
|
||||
*/
|
||||
export type CartFileItemSchema = {
|
||||
/**
|
||||
* Name
|
||||
*/
|
||||
name: string;
|
||||
/**
|
||||
* Path
|
||||
*/
|
||||
path: string;
|
||||
/**
|
||||
* Type
|
||||
*/
|
||||
type: string;
|
||||
/**
|
||||
* Size
|
||||
*/
|
||||
size?: number | null;
|
||||
/**
|
||||
* Media
|
||||
*/
|
||||
media?: Array<string>;
|
||||
};
|
||||
|
||||
/**
|
||||
* CartItemSchema
|
||||
*/
|
||||
@@ -40,6 +76,24 @@ export type CartItemSchema = {
|
||||
media_identifiers: Array<string>;
|
||||
};
|
||||
|
||||
/**
|
||||
* CartTreeNodeSchema
|
||||
*/
|
||||
export type CartTreeNodeSchema = {
|
||||
/**
|
||||
* Name
|
||||
*/
|
||||
name: string;
|
||||
/**
|
||||
* Path
|
||||
*/
|
||||
path: string;
|
||||
/**
|
||||
* Has Children
|
||||
*/
|
||||
has_children?: boolean;
|
||||
};
|
||||
|
||||
/**
|
||||
* DashboardStatsSchema
|
||||
*/
|
||||
@@ -170,6 +224,18 @@ export type ItemMetadataSchema = {
|
||||
* Child Count
|
||||
*/
|
||||
child_count?: number | null;
|
||||
/**
|
||||
* Vulnerable
|
||||
*/
|
||||
vulnerable?: boolean;
|
||||
/**
|
||||
* Selected
|
||||
*/
|
||||
selected?: boolean;
|
||||
/**
|
||||
* Indeterminate
|
||||
*/
|
||||
indeterminate?: boolean;
|
||||
};
|
||||
|
||||
/**
|
||||
@@ -350,6 +416,16 @@ export type RestoreManifestSchema = {
|
||||
media_required: Array<ManifestMediaRequirement>;
|
||||
};
|
||||
|
||||
/**
|
||||
* RestoreRequest
|
||||
*/
|
||||
export type RestoreRequest = {
|
||||
/**
|
||||
* Destination
|
||||
*/
|
||||
destination: string;
|
||||
};
|
||||
|
||||
/**
|
||||
* ScanStatusSchema
|
||||
*/
|
||||
@@ -394,6 +470,16 @@ export type SettingSchema = {
|
||||
value: string;
|
||||
};
|
||||
|
||||
/**
|
||||
* TestNotificationRequest
|
||||
*/
|
||||
export type TestNotificationRequest = {
|
||||
/**
|
||||
* Url
|
||||
*/
|
||||
url: string;
|
||||
};
|
||||
|
||||
/**
|
||||
* TreeNodeSchema
|
||||
*/
|
||||
@@ -468,6 +554,18 @@ export type AppApiInventoryFileItemSchema = {
|
||||
* Media
|
||||
*/
|
||||
media?: Array<string>;
|
||||
/**
|
||||
* Vulnerable
|
||||
*/
|
||||
vulnerable?: boolean;
|
||||
/**
|
||||
* Selected
|
||||
*/
|
||||
selected?: boolean;
|
||||
/**
|
||||
* Indeterminate
|
||||
*/
|
||||
indeterminate?: boolean;
|
||||
};
|
||||
|
||||
/**
|
||||
@@ -656,6 +754,42 @@ export type BrowsePathSystemBrowseGetResponses = {
|
||||
|
||||
export type BrowsePathSystemBrowseGetResponse = BrowsePathSystemBrowseGetResponses[keyof BrowsePathSystemBrowseGetResponses];
|
||||
|
||||
export type SearchSystemSystemSearchGetData = {
|
||||
body?: never;
|
||||
path?: never;
|
||||
query: {
|
||||
/**
|
||||
* Q
|
||||
*/
|
||||
q: string;
|
||||
/**
|
||||
* Include Ignored
|
||||
*/
|
||||
include_ignored?: boolean;
|
||||
};
|
||||
url: '/system/search';
|
||||
};
|
||||
|
||||
export type SearchSystemSystemSearchGetErrors = {
|
||||
/**
|
||||
* Validation Error
|
||||
*/
|
||||
422: HttpValidationError;
|
||||
};
|
||||
|
||||
export type SearchSystemSystemSearchGetError = SearchSystemSystemSearchGetErrors[keyof SearchSystemSystemSearchGetErrors];
|
||||
|
||||
export type SearchSystemSystemSearchGetResponses = {
|
||||
/**
|
||||
* Response Search System System Search Get
|
||||
*
|
||||
* Successful Response
|
||||
*/
|
||||
200: Array<AppApiSystemFileItemSchema>;
|
||||
};
|
||||
|
||||
export type SearchSystemSystemSearchGetResponse = SearchSystemSystemSearchGetResponses[keyof SearchSystemSystemSearchGetResponses];
|
||||
|
||||
export type TrackBatchSystemTrackBatchPostData = {
|
||||
body: BatchTrackRequest;
|
||||
path?: never;
|
||||
@@ -722,6 +856,66 @@ export type UpdateSettingSystemSettingsPostResponses = {
|
||||
200: unknown;
|
||||
};
|
||||
|
||||
export type TestNotificationSystemNotificationsTestPostData = {
|
||||
body: TestNotificationRequest;
|
||||
path?: never;
|
||||
query?: never;
|
||||
url: '/system/notifications/test';
|
||||
};
|
||||
|
||||
export type TestNotificationSystemNotificationsTestPostErrors = {
|
||||
/**
|
||||
* Validation Error
|
||||
*/
|
||||
422: HttpValidationError;
|
||||
};
|
||||
|
||||
export type TestNotificationSystemNotificationsTestPostError = TestNotificationSystemNotificationsTestPostErrors[keyof TestNotificationSystemNotificationsTestPostErrors];
|
||||
|
||||
export type TestNotificationSystemNotificationsTestPostResponses = {
|
||||
/**
|
||||
* Successful Response
|
||||
*/
|
||||
200: unknown;
|
||||
};
|
||||
|
||||
export type ExportDatabaseSystemDatabaseExportGetData = {
|
||||
body?: never;
|
||||
path?: never;
|
||||
query?: never;
|
||||
url: '/system/database/export';
|
||||
};
|
||||
|
||||
export type ExportDatabaseSystemDatabaseExportGetResponses = {
|
||||
/**
|
||||
* Successful Response
|
||||
*/
|
||||
200: unknown;
|
||||
};
|
||||
|
||||
export type ImportDatabaseSystemDatabaseImportPostData = {
|
||||
body: BodyImportDatabaseSystemDatabaseImportPost;
|
||||
path?: never;
|
||||
query?: never;
|
||||
url: '/system/database/import';
|
||||
};
|
||||
|
||||
export type ImportDatabaseSystemDatabaseImportPostErrors = {
|
||||
/**
|
||||
* Validation Error
|
||||
*/
|
||||
422: HttpValidationError;
|
||||
};
|
||||
|
||||
export type ImportDatabaseSystemDatabaseImportPostError = ImportDatabaseSystemDatabaseImportPostErrors[keyof ImportDatabaseSystemDatabaseImportPostErrors];
|
||||
|
||||
export type ImportDatabaseSystemDatabaseImportPostResponses = {
|
||||
/**
|
||||
* Successful Response
|
||||
*/
|
||||
200: unknown;
|
||||
};
|
||||
|
||||
export type GetTreeSystemTreeGetData = {
|
||||
body?: never;
|
||||
path?: never;
|
||||
@@ -851,6 +1045,34 @@ export type UpdateMediaInventoryMediaMediaIdPatchResponses = {
|
||||
|
||||
export type UpdateMediaInventoryMediaMediaIdPatchResponse = UpdateMediaInventoryMediaMediaIdPatchResponses[keyof UpdateMediaInventoryMediaMediaIdPatchResponses];
|
||||
|
||||
export type InitializeMediaInventoryMediaMediaIdInitializePostData = {
|
||||
body?: never;
|
||||
path: {
|
||||
/**
|
||||
* Media Id
|
||||
*/
|
||||
media_id: number;
|
||||
};
|
||||
query?: never;
|
||||
url: '/inventory/media/{media_id}/initialize';
|
||||
};
|
||||
|
||||
export type InitializeMediaInventoryMediaMediaIdInitializePostErrors = {
|
||||
/**
|
||||
* Validation Error
|
||||
*/
|
||||
422: HttpValidationError;
|
||||
};
|
||||
|
||||
export type InitializeMediaInventoryMediaMediaIdInitializePostError = InitializeMediaInventoryMediaMediaIdInitializePostErrors[keyof InitializeMediaInventoryMediaMediaIdInitializePostErrors];
|
||||
|
||||
export type InitializeMediaInventoryMediaMediaIdInitializePostResponses = {
|
||||
/**
|
||||
* Successful Response
|
||||
*/
|
||||
200: unknown;
|
||||
};
|
||||
|
||||
export type BrowseIndexInventoryBrowseGetData = {
|
||||
body?: never;
|
||||
path?: never;
|
||||
@@ -887,6 +1109,42 @@ export type BrowseIndexInventoryBrowseGetResponses = {
|
||||
|
||||
export type BrowseIndexInventoryBrowseGetResponse = BrowseIndexInventoryBrowseGetResponses[keyof BrowseIndexInventoryBrowseGetResponses];
|
||||
|
||||
export type SearchIndexInventorySearchGetData = {
|
||||
body?: never;
|
||||
path?: never;
|
||||
query: {
|
||||
/**
|
||||
* Q
|
||||
*/
|
||||
q: string;
|
||||
/**
|
||||
* Include Ignored
|
||||
*/
|
||||
include_ignored?: boolean;
|
||||
};
|
||||
url: '/inventory/search';
|
||||
};
|
||||
|
||||
export type SearchIndexInventorySearchGetErrors = {
|
||||
/**
|
||||
* Validation Error
|
||||
*/
|
||||
422: HttpValidationError;
|
||||
};
|
||||
|
||||
export type SearchIndexInventorySearchGetError = SearchIndexInventorySearchGetErrors[keyof SearchIndexInventorySearchGetErrors];
|
||||
|
||||
export type SearchIndexInventorySearchGetResponses = {
|
||||
/**
|
||||
* Response Search Index Inventory Search Get
|
||||
*
|
||||
* Successful Response
|
||||
*/
|
||||
200: Array<AppApiInventoryFileItemSchema>;
|
||||
};
|
||||
|
||||
export type SearchIndexInventorySearchGetResponse = SearchIndexInventorySearchGetResponses[keyof SearchIndexInventorySearchGetResponses];
|
||||
|
||||
export type GetIndexTreeInventoryTreeGetData = {
|
||||
body?: never;
|
||||
path?: never;
|
||||
@@ -1009,6 +1267,93 @@ export type ListBackupsBackupsGetResponses = {
|
||||
200: unknown;
|
||||
};
|
||||
|
||||
export type TriggerRestoreRestoresTriggerPostData = {
|
||||
body: RestoreRequest;
|
||||
path?: never;
|
||||
query?: never;
|
||||
url: '/restores/trigger';
|
||||
};
|
||||
|
||||
export type TriggerRestoreRestoresTriggerPostErrors = {
|
||||
/**
|
||||
* Validation Error
|
||||
*/
|
||||
422: HttpValidationError;
|
||||
};
|
||||
|
||||
export type TriggerRestoreRestoresTriggerPostError = TriggerRestoreRestoresTriggerPostErrors[keyof TriggerRestoreRestoresTriggerPostErrors];
|
||||
|
||||
export type TriggerRestoreRestoresTriggerPostResponses = {
|
||||
/**
|
||||
* Successful Response
|
||||
*/
|
||||
200: unknown;
|
||||
};
|
||||
|
||||
export type BrowseCartRestoresCartBrowseGetData = {
|
||||
body?: never;
|
||||
path?: never;
|
||||
query?: {
|
||||
/**
|
||||
* Path
|
||||
*/
|
||||
path?: string | null;
|
||||
};
|
||||
url: '/restores/cart/browse';
|
||||
};
|
||||
|
||||
export type BrowseCartRestoresCartBrowseGetErrors = {
|
||||
/**
|
||||
* Validation Error
|
||||
*/
|
||||
422: HttpValidationError;
|
||||
};
|
||||
|
||||
export type BrowseCartRestoresCartBrowseGetError = BrowseCartRestoresCartBrowseGetErrors[keyof BrowseCartRestoresCartBrowseGetErrors];
|
||||
|
||||
export type BrowseCartRestoresCartBrowseGetResponses = {
|
||||
/**
|
||||
* Response Browse Cart Restores Cart Browse Get
|
||||
*
|
||||
* Successful Response
|
||||
*/
|
||||
200: Array<CartFileItemSchema>;
|
||||
};
|
||||
|
||||
export type BrowseCartRestoresCartBrowseGetResponse = BrowseCartRestoresCartBrowseGetResponses[keyof BrowseCartRestoresCartBrowseGetResponses];
|
||||
|
||||
export type GetCartTreeRestoresCartTreeGetData = {
|
||||
body?: never;
|
||||
path?: never;
|
||||
query?: {
|
||||
/**
|
||||
* Path
|
||||
*/
|
||||
path?: string | null;
|
||||
};
|
||||
url: '/restores/cart/tree';
|
||||
};
|
||||
|
||||
export type GetCartTreeRestoresCartTreeGetErrors = {
|
||||
/**
|
||||
* Validation Error
|
||||
*/
|
||||
422: HttpValidationError;
|
||||
};
|
||||
|
||||
export type GetCartTreeRestoresCartTreeGetError = GetCartTreeRestoresCartTreeGetErrors[keyof GetCartTreeRestoresCartTreeGetErrors];
|
||||
|
||||
export type GetCartTreeRestoresCartTreeGetResponses = {
|
||||
/**
|
||||
* Response Get Cart Tree Restores Cart Tree Get
|
||||
*
|
||||
* Successful Response
|
||||
*/
|
||||
200: Array<CartTreeNodeSchema>;
|
||||
};
|
||||
|
||||
export type GetCartTreeRestoresCartTreeGetResponse = GetCartTreeRestoresCartTreeGetResponses[keyof GetCartTreeRestoresCartTreeGetResponses];
|
||||
|
||||
export type ListCartRestoresCartGetData = {
|
||||
body?: never;
|
||||
path?: never;
|
||||
@@ -1027,28 +1372,14 @@ export type ListCartRestoresCartGetResponses = {
|
||||
|
||||
export type ListCartRestoresCartGetResponse = ListCartRestoresCartGetResponses[keyof ListCartRestoresCartGetResponses];
|
||||
|
||||
export type AddToCartRestoresCartFileIdPostData = {
|
||||
export type ClearCartRestoresCartClearPostData = {
|
||||
body?: never;
|
||||
path: {
|
||||
/**
|
||||
* File Id
|
||||
*/
|
||||
file_id: number;
|
||||
};
|
||||
path?: never;
|
||||
query?: never;
|
||||
url: '/restores/cart/{file_id}';
|
||||
url: '/restores/cart/clear';
|
||||
};
|
||||
|
||||
export type AddToCartRestoresCartFileIdPostErrors = {
|
||||
/**
|
||||
* Validation Error
|
||||
*/
|
||||
422: HttpValidationError;
|
||||
};
|
||||
|
||||
export type AddToCartRestoresCartFileIdPostError = AddToCartRestoresCartFileIdPostErrors[keyof AddToCartRestoresCartFileIdPostErrors];
|
||||
|
||||
export type AddToCartRestoresCartFileIdPostResponses = {
|
||||
export type ClearCartRestoresCartClearPostResponses = {
|
||||
/**
|
||||
* Successful Response
|
||||
*/
|
||||
@@ -1078,6 +1409,34 @@ export type AddDirectoryToCartRestoresCartDirectoryPostResponses = {
|
||||
200: unknown;
|
||||
};
|
||||
|
||||
export type AddToCartRestoresCartFileIdPostData = {
|
||||
body?: never;
|
||||
path: {
|
||||
/**
|
||||
* File Id
|
||||
*/
|
||||
file_id: number;
|
||||
};
|
||||
query?: never;
|
||||
url: '/restores/cart/{file_id}';
|
||||
};
|
||||
|
||||
export type AddToCartRestoresCartFileIdPostErrors = {
|
||||
/**
|
||||
* Validation Error
|
||||
*/
|
||||
422: HttpValidationError;
|
||||
};
|
||||
|
||||
export type AddToCartRestoresCartFileIdPostError = AddToCartRestoresCartFileIdPostErrors[keyof AddToCartRestoresCartFileIdPostErrors];
|
||||
|
||||
export type AddToCartRestoresCartFileIdPostResponses = {
|
||||
/**
|
||||
* Successful Response
|
||||
*/
|
||||
200: unknown;
|
||||
};
|
||||
|
||||
export type RemoveFromCartRestoresCartItemIdDeleteData = {
|
||||
body?: never;
|
||||
path: {
|
||||
@@ -1106,20 +1465,6 @@ export type RemoveFromCartRestoresCartItemIdDeleteResponses = {
|
||||
200: unknown;
|
||||
};
|
||||
|
||||
export type ClearCartRestoresCartClearPostData = {
|
||||
body?: never;
|
||||
path?: never;
|
||||
query?: never;
|
||||
url: '/restores/cart/clear';
|
||||
};
|
||||
|
||||
export type ClearCartRestoresCartClearPostResponses = {
|
||||
/**
|
||||
* Successful Response
|
||||
*/
|
||||
200: unknown;
|
||||
};
|
||||
|
||||
export type GetManifestRestoresManifestGetData = {
|
||||
body?: never;
|
||||
path?: never;
|
||||
@@ -1136,14 +1481,14 @@ export type GetManifestRestoresManifestGetResponses = {
|
||||
|
||||
export type GetManifestRestoresManifestGetResponse = GetManifestRestoresManifestGetResponses[keyof GetManifestRestoresManifestGetResponses];
|
||||
|
||||
export type ReadRootGetData = {
|
||||
export type HealthCheckHealthGetData = {
|
||||
body?: never;
|
||||
path?: never;
|
||||
query?: never;
|
||||
url: '/';
|
||||
url: '/health';
|
||||
};
|
||||
|
||||
export type ReadRootGetResponses = {
|
||||
export type HealthCheckHealthGetResponses = {
|
||||
/**
|
||||
* Successful Response
|
||||
*/
|
||||
|
||||
@@ -51,7 +51,7 @@
|
||||
<div class="flex-1">
|
||||
<div class="flex justify-between items-center mb-1">
|
||||
<span class="text-xs font-black uppercase tracking-widest text-text-primary">System Scanner Active</span>
|
||||
<span class="text-sm font-black mono text-blue-400">{scanProgress}%</span>
|
||||
<span class="text-sm font-black mono text-blue-400">INDEXING</span>
|
||||
</div>
|
||||
<div class="flex items-center gap-2">
|
||||
<div class="w-1.5 h-1.5 rounded-full bg-blue-500 animate-pulse"></div>
|
||||
@@ -61,18 +61,14 @@
|
||||
</div>
|
||||
|
||||
<div class="space-y-4">
|
||||
<div class="w-full bg-bg-primary h-2.5 rounded-full overflow-hidden shadow-inner border border-white/5">
|
||||
<div class="bg-gradient-to-r from-blue-600 to-blue-400 h-full transition-all duration-1000 shadow-[0_0_15px_rgba(59,130,246,0.4)]" style="width: {scanProgress}%"></div>
|
||||
</div>
|
||||
|
||||
<div class="flex flex-col gap-2">
|
||||
<div class="flex justify-between items-center text-[10px] font-black uppercase tracking-widest text-text-secondary">
|
||||
<span class="flex items-center gap-2">
|
||||
<Activity size={12} class="opacity-50" />
|
||||
Throughput
|
||||
Progress
|
||||
</span>
|
||||
<span class="mono text-text-primary">
|
||||
{scanStatus.files_processed.toLocaleString()} <span class="opacity-40">/</span> {scanStatus.total_files_found.toLocaleString()} ITEMS
|
||||
{scanStatus.files_processed.toLocaleString()} ITEMS SCANNED
|
||||
</span>
|
||||
</div>
|
||||
|
||||
|
||||
@@ -21,21 +21,24 @@
|
||||
|
||||
let {
|
||||
currentPath = $bindable("ROOT"),
|
||||
searchQuery = $bindable(""),
|
||||
files = [],
|
||||
onNavigate = (path: string) => {},
|
||||
onToggleTrack = (item: FileItem) => {},
|
||||
onSelect = (item: FileItem) => {},
|
||||
mode = "host"
|
||||
mode = "host",
|
||||
isSearching = false
|
||||
} = $props<{
|
||||
currentPath: string;
|
||||
files: FileItem[];
|
||||
currentPath?: string;
|
||||
searchQuery?: string;
|
||||
files?: FileItem[];
|
||||
onNavigate?: (path: string) => void;
|
||||
onToggleTrack?: (item: FileItem) => void;
|
||||
onSelect?: (item: FileItem) => void;
|
||||
mode?: "host" | "index";
|
||||
mode?: "host" | "index" | "cart";
|
||||
isSearching?: boolean;
|
||||
}>();
|
||||
|
||||
let searchQuery = $state("");
|
||||
let selectedPaths = $state<Set<string>>(new Set());
|
||||
let lastSelectedPath = $state<string | null>(null);
|
||||
let sortColumn = $state<"name" | "size" | "mtime" | "type">("name");
|
||||
@@ -102,19 +105,37 @@
|
||||
hasChildren: true
|
||||
});
|
||||
|
||||
const activeRoot = $derived(mode === "host" ? sourceDataRoot : virtualIndexRoot);
|
||||
const recoveryQueueRoot = $derived({
|
||||
name: "Recovery Queue",
|
||||
path: "ROOT",
|
||||
expanded: true,
|
||||
children: [],
|
||||
hasChildren: true
|
||||
});
|
||||
|
||||
const activeRoot = $derived(
|
||||
mode === "host" ? sourceDataRoot :
|
||||
mode === "index" ? virtualIndexRoot :
|
||||
recoveryQueueRoot
|
||||
);
|
||||
|
||||
// --- Logic ---
|
||||
|
||||
const breadcrumbs = $derived.by(() => {
|
||||
if (currentPath === "ROOT") {
|
||||
return [{ name: mode === "host" ? "All Sources" : "Index Browser", path: "ROOT" }];
|
||||
let name = "All Sources";
|
||||
if (mode === "index") name = "Index Browser";
|
||||
if (mode === "cart") name = "Recovery Queue";
|
||||
return [{ name, path: "ROOT" }];
|
||||
}
|
||||
|
||||
const parts = currentPath.split("/").filter(Boolean);
|
||||
const crumbs: Breadcrumb[] = [];
|
||||
|
||||
crumbs.push({ name: mode === "host" ? "All Sources" : "Index Browser", path: "ROOT" });
|
||||
let rootName = "All Sources";
|
||||
if (mode === "index") rootName = "Index Browser";
|
||||
if (mode === "cart") rootName = "Recovery Queue";
|
||||
crumbs.push({ name: rootName, path: "ROOT" });
|
||||
|
||||
let current = "";
|
||||
for (const part of parts) {
|
||||
@@ -125,7 +146,10 @@
|
||||
});
|
||||
|
||||
const filteredFiles = $derived.by(() => {
|
||||
let result = files.filter((f: FileItem) => f.name.toLowerCase().includes(searchQuery.toLowerCase()));
|
||||
// When doing backend search, the parent feeds us already-filtered results.
|
||||
// We'll still do a light local filter to ensure things like name matching,
|
||||
// but we should match on the full path just in case.
|
||||
let result = files.filter((f: FileItem) => f.path.toLowerCase().includes((searchQuery || "").toLowerCase()));
|
||||
|
||||
result.sort((a: FileItem, b: FileItem) => {
|
||||
const valA = sortColumn === "type" ? a.type : a[sortColumn as keyof FileItem] || 0;
|
||||
@@ -194,14 +218,55 @@
|
||||
}
|
||||
}
|
||||
|
||||
function bulkToggle(track: boolean) {
|
||||
const selectedItems = files.filter((f: FileItem) => selectedPaths.has(f.path) && f.tracked !== track);
|
||||
selectedItems.forEach((item: FileItem) => onToggleTrack(item));
|
||||
let isEditingPath = $state(false);
|
||||
let pathInputValue = $state("");
|
||||
|
||||
function handleAddressClick() {
|
||||
pathInputValue = currentPath;
|
||||
isEditingPath = true;
|
||||
}
|
||||
|
||||
function handlePathSubmit() {
|
||||
onNavigate(pathInputValue);
|
||||
isEditingPath = false;
|
||||
}
|
||||
|
||||
function handleKeyDown(e: KeyboardEvent) {
|
||||
if (isEditingPath) {
|
||||
if (e.key === "Enter") handlePathSubmit();
|
||||
if (e.key === "Escape") isEditingPath = false;
|
||||
return;
|
||||
}
|
||||
|
||||
if (e.key === "Enter" && selectedPaths.size === 1) {
|
||||
const item = files.find((f: FileItem) => f.path === Array.from(selectedPaths)[0]);
|
||||
if (item && item.type === "directory") {
|
||||
handleRowDoubleClick(item);
|
||||
}
|
||||
}
|
||||
if (e.key === "Backspace") {
|
||||
if (currentPath === "ROOT") return;
|
||||
const parts = currentPath.split("/").filter(Boolean);
|
||||
if (parts.length === 1) {
|
||||
onNavigate("ROOT");
|
||||
} else {
|
||||
onNavigate("/" + parts.slice(0, -1).join("/"));
|
||||
}
|
||||
}
|
||||
if ((e.ctrlKey || e.metaKey) && e.key === "f") {
|
||||
e.preventDefault();
|
||||
const searchInput = document.getElementById("browser-search") as HTMLInputElement;
|
||||
searchInput?.focus();
|
||||
}
|
||||
}
|
||||
</script>
|
||||
|
||||
<div
|
||||
class="file-browser flex h-full flex-col overflow-hidden rounded-lg border border-border-color bg-bg-secondary shadow-2xl min-w-0"
|
||||
onkeydown={handleKeyDown}
|
||||
tabindex="0"
|
||||
role="application"
|
||||
aria-label="File Browser"
|
||||
>
|
||||
<!-- ZONE A: TOP BAR -->
|
||||
<div class="flex h-14 shrink-0 items-center justify-between border-b border-border-color bg-bg-tertiary/50 px-6 shadow-sm">
|
||||
@@ -233,8 +298,23 @@
|
||||
</div>
|
||||
|
||||
<!-- Address Bar -->
|
||||
<div class="flex-1 flex items-center bg-bg-primary border border-border-color/40 rounded-md px-3 h-9 shadow-inner overflow-hidden max-w-3xl group transition-all focus-within:border-action-color/50 min-w-0">
|
||||
<div
|
||||
class="flex-1 flex items-center bg-bg-primary border border-border-color/40 rounded-md px-3 h-9 shadow-inner overflow-hidden max-w-3xl group transition-all focus-within:border-action-color/50 min-w-0"
|
||||
onclick={handleAddressClick}
|
||||
role="button"
|
||||
tabindex="-1"
|
||||
>
|
||||
<Folder size={16} class="text-yellow-500/80 mr-2 shrink-0"></Folder>
|
||||
|
||||
{#if isEditingPath}
|
||||
<input
|
||||
type="text"
|
||||
class="flex-1 bg-transparent border-none outline-none text-[13px] text-text-primary mono"
|
||||
bind:value={pathInputValue}
|
||||
onblur={() => setTimeout(() => isEditingPath = false, 100)}
|
||||
autoFocus
|
||||
/>
|
||||
{:else}
|
||||
<div class="flex-1 flex items-center overflow-x-auto scrollbar-hide">
|
||||
{#each breadcrumbs as crumb, i}
|
||||
{#if i > 0}
|
||||
@@ -245,13 +325,15 @@
|
||||
"px-2 py-0.5 rounded-md text-[13px] transition-colors hover:bg-white/5 whitespace-nowrap cursor-pointer",
|
||||
i === breadcrumbs.length - 1 ? "text-text-primary font-bold" : "text-text-secondary hover:text-text-primary"
|
||||
)}
|
||||
onclick={() => onNavigate(crumb.path)}
|
||||
onclick={(e) => { e.stopPropagation(); onNavigate(crumb.path); }}
|
||||
>
|
||||
{crumb.name}
|
||||
</button>
|
||||
{/each}
|
||||
</div>
|
||||
<button class="ml-2 text-text-secondary hover:text-text-primary p-1 transition-colors cursor-pointer shrink-0" onclick={() => onNavigate(currentPath)}>
|
||||
{/if}
|
||||
|
||||
<button class="ml-2 text-text-secondary hover:text-text-primary p-1 transition-colors cursor-pointer shrink-0" onclick={(e) => { e.stopPropagation(); onNavigate(currentPath); }}>
|
||||
<RotateCw size={14}></RotateCw>
|
||||
</button>
|
||||
</div>
|
||||
@@ -260,11 +342,16 @@
|
||||
<!-- Search Input -->
|
||||
<div class="flex items-center shrink-0 ml-12">
|
||||
<div class="relative w-48 sm:w-64 group">
|
||||
{#if isSearching}
|
||||
<RotateCw size={14} class="absolute left-3 top-3 text-action-color animate-spin"></RotateCw>
|
||||
{:else}
|
||||
<Search
|
||||
size={14}
|
||||
class="absolute left-3 top-3 text-text-secondary group-focus-within:text-action-color transition-colors"
|
||||
></Search>
|
||||
{/if}
|
||||
<Input
|
||||
id="browser-search"
|
||||
type="text"
|
||||
placeholder="Search folder"
|
||||
bind:value={searchQuery}
|
||||
@@ -417,8 +504,10 @@
|
||||
<span class="font-bold uppercase tracking-wider">
|
||||
{#if mode === 'host'}
|
||||
{files.filter((f: FileItem) => f.tracked).length} Tracked
|
||||
{:else}
|
||||
{:else if mode === 'index'}
|
||||
{files.filter((f: FileItem) => f.selected).length} Selected
|
||||
{:else}
|
||||
{files.length} Queued
|
||||
{/if}
|
||||
</span>
|
||||
</div>
|
||||
|
||||
@@ -110,7 +110,8 @@
|
||||
isSelected
|
||||
? "bg-blue-500/15 border-l-2 border-l-blue-500"
|
||||
: "hover:bg-white/5 border-l-2 border-l-transparent",
|
||||
item.ignored && "opacity-40 grayscale-[0.5]"
|
||||
item.ignored && "opacity-40 grayscale-[0.5]",
|
||||
(mode === 'index' && item.type === 'file' && item.vulnerable) && "opacity-60 cursor-not-allowed"
|
||||
)}
|
||||
role="button"
|
||||
tabindex="0"
|
||||
@@ -123,7 +124,9 @@
|
||||
class="flex h-10 w-12 shrink-0 items-center justify-center border-r border-border-color/10"
|
||||
onclick={(e) => {
|
||||
e.stopPropagation();
|
||||
if (!item.ignored) onToggleTrack();
|
||||
if (item.ignored) return;
|
||||
if (mode === 'index' && item.type === 'file' && item.vulnerable) return;
|
||||
onToggleTrack();
|
||||
}}
|
||||
onkeydown={(e) => e.key === " " && e.stopPropagation()}
|
||||
role="none"
|
||||
@@ -143,7 +146,12 @@
|
||||
</div>
|
||||
{/if}
|
||||
{:else}
|
||||
<Checkbox checked={item.selected} onCheckedChange={onToggleTrack} />
|
||||
<Checkbox
|
||||
checked={item.selected}
|
||||
indeterminate={item.indeterminate}
|
||||
onCheckedChange={onToggleTrack}
|
||||
disabled={item.type === 'file' && item.vulnerable}
|
||||
/>
|
||||
{/if}
|
||||
</div>
|
||||
|
||||
@@ -181,7 +189,8 @@
|
||||
>
|
||||
{item.name}
|
||||
</span>
|
||||
{#if mode === "index" && item.media && item.media.length > 0}
|
||||
{#if mode === "index"}
|
||||
{#if item.media && item.media.length > 0}
|
||||
<div class="flex gap-1 overflow-hidden shrink-0">
|
||||
{#each item.media as m}
|
||||
<span class="inline-flex items-center gap-1 bg-blue-500/10 text-blue-400 text-[9px] px-1.5 py-0.5 rounded border border-blue-500/20 font-bold uppercase tracking-wider">
|
||||
@@ -190,6 +199,22 @@
|
||||
</span>
|
||||
{/each}
|
||||
</div>
|
||||
{:else if item.vulnerable}
|
||||
<span class="inline-flex items-center gap-1 bg-error-color/10 text-error-color text-[8px] px-1.5 py-0.5 rounded border border-error-color/20 font-black uppercase tracking-widest">
|
||||
<ShieldAlert size={10} />
|
||||
Vulnerable
|
||||
</span>
|
||||
{:else if item.type === 'directory'}
|
||||
<span class="inline-flex items-center gap-1 bg-success-color/10 text-success-color text-[8px] px-1.5 py-0.5 rounded border border-success-color/20 font-black uppercase tracking-widest">
|
||||
<ShieldCheck size={10} />
|
||||
Protected
|
||||
</span>
|
||||
{:else if item.type === 'file'}
|
||||
<span class="inline-flex items-center gap-1 bg-error-color/10 text-error-color text-[8px] px-1.5 py-0.5 rounded border border-error-color/20 font-black uppercase tracking-widest">
|
||||
<ShieldAlert size={10} />
|
||||
Vulnerable
|
||||
</span>
|
||||
{/if}
|
||||
{/if}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
<script lang="ts">
|
||||
import { onMount } from 'svelte';
|
||||
import { ChevronRight, Folder, HardDrive } from "lucide-svelte";
|
||||
import type { TreeNode } from "$lib/types";
|
||||
import { cn } from "$lib/utils";
|
||||
@@ -26,17 +27,29 @@
|
||||
let loading = $state(false);
|
||||
let loaded = $state(false);
|
||||
|
||||
// Initialize state from props once
|
||||
// Initialize state from props
|
||||
onMount(() => {
|
||||
if (node.expanded) expanded = true;
|
||||
if (node.children) children = node.children;
|
||||
if (node.children && node.children.length > 0) {
|
||||
children = node.children;
|
||||
loaded = true;
|
||||
}
|
||||
});
|
||||
|
||||
import { onMount } from 'svelte';
|
||||
|
||||
// Auto-load if started expanded
|
||||
// AUTO-EXPAND LOGIC:
|
||||
// If the current global path is a child of this node, we should expand to show it.
|
||||
$effect(() => {
|
||||
if (expanded && !loaded) {
|
||||
if (selectedPath && node.path !== "ROOT") {
|
||||
// If selectedPath starts with node.path, we are a parent of the active view
|
||||
if (selectedPath.startsWith(node.path + "/")) {
|
||||
expanded = true;
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// Auto-load if expanded
|
||||
$effect(() => {
|
||||
if (expanded && !loaded && !loading) {
|
||||
loadSubdirs();
|
||||
}
|
||||
});
|
||||
@@ -51,7 +64,7 @@
|
||||
query: { path: node.path }
|
||||
});
|
||||
|
||||
const data = (response.data as any) as TreeNodeSchema[];
|
||||
const data = response.data as TreeNodeSchema[];
|
||||
|
||||
if (data && Array.isArray(data)) {
|
||||
children = data.map((d: TreeNodeSchema) => ({
|
||||
@@ -72,9 +85,6 @@
|
||||
|
||||
async function toggle() {
|
||||
expanded = !expanded;
|
||||
if (expanded && !loaded) {
|
||||
await loadSubdirs();
|
||||
}
|
||||
}
|
||||
|
||||
function select() {
|
||||
@@ -86,16 +96,16 @@
|
||||
return HardDrive;
|
||||
});
|
||||
|
||||
const hasSubdirs = $derived((children && children.length > 0) || (node as any).hasChildren);
|
||||
const hasSubdirs = $derived((children && children.length > 0) || node.hasChildren);
|
||||
|
||||
function handleKeyDown(e: KeyboardEvent) {
|
||||
if (e.key === "Enter" || e.key === " ") {
|
||||
e.preventDefault();
|
||||
select();
|
||||
} else if (e.key === "ArrowRight") {
|
||||
if (!expanded) toggle();
|
||||
if (!expanded) expanded = true;
|
||||
} else if (e.key === "ArrowLeft") {
|
||||
if (expanded) toggle();
|
||||
if (expanded) expanded = false;
|
||||
}
|
||||
}
|
||||
</script>
|
||||
@@ -103,7 +113,7 @@
|
||||
<div class="tree-item-group">
|
||||
<div
|
||||
class={cn(
|
||||
"group flex items-center gap-2 py-1.5 px-3 cursor-pointer select-none transition-all rounded-sm",
|
||||
"group flex items-center gap-2 py-1.5 px-3 cursor-pointer select-none transition-all rounded-sm outline-none focus:ring-1 focus:ring-blue-500/30",
|
||||
selectedPath === node.path
|
||||
? "bg-blue-500/15 text-text-primary shadow-sm border-l-2 border-blue-500"
|
||||
: "text-text-secondary hover:bg-white/5 hover:text-text-primary border-l-2 border-transparent"
|
||||
|
||||
@@ -30,11 +30,10 @@ const buttonVariants = tv({
|
||||
type Variant = VariantProps<typeof buttonVariants>["variant"];
|
||||
type Size = VariantProps<typeof buttonVariants>["size"];
|
||||
|
||||
type Props = ButtonPrimitive.Props & {
|
||||
type Props = any & {
|
||||
variant?: Variant;
|
||||
size?: Size;
|
||||
};
|
||||
|
||||
export {
|
||||
Root,
|
||||
type Props,
|
||||
|
||||
@@ -1,26 +1,30 @@
|
||||
<script lang="ts">
|
||||
import { Checkbox as CheckboxPrimitive } from "bits-ui";
|
||||
import { Check } from "lucide-svelte";
|
||||
import { Check, Minus } from "lucide-svelte";
|
||||
import { cn } from "$lib/utils";
|
||||
|
||||
let {
|
||||
class: className,
|
||||
checked = $bindable(false),
|
||||
indeterminate = false,
|
||||
...rest
|
||||
}: CheckboxPrimitive.Props = $props();
|
||||
</script>
|
||||
}: any = $props();</script>
|
||||
|
||||
<CheckboxPrimitive.Root
|
||||
<CheckboxPrimitive.Root
|
||||
bind:checked
|
||||
class={cn(
|
||||
"peer h-4 w-4 shrink-0 rounded-sm border border-border-color bg-bg-primary ring-offset-background focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-action-color focus-visible:ring-offset-2 disabled:cursor-not-allowed disabled:opacity-50 data-[state=checked]:bg-action-color data-[state=checked]:border-action-color data-[state=checked]:text-white transition-all",
|
||||
"peer h-4 w-4 shrink-0 rounded-sm border border-border-color bg-bg-primary ring-offset-background focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-action-color focus-visible:ring-offset-2 disabled:cursor-not-allowed disabled:opacity-50 data-[state=checked]:bg-action-color data-[state=checked]:border-action-color data-[state=checked]:text-white data-[state=indeterminate]:bg-action-color data-[state=indeterminate]:border-action-color data-[state=indeterminate]:text-white transition-all",
|
||||
className
|
||||
)}
|
||||
{...rest}
|
||||
>
|
||||
{#if checked}
|
||||
<div class="flex items-center justify-center text-current h-full w-full">
|
||||
>
|
||||
{#if checked === true}
|
||||
<div class="flex items-center justify-center text-current h-full w-full animate-in zoom-in-50 duration-200">
|
||||
<Check class="h-3.5 w-3.5 stroke-[3]" />
|
||||
</div>
|
||||
{:else if indeterminate || checked === "indeterminate"}
|
||||
<div class="flex items-center justify-center text-current h-full w-full animate-in zoom-in-50 duration-200">
|
||||
<Minus class="h-3.5 w-3.5 stroke-[4]" />
|
||||
</div>
|
||||
{/if}
|
||||
</CheckboxPrimitive.Root>
|
||||
</CheckboxPrimitive.Root>
|
||||
|
||||
@@ -10,6 +10,8 @@ export interface FileItem {
|
||||
media?: string[]; // Media it's on (for index browsing)
|
||||
selected?: boolean; // For restore cart
|
||||
sha256_hash?: string | null;
|
||||
vulnerable?: boolean;
|
||||
indeterminate?: boolean;
|
||||
}
|
||||
|
||||
export interface TreeNode {
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
<script lang="ts">
|
||||
// @ts-ignore
|
||||
import '../app.css';
|
||||
import { page } from '$app/stores';
|
||||
import {
|
||||
// @ts-ignore
|
||||
import { page } from '$app/stores'; import {
|
||||
LayoutDashboard,
|
||||
Library,
|
||||
FolderTree,
|
||||
@@ -22,18 +23,77 @@
|
||||
const navItems = [
|
||||
{ name: 'Dashboard', href: '/', icon: LayoutDashboard },
|
||||
{ name: 'Index Browser', href: '/index-browser', icon: Library },
|
||||
{ name: 'File Tracking', href: '/tracking', icon: FolderTree },
|
||||
{ name: 'Tracking Policy', href: '/tracking', icon: FolderTree },
|
||||
{ name: 'System Activity', href: '/jobs', icon: Activity },
|
||||
{ name: 'Physical Media', href: '/inventory', icon: CassetteTape },
|
||||
{ name: 'Restores', href: '/restores', icon: History }
|
||||
{ name: 'Media Fleet', href: '/inventory', icon: CassetteTape },
|
||||
{ name: 'Data Recovery', href: '/restores', icon: History }
|
||||
];
|
||||
|
||||
let isSidebarOpen = $state(true);
|
||||
let showShortcuts = $state(false);
|
||||
|
||||
function handleGlobalKeyDown(e: KeyboardEvent) {
|
||||
if (e.key === '?' && !['INPUT', 'TEXTAREA'].includes((e.target as HTMLElement).tagName)) {
|
||||
showShortcuts = !showShortcuts;
|
||||
}
|
||||
if (showShortcuts && e.key === 'Escape') {
|
||||
showShortcuts = false;
|
||||
}
|
||||
|
||||
// Navigation Shortcuts (Single keys only, no modifiers)
|
||||
if (!['INPUT', 'TEXTAREA'].includes((e.target as HTMLElement).tagName) && !e.ctrlKey && !e.metaKey && !e.altKey) {
|
||||
if (e.key === 'd') window.location.href = '/';
|
||||
if (e.key === 'i') window.location.href = '/index-browser';
|
||||
if (e.key === 't') window.location.href = '/tracking';
|
||||
if (e.key === 'a') window.location.href = '/jobs';
|
||||
if (e.key === 'm') window.location.href = '/inventory';
|
||||
if (e.key === 'r') window.location.href = '/restores';
|
||||
if (e.key === 's') window.location.href = '/settings';
|
||||
}
|
||||
}
|
||||
</script>
|
||||
|
||||
<Toaster position="top-right" richColors />
|
||||
<svelte:window onkeydown={handleGlobalKeyDown} />
|
||||
|
||||
<Toaster position="top-left" richColors />
|
||||
<ScanStatusOverlay />
|
||||
|
||||
<!-- Shortcuts Overlay -->
|
||||
{#if showShortcuts}
|
||||
<div class="fixed inset-0 z-[1000] bg-black/90 backdrop-blur-md flex items-center justify-center p-6 animate-in fade-in duration-300" onclick={() => showShortcuts = false} role="presentation">
|
||||
<div class="w-[500px] bg-bg-secondary border border-border-color shadow-2xl rounded-2xl p-10 flex flex-col gap-8" onclick={(e) => e.stopPropagation()} role="dialog">
|
||||
<header>
|
||||
<h2 class="text-2xl font-black text-text-primary uppercase tracking-tighter flex items-center gap-3">
|
||||
<span class="p-2 bg-action-color/10 rounded-lg text-action-color"><Settings size={24} /></span>
|
||||
Fleet Command Shortcuts
|
||||
</h2>
|
||||
<p class="text-[11px] font-bold text-text-secondary uppercase tracking-widest mt-2 opacity-60">Universal system navigation & control.</p>
|
||||
</header>
|
||||
|
||||
<div class="grid grid-cols-2 gap-x-12 gap-y-6">
|
||||
<div class="space-y-4">
|
||||
<h3 class="text-[10px] font-black uppercase tracking-widest text-text-secondary opacity-40">Navigation</h3>
|
||||
<div class="flex justify-between items-center"><span class="text-xs font-bold text-text-primary">Dashboard</span> <kbd class="px-2 py-1 bg-bg-tertiary border border-border-color rounded text-[10px] mono">D</kbd></div>
|
||||
<div class="flex justify-between items-center"><span class="text-xs font-bold text-text-primary">Index Browser</span> <kbd class="px-2 py-1 bg-bg-tertiary border border-border-color rounded text-[10px] mono">I</kbd></div>
|
||||
<div class="flex justify-between items-center"><span class="text-xs font-bold text-text-primary">Tracking Policy</span> <kbd class="px-2 py-1 bg-bg-tertiary border border-border-color rounded text-[10px] mono">T</kbd></div>
|
||||
<div class="flex justify-between items-center"><span class="text-xs font-bold text-text-primary">System Activity</span> <kbd class="px-2 py-1 bg-bg-tertiary border border-border-color rounded text-[10px] mono">A</kbd></div>
|
||||
</div>
|
||||
<div class="space-y-4">
|
||||
<h3 class="text-[10px] font-black uppercase tracking-widest text-text-secondary opacity-40">Operations</h3>
|
||||
<div class="flex justify-between items-center"><span class="text-xs font-bold text-text-primary">Media Fleet</span> <kbd class="px-2 py-1 bg-bg-tertiary border border-border-color rounded text-[10px] mono">M</kbd></div>
|
||||
<div class="flex justify-between items-center"><span class="text-xs font-bold text-text-primary">Data Recovery</span> <kbd class="px-2 py-1 bg-bg-tertiary border border-border-color rounded text-[10px] mono">R</kbd></div>
|
||||
<div class="flex justify-between items-center"><span class="text-xs font-bold text-text-primary">System Settings</span> <kbd class="px-2 py-1 bg-bg-tertiary border border-border-color rounded text-[10px] mono">S</kbd></div>
|
||||
<div class="flex justify-between items-center"><span class="text-xs font-bold text-text-primary">Close Menu</span> <kbd class="px-2 py-1 bg-bg-tertiary border border-border-color rounded text-[10px] mono">ESC</kbd></div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<footer class="pt-6 border-t border-border-color flex justify-center">
|
||||
<p class="text-[9px] font-black uppercase tracking-widest text-text-secondary opacity-50 italic">Press '?' at any time to toggle this command set.</p>
|
||||
</footer>
|
||||
</div>
|
||||
</div>
|
||||
{/if}
|
||||
|
||||
<div class="app-container flex h-screen w-full overflow-hidden bg-bg-primary text-text-primary font-sans selection:bg-action-color/30">
|
||||
<!-- SIDEBAR -->
|
||||
<aside
|
||||
@@ -98,14 +158,13 @@
|
||||
>
|
||||
<Settings size={18} class="shrink-0" />
|
||||
{#if isSidebarOpen}
|
||||
<span class="truncate text-[12px] font-bold uppercase tracking-wider animate-in fade-in slide-in-from-left-2 duration-300">Settings</span>
|
||||
<span class="text-[12px] font-bold uppercase tracking-wider animate-in fade-in slide-in-from-left-2 duration-300">Settings</span>
|
||||
{/if}
|
||||
</a>
|
||||
|
||||
<!-- COLLAPSE TOGGLE -->
|
||||
<button
|
||||
class="flex h-12 items-center justify-center border-t border-border-color bg-bg-secondary text-text-secondary hover:text-text-primary transition-colors shrink-0"
|
||||
onclick={() => isSidebarOpen = !isSidebarOpen}
|
||||
class="h-10 w-full flex items-center justify-center hover:bg-white/5 text-text-secondary hover:text-text-primary transition-colors border-t border-border-color/50"
|
||||
>
|
||||
{#if isSidebarOpen}
|
||||
<ChevronLeft size={16} />
|
||||
@@ -117,27 +176,17 @@
|
||||
</aside>
|
||||
|
||||
<!-- MAIN CONTENT -->
|
||||
<main class="flex-1 overflow-hidden relative flex flex-col min-w-0">
|
||||
<!-- Subtle background gradient -->
|
||||
<div class="absolute inset-0 bg-[radial-gradient(circle_at_50%_0%,rgba(59,130,246,0.03),transparent_50%)] pointer-events-none"></div>
|
||||
|
||||
<div class="flex-1 overflow-y-auto p-8 relative z-10">
|
||||
<div class="max-w-[1600px] mx-auto h-full">
|
||||
<main class="flex-1 min-w-0 flex flex-col relative overflow-hidden">
|
||||
<div class="flex-1 overflow-y-auto p-8 relative scrollbar-hide">
|
||||
{@render children()}
|
||||
</div>
|
||||
</div>
|
||||
</main>
|
||||
</div>
|
||||
|
||||
<style>
|
||||
:global(body) {
|
||||
background-color: #000;
|
||||
}
|
||||
|
||||
/* Custom scrollbar for brutalist look */
|
||||
:global(::-webkit-scrollbar) {
|
||||
width: 8px;
|
||||
height: 8px;
|
||||
width: 6px;
|
||||
height: 6px;
|
||||
}
|
||||
:global(::-webkit-scrollbar-track) {
|
||||
background: #0a0a0a;
|
||||
|
||||
@@ -11,7 +11,9 @@
|
||||
HardDrive,
|
||||
Cloud,
|
||||
ArrowRight,
|
||||
EyeOff
|
||||
EyeOff,
|
||||
FolderTree,
|
||||
CassetteTape
|
||||
} from 'lucide-svelte';
|
||||
import { Card } from '$lib/components/ui/card';
|
||||
import { Button } from '$lib/components/ui/button';
|
||||
@@ -112,6 +114,50 @@
|
||||
{/each}
|
||||
</div>
|
||||
{:else if stats}
|
||||
{#if stats.total_files_indexed === 0 && stats.media_distribution.LTO === 0 && stats.media_distribution.HDD === 0}
|
||||
<!-- ONBOARDING SECTION -->
|
||||
<div class="grid grid-cols-1 md:grid-cols-3 gap-8 py-12">
|
||||
<Card class="p-8 bg-gradient-to-br from-bg-secondary to-bg-tertiary border-dashed border-2 border-border-color flex flex-col items-center text-center gap-6 group hover:border-blue-500/50 transition-all">
|
||||
<div class="p-4 bg-blue-500/10 rounded-full text-blue-500 group-hover:scale-110 transition-transform">
|
||||
<FolderTree size={40} />
|
||||
</div>
|
||||
<div>
|
||||
<h3 class="text-lg font-black uppercase tracking-tight text-text-primary">1. Define Policy</h3>
|
||||
<p class="text-xs text-text-secondary mt-2 leading-relaxed">Tell TapeHoard which directories to track and what patterns to ignore.</p>
|
||||
</div>
|
||||
<Button variant="outline" class="w-full mt-auto h-11 font-black uppercase tracking-widest text-[10px] border-blue-500/30 text-blue-400 hover:bg-blue-500/10" href="/tracking">
|
||||
Configure Tracking <ArrowRight size={14} class="ml-2" />
|
||||
</Button>
|
||||
</Card>
|
||||
|
||||
<Card class="p-8 bg-gradient-to-br from-bg-secondary to-bg-tertiary border-dashed border-2 border-border-color flex flex-col items-center text-center gap-6 group hover:border-action-color/50 transition-all">
|
||||
<div class="p-4 bg-action-color/10 rounded-full text-action-color group-hover:scale-110 transition-transform">
|
||||
<Activity size={40} />
|
||||
</div>
|
||||
<div>
|
||||
<h3 class="text-lg font-black uppercase tracking-tight text-text-primary">2. Scan Sources</h3>
|
||||
<p class="text-xs text-text-secondary mt-2 leading-relaxed">Run a system-wide scan to index your files and calculate protection hashes.</p>
|
||||
</div>
|
||||
<Button variant="default" class="w-full mt-auto h-11 font-black uppercase tracking-widest text-[10px]" onclick={startScan}>
|
||||
Start Discovery <ArrowRight size={14} class="ml-2" />
|
||||
</Button>
|
||||
</Card>
|
||||
|
||||
<Card class="p-8 bg-gradient-to-br from-bg-secondary to-bg-tertiary border-dashed border-2 border-border-color flex flex-col items-center text-center gap-6 group hover:border-success-color/50 transition-all">
|
||||
<div class="p-4 bg-success-color/10 rounded-full text-success-color group-hover:scale-110 transition-transform">
|
||||
<CassetteTape size={40} />
|
||||
</div>
|
||||
<div>
|
||||
<h3 class="text-lg font-black uppercase tracking-tight text-text-primary">3. Provision Media</h3>
|
||||
<p class="text-xs text-text-secondary mt-2 leading-relaxed">Register your LTO tapes or backup disks to create a destination for your data.</p>
|
||||
</div>
|
||||
<Button variant="outline" class="w-full mt-auto h-11 font-black uppercase tracking-widest text-[10px] border-success-color/30 text-success-color hover:bg-success-color/10" href="/inventory">
|
||||
Manage Fleet <ArrowRight size={14} class="ml-2" />
|
||||
</Button>
|
||||
</Card>
|
||||
</div>
|
||||
{/if}
|
||||
|
||||
<!-- TOP STATS -->
|
||||
<div class="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-4 gap-6">
|
||||
<Card class="p-6 bg-gradient-to-br from-bg-secondary to-bg-tertiary border-border-color hover:border-blue-500/30 transition-all group relative overflow-hidden">
|
||||
@@ -162,7 +208,14 @@
|
||||
<div>
|
||||
<span class="text-[10px] font-black uppercase tracking-widest text-text-secondary block">Last Scan</span>
|
||||
<span class="text-xl font-black text-text-primary tracking-tight">
|
||||
{stats.last_scan_time ? new Date(stats.last_scan_time).toLocaleDateString() : 'Never'}
|
||||
{#if stats.last_scan_time}
|
||||
{new Date(stats.last_scan_time).toLocaleTimeString([], { hour: '2-digit', minute: '2-digit' })}
|
||||
<span class="text-[9px] block text-text-secondary opacity-50 uppercase font-black tracking-widest">
|
||||
{new Date(stats.last_scan_time).toLocaleDateString()}
|
||||
</span>
|
||||
{:else}
|
||||
Never
|
||||
{/if}
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
@@ -186,11 +239,11 @@
|
||||
<div class="space-y-4">
|
||||
<div class="flex justify-between items-end">
|
||||
<div>
|
||||
<span class="text-[10px] font-black uppercase tracking-widest text-text-secondary">Tracked File Coverage</span>
|
||||
<span class="text-[10px] font-black uppercase tracking-widest text-text-secondary">Tracking Coverage</span>
|
||||
<h4 class="text-3xl font-black text-text-primary">{protectionPercent}%</h4>
|
||||
</div>
|
||||
<span class="text-xs font-bold mono text-text-secondary">
|
||||
{stats.total_files_indexed - stats.ignored_files_count - stats.unprotected_files_count} / {stats.total_files_indexed - stats.ignored_files_count} ELIGIBLE FILES
|
||||
{stats.total_files_indexed - stats.ignored_files_count - stats.unprotected_files_count} / {stats.total_files_indexed - stats.ignored_files_count} TRACKED FILES
|
||||
</span>
|
||||
</div>
|
||||
<div class="w-full bg-bg-primary h-4 rounded-full border border-border-color shadow-inner overflow-hidden">
|
||||
@@ -201,7 +254,7 @@
|
||||
<div class="space-y-4">
|
||||
<div class="flex justify-between items-end">
|
||||
<div>
|
||||
<span class="text-[10px] font-black uppercase tracking-widest text-text-secondary">Active Data Redundancy</span>
|
||||
<span class="text-[10px] font-black uppercase tracking-widest text-text-secondary">Archive Redundancy</span>
|
||||
<h4 class="text-3xl font-black text-text-primary">{dataProtectionPercent}%</h4>
|
||||
</div>
|
||||
<span class="text-xs font-bold mono text-text-secondary">
|
||||
@@ -220,7 +273,7 @@
|
||||
<ShieldAlert size={18} />
|
||||
</div>
|
||||
<div>
|
||||
<span class="text-[10px] font-black uppercase tracking-widest text-text-secondary block mb-1">Unprotected</span>
|
||||
<span class="text-[10px] font-black uppercase tracking-widest text-text-secondary block mb-1">Vulnerable</span>
|
||||
<span class="text-lg font-black text-error-color mono">{stats.unprotected_files_count.toLocaleString()}</span>
|
||||
<p class="text-[9px] font-bold text-text-secondary uppercase tracking-tight mt-1">Files pending archival</p>
|
||||
</div>
|
||||
|
||||
@@ -6,11 +6,13 @@
|
||||
Info,
|
||||
X,
|
||||
ShieldCheck,
|
||||
ShieldAlert,
|
||||
FileText,
|
||||
Folder,
|
||||
ListPlus,
|
||||
FolderTree,
|
||||
Clock
|
||||
Clock,
|
||||
ArrowRight
|
||||
} from 'lucide-svelte';
|
||||
import { Button } from '$lib/components/ui/button';
|
||||
import { Card } from '$lib/components/ui/card';
|
||||
@@ -24,18 +26,23 @@
|
||||
addToCartRestoresCartFileIdPost,
|
||||
removeFromCartRestoresCartItemIdDelete,
|
||||
addDirectoryToCartRestoresCartDirectoryPost,
|
||||
searchIndexInventorySearchGet,
|
||||
type ItemMetadataSchema,
|
||||
type CartItemSchema
|
||||
} from '$lib/api';
|
||||
import { toast } from 'svelte-sonner';
|
||||
import { cn } from '$lib/utils';
|
||||
|
||||
let currentPath = $state('ROOT');
|
||||
let searchQuery = $state('');
|
||||
let indexedFiles = $state<FileItem[]>([]);
|
||||
let loading = $state(false);
|
||||
let searchLoading = $state(false);
|
||||
let selectedItemMetadata = $state<ItemMetadataSchema | null>(null);
|
||||
let metadataLoading = $state(false);
|
||||
let searchTimeout: any;
|
||||
|
||||
// This handles the restore cart selection
|
||||
// This handles the recovery queue status bar
|
||||
let restoreCartItems = $state<CartItemSchema[]>([]);
|
||||
const restoreCartPaths = $derived(new Set(restoreCartItems.map(i => i.file_path)));
|
||||
|
||||
@@ -51,20 +58,23 @@
|
||||
}
|
||||
|
||||
async function loadIndexedFiles(path: string) {
|
||||
if (searchQuery.trim().length >= 3) return;
|
||||
loading = true;
|
||||
try {
|
||||
const response = await browseIndexInventoryBrowseGet({
|
||||
query: { path }
|
||||
});
|
||||
if (response.data) {
|
||||
indexedFiles = response.data.map(f => ({
|
||||
indexedFiles = (response.data as any[]).map(f => ({
|
||||
name: f.name,
|
||||
path: f.path,
|
||||
type: f.type as 'file' | 'directory' | 'link',
|
||||
size: f.size ?? null,
|
||||
mtime: f.mtime ?? null,
|
||||
media: f.media ?? [],
|
||||
selected: restoreCartPaths.has(f.path)
|
||||
vulnerable: f.vulnerable,
|
||||
selected: f.selected,
|
||||
indeterminate: f.indeterminate
|
||||
}));
|
||||
}
|
||||
} catch (error) {
|
||||
@@ -75,6 +85,48 @@
|
||||
}
|
||||
}
|
||||
|
||||
async function searchFiles(query: string) {
|
||||
searchLoading = true;
|
||||
try {
|
||||
const response = await searchIndexInventorySearchGet({
|
||||
query: { q: query }
|
||||
});
|
||||
if (response.data) {
|
||||
indexedFiles = (response.data as any[]).map(f => ({
|
||||
name: f.name,
|
||||
path: f.path,
|
||||
type: f.type as 'file' | 'directory' | 'link',
|
||||
size: f.size ?? null,
|
||||
mtime: f.mtime ?? null,
|
||||
media: f.media ?? [],
|
||||
vulnerable: f.vulnerable,
|
||||
selected: f.selected,
|
||||
indeterminate: f.indeterminate
|
||||
}));
|
||||
}
|
||||
} catch (error) {
|
||||
console.error("Failed to search index:", error);
|
||||
toast.error("Search failed");
|
||||
} finally {
|
||||
searchLoading = false;
|
||||
}
|
||||
}
|
||||
|
||||
$effect(() => {
|
||||
const query = searchQuery.trim();
|
||||
if (searchTimeout) clearTimeout(searchTimeout);
|
||||
|
||||
if (query.length >= 3) {
|
||||
searchTimeout = setTimeout(() => {
|
||||
searchFiles(query);
|
||||
}, 300);
|
||||
} else if (query.length === 0) {
|
||||
searchTimeout = setTimeout(() => {
|
||||
loadIndexedFiles(currentPath);
|
||||
}, 50);
|
||||
}
|
||||
});
|
||||
|
||||
async function fetchMetadata(item: FileItem) {
|
||||
metadataLoading = true;
|
||||
try {
|
||||
@@ -93,20 +145,34 @@
|
||||
}
|
||||
|
||||
async function handleToggleCart(item: FileItem) {
|
||||
if (item.type !== 'file') return;
|
||||
const isCurrentlyInCart = item.selected;
|
||||
|
||||
const isCurrentlyInCart = restoreCartPaths.has(item.path);
|
||||
if (!isCurrentlyInCart) {
|
||||
// Check for vulnerability before adding
|
||||
if (item.type === 'file' && (!item.media || item.media.length === 0)) {
|
||||
toast.error(`Cannot add "${item.name}": This file has not been backed up to any media yet.`);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
if (isCurrentlyInCart) {
|
||||
if (item.type === 'file') {
|
||||
const cartItem = restoreCartItems.find(i => i.file_path === item.path);
|
||||
if (cartItem) {
|
||||
await removeFromCartRestoresCartItemIdDelete({
|
||||
path: { item_id: cartItem.id }
|
||||
});
|
||||
toast.info(`Removed ${item.name} from restore cart`);
|
||||
toast.info(`Removed ${item.name} from recovery queue`);
|
||||
}
|
||||
} else {
|
||||
// Toggling a directory off is currently not supported as a bulk op
|
||||
// The user should manage this in the Recovery Queue page
|
||||
toast.warning("To remove a folder, please manage items in the Data Recovery page.");
|
||||
return;
|
||||
}
|
||||
} else {
|
||||
if (item.type === 'file') {
|
||||
// Fetch metadata to get the DB ID
|
||||
const metaResponse = await getItemMetadataInventoryMetadataGet({
|
||||
query: { path: item.path }
|
||||
@@ -116,12 +182,28 @@
|
||||
await addToCartRestoresCartFileIdPost({
|
||||
path: { file_id: metaResponse.data.id }
|
||||
});
|
||||
toast.success(`Added ${item.name} to restore cart`);
|
||||
toast.success(`Added ${item.name} to recovery queue`);
|
||||
}
|
||||
} else {
|
||||
// It's a directory
|
||||
const response = await addDirectoryToCartRestoresCartDirectoryPost({
|
||||
body: { path: item.path }
|
||||
});
|
||||
const msg = (response.data as any)?.message || `Added folder to recovery queue`;
|
||||
toast.success(msg);
|
||||
}
|
||||
}
|
||||
await loadCart();
|
||||
// Refresh file list for checkbox state
|
||||
loadIndexedFiles(currentPath);
|
||||
|
||||
// Refresh everything
|
||||
await Promise.all([
|
||||
loadCart(),
|
||||
searchQuery.length >= 3 ? searchFiles(searchQuery) : loadIndexedFiles(currentPath)
|
||||
]);
|
||||
|
||||
// Refresh metadata if it's the selected item
|
||||
if (selectedItemMetadata && selectedItemMetadata.file_path === item.path) {
|
||||
fetchMetadata(item);
|
||||
}
|
||||
} catch (error: any) {
|
||||
toast.error(error.body?.detail || "Action failed");
|
||||
}
|
||||
@@ -132,9 +214,18 @@
|
||||
const response = await addDirectoryToCartRestoresCartDirectoryPost({
|
||||
body: { path: itemPath }
|
||||
});
|
||||
toast.success((response.data as any)?.message || "Folder contents added to cart");
|
||||
await loadCart();
|
||||
loadIndexedFiles(currentPath);
|
||||
toast.success((response.data as any)?.message || "Folder added to recovery queue");
|
||||
|
||||
// Refresh everything
|
||||
await Promise.all([
|
||||
loadCart(),
|
||||
searchQuery.length >= 3 ? searchFiles(searchQuery) : loadIndexedFiles(currentPath)
|
||||
]);
|
||||
|
||||
if (selectedItemMetadata && selectedItemMetadata.file_path === itemPath) {
|
||||
const dummyItem = { path: itemPath, name: '', type: 'directory' } as FileItem;
|
||||
fetchMetadata(dummyItem);
|
||||
}
|
||||
} catch (error: any) {
|
||||
toast.error(error.body?.detail || "Action failed");
|
||||
}
|
||||
@@ -185,10 +276,10 @@
|
||||
{#if restoreCartItems.length > 0}
|
||||
<div class="flex items-center gap-4 z-10 animate-in fade-in zoom-in duration-300">
|
||||
<span class="text-[10px] font-black uppercase tracking-widest text-text-secondary bg-bg-primary px-3 py-1.5 rounded-full border border-border-color">
|
||||
{restoreCartItems.length} items in cart
|
||||
{restoreCartItems.length} items in queue
|
||||
</span>
|
||||
<Button variant="default" class="bg-success-color hover:bg-success-color/90 text-white font-black uppercase tracking-widest text-[11px] px-6 h-10 shadow-lg shadow-success-color/20" href="/restores">
|
||||
Review Restore Manifest
|
||||
Review Recovery Manifest
|
||||
</Button>
|
||||
</div>
|
||||
{/if}
|
||||
@@ -204,13 +295,14 @@
|
||||
{/if}
|
||||
<FileBrowser
|
||||
bind:currentPath
|
||||
bind:searchQuery
|
||||
files={indexedFiles}
|
||||
isSearching={searchLoading}
|
||||
mode="index"
|
||||
onNavigate={(path) => currentPath = path}
|
||||
onToggleTrack={handleToggleCart}
|
||||
onSelect={fetchMetadata}
|
||||
/>
|
||||
</div>
|
||||
/> </div>
|
||||
|
||||
<!-- Metadata Sidebar -->
|
||||
<aside class="w-96 flex flex-col gap-4 shrink-0">
|
||||
@@ -308,16 +400,29 @@
|
||||
|
||||
{#if selectedItemMetadata.type === 'file' && (selectedItemMetadata.versions?.length ?? 0) > 0}
|
||||
<div class="p-6 bg-bg-tertiary/30 border-t border-border-color mt-auto">
|
||||
<Button class="w-full h-11 font-black uppercase tracking-widest text-[11px] shadow-lg shadow-blue-500/10" onclick={() => handleToggleCart({path: selectedItemMetadata?.file_path || '', type: 'file', name: ''} as FileItem)}>
|
||||
<Button class="w-full h-11 font-black uppercase tracking-widest text-[11px] shadow-lg shadow-blue-500/10" onclick={() => handleToggleCart({path: selectedItemMetadata?.file_path || '', type: 'file', name: '', media: (selectedItemMetadata?.versions || []).map(v => v.media_identifier), selected: (selectedItemMetadata as any).selected} as FileItem)}>
|
||||
<ShieldCheck size={16} class="mr-2" />
|
||||
{restoreCartPaths.has(selectedItemMetadata?.file_path || '') ? 'Remove from Cart' : 'Add to Restore Cart'}
|
||||
{(selectedItemMetadata as any).selected ? 'Remove from Queue' : 'Add to Recovery Queue'}
|
||||
</Button>
|
||||
</div>
|
||||
{:else if selectedItemMetadata.type === 'file'}
|
||||
<div class="p-6 bg-bg-tertiary/30 border-t border-border-color mt-auto opacity-50">
|
||||
<Button disabled class="w-full h-11 font-black uppercase tracking-widest text-[11px] border-error-color/30 text-error-color">
|
||||
<ShieldAlert size={16} class="mr-2" />
|
||||
File Vulnerable (Not Backed Up)
|
||||
</Button>
|
||||
</div>
|
||||
{:else if selectedItemMetadata.type === 'directory' && (selectedItemMetadata.child_count || 0) > 0}
|
||||
<div class="p-6 bg-bg-tertiary/30 border-t border-border-color mt-auto">
|
||||
<Button variant="outline" class="w-full h-11 font-black uppercase tracking-widest text-[11px] border-blue-500/30 text-blue-400 hover:bg-blue-500/10" onclick={() => handleToggleDirectoryCart(selectedItemMetadata?.file_path || '')}>
|
||||
<Button variant="outline" class={cn("w-full h-11 font-black uppercase tracking-widest text-[11px]", (selectedItemMetadata as any).vulnerable ? "border-orange-500/30 text-orange-400 hover:bg-orange-500/10" : "border-success-color/30 text-success-color hover:bg-success-color/10")} onclick={() => handleToggleDirectoryCart(selectedItemMetadata?.file_path || '')} disabled={(selectedItemMetadata as any).selected}>
|
||||
<ListPlus size={16} class="mr-2" />
|
||||
Add Folder Contents to Cart
|
||||
{#if (selectedItemMetadata as any).selected}
|
||||
Folder Fully Queued
|
||||
{:else if (selectedItemMetadata as any).vulnerable}
|
||||
Add Backed Up Items to Queue
|
||||
{:else}
|
||||
Add Folder to Recovery Queue
|
||||
{/if}
|
||||
</Button>
|
||||
</div>
|
||||
{/if}
|
||||
@@ -325,7 +430,7 @@
|
||||
{:else}
|
||||
<div class="flex-1 border-2 border-dashed border-border-color rounded-xl flex flex-col items-center justify-center p-12 text-center opacity-20">
|
||||
<Library size={48} class="mb-4 text-blue-500" />
|
||||
<p class="textxs font-black uppercase tracking-widest leading-relaxed">
|
||||
<p class="text-xs font-black uppercase tracking-widest leading-relaxed">
|
||||
Select an item from the index<br>to view detailed metadata and<br>storage locations.
|
||||
</p>
|
||||
</div>
|
||||
|
||||
@@ -15,7 +15,8 @@
|
||||
Save,
|
||||
Globe,
|
||||
Monitor,
|
||||
PlayCircle
|
||||
PlayCircle,
|
||||
Star
|
||||
} from 'lucide-svelte';
|
||||
import { Button } from '$lib/components/ui/button';
|
||||
import { Card } from '$lib/components/ui/card';
|
||||
@@ -26,6 +27,9 @@
|
||||
registerMediaInventoryMediaPost,
|
||||
deleteMediaInventoryMediaMediaIdDelete,
|
||||
triggerBackupBackupsTriggerMediaIdPost,
|
||||
initializeMediaInventoryMediaMediaIdInitializePost,
|
||||
getSettingsSystemSettingsGet,
|
||||
updateSettingSystemSettingsPost,
|
||||
type MediaSchema
|
||||
} from '$lib/api';
|
||||
import { toast } from 'svelte-sonner';
|
||||
@@ -33,6 +37,7 @@
|
||||
let mediaList = $state<MediaSchema[]>([]);
|
||||
let loading = $state(true);
|
||||
let showRegisterDialog = $state(false);
|
||||
let primaryTargetId = $state<string | null>(null);
|
||||
|
||||
// New Media Form State
|
||||
let newMedia = $state({
|
||||
@@ -43,6 +48,8 @@
|
||||
location: 'Storage Shelf',
|
||||
// Type-specific config
|
||||
device_path: '/dev/nst0', // For Tape
|
||||
encryption_key: '', // 256-bit Hex Key
|
||||
enable_encryption: false,
|
||||
mount_path: '', // For HDD
|
||||
bucket_name: '', // For Cloud
|
||||
cloud_provider: 'AWS S3',
|
||||
@@ -53,9 +60,16 @@
|
||||
async function loadMedia() {
|
||||
loading = true;
|
||||
try {
|
||||
const response = await listMediaInventoryMediaGet();
|
||||
if (response.data) {
|
||||
mediaList = response.data;
|
||||
const [mediaRes, settingsRes] = await Promise.all([
|
||||
listMediaInventoryMediaGet(),
|
||||
getSettingsSystemSettingsGet()
|
||||
]);
|
||||
|
||||
if (mediaRes.data) {
|
||||
mediaList = mediaRes.data;
|
||||
}
|
||||
if (settingsRes.data?.primary_archival_target) {
|
||||
primaryTargetId = settingsRes.data.primary_archival_target;
|
||||
}
|
||||
} catch (error) {
|
||||
toast.error("Failed to load media fleet");
|
||||
@@ -64,14 +78,40 @@
|
||||
}
|
||||
}
|
||||
|
||||
async function handleStartBackup(mediaId: number, identifier: string) {
|
||||
async function setPrimaryTarget(mediaId: number) {
|
||||
try {
|
||||
const idStr = mediaId.toString();
|
||||
await updateSettingSystemSettingsPost({
|
||||
body: { key: "primary_archival_target", value: idStr }
|
||||
});
|
||||
primaryTargetId = idStr;
|
||||
toast.success("Primary archival target updated");
|
||||
} catch (error) {
|
||||
toast.error("Failed to set primary target");
|
||||
}
|
||||
}
|
||||
|
||||
async function handleInitialize(mediaId: number, identifier: string) {
|
||||
if (!confirm(`Are you sure you want to initialize ${identifier}? This may wipe existing data on the media.`)) return;
|
||||
|
||||
try {
|
||||
toast.info(`Initializing ${identifier}...`);
|
||||
await initializeMediaInventoryMediaMediaIdInitializePost({
|
||||
path: { media_id: mediaId }
|
||||
});
|
||||
toast.success(`${identifier} initialized successfully`);
|
||||
} catch (error: any) {
|
||||
toast.error(error.body?.detail || "Failed to initialize media");
|
||||
}
|
||||
}
|
||||
|
||||
async function handleStartBackup(mediaId: number, identifier: string) { try {
|
||||
await triggerBackupBackupsTriggerMediaIdPost({
|
||||
path: { media_id: mediaId }
|
||||
});
|
||||
toast.success(`Backup job initiated for ${identifier}`);
|
||||
toast.success(`Archival job initiated for ${identifier}`);
|
||||
} catch (error: any) {
|
||||
toast.error(error.body?.detail || "Failed to start backup");
|
||||
toast.error(error.body?.detail || "Failed to start archival");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -84,6 +124,9 @@
|
||||
const config: Record<string, any> = {};
|
||||
if (newMedia.media_type === 'tape') {
|
||||
config.device_path = newMedia.device_path;
|
||||
if (newMedia.enable_encryption && newMedia.encryption_key) {
|
||||
config.encryption_key = newMedia.encryption_key;
|
||||
}
|
||||
} else if (newMedia.media_type === 'hdd') {
|
||||
if (!newMedia.mount_path) { toast.error("Mount path required"); return; }
|
||||
config.mount_path = newMedia.mount_path;
|
||||
@@ -92,7 +135,7 @@
|
||||
config.bucket_name = newMedia.bucket_name;
|
||||
config.provider = newMedia.cloud_provider;
|
||||
config.region = newMedia.cloud_region;
|
||||
if (newMedia.endpoint_url) config.endpoint_url = newMedia.endpoint_url;
|
||||
config.endpoint_url = newMedia.endpoint_url;
|
||||
}
|
||||
|
||||
try {
|
||||
@@ -106,160 +149,156 @@
|
||||
config: config
|
||||
}
|
||||
});
|
||||
toast.success(`${newMedia.identifier} registered`);
|
||||
toast.success(`${newMedia.identifier} registered in fleet`);
|
||||
showRegisterDialog = false;
|
||||
loadMedia();
|
||||
newMedia.identifier = '';
|
||||
newMedia.mount_path = '';
|
||||
} catch (error: any) {
|
||||
toast.error(error.body?.detail || "Registration failed");
|
||||
} catch (error) {
|
||||
toast.error("Failed to register media");
|
||||
}
|
||||
}
|
||||
|
||||
async function handleDelete(mediaId: number) {
|
||||
if (!confirm("Remove this media?")) return;
|
||||
if (!confirm("Are you sure? This will remove the media from the system index.")) return;
|
||||
try {
|
||||
await deleteMediaInventoryMediaMediaIdDelete({
|
||||
path: { media_id: mediaId }
|
||||
});
|
||||
toast.success("Removed");
|
||||
toast.success("Media removed from fleet");
|
||||
loadMedia();
|
||||
} catch (error: any) {
|
||||
toast.error(error.body?.detail || "Failed");
|
||||
toast.error(error.body?.detail || "Failed to delete media");
|
||||
}
|
||||
}
|
||||
|
||||
onMount(loadMedia);
|
||||
|
||||
function getPercentage(used: number, capacity: number) {
|
||||
if (capacity === 0) return 0;
|
||||
return Math.min(100, Math.round((used / capacity) * 100));
|
||||
}
|
||||
|
||||
function formatSize(bytes: number) {
|
||||
if (bytes === 0) return "0 GB";
|
||||
const gb = bytes / (1024 * 1024 * 1024);
|
||||
if (gb >= 1000) return `${(gb / 1024).toFixed(1)} TB`;
|
||||
if (gb >= 1000) return `${(gb / 1024).toFixed(2)} TB`;
|
||||
return `${gb.toFixed(0)} GB`;
|
||||
}
|
||||
|
||||
const totalCapacity = $derived(mediaList.reduce((acc, m) => acc + m.capacity, 0));
|
||||
const totalUsed = $derived(mediaList.reduce((acc, m) => acc + m.bytes_used, 0));
|
||||
const globalUtilization = $derived(totalCapacity > 0 ? Math.round((totalUsed / totalCapacity) * 100) : 0);
|
||||
onMount(loadMedia);
|
||||
</script>
|
||||
|
||||
<svelte:head>
|
||||
<title>Physical Media - TapeHoard</title>
|
||||
<title>Media Fleet - TapeHoard</title>
|
||||
</svelte:head>
|
||||
|
||||
<div class="flex flex-col gap-6 relative">
|
||||
<div class="flex flex-col gap-8 h-full overflow-hidden animate-in fade-in duration-700">
|
||||
<!-- Header -->
|
||||
<header class="flex justify-between items-center bg-bg-secondary px-8 py-5 rounded-xl border border-border-color shadow-2xl relative overflow-hidden shrink-0">
|
||||
<div class="absolute inset-0 bg-gradient-to-r from-blue-500/5 to-transparent pointer-events-none"></div>
|
||||
<div class="relative z-10">
|
||||
<h1 class="text-2xl font-black uppercase tracking-tighter text-text-primary flex items-center gap-3">
|
||||
<CassetteTape class="text-blue-500" size={28} />
|
||||
Physical Media
|
||||
Media Fleet
|
||||
</h1>
|
||||
<p class="text-[12px] font-bold uppercase tracking-widest text-text-secondary mt-1 opacity-80">Inventory & Media Configuration</p>
|
||||
<p class="text-[12px] font-bold uppercase tracking-widest text-text-secondary mt-1 opacity-80">
|
||||
Physical Asset Management & Archival Targets
|
||||
</p>
|
||||
</div>
|
||||
|
||||
<Button variant="default" size="lg" class="px-8 h-12 font-black uppercase tracking-widest text-[11px] z-10" onclick={() => showRegisterDialog = true}>
|
||||
<Plus size={18} class="mr-2" /> Register New Media
|
||||
</Button>
|
||||
</header>
|
||||
|
||||
{#if loading && mediaList.length === 0}
|
||||
<div class="flex flex-col items-center justify-center py-24 gap-4 opacity-50">
|
||||
<RotateCw size={48} class="animate-spin text-blue-500" />
|
||||
<span class="text-xs font-black uppercase tracking-widest">Auditing Fleet Status...</span>
|
||||
</div>
|
||||
{:else}
|
||||
<div class="space-y-6 animate-in fade-in slide-in-from-bottom-2 duration-500 flex-1">
|
||||
<!-- Stats -->
|
||||
<div class="grid grid-cols-1 md:grid-cols-4 gap-6">
|
||||
<Card class="bg-gradient-to-br from-bg-secondary to-bg-tertiary border-border-color p-5 flex items-center gap-4 shadow-xl">
|
||||
<div class="p-3 bg-blue-500/10 rounded-xl text-blue-500 border border-blue-500/20"><CassetteTape size={24} /></div>
|
||||
<div><span class="text-[10px] font-black uppercase tracking-widest text-text-secondary block">Total Media</span><span class="text-2xl font-black text-text-primary mono tracking-tighter">{mediaList.length}</span></div>
|
||||
</Card>
|
||||
<Card class="bg-gradient-to-br from-bg-secondary to-bg-tertiary border-border-color p-5 flex items-center gap-4 shadow-xl">
|
||||
<div class="p-3 bg-action-color/10 rounded-xl text-action-color border border-action-color/20"><Database size={24} /></div>
|
||||
<div><span class="text-[10px] font-black uppercase tracking-widest text-text-secondary block">Fleet Capacity</span><span class="text-2xl font-black text-text-primary mono tracking-tighter">{formatSize(totalCapacity)}</span></div>
|
||||
</Card>
|
||||
<Card class="bg-gradient-to-br from-bg-secondary to-bg-tertiary border-border-color p-5 flex items-center gap-4 shadow-xl">
|
||||
<div class="p-3 bg-success-color/10 rounded-xl text-success-color border border-success-color/20"><ShieldCheck size={24} /></div>
|
||||
<div><span class="text-[10px] font-black uppercase tracking-widest text-text-secondary block">Active Usage</span><span class="text-2xl font-black text-text-primary mono tracking-tighter">{formatSize(totalUsed)}</span></div>
|
||||
</Card>
|
||||
<Card class="bg-gradient-to-br from-bg-secondary to-bg-tertiary border-border-color p-5 flex items-center gap-4 shadow-xl">
|
||||
<div class="p-3 bg-orange-500/10 rounded-xl text-orange-500 border border-orange-500/20"><RotateCw size={24} /></div>
|
||||
<div><span class="text-[10px] font-black uppercase tracking-widest text-text-secondary block">Utilization</span><span class="text-2xl font-black text-text-primary mono tracking-tighter">{globalUtilization}%</span></div>
|
||||
</Card>
|
||||
</div>
|
||||
|
||||
<!-- Table -->
|
||||
<Card class="overflow-hidden border-border-color bg-bg-secondary shadow-2xl">
|
||||
<div class="overflow-x-auto">
|
||||
<table class="w-full text-left border-collapse">
|
||||
<!-- Content -->
|
||||
<div class="flex-1 overflow-y-auto pr-2 pb-12">
|
||||
<Card class="bg-bg-secondary border-border-color shadow-2xl overflow-hidden">
|
||||
<table class="w-full border-collapse">
|
||||
<thead>
|
||||
<tr class="bg-bg-tertiary/50 border-b border-border-color">
|
||||
<th class="px-8 py-5 text-[10px] font-black uppercase tracking-widest text-text-secondary">Identifier</th>
|
||||
<th class="px-8 py-5 text-[10px] font-black uppercase tracking-widest text-text-secondary">Spec / Tier</th>
|
||||
<th class="px-8 py-5 text-[10px] font-black uppercase tracking-widest text-text-secondary">System Config</th>
|
||||
<th class="px-8 py-5 text-[10px] font-black uppercase tracking-widest text-text-secondary">Usage</th>
|
||||
<th class="px-8 py-5 text-[10px] font-black uppercase tracking-widest text-text-secondary">Lifecycle</th>
|
||||
<th class="px-8 py-5 text-[10px] font-black uppercase tracking-widest text-text-secondary text-right">Actions</th>
|
||||
<th class="px-6 py-4 text-left text-[10px] font-black uppercase tracking-widest text-text-secondary">Primary</th>
|
||||
<th class="px-6 py-4 text-left text-[10px] font-black uppercase tracking-widest text-text-secondary">Identity</th>
|
||||
<th class="px-6 py-4 text-left text-[10px] font-black uppercase tracking-widest text-text-secondary">Type & Tier</th>
|
||||
<th class="px-6 py-4 text-left text-[10px] font-black uppercase tracking-widest text-text-secondary">Location</th>
|
||||
<th class="px-6 py-4 text-left text-[10px] font-black uppercase tracking-widest text-text-secondary">Utilization</th>
|
||||
<th class="px-6 py-4 text-left text-[10px] font-black uppercase tracking-widest text-text-secondary">Status</th>
|
||||
<th class="px-6 py-4 text-right text-[10px] font-black uppercase tracking-widest text-text-secondary">Actions</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody class="divide-y divide-border-color/40">
|
||||
<tbody class="divide-y divide-border-color/30">
|
||||
{#each mediaList as media (media.id)}
|
||||
<tr class="hover:bg-white/[0.02] transition-colors group">
|
||||
<td class="px-8 py-5">
|
||||
<div class="flex flex-col">
|
||||
<span class="mono font-black text-text-primary text-sm">{media.identifier}</span>
|
||||
<span class="text-[9px] font-bold text-text-secondary/50 uppercase tracking-tighter">LOC: {media.location || 'Unknown'}</span>
|
||||
<tr class="hover:bg-bg-primary/30 transition-colors group">
|
||||
<td class="px-6 py-4">
|
||||
<button
|
||||
class={cn(
|
||||
"p-2 rounded-lg border transition-all",
|
||||
primaryTargetId === media.id.toString()
|
||||
? "bg-yellow-500/10 border-yellow-500/50 text-yellow-500 shadow-[0_0_10px_rgba(234,179,8,0.2)]"
|
||||
: "bg-bg-primary border-border-color text-text-secondary opacity-20 hover:opacity-100 hover:border-yellow-500/30"
|
||||
)}
|
||||
onclick={() => setPrimaryTarget(media.id)}
|
||||
title="Set as primary archival target"
|
||||
>
|
||||
<Star size={16} fill={primaryTargetId === media.id.toString() ? "currentColor" : "none"} />
|
||||
</button>
|
||||
</td>
|
||||
<td class="px-6 py-4">
|
||||
<div class="flex items-center gap-3">
|
||||
<div class="p-2 bg-blue-500/10 rounded-lg text-blue-500">
|
||||
{#if media.media_type === 'tape'}<CassetteTape size={18} />{/if}
|
||||
{#if media.media_type === 'hdd'}<HardDrive size={18} />{/if}
|
||||
{#if media.media_type === 'cloud'}<Cloud size={18} />{/if}
|
||||
</div>
|
||||
</td>
|
||||
<td class="px-8 py-5">
|
||||
<span class="inline-flex items-center gap-1.5 px-2.5 py-1 rounded-lg bg-bg-primary text-text-primary text-[10px] font-black border border-border-color uppercase">
|
||||
{#if media.media_type === 'tape'}<CassetteTape size={12} class="text-blue-400" />{:else if media.media_type === 'hdd'}<HardDrive size={12} class="text-yellow-400" />{:else}<Cloud size={12} class="text-green-400" />{/if}
|
||||
{media.generation_tier || media.media_type}
|
||||
<div>
|
||||
<span class="text-sm font-black text-text-primary mono tracking-tight">{media.identifier}</span>
|
||||
<div class="flex gap-2 mt-0.5">
|
||||
{#if media.config?.encryption_key}
|
||||
<span class="text-[8px] font-black uppercase tracking-tighter text-blue-400 bg-blue-500/10 px-1 rounded flex items-center gap-1">
|
||||
<ShieldCheck size={8} /> ENCRYPTED
|
||||
</span>
|
||||
</td>
|
||||
<td class="px-8 py-5">
|
||||
<div class="flex flex-col gap-1">
|
||||
{#if media.media_type === 'tape' && media.config.device_path}
|
||||
<div class="flex items-center gap-1.5 text-[10px] font-bold mono text-text-secondary"><Monitor size={10} class="opacity-50" /> {media.config.device_path}</div>
|
||||
{:else if media.media_type === 'hdd' && media.config.mount_path}
|
||||
<div class="flex items-center gap-1.5 text-[10px] font-bold mono text-text-secondary"><HardDrive size={10} class="opacity-50" /> {media.config.mount_path}</div>
|
||||
{:else if media.media_type === 'cloud' && media.config.bucket_name}
|
||||
<div class="flex items-center gap-1.5 text-[10px] font-bold mono text-text-secondary"><Globe size={10} class="opacity-50" /> {media.config.bucket_name}</div>
|
||||
{:else}
|
||||
<span class="text-[9px] font-bold uppercase tracking-tighter text-text-secondary opacity-30">No config</span>
|
||||
{/if}
|
||||
</div>
|
||||
</td>
|
||||
<td class="px-8 py-5">
|
||||
<div class="flex flex-col gap-2 w-40">
|
||||
<div class="w-full bg-bg-primary rounded-full h-1.5 overflow-hidden shadow-inner border border-white/5">
|
||||
<div class={cn("h-full transition-all", media.status === 'full' ? 'bg-error-color' : 'bg-blue-500 shadow-[0_0_10px_rgba(59,130,246,0.3)]')} style="width: {getPercentage(media.bytes_used, media.capacity)}%"></div>
|
||||
</div>
|
||||
<span class="mono text-[9px] font-bold text-text-secondary opacity-70">{formatSize(media.bytes_used)} / {formatSize(media.capacity)}</span>
|
||||
</div>
|
||||
</td>
|
||||
<td class="px-8 py-5">
|
||||
<div class="flex items-center gap-2 text-[11px] font-black uppercase tracking-wider text-text-primary">
|
||||
<div class={cn("w-2.5 h-2.5 rounded-full", media.status === 'active' ? 'bg-success-color shadow-[0_0_10px_rgba(46,204,113,0.5)]' : 'bg-error-color')}></div>
|
||||
<td class="px-6 py-4">
|
||||
<span class="text-[10px] font-bold uppercase text-text-secondary">{media.media_type}</span>
|
||||
<span class="text-[10px] font-medium text-text-secondary/40 ml-2 border-l border-border-color pl-2">{media.generation_tier || 'Generic'}</span>
|
||||
</td>
|
||||
<td class="px-6 py-4">
|
||||
<div class="flex items-center gap-1.5 text-text-secondary">
|
||||
<MapPin size={12} class="opacity-40" />
|
||||
<span class="text-[11px] font-bold uppercase tracking-tight">{media.location || 'Unknown'}</span>
|
||||
</div>
|
||||
</td>
|
||||
<td class="px-6 py-4">
|
||||
<div class="w-32 space-y-1.5">
|
||||
<div class="flex justify-between text-[9px] font-black mono text-text-secondary uppercase">
|
||||
<span>{formatSize(media.bytes_used)}</span>
|
||||
<span class="opacity-40">/ {formatSize(media.capacity)}</span>
|
||||
</div>
|
||||
<div class="w-full bg-bg-primary h-1.5 rounded-full border border-border-color overflow-hidden">
|
||||
<div class="bg-blue-500 h-full transition-all duration-1000" style="width: {(media.bytes_used / media.capacity) * 100}%"></div>
|
||||
</div>
|
||||
</div>
|
||||
</td>
|
||||
<td class="px-6 py-4">
|
||||
<span class={cn("px-2.5 py-1 rounded text-[9px] font-black uppercase tracking-widest border",
|
||||
media.status === 'active' ? "bg-success-color/10 text-success-color border-success-color/20" : "bg-bg-primary text-text-secondary border-border-color"
|
||||
)}>
|
||||
{media.status}
|
||||
</div>
|
||||
</span>
|
||||
</td>
|
||||
<td class="px-8 py-5 text-right">
|
||||
<div class="flex justify-end gap-2 opacity-0 group-hover:opacity-100 transition-all">
|
||||
<td class="px-6 py-4 text-right">
|
||||
<div class="flex items-center justify-end gap-2">
|
||||
{#if media.status === 'active'}
|
||||
<Button
|
||||
variant="secondary"
|
||||
size="sm"
|
||||
class="h-9 px-4 font-black uppercase tracking-widest text-[9px] border-blue-500/30 text-blue-400 hover:bg-blue-500/10"
|
||||
class="h-9 px-4 font-black uppercase tracking-widest text-[9px] border-action-color/30 text-action-color hover:bg-action-color/10"
|
||||
onclick={() => handleInitialize(media.id, media.identifier)}
|
||||
>
|
||||
<RotateCw size={14} class="mr-1.5" /> Initialize
|
||||
</Button>
|
||||
<Button
|
||||
variant="secondary"
|
||||
size="sm"
|
||||
class="h-9 px-4 font-black uppercase tracking-widest text-[9px] border-success-color/30 text-success-color hover:bg-success-color/10"
|
||||
onclick={() => handleStartBackup(media.id, media.identifier)}
|
||||
>
|
||||
<PlayCircle size={14} class="mr-1.5" /> Start Backup
|
||||
<PlayCircle size={14} class="mr-1.5" /> Initiate Archival
|
||||
</Button>
|
||||
{/if}
|
||||
<Button variant="ghost" size="icon" class="h-9 w-9 hover:bg-error-color/10 hover:text-error-color" onclick={() => handleDelete(media.id)}><Trash2 size={16} /></Button>
|
||||
@@ -267,31 +306,25 @@
|
||||
</td>
|
||||
</tr>
|
||||
{:else}
|
||||
<tr><td colspan="6" class="px-8 py-24 text-center opacity-20"><Database size={48} class="mx-auto mb-3" /><p class="text-sm font-black uppercase tracking-[0.2em]">No Media Assets Registered</p></td></tr>
|
||||
<tr><td colspan="7" class="px-8 py-24 text-center opacity-20"><Database size={48} class="mx-auto mb-3" /><p class="text-sm font-black uppercase tracking-[0.2em]">No Media Assets Registered</p></td></tr>
|
||||
{/each}
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
</Card>
|
||||
</div>
|
||||
{/if}
|
||||
</div>
|
||||
|
||||
<!-- REGISTRATION DIALOG -->
|
||||
{#if showRegisterDialog}
|
||||
<div class="fixed inset-0 z-[999] flex items-center justify-center p-4 overflow-y-auto bg-black/80 backdrop-blur-md">
|
||||
<div class="absolute inset-0 cursor-pointer" onclick={() => showRegisterDialog = false} role="none"></div>
|
||||
<Card class="relative z-[1000] w-[600px] bg-bg-secondary border-border-color shadow-[0_30px_150px_rgba(0,0,0,1)] overflow-hidden animate-in zoom-in-95 duration-300 my-auto">
|
||||
<div class="p-8 border-b border-border-color bg-bg-tertiary/30">
|
||||
<div class="flex justify-between items-center mb-2">
|
||||
<h2 class="text-2xl font-black uppercase tracking-tighter text-text-primary">Register Media</h2>
|
||||
<button class="text-text-secondary hover:text-text-primary" onclick={() => showRegisterDialog = false}><X size={24} /></button>
|
||||
</div>
|
||||
<p class="text-[11px] font-bold uppercase tracking-widest text-text-secondary opacity-60">Provision new physical storage unit</p>
|
||||
<!-- Registration Dialog -->
|
||||
{#if showRegisterDialog}
|
||||
<div class="fixed inset-0 bg-black/80 backdrop-blur-sm z-[100] flex items-center justify-center p-6" onmousedown={() => showRegisterDialog = false}>
|
||||
<Card class="w-[700px] max-h-[90vh] overflow-y-auto bg-bg-secondary border-border-color shadow-2xl p-10 flex flex-col gap-8 animate-in zoom-in-95 duration-300" onmousedown={(e) => e.stopPropagation()}>
|
||||
<header class="flex justify-between items-start">
|
||||
<div>
|
||||
<h2 class="text-2xl font-black text-text-primary uppercase tracking-tighter">Register Fleet Asset</h2>
|
||||
<p class="text-[11px] font-bold text-text-secondary uppercase tracking-widest mt-1 opacity-60">Provisioning physical storage for the unified index.</p>
|
||||
</div>
|
||||
<Button variant="ghost" size="icon" class="hover:bg-white/5" onclick={() => showRegisterDialog = false}><X size={24} /></Button>
|
||||
</header>
|
||||
|
||||
<div class="p-8 space-y-8">
|
||||
<!-- Type Selection -->
|
||||
<div class="grid grid-cols-3 gap-4">
|
||||
{#each ['tape', 'hdd', 'cloud'] as type}
|
||||
<button class={cn("flex flex-col items-center gap-3 p-4 rounded-xl border-2 transition-all", newMedia.media_type === type ? "bg-blue-500/10 border-blue-500 text-blue-400 shadow-[0_0_20px_rgba(59,130,246,0.15)]" : "bg-bg-primary/50 border-border-color text-text-secondary hover:border-text-secondary/30")} onclick={() => newMedia.media_type = type}>
|
||||
@@ -315,9 +348,18 @@
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="space-y-2">
|
||||
<label class="text-[10px] font-black uppercase tracking-widest text-text-secondary ml-1" for="location">Physical Location</label>
|
||||
<div class="relative">
|
||||
<MapPin size={16} class="absolute left-4 top-3.5 text-text-secondary opacity-50" />
|
||||
<Input id="location" bind:value={newMedia.location} placeholder="Cabinet A, Shelf 2" class="h-12 bg-bg-primary/50 pl-12 border-border-color font-mono text-sm" />
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Type Specific Fields -->
|
||||
{#if newMedia.media_type === 'tape'}
|
||||
<div class="grid grid-cols-2 gap-6 animate-in slide-in-from-top-2">
|
||||
<div class="space-y-6 animate-in slide-in-from-top-2">
|
||||
<div class="grid grid-cols-2 gap-6">
|
||||
<div class="space-y-2">
|
||||
<label class="text-[10px] font-black uppercase tracking-widest text-text-secondary ml-1" for="device">Tape Device Path</label>
|
||||
<Input id="device" bind:value={newMedia.device_path} class="h-12 bg-bg-primary/50 border-border-color font-mono text-sm" />
|
||||
@@ -327,6 +369,34 @@
|
||||
<Input id="tier" bind:value={newMedia.generation_tier} class="h-12 bg-bg-primary/50 border-border-color" />
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="p-5 bg-bg-tertiary/50 border border-border-color rounded-xl space-y-4">
|
||||
<div class="flex items-center justify-between">
|
||||
<div class="flex items-center gap-2">
|
||||
<ShieldCheck size={18} class="text-blue-400" />
|
||||
<div class="flex flex-col">
|
||||
<span class="text-[11px] font-black uppercase tracking-widest text-text-primary">Hardware LTO Encryption</span>
|
||||
<span class="text-[9px] text-text-secondary font-medium uppercase tracking-tighter opacity-50 italic">FIPS 140-2 Level 4 COMPLIANT</span>
|
||||
</div>
|
||||
</div>
|
||||
<input type="checkbox" bind:checked={newMedia.enable_encryption} class="w-5 h-5 rounded-md border-border-color bg-bg-primary text-blue-500 focus:ring-blue-500/20" />
|
||||
</div>
|
||||
|
||||
{#if newMedia.enable_encryption}
|
||||
<div class="space-y-2 pt-2 animate-in fade-in slide-in-from-top-2 duration-300">
|
||||
<label for="enc_key" class="text-[9px] font-black uppercase tracking-widest text-text-secondary opacity-50 ml-1">256-bit HEX Encryption Key (32 Bytes)</label>
|
||||
<Input
|
||||
id="enc_key"
|
||||
type="password"
|
||||
bind:value={newMedia.encryption_key}
|
||||
placeholder="00112233445566778899aabbccddeeff..."
|
||||
class="h-11 bg-bg-primary/80 border-blue-500/30 font-mono text-xs focus:border-blue-500/60"
|
||||
/>
|
||||
<p class="text-[9px] text-text-secondary leading-relaxed opacity-60">WARNING: If you lose this key, the data on this tape is permanently unrecoverable. TapeHoard does not store keys in plain-text.</p>
|
||||
</div>
|
||||
{/if}
|
||||
</div>
|
||||
</div>
|
||||
{:else if newMedia.media_type === 'hdd'}
|
||||
<div class="grid grid-cols-2 gap-6 animate-in slide-in-from-top-2">
|
||||
<div class="space-y-2">
|
||||
@@ -357,25 +427,18 @@
|
||||
</div>
|
||||
<div class="space-y-2">
|
||||
<label class="text-[10px] font-black uppercase tracking-widest text-text-secondary ml-1" for="endpoint">Endpoint URL (Optional)</label>
|
||||
<Input id="endpoint" bind:value={newMedia.endpoint_url} placeholder="https://s3.us-west-004..." class="h-12 bg-bg-primary/50 border-border-color font-mono text-sm" />
|
||||
<Input id="endpoint" bind:value={newMedia.endpoint_url} placeholder="https://s3.amazonaws.com" class="h-12 bg-bg-primary/50 border-border-color font-mono text-sm" />
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
{/if}
|
||||
|
||||
<div class="space-y-2">
|
||||
<label class="text-[10px] font-black uppercase tracking-widest text-text-secondary ml-1" for="location">Physical Location</label>
|
||||
<Input id="location" bind:value={newMedia.location} placeholder="e.g. Bank Vault" class="h-12 bg-bg-primary/50 border-border-color" />
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="p-8 bg-bg-tertiary/30 border-t border-border-color flex gap-4">
|
||||
<footer class="flex gap-3 pt-4 border-t border-border-color">
|
||||
<Button variant="outline" class="flex-1 h-12 font-black uppercase tracking-widest text-[11px]" onclick={() => showRegisterDialog = false}>Cancel</Button>
|
||||
<Button variant="default" class="flex-1 h-12 font-black uppercase tracking-widest text-[11px] shadow-lg shadow-blue-500/20" onclick={handleRegister}>
|
||||
<Save size={18} class="mr-2" /> Commit to Fleet
|
||||
</Button>
|
||||
</div>
|
||||
<Button variant="default" class="flex-[2] h-12 font-black uppercase tracking-widest text-[11px]" onclick={handleRegister}>Register Media</Button>
|
||||
</footer>
|
||||
</Card>
|
||||
</div>
|
||||
{/if}
|
||||
{/if}
|
||||
</div>
|
||||
|
||||
@@ -144,20 +144,33 @@
|
||||
<span class="text-[10px] font-black uppercase tracking-widest text-text-secondary truncate max-w-[300px]">
|
||||
{job.current_task || 'Waiting in queue...'}
|
||||
</span>
|
||||
{#if job.job_type !== 'SCAN' || job.status === 'COMPLETED'}
|
||||
<span class="text-xs font-bold mono text-text-primary">
|
||||
{job.progress.toFixed(1)}%
|
||||
{job.status === 'COMPLETED' ? '100.0' : job.progress.toFixed(1)}%
|
||||
</span>
|
||||
{/if}
|
||||
</div>
|
||||
<div class="w-full bg-bg-primary h-2 rounded-full border border-border-color shadow-inner overflow-hidden">
|
||||
{#if job.job_type !== 'SCAN' || job.status !== 'RUNNING'}
|
||||
<div class="w-full bg-bg-primary h-2.5 rounded-full border border-border-color shadow-inner overflow-hidden">
|
||||
<div
|
||||
class={cn(
|
||||
"h-full transition-all duration-500",
|
||||
job.status === 'RUNNING' ? 'bg-blue-500 shadow-[0_0_10px_rgba(59,130,246,0.3)]' :
|
||||
job.status === 'FAILED' ? 'bg-error-color' : 'bg-success-color'
|
||||
)}
|
||||
style="width: {job.progress}%"
|
||||
style="width: {job.status === 'COMPLETED' ? 100 : job.progress}%"
|
||||
></div>
|
||||
</div>
|
||||
{:else}
|
||||
<div class="flex items-center gap-3 h-2.5">
|
||||
<div class="flex-1 bg-bg-primary h-1 rounded-full border border-border-color/30 relative overflow-hidden">
|
||||
<div class="absolute inset-0 bg-blue-500/20 animate-pulse"></div>
|
||||
</div>
|
||||
<span class="text-[9px] font-black uppercase tracking-widest text-blue-400 whitespace-nowrap animate-pulse">
|
||||
Streaming Discovery
|
||||
</span>
|
||||
</div>
|
||||
{/if}
|
||||
</div>
|
||||
|
||||
<!-- Timing Stats -->
|
||||
|
||||
@@ -10,79 +10,147 @@
|
||||
ArrowRight,
|
||||
X,
|
||||
FileText,
|
||||
Info,
|
||||
ShieldCheck,
|
||||
MapPin
|
||||
ShieldAlert,
|
||||
MapPin,
|
||||
Library
|
||||
} from 'lucide-svelte';
|
||||
import { Button } from '$lib/components/ui/button';
|
||||
import { Card } from '$lib/components/ui/card';
|
||||
import { ScrollArea } from '$lib/components/ui/scroll-area';
|
||||
import FileBrowser from '$lib/components/file-browser/FileBrowser.svelte';
|
||||
import type { FileItem } from '$lib/types';
|
||||
import {
|
||||
listCartRestoresCartGet,
|
||||
getManifestRestoresManifestGet,
|
||||
removeFromCartRestoresCartItemIdDelete,
|
||||
clearCartRestoresCartClearPost,
|
||||
getSettingsSystemSettingsGet,
|
||||
triggerRestoreRestoresTriggerPost,
|
||||
browseCartRestoresCartBrowseGet,
|
||||
type CartItemSchema,
|
||||
type RestoreManifestSchema
|
||||
type RestoreManifestSchema,
|
||||
type CartFileItemSchema
|
||||
} from '$lib/api';
|
||||
import { cn } from '$lib/utils';
|
||||
import { toast } from 'svelte-sonner';
|
||||
|
||||
let cartItems = $state<CartItemSchema[]>([]);
|
||||
let currentPath = $state('ROOT');
|
||||
let cartFiles = $state<FileItem[]>([]);
|
||||
let manifest = $state<RestoreManifestSchema | null>(null);
|
||||
let restoreDests = $state<string[]>([]);
|
||||
let selectedDest = $state("");
|
||||
let loading = $state(true);
|
||||
let restoring = $state(false);
|
||||
|
||||
async function loadData() {
|
||||
loading = true;
|
||||
try {
|
||||
const [cartRes, manifestRes, settingsRes] = await Promise.all([
|
||||
listCartRestoresCartGet(),
|
||||
getManifestRestoresManifestGet(),
|
||||
getSettingsSystemSettingsGet()
|
||||
]);
|
||||
|
||||
if (cartRes.data) cartItems = cartRes.data;
|
||||
if (manifestRes.data) manifest = manifestRes.data;
|
||||
|
||||
const settingsRes = await getSettingsSystemSettingsGet();
|
||||
if (settingsRes.data?.restore_destinations) {
|
||||
restoreDests = JSON.parse(settingsRes.data.restore_destinations);
|
||||
if (restoreDests.length > 0) selectedDest = restoreDests[0];
|
||||
if (restoreDests.length > 0 && !selectedDest) selectedDest = restoreDests[0];
|
||||
}
|
||||
|
||||
await Promise.all([
|
||||
loadCartFiles(currentPath),
|
||||
refreshManifest()
|
||||
]);
|
||||
|
||||
} catch (error) {
|
||||
toast.error("Failed to load restore details");
|
||||
console.error("Failed to load recovery details:", error);
|
||||
toast.error("Failed to load recovery queue");
|
||||
} finally {
|
||||
loading = false;
|
||||
}
|
||||
}
|
||||
|
||||
async function removeItem(itemId: number) {
|
||||
async function refreshManifest() {
|
||||
try {
|
||||
await removeFromCartRestoresCartItemIdDelete({ path: { item_id: itemId } });
|
||||
await loadData();
|
||||
} catch (error) {
|
||||
toast.error("Failed to remove item");
|
||||
const manifestRes = await getManifestRestoresManifestGet();
|
||||
if (manifestRes.data) manifest = manifestRes.data;
|
||||
} catch (err) {
|
||||
console.error("Failed to load manifest:", err);
|
||||
}
|
||||
}
|
||||
|
||||
async function loadCartFiles(path: string) {
|
||||
loading = true;
|
||||
try {
|
||||
const response = await browseCartRestoresCartBrowseGet({
|
||||
query: { path }
|
||||
});
|
||||
if (response.data) {
|
||||
cartFiles = (response.data as CartFileItemSchema[]).map(f => ({
|
||||
name: f.name,
|
||||
path: f.path,
|
||||
type: f.type as any,
|
||||
size: f.size ?? null,
|
||||
media: f.media ?? []
|
||||
}));
|
||||
}
|
||||
} catch (error) {
|
||||
console.error("Failed to browse cart:", error);
|
||||
toast.error("Failed to load recovery folder structure");
|
||||
} finally {
|
||||
loading = false;
|
||||
}
|
||||
}
|
||||
|
||||
$effect(() => {
|
||||
if (currentPath) {
|
||||
loadCartFiles(currentPath);
|
||||
}
|
||||
});
|
||||
|
||||
async function initiateRestore() {
|
||||
if (!selectedDest) {
|
||||
toast.error("Please select a recovery destination");
|
||||
return;
|
||||
}
|
||||
|
||||
restoring = true;
|
||||
try {
|
||||
await triggerRestoreRestoresTriggerPost({
|
||||
body: { destination: selectedDest }
|
||||
});
|
||||
toast.success("Recovery job initiated! Check System Activity for progress.");
|
||||
// Reset queue UI
|
||||
cartFiles = [];
|
||||
manifest = null;
|
||||
} catch (error: any) {
|
||||
toast.error(error.body?.detail || "Failed to initiate recovery");
|
||||
} finally {
|
||||
restoring = false;
|
||||
}
|
||||
}
|
||||
|
||||
async function handleRemove(item: FileItem) {
|
||||
// Find the DB ID for this specific file in the cart
|
||||
// This is a bit tricky with browseCart as it doesn't return cart_item.id
|
||||
// We'll just clear the whole folder or rely on the Data Recovery page being a tree view
|
||||
// For now, removing individual items from the tree isn't fully implemented
|
||||
// so we'll just show a toast instruction.
|
||||
toast.info("Individual item removal from tree view coming soon. Use 'Clear Queue' for now.");
|
||||
}
|
||||
|
||||
async function clearCart() {
|
||||
if (!confirm("Clear entire restore cart?")) return;
|
||||
if (!confirm("Are you sure you want to clear the entire recovery queue?")) return;
|
||||
try {
|
||||
await clearCartRestoresCartClearPost();
|
||||
cartFiles = [];
|
||||
manifest = null;
|
||||
await loadData();
|
||||
toast.info("Cart cleared");
|
||||
} catch (error) {
|
||||
toast.error("Failed to clear cart");
|
||||
toast.info("Recovery queue cleared");
|
||||
} catch (error: any) {
|
||||
console.error("Failed to clear queue:", error);
|
||||
toast.error("Failed to clear recovery queue");
|
||||
}
|
||||
}
|
||||
|
||||
onMount(loadData);
|
||||
|
||||
function formatSize(bytes: number) {
|
||||
if (bytes === 0) return "0 B";
|
||||
const units = ["B", "KB", "MB", "GB", "TB", "PB"];
|
||||
const units = ["B", "KB", "MB", "GB", "TB"];
|
||||
let unitIndex = 0;
|
||||
let size = bytes;
|
||||
while (size >= 1024 && unitIndex < units.length - 1) {
|
||||
@@ -91,171 +159,161 @@
|
||||
}
|
||||
return `${size.toFixed(1)} ${units[unitIndex]}`;
|
||||
}
|
||||
|
||||
onMount(loadData);
|
||||
</script>
|
||||
|
||||
<svelte:head>
|
||||
<title>Restore Management - TapeHoard</title>
|
||||
<title>Data Recovery - TapeHoard</title>
|
||||
</svelte:head>
|
||||
|
||||
<div class="flex flex-col gap-6 h-full">
|
||||
<!-- HEADER -->
|
||||
<header class="flex justify-between items-center bg-bg-secondary px-8 py-5 rounded-xl border border-border-color shadow-2xl relative overflow-hidden">
|
||||
<div class="flex flex-col gap-6 h-full overflow-hidden animate-in fade-in duration-700">
|
||||
<!-- Header -->
|
||||
<header class="flex justify-between items-center bg-bg-secondary px-8 py-5 rounded-xl border border-border-color shadow-2xl relative overflow-hidden shrink-0">
|
||||
<div class="absolute inset-0 bg-gradient-to-r from-success-color/5 to-transparent pointer-events-none"></div>
|
||||
<div class="relative z-10">
|
||||
<h1 class="text-2xl font-black uppercase tracking-tighter text-text-primary flex items-center gap-3">
|
||||
<History class="text-success-color" size={28} />
|
||||
Restore Management
|
||||
Data Recovery
|
||||
</h1>
|
||||
<p class="text-[12px] font-bold uppercase tracking-widest text-text-secondary mt-1 opacity-80">
|
||||
Cart Review & Physical Media Manifest
|
||||
Recovery Queue & Physical Media Manifest
|
||||
</p>
|
||||
</div>
|
||||
|
||||
<div class="flex gap-3 z-10">
|
||||
<Button variant="outline" class="h-10 px-6 font-black uppercase tracking-widest text-[10px] border-border-color hover:bg-error-color/5 hover:text-error-color hover:border-error-color/30" onclick={clearCart} disabled={cartItems.length === 0}>
|
||||
<Trash2 size={14} class="mr-2" /> Clear Cart
|
||||
<Button variant="outline" class="h-10 px-6 font-black uppercase tracking-widest text-[10px] border-border-color hover:bg-error-color/5 hover:text-error-color hover:border-error-color/30" onclick={clearCart} disabled={(manifest?.total_files || 0) === 0}>
|
||||
<Trash2 size={14} class="mr-2" /> Clear Queue
|
||||
</Button>
|
||||
<Button variant="default" class="h-10 px-6 font-black uppercase tracking-widest text-[10px] bg-success-color hover:bg-success-color/90" disabled={cartItems.length === 0 || !selectedDest}>
|
||||
<ShieldCheck size={14} class="mr-2" /> Initiate Restore
|
||||
<Button variant="default" class="h-10 px-6 font-black uppercase tracking-widest text-[10px] bg-success-color hover:bg-success-color/90" disabled={(manifest?.total_files || 0) === 0 || !selectedDest || restoring} onclick={initiateRestore}>
|
||||
{#if restoring}
|
||||
<RotateCw size={14} class="mr-2 animate-spin" /> Starting...
|
||||
{:else}
|
||||
<ShieldCheck size={14} class="mr-2" /> Initiate Recovery
|
||||
{/if}
|
||||
</Button>
|
||||
</div>
|
||||
</header>
|
||||
|
||||
{#if loading && cartItems.length === 0}
|
||||
<div class="flex flex-col items-center justify-center py-24 gap-4 opacity-50">
|
||||
<RotateCw size={48} class="animate-spin text-success-color" />
|
||||
<span class="text-xs font-black uppercase tracking-widest">Generating Manifest...</span>
|
||||
{#if (manifest?.total_files || 0) === 0 && !loading}
|
||||
<div class="flex-1 flex flex-col items-center justify-center p-12 text-center animate-in fade-in zoom-in duration-500 w-full">
|
||||
<div class="w-24 h-24 bg-bg-tertiary rounded-full flex items-center justify-center mb-8 border-2 border-dashed border-border-color opacity-50">
|
||||
<History size={48} class="text-text-secondary" strokeWidth={1} />
|
||||
</div>
|
||||
{:else if cartItems.length === 0}
|
||||
<Card class="flex-1 border-2 border-dashed border-border-color flex flex-col items-center justify-center p-12 text-center opacity-30">
|
||||
<History size={64} class="mb-4" strokeWidth={1} />
|
||||
<p class="text-lg font-black uppercase tracking-widest">Restore Cart is Empty</p>
|
||||
<p class="text-[11px] font-bold uppercase tracking-[0.2em] mt-2">Go to the Index Browser to select files for recovery.</p>
|
||||
<Button variant="outline" class="mt-8 border-border-color" href="/index-browser">
|
||||
Browse Index <ArrowRight size={14} class="ml-2" />
|
||||
<h2 class="text-2xl font-black uppercase tracking-tighter text-text-primary">Recovery Queue is Empty</h2>
|
||||
<p class="text-[11px] font-bold uppercase tracking-[0.2em] mt-3 text-text-secondary max-w-xl leading-relaxed opacity-60">
|
||||
You haven't selected any files for restoration yet. Use the Index Browser to find and queue the items you need to recover from your fleet.
|
||||
</p>
|
||||
<Button variant="default" class="mt-10 h-12 px-8 font-black uppercase tracking-widest text-[11px] shadow-lg shadow-blue-500/20" href="/index-browser">
|
||||
Browse Virtual Index <ArrowRight size={14} class="ml-2" />
|
||||
</Button>
|
||||
</Card>
|
||||
</div>
|
||||
{:else}
|
||||
<div class="grid grid-cols-1 lg:grid-cols-3 gap-8 flex-1 min-h-0">
|
||||
<!-- CART LIST -->
|
||||
<div class="lg:col-span-2 flex flex-col min-h-0">
|
||||
<Card class="flex-1 overflow-hidden flex flex-col bg-bg-secondary border-border-color shadow-xl">
|
||||
<div class="p-6 border-b border-border-color flex justify-between items-center bg-bg-tertiary/30">
|
||||
<h3 class="text-[11px] font-black uppercase tracking-widest text-text-primary">Queued for Restore ({cartItems.length})</h3>
|
||||
<span class="text-xs font-bold mono text-text-secondary">{formatSize(manifest?.total_size || 0)}</span>
|
||||
<!-- RECOVERY STRUCTURE TREE -->
|
||||
<div class="lg:col-span-2 flex flex-col min-h-0 relative min-w-0">
|
||||
{#if loading && cartFiles.length === 0}
|
||||
<div class="absolute inset-0 bg-bg-primary/50 z-50 flex items-center justify-center rounded-lg">
|
||||
<RotateCw size={32} class="animate-spin text-blue-500" />
|
||||
</div>
|
||||
<ScrollArea class="flex-1">
|
||||
<div class="divide-y divide-border-color/30">
|
||||
{#each cartItems as item (item.id)}
|
||||
<div class="p-4 flex items-center justify-between hover:bg-white/[0.02] transition-colors group">
|
||||
<div class="flex items-center gap-4 min-w-0">
|
||||
<div class="p-2 bg-bg-primary rounded-lg border border-border-color/50 text-text-secondary">
|
||||
<FileText size={18} />
|
||||
</div>
|
||||
<div class="flex flex-col min-w-0">
|
||||
<span class="text-[13px] font-bold text-text-primary truncate">{item.file_path.split('/').pop()}</span>
|
||||
<span class="text-[10px] mono text-text-secondary/50 truncate italic">{item.file_path}</span>
|
||||
</div>
|
||||
</div>
|
||||
<div class="flex items-center gap-6 shrink-0">
|
||||
<div class="flex gap-1">
|
||||
{#each item.media_identifiers as media}
|
||||
<span class="text-[9px] font-black uppercase tracking-tighter bg-blue-500/10 text-blue-400 px-2 py-0.5 rounded border border-blue-500/20">{media}</span>
|
||||
{/each}
|
||||
</div>
|
||||
<span class="text-xs font-bold mono text-text-secondary w-20 text-right">{formatSize(item.size)}</span>
|
||||
<button class="text-text-secondary hover:text-error-color opacity-0 group-hover:opacity-100 transition-all p-1" onclick={() => removeItem(item.id)}>
|
||||
<X size={16} />
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
{/each}
|
||||
</div>
|
||||
</ScrollArea>
|
||||
</Card>
|
||||
</div>
|
||||
|
||||
<!-- MANIFEST SIDEBAR -->
|
||||
<div class="flex flex-col gap-6">
|
||||
<!-- Recovery Destination Card -->
|
||||
<Card class="p-8 bg-bg-secondary border-border-color shadow-xl">
|
||||
<h3 class="text-xs font-black uppercase tracking-widest text-text-primary mb-6 flex items-center gap-2">
|
||||
<MapPin size={14} class="text-action-color" />
|
||||
Recovery Destination
|
||||
</h3>
|
||||
|
||||
<div class="space-y-4">
|
||||
<div class="grid grid-cols-1 gap-2">
|
||||
{#each restoreDests as dest}
|
||||
<button
|
||||
class={cn(
|
||||
"flex items-center gap-3 p-3 rounded-lg border transition-all text-left group",
|
||||
selectedDest === dest
|
||||
? "bg-action-color/10 border-action-color text-text-primary shadow-[0_0_15px_rgba(52,152,219,0.1)]"
|
||||
: "bg-bg-primary/50 border-border-color text-text-secondary hover:border-text-secondary/30"
|
||||
)}
|
||||
onclick={() => selectedDest = dest}
|
||||
>
|
||||
<div class={cn(
|
||||
"w-2 h-2 rounded-full",
|
||||
selectedDest === dest ? "bg-action-color animate-pulse" : "bg-border-color"
|
||||
)}></div>
|
||||
<span class="text-[11px] font-bold mono truncate">{dest}</span>
|
||||
</button>
|
||||
{:else}
|
||||
<div class="p-4 border-2 border-dashed border-border-color rounded-lg text-center">
|
||||
<p class="text-[10px] font-black uppercase tracking-widest text-text-secondary/50">No targets defined in settings</p>
|
||||
</div>
|
||||
{/each}
|
||||
</div>
|
||||
<p class="text-[9px] font-bold text-text-secondary/50 uppercase tracking-tight italic">Files will be extracted into this directory using their original folder structure.</p>
|
||||
</div>
|
||||
</Card>
|
||||
|
||||
<Card class="p-8 bg-gradient-to-br from-bg-secondary to-bg-tertiary border-border-color shadow-2xl relative overflow-hidden">
|
||||
<div class="absolute top-0 right-0 p-4 opacity-5 pointer-events-none">
|
||||
<Database size={120} />
|
||||
</div>
|
||||
|
||||
<div class="relative z-10">
|
||||
<h3 class="text-lg font-black uppercase tracking-tighter text-text-primary mb-6 flex items-center gap-2">
|
||||
<Info size={18} class="text-blue-500" />
|
||||
Physical Manifest
|
||||
</h3>
|
||||
|
||||
<div class="space-y-4">
|
||||
{#if manifest}
|
||||
{#each manifest.media_required as req}
|
||||
<div class="p-4 bg-bg-primary/50 border border-border-color rounded-xl flex items-center gap-4 group hover:border-blue-500/30 transition-colors">
|
||||
<div class={cn(
|
||||
"p-3 rounded-lg border shadow-inner",
|
||||
req.media_type === 'tape' ? 'bg-blue-500/10 text-blue-400 border-blue-500/20' : 'bg-yellow-500/10 text-yellow-400 border-yellow-500/20'
|
||||
)}>
|
||||
{#if req.media_type === 'tape'}<CassetteTape size={20} />{:else}<HardDrive size={20} />{/if}
|
||||
</div>
|
||||
<div class="flex-1 min-w-0">
|
||||
<div class="flex justify-between items-center mb-1">
|
||||
<span class="text-sm font-black text-text-primary mono">{req.identifier}</span>
|
||||
<span class="text-[10px] font-black uppercase tracking-widest text-text-secondary">{req.media_type}</span>
|
||||
</div>
|
||||
<div class="flex justify-between text-[10px] font-bold text-text-secondary opacity-60">
|
||||
<span>{req.file_count} FILES</span>
|
||||
<span>{formatSize(req.total_size)}</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
{/each}
|
||||
{/if}
|
||||
<FileBrowser
|
||||
bind:currentPath
|
||||
files={cartFiles}
|
||||
mode="cart"
|
||||
onNavigate={(path) => currentPath = path}
|
||||
onToggleTrack={handleRemove}
|
||||
/>
|
||||
</div>
|
||||
|
||||
<div class="mt-8 p-4 bg-blue-500/5 border border-dashed border-blue-500/20 rounded-lg">
|
||||
<p class="text-[10px] font-bold text-blue-300/70 leading-relaxed italic">
|
||||
Note: Recovery will proceed sequentially by media to minimize hardware cycles.
|
||||
<!-- SIDEBAR: MANIFEST & SETTINGS -->
|
||||
<aside class="flex flex-col gap-6 min-h-0 overflow-y-auto pr-2 pb-4">
|
||||
<!-- Queue Summary -->
|
||||
<Card class="p-6 bg-gradient-to-br from-bg-secondary to-bg-tertiary border-border-color shadow-xl">
|
||||
<h3 class="text-[10px] font-black uppercase tracking-widest text-text-secondary mb-4 opacity-50">Queue Statistics</h3>
|
||||
<div class="grid grid-cols-2 gap-4">
|
||||
<div class="p-4 bg-bg-primary/40 border border-border-color/40 rounded-xl">
|
||||
<span class="text-[9px] font-black uppercase tracking-widest text-text-secondary block mb-1">Total Files</span>
|
||||
<span class="text-xl font-black text-text-primary mono">{manifest?.total_files || 0}</span>
|
||||
</div>
|
||||
<div class="p-4 bg-bg-primary/40 border border-border-color/40 rounded-xl">
|
||||
<span class="text-[9px] font-black uppercase tracking-widest text-text-secondary block mb-1">Recovery Size</span>
|
||||
<span class="text-xl font-black text-text-primary mono">{formatSize(manifest?.total_size || 0)}</span>
|
||||
</div>
|
||||
</div>
|
||||
</Card>
|
||||
|
||||
<!-- DESTINATION SELECTOR -->
|
||||
<Card class="bg-bg-secondary border-border-color shadow-xl overflow-hidden">
|
||||
<div class="p-5 border-b border-border-color bg-bg-tertiary/30 flex items-center gap-3">
|
||||
<MapPin size={16} class="text-success-color" />
|
||||
<h2 class="text-xs font-black uppercase tracking-widest text-text-primary">Recovery Target</h2>
|
||||
</div>
|
||||
<div class="p-5 space-y-4">
|
||||
<div class="space-y-2">
|
||||
<label for="destination" class="text-[10px] font-black uppercase tracking-widest text-text-secondary opacity-50 ml-1">Restore to Host Path</label>
|
||||
<select
|
||||
id="destination"
|
||||
bind:value={selectedDest}
|
||||
class="w-full h-12 bg-bg-primary border border-border-color rounded-xl px-4 text-sm font-bold text-text-primary outline-none focus:ring-2 focus:ring-success-color/20 transition-all appearance-none cursor-pointer"
|
||||
>
|
||||
{#each restoreDests as dest}
|
||||
<option value={dest}>{dest}</option>
|
||||
{/each}
|
||||
{#if restoreDests.length === 0}
|
||||
<option value="">No destinations configured</option>
|
||||
{/if}
|
||||
</select>
|
||||
</div>
|
||||
<p class="text-[10px] text-text-secondary leading-relaxed italic opacity-60">
|
||||
Files will be restored into this directory, maintaining their original folder structure.
|
||||
</p>
|
||||
</div>
|
||||
</Card>
|
||||
|
||||
<!-- MEDIA MANIFEST -->
|
||||
<Card class="bg-bg-secondary border-border-color shadow-xl flex flex-col min-h-0">
|
||||
<div class="p-5 border-b border-border-color bg-bg-tertiary/30 flex items-center gap-3">
|
||||
<Database size={16} class="text-blue-400" />
|
||||
<h2 class="text-xs font-black uppercase tracking-widest text-text-primary">Physical Manifest</h2>
|
||||
</div>
|
||||
|
||||
<div class="p-5 space-y-3 flex-1 overflow-y-auto">
|
||||
{#each manifest?.media_required || [] as media}
|
||||
<div class="bg-bg-primary/50 border border-border-color rounded-xl p-4 flex items-center gap-4 group hover:border-blue-500/30 transition-all">
|
||||
<div class="p-2 bg-blue-500/10 rounded-lg text-blue-500">
|
||||
{#if media.media_type === 'tape'}<CassetteTape size={20} />{/if}
|
||||
{#if media.media_type === 'hdd'}<HardDrive size={20} />{/if}
|
||||
{#if media.media_type === 'cloud'}<Library size={20} />{/if}
|
||||
</div>
|
||||
<div class="flex-1">
|
||||
<div class="flex justify-between items-center">
|
||||
<span class="text-sm font-black text-text-primary mono">{media.identifier}</span>
|
||||
<span class="text-[9px] font-black uppercase text-blue-400">{media.media_type}</span>
|
||||
</div>
|
||||
<div class="flex gap-3 mt-1">
|
||||
<span class="text-[10px] font-bold text-text-secondary opacity-60 uppercase">{media.file_count} Files</span>
|
||||
<span class="text-[10px] font-bold text-text-secondary opacity-60 uppercase border-l border-border-color pl-3">{formatSize(media.total_size)}</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
{:else}
|
||||
<div class="py-12 text-center opacity-20 border-2 border-dashed border-border-color rounded-xl">
|
||||
<p class="text-[10px] font-black uppercase tracking-widest">No Media Required</p>
|
||||
</div>
|
||||
{/each}
|
||||
</div>
|
||||
|
||||
<div class="p-5 bg-bg-tertiary/20 border-t border-border-color">
|
||||
<div class="flex items-start gap-3">
|
||||
<ShieldCheck size={14} class="text-success-color shrink-0 mt-0.5" />
|
||||
<p class="text-[10px] text-text-secondary leading-normal">
|
||||
Verification active: Media identifiers will be checked physically before extraction. Recovery will proceed sequentially by media to minimize hardware cycles.
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
</Card>
|
||||
</div>
|
||||
</aside>
|
||||
</div>
|
||||
{/if}
|
||||
</div>
|
||||
|
||||
@@ -1,115 +1,203 @@
|
||||
<script lang="ts">
|
||||
import { onMount } from 'svelte';
|
||||
import { Search, Save, ShieldAlert, FolderSearch, RotateCw, Plus, Trash2, Download } from 'lucide-svelte';
|
||||
import { Search, Save, ShieldAlert, FolderSearch, RotateCw, Plus, Trash2, Download, Database, Upload, CalendarClock, Zap, Bell, Send } from 'lucide-svelte';
|
||||
import { Button } from '$lib/components/ui/button';
|
||||
import { Card } from '$lib/components/ui/card';
|
||||
import { Input } from '$lib/components/ui/input';
|
||||
import { getSettingsSystemSettingsGet, updateSettingSystemSettingsPost } from '$lib/api/sdk.gen';
|
||||
import {
|
||||
getSettingsSystemSettingsGet,
|
||||
updateSettingSystemSettingsPost,
|
||||
exportDatabaseSystemDatabaseExportGet,
|
||||
importDatabaseSystemDatabaseImportPost,
|
||||
testNotificationSystemNotificationsTestPost
|
||||
} from '$lib/api';
|
||||
import { toast } from "svelte-sonner";
|
||||
|
||||
let sourceRoots = $state<string[]>(["/source_data"]);
|
||||
let restoreDestinations = $state<string[]>(["/restores"]);
|
||||
let globalExclusions = $state("*.tmp\nnode_modules/\n.DS_Store\nThumbs.db\nCache/\n");
|
||||
let scanSchedule = $state("");
|
||||
let archivalSchedule = $state("");
|
||||
let notificationUrls = $state<string[]>([]);
|
||||
|
||||
let loading = $state(true);
|
||||
let saving = $state(false);
|
||||
let exporting = $state(false);
|
||||
let importing = $state(false);
|
||||
let testingUrlIdx = $state<number | null>(null);
|
||||
|
||||
async function loadSettings() {
|
||||
loading = true;
|
||||
try {
|
||||
const response = await getSettingsSystemSettingsGet();
|
||||
if (response.data) {
|
||||
if (response.data.source_roots) {
|
||||
try { sourceRoots = JSON.parse(response.data.source_roots); } catch { sourceRoots = [response.data.source_roots]; }
|
||||
}
|
||||
if (response.data.restore_destinations) {
|
||||
try { restoreDestinations = JSON.parse(response.data.restore_destinations); } catch { restoreDestinations = [response.data.restore_destinations]; }
|
||||
}
|
||||
if (response.data.global_exclusions) {
|
||||
globalExclusions = response.data.global_exclusions;
|
||||
}
|
||||
const data = response.data;
|
||||
if (data.source_roots) sourceRoots = JSON.parse(data.source_roots);
|
||||
if (data.restore_destinations) restoreDestinations = JSON.parse(data.restore_destinations);
|
||||
if (data.global_exclusions) globalExclusions = data.global_exclusions;
|
||||
if (data.schedule_scan) scanSchedule = data.schedule_scan;
|
||||
if (data.schedule_archival) archivalSchedule = data.schedule_archival;
|
||||
if (data.notification_urls) notificationUrls = JSON.parse(data.notification_urls);
|
||||
}
|
||||
} catch (error) {
|
||||
console.error("Failed to load settings:", error);
|
||||
toast.error("Failed to load system settings");
|
||||
toast.error("Failed to load system configuration");
|
||||
} finally {
|
||||
loading = false;
|
||||
}
|
||||
}
|
||||
|
||||
onMount(loadSettings);
|
||||
|
||||
function addSourceRoot() { sourceRoots = [...sourceRoots, ""]; }
|
||||
function removeSourceRoot(index: number) { sourceRoots = sourceRoots.filter((_, i) => i !== index); }
|
||||
|
||||
function addRestoreDest() { restoreDestinations = [...restoreDestinations, ""]; }
|
||||
function removeRestoreDest(index: number) { restoreDestinations = restoreDestinations.filter((_, i) => i !== index); }
|
||||
|
||||
async function saveSettings() {
|
||||
saving = true;
|
||||
try {
|
||||
const roots = sourceRoots.filter(r => r.trim() !== "");
|
||||
const dests = restoreDestinations.filter(d => d.trim() !== "");
|
||||
|
||||
await Promise.all([
|
||||
updateSettingSystemSettingsPost({ body: { key: "source_roots", value: JSON.stringify(roots) } }),
|
||||
updateSettingSystemSettingsPost({ body: { key: "restore_destinations", value: JSON.stringify(dests) } }),
|
||||
updateSettingSystemSettingsPost({ body: { key: "global_exclusions", value: globalExclusions } })
|
||||
updateSettingSystemSettingsPost({ body: { key: "source_roots", value: JSON.stringify(sourceRoots) } }),
|
||||
updateSettingSystemSettingsPost({ body: { key: "restore_destinations", value: JSON.stringify(restoreDestinations) } }),
|
||||
updateSettingSystemSettingsPost({ body: { key: "global_exclusions", value: globalExclusions } }),
|
||||
updateSettingSystemSettingsPost({ body: { key: "schedule_scan", value: scanSchedule } }),
|
||||
updateSettingSystemSettingsPost({ body: { key: "schedule_archival", value: archivalSchedule } }),
|
||||
updateSettingSystemSettingsPost({ body: { key: "notification_urls", value: JSON.stringify(notificationUrls) } })
|
||||
]);
|
||||
|
||||
toast.success("Settings saved successfully");
|
||||
sourceRoots = roots;
|
||||
restoreDestinations = dests;
|
||||
} catch (error) {
|
||||
toast.error("Failed to save settings");
|
||||
} finally {
|
||||
saving = false;
|
||||
}
|
||||
}
|
||||
|
||||
async function handleTestNotification(url: string, index: number) {
|
||||
if (!url || !url.trim()) {
|
||||
toast.error("Please enter a URL first");
|
||||
return;
|
||||
}
|
||||
testingUrlIdx = index;
|
||||
try {
|
||||
await testNotificationSystemNotificationsTestPost({
|
||||
body: { url }
|
||||
});
|
||||
toast.success("Test notification sent!");
|
||||
} catch (error: any) {
|
||||
toast.error(error.body?.detail || "Test failed");
|
||||
} finally {
|
||||
testingUrlIdx = null;
|
||||
}
|
||||
}
|
||||
|
||||
async function handleBackup() {
|
||||
exporting = true;
|
||||
try {
|
||||
const url = "http://localhost:8000/system/database/export";
|
||||
window.location.href = url;
|
||||
toast.success("Database backup initiated");
|
||||
} catch (error) {
|
||||
toast.error("Failed to backup database");
|
||||
} finally {
|
||||
exporting = false;
|
||||
}
|
||||
}
|
||||
|
||||
async function handleRestore(event: Event) {
|
||||
const input = event.target as HTMLInputElement;
|
||||
if (!input.files || input.files.length === 0) return;
|
||||
|
||||
const file = input.files[0];
|
||||
if (!confirm(`Are you sure you want to restore "${file.name}"? This will overwrite your current index and ALL settings!`)) {
|
||||
input.value = "";
|
||||
return;
|
||||
}
|
||||
|
||||
importing = true;
|
||||
try {
|
||||
await importDatabaseSystemDatabaseImportPost({
|
||||
body: {
|
||||
file: file
|
||||
}
|
||||
});
|
||||
toast.success("Database restored successfully");
|
||||
setTimeout(() => window.location.reload(), 1500);
|
||||
} catch (error: any) {
|
||||
toast.error(error.body?.detail || "Failed to restore database");
|
||||
} finally {
|
||||
importing = false;
|
||||
input.value = "";
|
||||
}
|
||||
}
|
||||
|
||||
function addRoot() {
|
||||
sourceRoots = [...sourceRoots, ""];
|
||||
}
|
||||
|
||||
function removeRoot(index: number) {
|
||||
sourceRoots = sourceRoots.filter((_, i) => i !== index);
|
||||
}
|
||||
|
||||
function addDest() {
|
||||
restoreDestinations = [...restoreDestinations, ""];
|
||||
}
|
||||
|
||||
function removeDest(index: number) {
|
||||
restoreDestinations = restoreDestinations.filter((_, i) => i !== index);
|
||||
}
|
||||
|
||||
function addNotificationUrl() {
|
||||
notificationUrls = [...notificationUrls, ""];
|
||||
}
|
||||
|
||||
function removeNotificationUrl(index: number) {
|
||||
notificationUrls = notificationUrls.filter((_, i) => i !== index);
|
||||
}
|
||||
|
||||
// (removed) duplicate onMount(loadSettings) — this component already registers
// loadSettings in an earlier onMount call, so the second registration made
// the settings load run twice on every mount.
|
||||
</script>
|
||||
|
||||
<svelte:head>
    <title>System Settings - TapeHoard</title>
</svelte:head>
|
||||
|
||||
<div class="flex justify-between items-center mb-8 bg-bg-secondary p-6 rounded-xl border border-border-color shadow-lg relative overflow-hidden">
|
||||
<div class="absolute inset-0 bg-gradient-to-r from-action-color/5 to-transparent pointer-events-none"></div>
|
||||
<div class="flex flex-col gap-8 h-full overflow-y-auto pr-2 pb-12 animate-in fade-in duration-700">
|
||||
<!-- Header -->
|
||||
<header class="flex justify-between items-center bg-bg-secondary px-8 py-5 rounded-xl border border-border-color shadow-2xl relative overflow-hidden shrink-0">
|
||||
<div class="absolute inset-0 bg-gradient-to-r from-orange-500/5 to-transparent pointer-events-none"></div>
|
||||
<div class="relative z-10">
|
||||
<h1 class="text-3xl font-black uppercase tracking-tighter text-text-primary">System Settings</h1>
|
||||
<p class="text-text-secondary mt-1 font-bold uppercase tracking-widest text-[10px] opacity-70">Global Backup Configuration & Policy Engine</p>
|
||||
<h1 class="text-2xl font-black uppercase tracking-tighter text-text-primary flex items-center gap-3">
|
||||
<Search class="text-orange-500" size={28} />
|
||||
System Settings
|
||||
</h1>
|
||||
<p class="text-[12px] font-bold uppercase tracking-widest text-text-secondary mt-1 opacity-80">
|
||||
Core Configuration & Disaster Recovery
|
||||
</p>
|
||||
</div>
|
||||
<div class="flex gap-4 relative z-10">
|
||||
<Button variant="default" size="lg" class="px-8 h-12 font-black uppercase tracking-widest text-[11px]" onclick={saveSettings} disabled={saving}>
|
||||
{#if saving}
|
||||
<RotateCw size={20} class="mr-2 animate-spin" />
|
||||
{:else}
|
||||
<Save size={20} class="mr-2" />
|
||||
{/if}
|
||||
Apply Settings
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{#if loading}
|
||||
<div class="flex flex-col items-center justify-center py-24 gap-4 opacity-50">
|
||||
<RotateCw size={48} class="animate-spin text-action-color" />
|
||||
<span class="text-xs font-black uppercase tracking-widest">Hydrating Configuration...</span>
|
||||
<Button variant="default" class="h-11 px-8 font-black uppercase tracking-widest text-[11px] z-10" onclick={saveSettings} disabled={saving}>
|
||||
{#if saving}
|
||||
<RotateCw size={18} class="mr-2 animate-spin" /> Committing...
|
||||
{:else}
|
||||
<Save size={18} class="mr-2" /> Commit Changes
|
||||
{/if}
|
||||
</Button>
|
||||
</header>
|
||||
|
||||
{#if loading}
|
||||
<div class="flex-1 flex flex-col items-center justify-center gap-4 opacity-50">
|
||||
<RotateCw size={48} class="animate-spin text-orange-500" />
|
||||
<span class="text-xs font-black uppercase tracking-widest">Parsing Manifests...</span>
|
||||
</div>
|
||||
{:else}
|
||||
<div class="max-w-4xl mx-auto space-y-8 animate-in fade-in slide-in-from-bottom-4 duration-700">
|
||||
<!-- Source Configuration -->
|
||||
{:else}
|
||||
<div class="grid grid-cols-1 xl:grid-cols-2 gap-8">
|
||||
<!-- Source Roots -->
|
||||
<Card class="p-8 shadow-xl border-border-color/60 bg-gradient-to-br from-bg-secondary to-bg-tertiary">
|
||||
<div class="flex items-center justify-between mb-6">
|
||||
<div class="flex items-center gap-3">
|
||||
<div class="p-2 bg-blue-500/10 rounded-lg text-blue-500 border border-blue-500/20"><FolderSearch size={24} /></div>
|
||||
<div><h3 class="text-lg font-black text-text-primary uppercase tracking-tight">Source Provisioning</h3><p class="text-[11px] text-text-secondary font-medium uppercase tracking-wider opacity-60">Primary data ingestion points.</p></div>
|
||||
<div class="p-2 bg-orange-500/10 rounded-lg text-orange-500 border border-orange-500/20"><Search size={24} /></div>
|
||||
<div><h3 class="text-lg font-black text-text-primary uppercase tracking-tight">Source Roots</h3><p class="text-[11px] text-text-secondary font-medium uppercase tracking-wider opacity-60">Base directories to scan for backups.</p></div>
|
||||
</div>
|
||||
<Button variant="secondary" size="sm" class="h-8 uppercase tracking-widest text-[10px] font-bold" onclick={addSourceRoot}><Plus size={14} class="mr-1" /> Add Source</Button>
|
||||
<Button variant="outline" size="sm" class="h-8 text-[10px] font-black uppercase tracking-widest border-orange-500/30 text-orange-400" onclick={addRoot}><Plus size={14} class="mr-1" /> Add Root</Button>
|
||||
</div>
|
||||
<div class="space-y-4">
|
||||
<div class="space-y-3">
|
||||
{#each sourceRoots as root, i}
|
||||
<div class="flex gap-2">
|
||||
<Input bind:value={sourceRoots[i]} class="h-11 bg-bg-primary/50 border-border-color font-mono text-[13px]" placeholder="/path/to/data" />
|
||||
<Button variant="ghost" size="icon" class="h-11 w-11 text-text-secondary hover:text-error-color hover:bg-error-color/10" onclick={() => removeSourceRoot(i)}><Trash2 size={18} /></Button>
|
||||
<div class="flex gap-2 animate-in slide-in-from-left-2 duration-300">
|
||||
<Input bind:value={sourceRoots[i]} placeholder="/mnt/data" class="h-12 bg-bg-primary/50 border-border-color font-mono text-sm" />
|
||||
<Button variant="ghost" size="icon" class="h-12 w-12 hover:bg-error-color/10 hover:text-error-color" onclick={() => removeRoot(i)}><Trash2 size={18} /></Button>
|
||||
</div>
|
||||
{/each}
|
||||
</div>
|
||||
@@ -120,34 +208,166 @@
|
||||
<div class="flex items-center justify-between mb-6">
|
||||
<div class="flex items-center gap-3">
|
||||
<div class="p-2 bg-success-color/10 rounded-lg text-success-color border border-success-color/20"><Download size={24} /></div>
|
||||
<div><h3 class="text-lg font-black text-text-primary uppercase tracking-tight">Recovery Targets</h3><p class="text-[11px] text-text-secondary font-medium uppercase tracking-wider opacity-60">Authorized destinations for restored data.</p></div>
|
||||
<div><h3 class="text-lg font-black text-text-primary uppercase tracking-tight">Recovery Targets</h3><p class="text-[11px] text-text-secondary font-medium uppercase tracking-wider opacity-60">Locations available for file recovery.</p></div>
|
||||
</div>
|
||||
<Button variant="secondary" size="sm" class="h-8 uppercase tracking-widest text-[10px] font-bold" onclick={addRestoreDest}><Plus size={14} class="mr-1" /> Add Target</Button>
|
||||
<Button variant="outline" size="sm" class="h-8 text-[10px] font-black uppercase tracking-widest border-success-color/30 text-success-color" onclick={addDest}><Plus size={14} class="mr-1" /> Add Target</Button>
|
||||
</div>
|
||||
<div class="space-y-4">
|
||||
<div class="space-y-3">
|
||||
{#each restoreDestinations as dest, i}
|
||||
<div class="flex gap-2">
|
||||
<Input bind:value={restoreDestinations[i]} class="h-11 bg-bg-primary/50 border-border-color font-mono text-[13px]" placeholder="/path/to/restores" />
|
||||
<Button variant="ghost" size="icon" class="h-11 w-11 text-text-secondary hover:text-error-color hover:bg-error-color/10" onclick={() => removeRestoreDest(i)}><Trash2 size={18} /></Button>
|
||||
<div class="flex gap-2 animate-in slide-in-from-right-2 duration-300">
|
||||
<Input bind:value={restoreDestinations[i]} placeholder="/mnt/recovery" class="h-12 bg-bg-primary/50 border-border-color font-mono text-sm" />
|
||||
<Button variant="ghost" size="icon" class="h-12 w-12 hover:bg-error-color/10 hover:text-error-color" onclick={() => removeDest(i)}><Trash2 size={18} /></Button>
|
||||
</div>
|
||||
{/each}
|
||||
</div>
|
||||
</Card>
|
||||
</div>
|
||||
|
||||
<!-- Exclusion Engine -->
|
||||
<!-- Global Exclusions -->
|
||||
<Card class="p-8 shadow-xl border-border-color/60 bg-gradient-to-br from-bg-secondary to-bg-tertiary">
|
||||
<div class="flex items-center justify-between mb-6">
|
||||
<div class="flex items-center gap-3">
|
||||
<div class="p-2 bg-action-color/10 rounded-lg text-action-color border border-action-color/20"><Search size={24} /></div>
|
||||
<div><h3 class="text-lg font-black text-text-primary uppercase tracking-tight">Exclusion Engine</h3><p class="text-[11px] text-text-secondary font-medium uppercase tracking-wider opacity-60">Patterns to bypass during scans.</p></div>
|
||||
<div class="p-2 bg-orange-500/10 rounded-lg text-orange-500 border border-orange-500/20"><FolderSearch size={24} /></div>
|
||||
<div><h3 class="text-lg font-black text-text-primary uppercase tracking-tight">Global Exclusions</h3><p class="text-[11px] text-text-secondary font-medium uppercase tracking-wider opacity-60">Git-style ignore patterns for all scans.</p></div>
|
||||
</div>
|
||||
<span class="text-[10px] font-black tracking-widest text-text-secondary bg-bg-primary px-3 py-1 rounded-full border border-border-color uppercase">.gitignore syntax</span>
|
||||
</div>
|
||||
<textarea bind:value={globalExclusions} class="w-full h-48 bg-bg-primary/50 border border-border-color rounded-lg p-6 text-[14px] mono text-text-primary focus:ring-1 focus:ring-action-color focus:outline-none resize-none leading-relaxed transition-all shadow-inner" placeholder="e.g. *.tmp"></textarea>
|
||||
<div class="space-y-4">
|
||||
<p class="text-[11px] text-text-secondary uppercase tracking-widest font-black opacity-40 mb-2">Exclusion Rules (One per line)</p>
|
||||
<textarea
|
||||
bind:value={globalExclusions}
|
||||
class="w-full h-48 bg-bg-primary/50 border border-border-color rounded-xl p-4 font-mono text-sm text-text-primary focus:ring-2 focus:ring-orange-500/20 focus:border-orange-500/40 transition-all outline-none"
|
||||
placeholder="*.tmp node_modules/ .DS_Store"
|
||||
></textarea>
|
||||
</div>
|
||||
<div class="mt-6 p-4 bg-orange-500/5 border border-dashed border-orange-500/30 rounded-lg flex gap-4 items-start">
|
||||
<ShieldAlert size={20} class="text-orange-500 shrink-0 mt-0.5" />
|
||||
<p class="text-[12px] text-text-secondary leading-normal font-medium"><strong class="text-orange-500 uppercase tracking-tight text-[11px] block mb-1">Warning</strong>Broad exclusion patterns can result in incomplete backups.</p>
|
||||
</div>
|
||||
</Card>
|
||||
|
||||
<div class="grid grid-cols-1 xl:grid-cols-2 gap-8">
|
||||
<!-- Automated Scheduling -->
|
||||
<Card class="p-8 shadow-xl border-border-color/60 bg-gradient-to-br from-bg-secondary to-bg-tertiary">
|
||||
<div class="flex items-center justify-between mb-6">
|
||||
<div class="flex items-center gap-3">
|
||||
<div class="p-2 bg-blue-500/10 rounded-lg text-blue-500 border border-blue-500/20"><CalendarClock size={24} /></div>
|
||||
<div><h3 class="text-lg font-black text-text-primary uppercase tracking-tight">Automated Scheduling</h3><p class="text-[11px] text-text-secondary font-medium uppercase tracking-wider opacity-60">Manage recurring background tasks.</p></div>
|
||||
</div>
|
||||
{/if}
|
||||
</div>
|
||||
<div class="space-y-6">
|
||||
<div class="space-y-4">
|
||||
<div class="flex items-center gap-2 mb-2">
|
||||
<Zap size={14} class="text-blue-400" />
|
||||
<span class="text-[11px] font-black uppercase tracking-widest text-text-primary">System Scan Frequency</span>
|
||||
</div>
|
||||
<div class="flex gap-2">
|
||||
<Input
|
||||
bind:value={scanSchedule}
|
||||
placeholder="0 2 * * *"
|
||||
class="h-11 bg-bg-primary/50 border-border-color font-mono text-sm"
|
||||
/>
|
||||
<Button variant="outline" class="h-11 px-4 text-[10px] uppercase font-black tracking-widest shrink-0" onclick={() => scanSchedule = "0 2 * * *"}>Daily</Button>
|
||||
</div>
|
||||
<p class="text-[10px] text-text-secondary italic leading-relaxed opacity-60">Standard Cron expression (m h d M dw). Empty to disable.</p>
|
||||
</div>
|
||||
|
||||
<div class="space-y-4">
|
||||
<div class="flex items-center gap-2 mb-2">
|
||||
<Zap size={14} class="text-success-color" />
|
||||
<span class="text-[11px] font-black uppercase tracking-widest text-text-primary">Media Archival Frequency</span>
|
||||
</div>
|
||||
<div class="flex gap-2">
|
||||
<Input
|
||||
bind:value={archivalSchedule}
|
||||
placeholder="0 4 * * 0"
|
||||
class="h-11 bg-bg-primary/50 border-border-color font-mono text-sm"
|
||||
/>
|
||||
<Button variant="outline" class="h-11 px-4 text-[10px] uppercase font-black tracking-widest shrink-0" onclick={() => archivalSchedule = "0 4 * * 0"}>Weekly</Button>
|
||||
</div>
|
||||
<p class="text-[10px] text-text-secondary italic leading-relaxed opacity-60">Standard Cron expression. Weekly default is Sunday at 4 AM.</p>
|
||||
</div>
|
||||
</div>
|
||||
</Card>
|
||||
|
||||
<!-- Notifications -->
|
||||
<Card class="p-8 shadow-xl border-border-color/60 bg-gradient-to-br from-bg-secondary to-bg-tertiary">
|
||||
<div class="flex items-center justify-between mb-6">
|
||||
<div class="flex items-center gap-3">
|
||||
<div class="p-2 bg-action-color/10 rounded-lg text-action-color border border-action-color/20"><Bell size={24} /></div>
|
||||
<div><h3 class="text-lg font-black text-text-primary uppercase tracking-tight">Notification Fleet</h3><p class="text-[11px] text-text-secondary font-medium uppercase tracking-wider opacity-60">External alerts via Apprise URLs.</p></div>
|
||||
</div>
|
||||
<Button variant="outline" size="sm" class="h-8 text-[10px] font-black uppercase tracking-widest border-action-color/30 text-action-color" onclick={addNotificationUrl}><Plus size={14} class="mr-1" /> Add Service</Button>
|
||||
</div>
|
||||
<div class="space-y-4">
|
||||
{#each notificationUrls as url, i}
|
||||
<div class="flex gap-2 animate-in slide-in-from-right-2 duration-300">
|
||||
<Input bind:value={notificationUrls[i]} placeholder="discord://token/id" class="h-12 bg-bg-primary/50 border-border-color font-mono text-[11px]" />
|
||||
<Button
|
||||
variant="outline"
|
||||
size="icon"
|
||||
class="h-12 w-12 border-action-color/20 text-action-color hover:bg-action-color/10"
|
||||
onclick={() => handleTestNotification(notificationUrls[i], i)}
|
||||
disabled={testingUrlIdx !== null}
|
||||
>
|
||||
{#if testingUrlIdx === i}
|
||||
<RotateCw size={16} class="animate-spin" />
|
||||
{:else}
|
||||
<Send size={16} />
|
||||
{/if}
|
||||
</Button>
|
||||
<Button variant="ghost" size="icon" class="h-12 w-12 hover:bg-error-color/10 hover:text-error-color" onclick={() => removeNotificationUrl(i)}><Trash2 size={18} /></Button>
|
||||
</div>
|
||||
{/each}
|
||||
{#if notificationUrls.length === 0}
|
||||
<div class="py-12 border-2 border-dashed border-border-color rounded-xl flex flex-col items-center justify-center opacity-20">
|
||||
<Bell size={48} class="mb-2" />
|
||||
<p class="text-[10px] font-black uppercase tracking-widest">No Alerts Configured</p>
|
||||
</div>
|
||||
{/if}
|
||||
</div>
|
||||
<p class="text-[10px] text-text-secondary mt-6 leading-relaxed opacity-60 italic">Supports Discord, Slack, Telegram, Email, and more via Apprise. Format: <code>discord://webhook_id/webhook_token</code></p>
|
||||
</Card>
|
||||
</div>
|
||||
|
||||
<!-- Database Maintenance -->
|
||||
<Card class="p-8 shadow-xl border-border-color/60 bg-gradient-to-br from-bg-secondary to-bg-tertiary">
|
||||
<div class="flex items-center justify-between mb-6">
|
||||
<div class="flex items-center gap-3">
|
||||
<div class="p-2 bg-purple-500/10 rounded-lg text-purple-500 border border-purple-500/20"><Database size={24} /></div>
|
||||
<div><h3 class="text-lg font-black text-text-primary uppercase tracking-tight">Database Maintenance</h3><p class="text-[11px] text-text-secondary font-medium uppercase tracking-wider opacity-60">Disaster recovery & index portability.</p></div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="grid grid-cols-2 gap-4">
|
||||
<div class="p-6 bg-bg-primary/30 border border-border-color rounded-xl flex flex-col gap-4">
|
||||
<div>
|
||||
<h4 class="text-sm font-black uppercase text-text-primary tracking-tight">Export Index</h4>
|
||||
<p class="text-[11px] text-text-secondary mt-1 leading-relaxed">Download a portable copy of the TapeHoard index database. This includes all file tracking data, media history, and system settings.</p>
|
||||
</div>
|
||||
<Button variant="secondary" class="mt-auto h-11 font-black uppercase tracking-widest text-[10px]" onclick={handleBackup} disabled={exporting}>
|
||||
{#if exporting}
|
||||
<RotateCw size={16} class="mr-2 animate-spin" /> Preparing...
|
||||
{:else}
|
||||
<Download size={16} class="mr-2" /> Export Database (.db)
|
||||
{/if}
|
||||
</Button>
|
||||
</div>
|
||||
<div class="p-6 bg-bg-primary/30 border border-border-color rounded-xl flex flex-col gap-4">
|
||||
<div>
|
||||
<h4 class="text-sm font-black uppercase text-text-primary tracking-tight">Import Index</h4>
|
||||
<p class="text-[11px] text-text-secondary mt-1 leading-relaxed">Restore the system state from a previous database export. <span class="text-error-color font-bold italic">Warning: This will overwrite all current data!</span></p>
|
||||
</div>
|
||||
<div class="relative">
|
||||
<input type="file" accept=".db" class="hidden" id="db-import" onchange={handleRestore} disabled={importing} />
|
||||
<Button variant="outline" class="w-full h-11 font-black uppercase tracking-widest text-[10px] border-purple-500/30 text-purple-400 hover:bg-purple-500/10" onclick={() => document.getElementById('db-import')?.click()} disabled={importing}>
|
||||
{#if importing}
|
||||
<RotateCw size={16} class="mr-2 animate-spin" /> Restoring...
|
||||
{:else}
|
||||
<Upload size={16} class="mr-2" /> Import & Restore
|
||||
{/if}
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</Card>
|
||||
{/if}
|
||||
</div>
|
||||
|
||||
@@ -10,6 +10,7 @@
|
||||
trackBatchSystemTrackBatchPost,
|
||||
triggerScanSystemScanPost,
|
||||
getScanStatusSystemScanStatusGet,
|
||||
searchSystemSystemSearchGet,
|
||||
type ScanStatusSchema
|
||||
} from '$lib/api';
|
||||
import { toast } from "svelte-sonner";
|
||||
@@ -17,18 +18,22 @@
|
||||
|
||||
// Current directory state
let currentPath = $state('ROOT');
let searchQuery = $state('');
let files = $state<FileItem[]>([]);
let loading = $state(false);
let searchLoading = $state(false);
let committing = $state(false);

// Scanner Status (local for button state only)
let scanRunning = $state(false);
// Handle for the scan-status polling timer — presumably set/cleared by the
// scan lifecycle code below; confirm it is cleared on destroy.
let pollInterval: any;
// Debounce handle for the search-as-you-type effect below.
let searchTimeout: any;

// Staging area for tracking changes: path -> desired tracked state
let pendingChanges = $state<Map<string, boolean>>(new Map());
|
||||
|
||||
async function loadFiles(path: string) {
|
||||
if (searchQuery.trim().length >= 3) return; // Prevent loading path if searching
|
||||
loading = true;
|
||||
try {
|
||||
const response = await browsePathSystemBrowseGet({
|
||||
@@ -54,6 +59,48 @@
|
||||
}
|
||||
}
|
||||
|
||||
async function searchFiles(query: string) {
|
||||
searchLoading = true;
|
||||
try {
|
||||
const response = await searchSystemSystemSearchGet({
|
||||
query: { q: query }
|
||||
});
|
||||
if (response.data) {
|
||||
files = response.data.map(f => ({
|
||||
name: f.name,
|
||||
path: f.path,
|
||||
type: f.type as 'file' | 'directory' | 'link',
|
||||
size: f.size ?? null,
|
||||
mtime: f.mtime ?? null,
|
||||
tracked: f.tracked ?? false,
|
||||
ignored: f.ignored ?? false,
|
||||
sha256_hash: null
|
||||
}));
|
||||
}
|
||||
} catch (error) {
|
||||
console.error("Failed to search files:", error);
|
||||
toast.error("Search failed");
|
||||
} finally {
|
||||
searchLoading = false;
|
||||
}
|
||||
}
|
||||
|
||||
// Debounced search-as-you-type: re-runs whenever searchQuery changes.
$effect(() => {
    const query = searchQuery.trim();
    // Cancel any pending debounce from the previous keystroke.
    if (searchTimeout) clearTimeout(searchTimeout);

    // 3+ characters: run a server-side search after a 300ms debounce.
    if (query.length >= 3) {
        searchTimeout = setTimeout(() => {
            searchFiles(query);
        }, 300);
    } else if (query.length === 0) {
        // Wait slightly so we don't immediately fetch while user is deleting text rapidly
        searchTimeout = setTimeout(() => {
            loadFiles(currentPath);
        }, 50);
    }
    // NOTE(review): 1–2 character queries neither search nor reload the
    // directory — presumably intentional (too short to search); confirm the
    // listing should stay as-is in that range.
});
|
||||
|
||||
async function updateScanStatus() {
|
||||
try {
|
||||
const response = await getScanStatusSystemScanStatusGet();
|
||||
@@ -175,7 +222,7 @@
|
||||
<div class="relative z-10">
|
||||
<h1 class="text-2xl font-black uppercase tracking-tighter text-text-primary flex items-center gap-3">
|
||||
<FolderTree class="text-action-color" size={28} />
|
||||
File Tracking
|
||||
Tracking Policy
|
||||
</h1>
|
||||
<p class="text-[12px] font-bold uppercase tracking-widest text-text-secondary mt-1 opacity-80">
|
||||
Data Provisioning & Indexing Configuration
|
||||
@@ -225,7 +272,7 @@
|
||||
<LayoutGrid size={24} />
|
||||
</div>
|
||||
<div>
|
||||
<span class="text-[10px] font-black uppercase tracking-widest text-text-secondary block">Tracked Items</span>
|
||||
<span class="text-[10px] font-black uppercase tracking-widest text-text-secondary block">Selection Set</span>
|
||||
<span class="text-xl font-black text-text-primary mono">
|
||||
{files.filter(f => f.tracked).length}
|
||||
</span>
|
||||
@@ -237,7 +284,7 @@
|
||||
<Database size={24} />
|
||||
</div>
|
||||
<div>
|
||||
<span class="text-[10px] font-black uppercase tracking-widest text-text-secondary block">Sync Items</span>
|
||||
<span class="text-[10px] font-black uppercase tracking-widest text-text-secondary block">Directory Index</span>
|
||||
<span class="text-xl font-black text-action-color mono">
|
||||
{files.length}
|
||||
</span>
|
||||
@@ -249,7 +296,7 @@
|
||||
<HardDrive size={24} />
|
||||
</div>
|
||||
<div>
|
||||
<span class="text-[10px] font-black uppercase tracking-widest text-text-secondary block">Eligible Items</span>
|
||||
<span class="text-[10px] font-black uppercase tracking-widest text-text-secondary block">Tracked Files</span>
|
||||
<span class="text-xl font-black text-success-color mono">
|
||||
{files.filter(f => !f.ignored).length}
|
||||
</span>
|
||||
@@ -282,9 +329,10 @@
|
||||
|
||||
<FileBrowser
|
||||
bind:currentPath
|
||||
bind:searchQuery
|
||||
files={displayFiles}
|
||||
isSearching={searchLoading}
|
||||
onNavigate={handleNavigate}
|
||||
onToggleTrack={handleToggleTrack}
|
||||
/>
|
||||
</div>
|
||||
/> </div>
|
||||
</div>
|
||||
|
||||
@@ -58,3 +58,20 @@ generate-client:
|
||||
@echo "Generating TypeScript API client..."
|
||||
# Ensure backend is running first: `just backend`
|
||||
cd frontend && npx @hey-api/openapi-ts -i http://localhost:8000/openapi.json -o src/lib/api -c @hey-api/client-fetch
|
||||
|
||||
# --- Docker ---

# Build the production Docker image (tagged tapehoard:latest, built from the
# repository root using docker/Dockerfile)
docker-build:
    @echo "Building TapeHoard Docker image..."
    docker build -t tapehoard:latest -f docker/Dockerfile .

# Start the production stack using Docker Compose
# (detached; presumably reads docker/docker-compose.yml — confirm filename)
docker-up:
    @echo "Starting TapeHoard stack..."
    cd docker && docker-compose up -d

# Stop the production stack
docker-down:
    @echo "Stopping TapeHoard stack..."
    cd docker && docker-compose down
|
||||
|
||||
Reference in New Issue
Block a user