Compare commits

...

4 Commits

Author SHA1 Message Date
adamlamers 9f8c7a97c6 select keys for tape/hdd
Continuous Integration / backend-tests (push) Successful in 1m8s
Continuous Integration / frontend-check (push) Successful in 31s
Continuous Integration / e2e-tests (push) Successful in 9m29s
2026-05-05 11:26:24 -04:00
adamlamers 06c0b1631b show partial archival in archive browser 2026-05-05 10:02:26 -04:00
adamlamers 40c56f8301 secret management3 2026-05-05 09:21:55 -04:00
adamlamers 779dfd114a fix cancelled jobs being marked completed 2026-05-05 08:29:29 -04:00
22 changed files with 1344 additions and 132 deletions
@@ -0,0 +1,33 @@
"""add_secret_reference_columns
Revision ID: bbe2fb40a559
Revises: 6a15f2e5b03b
Create Date: 2026-05-05 08:35:21.154584
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = "bbe2fb40a559"
down_revision: Union[str, Sequence[str], None] = "6a15f2e5b03b"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Add nullable secret-reference columns to storage_media.

    Both columns store the *name* of an entry in the settings secrets
    keystore, not a raw secret value. They are nullable so existing rows
    are unaffected by the migration.
    """
    op.add_column(
        "storage_media", sa.Column("secret_access_key_name", sa.String(), nullable=True)
    )
    op.add_column(
        "storage_media", sa.Column("encryption_secret_name", sa.String(), nullable=True)
    )
def downgrade() -> None:
    """Drop the secret-reference columns (reverse of upgrade, in reverse order)."""
    op.drop_column("storage_media", "encryption_secret_name")
    op.drop_column("storage_media", "secret_access_key_name")
+42 -17
View File
@@ -121,7 +121,7 @@ def browse(path: str = "ROOT", db_session: Session = Depends(get_db)):
dir_sql, {"prefix": query_path, "prefix_wildcard": f"{query_path}%"} dir_sql, {"prefix": query_path, "prefix_wildcard": f"{query_path}%"}
).fetchall() ).fetchall()
# Find files (immediate children) with their media locations # Find files (immediate children) with their media locations and archive coverage
file_sql = text(""" file_sql = text("""
SELECT SELECT
fs.id, fs.file_path, fs.size, fs.mtime, fs.id, fs.file_path, fs.size, fs.mtime,
@@ -130,7 +130,10 @@ def browse(path: str = "ROOT", db_session: Session = Depends(get_db)):
FROM file_versions fv FROM file_versions fv
JOIN storage_media sm ON sm.id = fv.media_id JOIN storage_media sm ON sm.id = fv.media_id
WHERE fv.filesystem_state_id = fs.id) as media_list, WHERE fv.filesystem_state_id = fs.id) as media_list,
EXISTS(SELECT 1 FROM restore_cart rc WHERE rc.filesystem_state_id = fs.id) as is_selected EXISTS(SELECT 1 FROM restore_cart rc WHERE rc.filesystem_state_id = fs.id) as is_selected,
COALESCE((SELECT SUM(fv.offset_end - fv.offset_start)
FROM file_versions fv
WHERE fv.filesystem_state_id = fs.id), 0) as archived_bytes
FROM filesystem_state fs FROM filesystem_state fs
WHERE fs.file_path LIKE :prefix_wildcard WHERE fs.file_path LIKE :prefix_wildcard
AND fs.file_path != :prefix AND fs.file_path != :prefix
@@ -175,6 +178,10 @@ def browse(path: str = "ROOT", db_session: Session = Depends(get_db)):
if not f[4]: # f[4] is has_version if not f[4]: # f[4] is has_version
continue continue
archived_bytes = f[7] or 0
file_size = f[2] or 0
is_partially_archived = archived_bytes > 0 and archived_bytes < file_size
results.append( results.append(
{ {
"name": os.path.basename(f[1]), "name": os.path.basename(f[1]),
@@ -185,6 +192,8 @@ def browse(path: str = "ROOT", db_session: Session = Depends(get_db)):
"vulnerable": False, "vulnerable": False,
"selected": bool(f[6]), "selected": bool(f[6]),
"media": f[5].split(",") if f[5] else [], "media": f[5].split(",") if f[5] else [],
"is_partially_archived": is_partially_archived,
"archived_bytes": archived_bytes,
} }
) )
@@ -215,7 +224,10 @@ def search(q: str, path: Optional[str] = None, db_session: Session = Depends(get
FROM file_versions fv FROM file_versions fv
JOIN storage_media sm ON sm.id = fv.media_id JOIN storage_media sm ON sm.id = fv.media_id
WHERE fv.filesystem_state_id = fs.id) as media_list, WHERE fv.filesystem_state_id = fs.id) as media_list,
EXISTS(SELECT 1 FROM restore_cart rc WHERE rc.filesystem_state_id = fs.id) as is_selected EXISTS(SELECT 1 FROM restore_cart rc WHERE rc.filesystem_state_id = fs.id) as is_selected,
COALESCE((SELECT SUM(fv.offset_end - fv.offset_start)
FROM file_versions fv
WHERE fv.filesystem_state_id = fs.id), 0) as archived_bytes
FROM filesystem_fts fts FROM filesystem_fts fts
JOIN filesystem_state fs ON fs.id = fts.rowid JOIN filesystem_state fs ON fs.id = fts.rowid
WHERE filesystem_fts MATCH :query WHERE filesystem_fts MATCH :query
@@ -229,20 +241,28 @@ def search(q: str, path: Optional[str] = None, db_session: Session = Depends(get
query_params = {"query": q, "path_prefix": path_prefix} query_params = {"query": q, "path_prefix": path_prefix}
rows = db_session.execute(search_sql, query_params).fetchall() rows = db_session.execute(search_sql, query_params).fetchall()
return [ results = []
{ for r in rows:
"name": os.path.basename(r[1]), if not r[4]: # Only show if has_version is True
"path": r[1], continue
"type": "file", archived_bytes = r[7] or 0
"size": r[2], file_size = r[2] or 0
"mtime": datetime.fromtimestamp(r[3], tz=timezone.utc), is_partially_archived = archived_bytes > 0 and archived_bytes < file_size
"vulnerable": False, results.append(
"selected": bool(r[6]), {
"media": r[5].split(",") if r[5] else [], "name": os.path.basename(r[1]),
} "path": r[1],
for r in rows "type": "file",
if r[4] # Only show if has_version is True "size": r[2],
] "mtime": datetime.fromtimestamp(r[3], tz=timezone.utc),
"vulnerable": False,
"selected": bool(r[6]),
"media": r[5].split(",") if r[5] else [],
"is_partially_archived": is_partially_archived,
"archived_bytes": archived_bytes,
}
)
return results
@router.get("/tree", response_model=List[TreeNodeSchema], operation_id="archive_tree") @router.get("/tree", response_model=List[TreeNodeSchema], operation_id="archive_tree")
@@ -323,6 +343,9 @@ def metadata(path: str, db_session: Session = Depends(get_db)):
} }
) )
archived_bytes = sum((v.offset_end - v.offset_start) for v in item.versions)
is_partially_archived = archived_bytes > 0 and archived_bytes < item.size
return ItemMetadataSchema( return ItemMetadataSchema(
id=item.id, id=item.id,
path=item.file_path, path=item.file_path,
@@ -333,6 +356,8 @@ def metadata(path: str, db_session: Session = Depends(get_db)):
sha256_hash=item.sha256_hash, sha256_hash=item.sha256_hash,
is_ignored=item.is_ignored, is_ignored=item.is_ignored,
versions=versions, versions=versions,
is_partially_archived=is_partially_archived,
archived_bytes=archived_bytes,
) )
# No exact match — check if this is a directory with archived children # No exact match — check if this is a directory with archived children
+10 -10
View File
@@ -67,10 +67,12 @@ def _media_to_schema(media: models.StorageMedia, config: Dict[str, Any]) -> Medi
region=media.region, region=media.region,
bucket_name=media.bucket_name, bucket_name=media.bucket_name,
access_key_id=media.access_key_id, access_key_id=media.access_key_id,
secret_access_key_name=media.secret_access_key_name,
path_style_access=media.path_style_access, path_style_access=media.path_style_access,
storage_class=media.storage_class, storage_class=media.storage_class,
max_part_size_mb=media.max_part_size_mb, max_part_size_mb=media.max_part_size_mb,
obfuscate_filenames=media.obfuscate_filenames, obfuscate_filenames=media.obfuscate_filenames,
encryption_secret_name=media.encryption_secret_name,
config=config, config=config,
) )
@@ -228,6 +230,7 @@ def create_media(
new_media.compression = request_data.compression new_media.compression = request_data.compression
new_media.encryption_key_id = request_data.encryption_key_id new_media.encryption_key_id = request_data.encryption_key_id
new_media.cleaning_cartridge = request_data.cleaning_cartridge new_media.cleaning_cartridge = request_data.cleaning_cartridge
new_media.encryption_secret_name = request_data.encryption_secret_name
elif request_data.media_type == "local_hdd": elif request_data.media_type == "local_hdd":
assert isinstance(request_data, schemas.OfflineHddCreateSchema) assert isinstance(request_data, schemas.OfflineHddCreateSchema)
new_media.drive_model = request_data.drive_model new_media.drive_model = request_data.drive_model
@@ -238,6 +241,7 @@ def create_media(
new_media.connection_interface = request_data.connection_interface new_media.connection_interface = request_data.connection_interface
new_media.encrypted = request_data.encrypted new_media.encrypted = request_data.encrypted
new_media.encryption_key_id = request_data.encryption_key_id new_media.encryption_key_id = request_data.encryption_key_id
new_media.encryption_secret_name = request_data.encryption_secret_name
elif request_data.media_type == "s3_compat": elif request_data.media_type == "s3_compat":
assert isinstance(request_data, schemas.CloudCreateSchema) assert isinstance(request_data, schemas.CloudCreateSchema)
new_media.provider_template = request_data.provider_template new_media.provider_template = request_data.provider_template
@@ -245,14 +249,12 @@ def create_media(
new_media.region = request_data.region new_media.region = request_data.region
new_media.bucket_name = request_data.bucket_name new_media.bucket_name = request_data.bucket_name
new_media.access_key_id = request_data.access_key_id new_media.access_key_id = request_data.access_key_id
new_media.secret_access_key = request_data.secret_access_key new_media.secret_access_key_name = request_data.secret_access_key_name
new_media.path_style_access = request_data.path_style_access new_media.path_style_access = request_data.path_style_access
new_media.storage_class = request_data.storage_class new_media.storage_class = request_data.storage_class
new_media.max_part_size_mb = request_data.max_part_size_mb new_media.max_part_size_mb = request_data.max_part_size_mb
new_media.obfuscate_filenames = request_data.obfuscate_filenames new_media.obfuscate_filenames = request_data.obfuscate_filenames
new_media.client_side_encryption_passphrase = ( new_media.encryption_secret_name = request_data.encryption_secret_name
request_data.client_side_encryption_passphrase
)
db_session.add(new_media) db_session.add(new_media)
db_session.commit() db_session.commit()
@@ -349,8 +351,8 @@ def update_media(
media_record.bucket_name = request_data.bucket_name media_record.bucket_name = request_data.bucket_name
if request_data.access_key_id is not None: if request_data.access_key_id is not None:
media_record.access_key_id = request_data.access_key_id media_record.access_key_id = request_data.access_key_id
if request_data.secret_access_key is not None: if request_data.secret_access_key_name is not None:
media_record.secret_access_key = request_data.secret_access_key media_record.secret_access_key_name = request_data.secret_access_key_name
if request_data.path_style_access is not None: if request_data.path_style_access is not None:
media_record.path_style_access = request_data.path_style_access media_record.path_style_access = request_data.path_style_access
if request_data.storage_class is not None: if request_data.storage_class is not None:
@@ -359,10 +361,8 @@ def update_media(
media_record.max_part_size_mb = request_data.max_part_size_mb media_record.max_part_size_mb = request_data.max_part_size_mb
if request_data.obfuscate_filenames is not None: if request_data.obfuscate_filenames is not None:
media_record.obfuscate_filenames = request_data.obfuscate_filenames media_record.obfuscate_filenames = request_data.obfuscate_filenames
if request_data.client_side_encryption_passphrase is not None: if request_data.encryption_secret_name is not None:
media_record.client_side_encryption_passphrase = ( media_record.encryption_secret_name = request_data.encryption_secret_name
request_data.client_side_encryption_passphrase
)
# Handle legacy extra_config for backward compatibility # Handle legacy extra_config for backward compatibility
if media_record.extra_config: if media_record.extra_config:
+14 -4
View File
@@ -25,6 +25,8 @@ class ItemMetadataSchema(BaseModel):
child_count: Optional[int] = 0 child_count: Optional[int] = 0
selected: bool = False selected: bool = False
versions: List[Dict[str, Any]] = [] versions: List[Dict[str, Any]] = []
is_partially_archived: bool = False
archived_bytes: int = 0
class DiscrepancySchema(BaseModel): class DiscrepancySchema(BaseModel):
@@ -66,6 +68,8 @@ class LtoTapeCreateSchema(MediaBaseSchema):
compression: bool = True compression: bool = True
encryption_key_id: Optional[str] = None encryption_key_id: Optional[str] = None
cleaning_cartridge: bool = False cleaning_cartridge: bool = False
# Reference to encryption passphrase in the settings keystore
encryption_secret_name: Optional[str] = None
class OfflineHddCreateSchema(MediaBaseSchema): class OfflineHddCreateSchema(MediaBaseSchema):
@@ -80,6 +84,8 @@ class OfflineHddCreateSchema(MediaBaseSchema):
connection_interface: Optional[str] = None connection_interface: Optional[str] = None
encrypted: bool = False encrypted: bool = False
encryption_key_id: Optional[str] = None encryption_key_id: Optional[str] = None
# Reference to encryption passphrase in the settings keystore
encryption_secret_name: Optional[str] = None
class CloudCreateSchema(MediaBaseSchema): class CloudCreateSchema(MediaBaseSchema):
@@ -91,12 +97,14 @@ class CloudCreateSchema(MediaBaseSchema):
region: str region: str
bucket_name: str bucket_name: str
access_key_id: str access_key_id: str
secret_access_key: str # References to secrets in the settings keystore
secret_access_key_name: Optional[str] = None
path_style_access: bool = False path_style_access: bool = False
storage_class: Optional[str] = None storage_class: Optional[str] = None
max_part_size_mb: int = 5000 max_part_size_mb: int = 5000
obfuscate_filenames: bool = False obfuscate_filenames: bool = False
client_side_encryption_passphrase: Optional[str] = None # Reference to encryption passphrase in the settings keystore
encryption_secret_name: Optional[str] = None
# Discriminated union type for creating media # Discriminated union type for creating media
@@ -135,12 +143,12 @@ class MediaUpdateSchema(BaseModel):
region: Optional[str] = None region: Optional[str] = None
bucket_name: Optional[str] = None bucket_name: Optional[str] = None
access_key_id: Optional[str] = None access_key_id: Optional[str] = None
secret_access_key: Optional[str] = None secret_access_key_name: Optional[str] = None
path_style_access: Optional[bool] = None path_style_access: Optional[bool] = None
storage_class: Optional[str] = None storage_class: Optional[str] = None
max_part_size_mb: Optional[int] = None max_part_size_mb: Optional[int] = None
obfuscate_filenames: Optional[bool] = None obfuscate_filenames: Optional[bool] = None
client_side_encryption_passphrase: Optional[str] = None encryption_secret_name: Optional[str] = None
class MediaSchema(BaseModel): class MediaSchema(BaseModel):
@@ -179,10 +187,12 @@ class MediaSchema(BaseModel):
region: Optional[str] = None region: Optional[str] = None
bucket_name: Optional[str] = None bucket_name: Optional[str] = None
access_key_id: Optional[str] = None access_key_id: Optional[str] = None
secret_access_key_name: Optional[str] = None
path_style_access: bool = False path_style_access: bool = False
storage_class: Optional[str] = None storage_class: Optional[str] = None
max_part_size_mb: int = 5000 max_part_size_mb: int = 5000
obfuscate_filenames: bool = False obfuscate_filenames: bool = False
encryption_secret_name: Optional[str] = None
# Legacy config fallback # Legacy config fallback
config: Dict[str, Any] = {} config: Dict[str, Any] = {}
# Runtime status # Runtime status
+23 -15
View File
@@ -21,23 +21,31 @@ def get_dashboard_stats(db_session: Session = Depends(get_db)):
SUM(size) as total_size, SUM(size) as total_size,
SUM(CASE WHEN is_ignored = 1 THEN 1 ELSE 0 END) as ignored_count, SUM(CASE WHEN is_ignored = 1 THEN 1 ELSE 0 END) as ignored_count,
SUM(CASE WHEN is_ignored = 1 THEN size ELSE 0 END) as ignored_size, SUM(CASE WHEN is_ignored = 1 THEN size ELSE 0 END) as ignored_size,
SUM(CASE WHEN is_ignored = 0 AND is_deleted = 0 AND id NOT IN ( SUM(CASE WHEN is_ignored = 0 AND is_deleted = 0 AND
SELECT fv.filesystem_state_id FROM file_versions fv COALESCE((SELECT SUM(fv.offset_end - fv.offset_start)
JOIN storage_media sm ON sm.id = fv.media_id FROM file_versions fv
WHERE sm.status IN ('active', 'full') JOIN storage_media sm ON sm.id = fv.media_id
) THEN 1 ELSE 0 END) as unprotected_count, WHERE fv.filesystem_state_id = filesystem_state.id
SUM(CASE WHEN is_ignored = 0 AND is_deleted = 0 AND id NOT IN ( AND sm.status IN ('active', 'full')), 0) < filesystem_state.size
SELECT fv.filesystem_state_id FROM file_versions fv THEN 1 ELSE 0 END) as unprotected_count,
JOIN storage_media sm ON sm.id = fv.media_id SUM(CASE WHEN is_ignored = 0 AND is_deleted = 0 AND
WHERE sm.status IN ('active', 'full') COALESCE((SELECT SUM(fv.offset_end - fv.offset_start)
) THEN size ELSE 0 END) as unprotected_size, FROM file_versions fv
JOIN storage_media sm ON sm.id = fv.media_id
WHERE fv.filesystem_state_id = filesystem_state.id
AND sm.status IN ('active', 'full')), 0) < filesystem_state.size
THEN filesystem_state.size - COALESCE((SELECT SUM(fv.offset_end - fv.offset_start)
FROM file_versions fv
JOIN storage_media sm ON sm.id = fv.media_id
WHERE fv.filesystem_state_id = filesystem_state.id
AND sm.status IN ('active', 'full')), 0)
ELSE 0 END) as unprotected_size,
SUM(CASE WHEN sha256_hash IS NOT NULL AND is_ignored = 0 AND is_deleted = 0 THEN 1 ELSE 0 END) as hashed_count, SUM(CASE WHEN sha256_hash IS NOT NULL AND is_ignored = 0 AND is_deleted = 0 THEN 1 ELSE 0 END) as hashed_count,
SUM(CASE WHEN is_ignored = 0 AND is_deleted = 0 THEN 1 ELSE 0 END) as eligible_count, SUM(CASE WHEN is_ignored = 0 AND is_deleted = 0 THEN 1 ELSE 0 END) as eligible_count,
SUM(CASE WHEN is_deleted = 0 AND id IN ( COALESCE((SELECT SUM(fv.offset_end - fv.offset_start)
SELECT fv.filesystem_state_id FROM file_versions fv FROM file_versions fv
JOIN storage_media sm ON sm.id = fv.media_id JOIN storage_media sm ON sm.id = fv.media_id
WHERE sm.status IN ('active', 'full') WHERE sm.status IN ('active', 'full')), 0) as archived_size,
) THEN size ELSE 0 END) as archived_size,
SUM(CASE WHEN is_deleted = 1 THEN 1 ELSE 0 END) as missing_count, SUM(CASE WHEN is_deleted = 1 THEN 1 ELSE 0 END) as missing_count,
SUM(CASE WHEN is_deleted = 1 AND missing_acknowledged_at IS NULL AND is_ignored = 0 THEN 1 ELSE 0 END) as active_discrepancies_count SUM(CASE WHEN is_deleted = 1 AND missing_acknowledged_at IS NULL AND is_ignored = 0 THEN 1 ELSE 0 END) as active_discrepancies_count
FROM filesystem_state FROM filesystem_state
+91
View File
@@ -1,5 +1,6 @@
import csv import csv
import io import io
import json
from typing import Dict, List from typing import Dict, List
import pathspec import pathspec
@@ -15,6 +16,44 @@ from app.db.database import get_db
router = APIRouter(tags=["System"]) router = APIRouter(tags=["System"])
# --- Secrets Keystore Helpers ---
SECRETS_KEY = "secrets"
def _get_secrets(db_session: Session) -> Dict[str, str]:
    """Load the secrets keystore from its SystemSetting row.

    Returns an empty dict when the row is missing, empty, or holds a
    payload that fails to parse as JSON.
    """
    setting = (
        db_session.query(models.SystemSetting)
        .filter(models.SystemSetting.key == SECRETS_KEY)
        .first()
    )
    if setting is None or not setting.value:
        return {}
    try:
        return json.loads(setting.value)
    except json.JSONDecodeError:
        # Corrupt keystore payload: degrade to an empty keystore.
        return {}
def _set_secrets(db_session: Session, secrets: Dict[str, str]) -> None:
    """Serialize and persist the keystore, creating its row if needed."""
    serialized = json.dumps(secrets)
    existing = (
        db_session.query(models.SystemSetting)
        .filter(models.SystemSetting.key == SECRETS_KEY)
        .first()
    )
    if existing is None:
        db_session.add(models.SystemSetting(key=SECRETS_KEY, value=serialized))
    else:
        existing.value = serialized
    db_session.commit()
# --- Schemas ---
class TestExclusionsRequest(BaseModel): class TestExclusionsRequest(BaseModel):
patterns: str patterns: str
limit: int = 10 limit: int = 10
@@ -28,6 +67,15 @@ class TestExclusionsResponse(BaseModel):
sample: List[FileItemSchema] sample: List[FileItemSchema]
class SecretCreateRequest(BaseModel):
    """Request body for adding or overwriting a keystore secret."""

    # Keystore entry name (used later as a reference from media records).
    name: str
    # Raw secret value to store.
    value: str
class SecretDeleteRequest(BaseModel):
    """Request body for removing a keystore secret by name."""

    # Name of the keystore entry to delete.
    name: str
@router.get("/settings", response_model=Dict[str, str], operation_id="get_settings") @router.get("/settings", response_model=Dict[str, str], operation_id="get_settings")
def get_settings(db_session: Session = Depends(get_db)): def get_settings(db_session: Session = Depends(get_db)):
"""Retrieves all global system configuration key-value pairs.""" """Retrieves all global system configuration key-value pairs."""
@@ -165,3 +213,46 @@ def download_exclusion_report(
media_type="text/csv", media_type="text/csv",
headers={"Content-Disposition": "attachment; filename=exclusion_report.csv"}, headers={"Content-Disposition": "attachment; filename=exclusion_report.csv"},
) )
# --- Secrets Keystore Endpoints ---
@router.get("/secrets", response_model=List[str], operation_id="list_secrets")
def list_secrets(db_session: Session = Depends(get_db)):
    """Returns a list of secret names in the keystore (values are never returned)."""
    return list(_get_secrets(db_session).keys())
@router.post("/secrets", operation_id="create_secret")
def create_secret(
    request_data: SecretCreateRequest, db_session: Session = Depends(get_db)
):
    """Adds or updates a secret in the keystore."""
    # Read-modify-write the whole keystore blob; overwrites silently.
    keystore = _get_secrets(db_session)
    keystore[request_data.name] = request_data.value
    _set_secrets(db_session, keystore)
    return {"message": f"Secret '{request_data.name}' stored."}
@router.delete("/secrets", operation_id="delete_secret")
def delete_secret(
    request_data: SecretDeleteRequest, db_session: Session = Depends(get_db)
):
    """Removes a secret from the keystore; 404 if the name is unknown."""
    keystore = _get_secrets(db_session)
    try:
        del keystore[request_data.name]
    except KeyError:
        raise HTTPException(status_code=404, detail="Secret not found.")
    _set_secrets(db_session, keystore)
    return {"message": f"Secret '{request_data.name}' removed."}
@router.get("/secrets/{name}", operation_id="get_secret")
def get_secret(name: str, db_session: Session = Depends(get_db)):
    """Retrieves the value of a secret by name.

    NOTE(review): unlike list_secrets, this endpoint returns the raw
    secret value to any API caller — confirm that access to this router
    is appropriately restricted before exposing it.
    """
    secrets = _get_secrets(db_session)
    if name not in secrets:
        raise HTTPException(status_code=404, detail="Secret not found.")
    return {"name": name, "value": secrets[name]}
+3 -5
View File
@@ -3,8 +3,10 @@ from pydantic_settings import BaseSettings, SettingsConfigDict
class Settings(BaseSettings): class Settings(BaseSettings):
""" """
Standardized secret management and application configuration. Application configuration.
Values can be overridden via environment variables or a .env file. Values can be overridden via environment variables or a .env file.
NOTE: No default secrets or passphrases are set. Users must configure
secrets via the settings keystore before encryption can be used.
""" """
model_config = SettingsConfigDict( model_config = SettingsConfigDict(
@@ -14,10 +16,6 @@ class Settings(BaseSettings):
# Database # Database
database_url: str = "sqlite:///./tapehoard.db" database_url: str = "sqlite:///./tapehoard.db"
# Security / Encryption
# Standardized secret management pattern
encryption_passphrase: str = "tapehoard-default-insecure-passphrase"
# Staging # Staging
staging_directory: str = "/staging" staging_directory: str = "/staging"
+10
View File
@@ -94,12 +94,22 @@ class StorageMedia(Base):
region: Mapped[Optional[str]] = mapped_column(String) region: Mapped[Optional[str]] = mapped_column(String)
bucket_name: Mapped[Optional[str]] = mapped_column(String) bucket_name: Mapped[Optional[str]] = mapped_column(String)
access_key_id: Mapped[Optional[str]] = mapped_column(String) access_key_id: Mapped[Optional[str]] = mapped_column(String)
# DEPRECATED: raw secret values are no longer stored on media records.
# Use secret_access_key_name (reference to settings keystore) instead.
secret_access_key: Mapped[Optional[str]] = mapped_column(String) secret_access_key: Mapped[Optional[str]] = mapped_column(String)
secret_access_key_name: Mapped[Optional[str]] = mapped_column(
String
) # Reference to settings secrets keystore
path_style_access: Mapped[bool] = mapped_column(Boolean, default=False) path_style_access: Mapped[bool] = mapped_column(Boolean, default=False)
storage_class: Mapped[Optional[str]] = mapped_column(String) storage_class: Mapped[Optional[str]] = mapped_column(String)
max_part_size_mb: Mapped[int] = mapped_column(Integer, default=5000) max_part_size_mb: Mapped[int] = mapped_column(Integer, default=5000)
obfuscate_filenames: Mapped[bool] = mapped_column(Boolean, default=False) obfuscate_filenames: Mapped[bool] = mapped_column(Boolean, default=False)
# DEPRECATED: raw passphrase values are no longer stored on media records.
# Use encryption_secret_name (reference to settings keystore) instead.
client_side_encryption_passphrase: Mapped[Optional[str]] = mapped_column(String) client_side_encryption_passphrase: Mapped[Optional[str]] = mapped_column(String)
encryption_secret_name: Mapped[Optional[str]] = mapped_column(
String
) # Reference to settings secrets keystore
versions: Mapped[List["FileVersion"]] = relationship(back_populates="media") versions: Mapped[List["FileVersion"]] = relationship(back_populates="media")
+38 -9
View File
@@ -1,4 +1,5 @@
import hashlib import hashlib
import json
import boto3 import boto3
import os import os
import io import io
@@ -11,7 +12,28 @@ from Crypto.Cipher import AES
from Crypto.Protocol.KDF import PBKDF2 from Crypto.Protocol.KDF import PBKDF2
from Crypto.Hash import SHA256 from Crypto.Hash import SHA256
from app.core.config import settings
# Keystore helpers (avoid circular imports)
def _get_secret(name: str) -> Optional[str]:
    """Look up a secret value from the settings keystore by name.

    Best-effort: returns None when the name is falsy, the keystore row is
    missing, or any error occurs while reading/parsing it.
    """
    if not name:
        return None
    try:
        # Imported inside the function so this module can load without the
        # database layer being importable at module import time.
        from app.db.database import SessionLocal
        from app.db import models
        with SessionLocal() as db_session:
            record = (
                db_session.query(models.SystemSetting)
                .filter(models.SystemSetting.key == "secrets")
                .first()
            )
            if record and record.value:
                secrets = json.loads(record.value)
                return secrets.get(name)
    except Exception:
        # NOTE(review): deliberately broad — a failed lookup degrades to
        # "no secret" instead of crashing the storage provider. Consider
        # logging here so misconfiguration is visible.
        pass
    return None
class CloudStorageProvider(AbstractStorageProvider): class CloudStorageProvider(AbstractStorageProvider):
@@ -48,9 +70,10 @@ class CloudStorageProvider(AbstractStorageProvider):
"type": "string", "type": "string",
"title": "Access Key ID", "title": "Access Key ID",
}, },
"secret_access_key": { "secret_access_key_name": {
"type": "string", "type": "string",
"title": "Secret Access Key", "title": "Secret Access Key",
"description": "Name of a secret stored in the settings keystore.",
}, },
"path_style_access": { "path_style_access": {
"type": "boolean", "type": "boolean",
@@ -69,10 +92,10 @@ class CloudStorageProvider(AbstractStorageProvider):
"description": "Multipart upload chunk size.", "description": "Multipart upload chunk size.",
"default": 5000, "default": 5000,
}, },
"encryption_passphrase": { "encryption_secret_name": {
"type": "string", "type": "string",
"title": "Client-Side Encryption Passphrase", "title": "Encryption Secret",
"description": "Used to encrypt data locally before uploading via AES-256-GCM.", "description": "Name of a secret in the settings keystore used for client-side encryption.",
}, },
"obfuscate_filenames": { "obfuscate_filenames": {
"type": "boolean", "type": "boolean",
@@ -93,14 +116,20 @@ class CloudStorageProvider(AbstractStorageProvider):
self.endpoint_url = endpoint or None self.endpoint_url = endpoint or None
self.obfuscate = config.get("obfuscate_filenames", False) self.obfuscate = config.get("obfuscate_filenames", False)
# Local Encryption Settings: Use provided or global default # Resolve encryption passphrase from keystore (no global fallback)
encryption_secret_name = config.get("encryption_secret_name")
self.passphrase = ( self.passphrase = (
config.get("encryption_passphrase") or settings.encryption_passphrase _get_secret(encryption_secret_name) if encryption_secret_name else None
) )
# Credentials # Resolve credentials from keystore
access_key = config.get("access_key") access_key = config.get("access_key")
secret_key = config.get("secret_key") secret_key_name = config.get("secret_access_key_name")
secret_key = (
_get_secret(secret_key_name)
if secret_key_name
else config.get("secret_key")
)
client_kwargs = { client_kwargs = {
"aws_access_key_id": access_key, "aws_access_key_id": access_key,
+24 -15
View File
@@ -153,12 +153,14 @@ class ArchiverService:
provider_config.setdefault("bucket_name", media_record.bucket_name) provider_config.setdefault("bucket_name", media_record.bucket_name)
if media_record.access_key_id: if media_record.access_key_id:
provider_config.setdefault("access_key", media_record.access_key_id) provider_config.setdefault("access_key", media_record.access_key_id)
if media_record.secret_access_key: if media_record.secret_access_key_name:
provider_config.setdefault("secret_key", media_record.secret_access_key)
if media_record.client_side_encryption_passphrase:
provider_config.setdefault( provider_config.setdefault(
"encryption_passphrase", "secret_access_key_name", media_record.secret_access_key_name
media_record.client_side_encryption_passphrase, )
if media_record.encryption_secret_name:
provider_config.setdefault(
"encryption_secret_name",
media_record.encryption_secret_name,
) )
provider_config.setdefault( provider_config.setdefault(
"obfuscate_filenames", media_record.obfuscate_filenames "obfuscate_filenames", media_record.obfuscate_filenames
@@ -694,17 +696,24 @@ class ArchiverService:
f"Media record {media_id_for_log} was modified or deleted by another process; skipping final commit" f"Media record {media_id_for_log} was modified or deleted by another process; skipping final commit"
) )
JobManager.add_job_log( if JobManager.is_cancelled(job_id):
job_id, f"Backup complete. Utilization: {utilization_ratio*100:.1f}%" JobManager.add_job_log(
) job_id,
JobManager.complete_job(job_id) f"Backup cancelled. Utilization: {utilization_ratio*100:.1f}%",
from app.services.notifications import notification_manager )
else:
JobManager.add_job_log(
job_id,
f"Backup complete. Utilization: {utilization_ratio*100:.1f}%",
)
JobManager.complete_job(job_id)
from app.services.notifications import notification_manager
notification_manager.notify( notification_manager.notify(
"Archival Complete", "Archival Complete",
f"{media_identifier_for_log} synchronized.", f"{media_identifier_for_log} synchronized.",
"success", "success",
) )
except Exception as e: except Exception as e:
logger.exception(f"Archival failed: {e}") logger.exception(f"Archival failed: {e}")
+389
View File
@@ -171,3 +171,392 @@ def test_get_metadata(client, db_session):
response = client.get("/archive/metadata?path=data/meta.txt") response = client.get("/archive/metadata?path=data/meta.txt")
assert response.status_code == 200 assert response.status_code == 200
assert response.json()["path"] == "data/meta.txt" assert response.json()["path"] == "data/meta.txt"
# ── Partial Archive Detection ──
def test_browse_shows_partially_archived_file(client, db_session):
    """Files with offset_end < size should show is_partially_archived=True."""
    db_session.add(
        models.SystemSetting(key="source_roots", value=json.dumps(["source_data"]))
    )
    db_session.flush()
    hdd = models.StorageMedia(
        media_type="hdd", identifier="M1", capacity=1000, status="active"
    )
    db_session.add(hdd)
    db_session.flush()
    state = models.FilesystemState(
        file_path="source_data/big.zip", size=1000, mtime=1000
    )
    db_session.add(state)
    db_session.flush()
    # Archive only the first 600 of 1000 bytes (partial coverage).
    partial_version = models.FileVersion(
        filesystem_state_id=state.id,
        media_id=hdd.id,
        file_number="1",
        offset_start=0,
        offset_end=600,
    )
    db_session.add(partial_version)
    db_session.commit()

    response = client.get("/archive/browse?path=source_data")
    assert response.status_code == 200
    by_path = {f["path"]: f for f in response.json()}
    entry = by_path.get("source_data/big.zip")
    assert entry is not None
    assert entry["is_partially_archived"] is True
    assert entry["archived_bytes"] == 600
def test_browse_fully_archived_file_not_partial(client, db_session):
    """Files with offset_end == size should show is_partially_archived=False."""
    db_session.add(
        models.SystemSetting(key="source_roots", value=json.dumps(["source_data"]))
    )
    db_session.flush()

    disk = models.StorageMedia(
        media_type="hdd", identifier="M1", capacity=1000, status="active"
    )
    db_session.add(disk)
    db_session.flush()

    tracked = models.FilesystemState(
        file_path="source_data/complete.txt", size=500, mtime=1000
    )
    db_session.add(tracked)
    db_session.flush()

    # The archived range covers the entire 500-byte file -> not partial.
    db_session.add(
        models.FileVersion(
            filesystem_state_id=tracked.id,
            media_id=disk.id,
            file_number="1",
            offset_start=0,
            offset_end=500,
        )
    )
    db_session.commit()

    response = client.get("/archive/browse?path=source_data")
    assert response.status_code == 200

    listing = response.json()
    entry = next(
        (f for f in listing if f["path"] == "source_data/complete.txt"), None
    )
    assert entry is not None
    assert entry["is_partially_archived"] is False
    assert entry["archived_bytes"] == 500
def test_search_shows_partially_archived(client, db_session):
    """Search results include partial archive indicators if FTS5 finds the file."""
    db_session.add(models.SystemSetting(key="source_roots", value=json.dumps(["data"])))
    db_session.flush()
    file1 = models.FilesystemState(file_path="data/partial.bin", size=1000, mtime=1000)
    db_session.add(file1)
    db_session.commit()
    media = models.StorageMedia(
        media_type="hdd", identifier="M2", capacity=1000, status="active"
    )
    db_session.add(media)
    db_session.flush()
    # Only 300 of the file's 1000 bytes are archived -> expect "partial".
    db_session.add(
        models.FileVersion(
            filesystem_state_id=file1.id,
            media_id=media.id,
            file_number="1",
            offset_start=0,
            offset_end=300,
        )
    )
    db_session.commit()
    # Manually insert into FTS5 since triggers may not fire on ORM inserts in tests
    from sqlalchemy import text
    db_session.execute(
        text("INSERT INTO filesystem_fts(rowid, file_path) VALUES (:rowid, :path)"),
        {"rowid": file1.id, "path": file1.file_path},
    )
    db_session.commit()
    response = client.get("/archive/search?q=partial")
    assert response.status_code == 200
    data = response.json()
    # Exactly one hit, carrying the partial-archive indicators.
    assert len(data) == 1
    assert data[0]["is_partially_archived"] is True
    assert data[0]["archived_bytes"] == 300
def test_metadata_partial_archive(client, db_session):
    """Metadata endpoint returns archived_bytes and is_partially_archived."""
    disk = models.StorageMedia(
        media_type="hdd", identifier="M1", capacity=1000, status="active"
    )
    db_session.add(disk)
    db_session.flush()

    tracked = models.FilesystemState(file_path="data/half.txt", size=800, mtime=1000)
    db_session.add(tracked)
    db_session.flush()

    # Archive 350 of the file's 800 bytes -> metadata must flag it partial.
    db_session.add(
        models.FileVersion(
            filesystem_state_id=tracked.id,
            media_id=disk.id,
            file_number="1",
            offset_start=0,
            offset_end=350,
        )
    )
    db_session.commit()

    resp = client.get("/archive/metadata?path=data/half.txt")
    assert resp.status_code == 200

    meta = resp.json()
    assert meta["is_partially_archived"] is True
    assert meta["archived_bytes"] == 350
    assert meta["size"] == 800
# ── Type-Specific Media Schemas ──
def test_register_lto_tape_media(client):
    """Tests registering an LTO tape with type-specific fields."""
    payload = {
        "media_type": "lto_tape",
        "identifier": "LTO7_001",
        "capacity": 6000000000000,
        "location": "Vault A",
        "generation": "LTO-7",
        "worm": False,
        "write_protected": False,
        "compression": True,
        "encryption_key_id": "tape-key-1",
        "encryption_secret_name": "my-tape-secret",
    }

    resp = client.post("/inventory/media", json=payload)
    assert resp.status_code == 200

    created = resp.json()
    # Tape-specific fields must round-trip through the API unchanged.
    assert created["identifier"] == "LTO7_001"
    assert created["media_type"] == "lto_tape"
    assert created["generation"] == "LTO-7"
    assert created["compression"] is True
    assert created["encryption_key_id"] == "tape-key-1"
    assert created["encryption_secret_name"] == "my-tape-secret"
def test_register_cloud_media(client):
    """Tests registering S3-compatible cloud storage with secret names."""
    payload = {
        "media_type": "s3_compat",
        "identifier": "s3-primary",
        "capacity": 100000000000,
        "location": "us-east-1",
        "provider_template": "aws",
        "endpoint_url": "https://s3.amazonaws.com",
        "region": "us-east-1",
        "bucket_name": "my-backup-bucket",
        "access_key_id": "AKIAIOSFODNN7EXAMPLE",
        "secret_access_key_name": "aws-production-key",
        "obfuscate_filenames": True,
        "encryption_secret_name": "my-encryption-key",
    }

    resp = client.post("/inventory/media", json=payload)
    assert resp.status_code == 200

    created = resp.json()
    # Only secret *names* are submitted/echoed; actual values live in the keystore.
    assert created["identifier"] == "s3-primary"
    assert created["media_type"] == "s3_compat"
    assert created["bucket_name"] == "my-backup-bucket"
    assert created["secret_access_key_name"] == "aws-production-key"
    assert created["encryption_secret_name"] == "my-encryption-key"
# ── Structured Location Fields ──
def test_register_hdd_media_with_encryption_secret(client):
    """Tests registering HDD with encryption secret reference."""
    payload = {
        "media_type": "local_hdd",
        "identifier": "DISK_ENC_001",
        "capacity": 1000000000,
        "location": "Safe B",
        "encrypted": True,
        "encryption_key_id": "hdd-key-1",
        "encryption_secret_name": "my-hdd-secret",
    }

    resp = client.post("/inventory/media", json=payload)
    assert resp.status_code == 200

    created = resp.json()
    # Encryption metadata must be persisted and echoed back verbatim.
    assert created["identifier"] == "DISK_ENC_001"
    assert created["encrypted"] is True
    assert created["encryption_key_id"] == "hdd-key-1"
    assert created["encryption_secret_name"] == "my-hdd-secret"
def test_register_media_with_structured_location(client):
    """Tests that structured location fields are persisted."""
    payload = {
        "media_type": "local_hdd",
        "identifier": "DISK_LOC_001",
        "capacity": 1000000000,
        "location": "Building 1, Room 101",
        "location_building": "Building 1",
        "location_room": "Room 101",
        "location_rack": "Rack A",
        "location_slot": "Slot 3",
    }

    resp = client.post("/inventory/media", json=payload)
    assert resp.status_code == 200

    created = resp.json()
    # Each structured component is stored alongside the free-text location.
    assert created["location_building"] == "Building 1"
    assert created["location_room"] == "Room 101"
    assert created["location_rack"] == "Rack A"
    assert created["location_slot"] == "Slot 3"
def test_update_structured_location(client, db_session):
    """Tests updating structured location fields individually."""
    media = models.StorageMedia(
        media_type="hdd",
        identifier="DISK_LOC_002",
        capacity=1000000,
        status="active",
        location_building="Old Building",
    )
    db_session.add(media)
    db_session.commit()

    # PATCH only a subset of the location fields; the rest stay untouched.
    patch_body = {
        "location_building": "New Building",
        "location_room": "Room 202",
        "location_rack": "Rack B",
    }
    resp = client.patch(f"/inventory/media/{media.id}", json=patch_body)
    assert resp.status_code == 200

    updated = resp.json()
    assert updated["location_building"] == "New Building"
    assert updated["location_room"] == "Room 202"
    assert updated["location_rack"] == "Rack B"
# ── Capacity Management ──
def test_capacity_validation_rejects_decrease_below_used(client, db_session):
    """Updating capacity below bytes_used should return 400."""
    media = models.StorageMedia(
        media_type="hdd",
        identifier="DISK_CAP_001",
        capacity=1000000,
        status="active",
        bytes_used=500000,
    )
    db_session.add(media)
    db_session.commit()

    # 400000 < bytes_used (500000), so the API must refuse the shrink.
    resp = client.patch(f"/inventory/media/{media.id}", json={"capacity": 400000})
    assert resp.status_code == 400
    assert "utilized space" in resp.json()["detail"]
def test_capacity_increase_reactivates_full_media(client, db_session):
    """Increasing capacity on a 'full' media should auto-set status to active."""
    media = models.StorageMedia(
        media_type="hdd",
        identifier="DISK_FULL_001",
        capacity=1000000,
        status="full",
        bytes_used=500000,
    )
    db_session.add(media)
    db_session.commit()

    # Doubling capacity leaves ample headroom -> media becomes active again.
    resp = client.patch(f"/inventory/media/{media.id}", json={"capacity": 2000000})
    assert resp.status_code == 200
    assert resp.json()["status"] == "active"
def test_capacity_increase_keeps_full_if_still_near_limit(client, db_session):
    """Increasing capacity but still near 98% should keep status as full."""
    media = models.StorageMedia(
        media_type="hdd",
        identifier="DISK_FULL_002",
        capacity=1000000,
        status="full",
        bytes_used=990000,
    )
    db_session.add(media)
    db_session.commit()

    # A one-byte bump keeps utilization at ~99%, so the media must stay full.
    resp = client.patch(f"/inventory/media/{media.id}", json={"capacity": 1000001})
    assert resp.status_code == 200
    assert resp.json()["status"] == "full"
# ── Status Auto-Purge on Failure/Retired ──
def test_update_status_to_retired_purges_versions(client, db_session):
    """Setting status to RETIRED should delete all file_versions."""
    media = models.StorageMedia(
        media_type="hdd", identifier="DISK_RET_001", capacity=1000, status="active"
    )
    db_session.add(media)
    db_session.flush()
    file1 = models.FilesystemState(file_path="data/file1.txt", size=100, mtime=1000)
    db_session.add(file1)
    db_session.flush()
    # One fully-archived copy exists on the media before retirement.
    db_session.add(
        models.FileVersion(
            filesystem_state_id=file1.id,
            media_id=media.id,
            file_number="1",
            offset_start=0,
            offset_end=100,
        )
    )
    db_session.commit()
    response = client.patch(
        f"/inventory/media/{media.id}",
        json={"status": "RETIRED"},
    )
    assert response.status_code == 200
    assert response.json()["status"] == "RETIRED"
    # Verify versions are purged via raw SQL to bypass identity map caching
    from sqlalchemy import text
    db_session.commit()  # ensure test session sees committed changes
    result = db_session.execute(
        text("SELECT COUNT(*) FROM file_versions WHERE media_id = :media_id"),
        {"media_id": media.id},
    ).scalar()
    assert result == 0
+125
View File
@@ -298,6 +298,57 @@ def test_dashboard_stats_excludes_failed_media(client, db_session):
assert data["archived_data_size"] == 2048 assert data["archived_data_size"] == 2048
def test_dashboard_stats_counts_only_archived_bytes(client, db_session):
    """Tests that archived_data_size counts only written bytes, not full file size."""
    active_media = models.StorageMedia(
        media_type="hdd", identifier="M1", capacity=5000, status="active"
    )
    db_session.add(active_media)
    db_session.flush()
    # File 1: fully archived (2048 bytes)
    file1 = models.FilesystemState(
        file_path="/source/full.txt", size=2048, mtime=1000, is_ignored=False
    )
    # File 2: partially archived (only 500 of 3000 bytes)
    file2 = models.FilesystemState(
        file_path="/source/partial.bin", size=3000, mtime=1000, is_ignored=False
    )
    db_session.add_all([file1, file2])
    db_session.flush()
    # Version rows define the archived byte range (offset_start..offset_end)
    # for each file on the active media.
    db_session.add(
        models.FileVersion(
            filesystem_state_id=file1.id,
            media_id=active_media.id,
            file_number="1",
            offset_start=0,
            offset_end=2048,
        )
    )
    db_session.add(
        models.FileVersion(
            filesystem_state_id=file2.id,
            media_id=active_media.id,
            file_number="1",
            offset_start=0,
            offset_end=500,
        )
    )
    db_session.commit()
    response = client.get("/system/dashboard/stats")
    assert response.status_code == 200
    data = response.json()
    # Archived data = 2048 + 500 = 2548, NOT 2048 + 3000
    assert data["archived_data_size"] == 2548
    # Unprotected count = 1 (partial file is still vulnerable)
    assert data["unprotected_files_count"] == 1
    # Unprotected size = 3000 - 500 = 2500 (the remaining unarchived bytes)
    assert data["unprotected_data_size"] == 2500
def test_discrepancies_excludes_versions_on_unavailable_media(client, db_session): def test_discrepancies_excludes_versions_on_unavailable_media(client, db_session):
"""Tests that discrepancy has_versions is False when only backed up on failed/retired media.""" """Tests that discrepancy has_versions is False when only backed up on failed/retired media."""
failed_media = models.StorageMedia( failed_media = models.StorageMedia(
@@ -379,3 +430,77 @@ def test_discrepancies_excludes_versions_on_unavailable_media(client, db_session
good_backed = next(d for d in data if d["path"] == "/data/exists_on_good.txt") good_backed = next(d for d in data if d["path"] == "/data/exists_on_good.txt")
assert good_backed["has_versions"] is True assert good_backed["has_versions"] is True
# ── Secrets Keystore ──
def test_list_secrets_empty(client):
    """Tests listing secrets when keystore is empty."""
    # A fresh keystore should yield an empty name list, not an error.
    resp = client.get("/system/secrets")
    assert resp.status_code == 200
    assert resp.json() == []
def test_create_and_list_secret(client):
    """Tests creating a secret and verifying it appears in the list."""
    create_resp = client.post(
        "/system/secrets", json={"name": "my-api-key", "value": "secret123"}
    )
    assert create_resp.status_code == 200
    assert "stored" in create_resp.json()["message"]

    # The listing returns names only; the new secret must show up.
    list_resp = client.get("/system/secrets")
    assert list_resp.status_code == 200
    assert "my-api-key" in list_resp.json()
def test_get_secret_value(client):
    """Tests retrieving a secret value by name."""
    client.post(
        "/system/secrets", json={"name": "encryption-key", "value": "super-secret"}
    )

    resp = client.get("/system/secrets/encryption-key")
    assert resp.status_code == 200

    payload = resp.json()
    assert payload["name"] == "encryption-key"
    assert payload["value"] == "super-secret"
def test_get_secret_not_found(client):
    """Tests retrieving a non-existent secret returns 404."""
    # Nothing was stored under this name, so lookup must 404.
    resp = client.get("/system/secrets/nonexistent")
    assert resp.status_code == 404
def test_delete_secret(client):
    """Tests deleting a secret from the keystore."""
    client.post("/system/secrets", json={"name": "to-delete", "value": "val"})

    # DELETE carries the secret name in a JSON body, hence client.request().
    delete_resp = client.request(
        "DELETE", "/system/secrets", json={"name": "to-delete"}
    )
    assert delete_resp.status_code == 200
    assert "removed" in delete_resp.json()["message"]

    remaining = client.get("/system/secrets")
    assert "to-delete" not in remaining.json()
def test_delete_secret_not_found(client):
    """Tests deleting a non-existent secret returns 404."""
    resp = client.request("DELETE", "/system/secrets", json={"name": "missing"})
    assert resp.status_code == 404
def test_update_existing_secret(client):
    """Tests overwriting an existing secret value."""
    # The name deliberately includes surrounding spaces; the read-back using
    # the same spaced name returning 200 implies names are handled verbatim.
    secret_name = " rotating-key "
    for value in ("old-value", "new-value"):
        client.post("/system/secrets", json={"name": secret_name, "value": value})

    resp = client.get(f"/system/secrets/{secret_name}")
    assert resp.status_code == 200
    assert resp.json()["value"] == "new-value"
+27 -11
View File
@@ -41,28 +41,44 @@ def test_cloud_provider_obfuscation_logic():
assert "secret_plan.pdf" not in key_hidden assert "secret_plan.pdf" not in key_hidden
def test_cloud_secret_fallback(mocker): def test_cloud_secret_lookup(mocker, db_session):
"""Verifies that the provider prioritizes local config over global settings for passphrases.""" """Verifies that the provider looks up secrets from the keystore by name."""
from app.core.config import settings from app.db import models
# Mock boto3.client to avoid slow initialization in unit tests # Mock boto3.client to avoid slow initialization in unit tests
mocker.patch("app.providers.cloud.boto3") mocker.patch("app.providers.cloud.boto3")
# Mock global settings # Seed the secrets keystore
mocker.patch.object(settings, "encryption_passphrase", "global-fallback") db_session.add(
models.SystemSetting(
key="secrets",
value='{"my-encryption-key": "local-override", "empty-secret": ""}',
)
)
db_session.commit()
# CASE1: Local config provides passphrase # CASE 1: Secret name provided and exists in keystore
config_local = {"bucket_name": "b", "encryption_passphrase": "local-override"} config_local = {
"bucket_name": "b",
"encryption_secret_name": "my-encryption-key",
}
provider_local = CloudStorageProvider(config_local) provider_local = CloudStorageProvider(config_local)
assert provider_local.passphrase == "local-override" assert provider_local.passphrase == "local-override"
# CASE 2: Local config is empty, should fallback to global # CASE 2: No secret name provided, passphrase is None
config_empty = {"bucket_name": "b"} config_empty = {"bucket_name": "b"}
provider_fallback = CloudStorageProvider(config_empty) provider_fallback = CloudStorageProvider(config_empty)
assert provider_fallback.passphrase == "global-fallback" assert provider_fallback.passphrase is None
# CASE 3: No passphrase anywhere (ValueError on key derivation) # CASE 3: Secret name provided but value is empty string
mocker.patch.object(settings, "encryption_passphrase", "") config_empty_secret = {
"bucket_name": "b",
"encryption_secret_name": "empty-secret",
}
provider_empty = CloudStorageProvider(config_empty_secret)
assert provider_empty.passphrase == ""
# CASE 4: No passphrase anywhere (ValueError on key derivation)
provider_none = CloudStorageProvider({"bucket_name": "b"}) provider_none = CloudStorageProvider({"bucket_name": "b"})
with pytest.raises(ValueError, match="No encryption passphrase configured"): with pytest.raises(ValueError, match="No encryption passphrase configured"):
provider_none._derive_key(b"salt") provider_none._derive_key(b"salt")
+55
View File
@@ -434,3 +434,58 @@ def test_run_restore_mocked(db_session, mocker, tmp_path):
expected_file = restore_dest / "original/path/data.txt" expected_file = restore_dest / "original/path/data.txt"
assert expected_file.exists() assert expected_file.exists()
assert expected_file.read_bytes() == b"hello" assert expected_file.read_bytes() == b"hello"
def test_cancelled_backup_job_status(db_session, mocker, tmp_path):
    """Verifies that a cancelled backup job never calls complete_job."""
    staging = tmp_path / "staging"
    staging.mkdir()
    archiver = ArchiverService(staging_directory=str(staging))
    media = models.StorageMedia(
        media_type="hdd",
        identifier="CANCEL_DISK",
        capacity=10**9,
        status="active",
        bytes_used=0,
    )
    db_session.add(media)
    # One real on-disk file so the backup has something to process.
    source_file = tmp_path / "source.txt"
    source_file.write_bytes(b"hello world")
    f1 = models.FilesystemState(
        file_path=str(source_file),
        size=source_file.stat().st_size,
        mtime=1,
        sha256_hash="hash1",
    )
    db_session.add(f1)
    db_session.commit()
    # Stub out the storage layer so no real device I/O happens.
    mock_provider = mocker.MagicMock()
    mock_provider.capabilities = {"supports_random_access": False}
    mock_provider.identify_media.return_value = "CANCEL_DISK"
    mock_provider.prepare_for_write.return_value = True
    mock_provider.write_archive.return_value = "ARCH_1"
    mocker.patch.object(archiver, "_get_storage_provider", return_value=mock_provider)
    from app.services.scanner import JobManager
    job = JobManager.create_job(db_session, "BACKUP")
    job_id = job.id
    # Simulate cancellation mid-flight by mocking is_cancelled to True
    mocker.patch.object(JobManager, "is_cancelled", return_value=True)
    # Spy (not mock) so the real complete_job would still run if called.
    complete_job_spy = mocker.spy(JobManager, "complete_job")
    archiver.run_backup(db_session, media.id, job_id)
    # complete_job should NEVER be called for a cancelled backup
    complete_job_spy.assert_not_called()
    # Job should not be COMPLETED
    db_session.expire_all()
    refreshed_job = db_session.get(models.Job, job_id)
    assert refreshed_job.status != "COMPLETED"
File diff suppressed because one or more lines are too long
+43 -1
View File
@@ -2,7 +2,7 @@
import type { Client, Options as Options2, TDataShape } from './client'; import type { Client, Options as Options2, TDataShape } from './client';
import { client } from './client.gen'; import { client } from './client.gen';
import type { AddDirectoryToRestoreQueueData, AddDirectoryToRestoreQueueErrors, AddDirectoryToRestoreQueueResponses, AddFileToRestoreQueueData, AddFileToRestoreQueueErrors, AddFileToRestoreQueueResponses, ArchiveBrowseData, ArchiveBrowseErrors, ArchiveBrowseResponses, ArchiveMetadataData, ArchiveMetadataErrors, ArchiveMetadataResponses, ArchiveSearchData, ArchiveSearchErrors, ArchiveSearchResponses, ArchiveTreeData, ArchiveTreeErrors, ArchiveTreeResponses, BatchAddToRestoreQueueData, BatchAddToRestoreQueueErrors, BatchAddToRestoreQueueResponses, BatchConfirmDiscrepanciesData, BatchConfirmDiscrepanciesErrors, BatchConfirmDiscrepanciesResponses, BatchDeleteDiscrepanciesData, BatchDeleteDiscrepanciesErrors, BatchDeleteDiscrepanciesResponses, BatchDismissDiscrepanciesData, BatchDismissDiscrepanciesErrors, BatchDismissDiscrepanciesResponses, BatchResolveDiscrepanciesData, BatchResolveDiscrepanciesErrors, BatchResolveDiscrepanciesResponses, BatchTrackData, BatchTrackErrors, BatchTrackResponses, BrowseDiscrepanciesData, BrowseDiscrepanciesErrors, BrowseDiscrepanciesResponses, BrowseRestoreQueueData, BrowseRestoreQueueErrors, BrowseRestoreQueueResponses, CancelJobData, CancelJobErrors, CancelJobResponses, CheckHealthData, CheckHealthResponses, ClearRestoreQueueData, ClearRestoreQueueResponses, ConfirmDiscrepancyData, ConfirmDiscrepancyErrors, ConfirmDiscrepancyResponses, CreateMediaData, CreateMediaErrors, CreateMediaResponses, DeleteDiscrepancyData, DeleteDiscrepancyErrors, DeleteDiscrepancyResponses, DeleteMediaData, DeleteMediaErrors, DeleteMediaResponses, DetectMediaData, DetectMediaResponses, DiscoverHardwareData, DiscoverHardwareResponses, DismissDiscrepancyData, DismissDiscrepancyErrors, DismissDiscrepancyResponses, DownloadExclusionReportData, DownloadExclusionReportErrors, DownloadExclusionReportResponses, ExportDatabaseData, ExportDatabaseResponses, FilesystemBrowseData, FilesystemBrowseErrors, FilesystemBrowseResponses, FilesystemSearchData, 
FilesystemSearchErrors, FilesystemSearchResponses, FilesystemTreeData, FilesystemTreeErrors, FilesystemTreeResponses, GetAnalyticsData, GetAnalyticsResponses, GetDashboardStatsData, GetDashboardStatsResponses, GetDiscrepancyTreeData, GetDiscrepancyTreeErrors, GetDiscrepancyTreeResponses, GetJobCountData, GetJobCountResponses, GetJobData, GetJobErrors, GetJobLogsData, GetJobLogsErrors, GetJobLogsResponses, GetJobResponses, GetJobStatsData, GetJobStatsResponses, GetRestoreManifestData, GetRestoreManifestResponses, GetRestoreQueueData, GetRestoreQueueResponses, GetRestoreQueueTreeData, GetRestoreQueueTreeErrors, GetRestoreQueueTreeResponses, GetScanStatusData, GetScanStatusResponses, GetSettingsData, GetSettingsResponses, GetTreemapData, GetTreemapResponses, IgnoreHardwareData, IgnoreHardwareErrors, IgnoreHardwareResponses, ImportDatabaseData, ImportDatabaseErrors, ImportDatabaseResponses, InitializeMediaData, InitializeMediaErrors, InitializeMediaResponses, ListBackupsData, ListBackupsResponses, ListDirectoriesData, ListDirectoriesErrors, ListDirectoriesResponses, ListDiscrepanciesData, ListDiscrepanciesResponses, ListJobsData, ListJobsErrors, ListJobsResponses, ListMediaData, ListMediaErrors, ListMediaResponses, ListProvidersData, ListProvidersResponses, RemoveFromRestoreQueueData, RemoveFromRestoreQueueErrors, RemoveFromRestoreQueueResponses, ReorderMediaData, ReorderMediaErrors, ReorderMediaResponses, ResetTestEnvironmentData, ResetTestEnvironmentResponses, RetryJobData, RetryJobErrors, RetryJobResponses, StreamJobsData, StreamJobsResponses, TestExclusionsData, TestExclusionsErrors, TestExclusionsResponses, TestNotificationData, TestNotificationErrors, TestNotificationResponses, TriggerAutoBackupData, TriggerAutoBackupResponses, TriggerBackupData, TriggerBackupErrors, TriggerBackupResponses, TriggerIndexingData, TriggerIndexingResponses, TriggerRestoreData, TriggerRestoreErrors, TriggerRestoreResponses, TriggerScanData, TriggerScanResponses, 
UndoDismissDiscrepancyData, UndoDismissDiscrepancyErrors, UndoDismissDiscrepancyResponses, UpdateMediaData, UpdateMediaErrors, UpdateMediaResponses, UpdateSettingsData, UpdateSettingsErrors, UpdateSettingsResponses } from './types.gen'; import type { AddDirectoryToRestoreQueueData, AddDirectoryToRestoreQueueErrors, AddDirectoryToRestoreQueueResponses, AddFileToRestoreQueueData, AddFileToRestoreQueueErrors, AddFileToRestoreQueueResponses, ArchiveBrowseData, ArchiveBrowseErrors, ArchiveBrowseResponses, ArchiveMetadataData, ArchiveMetadataErrors, ArchiveMetadataResponses, ArchiveSearchData, ArchiveSearchErrors, ArchiveSearchResponses, ArchiveTreeData, ArchiveTreeErrors, ArchiveTreeResponses, BatchAddToRestoreQueueData, BatchAddToRestoreQueueErrors, BatchAddToRestoreQueueResponses, BatchConfirmDiscrepanciesData, BatchConfirmDiscrepanciesErrors, BatchConfirmDiscrepanciesResponses, BatchDeleteDiscrepanciesData, BatchDeleteDiscrepanciesErrors, BatchDeleteDiscrepanciesResponses, BatchDismissDiscrepanciesData, BatchDismissDiscrepanciesErrors, BatchDismissDiscrepanciesResponses, BatchResolveDiscrepanciesData, BatchResolveDiscrepanciesErrors, BatchResolveDiscrepanciesResponses, BatchTrackData, BatchTrackErrors, BatchTrackResponses, BrowseDiscrepanciesData, BrowseDiscrepanciesErrors, BrowseDiscrepanciesResponses, BrowseRestoreQueueData, BrowseRestoreQueueErrors, BrowseRestoreQueueResponses, CancelJobData, CancelJobErrors, CancelJobResponses, CheckHealthData, CheckHealthResponses, ClearRestoreQueueData, ClearRestoreQueueResponses, ConfirmDiscrepancyData, ConfirmDiscrepancyErrors, ConfirmDiscrepancyResponses, CreateMediaData, CreateMediaErrors, CreateMediaResponses, CreateSecretData, CreateSecretErrors, CreateSecretResponses, DeleteDiscrepancyData, DeleteDiscrepancyErrors, DeleteDiscrepancyResponses, DeleteMediaData, DeleteMediaErrors, DeleteMediaResponses, DeleteSecretData, DeleteSecretErrors, DeleteSecretResponses, DetectMediaData, DetectMediaResponses, DiscoverHardwareData, 
DiscoverHardwareResponses, DismissDiscrepancyData, DismissDiscrepancyErrors, DismissDiscrepancyResponses, DownloadExclusionReportData, DownloadExclusionReportErrors, DownloadExclusionReportResponses, ExportDatabaseData, ExportDatabaseResponses, FilesystemBrowseData, FilesystemBrowseErrors, FilesystemBrowseResponses, FilesystemSearchData, FilesystemSearchErrors, FilesystemSearchResponses, FilesystemTreeData, FilesystemTreeErrors, FilesystemTreeResponses, GetAnalyticsData, GetAnalyticsResponses, GetDashboardStatsData, GetDashboardStatsResponses, GetDiscrepancyTreeData, GetDiscrepancyTreeErrors, GetDiscrepancyTreeResponses, GetJobCountData, GetJobCountResponses, GetJobData, GetJobErrors, GetJobLogsData, GetJobLogsErrors, GetJobLogsResponses, GetJobResponses, GetJobStatsData, GetJobStatsResponses, GetRestoreManifestData, GetRestoreManifestResponses, GetRestoreQueueData, GetRestoreQueueResponses, GetRestoreQueueTreeData, GetRestoreQueueTreeErrors, GetRestoreQueueTreeResponses, GetScanStatusData, GetScanStatusResponses, GetSecretData, GetSecretErrors, GetSecretResponses, GetSettingsData, GetSettingsResponses, GetTreemapData, GetTreemapResponses, IgnoreHardwareData, IgnoreHardwareErrors, IgnoreHardwareResponses, ImportDatabaseData, ImportDatabaseErrors, ImportDatabaseResponses, InitializeMediaData, InitializeMediaErrors, InitializeMediaResponses, ListBackupsData, ListBackupsResponses, ListDirectoriesData, ListDirectoriesErrors, ListDirectoriesResponses, ListDiscrepanciesData, ListDiscrepanciesResponses, ListJobsData, ListJobsErrors, ListJobsResponses, ListMediaData, ListMediaErrors, ListMediaResponses, ListProvidersData, ListProvidersResponses, ListSecretsData, ListSecretsResponses, RemoveFromRestoreQueueData, RemoveFromRestoreQueueErrors, RemoveFromRestoreQueueResponses, ReorderMediaData, ReorderMediaErrors, ReorderMediaResponses, ResetTestEnvironmentData, ResetTestEnvironmentResponses, RetryJobData, RetryJobErrors, RetryJobResponses, StreamJobsData, StreamJobsResponses, 
TestExclusionsData, TestExclusionsErrors, TestExclusionsResponses, TestNotificationData, TestNotificationErrors, TestNotificationResponses, TriggerAutoBackupData, TriggerAutoBackupResponses, TriggerBackupData, TriggerBackupErrors, TriggerBackupResponses, TriggerIndexingData, TriggerIndexingResponses, TriggerRestoreData, TriggerRestoreErrors, TriggerRestoreResponses, TriggerScanData, TriggerScanResponses, UndoDismissDiscrepancyData, UndoDismissDiscrepancyErrors, UndoDismissDiscrepancyResponses, UpdateMediaData, UpdateMediaErrors, UpdateMediaResponses, UpdateSettingsData, UpdateSettingsErrors, UpdateSettingsResponses } from './types.gen';
export type Options<TData extends TDataShape = TDataShape, ThrowOnError extends boolean = boolean, TResponse = unknown> = Options2<TData, ThrowOnError, TResponse> & { export type Options<TData extends TDataShape = TDataShape, ThrowOnError extends boolean = boolean, TResponse = unknown> = Options2<TData, ThrowOnError, TResponse> & {
/** /**
@@ -189,6 +189,48 @@ export const downloadExclusionReport = <ThrowOnError extends boolean = false>(op
} }
}); });
/**
 * Delete Secret
 *
 * Removes a secret from the keystore.
 *
 * Note: the secret name travels in the DELETE request *body* (not the URL),
 * which is why an explicit JSON Content-Type header is set here.
 */
export const deleteSecret = <ThrowOnError extends boolean = false>(options: Options<DeleteSecretData, ThrowOnError>) => (options.client ?? client).delete<DeleteSecretResponses, DeleteSecretErrors, ThrowOnError>({
    url: '/system/secrets',
    ...options,
    headers: {
        'Content-Type': 'application/json',
        ...options.headers
    }
});
/**
 * List Secrets
 *
 * Returns a list of secret names in the keystore (values are never returned).
 *
 * `options` is optional because this GET request takes no body, path, or
 * query parameters.
 */
export const listSecrets = <ThrowOnError extends boolean = false>(options?: Options<ListSecretsData, ThrowOnError>) => (options?.client ?? client).get<ListSecretsResponses, unknown, ThrowOnError>({ url: '/system/secrets', ...options });
/**
 * Create Secret
 *
 * Adds or updates a secret in the keystore. Posting a name that already
 * exists overwrites its stored value (upsert semantics).
 */
export const createSecret = <ThrowOnError extends boolean = false>(options: Options<CreateSecretData, ThrowOnError>) => (options.client ?? client).post<CreateSecretResponses, CreateSecretErrors, ThrowOnError>({
    url: '/system/secrets',
    ...options,
    headers: {
        'Content-Type': 'application/json',
        ...options.headers
    }
});
/**
 * Get Secret
 *
 * Retrieves the value of a secret by name. The name is supplied as the
 * `{name}` path parameter (see `GetSecretData.path`).
 */
export const getSecret = <ThrowOnError extends boolean = false>(options: Options<GetSecretData, ThrowOnError>) => (options.client ?? client).get<GetSecretResponses, GetSecretErrors, ThrowOnError>({ url: '/system/secrets/{name}', ...options });
/** /**
* Test Notification * Test Notification
* *
+148 -8
View File
@@ -219,9 +219,9 @@ export type CloudCreateSchema = {
*/ */
access_key_id: string; access_key_id: string;
/** /**
* Secret Access Key * Secret Access Key Name
*/ */
secret_access_key: string; secret_access_key_name?: string | null;
/** /**
* Path Style Access * Path Style Access
*/ */
@@ -239,9 +239,9 @@ export type CloudCreateSchema = {
*/ */
obfuscate_filenames?: boolean; obfuscate_filenames?: boolean;
/** /**
* Client Side Encryption Passphrase * Encryption Secret Name
*/ */
client_side_encryption_passphrase?: string | null; encryption_secret_name?: string | null;
}; };
/** /**
@@ -460,6 +460,14 @@ export type ItemMetadataSchema = {
versions?: Array<{ versions?: Array<{
[key: string]: unknown; [key: string]: unknown;
}>; }>;
/**
* Is Partially Archived
*/
is_partially_archived?: boolean;
/**
* Archived Bytes
*/
archived_bytes?: number;
}; };
/** /**
@@ -542,6 +550,10 @@ export type LtoTapeCreateSchema = {
* Cleaning Cartridge * Cleaning Cartridge
*/ */
cleaning_cartridge?: boolean; cleaning_cartridge?: boolean;
/**
* Encryption Secret Name
*/
encryption_secret_name?: string | null;
}; };
/** /**
@@ -698,6 +710,10 @@ export type MediaSchema = {
* Access Key Id * Access Key Id
*/ */
access_key_id?: string | null; access_key_id?: string | null;
/**
* Secret Access Key Name
*/
secret_access_key_name?: string | null;
/** /**
* Path Style Access * Path Style Access
*/ */
@@ -714,6 +730,10 @@ export type MediaSchema = {
* Obfuscate Filenames * Obfuscate Filenames
*/ */
obfuscate_filenames?: boolean; obfuscate_filenames?: boolean;
/**
* Encryption Secret Name
*/
encryption_secret_name?: string | null;
/** /**
* Config * Config
*/ */
@@ -859,9 +879,9 @@ export type MediaUpdateSchema = {
*/ */
access_key_id?: string | null; access_key_id?: string | null;
/** /**
* Secret Access Key * Secret Access Key Name
*/ */
secret_access_key?: string | null; secret_access_key_name?: string | null;
/** /**
* Path Style Access * Path Style Access
*/ */
@@ -879,9 +899,9 @@ export type MediaUpdateSchema = {
*/ */
obfuscate_filenames?: boolean | null; obfuscate_filenames?: boolean | null;
/** /**
* Client Side Encryption Passphrase * Encryption Secret Name
*/ */
client_side_encryption_passphrase?: string | null; encryption_secret_name?: string | null;
}; };
/** /**
@@ -954,6 +974,10 @@ export type OfflineHddCreateSchema = {
* Encryption Key Id * Encryption Key Id
*/ */
encryption_key_id?: string | null; encryption_key_id?: string | null;
/**
* Encryption Secret Name
*/
encryption_secret_name?: string | null;
}; };
/** /**
@@ -1044,6 +1068,30 @@ export type ScanStatusSchema = {
last_run_time?: string | null; last_run_time?: string | null;
}; };
/**
 * SecretCreateRequest
 *
 * Body for POST /system/secrets. Posting an existing name overwrites its
 * stored value.
 */
export type SecretCreateRequest = {
    /**
     * Name — keystore identifier for the secret.
     */
    name: string;
    /**
     * Value — the secret material to store.
     */
    value: string;
};

/**
 * SecretDeleteRequest
 *
 * Body for DELETE /system/secrets (the name is sent in the body, not the URL).
 */
export type SecretDeleteRequest = {
    /**
     * Name — keystore identifier of the secret to remove.
     */
    name: string;
};
/** /**
* SettingSchema * SettingSchema
*/ */
@@ -1714,6 +1762,98 @@ export type DownloadExclusionReportResponses = {
200: unknown; 200: unknown;
}; };
export type DeleteSecretData = {
body: SecretDeleteRequest;
path?: never;
query?: never;
url: '/system/secrets';
};
export type DeleteSecretErrors = {
/**
* Validation Error
*/
422: HttpValidationError;
};
export type DeleteSecretError = DeleteSecretErrors[keyof DeleteSecretErrors];
export type DeleteSecretResponses = {
/**
* Successful Response
*/
200: unknown;
};
export type ListSecretsData = {
body?: never;
path?: never;
query?: never;
url: '/system/secrets';
};
export type ListSecretsResponses = {
/**
* Response List Secrets
*
* Successful Response
*/
200: Array<string>;
};
export type ListSecretsResponse = ListSecretsResponses[keyof ListSecretsResponses];
export type CreateSecretData = {
body: SecretCreateRequest;
path?: never;
query?: never;
url: '/system/secrets';
};
export type CreateSecretErrors = {
/**
* Validation Error
*/
422: HttpValidationError;
};
export type CreateSecretError = CreateSecretErrors[keyof CreateSecretErrors];
export type CreateSecretResponses = {
/**
* Successful Response
*/
200: unknown;
};
export type GetSecretData = {
body?: never;
path: {
/**
* Name
*/
name: string;
};
query?: never;
url: '/system/secrets/{name}';
};
export type GetSecretErrors = {
/**
* Validation Error
*/
422: HttpValidationError;
};
export type GetSecretError = GetSecretErrors[keyof GetSecretErrors];
export type GetSecretResponses = {
/**
* Successful Response
*/
200: unknown;
};
export type TestNotificationData = { export type TestNotificationData = {
body: TestNotificationRequest; body: TestNotificationRequest;
path?: never; path?: never;
@@ -10,11 +10,12 @@
MoreVertical, MoreVertical,
ExternalLink, ExternalLink,
CassetteTape, CassetteTape,
ShieldCheck, ShieldCheck,
ShieldAlert, ShieldAlert,
Square, Square,
EyeOff, EyeOff,
Trash2 Trash2,
AlertTriangle
} from "lucide-svelte"; } from "lucide-svelte";
import { Checkbox } from "$lib/components/ui/checkbox"; import { Checkbox } from "$lib/components/ui/checkbox";
import { Button } from "$lib/components/ui/button"; import { Button } from "$lib/components/ui/button";
@@ -205,18 +206,24 @@
> >
{item.name} {item.name}
</span> </span>
{#if mode === "index"} {#if mode === "index"}
{#if item.media && item.media.length > 0} {#if item.media && item.media.length > 0}
<div class="flex gap-1 overflow-hidden shrink-0"> <div class="flex gap-1 overflow-hidden shrink-0">
{#each item.media as m} {#each item.media as m}
<span class="inline-flex items-center gap-1 bg-blue-500/10 text-blue-400 text-[10px] px-1.5 py-0.5 rounded border border-blue-500/20 font-medium"> <span class="inline-flex items-center gap-1 bg-blue-500/10 text-blue-400 text-[10px] px-1.5 py-0.5 rounded border border-blue-500/20 font-medium">
<CassetteTape size={10} /> <CassetteTape size={10} />
{m} {m}
</span> </span>
{/each} {/each}
</div> </div>
{/if} {/if}
{/if} {#if item.is_partially_archived}
<span class="inline-flex items-center gap-1 bg-orange-500/10 text-orange-400 text-[10px] px-1.5 py-0.5 rounded border border-orange-500/20 font-medium" title="Only {formatSize(item.archived_bytes)} of {formatSize(item.size)} archived">
<AlertTriangle size={10} />
Partial
</span>
{/if}
{/if}
{#if mode === "discrepancies"} {#if mode === "discrepancies"}
{#if item.is_deleted} {#if item.is_deleted}
<span class="inline-flex items-center gap-1 bg-red-500/10 text-red-400 text-[10px] px-1.5 py-0.5 rounded border border-red-500/20 font-medium"> <span class="inline-flex items-center gap-1 bg-red-500/10 text-red-400 text-[10px] px-1.5 py-0.5 rounded border border-red-500/20 font-medium">
@@ -256,7 +263,7 @@
class="shrink-0 px-4 h-full flex items-center justify-end text-xs text-text-secondary mono text-right tabular-nums font-medium border-r border-border-color/10" class="shrink-0 px-4 h-full flex items-center justify-end text-xs text-text-secondary mono text-right tabular-nums font-medium border-r border-border-color/10"
style="width: {colWidths.size}px" style="width: {colWidths.size}px"
> >
{formatSize(item.size)} {formatSize(item.archived_bytes !== undefined ? item.archived_bytes : item.size)}
</div> </div>
<!-- QUICK ACTIONS --> <!-- QUICK ACTIONS -->
+5 -2
View File
@@ -11,6 +11,9 @@ export interface FileItem {
sha256_hash?: string | null; sha256_hash?: string | null;
vulnerable?: boolean; vulnerable?: boolean;
indeterminate?: boolean; indeterminate?: boolean;
// Partial archive indicator
is_partially_archived?: boolean;
archived_bytes?: number;
// Discrepancy fields // Discrepancy fields
discrepancy_id?: number; discrepancy_id?: number;
is_deleted?: boolean; is_deleted?: boolean;
@@ -90,12 +93,12 @@ export interface CloudCreateData {
region: string; region: string;
bucket_name: string; bucket_name: string;
access_key_id: string; access_key_id: string;
secret_access_key: string; secret_access_key_name?: string;
path_style_access?: boolean; path_style_access?: boolean;
storage_class?: string; storage_class?: string;
max_part_size_mb?: number; max_part_size_mb?: number;
obfuscate_filenames?: boolean; obfuscate_filenames?: boolean;
client_side_encryption_passphrase?: string; encryption_secret_name?: string;
} }
export type MediaCreateData = LtoTapeCreateData | OfflineHddCreateData | CloudCreateData; export type MediaCreateData = LtoTapeCreateData | OfflineHddCreateData | CloudCreateData;
+22 -5
View File
@@ -12,7 +12,8 @@
ListPlus, ListPlus,
FolderTree, FolderTree,
Clock, Clock,
ArrowRight ArrowRight,
AlertTriangle
} from 'lucide-svelte'; } from 'lucide-svelte';
import { Button } from '$lib/components/ui/button'; import { Button } from '$lib/components/ui/button';
import PageHeader from '$lib/components/ui/PageHeader.svelte'; import PageHeader from '$lib/components/ui/PageHeader.svelte';
@@ -89,7 +90,9 @@
media: f.media ?? [], media: f.media ?? [],
vulnerable: f.vulnerable, vulnerable: f.vulnerable,
selected: f.selected, selected: f.selected,
indeterminate: f.indeterminate indeterminate: f.indeterminate,
is_partially_archived: f.is_partially_archived ?? false,
archived_bytes: f.archived_bytes ?? undefined
})); }));
} }
} catch (error) { } catch (error) {
@@ -116,7 +119,9 @@
media: f.media ?? [], media: f.media ?? [],
vulnerable: f.vulnerable, vulnerable: f.vulnerable,
selected: f.selected, selected: f.selected,
indeterminate: f.indeterminate indeterminate: f.indeterminate,
is_partially_archived: f.is_partially_archived ?? false,
archived_bytes: f.archived_bytes ?? undefined
})); }));
} }
} catch (error) { } catch (error) {
@@ -315,9 +320,9 @@
<div class="grid grid-cols-2 gap-4"> <div class="grid grid-cols-2 gap-4">
<div class="space-y-1"> <div class="space-y-1">
<span class="text-xs font-medium text-text-secondary opacity-60 block"> <span class="text-xs font-medium text-text-secondary opacity-60 block">
{selectedItemMetadata.type === 'directory' ? 'Aggregate Size' : 'File Size'} {selectedItemMetadata.type === 'directory' ? 'Aggregate Size' : 'Archived Size'}
</span> </span>
<span class="text-xs font-semibold text-text-primary mono">{formatSize(selectedItemMetadata.size)}</span> <span class="text-xs font-semibold text-text-primary mono">{formatSize(selectedItemMetadata.type === 'file' ? (selectedItemMetadata.archived_bytes || 0) : selectedItemMetadata.size)}</span>
</div> </div>
<div class="space-y-1"> <div class="space-y-1">
<span class="text-xs font-medium text-text-secondary opacity-60 block">Last Indexed</span> <span class="text-xs font-medium text-text-secondary opacity-60 block">Last Indexed</span>
@@ -332,6 +337,18 @@
</div> </div>
{#if selectedItemMetadata.type === 'file'} {#if selectedItemMetadata.type === 'file'}
{#if selectedItemMetadata.is_partially_archived}
<div class="p-3 bg-orange-500/5 border border-orange-500/20 rounded-lg space-y-1">
<div class="flex items-center gap-2">
<AlertTriangle size={14} class="text-orange-400" />
<span class="text-xs font-semibold text-orange-400">Partially Archived</span>
</div>
<p class="text-5xs text-text-secondary opacity-60 leading-relaxed">
Only {formatSize(selectedItemMetadata.archived_bytes || 0)} of {formatSize(selectedItemMetadata.size)} has been written to archive media. The remaining {formatSize(selectedItemMetadata.size - (selectedItemMetadata.archived_bytes || 0))} was not archived because the target media became full.
</p>
</div>
{/if}
<!-- Hash --> <!-- Hash -->
<div class="space-y-2"> <div class="space-y-2">
<span class="text-xs font-medium text-text-secondary opacity-60 block">SHA-256 Fingerprint</span> <span class="text-xs font-medium text-text-secondary opacity-60 block">SHA-256 Fingerprint</span>
+120 -8
View File
@@ -51,6 +51,7 @@
discoverHardware, discoverHardware,
ignoreHardware, ignoreHardware,
listProviders, listProviders,
listSecrets,
type MediaSchema, type MediaSchema,
type StorageProviderSchema type StorageProviderSchema
} from '$lib/api'; } from '$lib/api';
@@ -62,6 +63,7 @@
let mediaList = $state<MediaSchema[]>([]); let mediaList = $state<MediaSchema[]>([]);
let providersList = $state<StorageProviderSchema[]>([]); let providersList = $state<StorageProviderSchema[]>([]);
let discoveredAssets = $state<any[]>([]); let discoveredAssets = $state<any[]>([]);
let secretsList = $state<string[]>([]);
let loading = $state(true); let loading = $state(true);
let showRegisterDialog = $state(false); let showRegisterDialog = $state(false);
let editingMedia = $state<MediaSchema | null>(null); let editingMedia = $state<MediaSchema | null>(null);
@@ -102,12 +104,12 @@
region: 'us-east-1', region: 'us-east-1',
bucket_name: '', bucket_name: '',
access_key_id: '', access_key_id: '',
secret_access_key: '', secret_access_key_name: '',
path_style_access: false, path_style_access: false,
storage_class: '', storage_class: '',
max_part_size_mb: 5000, max_part_size_mb: 5000,
obfuscate_filenames: false, obfuscate_filenames: false,
client_side_encryption_passphrase: '' encryption_secret_name: ''
}); });
// Provider template change handler // Provider template change handler
@@ -252,9 +254,19 @@
prevOnlineCount = currentOnlineCount; prevOnlineCount = currentOnlineCount;
}); });
async function loadSecrets() {
try {
const res = await listSecrets();
if (res.data) secretsList = res.data as string[];
} catch (error) {
console.error("Failed to load secrets:", error);
}
}
onMount(async () => { onMount(async () => {
// Initial load (non-silent and forced refresh to show live hardware status immediately) // Initial load (non-silent and forced refresh to show live hardware status immediately)
loadMedia(false, true); loadMedia(false, true);
loadSecrets();
try { try {
const res = await listProviders(); const res = await listProviders();
@@ -380,6 +392,7 @@
payload.compression = newMedia.compression; payload.compression = newMedia.compression;
payload.encryption_key_id = newMedia.encryption_key_id || undefined; payload.encryption_key_id = newMedia.encryption_key_id || undefined;
payload.cleaning_cartridge = newMedia.cleaning_cartridge; payload.cleaning_cartridge = newMedia.cleaning_cartridge;
payload.encryption_secret_name = newMedia.encryption_secret_name || undefined;
} else if (newMedia.media_type === 'local_hdd') { } else if (newMedia.media_type === 'local_hdd') {
payload.drive_model = newMedia.drive_model || undefined; payload.drive_model = newMedia.drive_model || undefined;
payload.device_uuid = newMedia.device_uuid || undefined; payload.device_uuid = newMedia.device_uuid || undefined;
@@ -389,18 +402,19 @@
payload.connection_interface = newMedia.connection_interface || undefined; payload.connection_interface = newMedia.connection_interface || undefined;
payload.encrypted = newMedia.encrypted; payload.encrypted = newMedia.encrypted;
payload.encryption_key_id = newMedia.hdd_encryption_key_id || undefined; payload.encryption_key_id = newMedia.hdd_encryption_key_id || undefined;
payload.encryption_secret_name = newMedia.encryption_secret_name || undefined;
} else if (newMedia.media_type === 's3_compat') { } else if (newMedia.media_type === 's3_compat') {
payload.provider_template = newMedia.provider_template; payload.provider_template = newMedia.provider_template;
payload.endpoint_url = newMedia.endpoint_url; payload.endpoint_url = newMedia.endpoint_url;
payload.region = newMedia.region; payload.region = newMedia.region;
payload.bucket_name = newMedia.bucket_name; payload.bucket_name = newMedia.bucket_name;
payload.access_key_id = newMedia.access_key_id; payload.access_key_id = newMedia.access_key_id;
payload.secret_access_key = newMedia.secret_access_key; payload.secret_access_key_name = newMedia.secret_access_key_name || undefined;
payload.path_style_access = newMedia.path_style_access; payload.path_style_access = newMedia.path_style_access;
payload.storage_class = newMedia.storage_class || undefined; payload.storage_class = newMedia.storage_class || undefined;
payload.max_part_size_mb = newMedia.max_part_size_mb; payload.max_part_size_mb = newMedia.max_part_size_mb;
payload.obfuscate_filenames = newMedia.obfuscate_filenames; payload.obfuscate_filenames = newMedia.obfuscate_filenames;
payload.client_side_encryption_passphrase = newMedia.client_side_encryption_passphrase || undefined; payload.encryption_secret_name = newMedia.encryption_secret_name || undefined;
} }
try { try {
@@ -438,6 +452,8 @@
editingMedia.storage_class = editingMedia.storage_class || ''; editingMedia.storage_class = editingMedia.storage_class || '';
editingMedia.path_style_access = editingMedia.path_style_access ?? false; editingMedia.path_style_access = editingMedia.path_style_access ?? false;
editingMedia.obfuscate_filenames = editingMedia.obfuscate_filenames ?? false; editingMedia.obfuscate_filenames = editingMedia.obfuscate_filenames ?? false;
editingMedia.secret_access_key_name = editingMedia.secret_access_key_name || '';
editingMedia.encryption_secret_name = editingMedia.encryption_secret_name || '';
} }
} }
@@ -468,6 +484,7 @@
payload.write_protected = editingMedia.write_protected; payload.write_protected = editingMedia.write_protected;
payload.cleaning_cartridge = editingMedia.cleaning_cartridge; payload.cleaning_cartridge = editingMedia.cleaning_cartridge;
payload.encryption_key_id = editingMedia.encryption_key_id || undefined; payload.encryption_key_id = editingMedia.encryption_key_id || undefined;
payload.encryption_secret_name = editingMedia.encryption_secret_name || undefined;
} }
// HDD fields // HDD fields
else if (editingMedia.media_type === 'local_hdd') { else if (editingMedia.media_type === 'local_hdd') {
@@ -476,6 +493,7 @@
payload.is_ssd = editingMedia.is_ssd; payload.is_ssd = editingMedia.is_ssd;
payload.encrypted = editingMedia.encrypted; payload.encrypted = editingMedia.encrypted;
payload.encryption_key_id = editingMedia.encryption_key_id || undefined; payload.encryption_key_id = editingMedia.encryption_key_id || undefined;
payload.encryption_secret_name = editingMedia.encryption_secret_name || undefined;
} }
// Cloud fields // Cloud fields
else if (editingMedia.media_type === 's3_compat') { else if (editingMedia.media_type === 's3_compat') {
@@ -483,9 +501,11 @@
payload.region = editingMedia.region || undefined; payload.region = editingMedia.region || undefined;
payload.bucket_name = editingMedia.bucket_name || undefined; payload.bucket_name = editingMedia.bucket_name || undefined;
payload.access_key_id = editingMedia.access_key_id || undefined; payload.access_key_id = editingMedia.access_key_id || undefined;
payload.secret_access_key_name = editingMedia.secret_access_key_name || undefined;
payload.path_style_access = editingMedia.path_style_access; payload.path_style_access = editingMedia.path_style_access;
payload.obfuscate_filenames = editingMedia.obfuscate_filenames; payload.obfuscate_filenames = editingMedia.obfuscate_filenames;
payload.storage_class = editingMedia.storage_class || undefined; payload.storage_class = editingMedia.storage_class || undefined;
payload.encryption_secret_name = editingMedia.encryption_secret_name || undefined;
} }
// Remove undefined values // Remove undefined values
@@ -1150,8 +1170,17 @@
</div> </div>
</div> </div>
<div class="space-y-2"> <div class="space-y-2">
<label class="text-xs font-medium text-text-secondary ml-1" for="secret_access_key">Secret Access Key</label> <label class="text-xs font-medium text-text-secondary ml-1" for="secret_access_key_name">Secret Access Key</label>
<Input id="secret_access_key" bind:value={newMedia.secret_access_key} placeholder="Secret key" class="h-10 bg-bg-primary/50 border-border-color font-mono text-sm" type="password" /> <div class="relative">
<select id="secret_access_key_name" bind:value={newMedia.secret_access_key_name} class="w-full h-10 bg-bg-primary border border-border-color rounded-xl px-4 pr-10 text-sm font-medium text-text-primary outline-none focus:ring-2 focus:ring-blue-500/20 transition-all appearance-none cursor-pointer">
<option value="">None (unauthenticated)</option>
{#each secretsList as secret}
<option value={secret}>{secret}</option>
{/each}
</select>
<ChevronDown size={16} class="absolute right-3 top-1/2 -translate-y-1/2 text-text-secondary pointer-events-none" />
</div>
<p class="text-[10px] text-text-secondary leading-tight opacity-60">Manage secrets in <a href="/settings" class="text-blue-500 hover:underline">Settings</a>.</p>
</div> </div>
{/if} {/if}
</div> </div>
@@ -1225,6 +1254,19 @@
<label class="text-xs font-medium text-text-secondary ml-1" for="encryption_key_id">Encryption Key ID</label> <label class="text-xs font-medium text-text-secondary ml-1" for="encryption_key_id">Encryption Key ID</label>
<Input id="encryption_key_id" bind:value={newMedia.encryption_key_id} placeholder="Key reference in system keystore" class="h-10 bg-bg-primary/50 border-border-color font-mono text-sm" /> <Input id="encryption_key_id" bind:value={newMedia.encryption_key_id} placeholder="Key reference in system keystore" class="h-10 bg-bg-primary/50 border-border-color font-mono text-sm" />
</div> </div>
<div class="space-y-2">
<label class="text-xs font-medium text-text-secondary ml-1" for="lto-encryption_secret_name">Encryption Secret</label>
<div class="relative">
<select id="lto-encryption_secret_name" bind:value={newMedia.encryption_secret_name} class="w-full h-10 bg-bg-primary border border-border-color rounded-xl px-4 pr-10 text-sm font-medium text-text-primary outline-none focus:ring-2 focus:ring-blue-500/20 transition-all appearance-none cursor-pointer">
<option value="">None (no encryption)</option>
{#each secretsList as secret}
<option value={secret}>{secret}</option>
{/each}
</select>
<ChevronDown size={16} class="absolute right-3 top-1/2 -translate-y-1/2 text-text-secondary pointer-events-none" />
</div>
<p class="text-[10px] text-text-secondary leading-tight opacity-60">Manage secrets in <a href="/settings" class="text-blue-500 hover:underline">Settings</a>.</p>
</div>
{:else if newMedia.media_type === 'local_hdd'} {:else if newMedia.media_type === 'local_hdd'}
<div class="grid grid-cols-2 gap-4"> <div class="grid grid-cols-2 gap-4">
<div class="flex items-center gap-3 h-10 px-1"> <div class="flex items-center gap-3 h-10 px-1">
@@ -1269,6 +1311,19 @@
<label class="text-xs font-medium text-text-secondary ml-1" for="hdd_encryption_key_id">Encryption Key ID</label> <label class="text-xs font-medium text-text-secondary ml-1" for="hdd_encryption_key_id">Encryption Key ID</label>
<Input id="hdd_encryption_key_id" bind:value={newMedia.hdd_encryption_key_id} placeholder="Key reference in system keystore" class="h-10 bg-bg-primary/50 border-border-color font-mono text-sm" /> <Input id="hdd_encryption_key_id" bind:value={newMedia.hdd_encryption_key_id} placeholder="Key reference in system keystore" class="h-10 bg-bg-primary/50 border-border-color font-mono text-sm" />
</div> </div>
<div class="space-y-2">
<label class="text-xs font-medium text-text-secondary ml-1" for="hdd-encryption_secret_name">Encryption Secret</label>
<div class="relative">
<select id="hdd-encryption_secret_name" bind:value={newMedia.encryption_secret_name} class="w-full h-10 bg-bg-primary border border-border-color rounded-xl px-4 pr-10 text-sm font-medium text-text-primary outline-none focus:ring-2 focus:ring-blue-500/20 transition-all appearance-none cursor-pointer">
<option value="">None (no encryption)</option>
{#each secretsList as secret}
<option value={secret}>{secret}</option>
{/each}
</select>
<ChevronDown size={16} class="absolute right-3 top-1/2 -translate-y-1/2 text-text-secondary pointer-events-none" />
</div>
<p class="text-[10px] text-text-secondary leading-tight opacity-60">Manage secrets in <a href="/settings" class="text-blue-500 hover:underline">Settings</a>.</p>
</div>
{:else if newMedia.media_type === 's3_compat'} {:else if newMedia.media_type === 's3_compat'}
<div class="grid grid-cols-2 gap-4"> <div class="grid grid-cols-2 gap-4">
<div class="flex items-center gap-3 h-10 px-1"> <div class="flex items-center gap-3 h-10 px-1">
@@ -1291,8 +1346,17 @@
</div> </div>
</div> </div>
<div class="space-y-2"> <div class="space-y-2">
<label class="text-xs font-medium text-text-secondary ml-1" for="client_side_encryption_passphrase">Client-Side Encryption Passphrase</label> <label class="text-xs font-medium text-text-secondary ml-1" for="encryption_secret_name">Encryption Secret</label>
<Input id="client_side_encryption_passphrase" bind:value={newMedia.client_side_encryption_passphrase} type="password" placeholder="Encrypts payloads before upload" class="h-10 bg-bg-primary/50 border-border-color font-mono text-sm" /> <div class="relative">
<select id="encryption_secret_name" bind:value={newMedia.encryption_secret_name} class="w-full h-10 bg-bg-primary border border-border-color rounded-xl px-4 pr-10 text-sm font-medium text-text-primary outline-none focus:ring-2 focus:ring-blue-500/20 transition-all appearance-none cursor-pointer">
<option value="">None (no encryption)</option>
{#each secretsList as secret}
<option value={secret}>{secret}</option>
{/each}
</select>
<ChevronDown size={16} class="absolute right-3 top-1/2 -translate-y-1/2 text-text-secondary pointer-events-none" />
</div>
<p class="text-[10px] text-text-secondary leading-tight opacity-60">Manage secrets in <a href="/settings" class="text-blue-500 hover:underline">Settings</a>.</p>
</div> </div>
{/if} {/if}
</div> </div>
@@ -1389,6 +1453,18 @@
<label class="text-xs font-medium text-text-secondary cursor-pointer" for="edit-cleaning_cartridge">Cleaning Cartridge</label> <label class="text-xs font-medium text-text-secondary cursor-pointer" for="edit-cleaning_cartridge">Cleaning Cartridge</label>
</div> </div>
</div> </div>
<div class="space-y-2">
<label class="text-xs font-medium text-text-secondary ml-1" for="edit-lto-encryption_secret_name">Encryption Secret</label>
<div class="relative">
<select id="edit-lto-encryption_secret_name" bind:value={editingMedia.encryption_secret_name} class="w-full h-10 bg-bg-primary border border-border-color rounded-xl px-4 pr-10 text-sm font-medium text-text-primary outline-none focus:ring-2 focus:ring-blue-500/20 transition-all appearance-none cursor-pointer">
<option value="">None (no encryption)</option>
{#each secretsList as secret}
<option value={secret}>{secret}</option>
{/each}
</select>
<ChevronDown size={16} class="absolute right-3 top-1/2 -translate-y-1/2 text-text-secondary pointer-events-none" />
</div>
</div>
</div> </div>
{:else if editingMedia.media_type === 'local_hdd'} {:else if editingMedia.media_type === 'local_hdd'}
<div class="space-y-4"> <div class="space-y-4">
@@ -1411,6 +1487,18 @@
<label class="text-xs font-medium text-text-secondary cursor-pointer" for="edit-encrypted">Encrypted</label> <label class="text-xs font-medium text-text-secondary cursor-pointer" for="edit-encrypted">Encrypted</label>
</div> </div>
</div> </div>
<div class="space-y-2">
<label class="text-xs font-medium text-text-secondary ml-1" for="edit-hdd-encryption_secret_name">Encryption Secret</label>
<div class="relative">
<select id="edit-hdd-encryption_secret_name" bind:value={editingMedia.encryption_secret_name} class="w-full h-10 bg-bg-primary border border-border-color rounded-xl px-4 pr-10 text-sm font-medium text-text-primary outline-none focus:ring-2 focus:ring-blue-500/20 transition-all appearance-none cursor-pointer">
<option value="">None (no encryption)</option>
{#each secretsList as secret}
<option value={secret}>{secret}</option>
{/each}
</select>
<ChevronDown size={16} class="absolute right-3 top-1/2 -translate-y-1/2 text-text-secondary pointer-events-none" />
</div>
</div>
</div> </div>
{:else if editingMedia.media_type === 's3_compat'} {:else if editingMedia.media_type === 's3_compat'}
<div class="space-y-4"> <div class="space-y-4">
@@ -1436,6 +1524,30 @@
<label class="text-xs font-medium text-text-secondary ml-1" for="edit-access_key_id">Access Key ID</label> <label class="text-xs font-medium text-text-secondary ml-1" for="edit-access_key_id">Access Key ID</label>
<Input id="edit-access_key_id" bind:value={editingMedia.access_key_id} class="h-10 bg-bg-primary/50 border-border-color font-mono text-sm" /> <Input id="edit-access_key_id" bind:value={editingMedia.access_key_id} class="h-10 bg-bg-primary/50 border-border-color font-mono text-sm" />
</div> </div>
<div class="space-y-2">
<label class="text-xs font-medium text-text-secondary ml-1" for="edit-secret_access_key_name">Secret Access Key</label>
<div class="relative">
<select id="edit-secret_access_key_name" bind:value={editingMedia.secret_access_key_name} class="w-full h-10 bg-bg-primary border border-border-color rounded-xl px-4 pr-10 text-sm font-medium text-text-primary outline-none focus:ring-2 focus:ring-blue-500/20 transition-all appearance-none cursor-pointer">
<option value="">None (unauthenticated)</option>
{#each secretsList as secret}
<option value={secret}>{secret}</option>
{/each}
</select>
<ChevronDown size={16} class="absolute right-3 top-1/2 -translate-y-1/2 text-text-secondary pointer-events-none" />
</div>
</div>
<div class="space-y-2">
<label class="text-xs font-medium text-text-secondary ml-1" for="edit-encryption_secret_name">Encryption Secret</label>
<div class="relative">
<select id="edit-encryption_secret_name" bind:value={editingMedia.encryption_secret_name} class="w-full h-10 bg-bg-primary border border-border-color rounded-xl px-4 pr-10 text-sm font-medium text-text-primary outline-none focus:ring-2 focus:ring-blue-500/20 transition-all appearance-none cursor-pointer">
<option value="">None (no encryption)</option>
{#each secretsList as secret}
<option value={secret}>{secret}</option>
{/each}
</select>
<ChevronDown size={16} class="absolute right-3 top-1/2 -translate-y-1/2 text-text-secondary pointer-events-none" />
</div>
</div>
<div class="flex items-center gap-3 h-10 px-1"> <div class="flex items-center gap-3 h-10 px-1">
<input id="edit-path_style_access" type="checkbox" bind:checked={editingMedia.path_style_access} class="w-4 h-4 rounded border-border-color bg-bg-primary text-blue-600 focus:ring-blue-500/20" /> <input id="edit-path_style_access" type="checkbox" bind:checked={editingMedia.path_style_access} class="w-4 h-4 rounded border-border-color bg-bg-primary text-blue-600 focus:ring-blue-500/20" />
<label class="text-xs font-medium text-text-secondary cursor-pointer" for="edit-path_style_access">Path-Style Access</label> <label class="text-xs font-medium text-text-secondary cursor-pointer" for="edit-path_style_access">Path-Style Access</label>
+95 -2
View File
@@ -19,7 +19,8 @@
Download, Download,
Upload, Upload,
Terminal, Terminal,
Globe Globe,
Key
} from "lucide-svelte"; } from "lucide-svelte";
import { Button } from "$lib/components/ui/button"; import { Button } from "$lib/components/ui/button";
import PageHeader from "$lib/components/ui/PageHeader.svelte"; import PageHeader from "$lib/components/ui/PageHeader.svelte";
@@ -33,7 +34,10 @@
exportDatabase, exportDatabase,
importDatabase, importDatabase,
testExclusions, testExclusions,
downloadExclusionReport downloadExclusionReport,
listSecrets,
createSecret,
deleteSecret
} from "$lib/api"; } from "$lib/api";
import { toast } from "svelte-sonner"; import { toast } from "svelte-sonner";
import { cn, formatSize } from "$lib/utils"; import { cn, formatSize } from "$lib/utils";
@@ -48,6 +52,12 @@
let archivalSchedule = $state(""); let archivalSchedule = $state("");
let notificationUrls = $state<string[]>([]); let notificationUrls = $state<string[]>([]);
// Secrets keystore
let secretsList = $state<string[]>([]);
let newSecretName = $state("");
let newSecretValue = $state("");
let showAddSecret = $state(false);
let initialState = $state(""); let initialState = $state("");
const isDirty = $derived(initialState !== JSON.stringify({ const isDirty = $derived(initialState !== JSON.stringify({
sourceRoots, sourceRoots,
@@ -127,6 +137,7 @@
{ id: "paths", label: "Storage Paths", icon: HardDrive }, { id: "paths", label: "Storage Paths", icon: HardDrive },
{ id: "exclusions", label: "Exclusions", icon: ListX }, { id: "exclusions", label: "Exclusions", icon: ListX },
{ id: "scheduling", label: "Scheduling", icon: CalendarClock }, { id: "scheduling", label: "Scheduling", icon: CalendarClock },
{ id: "secrets", label: "Secrets", icon: Key },
{ id: "notifications", label: "Alerting", icon: Bell }, { id: "notifications", label: "Alerting", icon: Bell },
{ id: "system", label: "System", icon: Cpu }, { id: "system", label: "System", icon: Cpu },
]; ];
@@ -146,6 +157,10 @@
if (data.notification_urls) notificationUrls = JSON.parse(data.notification_urls); if (data.notification_urls) notificationUrls = JSON.parse(data.notification_urls);
} }
// Load secrets
const secretsRes = await listSecrets();
if (secretsRes.data) secretsList = secretsRes.data as string[];
// Capture snapshot for dirty check // Capture snapshot for dirty check
initialState = JSON.stringify({ initialState = JSON.stringify({
sourceRoots, sourceRoots,
@@ -195,6 +210,34 @@
} }
} }
async function handleAddSecret() {
if (!newSecretName.trim() || !newSecretValue.trim()) {
toast.error("Secret name and value are required");
return;
}
try {
await createSecret({ body: { name: newSecretName.trim(), value: newSecretValue.trim() } });
toast.success(`Secret '${newSecretName}' saved`);
secretsList = [...secretsList, newSecretName.trim()];
newSecretName = "";
newSecretValue = "";
showAddSecret = false;
} catch (error) {
toast.error("Failed to save secret");
}
}
async function handleDeleteSecret(name: string) {
if (!confirm(`Delete secret '${name}'? This may break media that references it.`)) return;
try {
await deleteSecret({ body: { name } });
toast.success(`Secret '${name}' deleted`);
secretsList = secretsList.filter(s => s !== name);
} catch (error) {
toast.error("Failed to delete secret");
}
}
async function handleExport() { async function handleExport() {
exporting = true; exporting = true;
try { try {
@@ -553,6 +596,56 @@
</Card> </Card>
</div> </div>
{:else if activeTab === 'secrets'}
<!-- Secrets keystore tab: lists stored secret names and provides an inline
     add form. State (secretsList, showAddSecret, newSecretName,
     newSecretValue) and the add/delete handlers live in the script above;
     secret values are write-only — only names are ever displayed. -->
<div class="animate-in slide-in-from-bottom-4 duration-500 space-y-6">
<Card class="p-5 shadow-xl">
<SectionHeader title="Secrets Keystore" icon={Key} class="mb-6 px-0" />
<p class="text-sm text-text-secondary opacity-60 mb-4">Store sensitive credentials centrally. Media configurations reference secrets by name instead of storing raw values.</p>
<!-- Existing secrets: name + delete button per row; empty-state otherwise. -->
{#if secretsList.length > 0}
<div class="space-y-2 mb-4">
{#each secretsList as secret}
<div class="flex items-center justify-between p-3 bg-bg-primary/50 rounded-lg border border-border-color">
<div class="flex items-center gap-3">
<Key size={14} class="text-text-secondary opacity-40" />
<span class="text-sm font-medium text-text-primary">{secret}</span>
</div>
<Button variant="ghost" size="icon" class="h-8 w-8 text-error-color/60 hover:text-error-color hover:bg-error-color/10" onclick={() => handleDeleteSecret(secret)}>
<Trash2 size={14} />
</Button>
</div>
{/each}
</div>
{:else}
<div class="text-center py-8 opacity-30 mb-4">
<Key size={32} class="mx-auto mb-2" />
<p class="text-sm">No secrets stored yet</p>
</div>
{/if}
<!-- Inline add form, toggled by the dashed "Add Secret" button below.
     Cancel clears both fields so stale values never linger in the DOM. -->
{#if showAddSecret}
<div class="space-y-3 p-4 bg-bg-primary/30 rounded-lg border border-border-color">
<div class="space-y-2">
<label class="text-xs font-medium text-text-secondary ml-1" for="new-secret-name">Secret Name</label>
<Input id="new-secret-name" bind:value={newSecretName} placeholder="e.g., aws-production-key" class="h-10 bg-bg-primary border-border-color text-sm" />
</div>
<div class="space-y-2">
<label class="text-xs font-medium text-text-secondary ml-1" for="new-secret-value">Secret Value</label>
<!-- type="password" keeps the secret value masked while typing. -->
<Input id="new-secret-value" bind:value={newSecretValue} type="password" placeholder="Enter secret value" class="h-10 bg-bg-primary border-border-color font-mono text-sm" />
</div>
<div class="flex gap-2">
<Button variant="outline" class="flex-1 h-10" onclick={() => { showAddSecret = false; newSecretName = ''; newSecretValue = ''; }}>Cancel</Button>
<Button variant="default" class="flex-[2] h-10" onclick={handleAddSecret}>Save Secret</Button>
</div>
</div>
{:else}
<Button variant="outline" class="w-full h-11 border-dashed border-2 font-medium text-sm" onclick={() => showAddSecret = true}>
<Plus size={20} class="mr-2" /> Add Secret
</Button>
{/if}
</Card>
</div>
{:else if activeTab === 'system'} {:else if activeTab === 'system'}
<div class="animate-in slide-in-from-bottom-4 duration-500 space-y-6"> <div class="animate-in slide-in-from-bottom-4 duration-500 space-y-6">
<Card class="p-5 shadow-xl"> <Card class="p-5 shadow-xl">