Compare commits

...

2 Commits

Author SHA1 Message Date
adamlamers 2daa967863 more integration tests
Continuous Integration / frontend-check (push) Successful in 9m39s
Continuous Integration / backend-tests (push) Successful in 9m57s
Continuous Integration / e2e-tests (push) Successful in 19m6s
2026-04-29 20:32:58 -04:00
adamlamers 54d2b0c025 file deletion detection 2026-04-29 18:21:58 -04:00
13 changed files with 1162 additions and 16 deletions
+10
View File
@@ -318,6 +318,16 @@ def initialize_storage_hardware(
try:
if storage_provider.initialize_media(media_record.identifier):
# Persist auto-generated device_path to DB so archiver finds the same dir
current_config = (
json.loads(media_record.extra_config)
if media_record.extra_config
else {}
)
if "device_path" not in current_config:
current_config["device_path"] = storage_provider.device_path
media_record.extra_config = json.dumps(current_config)
db_session.commit()
return {"message": "Hardware initialization complete."}
except PermissionError as pe:
raise HTTPException(status_code=403, detail=str(pe))
+25 -11
View File
@@ -1,10 +1,26 @@
import atexit
import os
import shutil
import tempfile
from typing import Any, BinaryIO, Dict, Optional
from loguru import logger
from .base import AbstractStorageProvider
# Track auto-created temp dirs for cleanup at process exit
_auto_temp_dirs: set = set()
def _cleanup_temp_dirs():
for d in _auto_temp_dirs:
try:
shutil.rmtree(d, ignore_errors=True)
except Exception:
pass
atexit.register(_cleanup_temp_dirs)
class MockLTOProvider(AbstractStorageProvider):
provider_id = "mock_lto"
@@ -19,27 +35,28 @@ class MockLTOProvider(AbstractStorageProvider):
"device_path": {
"type": "string",
"title": "Mock Directory Path",
"description": "Path to a directory representing the tape drive (e.g., /tmp/mock_lto)",
"required": True,
"default": "/tmp/mock_lto",
"description": "Path to a directory representing the tape drive (optional — auto-created if omitted)",
"required": False,
}
}
def __init__(self, config: Dict[str, Any]):
    """Create the mock drive.

    If ``device_path`` is present in *config* it is used as-is; otherwise a
    scratch directory is created and scheduled for removal at process exit.

    FIX: removed the stale duplicate assignment
    ``self.device_path = config.get("device_path", "/tmp/mock_lto")`` that was
    left in front of the new provided/auto-created logic — it was a dead store
    that the if/else immediately overwrote.
    """
    provided_path = config.get("device_path")
    if provided_path:
        self.device_path: str = provided_path
    else:
        # Auto-provision a scratch dir; tracked so it is cleaned up at exit.
        self.device_path = tempfile.mkdtemp(prefix="tapehoard_mock_lto_")
        _auto_temp_dirs.add(self.device_path)
    os.makedirs(self.device_path, exist_ok=True)
    # We store metadata in a .mam file inside the mock directory.
    self.mam_path = os.path.join(self.device_path, ".mam")
def get_name(self) -> str:
    """Human-readable label for this provider."""
    return "Mock LTO Drive"
def check_online(self, force: bool = False) -> bool:
    """Report availability; the mock is 'online' while its directory exists.

    ``force`` is accepted for interface compatibility and ignored here.
    """
    drive_dir = self.device_path
    return os.path.exists(drive_dir)
def check_existing_data(self) -> bool:
# Check if there are archive files in the directory
if not self.check_online():
return False
for f in os.listdir(self.device_path):
@@ -57,7 +74,6 @@ class MockLTOProvider(AbstractStorageProvider):
return None
def initialize_media(self, media_id: str) -> bool:
# Clear out the directory
for item in os.listdir(self.device_path):
item_path = os.path.join(self.device_path, item)
if os.path.isfile(item_path):
@@ -65,7 +81,6 @@ class MockLTOProvider(AbstractStorageProvider):
elif os.path.isdir(item_path):
shutil.rmtree(item_path)
# Write the new media_id
with open(self.mam_path, "w") as f:
f.write(media_id)
return True
@@ -77,7 +92,6 @@ class MockLTOProvider(AbstractStorageProvider):
if not self.prepare_for_write(media_id):
raise Exception("Media mismatch")
# Determine the next file number
file_num = 0
while os.path.exists(os.path.join(self.device_path, f"archive_{file_num}.tar")):
file_num += 1
@@ -90,7 +104,7 @@ class MockLTOProvider(AbstractStorageProvider):
return str(file_num)
def get_utilization(self) -> Optional[float]:
    """Return a fixed mock utilization fraction (10%).

    FIX: the pasted block contained two consecutive ``return 0.1`` lines
    (the pre- and post-change diff lines without markers); the second was
    unreachable and has been removed.
    """
    return 0.1  # Mock utilization
def finalize_media(self, media_id: str):
    # Marks the given media as finalized; the mock provider only logs the
    # event — no on-disk state is changed.
    logger.info(f"Mock media {media_id} finalized")
+75
View File
@@ -200,3 +200,78 @@ def test_browse_queue_virtual_fs(client, db_session):
response = client.get("/restores/queue/browse?path=/source/dir")
assert response.status_code == 200
assert response.json()[0]["name"] == "file.txt"
def test_deleted_file_rejected_from_restore_queue(client, db_session):
    """A file flagged is_deleted must be refused when queued for recovery."""
    disk = models.StorageMedia(
        media_type="hdd", identifier="M1", capacity=1000, status="active"
    )
    db_session.add(disk)
    db_session.flush()

    gone = models.FilesystemState(
        file_path="/data/deleted.txt", size=100, mtime=1000, is_deleted=True
    )
    db_session.add(gone)
    db_session.flush()

    # The file does have a stored version — deletion alone must block restore.
    db_session.add(
        models.FileVersion(
            filesystem_state_id=gone.id,
            media_id=disk.id,
            file_number="1",
            offset_start=0,
            offset_end=100,
        )
    )
    db_session.commit()

    resp = client.post(f"/restores/queue/file/{gone.id}")
    assert resp.status_code == 400
    assert "marked as deleted" in resp.json()["detail"]
def test_manifest_excludes_deleted_files(client, db_session):
    """The restore manifest must count only non-deleted queued files."""
    tape = models.StorageMedia(
        media_type="tape", identifier="T001", capacity=1000, status="active"
    )
    db_session.add(tape)
    db_session.flush()

    kept = models.FilesystemState(
        file_path="/source/keep.bin", size=500, mtime=1, is_deleted=False
    )
    removed = models.FilesystemState(
        file_path="/source/gone.bin", size=500, mtime=1, is_deleted=True
    )
    db_session.add_all([kept, removed])
    db_session.flush()

    # One stored version per file, both on the same tape.
    for file_number, state in (("1", kept), ("2", removed)):
        db_session.add(
            models.FileVersion(
                filesystem_state_id=state.id,
                media_id=tape.id,
                file_number=file_number,
                offset_start=0,
                offset_end=500,
            )
        )

    # Queue both; only the live file should survive into the manifest.
    db_session.add_all(
        [
            models.RestoreCart(filesystem_state_id=kept.id),
            models.RestoreCart(filesystem_state_id=removed.id),
        ]
    )
    db_session.commit()

    resp = client.get("/restores/manifest")
    assert resp.status_code == 200
    body = resp.json()
    assert body["total_files"] == 1
    assert body["total_size"] == 500
+130
View File
@@ -93,3 +93,133 @@ def test_ignore_hardware(client):
response = client.get("/system/settings")
assert "ignored_hardware" in response.json()
assert "DISK_001" in response.json()["ignored_hardware"]
def test_scan_status_includes_files_missing(client):
    """Scan status must expose a files_missing metric, zero on a fresh system."""
    resp = client.get("/system/scan/status")
    assert resp.status_code == 200
    body = resp.json()
    assert "files_missing" in body
    assert body["files_missing"] == 0
def test_list_discrepancies_empty(client):
    """With no deleted files recorded, the discrepancy list is empty."""
    resp = client.get("/system/discrepancies")
    assert resp.status_code == 200
    assert resp.json() == []
def test_list_discrepancies_deleted_file(client, db_session):
    """A confirmed-deleted file shows up in the discrepancy listing."""
    stale = models.FilesystemState(
        file_path="/data/old.txt",
        size=100,
        mtime=1000,
        is_deleted=True,
        is_ignored=False,
        sha256_hash=None,
    )
    db_session.add(stale)
    db_session.commit()

    resp = client.get("/system/discrepancies")
    assert resp.status_code == 200
    listing = resp.json()
    assert len(listing) == 1
    entry = listing[0]
    assert entry["path"] == "/data/old.txt"
    assert entry["is_deleted"] is True
def test_confirm_file_deleted(client, db_session):
    """POST .../confirm flips is_deleted on the targeted record."""
    target = models.FilesystemState(
        file_path="/data/verify.txt",
        size=50,
        mtime=2000,
        is_deleted=False,
    )
    db_session.add(target)
    db_session.commit()

    resp = client.post(f"/system/discrepancies/{target.id}/confirm")
    assert resp.status_code == 200
    assert "marked as deleted" in resp.json()["message"]

    # Re-read through the session to observe the server-side change.
    db_session.expire_all()
    db_session.refresh(target)
    assert target.is_deleted is True
def test_confirm_file_deleted_not_found(client):
    """Confirming an unknown file id yields 404."""
    resp = client.post("/system/discrepancies/9999/confirm")
    assert resp.status_code == 404
def test_dismiss_discrepancy(client, db_session):
    """Dismissing a discrepancy clears the is_deleted flag again."""
    flagged = models.FilesystemState(
        file_path="/data/dismiss.txt",
        size=50,
        mtime=2000,
        is_deleted=True,
    )
    db_session.add(flagged)
    db_session.commit()

    resp = client.post(f"/system/discrepancies/{flagged.id}/dismiss")
    assert resp.status_code == 200
    assert "dismissed" in resp.json()["message"]

    # Re-read through the session to observe the server-side change.
    db_session.expire_all()
    db_session.refresh(flagged)
    assert flagged.is_deleted is False
def test_delete_file_record(client, db_session):
    """DELETE on a discrepancy removes the record and its stored versions."""
    disk = models.StorageMedia(
        media_type="hdd", identifier="M1", capacity=1000, status="active"
    )
    db_session.add(disk)
    db_session.flush()

    doomed = models.FilesystemState(
        file_path="/data/hard_delete.txt",
        size=100,
        mtime=1000,
        is_deleted=True,
    )
    db_session.add(doomed)
    db_session.flush()

    db_session.add(
        models.FileVersion(
            filesystem_state_id=doomed.id,
            media_id=disk.id,
            file_number="1",
            offset_start=0,
            offset_end=100,
        )
    )
    db_session.commit()
    doomed_id = doomed.id

    resp = client.delete(f"/system/discrepancies/{doomed_id}")
    assert resp.status_code == 200

    db_session.expire_all()
    # Both the filesystem record and its version rows must be gone.
    leftover_state = (
        db_session.query(models.FilesystemState).filter_by(id=doomed_id).first()
    )
    leftover_version = (
        db_session.query(models.FileVersion)
        .filter_by(filesystem_state_id=doomed_id)
        .first()
    )
    assert leftover_state is None
    assert leftover_version is None
+115
View File
@@ -207,3 +207,118 @@ def test_hash_file_batch_fast_nonexistent():
results = _hash_file_batch_fast(["/nonexistent/path"], _FAST_HASH_BINARY)
# Non-existent files may or may not appear in results depending on binary behavior
assert isinstance(results, dict)
def test_missing_file_marked_deleted_at_end_of_scan(db_session, mocker):
    """Files that a scan never encounters are flagged is_deleted afterwards."""
    svc = ScannerService()
    mocker.patch("app.services.scanner._FAST_FIND_BINARY", None)
    mocker.patch("app.api.system.get_source_roots", return_value=["/mock_source"])
    mocker.patch("app.api.system.get_exclusion_spec", return_value=None)
    # The walk yields nothing, so the tracked file below is never re-seen.
    mocker.patch("os.walk", return_value=[])
    # os.path.exists: True for source roots, False only for the missing file.
    mocker.patch(
        "os.path.exists", side_effect=lambda p: p != "/old/missing.txt"
    )

    seen_long_ago = datetime.now(timezone.utc).replace(year=2020)
    stale = models.FilesystemState(
        file_path="/old/missing.txt",
        size=100,
        mtime=1,
        is_ignored=False,
        is_deleted=False,
        last_seen_timestamp=seen_long_ago,
    )
    db_session.add(stale)
    db_session.commit()

    svc.scan_sources(db_session)

    db_session.expire_all()
    db_session.refresh(stale)
    assert stale.is_deleted is True
    assert svc.files_missing > 0
def test_existing_file_not_marked_deleted(db_session, mocker):
    """Files re-observed by a scan keep is_deleted=False and get fresh stats."""
    svc = ScannerService()
    mocker.patch("app.services.scanner._FAST_FIND_BINARY", None)
    mocker.patch("app.api.system.get_source_roots", return_value=["/mock_source"])
    mocker.patch("app.api.system.get_exclusion_spec", return_value=None)
    mocker.patch("os.path.exists", return_value=True)
    mocker.patch("os.walk", return_value=[("/mock_source", [], ["file.txt"])])

    fake_stat = mocker.MagicMock()
    fake_stat.st_size = 100
    fake_stat.st_mtime = 99999
    mocker.patch("os.stat", return_value=fake_stat)

    seen_long_ago = datetime.now(timezone.utc).replace(year=2020)
    tracked = models.FilesystemState(
        file_path="/mock_source/file.txt",
        size=50,
        mtime=1,
        is_ignored=False,
        is_deleted=False,
        last_seen_timestamp=seen_long_ago,
    )
    db_session.add(tracked)
    db_session.commit()

    svc.scan_sources(db_session)

    db_session.expire_all()
    db_session.refresh(tracked)
    assert tracked.is_deleted is False
    assert tracked.size == 100
def test_missing_file_during_hashing_marked_deleted(db_session, mocker):
    """A file that vanishes before hashing gets flagged is_deleted."""
    svc = ScannerService()
    mocker.patch("app.services.scanner._FAST_HASH_BINARY", None)

    vanished = models.FilesystemState(
        file_path="/data/vanished.bin", size=10, mtime=1, is_ignored=False
    )
    db_session.add(vanished)
    db_session.commit()

    # Hashing yields nothing and the file no longer exists on disk.
    mocker.patch.object(ScannerService, "compute_sha256", return_value=None)
    mocker.patch("os.path.exists", return_value=False)

    svc.run_hashing()

    db_session.refresh(vanished)
    assert vanished.is_deleted is True
def test_missing_file_skipped_in_hashing_query(db_session):
    """Records already flagged is_deleted must not be selected for hashing."""
    already_gone = models.FilesystemState(
        file_path="/data/deleted.bin",
        size=10,
        mtime=1,
        is_ignored=False,
        is_deleted=True,
        sha256_hash=None,
    )
    db_session.add(already_gone)
    db_session.commit()

    # Mirror the scanner's pending-hash selection criteria.
    hash_targets = (
        db_session.query(models.FilesystemState)
        .filter(
            models.FilesystemState.sha256_hash.is_(None),
            models.FilesystemState.is_ignored.is_(False),
            models.FilesystemState.is_deleted.is_(False),
        )
        .all()
    )
    assert len(hash_targets) == 0
+4 -1
View File
@@ -8,7 +8,10 @@
"build": "vite build",
"preview": "vite preview",
"check": "svelte-check --tsconfig ./tsconfig.json",
"check:watch": "svelte-check --tsconfig ./tsconfig.json --watch"
"check:watch": "svelte-check --tsconfig ./tsconfig.json --watch",
"test": "playwright test",
"test:ui": "playwright test --ui",
"test:debug": "playwright test --debug"
},
"devDependencies": {
"@hey-api/openapi-ts": "^0.96.1",
+4 -4
View File
@@ -12,8 +12,8 @@ export default defineConfig({
forbidOnly: !!process.env.CI,
/* Retry on CI only */
retries: process.env.CI ? 2 : 0,
/* Opt out of parallel tests on CI. */
workers: process.env.CI ? 1 : undefined,
/* Run tests serially to avoid DB conflicts */
workers: 1,
/* Reporter to use. See https://playwright.dev/docs/test-reporters */
reporter: [['html', { open: 'never' }]],
/* Shared settings for all the projects below. See https://playwright.dev/docs/api/class-testoptions. */
@@ -39,13 +39,13 @@ export default defineConfig({
{
command: 'cd ../backend && rm -f e2e_test.db* && DATABASE_URL="sqlite:///e2e_test.db" TAPEHOARD_TEST_MODE="true" uv run python -m app.start_test_server --host 0.0.0.0 --port 8001',
url: 'http://localhost:8001/health',
reuseExistingServer: !process.env.CI,
reuseExistingServer: false,
timeout: 120 * 1000,
},
{
command: 'VITE_API_URL=http://localhost:8001 npm run dev',
url: 'http://localhost:5173',
reuseExistingServer: !process.env.CI,
reuseExistingServer: false,
timeout: 120 * 1000,
},
],
+256
View File
@@ -0,0 +1,256 @@
import { test, expect } from '@playwright/test';
import fs from 'fs';
import path from 'path';
import { API_URL, SOURCE_ROOT, RESTORE_DEST, setupRequestContext, configureBackend, waitForScanComplete } from './helpers';
// End-to-end coverage of the backup -> restore pipeline against the mock LTO
// provider. Each test provisions its own media, scans, backs up, and deletes
// the media afterwards. Note: the scan/backup/poll sequence is repeated in
// several tests — a candidate for a shared helper alongside waitForScanComplete.
test.describe('Backup & Restore', () => {
test.beforeEach(async () => {
// Seed the source tree with one top-level file and one nested file.
fs.mkdirSync(SOURCE_ROOT, { recursive: true });
fs.mkdirSync(RESTORE_DEST, { recursive: true });
fs.writeFileSync(path.join(SOURCE_ROOT, 'backup_test.txt'), 'backup me');
fs.mkdirSync(path.join(SOURCE_ROOT, 'subdir'), { recursive: true });
fs.writeFileSync(path.join(SOURCE_ROOT, 'subdir', 'nested.txt'), 'nested content');
});
// Happy path: auto-selection of target media, then verify the file gained a
// stored version.
test('auto-backup to registered media completes', async ({}) => {
const requestContext = await setupRequestContext();
await configureBackend(requestContext);
const registerResp = await requestContext.post(`${API_URL}/inventory/media`, {
data: {
identifier: 'BACKUP_TAPE_001',
media_type: 'mock_lto',
generation_tier: 'LTO-8',
capacity: 12000,
location: 'Test Lab',
config: {}
}
});
expect(registerResp.ok()).toBe(true);
const media = await registerResp.json();
const initResp = await requestContext.post(`${API_URL}/inventory/media/${media.id}/initialize`);
expect(initResp.ok()).toBe(true);
const scanResp = await requestContext.post(`${API_URL}/system/scan`);
expect(scanResp.ok()).toBe(true);
await waitForScanComplete(requestContext);
const backupResp = await requestContext.post(`${API_URL}/backups/trigger/auto`);
expect(backupResp.ok()).toBe(true);
const backupResult = await backupResp.json();
expect(backupResult.job_id).toBeDefined();
// Poll the job list until the BACKUP job reports COMPLETED.
await expect(async () => {
const jobsResp = await requestContext.get(`${API_URL}/system/jobs`);
const jobs = await jobsResp.json();
const backupJob = (jobs as Array<any>).find((j: any) => j.job_type === 'BACKUP');
expect(backupJob).toBeDefined();
expect(backupJob.status).toBe('COMPLETED');
}).toPass({ timeout: 30000 });
const metaResp = await requestContext.get(`${API_URL}/inventory/metadata`, {
params: { path: path.join(SOURCE_ROOT, 'backup_test.txt') }
});
expect(metaResp.ok()).toBe(true);
const meta = await metaResp.json();
expect(meta.versions.length).toBeGreaterThan(0);
await requestContext.delete(`${API_URL}/inventory/media/${media.id}`);
await requestContext.dispose();
});
// Triggering a backup against an explicit media id reports that media back.
test('backup to specific media works', async ({}) => {
const requestContext = await setupRequestContext();
await configureBackend(requestContext);
const registerResp = await requestContext.post(`${API_URL}/inventory/media`, {
data: {
identifier: 'SPECIFIC_TAPE_001',
media_type: 'mock_lto',
generation_tier: 'LTO-7',
capacity: 6000,
config: {}
}
});
const media = await registerResp.json();
const initResp = await requestContext.post(`${API_URL}/inventory/media/${media.id}/initialize`);
expect(initResp.ok()).toBe(true);
const scanResp = await requestContext.post(`${API_URL}/system/scan`);
expect(scanResp.ok()).toBe(true);
await waitForScanComplete(requestContext);
const backupResp = await requestContext.post(`${API_URL}/backups/trigger/${media.id}`);
expect(backupResp.ok()).toBe(true);
const result = await backupResp.json();
expect(result.media).toBe('SPECIFIC_TAPE_001');
await requestContext.delete(`${API_URL}/inventory/media/${media.id}`);
await requestContext.dispose();
});
// No media registered at all -> the trigger endpoint must reject with 400.
test('backup fails when no active media', async ({}) => {
const requestContext = await setupRequestContext();
const backupResp = await requestContext.post(`${API_URL}/backups/trigger/auto`);
expect(backupResp.status()).toBe(400);
const errBody = await backupResp.json();
expect(errBody.detail.toLowerCase()).toContain('no active media');
await requestContext.dispose();
});
// Queue a backed-up file for restore, then clear the queue.
test('add file to restore queue and clear it', async ({}) => {
const requestContext = await setupRequestContext();
await configureBackend(requestContext);
const registerResp = await requestContext.post(`${API_URL}/inventory/media`, {
data: {
identifier: 'RESTORE_TAPE_001',
media_type: 'mock_lto',
generation_tier: 'LTO-8',
capacity: 12000,
config: {}
}
});
const media = await registerResp.json();
await requestContext.post(`${API_URL}/inventory/media/${media.id}/initialize`);
const scanResp = await requestContext.post(`${API_URL}/system/scan`);
expect(scanResp.ok()).toBe(true);
await waitForScanComplete(requestContext);
const backupResp = await requestContext.post(`${API_URL}/backups/trigger/${media.id}`);
expect(backupResp.ok()).toBe(true);
// Wait for the backup job to finish before touching the restore queue.
await expect(async () => {
const jobsResp = await requestContext.get(`${API_URL}/system/jobs`);
const jobs = await jobsResp.json();
const backupJob = (jobs as Array<any>).find((j: any) => j.job_type === 'BACKUP');
expect(backupJob).toBeDefined();
expect(backupJob.status).toBe('COMPLETED');
}).toPass({ timeout: 30000 });
const metaResp = await requestContext.get(`${API_URL}/inventory/metadata`, {
params: { path: path.join(SOURCE_ROOT, 'backup_test.txt') }
});
expect(metaResp.ok()).toBe(true);
const meta = await metaResp.json();
const fileId = meta.id;
const queueResp = await requestContext.post(`${API_URL}/restores/queue/file/${fileId}`);
expect(queueResp.ok()).toBe(true);
const listQueueResp = await requestContext.get(`${API_URL}/restores/queue`);
const queue = await listQueueResp.json();
expect((queue as Array<any>).length).toBeGreaterThan(0);
const clearResp = await requestContext.post(`${API_URL}/restores/queue/clear`);
expect(clearResp.ok()).toBe(true);
const afterClearResp = await requestContext.get(`${API_URL}/restores/queue`);
const afterClear = await afterClearResp.json();
expect((afterClear as Array<any>).length).toBe(0);
await requestContext.delete(`${API_URL}/inventory/media/${media.id}`);
await requestContext.dispose();
});
// Backend counterpart of the discrepancy flow: a file confirmed as deleted
// must be rejected by the restore queue with a 400.
test('deleted file cannot be added to restore queue', async ({}) => {
const requestContext = await setupRequestContext();
await configureBackend(requestContext);
const registerResp = await requestContext.post(`${API_URL}/inventory/media`, {
data: {
identifier: 'DELETE_TAPE_001',
media_type: 'mock_lto',
generation_tier: 'LTO-8',
capacity: 12000,
config: {}
}
});
const media = await registerResp.json();
await requestContext.post(`${API_URL}/inventory/media/${media.id}/initialize`);
const scanResp = await requestContext.post(`${API_URL}/system/scan`);
expect(scanResp.ok()).toBe(true);
await waitForScanComplete(requestContext);
const backupResp = await requestContext.post(`${API_URL}/backups/trigger/${media.id}`);
expect(backupResp.ok()).toBe(true);
await expect(async () => {
const jobsResp = await requestContext.get(`${API_URL}/system/jobs`);
const jobs = await jobsResp.json();
const backupJob = (jobs as Array<any>).find((j: any) => j.job_type === 'BACKUP');
expect(backupJob).toBeDefined();
expect(backupJob.status).toBe('COMPLETED');
}).toPass({ timeout: 30000 });
const metaResp = await requestContext.get(`${API_URL}/inventory/metadata`, {
params: { path: path.join(SOURCE_ROOT, 'backup_test.txt') }
});
expect(metaResp.ok()).toBe(true);
const meta = await metaResp.json();
const fileId = meta.id;
// Mark the file as deleted, then expect the queue endpoint to refuse it.
const confirmResp = await requestContext.post(`${API_URL}/system/discrepancies/${fileId}/confirm`);
expect(confirmResp.ok()).toBe(true);
const queueResp = await requestContext.post(`${API_URL}/restores/queue/file/${fileId}`);
expect(queueResp.status()).toBe(400);
const errBody = await queueResp.json();
expect(errBody.detail.toLowerCase()).toContain('deleted');
await requestContext.delete(`${API_URL}/inventory/media/${media.id}`);
await requestContext.dispose();
});
// The manifest endpoint must report file counts and the media needed.
test('recovery manifest calculates correctly', async ({}) => {
const requestContext = await setupRequestContext();
await configureBackend(requestContext);
const registerResp = await requestContext.post(`${API_URL}/inventory/media`, {
data: {
identifier: 'MANIFEST_TAPE_001',
media_type: 'mock_lto',
generation_tier: 'LTO-8',
capacity: 12000,
config: {}
}
});
const media = await registerResp.json();
await requestContext.post(`${API_URL}/inventory/media/${media.id}/initialize`);
const scanResp = await requestContext.post(`${API_URL}/system/scan`);
expect(scanResp.ok()).toBe(true);
await waitForScanComplete(requestContext);
const backupResp = await requestContext.post(`${API_URL}/backups/trigger/${media.id}`);
expect(backupResp.ok()).toBe(true);
await expect(async () => {
const jobsResp = await requestContext.get(`${API_URL}/system/jobs`);
const jobs = await jobsResp.json();
const backupJob = (jobs as Array<any>).find((j: any) => j.job_type === 'BACKUP');
expect(backupJob).toBeDefined();
expect(backupJob.status).toBe('COMPLETED');
}).toPass({ timeout: 30000 });
const metaResp = await requestContext.get(`${API_URL}/inventory/metadata`, {
params: { path: path.join(SOURCE_ROOT, 'backup_test.txt') }
});
expect(metaResp.ok()).toBe(true);
const meta = await metaResp.json();
await requestContext.post(`${API_URL}/restores/queue/file/${meta.id}`);
const manifestResp = await requestContext.get(`${API_URL}/restores/manifest`);
expect(manifestResp.ok()).toBe(true);
const manifest = await manifestResp.json();
expect(manifest.total_files).toBeGreaterThan(0);
expect(manifest.media_required.length).toBeGreaterThan(0);
await requestContext.delete(`${API_URL}/inventory/media/${media.id}`);
await requestContext.dispose();
});
});
@@ -163,4 +163,68 @@ test.describe('TapeHoard Golden Path', () => {
expect(fs.readFileSync(restoredFilePath, 'utf-8')).toBe('Hello world 2');
}
});
// Full discrepancy lifecycle driven through the API: confirm a file as
// deleted, verify restore is blocked, then dismiss the discrepancy.
test('file deletion discrepancy workflow', async ({ page }) => {
page.on('console', msg => console.log('BROWSER CONSOLE:', msg.text()));
page.on('pageerror', err => console.log('BROWSER ERROR:', err.message));
console.log('Step 1: Create test file and scan');
const testFilePath = path.join(SOURCE_ROOT, 'discrepancy_test.txt');
fs.writeFileSync(testFilePath, 'will be deleted');
// NOTE(review): `page.context().request` is a plain property, so the `await`
// here is a no-op — harmless but misleading.
const requestContext = await page.context().request;
const scanResp = await requestContext.post(`${API_URL}/system/scan`);
expect(scanResp.ok()).toBe(true);
// Wait for scan to complete
await expect(async () => {
const statusResp = await requestContext.get(`${API_URL}/system/scan/status`);
const status = await statusResp.json();
expect(status.is_running).toBe(false);
}).toPass({ timeout: 20000 });
// Also wait for hashing to complete
// NOTE(review): fixed 2s sleep assumes hashing finishes in time — prefer
// polling a hashing-status endpoint if one exists.
await page.waitForTimeout(2000);
// Get the file ID from the metadata endpoint
const encodedPath = encodeURIComponent(testFilePath);
const metaResp = await requestContext.get(`${API_URL}/inventory/metadata?path=${encodedPath}`);
expect(metaResp.ok()).toBe(true);
const meta = await metaResp.json();
const fileId = meta.id;
// Mark the file as deleted via discrepancy API
const confirmResp = await requestContext.post(`${API_URL}/system/discrepancies/${fileId}/confirm`);
if (!confirmResp.ok()) {
const errBody = await confirmResp.json();
console.log('Confirm failed:', errBody);
}
expect(confirmResp.ok()).toBe(true);
// Verify it shows in discrepancies
const discrepanciesResp = await requestContext.get(`${API_URL}/system/discrepancies`);
const discrepancies = await discrepanciesResp.json();
const deletedItem = (discrepancies as Array<any>).find((d: any) => d.path === testFilePath);
expect(deletedItem).toBeDefined();
expect(deletedItem.is_deleted).toBe(true);
console.log('Step 2: Verify deleted file cannot be restored');
const restoreResp = await requestContext.post(`${API_URL}/restores/queue/file/${fileId}`);
expect(restoreResp.status()).toBe(400);
const restoreBody = await restoreResp.json();
expect(restoreBody.detail.toLowerCase()).toContain('deleted');
console.log('Step 3: Dismiss the discrepancy');
const dismissResp = await requestContext.post(`${API_URL}/system/discrepancies/${fileId}/dismiss`);
expect(dismissResp.ok()).toBe(true);
// Verify it's no longer in discrepancies
const afterDismissResp = await requestContext.get(`${API_URL}/system/discrepancies`);
const afterDismiss = await afterDismissResp.json();
const stillDeleted = (afterDismiss as Array<any>).find((d: any) => d.path === testFilePath);
expect(stillDeleted).toBeUndefined();
fs.rmSync(testFilePath, { force: true });
// NOTE(review): this disposes the page's shared request context rather than
// one created by this test — confirm that is intentional.
await requestContext.dispose();
});
});
+47
View File
@@ -0,0 +1,47 @@
import { expect, request } from '@playwright/test';
// Base URL of the backend test server (started by the Playwright webServer
// config on port 8001).
export const API_URL = 'http://localhost:8001';
// Scratch directories shared by the e2e suites.
export const SOURCE_ROOT = '/tmp/tapehoard_e2e_source';
export const MOCK_LTO_PATH = '/tmp/tapehoard_e2e_mock_lto';
export const RESTORE_DEST = '/tmp/tapehoard_e2e_restore';
/**
 * Builds a fresh Playwright request context and asks the backend to reset
 * its test state. The caller must dispose() the returned context.
 */
export async function setupRequestContext() {
  const ctx = await request.newContext();
  const reset = await ctx.post(`${API_URL}/system/test/reset`);
  if (!reset.ok()) {
    // Reset failure is logged, not fatal — the suite may still be usable.
    console.error('Failed to reset test environment');
  }
  return ctx;
}
/**
 * Polls /system/scan/status every 500ms until the scan stops running or the
 * timeout elapses; throws on timeout.
 */
export async function waitForScanComplete(requestContext: any, timeoutMs = 20000) {
  const deadline = Date.now() + timeoutMs;
  while (Date.now() < deadline) {
    const resp = await requestContext.get(`${API_URL}/system/scan/status`);
    const snapshot = await resp.json();
    if (!snapshot.is_running) {
      return;
    }
    await new Promise(resolve => setTimeout(resolve, 500));
  }
  throw new Error('Scan did not complete within timeout');
}
/**
 * Points the backend at the suite's scratch directories: source roots first,
 * then restore destinations.
 */
export async function configureBackend(requestContext: any) {
  const settings: Array<[string, string]> = [
    ['source_roots', JSON.stringify([SOURCE_ROOT])],
    ['restore_destinations', JSON.stringify([RESTORE_DEST])],
  ];
  for (const [key, value] of settings) {
    await requestContext.post(`${API_URL}/system/settings`, {
      data: { key, value }
    });
  }
}
+131
View File
@@ -0,0 +1,131 @@
import { test, expect } from '@playwright/test';
import fs from 'fs';
import path from 'path';
import { API_URL, setupRequestContext } from './helpers';
// Media inventory lifecycle: register/initialize, retire, hardware discovery,
// and duplicate-identifier rejection.
test.describe('Media Lifecycle', () => {
test('register and initialize mock LTO media', async ({}) => {
const requestContext = await setupRequestContext();
console.log('Step 1: Register media via API');
const registerResp = await requestContext.post(`${API_URL}/inventory/media`, {
data: {
identifier: 'TEST_LTO_001',
media_type: 'mock_lto',
generation_tier: 'LTO-8',
capacity: 12000,
location: 'Test Lab',
config: {}
}
});
expect(registerResp.ok()).toBe(true);
const media = await registerResp.json();
expect(media.identifier).toBe('TEST_LTO_001');
console.log('Step 2: Verify media appears in inventory');
const listResp = await requestContext.get(`${API_URL}/inventory/media`);
const mediaList = await listResp.json();
const found = (mediaList as Array<any>).find((m: any) => m.identifier === 'TEST_LTO_001');
expect(found).toBeDefined();
expect(found.status).toBe('active');
console.log('Step 3: Initialize media');
const initResp = await requestContext.post(`${API_URL}/inventory/media/${media.id}/initialize`);
expect(initResp.ok()).toBe(true);
console.log('Step 4: Verify media status after initialization');
const afterInitResp = await requestContext.get(`${API_URL}/inventory/media`);
const afterInit = await afterInitResp.json();
const updated = (afterInit as Array<any>).find((m: any) => m.identifier === 'TEST_LTO_001');
expect(updated).toBeDefined();
// Clean up the media so later tests start from a known inventory.
await requestContext.delete(`${API_URL}/inventory/media/${media.id}`);
await requestContext.dispose();
});
// Retired media must refuse backups with a 400.
test('register and retire HDD media', async ({}) => {
const requestContext = await setupRequestContext();
const hddPath = '/tmp/tapehoard_e2e_hdd_test';
fs.mkdirSync(hddPath, { recursive: true });
try {
const registerResp = await requestContext.post(`${API_URL}/inventory/media`, {
data: {
identifier: 'TEST_HDD_001',
media_type: 'hdd',
generation_tier: 'HDD',
capacity: 1000000,
location: 'Test Lab',
config: { mount_path: hddPath }
}
});
expect(registerResp.ok()).toBe(true);
const media = await registerResp.json();
expect(media.media_type).toBe('hdd');
console.log('Step 2: Retire media');
const retireResp = await requestContext.patch(`${API_URL}/inventory/media/${media.id}`, {
data: { status: 'retired' }
});
expect(retireResp.ok()).toBe(true);
const retired = await retireResp.json();
expect(retired.status).toBe('retired');
console.log('Step 3: Verify retired media cannot accept backups');
const backupResp = await requestContext.post(`${API_URL}/backups/trigger/${media.id}`);
expect(backupResp.status()).toBe(400);
const errBody = await backupResp.json();
expect(errBody.detail.toLowerCase()).toContain('cannot');
await requestContext.delete(`${API_URL}/inventory/media/${media.id}`);
} finally {
// Always remove the scratch mount dir, even when assertions fail.
fs.rmSync(hddPath, { recursive: true, force: true });
}
await requestContext.dispose();
});
// Discovery should succeed and return a (possibly empty) array.
test('discover hardware returns empty when no drives configured', async ({}) => {
const requestContext = await setupRequestContext();
const discoverResp = await requestContext.get(`${API_URL}/system/hardware/discover`);
expect(discoverResp.ok()).toBe(true);
const devices = await discoverResp.json();
expect(Array.isArray(devices)).toBe(true);
await requestContext.dispose();
});
// Registering the same identifier twice must be rejected with 400.
test('duplicate media identifier is rejected', async ({}) => {
const requestContext = await setupRequestContext();
const registerResp = await requestContext.post(`${API_URL}/inventory/media`, {
data: {
identifier: 'TEST_DUP_001',
media_type: 'mock_lto',
generation_tier: 'LTO-7',
capacity: 6000,
config: {}
}
});
expect(registerResp.ok()).toBe(true);
const media = await registerResp.json();
try {
const dupResp = await requestContext.post(`${API_URL}/inventory/media`, {
data: {
identifier: 'TEST_DUP_001',
media_type: 'mock_lto',
generation_tier: 'LTO-7',
capacity: 6000,
config: {}
}
});
expect(dupResp.status()).toBe(400);
} finally {
await requestContext.delete(`${API_URL}/inventory/media/${media.id}`);
}
await requestContext.dispose();
});
});
+171
View File
@@ -0,0 +1,171 @@
import { test, expect } from '@playwright/test';
import { API_URL, SOURCE_ROOT, setupRequestContext, configureBackend } from './helpers';
test.describe('Settings & System', () => {
test('CRUD system settings', async ({ page }) => {
const requestContext = await setupRequestContext();
console.log('Step 1: Get initial settings (should be empty)');
const getResp = await requestContext.get(`${API_URL}/system/settings`);
expect(getResp.ok()).toBe(true);
const initial = await getResp.json();
expect(Object.keys(initial).length).toBe(0);
console.log('Step 2: Create a new setting');
const createResp = await requestContext.post(`${API_URL}/system/settings`, {
data: { key: 'test_key', value: 'test_value' }
});
expect(createResp.ok()).toBe(true);
console.log('Step 3: Verify setting persisted');
const afterCreate = await requestContext.get(`${API_URL}/system/settings`);
const settings = await afterCreate.json();
expect(settings.test_key).toBe('test_value');
console.log('Step 4: Update existing setting');
const updateResp = await requestContext.post(`${API_URL}/system/settings`, {
data: { key: 'test_key', value: 'updated_value' }
});
expect(updateResp.ok()).toBe(true);
const afterUpdate = await requestContext.get(`${API_URL}/system/settings`);
const updated = await afterUpdate.json();
expect(updated.test_key).toBe('updated_value');
await requestContext.dispose();
});
test('source roots setting configures browse endpoint', async ({ page }) => {
  const requestContext = await setupRequestContext();
  // BUG FIX: configureBackend was not awaited. It presumably returns a
  // promise (it drives the backend) — TODO confirm in helpers — so the
  // browse request could race the configuration. Awaiting is a no-op if
  // the helper happens to be synchronous, so this is safe either way.
  await configureBackend(requestContext);
  const browseResp = await requestContext.get(`${API_URL}/system/browse?path=ROOT`);
  expect(browseResp.ok()).toBe(true);
  const roots = await browseResp.json();
  // The configured source root must appear among the browsable roots.
  const sourceRoot = (roots as Array<any>).find((r: any) => r.path === SOURCE_ROOT);
  expect(sourceRoot).toBeDefined();
  await requestContext.dispose();
});
test('dashboard stats reflect system state', async ({ page }) => {
  const requestContext = await setupRequestContext();
  // BUG FIX: configureBackend was not awaited, so the stats request could
  // race the backend configuration (flaky test). Awaiting a non-promise is
  // a harmless no-op, so this is safe even if the helper is synchronous.
  await configureBackend(requestContext);
  const statsResp = await requestContext.get(`${API_URL}/system/dashboard/stats`);
  expect(statsResp.ok()).toBe(true);
  const stats = await statsResp.json();
  // Verify the dashboard payload shape: counters plus per-media-type breakdown.
  expect(stats.monitored_files_count).toBeDefined();
  expect(stats.total_data_size).toBeDefined();
  expect(stats.media_distribution).toBeDefined();
  expect(stats.media_distribution.LTO).toBeDefined();
  expect(stats.media_distribution.HDD).toBeDefined();
  await requestContext.dispose();
});
test('job listing returns empty list initially', async ({ page }) => {
const requestContext = await setupRequestContext();
const jobsResp = await requestContext.get(`${API_URL}/system/jobs`);
expect(jobsResp.ok()).toBe(true);
const jobs = await jobsResp.json();
expect(Array.isArray(jobs)).toBe(true);
const countResp = await requestContext.get(`${API_URL}/system/jobs/count`);
const count = await countResp.json();
expect(count.count).toBe(0);
await requestContext.dispose();
});
test('hardware discovery returns empty when nothing configured', async ({ page }) => {
const requestContext = await setupRequestContext();
const discoverResp = await requestContext.get(`${API_URL}/system/hardware/discover`);
expect(discoverResp.ok()).toBe(true);
const devices = await discoverResp.json();
expect(Array.isArray(devices)).toBe(true);
await requestContext.dispose();
});
test('scan and indexing workflow', async ({ page }) => {
  const requestContext = await setupRequestContext();
  // BUG FIX: configureBackend was not awaited, letting the scan trigger
  // race the backend configuration. Awaiting is a no-op if the helper is
  // synchronous, so this is safe either way.
  await configureBackend(requestContext);
  // Trigger scan
  const scanResp = await requestContext.post(`${API_URL}/system/scan`);
  expect(scanResp.ok()).toBe(true);
  // Wait for scan to complete (scan runs asynchronously on the backend)
  await new Promise(r => setTimeout(r, 3000));
  const statusResp = await requestContext.get(`${API_URL}/system/scan/status`);
  const status = await statusResp.json();
  expect(status.is_running).toBe(false);
  // Trigger indexing
  const indexResp = await requestContext.post(`${API_URL}/system/index/hash`);
  expect(indexResp.ok()).toBe(true);
  await new Promise(r => setTimeout(r, 2000));
  const afterIndex = await requestContext.get(`${API_URL}/system/scan/status`);
  const indexStatus = await afterIndex.json();
  // Hashing runs in background, just verify it started without error
  expect(indexStatus.is_throttled).toBeDefined();
  await requestContext.dispose();
});
test('search returns results after scan and hash', async ({ page }) => {
  const requestContext = await setupRequestContext();
  // BUG FIX: configureBackend was not awaited; the scan below depends on
  // the backend being configured first. Awaiting is harmless if the helper
  // is synchronous.
  await configureBackend(requestContext);
  // Create a searchable file
  const fs = await import('fs');
  const path = await import('path');
  fs.writeFileSync(path.join(SOURCE_ROOT, 'searchable_file.txt'), 'searchable content here');
  const scanResp = await requestContext.post(`${API_URL}/system/scan`);
  expect(scanResp.ok()).toBe(true);
  // Wait for scan and hashing
  await new Promise(r => setTimeout(r, 5000));
  // Search requires at least 3 chars and hashed files
  const searchResp = await requestContext.get(`${API_URL}/system/search?q=searchable`);
  expect(searchResp.ok()).toBe(true);
  const results = await searchResp.json();
  expect(Array.isArray(results)).toBe(true);
  // Results may be empty if hashing hasn't completed; API is functional either way
  await requestContext.dispose();
});
test('tree endpoint returns source roots', async ({ page }) => {
  const requestContext = await setupRequestContext();
  // BUG FIX: configureBackend was not awaited, so the tree request could
  // run before the source roots were configured, making the non-empty
  // assertion below flaky. Awaiting a non-promise is a no-op, so safe.
  await configureBackend(requestContext);
  const treeResp = await requestContext.get(`${API_URL}/system/tree`);
  expect(treeResp.ok()).toBe(true);
  const tree = await treeResp.json();
  expect(Array.isArray(tree)).toBe(true);
  // At least one configured source root must appear in the tree.
  expect(tree.length).toBeGreaterThan(0);
  await requestContext.dispose();
});
test('host directory listing works', async ({ page }) => {
const requestContext = await setupRequestContext();
const lsResp = await requestContext.get(`${API_URL}/system/ls?path=/`);
expect(lsResp.ok()).toBe(true);
const dirs = await lsResp.json();
expect(Array.isArray(dirs)).toBe(true);
await requestContext.dispose();
});
});
+130
View File
@@ -0,0 +1,130 @@
import { test, expect } from '@playwright/test';
import fs from 'fs';
import path from 'path';
import { API_URL, SOURCE_ROOT, setupRequestContext } from './helpers';
test.describe('Source Management', () => {
test.beforeEach(async () => {
  // Seed a fresh source tree before each test: one file that should be
  // tracked, plus a subdirectory whose contents the exclusion tests ignore.
  // mkdirSync with recursive also creates SOURCE_ROOT itself if missing.
  fs.mkdirSync(path.join(SOURCE_ROOT, 'excluded_dir'), { recursive: true });
  fs.writeFileSync(path.join(SOURCE_ROOT, 'include.txt'), 'should be tracked');
  fs.writeFileSync(path.join(SOURCE_ROOT, 'excluded_dir', 'secret.txt'), 'should be ignored');
});
test('configure source roots and trigger scan', async ({ page }) => {
  const requestContext = await setupRequestContext();

  console.log('Step 1: Set source roots');
  const settingsResp = await requestContext.post(`${API_URL}/system/settings`, {
    data: { key: 'source_roots', value: JSON.stringify([SOURCE_ROOT]) }
  });
  expect(settingsResp.ok()).toBe(true);

  console.log('Step 2: Verify settings persisted');
  const getSettings = await requestContext.get(`${API_URL}/system/settings`);
  const settings = await getSettings.json();
  expect(settings.source_roots).toBeDefined();

  console.log('Step 3: Trigger scan');
  const scanResp = await requestContext.post(`${API_URL}/system/scan`);
  expect(scanResp.ok()).toBe(true);
  // BUG FIX: scanResp.json() returns a Promise, which is ALWAYS defined,
  // so the original assertion could never fail. Await the body so the
  // assertion actually inspects the response payload.
  expect(await scanResp.json()).toBeDefined();

  // Wait for scan to complete
  await new Promise(r => setTimeout(r, 3000));
  const statusResp = await requestContext.get(`${API_URL}/system/scan/status`);
  const status = await statusResp.json();
  expect(status.is_running).toBe(false);
  expect(status.total_files_found).toBeGreaterThan(0);
  await requestContext.dispose();
});
test('exclude path via tracking rules', async ({ page }) => {
  const requestContext = await setupRequestContext();
  await requestContext.post(`${API_URL}/system/settings`, {
    data: { key: 'source_roots', value: JSON.stringify([SOURCE_ROOT]) }
  });

  console.log('Step 1: Exclude a path via batch tracking');
  const trackResp = await requestContext.post(`${API_URL}/system/track/batch`, {
    data: {
      tracks: [],
      untracks: [path.join(SOURCE_ROOT, 'excluded_dir')]
    }
  });
  expect(trackResp.ok()).toBe(true);

  console.log('Step 2: Trigger scan');
  const scanResp = await requestContext.post(`${API_URL}/system/scan`);
  expect(scanResp.ok()).toBe(true);
  await new Promise(r => setTimeout(r, 3000));

  console.log('Step 3: Verify excluded file is marked ignored');
  // BUG FIX: the filesystem path was interpolated into the query string
  // unencoded; spaces, '&', '#', etc. in SOURCE_ROOT would corrupt the URL.
  const browseResp = await requestContext.get(
    `${API_URL}/system/browse?path=${encodeURIComponent(path.join(SOURCE_ROOT, 'excluded_dir'))}`
  );
  const files = await browseResp.json();
  const secretFile = (files as Array<any>).find((f: any) => f.name === 'secret.txt');
  // The excluded file may be omitted from the listing entirely; only assert
  // the ignored flag when the entry is present.
  if (secretFile) {
    expect(secretFile.ignored).toBe(true);
  }
  await requestContext.dispose();
});
test('override exclusion with include rule', async ({ page }) => {
  const requestContext = await setupRequestContext();
  await requestContext.post(`${API_URL}/system/settings`, {
    data: { key: 'source_roots', value: JSON.stringify([SOURCE_ROOT]) }
  });
  // First exclude, then include the same path (most specific wins)
  await requestContext.post(`${API_URL}/system/track/batch`, {
    data: {
      tracks: [path.join(SOURCE_ROOT, 'excluded_dir', 'secret.txt')],
      untracks: [path.join(SOURCE_ROOT, 'excluded_dir')]
    }
  });
  const scanResp = await requestContext.post(`${API_URL}/system/scan`);
  expect(scanResp.ok()).toBe(true);
  await new Promise(r => setTimeout(r, 3000));
  // BUG FIX: encode the filesystem path before placing it in the query
  // string; an unencoded SOURCE_ROOT containing spaces/'&'/'#' would
  // silently corrupt the request.
  const browseResp = await requestContext.get(
    `${API_URL}/system/browse?path=${encodeURIComponent(SOURCE_ROOT)}`
  );
  const files = await browseResp.json();
  const includedFile = (files as Array<any>).find((f: any) => f.name === 'include.txt');
  expect(includedFile).toBeDefined();
  expect(includedFile.ignored).toBe(false);
  await requestContext.dispose();
});
test('remove source root stops scanning', async ({ page }) => {
const requestContext = await setupRequestContext();
// Set a nonexistent source root
await requestContext.post(`${API_URL}/system/settings`, {
data: { key: 'source_roots', value: JSON.stringify(['/nonexistent/path']) }
});
const scanResp = await requestContext.post(`${API_URL}/system/scan`);
expect(scanResp.ok()).toBe(true);
await new Promise(r => setTimeout(r, 3000));
const statusResp = await requestContext.get(`${API_URL}/system/scan/status`);
const status = await statusResp.json();
// No files found since source doesn't exist
expect(status.total_files_found).toBe(0);
await requestContext.dispose();
});
});