From f44895d40f2cb638e076f16fde501fb187c3ac83 Mon Sep 17 00:00:00 2001
From: Adam Lamers
Date: Tue, 5 May 2026 17:13:43 -0400
Subject: [PATCH] more checks in archiver & scanner tests

---
 backend/tests/test_service_archiver.py | 23 ++++++++++++++++++
 backend/tests/test_service_scanner.py  | 33 ++++++++++++++------------
 2 files changed, 41 insertions(+), 15 deletions(-)

diff --git a/backend/tests/test_service_archiver.py b/backend/tests/test_service_archiver.py
index a54fd19..d6c0773 100644
--- a/backend/tests/test_service_archiver.py
+++ b/backend/tests/test_service_archiver.py
@@ -150,8 +150,31 @@ def test_run_backup_mocked(db_session, mocker, tmp_path):
 
     # Verify result
     db_session.expire_all()
+
+    # Media usage updated
     assert media.bytes_used > 0
 
+    # FileVersion recorded for the archived file
+    version = (
+        db_session.query(models.FileVersion)
+        .filter_by(filesystem_state_id=f1.id)
+        .first()
+    )
+    assert version is not None
+    assert version.media_id == media.id
+    assert version.offset_start == 0
+    assert version.offset_end == f1.size
+
+    # Backup job completed successfully
+    refreshed_job = db_session.get(models.Job, job.id)
+    assert refreshed_job.status == "COMPLETED"
+    assert refreshed_job.progress == 100.0
+
+    # Provider was asked to write the archive
+    mock_provider.write_archive.assert_called_once()
+    call_args = mock_provider.write_archive.call_args
+    assert call_args[0][0] == "DISK_001"  # media identifier
+
 
 def test_archiver_saturated_media_logic(db_session, mocker, tmp_path):
     """Verifies that media is marked full and priority ceded based on hardware feedback."""
diff --git a/backend/tests/test_service_scanner.py b/backend/tests/test_service_scanner.py
index ee15d50..b691bbf 100644
--- a/backend/tests/test_service_scanner.py
+++ b/backend/tests/test_service_scanner.py
@@ -176,13 +176,13 @@ def test_hash_file_batch_fast_empty():
 
 
 def test_hash_file_batch_fast_nonexistent():
-    """Tests that non-existent files are gracefully handled."""
+    """Tests that non-existent files are silently skipped."""
     if _FAST_HASH_BINARY is None:
         pytest.skip("No native hash binary available")
 
     results = _hash_file_batch_fast(["/nonexistent/path"], _FAST_HASH_BINARY)
-    # Non-existent files may or may not appear in results depending on binary behavior
-    assert isinstance(results, dict)
+    # Non-existent files should not produce hash entries
+    assert results == {}
 
 
 def test_missing_file_marked_deleted_at_end_of_scan(db_session, mocker):
@@ -275,8 +275,11 @@ def test_missing_file_during_hashing_marked_deleted(db_session, mocker):
     assert f.is_deleted is True
 
 
-def test_missing_file_skipped_in_hashing_query(db_session):
-    """Tests that already-deleted files are excluded from hashing targets."""
+def test_deleted_files_excluded_from_hashing(db_session):
+    """Tests that run_hashing skips already-deleted files."""
+    scanner = ScannerService()
+    scanner.is_running = False  # Causes run_hashing to exit when no targets found
+
     deleted_file = models.FilesystemState(
         file_path="/data/deleted.bin",
         size=10,
@@ -288,13 +288,13 @@
     db_session.add(deleted_file)
     db_session.commit()
 
-    pending = (
-        db_session.query(models.FilesystemState)
-        .filter(
-            models.FilesystemState.sha256_hash.is_(None),
-            models.FilesystemState.is_ignored.is_(False),
-            models.FilesystemState.is_deleted.is_(False),
-        )
-        .all()
-    )
-    assert len(pending) == 0
+    scanner.run_hashing()
+
+    # Deleted file should not have been processed (hash still None)
+    db_session.refresh(deleted_file)
+    assert deleted_file.sha256_hash is None
+
+    # A HASH job should have been created and completed (no work to do)
+    job = db_session.query(models.Job).filter_by(job_type="HASH").first()
+    assert job is not None
+    assert job.status == "COMPLETED"