Don't load directory treemap by default
Continuous Integration / backend-tests (push) Successful in 48s
Continuous Integration / frontend-check (push) Successful in 25s
Continuous Integration / e2e-tests (push) Failing after 7m49s

This commit is contained in:
2026-05-01 20:10:58 -04:00
parent c56973c254
commit 901b17f7cd
10 changed files with 296 additions and 98 deletions
+75 -1
View File
@@ -567,10 +567,84 @@ def get_system_analytics(db_session: Session = Depends(get_db)):
{"path": dup[0], "size": dup[1], "copies": dup[2], "saved": dup[3]}
for dup in duplicate_offenders
],
"directories": convert_tree_to_list(nested_dir_map, 10),
}
@router.get("/directories")
def get_directory_treemap(db_session: Session = Depends(get_db)):
    """Returns directory tree data for treemap visualization."""
    # Per-directory rollup. The RTRIM(x, REPLACE(x, '/', '')) pair is the
    # SQLite dirname idiom: it strips the trailing filename characters,
    # leaving the containing directory prefix.
    dir_rollup_sql = text("""
        SELECT
            RTRIM(file_path, REPLACE(file_path, '/', '')) as dir_path,
            SUM(size) as byte_total,
            MAX(mtime) as latest_mtime
        FROM filesystem_state
        WHERE is_ignored = 0
        GROUP BY dir_path
    """)
    rollup_rows = db_session.execute(dir_rollup_sql).fetchall()

    # Build a nested {segment: {size, mtime, children, fullPath}} tree,
    # folding each directory's totals into every ancestor node on the way down.
    tree_root = {}
    for dir_path, byte_total, latest_mtime in rollup_rows:
        if not dir_path:
            continue
        segments = [part for part in dir_path.split("/") if part]
        is_absolute = dir_path.startswith("/")
        cursor = tree_root
        prefix = ""
        for seg in segments:
            if prefix:
                prefix = prefix + "/" + seg
            else:
                # First segment: keep a leading slash only for absolute paths.
                prefix = "/" + seg if is_absolute else seg
            node = cursor.setdefault(
                seg,
                {
                    "size": 0,
                    "mtime": 0,
                    "children": {},
                    "fullPath": prefix,
                },
            )
            node["size"] += byte_total or 0
            node["mtime"] = max(node["mtime"], latest_mtime or 0)
            cursor = node["children"]

    # Collapse uninformative single-child root chains (e.g. "/" -> "home")
    # so the treemap starts at the first branching directory.
    while len(tree_root) == 1:
        (only_key,) = tree_root
        if not tree_root[only_key]["children"]:
            break
        tree_root = tree_root[only_key]["children"]

    def _flatten(level_nodes, depth_limit, depth=0):
        # Serialize one tree level, largest-first, capped at 15 entries
        # per level and depth_limit levels deep.
        if depth >= depth_limit:
            return []
        entries = [
            {
                "path": name,
                "size": info["size"],
                "mtime": info["mtime"],
                "fullPath": info["fullPath"],
                "children": _flatten(info["children"], depth_limit, depth + 1),
            }
            for name, info in level_nodes.items()
        ]
        entries.sort(key=lambda entry: entry["size"], reverse=True)
        return entries[:15]

    return _flatten(tree_root, 10)
@router.get("/detect")
def detect_unregistered_media(db_session: Session = Depends(get_db)):
"""Scans all configured hardware providers for newly inserted, unregistered media."""
+10 -2
View File
@@ -46,6 +46,7 @@ class DashboardStatsSchema(BaseModel):
ignored_data_size: int
unprotected_files_count: int
unprotected_data_size: int
discrepancies_count: int
media_distribution: Dict[str, int]
last_scan_time: Optional[datetime]
redundancy_ratio: float
@@ -253,7 +254,9 @@ def get_dashboard_stats(db_session: Session = Depends(get_db)):
SELECT fv.filesystem_state_id FROM file_versions fv
JOIN storage_media sm ON sm.id = fv.media_id
WHERE sm.status IN ('active', 'full')
) THEN size ELSE 0 END) as archived_size
) THEN size ELSE 0 END) as archived_size,
SUM(CASE WHEN is_deleted = 1 THEN 1 ELSE 0 END) as missing_count,
SUM(CASE WHEN is_deleted = 1 AND missing_acknowledged_at IS NULL AND is_ignored = 0 THEN 1 ELSE 0 END) as active_discrepancies_count
FROM filesystem_state
""")
@@ -265,10 +268,14 @@ def get_dashboard_stats(db_session: Session = Depends(get_db)):
hashed_count = res[6] or 0
eligible_count = res[7] or 0
archived_size = res[8] or 0
# missing_count = res[9] or 0
active_discrepancies_count = res[10] or 0
else:
total_count = total_size = ignored_count = ignored_size = unprotected_count = (
unprotected_size
) = hashed_count = eligible_count = archived_size = 0
) = hashed_count = eligible_count = archived_size = (
active_discrepancies_count
) = 0
media_counts = {
"LTO": db_session.query(models.StorageMedia)
@@ -310,6 +317,7 @@ def get_dashboard_stats(db_session: Session = Depends(get_db)):
ignored_data_size=ignored_size,
unprotected_files_count=unprotected_count,
unprotected_data_size=unprotected_size,
discrepancies_count=active_discrepancies_count,
media_distribution=media_counts,
last_scan_time=last_scan.completed_at if last_scan else None,
redundancy_ratio=round(redundancy_percentage, 1),