fix: destroy charts when switching tabs

Author: Jordan Wages
Date:   2025-07-19 17:03:48 -05:00
Commit: 297c913f2a
7 changed files with 22 additions and 18 deletions


@@ -105,7 +105,9 @@ def hits(domain: Optional[str] = typer.Option(None, help="Filter by domain")) ->
 @app.command("cache-ratio")
-def cache_ratio_cmd(domain: Optional[str] = typer.Option(None, help="Filter by domain")) -> None:
+def cache_ratio_cmd(
+    domain: Optional[str] = typer.Option(None, help="Filter by domain")
+) -> None:
     """Display cache hit ratio as a percentage."""
     ratio = get_cache_ratio(domain) * 100
     if domain:
@@ -115,7 +117,11 @@ def cache_ratio_cmd(domain: Optional[str] = typer.Option(None, help="Filter by d
 @app.command("check-missing-domains")
-def check_missing_domains(json_output: bool = typer.Option(False, "--json", help="Output missing domains as JSON")) -> None:
+def check_missing_domains(
+    json_output: bool = typer.Option(
+        False, "--json", help="Output missing domains as JSON"
+    )
+) -> None:
     """Show domains present in the database but absent from Nginx config."""
     try:
         from scripts.generate_reports import _get_domains as _db_domains
@@ -151,9 +157,7 @@ def check_missing_domains(json_output: bool = typer.Option(False, "--json", help
 @app.command("suggest-cache")
 def suggest_cache(
-    threshold: int = typer.Option(
-        10, help="Minimum number of MISS entries to report"
-    ),
+    threshold: int = typer.Option(10, help="Minimum number of MISS entries to report"),
     json_output: bool = typer.Option(False, "--json", help="Output results as JSON"),
 ) -> None:
     """Suggest domain/path pairs that could benefit from caching.
@@ -211,9 +215,7 @@ def suggest_cache(
 @app.command("detect-threats")
 def detect_threats(
     hours: int = typer.Option(1, help="Number of recent hours to analyze"),
-    ip_threshold: int = typer.Option(
-        100, help="Requests from a single IP to flag"
-    ),
+    ip_threshold: int = typer.Option(100, help="Requests from a single IP to flag"),
 ) -> None:
     """Detect potential security threats from recent logs."""
@@ -316,9 +318,7 @@ def detect_threats(
         """,
         (recent_start_s, recent_end_s, ip_threshold),
     )
-    high_ip_requests = [
-        {"ip": ip, "requests": cnt} for ip, cnt in cur.fetchall()
-    ]
+    high_ip_requests = [{"ip": ip, "requests": cnt} for ip, cnt in cur.fetchall()]
     conn.close()
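
The hunk above shows only the tail of the per-IP query in detect_threats. Below is a minimal, self-contained sketch of the flagging pattern it implies, assuming a SQLite database with a logs table holding ip and timestamp columns; the function name, table, and columns are assumptions, since the real schema and query are not visible in this diff.

import sqlite3


def flag_high_traffic_ips(db_path, recent_start_s, recent_end_s, ip_threshold=100):
    """Return IPs whose request count within the window exceeds ip_threshold."""
    conn = sqlite3.connect(db_path)
    cur = conn.cursor()
    # Assumed schema: logs(ip TEXT, timestamp INTEGER); adjust to the real tables.
    cur.execute(
        """
        SELECT ip, COUNT(*) AS cnt
        FROM logs
        WHERE timestamp BETWEEN ? AND ?
        GROUP BY ip
        HAVING cnt > ?
        """,
        (recent_start_s, recent_end_s, ip_threshold),
    )
    high_ip_requests = [{"ip": ip, "requests": cnt} for ip, cnt in cur.fetchall()]
    conn.close()
    return high_ip_requests

Only the comprehension and conn.close() correspond to lines in the hunk; everything above them is a guess at the surrounding query.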


@@ -61,7 +61,9 @@ try:
             suffix = match.group(1)
             number = int(suffix.lstrip(".")) if suffix else 0
             log_files.append((number, os.path.join(LOG_DIR, f)))
-    log_files = [path for _, path in sorted(log_files, key=lambda x: x[0], reverse=True)]
+    log_files = [
+        path for _, path in sorted(log_files, key=lambda x: x[0], reverse=True)
+    ]
 except FileNotFoundError:
     print(f"[ERROR] Log directory not found: {LOG_DIR}")
     exit(1)
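
The reflowed comprehension in this hunk orders rotated log files by their numeric suffix before parsing. A standalone sketch of that ordering follows, assuming nginx-style rotation names such as access.log, access.log.1, access.log.2; the regex and directory constant here are illustrative, not taken from the script.

import os
import re

LOG_DIR = "/var/log/nginx"  # assumed location
LOG_PATTERN = re.compile(r"^access\.log(\.\d+)?$")  # assumed rotation naming

log_files = []
try:
    for f in os.listdir(LOG_DIR):
        match = LOG_PATTERN.match(f)
        if match:
            suffix = match.group(1)
            # "access.log" has no suffix and sorts as 0; "access.log.3" sorts as 3.
            number = int(suffix.lstrip(".")) if suffix else 0
            log_files.append((number, os.path.join(LOG_DIR, f)))
    # Highest rotation number first, so the oldest rotated file comes before the current log.
    log_files = [
        path for _, path in sorted(log_files, key=lambda x: x[0], reverse=True)
    ]
except FileNotFoundError:
    print(f"[ERROR] Log directory not found: {LOG_DIR}")
    raise SystemExit(1)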


@@ -93,4 +93,3 @@ def parse_servers(paths: Set[Path]) -> List[Dict[str, str]]:
             entry["root"] = " ".join(directives["root"])
         servers.append(entry)
     return servers
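
For context on the last visible line of this hunk: entry["root"] joins the tokens of a root directive into a single string. A tiny self-contained sketch of that step, assuming directives are collected as lists of whitespace-split tokens (an assumption; the parser body is not shown in this hunk):

from typing import Dict, List

# Assumed intermediate shape: each directive name maps to its whitespace-split tokens.
directives: Dict[str, List[str]] = {
    "server_name": ["example.com", "www.example.com"],
    "root": ["/var/www/example"],
}

entry: Dict[str, str] = {}
if "root" in directives:
    entry["root"] = " ".join(directives["root"])

print(entry)  # {'root': '/var/www/example'}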