fix: destroy charts when switching tabs
commit 297c913f2a
parent bf09af6b5e
7 changed files with 22 additions and 18 deletions
@@ -105,7 +105,9 @@ def hits(domain: Optional[str] = typer.Option(None, help="Filter by domain")) -> None:
 
 
 @app.command("cache-ratio")
-def cache_ratio_cmd(domain: Optional[str] = typer.Option(None, help="Filter by domain")) -> None:
+def cache_ratio_cmd(
+    domain: Optional[str] = typer.Option(None, help="Filter by domain")
+) -> None:
     """Display cache hit ratio as a percentage."""
     ratio = get_cache_ratio(domain) * 100
     if domain:
@@ -115,7 +117,11 @@ def cache_ratio_cmd(domain: Optional[str] = typer.Option(None, help="Filter by domain")) -> None:
 
 
 @app.command("check-missing-domains")
-def check_missing_domains(json_output: bool = typer.Option(False, "--json", help="Output missing domains as JSON")) -> None:
+def check_missing_domains(
+    json_output: bool = typer.Option(
+        False, "--json", help="Output missing domains as JSON"
+    )
+) -> None:
     """Show domains present in the database but absent from Nginx config."""
     try:
         from scripts.generate_reports import _get_domains as _db_domains
@@ -151,9 +157,7 @@ def check_missing_domains(json_output: bool = typer.Option(False, "--json", help="Output missing domains as JSON")) -> None:
 
 @app.command("suggest-cache")
 def suggest_cache(
-    threshold: int = typer.Option(
-        10, help="Minimum number of MISS entries to report"
-    ),
+    threshold: int = typer.Option(10, help="Minimum number of MISS entries to report"),
     json_output: bool = typer.Option(False, "--json", help="Output results as JSON"),
 ) -> None:
     """Suggest domain/path pairs that could benefit from caching.
@@ -211,9 +215,7 @@ def suggest_cache(
 @app.command("detect-threats")
 def detect_threats(
     hours: int = typer.Option(1, help="Number of recent hours to analyze"),
-    ip_threshold: int = typer.Option(
-        100, help="Requests from a single IP to flag"
-    ),
+    ip_threshold: int = typer.Option(100, help="Requests from a single IP to flag"),
 ) -> None:
     """Detect potential security threats from recent logs."""
 
@@ -316,9 +318,7 @@ def detect_threats(
         """,
         (recent_start_s, recent_end_s, ip_threshold),
     )
-    high_ip_requests = [
-        {"ip": ip, "requests": cnt} for ip, cnt in cur.fetchall()
-    ]
+    high_ip_requests = [{"ip": ip, "requests": cnt} for ip, cnt in cur.fetchall()]
 
     conn.close()
 
@@ -61,7 +61,9 @@ try:
         suffix = match.group(1)
         number = int(suffix.lstrip(".")) if suffix else 0
         log_files.append((number, os.path.join(LOG_DIR, f)))
-    log_files = [path for _, path in sorted(log_files, key=lambda x: x[0], reverse=True)]
+    log_files = [
+        path for _, path in sorted(log_files, key=lambda x: x[0], reverse=True)
+    ]
 except FileNotFoundError:
     print(f"[ERROR] Log directory not found: {LOG_DIR}")
     exit(1)
@@ -93,4 +93,3 @@ def parse_servers(paths: Set[Path]) -> List[Dict[str, str]]:
             entry["root"] = " ".join(directives["root"])
         servers.append(entry)
     return servers
-
@@ -180,6 +180,10 @@
     });
 }
 
+function destroyAllCharts() {
+    Object.values(containers).forEach(destroyCharts);
+}
+
 function loadReports() {
     let path;
     let container;
@@ -296,6 +300,7 @@
 }
 
 function switchTab(name) {
+    destroyAllCharts();
     currentTab = name;
     tabs.forEach(tab => {
         tab.classList.toggle('is-active', tab.dataset.tab === name);
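The two JavaScript hunks above carry the fix named in the commit message: a destroyAllCharts() helper tears down the charts in every tab container, and switchTab() calls it before activating the new tab, so re-rendering a tab no longer stacks new charts onto canvases that still hold live instances. The per-container destroyCharts helper is referenced but not shown in this diff; the sketch below is one plausible shape for it, assuming Chart.js 3+ (where Chart.getChart(canvas) returns the instance bound to a canvas) and assuming containers maps tab names to panel elements. Both are assumptions, not confirmed by the commit.

// Hypothetical sketch, not part of this commit.
// Assumes Chart.js 3+ and a `containers` object of tab-panel elements.
function destroyCharts(container) {
    // Every rendered chart sits on a <canvas> inside the tab panel.
    container.querySelectorAll('canvas').forEach(canvas => {
        const chart = Chart.getChart(canvas); // undefined if no chart is bound
        if (chart) {
            chart.destroy(); // releases the canvas so the next render starts clean
        }
    });
}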
@@ -15,10 +15,10 @@ def sample_logs(tmp_path):
     log_dir.mkdir(parents=True, exist_ok=True)
 
     (log_dir / "access.log.1").write_text(
-        "127.0.0.1 - example.com [01/Jan/2024:10:00:00 +0000] \"GET / HTTP/1.1\" 200 123 \"-\" \"curl\" MISS\n"
+        '127.0.0.1 - example.com [01/Jan/2024:10:00:00 +0000] "GET / HTTP/1.1" 200 123 "-" "curl" MISS\n'
     )
     (log_dir / "access.log").write_text(
-        "127.0.0.1 - example.com [01/Jan/2024:10:05:00 +0000] \"GET /about HTTP/1.1\" 200 123 \"-\" \"curl\" MISS\n"
+        '127.0.0.1 - example.com [01/Jan/2024:10:05:00 +0000] "GET /about HTTP/1.1" 200 123 "-" "curl" MISS\n'
     )
 
     yield log_dir
@@ -59,4 +59,3 @@ def test_idempotent_import(sample_logs, tmp_path):
 
     assert first_count == 2
     assert second_count == first_count
-
@@ -67,4 +67,3 @@ server {
     assert servers[1]["server_name"] == "example.org"
     assert servers[1]["listen"] == "443 ssl"
     assert "proxy_cache" not in servers[1]
-
@@ -14,7 +14,7 @@ def test_script_invokes_commands(tmp_path):
 
     calls = tmp_path / "calls.txt"
     python_stub = tmp_path / "python"
-    python_stub.write_text(f"#!/usr/bin/env bash\necho \"$*\" >> \"{calls}\"\n")
+    python_stub.write_text(f'#!/usr/bin/env bash\necho "$*" >> "{calls}"\n')
     python_stub.chmod(0o755)
     (tmp_path / "python3").write_text(f"#!/usr/bin/env bash\nexit 0\n")
     (tmp_path / "python3").chmod(0o755)