#!/usr/bin/env python3
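"""Incrementally ingest nginx access logs into a SQLite database.

Scans LOG_DIR for access.log and rotated access.log.N files, parses each
line with LOG_FORMAT_REGEX, and appends rows to the `logs` table in
DB_FILE, skipping entries at or before the newest timestamp already stored.
"""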

import os
import re
import sqlite3
import sys
from datetime import datetime, timezone

LOG_DIR = "/var/log/nginx"
DB_FILE = "database/ngxstat.db"
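
# The regex below implies a custom log_format roughly like the following
# (reconstructed from the capture groups; the exact nginx variables, e.g.
# $upstream_cache_status for the trailing field, are an assumption):
#   log_format ngxstat '$remote_addr - $host [$time_local] "$request" '
#                      '$status $body_bytes_sent "$http_referer" '
#                      '"$http_user_agent" $upstream_cache_status';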
LOG_FILE_PATTERN = re.compile(r"access\.log(\.\d+)?$")
LOG_FORMAT_REGEX = re.compile(
    r'(?P<ip>\S+) - (?P<host>\S+) \[(?P<time>.*?)\] "(?P<request>.*?)" '
    r'(?P<status>\d{3}) (?P<bytes_sent>\d+) "(?P<referer>.*?)" "(?P<user_agent>.*?)" (?P<cache_status>\S+)'
)
DATE_FMT = "%d/%b/%Y:%H:%M:%S %z"

os.makedirs("database", exist_ok=True)
conn = sqlite3.connect(DB_FILE)
cursor = conn.cursor()
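
# One row per request; `time` is stored as a naive-UTC "YYYY-MM-DD HH:MM:SS"
# string, and the index below supports time-range queries.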
cursor.execute(
    """
    CREATE TABLE IF NOT EXISTS logs (
        id INTEGER PRIMARY KEY,
        ip TEXT,
        host TEXT,
        time TEXT,
        request TEXT,
        status INTEGER,
        bytes_sent INTEGER,
        referer TEXT,
        user_agent TEXT,
        cache_status TEXT
    )
    """
)
cursor.execute("CREATE INDEX IF NOT EXISTS idx_logs_time ON logs(time)")

conn.commit()
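
# Resume incrementally: look up the newest timestamp already in the table so
# re-runs only insert entries that are strictly newer.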
cursor.execute("SELECT time FROM logs ORDER BY id DESC LIMIT 1")
|
|
row = cursor.fetchone()
|
|
last_dt = None
|
|
if row and row[0]:
|
|
# Support both legacy log date format and ISO timestamps
|
|
for fmt in ("%Y-%m-%d %H:%M:%S", DATE_FMT):
|
|
try:
|
|
parsed = datetime.strptime(row[0], fmt)
|
|
if fmt == DATE_FMT:
|
|
parsed = parsed.astimezone(timezone.utc).replace(tzinfo=None)
|
|
last_dt = parsed
|
|
break
|
|
except ValueError:
|
|
continue
|
|
|
|
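
# Collect access.log plus rotated access.log.N files and order them so the
# highest rotation number (the oldest file) is parsed first, keeping inserts
# in chronological order.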
try:
    log_files = []
    for f in os.listdir(LOG_DIR):
        match = LOG_FILE_PATTERN.match(f)
        if match:
            suffix = match.group(1)
            number = int(suffix.lstrip(".")) if suffix else 0
            log_files.append((number, os.path.join(LOG_DIR, f)))
    log_files = [
        path for _, path in sorted(log_files, key=lambda x: x[0], reverse=True)
    ]
except FileNotFoundError:
    print(f"[ERROR] Log directory not found: {LOG_DIR}")
    sys.exit(1)

print(f"[INFO] Found {len(log_files)} log files.")

inserted = 0
for log_file in log_files:
    print(f"[INFO] Parsing {log_file}...")
    with open(log_file, "r", encoding="utf-8", errors="ignore") as f:
        for line in f:
            match = LOG_FORMAT_REGEX.match(line.strip())
            if match:
                data = match.groupdict()
                try:
                    entry_dt = datetime.strptime(data["time"], DATE_FMT)
                except ValueError:
                    continue
                # Normalize to naive UTC so comparisons and stored values agree
                entry_dt = entry_dt.astimezone(timezone.utc).replace(tzinfo=None)
                # Skip anything at or before the last ingested timestamp; note
                # that this also drops new entries sharing that exact second
                if last_dt and entry_dt <= last_dt:
                    continue
                cursor.execute(
                    """
                    INSERT INTO logs (
                        ip, host, time, request, status, bytes_sent,
                        referer, user_agent, cache_status
                    ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
                    """,
                    (
                        data["ip"],
                        data["host"],
                        entry_dt.strftime("%Y-%m-%d %H:%M:%S"),
                        data["request"],
                        int(data["status"]),
                        int(data["bytes_sent"]),
                        data["referer"],
                        data["user_agent"],
                        data["cache_status"],
                    ),
                )
                last_dt = entry_dt
                inserted += 1

conn.commit()
conn.close()
print(f"[DONE] Inserted {inserted} entries into {DB_FILE}.")