Initial Code Commit
commit 3b709107bb
5 changed files with 386 additions and 0 deletions
Dockerfile (Normal file, 10 additions)
@@ -0,0 +1,10 @@
FROM python:3.11-slim

# install only Python deps
RUN pip install --no-cache-dir requests icalendar python-dateutil

WORKDIR /app
COPY sync.py dispatch.py entrypoint.sh /app/
RUN chmod +x /app/entrypoint.sh

ENTRYPOINT ["/app/entrypoint.sh"]
caldav-cron (Normal file, 9 additions)
@@ -0,0 +1,9 @@
# ── environment is inherited from Docker, so your .env values will be visible ──

# run sync every 5 minutes
*/5 * * * * root /usr/local/bin/python /app/sync.py

# run dispatch every minute
* * * * * root /usr/local/bin/python /app/dispatch.py

# (blank line required at end)
dispatch.py (Normal file, 87 additions)
@@ -0,0 +1,87 @@
#!/usr/bin/env python3
import os
import sqlite3
import smtplib
import logging
from email.message import EmailMessage
from datetime import datetime, timezone

# ─── Environment & Logging ─────────────────────────────────────────────────────
DB_PATH = os.getenv("DB_PATH", "/data/sync_state.db")

# SMTP config from env; convert TLS/STARTTLS flags to real booleans
SMTP_HOST = os.getenv("SMTP_HOST")
SMTP_PORT = int(os.getenv("SMTP_PORT", "25"))
SMTP_USERNAME = os.getenv("SMTP_USERNAME")
SMTP_PASSWORD = os.getenv("SMTP_PASSWORD")
SMTP_USE_SSL = os.getenv("SMTP_USE_TLS", "false").lower() in ("1", "true", "yes")
SMTP_USE_STARTTLS = os.getenv("SMTP_USE_STARTTLS", "false").lower() in ("1", "true", "yes")

EMAIL_FROM = os.getenv("EMAIL_FROM")
EMAIL_TO = os.getenv("EMAIL_TO")
EMAIL_SUBJECT_PREFIX = os.getenv("EMAIL_SUBJECT_PREFIX", "")

logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s [%(levelname)s] %(message)s"
)
logger = logging.getLogger(__name__)

# ─── Email sender ──────────────────────────────────────────────────────────────
def send_email(subject: str, body: str, to_addrs: str):
    # 1) Choose SSL socket or plain
    if SMTP_USE_SSL:
        server = smtplib.SMTP_SSL(SMTP_HOST, SMTP_PORT)
    else:
        server = smtplib.SMTP(SMTP_HOST, SMTP_PORT)
        if SMTP_USE_STARTTLS:
            server.ehlo()
            server.starttls()
            server.ehlo()

    # 2) Login if creds provided
    if SMTP_USERNAME and SMTP_PASSWORD:
        server.login(SMTP_USERNAME, SMTP_PASSWORD)

    # 3) Build & send
    msg = EmailMessage()
    msg["Subject"] = subject
    msg["From"] = EMAIL_FROM
    msg["To"] = to_addrs
    msg.set_content(body)

    server.send_message(msg)
    server.quit()

# ─── Dispatch loop ─────────────────────────────────────────────────────────────
def dispatch():
    conn = sqlite3.connect(DB_PATH)
    c = conn.cursor()

    now_iso = datetime.now(timezone.utc).isoformat()
    c.execute("""
        SELECT id, event_uid, trigger_dt, summary, description
        FROM reminders
        WHERE trigger_dt <= ? AND sent_flag = 0
    """, (now_iso,))
    rows = c.fetchall()

    for rid, uid, trigger_dt, summary, description in rows:
        subject = f"{EMAIL_SUBJECT_PREFIX} {summary}"
        body = (
            f"{summary}\n\n"
            f"{description}\n\n"
            f"Scheduled at: {trigger_dt}"
        )
        try:
            send_email(subject, body, EMAIL_TO)
            c.execute("UPDATE reminders SET sent_flag = 1 WHERE id = ?", (rid,))
            conn.commit()
            logger.info(f"Sent reminder {rid} ({uid})")
        except Exception as e:
            logger.error(f"Failed to send reminder {rid}: {e}")

    conn.close()

if __name__ == "__main__":
    dispatch()
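To smoke-test the dispatch path without waiting for a real CalDAV alarm, one already-due row can be seeded into the reminders table and dispatch() called directly. The sketch below is a hypothetical helper, not part of this commit: it assumes the table schema created by init_db() in sync.py, the default DB_PATH, working SMTP_* / EMAIL_* settings in the environment, and that it runs from /app so dispatch.py is importable.

#!/usr/bin/env python3
# Sketch only (not in this commit): seed one due reminder and run dispatch().
# Assumes the reminders schema from sync.py's init_db(), the default DB_PATH,
# and SMTP_*/EMAIL_* variables already exported (e.g. via --env-file).
import os
import sqlite3
from datetime import datetime, timezone

import dispatch  # dispatch.py from this commit, importable when run from /app

DB_PATH = os.getenv("DB_PATH", "/data/sync_state.db")

conn = sqlite3.connect(DB_PATH)
conn.execute("""
    CREATE TABLE IF NOT EXISTS reminders (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        event_uid TEXT,
        trigger_dt TEXT,
        summary TEXT,
        description TEXT,
        sent_flag INTEGER DEFAULT 0,
        UNIQUE(event_uid, trigger_dt)
    )
""")
# A trigger time in the past is immediately "due" for dispatch().
conn.execute(
    "INSERT OR IGNORE INTO reminders (event_uid, trigger_dt, summary, description) VALUES (?,?,?,?)",
    ("test-uid", datetime.now(timezone.utc).isoformat(), "Test reminder", "Seeded by hand"),
)
conn.commit()
conn.close()

dispatch.dispatch()  # should send one email and set sent_flag = 1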
entrypoint.sh (Normal file, 57 additions)
@@ -0,0 +1,57 @@
#!/bin/sh
set -e

# ─── Map / export all of your .env into what the Python scripts expect ─────────

# CalDAV (your .env uses CALDAV_BASE_URL and CALDAV_CALENDARS)
export CALDAV_BASE="$CALDAV_BASE_URL"
export CALENDAR_URLS="$CALDAV_CALENDARS"

# credentials
export CALDAV_USERNAME
export CALDAV_PASSWORD

# sync settings
export FETCH_WINDOW_DAYS="${FETCH_WINDOW_DAYS:-90}"
export DB_PATH="${DB_PATH:-/data/sync_state.db}"
export RETENTION_DAYS="${RETENTION_DAYS:-30}"

# SMTP / dispatch settings (already in env from --env-file)
export SMTP_HOST
export SMTP_PORT
export SMTP_USERNAME
export SMTP_PASSWORD
export SMTP_USE_TLS
export SMTP_USE_STARTTLS
export EMAIL_FROM
export EMAIL_TO
export EMAIL_SUBJECT_PREFIX

# optional override intervals (in seconds)
: "${SYNC_INTERVAL:=300}"      # default 5min
: "${DISPATCH_INTERVAL:=60}"   # default 1min

# ─── Initial run so you don’t have to wait ────────────────────────────────────
echo "[entrypoint] Starting initial sync at $(date -u '+%Y-%m-%dT%H:%M:%SZ')"
python3 /app/sync.py || echo "[entrypoint] sync failed on first run"

# ─── Background sync loop ────────────────────────────────────────────────────
(
  while true; do
    sleep "$SYNC_INTERVAL"
    echo "[entrypoint] Running sync at $(date -u '+%Y-%m-%dT%H:%M:%SZ')"
    python3 /app/sync.py || echo "[entrypoint] sync failed"
  done
) &

# ─── Background dispatch loop ────────────────────────────────────────────────
(
  while true; do
    sleep "$DISPATCH_INTERVAL"
    echo "[entrypoint] Running dispatch at $(date -u '+%Y-%m-%dT%H:%M:%SZ')"
    python3 /app/dispatch.py || echo "[entrypoint] dispatch failed"
  done
) &

# ─── Wait on both loops (keeps container alive) ───────────────────────────────
wait
sync.py (Normal file, 223 additions)
@@ -0,0 +1,223 @@
#!/usr/bin/env python3
import os
import sqlite3
import logging
import requests
import xml.etree.ElementTree as ET
from datetime import datetime, timedelta, timezone, date
from urllib.parse import urljoin
from icalendar import Calendar
from dateutil.rrule import rrulestr
from dateutil.parser import parse as parse_dt  # used for absolute VALARM triggers

# ─── Configuration from environment ─────────────────────────────────────────────
CALDAV_BASE = os.getenv("CALDAV_BASE", "https://example.com/")
USERNAME = os.getenv("CALDAV_USERNAME", "your-username")
PASSWORD = os.getenv("CALDAV_PASSWORD", "your-password")
# Optional comma-separated override: e.g.
# CALENDAR_URLS="https://…/personal/,https://…/work/"
CALENDAR_URLS_ENV = os.getenv("CALENDAR_URLS", "")
FETCH_WINDOW_DAYS = int(os.getenv("FETCH_WINDOW_DAYS", "90"))
DB_PATH = os.getenv("DB_PATH", "/data/sync_state.db")

# ─── Logging setup ───────────────────────────────────────────────────────────────
logging.basicConfig(
    level=os.getenv("LOG_LEVEL", "INFO"),
    format="%(asctime)s [%(levelname)s] %(message)s"
)
logger = logging.getLogger(__name__)

# ─── Database initialization ────────────────────────────────────────────────────
def init_db():
    conn = sqlite3.connect(DB_PATH)
    c = conn.cursor()
    c.execute("""
        CREATE TABLE IF NOT EXISTS events (
            uid TEXT PRIMARY KEY,
            lastmod TEXT
        )
    """)
    c.execute("""
        CREATE TABLE IF NOT EXISTS reminders (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            event_uid TEXT,
            trigger_dt TEXT,
            summary TEXT,
            description TEXT,
            sent_flag INTEGER DEFAULT 0,
            UNIQUE(event_uid, trigger_dt)
        )
    """)
    conn.commit()
    return conn

# ─── Calendar discovery ─────────────────────────────────────────────────────────
def discover_calendars():
    if CALENDAR_URLS_ENV:
        urls = [u.strip() for u in CALENDAR_URLS_ENV.split(",") if u.strip()]
        logger.info(f"Using override: {urls}")
        return urls

    # autodiscover via PROPFIND
    root_coll = urljoin(CALDAV_BASE, f"remote.php/dav/calendars/{USERNAME}/")
    body = """<?xml version="1.0"?>
    <d:propfind xmlns:d="DAV:">
      <d:prop><d:resourcetype/></d:prop>
    </d:propfind>
    """
    headers = {"Depth": "1", "Content-Type": "application/xml"}
    resp = requests.request("PROPFIND", root_coll,
                            auth=(USERNAME, PASSWORD),
                            headers=headers, data=body)
    resp.raise_for_status()
    tree = ET.fromstring(resp.text)
    ns = {"d": "DAV:"}
    cals = []
    for r in tree.findall(".//d:response", ns):
        href = r.find("d:href", ns).text
        # skip the root itself
        if href.rstrip("/").endswith(f"calendars/{USERNAME}"):
            continue
        full = urljoin(CALDAV_BASE, href)
        cals.append(full)
    logger.info(f"Discovered calendars: {cals}")
    return cals

# ─── Fetch report XML for one calendar ───────────────────────────────────────────
def fetch_report_xml(caldav_url):
    body = """<?xml version="1.0" encoding="UTF-8"?>
    <calendar-query xmlns="urn:ietf:params:xml:ns:caldav">
      <prop><getetag/><getlastmodified/><calendar-data/></prop>
      <filter><comp-filter name="VCALENDAR">
        <comp-filter name="VEVENT"/>
      </comp-filter></filter>
    </calendar-query>
    """
    resp = requests.request(
        "REPORT",
        caldav_url,
        auth=(USERNAME, PASSWORD),
        headers={"Depth": "1", "Content-Type": "application/xml"},
        data=body
    )
    resp.raise_for_status()
    return resp.text

# ─── Parse the REPORT response ─────────────────────────────────────────────────
def parse_report(xml_text):
    ns = {"d": "DAV:"}
    root = ET.fromstring(xml_text)
    for resp in root.findall(".//d:response", ns):
        href = resp.find("d:href", ns).text
        lastmod = resp.find(".//d:getlastmodified", ns).text
        yield href, lastmod

# ─── Download a single .ics by href ─────────────────────────────────────────────
def download_ics(href):
    url = urljoin(CALDAV_BASE, href)
    r = requests.get(url, auth=(USERNAME, PASSWORD))
    r.raise_for_status()
    return r.content

# ─── Expand VEVENT alarms, handling RRULE and timezones ────────────────────────
def expand_event_alarms(ics_bytes, window_days=FETCH_WINDOW_DAYS):
    cal = Calendar.from_ical(ics_bytes)
    now = datetime.now(timezone.utc)
    horizon = now + timedelta(days=window_days)
    reminders = []

    for comp in cal.walk():
        if comp.name != "VEVENT":
            continue

        # UID
        uid = str(comp.get("UID"))

        # DTSTART normalized to aware datetime
        dt = comp.decoded("DTSTART")
        if isinstance(dt, date) and not isinstance(dt, datetime):
            dtstart = datetime.combine(dt, datetime.min.time(), tzinfo=timezone.utc)
        else:
            dtstart = dt
            if dtstart.tzinfo is None:
                dtstart = dtstart.replace(tzinfo=timezone.utc)
            else:
                dtstart = dtstart.astimezone(timezone.utc)

        # Summary & Description
        summary = str(comp.get("SUMMARY", "(no title)"))
        description = str(comp.get("DESCRIPTION", ""))

        # VALARM blocks with EMAIL action
        alarms = [
            a for a in comp.subcomponents
            if a.name == "VALARM" and a.get("ACTION") == "EMAIL"
        ]
        if not alarms:
            continue

        # Recurrence expansion
        if comp.get("RRULE"):
            rule_str = comp["RRULE"].to_ical().decode()
            rule = rrulestr(rule_str, dtstart=dtstart)
            occs = rule.between(now, horizon, inc=True)
        else:
            occs = [dtstart] if dtstart >= now else []

        for occ in occs:
            for alarm in alarms:
                trig = alarm.decoded("TRIGGER")
                if isinstance(trig, timedelta):
                    trigger_dt = occ + trig
                else:
                    # absolute triggers
                    trigger_dt = parse_dt(str(trig)).astimezone(timezone.utc)
                reminders.append((uid,
                                  trigger_dt.isoformat(),
                                  summary,
                                  description))
    return reminders

# ─── Main sync logic ────────────────────────────────────────────────────────────
def sync():
    conn = init_db()
    c = conn.cursor()

    calendars = discover_calendars()
    logger.info(f"Calendars to sync: {calendars}")

    for cal_url in calendars:
        try:
            report = fetch_report_xml(cal_url)
            for href, lastmod in parse_report(report):
                uid = os.path.basename(href).rsplit(".ics", 1)[0]

                # check state
                c.execute("SELECT lastmod FROM events WHERE uid=?", (uid,))
                row = c.fetchone()

                if not row or row[0] != lastmod:
                    logger.info(f"Fetching event {uid}")
                    ics = download_ics(href)
                    for (u, trigger_iso, summary, desc) in expand_event_alarms(ics):
                        c.execute("""
                            INSERT OR IGNORE INTO reminders
                            (event_uid, trigger_dt, summary, description)
                            VALUES (?,?,?,?)
                        """, (u, trigger_iso, summary, desc))
                    # update lastmod
                    c.execute("""
                        INSERT INTO events(uid, lastmod)
                        VALUES (?,?)
                        ON CONFLICT(uid) DO UPDATE SET lastmod=excluded.lastmod
                    """, (uid, lastmod))
                    conn.commit()
                else:
                    logger.debug(f"Skipping up-to-date {uid}")

        except Exception as e:
            logger.error(f"Error syncing {cal_url}: {e}")

    conn.close()

if __name__ == "__main__":
    sync()
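For reference, the core of expand_event_alarms() is the TRIGGER handling: icalendar decodes a relative trigger such as -PT15M into a timedelta, which is added to the occurrence's DTSTART, while absolute triggers are parsed as datetimes. The snippet below is a self-contained sketch of the relative case only, not part of this commit; the calendar data is invented for illustration.

#!/usr/bin/env python3
# Sketch only (not in this commit): shows the TRIGGER handling that
# expand_event_alarms() relies on. A relative EMAIL alarm decodes to a
# timedelta and is added to the event start to get the reminder time.
from datetime import timedelta, timezone
from icalendar import Calendar

ICS = b"""BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//example//test//EN
BEGIN:VEVENT
UID:demo-123
DTSTART:20300101T090000Z
SUMMARY:Dentist
BEGIN:VALARM
ACTION:EMAIL
TRIGGER:-PT15M
DESCRIPTION:Reminder
END:VALARM
END:VEVENT
END:VCALENDAR
"""

cal = Calendar.from_ical(ICS)
for comp in cal.walk():
    if comp.name != "VEVENT":
        continue
    dtstart = comp.decoded("DTSTART").astimezone(timezone.utc)
    for alarm in comp.subcomponents:
        if alarm.name == "VALARM" and str(alarm.get("ACTION")) == "EMAIL":
            trig = alarm.decoded("TRIGGER")  # timedelta for relative triggers
            if isinstance(trig, timedelta):
                # prints 2030-01-01T08:45:00+00:00, i.e. 15 minutes before DTSTART
                print("reminder due at", (dtstart + trig).isoformat())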