"""
Dashboard Scheduler — automated data refreshes, proactive WhatsApp nudges,
weekly reports, health data ingestion (Oura API + Health Connect fallback),
Gmail briefing integration, and 'on this day' insights.

Runs as a background thread inside the Flask app.

Health data pipeline:
  PRIMARY: Oura API v2 → sleep, steps, exercise, HRV, readiness (direct API pull)
  FALLBACK: Google Drive API → Health Connect zip → weight, body fat, blood pressure
  FALLBACK2: Filesystem watcher on local Google Drive sync + ~/clawd/health-imports/
"""

import os
import json
import glob
import sqlite3
import logging
import threading
import time
import requests
from datetime import datetime, date, timedelta
from pathlib import Path

logger = logging.getLogger("scheduler")

# ── Configuration ────────────────────────────────────────────

# Dashboard JSON store — one directory above this file unless overridden
DASHBOARD_DATA_PATH = os.getenv(
    "DASHBOARD_DATA_PATH",
    str(Path(__file__).resolve().parent.parent / "dashboard-data.json")
)
# SQLite knowledge base queried for the "on this day" insights
KNOWLEDGE_DB_PATH = str(Path(__file__).resolve().parent.parent / "memory" / "chatgpt-kb.db")

# Twilio / WhatsApp credentials — read from the environment, empty string when unset
TWILIO_ACCOUNT_SID = os.getenv("TWILIO_ACCOUNT_SID", "")
TWILIO_AUTH_TOKEN = os.getenv("TWILIO_AUTH_TOKEN", "")
TWILIO_PHONE_NUMBER = os.getenv("TWILIO_PHONE_NUMBER", "")
BILL_PHONE_NUMBER = os.getenv("BILL_PHONE_NUMBER", "")

# Schedule: hours in 24h format (Eastern Time, Bill's local time)
# NOTE(review): the run loop compares against datetime.now(), so these are
# really the *server's* local hours — confirm the host clock is Eastern.
MORNING_NUDGE_HOUR = 7    # 7:00 AM
MORNING_NUDGE_MIN = 0
EVENING_NUDGE_HOUR = 20   # 8:00 PM
EVENING_NUDGE_MIN = 30
WEEKLY_REPORT_DOW = 6     # Sunday (datetime.weekday(): Monday == 0)
WEEKLY_REPORT_HOUR = 9    # 9:00 AM
DATA_REFRESH_INTERVAL = 3600  # seconds (every hour)
HEALTH_CHECK_INTERVAL = 300   # seconds (every 5 min during morning window)

# Filesystem fallback paths for Health Connect zips
# (used when Google Drive API is not configured)
HEALTH_CONNECT_DIR = os.getenv(
    "HEALTH_CONNECT_DIR",
    str(Path.home() / "Google Drive" / "My Drive")
)
HEALTH_CONNECT_ALT_DIRS = [
    str(Path.home() / "Google Drive"),  # older Drive sync layout
    str(Path.home() / "Library" / "CloudStorage" / "GoogleDrive-syrros@gmail.com" / "My Drive"),  # macOS CloudStorage mount
    str(Path(DASHBOARD_DATA_PATH).parent / "health-imports"),  # Manual drop folder
]


class DashboardScheduler:
    """Background scheduler that runs data refreshes and proactive nudges."""

    def __init__(self, dashboard_manager):
        self.dm = dashboard_manager
        self.running = False
        self.thread = None
        self._last_refresh = 0
        self._last_morning = None
        self._last_evening = None
        self._last_weekly = None
        self._last_briefing_update = None
        self._last_health_check = 0
        self._ingested_files = set()  # Track already-ingested zip files
        self._gdrive_connector = None  # Lazy-loaded Google Drive connector
        self._gmail_briefing = None    # Lazy-loaded Gmail briefing
        self._gdrive_last_file_id = None  # Track last ingested Drive file ID
        self._last_email_nudges = {}  # Track email nudges sent: {hour: date}
        self._last_new_music = None   # Track Friday new music drop
        self._last_health_daily = None  # Track daily 8 AM Health Connect grab
        self._last_pregame_rapid = 0   # Track rapid pregame validation cycle

    def start(self):
        """Start the scheduler in a background thread."""
        if self.running:
            return
        self.running = True
        self.thread = threading.Thread(target=self._run_loop, daemon=True)
        self.thread.start()
        logger.info("Scheduler started")

    def stop(self):
        """Ask the run loop to exit; it checks this flag every 30-second tick."""
        self.running = False
        logger.info("Scheduler stopped")

    def _run_loop(self):
        """Main scheduler loop — wakes every 30 seconds and fires due tasks.

        Once-per-day tasks trigger inside a 5-minute window and record the
        date they last ran, so waking several times inside the window sends
        only one message. Interval tasks record epoch timestamps. Any task
        exception is logged and the loop keeps going.
        """
        # Initial delay to let Flask start
        time.sleep(10)
        # Generate initial briefing
        self._update_briefing_data()

        while self.running:
            try:
                now = datetime.now()

                # 1. Data refresh (every hour)
                if time.time() - self._last_refresh >= DATA_REFRESH_INTERVAL:
                    self._refresh_data()
                    self._last_refresh = time.time()

                # 2. Morning nudge (7:00 AM)
                if (now.hour == MORNING_NUDGE_HOUR and
                    now.minute >= MORNING_NUDGE_MIN and
                    now.minute < MORNING_NUDGE_MIN + 5 and
                    self._last_morning != now.date()):
                    self._send_morning_nudge()
                    self._last_morning = now.date()

                # 3. Evening nudge (8:30 PM)
                if (now.hour == EVENING_NUDGE_HOUR and
                    now.minute >= EVENING_NUDGE_MIN and
                    now.minute < EVENING_NUDGE_MIN + 5 and
                    self._last_evening != now.date()):
                    self._send_evening_nudge()
                    self._last_evening = now.date()

                # 4. Weekly report (Sunday 9:00 AM)
                if (now.weekday() == WEEKLY_REPORT_DOW and
                    now.hour == WEEKLY_REPORT_HOUR and
                    now.minute < 5 and
                    self._last_weekly != now.date()):
                    self._send_weekly_report()
                    self._last_weekly = now.date()

                # 5. Health Connect watcher (every 5 min during 6-9 AM window)
                if (6 <= now.hour <= 9 and
                    time.time() - self._last_health_check >= HEALTH_CHECK_INTERVAL):
                    self._check_health_connect()
                    self._last_health_check = time.time()

                # 6. Daily health grab — 8:00 AM
                #    Oura API (sleep/steps/exercise) + Health Connect (weight/body fat)
                if (now.hour == 8 and
                    now.minute < 5 and
                    self._last_health_daily != now.date()):
                    self._daily_health_grab()
                    self._last_health_daily = now.date()

                # 7. Gmail digest — 9 AM, 12 PM, 6 PM (tracked per hour slot)
                EMAIL_NUDGE_HOURS = [9, 12, 18]
                if (now.hour in EMAIL_NUDGE_HOURS and
                    now.minute < 5 and
                    self._last_email_nudges.get(now.hour) != now.date()):
                    self._send_email_digest(now.hour)
                    self._last_email_nudges[now.hour] = now.date()

                # 8. New Music Friday — Friday 7:00 AM
                if (now.weekday() == 4 and  # Friday
                    now.hour == 7 and
                    now.minute < 5 and
                    self._last_new_music != now.date()):
                    self._send_new_music()
                    self._last_new_music = now.date()

                # 9. Briefing data update (every hour, on the hour)
                if self._last_briefing_update != now.hour:
                    self._update_briefing_data()
                    self._last_briefing_update = now.hour

                # 10. Rapid pregame validation — every 15 min when games are
                #     within 90 minutes. Catches late scratches, goalie
                #     confirmations, and lineup changes that drop right before
                #     puck drop / kickoff.
                if time.time() - self._last_pregame_rapid >= 900:  # 15 min
                    # BUGFIX: stamp the time — previously this was never
                    # updated, so the check fired on every 30-second tick
                    # instead of every 15 minutes. Stamped *before* the call
                    # so a failing check doesn't retry continuously.
                    self._last_pregame_rapid = time.time()
                    try:
                        self._maybe_rapid_pregame_refresh(now)
                    except Exception as e:
                        logger.debug(f"Rapid pregame check error (non-critical): {e}")

            except Exception as e:
                logger.error(f"Scheduler loop error: {e}", exc_info=True)

            time.sleep(30)

    # ── Data Refresh ─────────────────────────────────────────

    def _refresh_data(self):
        """Refresh live data in dashboard-data.json (crypto prices, etc.).

        Writes the file only when something actually changed, via the
        dashboard manager's atomic write under a cross-process file lock.
        Failures are logged and swallowed so the scheduler loop keeps running.
        """
        try:
            logger.info("Running scheduled data refresh...")
            data = self.dm.read()
            changed = False

            # Refresh CRO price (None on fetch failure — leaves data untouched)
            cro_price = self._fetch_cro_price()
            if cro_price:
                if "crypto" not in data:
                    data["crypto"] = {}
                data["crypto"]["cro_price"] = cro_price
                data["crypto"]["cro_updated"] = datetime.now().isoformat()
                changed = True
                logger.info(f"CRO price refreshed: ${cro_price}")

            # Roll the displayed date over at midnight
            today_str = date.today().isoformat()
            if data.get("date") != today_str:
                data["date"] = today_str
                changed = True

            # Update nextEvent to the soonest upcoming event.
            # BUGFIX: only mark changed when the event actually differs —
            # previously the mere existence of an upcoming event forced a
            # file write on every hourly refresh.
            upcoming = self._get_next_event(data)
            if upcoming and data.get("nextEvent") != upcoming:
                data["nextEvent"] = upcoming
                changed = True

            # Refresh betting picks (best-effort, non-critical)
            try:
                picks = self._fetch_betting_picks()
                if picks is not None:
                    data["betting"] = {
                        "games": picks,
                        "updated": datetime.now().isoformat(),
                    }
                    changed = True
                    logger.info(f"Betting picks refreshed: {len(picks)} games")
            except Exception as e:
                logger.debug(f"Betting picks refresh failed (non-critical): {e}")

            if changed:
                # Write using dashboard manager's atomic write
                from filelock import FileLock
                lock = FileLock(str(self.dm.file_path) + ".lock", timeout=10)
                with lock:
                    self.dm._write(data)
                logger.info("Dashboard data refreshed")

        except Exception as e:
            logger.error(f"Data refresh failed: {e}", exc_info=True)

    def _fetch_cro_price(self):
        """Return the current CRO/USD price from CoinGecko, or None on failure."""
        url = "https://api.coingecko.com/api/v3/simple/price"
        query = {"ids": "crypto-com-chain", "vs_currencies": "usd"}
        try:
            resp = requests.get(url, params=query, timeout=10)
            if resp.status_code == 200:
                payload = resp.json()
                return payload.get("crypto-com-chain", {}).get("usd")
        except Exception as e:
            # Network hiccups are expected — warn, don't crash the refresh
            logger.warning(f"CRO price fetch failed: {e}")
        return None

    def _get_next_event(self, data):
        """Find the next upcoming event."""
        today = date.today()
        now_str = today.isoformat()
        for event in data.get("schedule", []):
            if event.get("date", "") >= now_str:
                return {
                    "title": event["title"],
                    "date": event["date"],
                    "time": event.get("time", ""),
                    "location": event.get("location", ""),
                    "note": event.get("note", ""),
                }
        return None

    # ── Health Connect Watcher ────────────────────────────────

    def _get_gdrive_connector(self):
        """Return the Google Drive connector, or None when unavailable.

        The import/construction happens at most once; a failure is cached
        with a False sentinel so every later call short-circuits.
        """
        if self._gdrive_connector is None:
            try:
                from gdrive_health import get_connector
                self._gdrive_connector = get_connector()
            except Exception as e:
                logger.debug(f"Google Drive connector not available: {e}")
                self._gdrive_connector = False  # sentinel: tried and failed
        connector = self._gdrive_connector
        return None if connector is False else connector

    def _get_gmail_briefing(self):
        """Return the Gmail briefing helper, or None when unavailable.

        Mirrors _get_gdrive_connector: one-time lazy load, False sentinel
        caches a failed import so it is never retried.
        """
        if self._gmail_briefing is None:
            try:
                from gmail_briefing import get_gmail_briefing
                self._gmail_briefing = get_gmail_briefing()
            except Exception as e:
                logger.debug(f"Gmail briefing not available: {e}")
                self._gmail_briefing = False  # sentinel: tried and failed
        briefing = self._gmail_briefing
        return None if briefing is False else briefing

    def _check_health_connect(self):
        """Look for new Health Connect export zips.

        Primary channel is the Google Drive API; when the connector is
        missing or reports unavailable, the local sync folders are scanned
        instead. Errors are logged, never raised.
        """
        try:
            gdrive = self._get_gdrive_connector()
            if gdrive and gdrive.available:
                # Drive API path
                self._check_health_connect_gdrive(gdrive)
            else:
                # No Drive access — scan local sync/drop folders
                self._check_health_connect_filesystem()
        except Exception as e:
            logger.error(f"Health Connect check failed: {e}", exc_info=True)

    def _check_health_connect_gdrive(self, gdrive):
        """Check Google Drive for new Health Connect files via API.

        Downloads and ingests the newest matching export. On a failed
        download or any exception, falls back to the filesystem watcher so
        a Drive outage never silently drops a day's data.
        """
        try:
            files = gdrive.search_health_connect_files(max_age_hours=24)
            if not files:
                return

            # Assumes search results come newest-first — TODO confirm in gdrive_health
            latest = files[0]
            file_id = latest["id"]

            # Skip if already ingested this file
            if file_id == self._gdrive_last_file_id:
                return

            # Skip tiny files (<100 KB — likely a partial/in-progress export)
            if latest["size"] < 100 * 1024:
                logger.warning(f"Skipping small Drive file: {latest['name']} ({latest['size']} bytes)")
                return

            logger.info(f"New Health Connect file in Drive: {latest['name']}")

            # Download to a local path for ingestion
            local_path = gdrive.download_file(file_id, latest["name"])
            if not local_path:
                logger.error("Download failed — falling back to filesystem")
                self._check_health_connect_filesystem()
                return

            # Ingest; the file is only marked as seen after a successful
            # parse, so a failed ingest gets retried on the next check
            result = self._ingest_health_file(local_path)
            if result:
                self._gdrive_last_file_id = file_id
                self._ingested_files.add(local_path)
                self._send_whatsapp(f"Luke here — health data auto-imported from Drive!\n\n{result}")
                self._update_briefing_data()

            # Cleanup old downloads periodically
            gdrive.cleanup_old_downloads(max_age_days=7)

        except Exception as e:
            logger.error(f"Google Drive health check failed: {e}", exc_info=True)
            # Fall back to filesystem
            self._check_health_connect_filesystem()

    def _check_health_connect_filesystem(self):
        """Fallback: scan local sync/drop folders for fresh Health Connect zips.

        Ingests any export under 24 hours old and at least 100 KB that has
        not been ingested before; each success triggers a WhatsApp note and
        a briefing refresh.
        """
        for folder in [HEALTH_CONNECT_DIR] + HEALTH_CONNECT_ALT_DIRS:
            if not os.path.isdir(folder):
                continue

            # Both naming conventions, plus a recursive sweep into subfolders
            globs = [
                os.path.join(folder, "Health Connect*.zip"),
                os.path.join(folder, "health_connect*.zip"),
                os.path.join(folder, "**", "Health Connect*.zip"),
            ]
            candidates = []
            for g in globs:
                candidates.extend(glob.glob(g, recursive=True))

            for zip_path in candidates:
                if zip_path in self._ingested_files:
                    continue
                # Ignore stale exports (older than a day)...
                if (time.time() - os.path.getmtime(zip_path)) / 3600 > 24:
                    continue
                # ...and tiny/in-progress files under 100 KB
                if os.path.getsize(zip_path) < 100 * 1024:
                    continue

                logger.info(f"New Health Connect file detected (filesystem): {zip_path}")
                result = self._ingest_health_file(zip_path)
                if result:
                    self._ingested_files.add(zip_path)
                    self._send_whatsapp(f"Luke here — health data auto-imported!\n\n{result}")
                    self._update_briefing_data()

    def _ingest_health_file(self, zip_path):
        """Parse a Health Connect zip into the dashboard.

        Returns the ingestion summary string, or None when the import or
        parse fails (failure is logged, not raised).
        """
        try:
            from health_connect import ingest_health_connect
            summary = ingest_health_connect(zip_path, self.dm)
        except Exception as e:
            logger.error(f"Health Connect ingestion failed: {e}", exc_info=True)
            summary = None
        return summary

    # ── Daily Health Grab (8 AM) — Oura API + Health Connect ──

    def _daily_health_grab(self):
        """Daily 8 AM health data pull.

        Two independent sources — Oura API (sleep, steps, exercise, HRV,
        readiness) and Health Connect via Drive (weight, body fat) — so one
        failing never blocks the other. A WhatsApp summary is sent either
        way: data recap on success, a gentle check-in when nothing landed.
        """
        logger.info("=== Daily health grab starting (8 AM) ===")
        parts = []

        # ── Part 1: Oura API ──
        try:
            from oura_api import ingest_oura_data
            oura_summary = ingest_oura_data(self.dm)
            if oura_summary and "not configured" not in oura_summary.lower():
                parts.append(oura_summary)
                logger.info("Daily health grab — Oura: SUCCESS")
            else:
                logger.info(f"Daily health grab — Oura: {oura_summary}")
        except Exception as e:
            logger.warning(f"Daily health grab — Oura failed: {e}", exc_info=True)

        # ── Part 2: Health Connect (weight/body fat via Google Drive) ──
        hc_summary = self._daily_health_connect_pull()
        if hc_summary:
            parts.append(hc_summary)

        # ── Send summary ──
        if not parts:
            logger.info("Daily health grab: no data updated")
            self._send_whatsapp(
                "Morning check-in — no new health data today. "
                "Oura data will pull automatically if configured. "
                "For weight, upload a Health Connect export to Drive "
                "or text me 'health'. — Luke"
            )
            return

        self._update_briefing_data()
        combined = "\n\n".join(parts)
        self._send_whatsapp(
            f"Good morning! 📊 Health data updated.\n\n"
            f"{combined}\n\n"
            f"Dashboard updated. — Luke"
        )
        logger.info("Daily health grab: SUCCESS")

    def _daily_health_connect_pull(self):
        """
        Pull weight/body fat from Health Connect via Google Drive.
        Retries up to 3 times with 2-minute intervals (waiting for the
        phone's export/sync to land), then falls back to the filesystem.
        Returns summary string or None.
        """
        gdrive = self._get_gdrive_connector()

        if gdrive and gdrive.available:
            for attempt in range(1, 4):
                try:
                    files = gdrive.search_health_connect_files(max_age_hours=24)
                    if files:
                        latest = files[0]
                        file_id = latest["id"]

                        # Same file as a previous run — nothing new today
                        if file_id == self._gdrive_last_file_id:
                            logger.info(f"Already ingested today's HC file: {latest['name']}")
                            return None

                        # Under 100 KB is likely a partial upload; `continue`
                        # falls through to the 2-minute sleep and retries
                        if latest["size"] < 100 * 1024:
                            logger.warning(f"Skipping small file: {latest['name']} ({latest['size']} bytes)")
                            continue

                        local_path = gdrive.download_file(file_id, latest["name"])
                        if local_path:
                            result = self._ingest_health_file(local_path)
                            if result:
                                # Only mark success after a clean ingest so a
                                # failed parse is retried next attempt
                                self._gdrive_last_file_id = file_id
                                self._ingested_files.add(local_path)
                                gdrive.cleanup_old_downloads(max_age_days=7)
                                logger.info("Daily HC pull: SUCCESS (Drive)")
                                return result
                    else:
                        logger.info(f"Daily HC pull attempt {attempt}/3: no files found yet")

                except Exception as e:
                    logger.warning(f"Daily HC pull attempt {attempt}/3 failed: {e}")

                if attempt < 3:
                    time.sleep(120)

        # Fallback to filesystem
        # NOTE(review): trigger_health_ingest is defined elsewhere in this file
        try:
            result = self.trigger_health_ingest()
            if result and "No Health Connect" not in result:
                logger.info("Daily HC pull: SUCCESS (filesystem)")
                return result
        except Exception as e:
            logger.warning(f"Daily HC pull filesystem fallback failed: {e}")

        return None

    # Legacy alias for backward compatibility — kept so external references
    # to the old Health Connect-only name resolve to the combined grab.
    _daily_health_connect_grab = _daily_health_grab

    # ── Gmail Digest (9 AM, 12 PM, 6 PM) ───────────────────

    def _send_email_digest(self, hour):
        """Compose and send the scheduled Gmail digest over WhatsApp.

        hour picks the greeting label (9 → Morning, 12 → Midday,
        18 → Evening). Nothing is sent when the primary tab has no unread
        messages. Failures are logged, never raised.
        """
        try:
            gmail = self._get_gmail_briefing()
            if not gmail or not gmail.available:
                return

            data = gmail.generate_briefing()
            if not data.get("available"):
                return

            # Time-appropriate label
            label = {9: "Morning", 12: "Midday", 18: "Evening"}.get(hour, "")

            lines = [f"{label} Inbox Update — Luke", ""]

            unread = data.get("unread")
            if unread:
                lines.append(f"Unread: {unread['total_unread']} ({unread['primary']} primary)")

            needs_reply = data.get("needs_reply", [])
            if needs_reply:
                lines += ["", "Needs reply:"]
                lines += [f"  — {e['from']}: {e['subject'][:50]}" for e in needs_reply]

            # "Important" section only shown when there's no needs-reply list
            important = data.get("important", [])
            if important and not needs_reply:
                lines += ["", "Important:"]
                lines += [f"  — {e['from']}: {e['subject'][:50]}" for e in important[:3]]

            meetings = data.get("meetings", [])
            if meetings:
                lines.append(f"\nMeeting invites: {len(meetings)}")

            # Only send if there's something worth reporting
            if unread and unread["primary"] > 0:
                self._send_whatsapp("\n".join(lines))
                logger.info(f"{label} email digest sent")
            else:
                logger.debug(f"{label} email digest skipped — no primary unread")

        except Exception as e:
            logger.error(f"Email digest failed: {e}", exc_info=True)

    # ── New Music Friday ─────────────────────────────────────

    def _send_new_music(self):
        """Friday-morning WhatsApp with this week's Spotify releases."""
        try:
            import spotify_module
            if not spotify_module.is_available():
                return

            drops = spotify_module.new_releases(limit=15)
            # Skip placeholder/"nothing new" responses from the module
            has_content = (
                drops
                and "No new drops" not in drops
                and "not connected" not in drops
            )
            if has_content:
                self._send_whatsapp(drops)
                logger.info("New Music Friday sent")
            else:
                logger.debug("New Music Friday: nothing new this week")

        except Exception as e:
            logger.error(f"New Music Friday failed: {e}", exc_info=True)

    # ── Morning Nudge ────────────────────────────────────────

    def _send_morning_nudge(self):
        """Send morning briefing via WhatsApp.

        Assembles weight trend, goal pace, CRO price, last night's sleep
        (when present), today's + next-3-days events, a Gmail summary, and
        an "on this day" insight into one message. Exceptions are logged,
        never raised — the scheduler loop must keep running.
        """
        try:
            data = self.dm.read()
            w = data["weight"]
            today = date.today()

            # Weight trend (7-day): latest value vs. one week back
            series = w.get("series30d", [])
            if len(series) >= 7:
                week_change = series[-1] - series[-7]
                trend = f"{'down' if week_change < 0 else 'up'} {abs(week_change):.1f} lb this week"
            else:
                trend = "tracking"

            # Today's events
            events_today = [
                e for e in data.get("schedule", [])
                if e.get("date") == today.isoformat()
            ]
            # Events over the next 3 days (excludes today)
            events_upcoming = [
                e for e in data.get("schedule", [])
                if today.isoformat() < e.get("date", "") <= (today + timedelta(days=3)).isoformat()
            ]

            # CRO price — "N/A" string when no refresh has succeeded yet
            cro = data.get("crypto", {}).get("cro_price", "N/A")
            cro_str = f"${cro}" if isinstance(cro, (int, float)) else str(cro)

            # Build message
            # NOTE(review): %-d (unpadded day) is a glibc/BSD strftime
            # extension — not portable to Windows; confirm the host OS.
            lines = [f"Good morning Bill — Luke here with your {today.strftime('%A, %B %-d')} briefing:"]
            lines.append("")
            lines.append(f"Weight: {w['current']} lb ({trend})")
            lines.append(f"Goal pace: {w.get('pace', 'N/A')} lb/wk | ETA: ~{w.get('etaWeeks', '?')} weeks")
            lines.append(f"CRO: {cro_str}")

            # Sleep (if available)
            sleep = data.get("sleep", {})
            if sleep.get("total_hours"):
                lines.append(f"Sleep: {sleep['total_hours']}h ({sleep.get('bedtime', '?')}→{sleep.get('wake_time', '?')}) · HRV: {sleep.get('avg_hrv', 'N/A')} ms")

            if events_today:
                lines.append("")
                lines.append("TODAY:")
                for e in events_today:
                    time_str = f" {e['time']}" if e.get('time') else ''
                    lines.append(f"  {e['title']}{time_str}")

            if events_upcoming:
                lines.append("")
                lines.append("COMING UP:")
                for e in events_upcoming[:3]:
                    lines.append(f"  {e['date']} — {e['title']}")

            # Gmail summary (best-effort; skipped when Gmail isn't configured)
            gmail = self._get_gmail_briefing()
            if gmail and gmail.available:
                try:
                    email_data = gmail.generate_briefing()
                    if email_data.get("available") and email_data.get("summary_text"):
                        lines.append("")
                        lines.append("EMAIL:")
                        lines.append(email_data["summary_text"])
                except Exception as e:
                    logger.debug(f"Gmail briefing in morning nudge failed: {e}")

            # On this day insight (from the knowledge base; may be None)
            insight = self._get_on_this_day()
            if insight:
                lines.append("")
                lines.append(f"ON THIS DAY: {insight}")

            lines.append("")
            lines.append("Reply anytime — Luke's here.")

            message = "\n".join(lines)
            self._send_whatsapp(message)
            logger.info("Morning nudge sent")

        except Exception as e:
            logger.error(f"Morning nudge failed: {e}", exc_info=True)

    # ── Evening Nudge ────────────────────────────────────────

    def _send_evening_nudge(self):
        """Send evening check-in via WhatsApp. No weight nagging — Bill weighs in mornings only.

        Includes today's weight/pace, tomorrow's schedule (or explicit
        "nothing scheduled"), and an alert for any weekly checkpoint
        falling within the next 2 days.
        """
        try:
            data = self.dm.read()
            w = data["weight"]
            today = date.today()

            lines = [f"Evening check-in from Luke — {today.strftime('%A, %B %-d')}"]
            lines.append("")
            lines.append(f"Today's weight: {w['current']} lb | Pace: {w.get('pace', 'N/A')} lb/wk")

            # Tomorrow preview
            tomorrow = today + timedelta(days=1)
            tomorrow_events = [
                e for e in data.get("schedule", [])
                if e.get("date") == tomorrow.isoformat()
            ]
            if tomorrow_events:
                lines.append("")
                lines.append(f"TOMORROW ({tomorrow.strftime('%A')}):")
                for e in tomorrow_events:
                    time_str = f" {e['time']}" if e.get('time') else ''
                    lines.append(f"  {e['title']}{time_str}")
            else:
                lines.append("")
                lines.append(f"Nothing scheduled for tomorrow ({tomorrow.strftime('%A')}).")

            # Health checkpoint check
            checkpoints = data.get("healthPlan", {}).get("weeklyCheckpoints", [])
            for cp in checkpoints:
                cp_date_str = cp.get("date", "")
                # Check if checkpoint is within next 2 days
                try:
                    # Parse "Sunday, March 8" format
                    # NOTE(review): appends the *current* year — checkpoints
                    # that straddle New Year will parse into the wrong year.
                    cp_date = datetime.strptime(cp_date_str + f" {today.year}", "%A, %B %d %Y").date()
                    if today <= cp_date <= today + timedelta(days=2):
                        target = cp.get("target", "?")
                        # Non-numeric targets yield diff 0 → "on track" wording
                        diff = w['current'] - target if isinstance(target, (int, float)) else 0
                        status = f"{'above' if diff > 0 else 'on track for'} target {target} lb"
                        lines.append(f"\nCheckpoint alert: {cp_date_str} target is {target} lb — you're {status}")
                except (ValueError, TypeError):
                    # Unparseable checkpoint date — skip it silently
                    pass

            lines.append("")
            lines.append("Anything to update? (BP, events, signals, supplements) — Luke")

            message = "\n".join(lines)
            self._send_whatsapp(message)
            logger.info("Evening nudge sent")

        except Exception as e:
            logger.error(f"Evening nudge failed: {e}", exc_info=True)

    # ── Weekly Report ────────────────────────────────────────

    def _send_weekly_report(self):
        """Send weekly trend summary on Sunday morning.

        Covers weight (weekly avg and change, 30-day change), body fat,
        blood pressure, CRO, last/next week's schedule, a music recap, and
        an "on this day" insight. Failures are logged and swallowed.
        """
        try:
            data = self.dm.read()
            w = data["weight"]
            bf = data["bodyFat"]
            today = date.today()

            series = w.get("series30d", [])

            # Weight trend — series30d holds daily values, newest last.
            # FIX: removed two dead conditionals from the original
            # (`series[-7] if len(series) >= 7 else series[0]` inside the
            # len >= 7 branch, and `series[0] if len(series) >= 28 else
            # series[0]`, whose branches were identical). Values unchanged.
            if len(series) >= 7:
                week_end = series[-1]
                week_change = week_end - series[-7]
                weekly_avg = sum(series[-7:]) / 7
                # 30-day change is measured from the start of the stored series
                month_change = week_end - series[0]
            else:
                week_change = 0
                weekly_avg = w['current']
                month_change = 0

            # Events this past week
            past_week_start = (today - timedelta(days=7)).isoformat()
            past_events = [
                e for e in data.get("schedule", [])
                if past_week_start <= e.get("date", "") < today.isoformat()
            ]

            # Events next week
            next_week_end = (today + timedelta(days=7)).isoformat()
            next_events = [
                e for e in data.get("schedule", [])
                if today.isoformat() <= e.get("date", "") < next_week_end
            ]

            lines = [f"WEEKLY DASHBOARD REPORT — Week ending {today.strftime('%B %-d, %Y')}"]
            lines.append("=" * 40)
            lines.append("")

            # Weight section
            lines.append("WEIGHT")
            lines.append(f"  Current: {w['current']} lb")
            lines.append(f"  This week: {week_change:+.1f} lb")
            lines.append(f"  7-day avg: {weekly_avg:.1f} lb")
            lines.append(f"  30-day change: {month_change:+.1f} lb")
            lines.append(f"  Goal: {w['goal']} lb | Pace: {w.get('pace', 'N/A')} lb/wk")
            lines.append(f"  ETA: ~{w.get('etaWeeks', '?')} weeks")

            # Body fat
            lines.append("")
            lines.append("BODY FAT")
            lines.append(f"  Current: {bf['current']}% (goal: {bf['goal']}%)")
            lines.append(f"  Change: {bf.get('deltaVsLast', 0):+.1f}%")

            # Blood pressure
            bp = data.get("bloodPressure", {})
            lines.append("")
            lines.append("BLOOD PRESSURE")
            lines.append(f"  Last reading: {bp.get('systolic', '?')}/{bp.get('diastolic', '?')} ({bp.get('status', 'unknown')})")
            lines.append(f"  Date: {bp.get('date', 'N/A')}")

            # CRO
            cro = data.get("crypto", {}).get("cro_price")
            if cro:
                lines.append("")
                lines.append(f"CRO: ${cro}")

            # Schedule summary
            if past_events:
                lines.append("")
                lines.append(f"THIS WEEK: {len(past_events)} events completed")

            if next_events:
                lines.append("")
                lines.append(f"NEXT WEEK ({len(next_events)} events):")
                for e in next_events[:5]:
                    lines.append(f"  {e['date']} — {e['title']}")

            # Music (best-effort — module may be absent or unconfigured)
            try:
                import spotify_module
                music = spotify_module.weekly_music_summary(limit=5)
                if music:
                    lines.append("")
                    lines.append(music)
            except Exception:
                pass

            # On this day
            insight = self._get_on_this_day()
            if insight:
                lines.append("")
                lines.append(f"ON THIS DAY: {insight}")

            lines.append("")
            lines.append("Have a great week! — Luke")

            message = "\n".join(lines)
            self._send_whatsapp(message)
            logger.info("Weekly report sent")

        except Exception as e:
            logger.error(f"Weekly report failed: {e}", exc_info=True)

    # ── On This Day ──────────────────────────────────────────

    def _get_on_this_day(self):
        """Query knowledge base for conversations from this date in prior years.

        Looks back 1-3 years and returns a one-line insight string for the
        most recent prior year that has a substantial user message on this
        calendar date, or None when the DB is missing or nothing qualifies.
        """
        try:
            if not os.path.exists(KNOWLEDGE_DB_PATH):
                return None

            today = date.today()
            insights = []

            conn = sqlite3.connect(KNOWLEDGE_DB_PATH)
            try:
                c = conn.cursor()

                # Search for conversations from this date in previous years
                for year_offset in (1, 2, 3):
                    try:
                        past_date = today.replace(year=today.year - year_offset)
                    except ValueError:
                        continue  # Feb 29 in non-leap years
                    date_prefix = past_date.isoformat()  # already YYYY-MM-DD

                    # The longest user message of that day is taken as the
                    # most representative conversation snippet.
                    c.execute("""
                        SELECT c.title, substr(m.content, 1, 150), c.create_time
                        FROM messages m
                        JOIN conversations c ON c.id = m.conversation_id
                        WHERE m.role = 'user'
                        AND c.create_time LIKE ?
                        AND length(m.content) > 50
                        ORDER BY length(m.content) DESC
                        LIMIT 1
                    """, (date_prefix + "%",))

                    row = c.fetchone()
                    if row:
                        title = row[0]
                        snippet = row[1].replace("\n", " ")[:100]
                        year = today.year - year_offset
                        insights.append(f"({year}) You were working on '{title}' — \"{snippet}...\"")
            finally:
                # Always release the connection, even if a query raises.
                conn.close()

            # Loop runs most-recent year first, so insights[0] is the newest.
            return insights[0] if insights else None

        except Exception as e:
            logger.warning(f"On This Day lookup failed: {e}")
            return None

    # ── Briefing Data (for dashboard HTML) ───────────────────

    def _fetch_betting_picks(self):
        """
        Fetch today's betting picks using Bill's Predictable Tempo strategy.
        Returns structured data for the dashboard Bet365 card.

        Returns:
            A list of per-game dicts (keys: home, away, time, commence_time,
            sport, plus tag/rating/legs for rated matchups), sorted
            prime -> playable -> neutral -> avoid. Returns None when the
            Odds API is not configured or the fetch fails entirely.
        """
        try:
            from sports_betting import get_analyst
            analyst = get_analyst()
            if not analyst.available:
                logger.info("Betting picks: Odds API not configured, skipping")
                return None

            # Sport keys understood by the analyst's events endpoint.
            sports = ["nhl", "epl", "seriea", "ligue1", "laliga", "liga_portugal", "europa", "conference", "ucl", "mls"]
            picks = []

            for s in sports:
                # Events endpoint works on free plan; odds is paid (401).
                # Only use events — no fallback to odds.
                games = analyst.get_upcoming_events(s, days_ahead=1)

                for g in games:
                    # Base record; tag/rating/legs are filled in per-template below.
                    pick = {
                        "home": g["home_team"],
                        "away": g["away_team"],
                        "time": g.get("game_time", ""),
                        "commence_time": g.get("commence_time", ""),
                        "sport": s.upper(),
                    }

                    # ── NHL Template H1 (Playbook) ──
                    # Puckline cushion + alt total with BIG cushion + team total
                    # NO period unders (high variance, no track record)
                    # Under lines pushed higher for cushion (7.5 not 6.5)
                    if s == "nhl":
                        tempo = analyst.classify_nhl_matchup(g["home_team"], g["away_team"])
                        # Favorite comes from bookmaker h2h prices; may be None
                        # when no bookmaker data is attached to the event.
                        fav = self._extract_favorite(g)
                        home = g["home_team"]
                        away = g["away_team"]
                        underdog = away if fav == home else home if fav else None

                        if tempo == "controlled":
                            pick["tag"] = "★ CONTROLLED"
                            pick["rating"] = "prime"
                            pick["legs"] = [
                                f"{fav} Moneyline" if fav else f"{home} +1.5 Puckline",
                                "Under 7.5 Goals",
                                f"{underdog} Under 3.5 Team Total" if underdog else "Over 1.5 Goals",
                            ]
                        elif tempo == "mixed":
                            # The low-tempo team "anchors" the matchup; its
                            # opponent gets the team-total under.
                            from sports_betting import NHL_TEAM_PROFILES
                            home_p = NHL_TEAM_PROFILES.get(home, {})
                            anchor = home if home_p.get("tempo") == "low" else away
                            non_anchor = away if anchor == home else home
                            pick["tag"] = f"MIXED — {anchor} anchors"
                            pick["rating"] = "playable"
                            pick["legs"] = [
                                f"{fav} +1.5 Puckline" if fav else f"{anchor} +1.5 Puckline",
                                "Under 7.5 Goals",
                                f"{non_anchor} Under 3.5 Team Total",
                            ]
                        elif tempo == "chaotic":
                            pick["tag"] = "⚡ CHAOTIC"
                            pick["legs"] = ["Skip — high-event matchup"]
                            pick["rating"] = "avoid"
                        else:
                            pick["tag"] = "~ NEUTRAL"
                            pick["rating"] = "neutral"
                            pick["legs"] = [
                                f"{fav} +1.5 Puckline" if fav else f"{home} +1.5 Puckline",
                                "Under 7.5 Goals",
                            ]

                    # ── Soccer tier classification ──
                    # From Bill's bet_legs.csv (decided bets):
                    #   Goals Range: 100% (3/3)    ← BEST anchor
                    #   Over Goals low bar: 100% (6/6) ← Over 1, Over 1.5
                    #   Double Chance: 86% (6/7)   ← rock solid anchor
                    #   Team Total Under: 80% (4/5)
                    #   Corners: 67% (2/3)         ← team corners best
                    #   AVOID: Shots on Goal (0%), Cards (0%)
                    # Playbook Template S1: DC anchor + totals/range + corners
                    else:
                        league_class = analyst.classify_soccer_matchup(g)
                        tier = league_class.get("tier", 2)
                        label = league_class.get("label", s.upper())
                        is_cup = league_class.get("is_cup", False)
                        avg_goals = league_class.get("avg_goals", 2.6)
                        # Replace the raw sport key with the human-readable league label.
                        pick["sport"] = label
                        fav = self._extract_favorite(g)
                        home = g["home_team"]
                        away = g["away_team"]
                        underdog = away if fav == home else home if fav else None

                        if tier == 1:
                            pick["rating"] = "prime"

                            if is_cup:
                                # Cup knockout — cagey, protect-the-lead
                                pick["tag"] = f"★ CUP KNOCKOUT"
                                if fav:
                                    pick["legs"] = [
                                        f"Double Chance: {fav} or Draw",
                                        "Goals Range: 1-4 Goals",
                                        f"Over 3 Corners for {fav}",
                                    ]
                                else:
                                    pick["legs"] = [
                                        "Under 2.5 Goals",
                                        "Goals Range: 0-2 Goals",
                                        "Total Corners Under 9.5",
                                    ]

                            elif avg_goals <= 2.3:
                                # Low-event (Serie A, Liga Portugal)
                                pick["tag"] = f"★ LOW-EVENT {label}"
                                if fav:
                                    pick["legs"] = [
                                        f"Double Chance: {fav} or Draw",
                                        f"{underdog} Goals Range: Not Between 2-3 Goals" if underdog else "Under 2.5 Goals",
                                        f"Over 2 Corners for {fav}",
                                    ]
                                else:
                                    pick["legs"] = [
                                        "Goals Range: 1-4 Goals",
                                        "Under 2.5 Goals",
                                        "Total Corners Over 5",
                                    ]

                            else:
                                # Standard Tier 1 (Ligue 1, EPL)
                                pick["tag"] = f"★ TIER 1"
                                if fav:
                                    pick["legs"] = [
                                        f"Double Chance: {fav} or Draw",
                                        "Over 1 Goals",
                                        f"Over 3 Corners for {fav}",
                                    ]
                                else:
                                    pick["legs"] = [
                                        "Goals Range: 1-5 Goals",
                                        "Over 1 Goals",
                                        "Total Corners Over 6",
                                    ]

                        elif tier == 2:
                            pick["tag"] = f"TIER 2 — {label}"
                            pick["rating"] = "playable"
                            if fav:
                                pick["legs"] = [
                                    f"Double Chance: {fav} or Draw",
                                    "Goals Range: 1-4 Goals",
                                    f"Over 3 Corners for {fav}",
                                ]
                            else:
                                pick["legs"] = [
                                    "Under 2.5 Goals",
                                    "Goals Range: 0-3 Goals",
                                    "Total Corners Under 9.5",
                                ]

                        elif tier == 3:
                            pick["tag"] = "⚠ AVOID"
                            pick["legs"] = ["Skip — unpredictable tempo"]
                            pick["rating"] = "avoid"
                        # NOTE(review): a tier outside 1-3 leaves the pick with
                        # no tag/rating/legs — confirm classify_soccer_matchup
                        # can only return tiers 1-3.

                    picks.append(pick)

            # Sort: prime first, then playable, then avoid
            rating_order = {"prime": 0, "playable": 1, "neutral": 2, "avoid": 3}
            picks.sort(key=lambda p: rating_order.get(p.get("rating", "neutral"), 2))

            # ── Pregame Validation ──
            # Run picks through the pregame validator to check form,
            # injuries, and H2H before they reach the dashboard.
            # Validation is best-effort: missing module or validator errors
            # never block the picks from reaching the dashboard.
            try:
                from pregame_validator import validate_picks_batch
                picks = validate_picks_batch(picks)
                logger.info(f"Betting picks: {len(picks)} games processed + validated, "
                            f"{sum(1 for p in picks if p.get('rating') == 'prime')} prime targets")
            except ImportError:
                logger.warning("pregame_validator not found — skipping validation")
                logger.info(f"Betting picks: {len(picks)} games processed (unvalidated), "
                            f"{sum(1 for p in picks if p.get('rating') == 'prime')} prime targets")
            except Exception as e:
                logger.warning(f"Pregame validation failed — picks pass through unvalidated: {e}")
                logger.info(f"Betting picks: {len(picks)} games processed (validation error), "
                            f"{sum(1 for p in picks if p.get('rating') == 'prime')} prime targets")

            return picks

        except Exception as e:
            logger.error(f"Betting picks fetch failed: {e}", exc_info=True)
            return None

    def _maybe_rapid_pregame_refresh(self, now):
        """
        Rapid pregame validation cycle — runs every 15 min when any game
        starts within 90 minutes. Clears the validation cache so fresh
        data (Claude analysis, scraping) is fetched for imminent games.
        Then re-fetches betting picks and updates briefing + dashboard.

        Args:
            now: datetime supplied by the scheduler loop; not referenced
                 here (kept for call-site symmetry with other tick hooks).
        """
        try:
            from sports_betting import get_analyst
            analyst = get_analyst()
            if not analyst.available:
                return  # Odds API not configured — nothing to refresh

            # Scan upcoming games across all tracked sports; stop at the
            # first game that starts within the next 90 minutes.
            has_imminent = False
            sports = ["nhl", "epl", "seriea", "ligue1", "laliga", "liga_portugal",
                      "europa", "conference", "ucl", "mls"]

            for s in sports:
                games = analyst.get_upcoming_events(s, days_ahead=1)
                for g in games:
                    try:
                        ct = g.get("commence_time", "")
                        if not ct:
                            continue
                        # commence_time is ISO-8601 with a trailing "Z";
                        # normalize to a naive UTC datetime for comparison.
                        # NOTE(review): datetime.utcnow() is deprecated as of
                        # Python 3.12 — consider timezone-aware datetimes.
                        game_time = datetime.fromisoformat(ct.replace("Z", "+00:00")).replace(tzinfo=None)
                        minutes_until = (game_time - datetime.utcnow()).total_seconds() / 60
                        if 0 < minutes_until <= 90:
                            has_imminent = True
                            break
                    except (ValueError, TypeError):
                        continue  # malformed timestamp — skip this game
                if has_imminent:
                    break

            if not has_imminent:
                return

            logger.info("RAPID PREGAME: Games within 90 min — running fresh validation cycle")
            self._last_pregame_rapid = time.time()

            # Clear the validation cache so picks get re-validated with latest intel
            try:
                from pregame_validator import _validation_cache
                cache_size = len(_validation_cache)
                _validation_cache.clear()
                logger.info(f"RAPID PREGAME: Cleared {cache_size} cached validations")
            except ImportError:
                pass  # validator module is optional — proceed without cache clear

            # Re-fetch picks (which triggers fresh validation)
            picks = self._fetch_betting_picks()
            if picks is not None:
                data = self.dm.read()
                data["betting"] = {
                    "games": picks,
                    "updated": datetime.now().isoformat(),
                }
                self.dm.write(data)
                logger.info(f"RAPID PREGAME: Dashboard updated with {len(picks)} re-validated picks")

                # Also refresh briefing.json
                self._update_briefing_data()

        except Exception as e:
            logger.warning(f"Rapid pregame refresh failed: {e}")

    def _extract_favorite(self, game):
        """Extract the favorite team name from bookmaker data (internal use only)."""
        try:
            if game.get("bookmakers"):
                h2h = game["bookmakers"][0]["markets"].get("h2h", [])
                if h2h:
                    fav = min(h2h, key=lambda x: x["price"])
                    return fav["name"]
        except (KeyError, IndexError, TypeError):
            pass
        return None

    def _update_briefing_data(self):
        """Generate briefing data JSON for the dashboard morning card.

        Reads the dashboard data store, assembles a briefing dict (weight
        trend, today's/upcoming events, checkpoint status, CRO price,
        on-this-day insight, Gmail briefing, Google service status, betting
        picks) and atomically writes it to briefing.json next to the main
        dashboard data file. All failures are logged, never raised.
        """
        try:
            data = self.dm.read()
            w = data["weight"]
            today = date.today()
            now = datetime.now()

            # Greeting based on time of day
            hour = now.hour
            if hour < 12:
                greeting = "Good morning"
            elif hour < 17:
                greeting = "Good afternoon"
            else:
                greeting = "Good evening"

            # Weight trend: compare today vs 7 days ago from the 30-day series.
            # Falling weight counts as "positive" (this is a weight-loss plan).
            series = w.get("series30d", [])
            if len(series) >= 7:
                week_change = series[-1] - series[-7]
                trend_direction = "down" if week_change < 0 else "up"
                trend_text = f"{trend_direction} {abs(week_change):.1f} lb this week"
                trend_positive = week_change <= 0
            else:
                trend_text = "tracking"
                trend_positive = True

            # Today's events
            events_today = [
                {"title": e["title"], "time": e.get("time", ""), "location": e.get("location", "")}
                for e in data.get("schedule", [])
                if e.get("date") == today.isoformat()
            ]

            # Upcoming 3 days (ISO date strings compare correctly as text)
            events_upcoming = [
                {"title": e["title"], "date": e["date"], "time": e.get("time", "")}
                for e in data.get("schedule", [])
                if today.isoformat() < e.get("date", "") <= (today + timedelta(days=3)).isoformat()
            ][:5]

            # Health checkpoint status: surface the first checkpoint due in
            # the next 2 days. Checkpoint dates are stored year-less as e.g.
            # "Monday, March 3"; parsing appends the current year, and a
            # weekday/date mismatch raises ValueError (silently skipped).
            checkpoint_alert = None
            checkpoints = data.get("healthPlan", {}).get("weeklyCheckpoints", [])
            for cp in checkpoints:
                try:
                    cp_date_str = cp.get("date", "")
                    cp_date = datetime.strptime(cp_date_str + f" {today.year}", "%A, %B %d %Y").date()
                    if today <= cp_date <= today + timedelta(days=2):
                        target = cp.get("target", 0)
                        diff = round(w['current'] - target, 1)
                        checkpoint_alert = {
                            "date": cp_date_str,
                            "target": target,
                            "diff": diff,
                            # Within 1 lb of target still counts as on track.
                            "on_track": diff <= 1.0,
                        }
                        break
                except (ValueError, TypeError):
                    continue

            # CRO price
            cro_price = data.get("crypto", {}).get("cro_price")

            # On this day
            on_this_day = self._get_on_this_day()

            # Gmail briefing (best-effort; None if unavailable or failing)
            email_briefing = None
            gmail = self._get_gmail_briefing()
            if gmail and gmail.available:
                try:
                    email_briefing = gmail.generate_briefing()
                except Exception as e:
                    logger.debug(f"Gmail briefing for dashboard failed: {e}")

            # Google services status
            gdrive = self._get_gdrive_connector()
            google_status = {
                "drive": gdrive.available if gdrive else False,
                "gmail": gmail.available if gmail else False,
            }

            # Betting picks
            betting_picks = self._fetch_betting_picks()

            # NOTE(review): "%-d" (no-pad day) is a glibc/macOS strftime
            # extension — fails on Windows. Fine for current deployment.
            briefing = {
                "generated": now.isoformat(),
                "greeting": greeting,
                "date_display": today.strftime("%A, %B %-d"),
                "weight": {
                    "current": w["current"],
                    "trend": trend_text,
                    "trend_positive": trend_positive,
                    "pace": w.get("pace", 0),
                    "eta_weeks": w.get("etaWeeks", 0),
                },
                "events_today": events_today,
                "events_upcoming": events_upcoming,
                "checkpoint": checkpoint_alert,
                "cro_price": cro_price,
                "on_this_day": on_this_day,
                "focus": data.get("focus", ""),
                "sleep": data.get("sleep"),
                "email": email_briefing,
                "google_status": google_status,
                "betting": {"games": betting_picks} if betting_picks else None,
            }

            # Write briefing data atomically: dump to a temp file in the same
            # directory, then os.replace so readers never see a partial file.
            briefing_path = Path(DASHBOARD_DATA_PATH).parent / "briefing.json"
            import tempfile
            fd, tmp_path = tempfile.mkstemp(dir=str(briefing_path.parent), suffix=".json.tmp")
            try:
                with os.fdopen(fd, "w") as f:
                    json.dump(briefing, f, indent=2, ensure_ascii=False)
                os.replace(tmp_path, str(briefing_path))
                logger.info("Briefing data updated")
            except Exception:
                # Clean up the orphaned temp file before re-raising.
                if os.path.exists(tmp_path):
                    os.unlink(tmp_path)
                raise

        except Exception as e:
            logger.error(f"Briefing update failed: {e}", exc_info=True)

    # ── WhatsApp Sending ─────────────────────────────────────

    def _send_whatsapp(self, message):
        """Deliver *message* to Bill over WhatsApp via the Twilio REST API.

        Silently skips when Twilio credentials are absent; all network and
        API errors are logged, never raised.
        """
        credentials = (TWILIO_ACCOUNT_SID, TWILIO_AUTH_TOKEN,
                       TWILIO_PHONE_NUMBER, BILL_PHONE_NUMBER)
        if not all(credentials):
            logger.warning("Twilio credentials not configured — skipping WhatsApp send")
            return

        endpoint = f"https://api.twilio.com/2010-04-01/Accounts/{TWILIO_ACCOUNT_SID}/Messages.json"
        payload = {
            "From": f"whatsapp:{TWILIO_PHONE_NUMBER}",
            "To": f"whatsapp:{BILL_PHONE_NUMBER}",
            "Body": message,
        }
        try:
            resp = requests.post(
                endpoint,
                auth=(TWILIO_ACCOUNT_SID, TWILIO_AUTH_TOKEN),
                data=payload,
                timeout=15,
            )
            if resp.status_code in (200, 201):
                logger.info(f"WhatsApp message sent ({len(message)} chars)")
            else:
                logger.error(f"WhatsApp send failed: {resp.status_code} — {resp.text[:200]}")
        except Exception as e:
            logger.error(f"WhatsApp send error: {e}")

    # ── Public API (for manual triggers via WhatsApp) ────────

    def trigger_morning_briefing(self):
        """Fire the morning briefing on demand (WhatsApp command hook)."""
        confirmation = "Morning briefing sent!"
        self._send_morning_nudge()
        return confirmation

    def trigger_weekly_report(self):
        """Fire the weekly report on demand (WhatsApp command hook)."""
        confirmation = "Weekly report sent!"
        self._send_weekly_report()
        return confirmation

    def get_on_this_day(self):
        """Return the on-this-day insight, or a fallback message (WhatsApp hook)."""
        insight = self._get_on_this_day()
        if insight:
            return insight
        return "No conversations found from this date in previous years."

    def trigger_oura_ingest(self):
        """Manually trigger an Oura API data pull. Returns a summary string.

        Refreshes the dashboard briefing only when the pull produced real
        data (not a "not configured" notice). Errors are returned as text
        rather than raised, since the caller relays them over WhatsApp.
        """
        logger.info("trigger_oura_ingest: starting")
        try:
            from oura_api import ingest_oura_data
            logger.info("trigger_oura_ingest: oura_api imported OK")
            summary = ingest_oura_data(self.dm)
            logger.info(f"trigger_oura_ingest: result = {str(summary)[:120]}")
            if not summary:
                return "No data returned from Oura API."
            if "not configured" in summary.lower():
                return summary
            self._update_briefing_data()
            return summary
        except Exception as e:
            logger.error(f"Oura manual ingest failed: {e}", exc_info=True)
            return f"Oura API error: {str(e)[:100]}"

    def trigger_health_ingest(self):
        """Manually trigger Health Connect ingestion.

        Tries the Google Drive API first; on failure or absence, scans the
        local Drive-sync folders (plus the dashboard data dir) for the most
        recently modified Health Connect export zip. Returns a human-readable
        summary string in every case.
        """

        # ── Try Google Drive API first ──
        gdrive = self._get_gdrive_connector()
        if gdrive and gdrive.available:
            try:
                local_path = gdrive.fetch_latest_health_connect(max_age_hours=72)
                if local_path:
                    result = self._ingest_health_file(local_path)
                    if result:
                        self._ingested_files.add(local_path)
                        self._update_briefing_data()
                        return f"(via Google Drive)\n{result}"
            except Exception as e:
                logger.warning(f"Drive fetch failed during manual ingest: {e}")

        # ── Fallback to filesystem ──
        # Collect every matching zip across all known sync locations, then
        # keep the most recently modified one.
        search_dirs = [HEALTH_CONNECT_DIR] + HEALTH_CONNECT_ALT_DIRS
        search_dirs.append(str(Path(DASHBOARD_DATA_PATH).parent))

        candidates = []
        for directory in search_dirs:
            if not os.path.isdir(directory):
                continue
            for pattern in (
                os.path.join(directory, "Health Connect*.zip"),
                os.path.join(directory, "health_connect*.zip"),
                os.path.join(directory, "**", "Health Connect*.zip"),
            ):
                candidates.extend(glob.glob(pattern, recursive=True))

        if not candidates:
            return "No Health Connect zip files found in Google Drive or local folders."

        latest_file = max(candidates, key=os.path.getmtime)

        result = self._ingest_health_file(latest_file)
        if result:
            self._ingested_files.add(latest_file)
            self._update_briefing_data()
            return result
        return "Health Connect ingestion failed. Check server logs."

    def get_briefing_data(self):
        """Return the current briefing data as a dict, or None if no briefing.json exists."""
        briefing_path = Path(DASHBOARD_DATA_PATH).parent / "briefing.json"
        if not briefing_path.exists():
            return None
        with briefing_path.open() as f:
            return json.load(f)
