320 lines
12 KiB
Python
320 lines
12 KiB
Python
import json
import logging
import os
from collections import defaultdict
from datetime import date, datetime, timedelta
from typing import Any, Dict, List

from .client import GarminClient
|
|
|
|
|
|
class GarminSync:
    """Sync Garmin activity data to local JSON storage and aggregate it.

    Activities are stored one-per-file as ``activity_<id>.json`` inside
    ``storage_dir``; all aggregation methods read from that local store.
    """

    def __init__(self, client: "GarminClient", storage_dir: str = "../data/local/garmin"):
        """
        Args:
            client: Garmin API client used to fetch activities.
            storage_dir: Storage directory, resolved relative to this module's
                file so the location is stable regardless of the process CWD.
        """
        self.client = client
        self.storage_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), storage_dir))
        os.makedirs(self.storage_dir, exist_ok=True)

    def sync_activities(self, days: int = 30) -> int:
        """Fetch and store the last `days` days of activities.

        Args:
            days: How many days back from today to fetch.

        Returns:
            Number of activities returned by the API (including any whose
            local save failed).
        """
        end_date = date.today()
        start_date = end_date - timedelta(days=days)

        activities = self.client.get_activities(start_date, end_date)

        for activity in activities:
            try:
                self._save_activity(activity)
            except Exception:
                # Best-effort: one bad activity must not abort the whole sync,
                # but log the failure instead of silently swallowing it.
                logging.getLogger(__name__).warning(
                    "Failed to save activity %s",
                    activity.get("activityId"),
                    exc_info=True,
                )

        return len(activities)

    def _save_activity(self, activity: Dict[str, Any]) -> None:
        """Save one activity as ``activity_<id>.json``; skip if it has no id."""
        activity_id = activity.get("activityId")
        if not activity_id:
            return

        file_path = os.path.join(self.storage_dir, f"activity_{activity_id}.json")
        # Explicit UTF-8 so activity data round-trips on platforms whose
        # default text encoding is not UTF-8.
        with open(file_path, "w", encoding="utf-8") as f:
            json.dump(activity, f, indent=2)

    def load_local_activities(self) -> List[Dict[str, Any]]:
        """Load all locally stored activities, skipping unreadable files."""
        activities = []

        for filename in os.listdir(self.storage_dir):
            if filename.startswith("activity_") and filename.endswith(".json"):
                with open(os.path.join(self.storage_dir, filename), "r", encoding="utf-8") as f:
                    try:
                        activities.append(json.load(f))
                    except json.JSONDecodeError:
                        # Corrupt file: skip it, but leave a trace for debugging.
                        logging.getLogger(__name__).warning(
                            "Skipping corrupt activity file %s", filename
                        )
        return activities

    @staticmethod
    def _parse_activity_date(act: Dict[str, Any]) -> Any:
        """Parse a "YYYY-MM-DD HH:MM:SS" startTimeLocal into a date, or None.

        Returns None for a missing, empty, non-string, or malformed value so
        callers can uniformly skip unparseable activities.
        """
        start_str = act.get("startTimeLocal")
        if not start_str:
            return None
        try:
            return datetime.strptime(start_str.split(" ")[0], "%Y-%m-%d").date()
        except (AttributeError, TypeError, ValueError):
            return None

    def get_last_sync_date(self) -> Any:
        """Return the date of the latest stored activity, or None if none parse."""
        parsed = (self._parse_activity_date(act) for act in self.load_local_activities())
        return max((d for d in parsed if d is not None), default=None)

    def sync_smart(self) -> int:
        """Sync only activities newer than the latest locally stored one.

        Returns:
            Number of activities fetched (0 when already up to date).
        """
        latest_date = self.get_last_sync_date()
        if latest_date is None:
            # No usable local data: do a full sync for the last year.
            return self.sync_activities(days=365)

        today = date.today()
        if latest_date + timedelta(days=1) > today:
            return 0  # Up to date.

        # sync_activities(days=N) fetches [today - N, today]; syncing the gap
        # in whole days deliberately re-fetches the latest local day too,
        # which is harmless (files are overwritten by id) and avoids missing
        # a boundary day.
        return self.sync_activities(days=(today - latest_date).days)

    @staticmethod
    def _get_type_color(type_key: str) -> str:
        """Map a Garmin activity typeKey to a stable hex chart color."""
        k = type_key.lower()

        # Cycling family (greens/teals).
        if "cycling" in k or "virtual_ride" in k or "spinning" in k:
            if "virtual" in k:
                return "#3fb950"  # bright green
            if "indoor" in k:
                return "#2ea043"  # darker green
            return "#56d364"  # standard green

        # Swimming (blues).
        if "swimming" in k or "lap_swimming" in k:
            if "open_water" in k:
                return "#1f6feb"  # deep blue
            return "#58a6ff"  # lighter blue

        # Yoga / pilates / breathwork (purples/pinks).
        if "yoga" in k:
            return "#d2a8ff"
        if "pilates" in k:
            return "#bc8cff"
        if "breathing" in k:
            return "#e2c5ff"

        # Running (oranges/reds).
        if "running" in k or "treadmill" in k:
            if "trail" in k:
                return "#bf4b00"  # dark orange
            return "#fa4549"  # reddish

        # Strength training (gold).
        if "strength" in k or "weight" in k:
            return "#e3b341"

        # Hiking / walking.
        if "hiking" in k:
            return "#d29922"
        if "walking" in k:
            return "#8b949e"

        return "#8b949e"  # default grey

    def get_weekly_stats(self, weeks: int = 12) -> Dict[str, Any]:
        """Aggregate local activities into weekly duration stats by type.

        Args:
            weeks: How many weeks back (from today) to include.

        Returns:
            Chart-ready payload: ``{"labels": ["2023-W45", ...],
            "datasets": [{"label", "data", "backgroundColor"}, ...]}``.
        """
        activities = self.load_local_activities()
        today = date.today()
        # Go back exactly weeks*7 days so only full weeks are charted.
        cutoff_date = today - timedelta(days=weeks * 7)

        # { "2023-W45": { "running": 2.0, ... } } — hours per type per ISO week.
        weekly_data = defaultdict(lambda: defaultdict(float))
        activity_types = set()

        for act in activities:
            act_date = self._parse_activity_date(act)
            if act_date is None or act_date < cutoff_date:
                continue

            # ISO year + week is the bucket key (e.g. "2023-W45").
            year, week, _ = act_date.isocalendar()
            week_key = f"{year}-W{week:02d}"

            # Duration is in seconds; the chart's y-axis is hours.
            # `or 0` guards against a present-but-null "duration" value.
            duration_hours = (act.get("duration") or 0) / 3600.0

            raw_type = act.get("activityType", {}).get("typeKey", "other")

            weekly_data[week_key][raw_type] += duration_hours
            activity_types.add(raw_type)

        sorted_weeks = sorted(weekly_data.keys())
        datasets = []

        for type_key in activity_types:
            # Round to 2 decimal places for a clean chart payload.
            data_points = [round(weekly_data[week][type_key], 2) for week in sorted_weeks]
            datasets.append({
                "label": type_key.replace("_", " ").title(),
                "data": data_points,
                "backgroundColor": self._get_type_color(type_key),
            })

        return {
            "labels": sorted_weeks,
            "datasets": datasets,
        }

    def get_dashboard_stats(self) -> Dict[str, Any]:
        """
        Get aggregated stats for the dashboard:
        - Last 7 days total hours & trend vs previous 7 days.
        - Last 7 days activity breakdown (e.g. 3x Cycling).
        - Strength session count for the last 7 days.
        - Most recent non-null VO2 max value found, or None.
        """
        activities = self.load_local_activities()
        today = date.today()
        last_7_start = today - timedelta(days=6)  # Inclusive of today = 7 days
        prev_7_start = last_7_start - timedelta(days=7)
        prev_7_end = last_7_start - timedelta(days=1)

        # Buckets
        current_period = {"hours": 0.0, "count": 0, "breakdown": {}}
        prev_period = {"hours": 0.0, "count": 0}
        strength_count = 0

        for act in activities:
            act_date = self._parse_activity_date(act)
            if act_date is None:
                continue

            # `or 0` guards against a present-but-null "duration" value.
            dur_hours = (act.get("duration") or 0) / 3600.0
            type_key = act.get("activityType", {}).get("typeKey", "unknown")

            if last_7_start <= act_date <= today:
                current_period["hours"] += dur_hours
                current_period["count"] += 1
                current_period["breakdown"][type_key] = current_period["breakdown"].get(type_key, 0) + 1

                if "strength" in type_key.lower():
                    strength_count += 1

            elif prev_7_start <= act_date <= prev_7_end:
                prev_period["hours"] += dur_hours
                prev_period["count"] += 1

        # Trend vs previous week; 0 when there is no previous data to compare.
        trend_pct = 0
        if prev_period["hours"] > 0:
            trend_pct = ((current_period["hours"] - prev_period["hours"]) / prev_period["hours"]) * 100

        # Format breakdown nicely: "lap_swimming" -> "Lap Swimming".
        breakdown_list = [
            {"label": k.replace("_", " ").title(), "count": v}
            for k, v in current_period["breakdown"].items()
        ]

        # Most recent activity carrying an actual VO2 max value (running
        # field takes precedence over cycling). Null values are skipped so
        # an activity with "vo2MaxValue": null doesn't mask older readings.
        vo2_max = None
        for act in sorted(activities, key=lambda x: x.get("startTimeLocal", ""), reverse=True):
            value = act.get("vo2MaxValue")
            if value is None:
                value = act.get("vo2MaxCyclingValue")
            if value is not None:
                vo2_max = value
                break

        return {
            "summary": {
                "total_hours": round(current_period["hours"], 1),
                "trend_pct": round(trend_pct, 1),
                "period_label": "Last 7 Days"
            },
            "breakdown": breakdown_list,
            "strength_sessions": strength_count,
            "vo2_max": vo2_max
        }
|