diff --git a/README.md b/README.md index 49592bffc..b8d4a06a4 100644 --- a/README.md +++ b/README.md @@ -55,6 +55,7 @@ See `backend/app/db/schema.sql`. Key tables: - `user:{id}:categories` — 24h TTL - `user:{id}:upcoming_bills` — 15 min TTL - `insights:{id}` — 24h TTL (invalidate on new expense/bill) + - `user:{id}:weekly_digest:{yyyy-mm-dd}` — 10 min TTL - Invalidation - On expense/bill create/update/delete -> delete affected monthly_summary, upcoming_bills, insights - Rate limiting (optional): `rl:{userId}:{endpoint}:{minute}` with short TTL @@ -66,6 +67,7 @@ OpenAPI: `backend/app/openapi.yaml` - Bills: CRUD `/bills`, pay/mark `/bills/{id}/pay` - Reminders: CRUD `/reminders`, trigger `/reminders/run` - Insights: `/insights/monthly`, `/insights/budget-suggestion` +- Digest: `/digest/weekly`, `/digest/weekly/`, `/digest/history`, `POST /digest/generate` ## MVP UI/UX Plan - Auth screens: register/login. @@ -73,6 +75,12 @@ OpenAPI: `backend/app/openapi.yaml` - Monthly spend chart, category breakdown donut. - Upcoming bills list with due dates and pay status. - AI budget suggestion card. +- Weekly Digest: + - Smart weekly financial summary with income, expenses, and net flow. + - Top spending categories breakdown with percentage bars. + - Week-over-week trend comparison (income/expense change %). + - AI-powered insights generated via Gemini (heuristic fallback). + - Historical digest browser for past weeks. - Expenses page: add expense (amount, category, notes, date), list & filter. - Bills page: create bill (name, amount, cadence, due date, channel), toggle WhatsApp/email. - Settings: profile, categories, reminders default channel, export (premium). 
@@ -108,6 +116,7 @@ finmind/ __init__.py ai.py cache.py + digest.py reminders.py db/ schema.sql @@ -125,6 +134,7 @@ finmind/ Charts.tsx pages/ Dashboard.tsx + Digest.tsx Expenses.tsx Bills.tsx Settings.tsx diff --git a/app/src/App.tsx b/app/src/App.tsx index f0dc5942d..df96cc79a 100644 --- a/app/src/App.tsx +++ b/app/src/App.tsx @@ -16,6 +16,7 @@ import NotFound from "./pages/NotFound"; import { Landing } from "./pages/Landing"; import ProtectedRoute from "./components/auth/ProtectedRoute"; import Account from "./pages/Account"; +import { Digest } from "./pages/Digest"; const queryClient = new QueryClient({ defaultOptions: { @@ -83,6 +84,14 @@ const App = () => ( } /> + + + + } + /> { + return api('/digest/weekly'); +} + +export async function getWeeklyDigestByDate(dateStr: string): Promise { + return api(`/digest/weekly/${encodeURIComponent(dateStr)}`); +} + +export async function getDigestHistory(limit?: number): Promise { + const query = limit ? `?limit=${limit}` : ''; + return api(`/digest/history${query}`); +} + +export async function forceGenerateDigest(): Promise { + return api('/digest/generate', { method: 'POST' }); +} diff --git a/app/src/components/layout/Navbar.tsx b/app/src/components/layout/Navbar.tsx index c7593b701..998d6c752 100644 --- a/app/src/components/layout/Navbar.tsx +++ b/app/src/components/layout/Navbar.tsx @@ -8,6 +8,7 @@ import { logout as logoutApi } from '@/api/auth'; const navigation = [ { name: 'Dashboard', href: '/dashboard' }, + { name: 'Digest', href: '/digest' }, { name: 'Budgets', href: '/budgets' }, { name: 'Bills', href: '/bills' }, { name: 'Reminders', href: '/reminders' }, diff --git a/app/src/pages/Digest.tsx b/app/src/pages/Digest.tsx new file mode 100644 index 000000000..6098bbe30 --- /dev/null +++ b/app/src/pages/Digest.tsx @@ -0,0 +1,448 @@ +import { useEffect, useState } from 'react'; +import { + FinancialCard, + FinancialCardContent, + FinancialCardDescription, + FinancialCardFooter, + FinancialCardHeader, + 
FinancialCardTitle, +} from '@/components/ui/financial-card'; +import { Button } from '@/components/ui/button'; +import { + ArrowDownRight, + ArrowUpRight, + ChevronLeft, + ChevronRight, + Lightbulb, + RefreshCw, + TrendingDown, + TrendingUp, + Wallet, + BarChart3, + History, +} from 'lucide-react'; +import { + getWeeklyDigestByDate, + getDigestHistory, + forceGenerateDigest, + type WeeklyDigest as DigestType, +} from '@/api/digest'; +import { formatMoney } from '@/lib/currency'; + +function currency(n: number, code?: string) { + return formatMoney(Number(n || 0), code); +} + +function mondayOfWeek(d: Date): Date { + const day = d.getDay(); + const diff = d.getDate() - day + (day === 0 ? -6 : 1); + return new Date(d.getFullYear(), d.getMonth(), diff); +} + +function formatWeekRange(start: string, end: string): string { + const s = new Date(start + 'T00:00:00'); + const e = new Date(end + 'T00:00:00'); + const opts: Intl.DateTimeFormatOptions = { month: 'short', day: 'numeric' }; + return `${s.toLocaleDateString(undefined, opts)} - ${e.toLocaleDateString(undefined, { ...opts, year: 'numeric' })}`; +} + +export function Digest() { + const [digest, setDigest] = useState(null); + const [history, setHistory] = useState([]); + const [loading, setLoading] = useState(true); + const [regenerating, setRegenerating] = useState(false); + const [error, setError] = useState(null); + const [currentMonday, setCurrentMonday] = useState(() => mondayOfWeek(new Date())); + + const dateStr = currentMonday.toISOString().slice(0, 10); + + useEffect(() => { + (async () => { + setLoading(true); + setError(null); + try { + const res = await getWeeklyDigestByDate(dateStr); + setDigest(res); + } catch (err: unknown) { + setError(err instanceof Error ? 
err.message : 'Failed to load digest'); + } finally { + setLoading(false); + } + })(); + }, [dateStr]); + + useEffect(() => { + (async () => { + try { + const h = await getDigestHistory(8); + setHistory(h); + } catch { + // non-critical + } + })(); + }, [digest]); + + const handlePrevWeek = () => { + setCurrentMonday((prev) => { + const d = new Date(prev); + d.setDate(d.getDate() - 7); + return d; + }); + }; + + const handleNextWeek = () => { + const nextMonday = new Date(currentMonday); + nextMonday.setDate(nextMonday.getDate() + 7); + const today = mondayOfWeek(new Date()); + if (nextMonday <= today) { + setCurrentMonday(nextMonday); + } + }; + + const handleThisWeek = () => { + setCurrentMonday(mondayOfWeek(new Date())); + }; + + const handleRegenerate = async () => { + setRegenerating(true); + try { + const res = await forceGenerateDigest(); + setDigest(res); + } catch (err: unknown) { + setError(err instanceof Error ? err.message : 'Failed to regenerate'); + } finally { + setRegenerating(false); + } + }; + + const trends = digest?.trends; + const topCategories = digest?.top_categories ?? []; + + const isCurrentWeek = + currentMonday.toISOString().slice(0, 10) === mondayOfWeek(new Date()).toISOString().slice(0, 10); + + const summaryCards = [ + { + title: 'Weekly Income', + amount: currency(digest?.total_income ?? 0), + change: trends + ? `${trends.income_change_pct >= 0 ? '+' : ''}${trends.income_change_pct.toFixed(1)}%` + : '--', + trend: trends?.income_trend ?? 'flat', + icon: TrendingUp, + description: 'vs last week', + }, + { + title: 'Weekly Expenses', + amount: currency(digest?.total_expenses ?? 0), + change: trends + ? `${trends.expense_change_pct >= 0 ? '+' : ''}${trends.expense_change_pct.toFixed(1)}%` + : '--', + trend: trends?.expense_trend === 'down' ? 'up' : 'down', // lower expenses is good + icon: TrendingDown, + description: 'vs last week', + }, + { + title: 'Net Flow', + amount: currency(digest?.net_flow ?? 0), + change: (digest?.net_flow ?? 
0) >= 0 ? 'Positive' : 'Negative', + trend: (digest?.net_flow ?? 0) >= 0 ? 'up' : 'down', + icon: Wallet, + description: 'This week', + }, + ] as const; + + return ( +
+
+
+
+

Weekly Digest

+

+ {digest + ? formatWeekRange(digest.week_start, digest.week_end) + : 'Smart financial summary'} +

+
+
+ + + + {isCurrentWeek && ( + + )} +
+
+
+ + {error && ( +
{error}
+ )} + + {/* Summary Cards */} +
+ {summaryCards.map((card, index) => ( + + +
+ + {card.title} + + +
+
+ +
+ {loading ? '...' : card.amount} +
+
+ {card.trend === 'up' ? ( + + ) : card.trend === 'down' ? ( + + ) : null} + + {card.change} + + {card.description} +
+
+
+ ))} +
+ +
+ {/* Top Spending Categories */} +
+ + +
+ + + Top Categories + +
+ Spending breakdown this week +
+ + {loading ? ( +
Loading...
+ ) : topCategories.length === 0 ? ( +
No spending data this week.
+ ) : ( +
+ {topCategories.map((cat, i) => ( +
+
+ {cat.category_name} + + {currency(cat.amount)} ({cat.share_pct.toFixed(0)}%) + +
+
+
+
+
+ ))} +
+ )} + + +
+ + {/* AI Insights */} +
+ + +
+ + + AI Insights + +
+ + Personalized analysis of your weekly finances + +
+ + {loading ? ( +
Analyzing your finances...
+ ) : digest?.ai_insights ? ( +
+ {digest.ai_insights} +
+ ) : ( +
+ No insights available. Add some transactions to get started. +
+ )} +
+
+ + {/* Week-over-Week Comparison */} + {trends && !loading && ( + + + + Week-over-Week Comparison + + + How this week compares to the previous one + + + +
+
+
Previous Week Income
+
+ {currency(trends.previous_week_income)} +
+
+
+
Previous Week Expenses
+
+ {currency(trends.previous_week_expenses)} +
+
+
+
Income Change
+
+ {trends.income_trend === 'up' ? ( + + ) : trends.income_trend === 'down' ? ( + + ) : null} + + {trends.income_change_pct >= 0 ? '+' : ''} + {trends.income_change_pct.toFixed(1)}% + +
+
+
+
Expense Change
+
+ {trends.expense_trend === 'up' ? ( + + ) : trends.expense_trend === 'down' ? ( + + ) : null} + + {trends.expense_change_pct >= 0 ? '+' : ''} + {trends.expense_change_pct.toFixed(1)}% + +
+
+
+
+
+ )} +
+
+ + {/* Past Digests */} + {history.length > 0 && ( +
+ + +
+ + + Past Digests + +
+ Browse your weekly financial history +
+ +
+ {history.map((h) => { + const isActive = h.week_start === digest?.week_start; + return ( + + ); + })} +
+
+ +
+
+ )} +
+ ); +} diff --git a/packages/backend/app/db/schema.sql b/packages/backend/app/db/schema.sql index 410189def..560f52416 100644 --- a/packages/backend/app/db/schema.sql +++ b/packages/backend/app/db/schema.sql @@ -123,3 +123,21 @@ CREATE TABLE IF NOT EXISTS audit_logs ( action VARCHAR(100) NOT NULL, created_at TIMESTAMP NOT NULL DEFAULT NOW() ); + +CREATE TABLE IF NOT EXISTS weekly_digests ( + id SERIAL PRIMARY KEY, + user_id INT NOT NULL REFERENCES users(id) ON DELETE CASCADE, + week_start DATE NOT NULL, + week_end DATE NOT NULL, + total_income NUMERIC(12,2) NOT NULL DEFAULT 0, + total_expenses NUMERIC(12,2) NOT NULL DEFAULT 0, + net_flow NUMERIC(12,2) NOT NULL DEFAULT 0, + top_categories TEXT, + trends TEXT, + ai_insights TEXT, + created_at TIMESTAMP NOT NULL DEFAULT NOW() +); +CREATE INDEX IF NOT EXISTS idx_weekly_digests_user_week + ON weekly_digests(user_id, week_start DESC); +CREATE UNIQUE INDEX IF NOT EXISTS idx_weekly_digests_user_week_unique + ON weekly_digests(user_id, week_start); diff --git a/packages/backend/app/models.py b/packages/backend/app/models.py index 64d448104..04b8649b5 100644 --- a/packages/backend/app/models.py +++ b/packages/backend/app/models.py @@ -133,3 +133,18 @@ class AuditLog(db.Model): user_id = db.Column(db.Integer, db.ForeignKey("users.id"), nullable=True) action = db.Column(db.String(100), nullable=False) created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False) + + +class WeeklyDigest(db.Model): + __tablename__ = "weekly_digests" + id = db.Column(db.Integer, primary_key=True) + user_id = db.Column(db.Integer, db.ForeignKey("users.id"), nullable=False) + week_start = db.Column(db.Date, nullable=False) + week_end = db.Column(db.Date, nullable=False) + total_income = db.Column(db.Numeric(12, 2), nullable=False, default=0) + total_expenses = db.Column(db.Numeric(12, 2), nullable=False, default=0) + net_flow = db.Column(db.Numeric(12, 2), nullable=False, default=0) + top_categories = db.Column(db.Text, 
nullable=True) # JSON string + trends = db.Column(db.Text, nullable=True) # JSON string + ai_insights = db.Column(db.Text, nullable=True) + created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False) diff --git a/packages/backend/app/routes/__init__.py b/packages/backend/app/routes/__init__.py index f13b0f897..4eb8ee8ec 100644 --- a/packages/backend/app/routes/__init__.py +++ b/packages/backend/app/routes/__init__.py @@ -7,6 +7,7 @@ from .categories import bp as categories_bp from .docs import bp as docs_bp from .dashboard import bp as dashboard_bp +from .digest import bp as digest_bp def register_routes(app: Flask): @@ -18,3 +19,4 @@ def register_routes(app: Flask): app.register_blueprint(categories_bp, url_prefix="/categories") app.register_blueprint(docs_bp, url_prefix="/docs") app.register_blueprint(dashboard_bp, url_prefix="/dashboard") + app.register_blueprint(digest_bp, url_prefix="/digest") diff --git a/packages/backend/app/routes/digest.py b/packages/backend/app/routes/digest.py new file mode 100644 index 000000000..168031d18 --- /dev/null +++ b/packages/backend/app/routes/digest.py @@ -0,0 +1,66 @@ +from datetime import date, timedelta +import logging + +from flask import Blueprint, jsonify, request +from flask_jwt_extended import jwt_required, get_jwt_identity + +from ..services.digest import generate_weekly_digest, get_digest_history + +bp = Blueprint("digest", __name__) +logger = logging.getLogger("finmind.digest") + + +def _parse_week_start(date_str: str) -> date | None: + """Parse an ISO date string and return the Monday of that week.""" + try: + d = date.fromisoformat(date_str) + return d - timedelta(days=d.weekday()) + except (ValueError, TypeError): + return None + + +@bp.get("/weekly") +@jwt_required() +def weekly_current(): + """Return the current week's digest, generating if it does not exist.""" + uid = int(get_jwt_identity()) + digest = generate_weekly_digest(uid) + return jsonify(digest) + + +@bp.get("/weekly/<date_str>") 
+@jwt_required() +def weekly_by_date(date_str: str): + """Return the digest for the week containing the given date.""" + uid = int(get_jwt_identity()) + week_start = _parse_week_start(date_str) + if week_start is None: + return jsonify(error="Invalid date format. Use YYYY-MM-DD."), 400 + digest = generate_weekly_digest(uid, week_start=week_start) + return jsonify(digest) + + +@bp.get("/history") +@jwt_required() +def history(): + """Return past weekly digests for the authenticated user.""" + uid = int(get_jwt_identity()) + limit_str = request.args.get("limit", "10") + try: + limit = max(1, min(50, int(limit_str))) + except (ValueError, TypeError): + limit = 10 + digests = get_digest_history(uid, limit=limit) + return jsonify(digests) + + +@bp.post("/generate") +@jwt_required() +def force_generate(): + """Force regenerate the current week's digest.""" + uid = int(get_jwt_identity()) + week_start_str = request.args.get("week_start") + week_start = _parse_week_start(week_start_str) if week_start_str else None + digest = generate_weekly_digest(uid, week_start=week_start, force=True) + logger.info("Digest force-regenerated user=%s", uid) + return jsonify(digest), 201 diff --git a/packages/backend/app/services/digest.py b/packages/backend/app/services/digest.py new file mode 100644 index 000000000..898d03f64 --- /dev/null +++ b/packages/backend/app/services/digest.py @@ -0,0 +1,333 @@ +import json +import logging +from datetime import date, timedelta + +from sqlalchemy import func + +from ..config import Settings +from ..extensions import db +from ..models import Category, Expense, WeeklyDigest +from .cache import cache_get, cache_set + +logger = logging.getLogger("finmind.digest") +_settings = Settings() + + +def _week_bounds(ref: date | None = None) -> tuple[date, date]: + """Return (Monday, Sunday) of the ISO week containing *ref*.""" + d = ref or date.today() + monday = d - timedelta(days=d.weekday()) + sunday = monday + timedelta(days=6) + return monday, sunday + + 
+def _weekly_digest_key(user_id: int, week_start: date) -> str: + return f"user:{user_id}:weekly_digest:{week_start.isoformat()}" + + +def _aggregate_week(uid: int, start: date, end: date) -> dict: + """Compute income, expenses, and category breakdown for a date range.""" + income = ( + db.session.query(func.coalesce(func.sum(Expense.amount), 0)) + .filter( + Expense.user_id == uid, + Expense.spent_at >= start, + Expense.spent_at <= end, + Expense.expense_type == "INCOME", + ) + .scalar() + ) + expenses = ( + db.session.query(func.coalesce(func.sum(Expense.amount), 0)) + .filter( + Expense.user_id == uid, + Expense.spent_at >= start, + Expense.spent_at <= end, + Expense.expense_type != "INCOME", + ) + .scalar() + ) + + category_rows = ( + db.session.query( + Expense.category_id, + func.coalesce(Category.name, "Uncategorized").label("category_name"), + func.coalesce(func.sum(Expense.amount), 0).label("total"), + ) + .outerjoin( + Category, + (Category.id == Expense.category_id) & (Category.user_id == uid), + ) + .filter( + Expense.user_id == uid, + Expense.spent_at >= start, + Expense.spent_at <= end, + Expense.expense_type != "INCOME", + ) + .group_by(Expense.category_id, Category.name) + .order_by(func.sum(Expense.amount).desc()) + .all() + ) + + total_exp = float(expenses or 0) + top_categories = [ + { + "category_id": r.category_id, + "category_name": r.category_name, + "amount": round(float(r.total or 0), 2), + "share_pct": ( + round((float(r.total or 0) / total_exp) * 100, 2) + if total_exp > 0 + else 0.0 + ), + } + for r in category_rows[:5] + ] + + return { + "total_income": round(float(income or 0), 2), + "total_expenses": round(total_exp, 2), + "top_categories": top_categories, + } + + +def _compute_trends(current: dict, previous: dict) -> dict: + """Compare current vs previous week and produce trend data.""" + curr_exp = current["total_expenses"] + prev_exp = previous["total_expenses"] + curr_inc = current["total_income"] + prev_inc = 
previous["total_income"] + + if prev_exp > 0: + expense_change_pct = round(((curr_exp - prev_exp) / prev_exp) * 100, 2) + else: + expense_change_pct = 0.0 if curr_exp == 0 else 100.0 + + if prev_inc > 0: + income_change_pct = round(((curr_inc - prev_inc) / prev_inc) * 100, 2) + else: + income_change_pct = 0.0 if curr_inc == 0 else 100.0 + + return { + "expense_change_pct": expense_change_pct, + "income_change_pct": income_change_pct, + "previous_week_expenses": prev_exp, + "previous_week_income": prev_inc, + "expense_trend": "up" if curr_exp > prev_exp else ("down" if curr_exp < prev_exp else "flat"), + "income_trend": "up" if curr_inc > prev_inc else ("down" if curr_inc < prev_inc else "flat"), + } + + +def _generate_ai_insights(uid: int, current: dict, trends: dict) -> str: + """Generate AI-powered insights using Gemini (or heuristic fallback).""" + from urllib import request as url_request + + api_key = (_settings.gemini_api_key or "").strip() + if not api_key: + return _heuristic_insights(current, trends) + + prompt = ( + "You are FinMind's financial coach. Analyze this weekly spending data and " + "provide 3-4 concise, actionable insights. Be specific with numbers. 
" + "Return plain text, not JSON.\n\n" + f"Weekly Income: {current['total_income']}\n" + f"Weekly Expenses: {current['total_expenses']}\n" + f"Net Flow: {round(current['total_income'] - current['total_expenses'], 2)}\n" + f"Top Categories: {json.dumps(current['top_categories'])}\n" + f"Expense Change vs Last Week: {trends['expense_change_pct']}%\n" + f"Income Change vs Last Week: {trends['income_change_pct']}%\n" + ) + + url = ( + "https://generativelanguage.googleapis.com/v1beta/models/" + f"{_settings.gemini_model}:generateContent?key={api_key}" + ) + body = json.dumps( + { + "contents": [{"parts": [{"text": prompt}]}], + "generationConfig": {"temperature": 0.3}, + } + ).encode("utf-8") + req = url_request.Request( + url=url, + data=body, + headers={"Content-Type": "application/json"}, + method="POST", + ) + try: + with url_request.urlopen(req, timeout=10) as resp: # nosec B310 + payload = json.loads(resp.read().decode("utf-8")) + text = ( + payload.get("candidates", [{}])[0] + .get("content", {}) + .get("parts", [{}])[0] + .get("text", "") + ) + return text.strip() if text.strip() else _heuristic_insights(current, trends) + except Exception: + logger.warning("Gemini unavailable for digest insights, using heuristic") + return _heuristic_insights(current, trends) + + +def _heuristic_insights(current: dict, trends: dict) -> str: + """Generate simple heuristic insights when Gemini is unavailable.""" + lines = [] + net = round(current["total_income"] - current["total_expenses"], 2) + + if net >= 0: + lines.append( + f"Positive cash flow this week: you saved {net:.2f}. Keep it up!" + ) + else: + lines.append( + f"Spending exceeded income by {abs(net):.2f} this week. " + "Review discretionary expenses." + ) + + change = trends["expense_change_pct"] + if change > 10: + lines.append( + f"Expenses increased {change:.1f}% compared to last week. " + "Check your top spending categories." + ) + elif change < -10: + lines.append( + f"Great job! 
Expenses decreased {abs(change):.1f}% compared to last week." + ) + + cats = current.get("top_categories", []) + if cats: + top = cats[0] + lines.append( + f"Biggest spending category: {top['category_name']} " + f"at {top['amount']:.2f} ({top['share_pct']:.0f}% of total)." + ) + + if not lines: + lines.append("Not enough data to generate insights this week.") + + return " ".join(lines) + + +def generate_weekly_digest( + user_id: int, week_start: date | None = None, force: bool = False +) -> dict: + """Generate (or retrieve cached) weekly digest for a user. + + Args: + user_id: The user's id. + week_start: Monday of the target week. Defaults to current week. + force: When True, regenerate even if a digest already exists. + + Returns: + Serialised digest dict. + """ + start, end = _week_bounds(week_start) + + # Check cache first (unless forcing regeneration) + cache_key = _weekly_digest_key(user_id, start) + if not force: + cached = cache_get(cache_key) + if cached: + return cached + + # Check DB for existing digest + if not force: + existing = ( + db.session.query(WeeklyDigest) + .filter(WeeklyDigest.user_id == user_id, WeeklyDigest.week_start == start) + .first() + ) + if existing: + result = _digest_to_dict(existing) + cache_set(cache_key, result, ttl_seconds=600) + return result + + # Aggregate current and previous week + current = _aggregate_week(user_id, start, end) + + prev_start = start - timedelta(days=7) + prev_end = end - timedelta(days=7) + previous = _aggregate_week(user_id, prev_start, prev_end) + + trends = _compute_trends(current, previous) + ai_insights = _generate_ai_insights(user_id, current, trends) + net_flow = round(current["total_income"] - current["total_expenses"], 2) + + # Upsert digest record + digest = ( + db.session.query(WeeklyDigest) + .filter(WeeklyDigest.user_id == user_id, WeeklyDigest.week_start == start) + .first() + ) + if digest: + digest.total_income = current["total_income"] + digest.total_expenses = current["total_expenses"] + 
digest.net_flow = net_flow + digest.top_categories = json.dumps(current["top_categories"]) + digest.trends = json.dumps(trends) + digest.ai_insights = ai_insights + else: + digest = WeeklyDigest( + user_id=user_id, + week_start=start, + week_end=end, + total_income=current["total_income"], + total_expenses=current["total_expenses"], + net_flow=net_flow, + top_categories=json.dumps(current["top_categories"]), + trends=json.dumps(trends), + ai_insights=ai_insights, + ) + db.session.add(digest) + + db.session.commit() + result = _digest_to_dict(digest) + cache_set(cache_key, result, ttl_seconds=600) + logger.info( + "Weekly digest generated user=%s week=%s", user_id, start.isoformat() + ) + return result + + +def get_digest_history(user_id: int, limit: int = 10) -> list[dict]: + """Return past weekly digests for a user, most recent first.""" + rows = ( + db.session.query(WeeklyDigest) + .filter(WeeklyDigest.user_id == user_id) + .order_by(WeeklyDigest.week_start.desc()) + .limit(limit) + .all() + ) + return [_digest_to_dict(r) for r in rows] + + +def _digest_to_dict(digest: WeeklyDigest) -> dict: + """Serialise a WeeklyDigest model instance to a plain dict.""" + top_categories = [] + if digest.top_categories: + try: + top_categories = json.loads(digest.top_categories) + except (json.JSONDecodeError, TypeError): + top_categories = [] + + trends = {} + if digest.trends: + try: + trends = json.loads(digest.trends) + except (json.JSONDecodeError, TypeError): + trends = {} + + return { + "id": digest.id, + "user_id": digest.user_id, + "week_start": digest.week_start.isoformat(), + "week_end": digest.week_end.isoformat(), + "total_income": float(digest.total_income), + "total_expenses": float(digest.total_expenses), + "net_flow": float(digest.net_flow), + "top_categories": top_categories, + "trends": trends, + "ai_insights": digest.ai_insights or "", + "created_at": digest.created_at.isoformat() if digest.created_at else None, + } diff --git 
a/packages/backend/tests/test_digest.py b/packages/backend/tests/test_digest.py new file mode 100644 index 000000000..c3a640412 --- /dev/null +++ b/packages/backend/tests/test_digest.py @@ -0,0 +1,232 @@ +from datetime import date, timedelta + + +def _monday_of(d): + """Return the Monday of the ISO week containing *d*.""" + return d - timedelta(days=d.weekday()) + + +def _seed_expense(client, auth_header, amount, desc, spent_at, expense_type="EXPENSE", category_id=None): + """Helper to create an expense via the API.""" + payload = { + "amount": amount, + "description": desc, + "date": spent_at.isoformat(), + "expense_type": expense_type, + } + if category_id is not None: + payload["category_id"] = category_id + r = client.post("/expenses", json=payload, headers=auth_header) + assert r.status_code == 201 + return r.get_json() + + +def test_weekly_digest_returns_current_week(client, auth_header): + """GET /digest/weekly returns a digest for the current week.""" + today = date.today() + _seed_expense(client, auth_header, 1000, "Salary", today, expense_type="INCOME") + _seed_expense(client, auth_header, 200, "Groceries", today, expense_type="EXPENSE") + + r = client.get("/digest/weekly", headers=auth_header) + assert r.status_code == 200 + payload = r.get_json() + + assert "week_start" in payload + assert "week_end" in payload + assert payload["total_income"] >= 1000 + assert payload["total_expenses"] >= 200 + assert "net_flow" in payload + assert "top_categories" in payload + assert "trends" in payload + assert "ai_insights" in payload + + +def test_weekly_digest_by_date(client, auth_header): + """GET /digest/weekly/ returns digest for the week containing that date.""" + today = date.today() + _seed_expense(client, auth_header, 500, "Freelance", today, expense_type="INCOME") + _seed_expense(client, auth_header, 100, "Coffee", today, expense_type="EXPENSE") + + r = client.get(f"/digest/weekly/{today.isoformat()}", headers=auth_header) + assert r.status_code == 200 + 
payload = r.get_json() + + monday = _monday_of(today) + assert payload["week_start"] == monday.isoformat() + assert payload["total_income"] >= 500 + assert payload["total_expenses"] >= 100 + + +def test_weekly_digest_invalid_date_returns_400(client, auth_header): + """GET /digest/weekly/ returns 400.""" + r = client.get("/digest/weekly/not-a-date", headers=auth_header) + assert r.status_code == 400 + assert "error" in r.get_json() + + +def test_weekly_digest_trends_comparison(client, auth_header): + """Trends should compare current vs previous week.""" + today = date.today() + monday = _monday_of(today) + prev_monday = monday - timedelta(days=7) + + # Previous week: spend 300 + _seed_expense(client, auth_header, 300, "Prev week food", prev_monday, expense_type="EXPENSE") + + # Current week: spend 150 + _seed_expense(client, auth_header, 150, "Curr week food", monday, expense_type="EXPENSE") + + r = client.get("/digest/weekly", headers=auth_header) + assert r.status_code == 200 + payload = r.get_json() + + trends = payload["trends"] + assert "expense_change_pct" in trends + # Expenses decreased from 300 to 150 = -50% + assert trends["expense_change_pct"] == -50.0 + assert trends["expense_trend"] == "down" + assert trends["previous_week_expenses"] == 300.0 + + +def test_weekly_digest_top_categories(client, auth_header): + """Top categories should list spending by category with percentages.""" + today = date.today() + + r = client.post("/categories", json={"name": "Food"}, headers=auth_header) + assert r.status_code == 201 + food_id = r.get_json()["id"] + + r = client.post("/categories", json={"name": "Transport"}, headers=auth_header) + assert r.status_code == 201 + transport_id = r.get_json()["id"] + + _seed_expense(client, auth_header, 400, "Groceries", today, category_id=food_id) + _seed_expense(client, auth_header, 100, "Bus pass", today, category_id=transport_id) + + r = client.get("/digest/weekly", headers=auth_header) + assert r.status_code == 200 + payload = 
r.get_json() + + cats = payload["top_categories"] + assert isinstance(cats, list) + assert len(cats) >= 2 + + # Food should be first (highest spend) + assert cats[0]["category_name"] == "Food" + assert cats[0]["amount"] == 400.0 + # Share should be 80% (400 out of 500) + assert cats[0]["share_pct"] == 80.0 + + +def test_digest_history_returns_past_digests(client, auth_header): + """GET /digest/history returns previously generated digests.""" + today = date.today() + monday = _monday_of(today) + prev_monday = monday - timedelta(days=7) + + _seed_expense(client, auth_header, 100, "Week 1", prev_monday) + _seed_expense(client, auth_header, 200, "Week 2", monday) + + # Generate digests for both weeks + client.get(f"/digest/weekly/{prev_monday.isoformat()}", headers=auth_header) + client.get(f"/digest/weekly/{monday.isoformat()}", headers=auth_header) + + r = client.get("/digest/history", headers=auth_header) + assert r.status_code == 200 + history = r.get_json() + + assert isinstance(history, list) + assert len(history) >= 2 + # Most recent first + assert history[0]["week_start"] >= history[1]["week_start"] + + +def test_digest_history_respects_limit(client, auth_header): + """GET /digest/history?limit=1 returns only one digest.""" + today = date.today() + _seed_expense(client, auth_header, 50, "Expense", today) + + # Generate current week digest + client.get("/digest/weekly", headers=auth_header) + + r = client.get("/digest/history?limit=1", headers=auth_header) + assert r.status_code == 200 + history = r.get_json() + assert len(history) <= 1 + + +def test_force_generate_creates_new_digest(client, auth_header): + """POST /digest/generate should regenerate the digest.""" + today = date.today() + _seed_expense(client, auth_header, 500, "Initial", today, expense_type="INCOME") + + r = client.post("/digest/generate", headers=auth_header) + assert r.status_code == 201 + payload = r.get_json() + assert payload["total_income"] >= 500 + + # Add more income and force 
regenerate + _seed_expense(client, auth_header, 300, "Extra", today, expense_type="INCOME") + r = client.post("/digest/generate", headers=auth_header) + assert r.status_code == 201 + payload = r.get_json() + assert payload["total_income"] >= 800 + + +def test_digest_requires_auth(client): + """All digest endpoints should require JWT authentication.""" + endpoints = [ + ("GET", "/digest/weekly"), + ("GET", "/digest/weekly/2026-01-01"), + ("GET", "/digest/history"), + ("POST", "/digest/generate"), + ] + for method, path in endpoints: + if method == "GET": + r = client.get(path) + else: + r = client.post(path) + assert r.status_code in (401, 422), f"{method} {path} should require auth" + + +def test_digest_caching_returns_same_result(client, auth_header): + """Repeated GET calls should return the same digest (from cache/DB).""" + today = date.today() + _seed_expense(client, auth_header, 250, "Cached test", today) + + r1 = client.get("/digest/weekly", headers=auth_header) + assert r1.status_code == 200 + d1 = r1.get_json() + + r2 = client.get("/digest/weekly", headers=auth_header) + assert r2.status_code == 200 + d2 = r2.get_json() + + assert d1["total_expenses"] == d2["total_expenses"] + assert d1["week_start"] == d2["week_start"] + + +def test_digest_net_flow_calculation(client, auth_header): + """Net flow should equal income minus expenses.""" + today = date.today() + _seed_expense(client, auth_header, 1000, "Salary", today, expense_type="INCOME") + _seed_expense(client, auth_header, 350, "Rent", today, expense_type="EXPENSE") + _seed_expense(client, auth_header, 150, "Utils", today, expense_type="EXPENSE") + + r = client.post("/digest/generate", headers=auth_header) + assert r.status_code == 201 + payload = r.get_json() + + expected_net = payload["total_income"] - payload["total_expenses"] + assert abs(payload["net_flow"] - expected_net) < 0.01 + + +def test_digest_empty_week(client, auth_header): + """A week with no data should still return a valid digest.""" + # 
Request a far-past week with no data + r = client.get("/digest/weekly/2020-01-06", headers=auth_header) + assert r.status_code == 200 + payload = r.get_json() + assert payload["total_income"] == 0 + assert payload["total_expenses"] == 0 + assert payload["net_flow"] == 0