diff --git a/.env.example b/.env.example index a57f9a5..5bb33d6 100644 --- a/.env.example +++ b/.env.example @@ -83,3 +83,10 @@ DEMO_MODE=false # INSTACART_AFFILIATE_ID=circuitforge # Walmart Impact network affiliate ID (inline, path-based redirect) # WALMART_AFFILIATE_ID= + + +# Community PostgreSQL — shared across CF products (cloud only; leave unset for local dev) +# Points at cf-orch's cf-community-postgres container (port 5434 on the orch host). +# When unset, community write paths fail soft with a plain-language message. +# COMMUNITY_DB_URL=postgresql://cf_community:changeme@cf-orch-host:5434/cf_community +# COMMUNITY_PSEUDONYM_SALT=change-this-to-a-random-32-char-string diff --git a/Dockerfile b/Dockerfile index 23f8899..611c924 100644 --- a/Dockerfile +++ b/Dockerfile @@ -16,6 +16,12 @@ COPY kiwi/environment.yml . RUN conda env create -f environment.yml COPY kiwi/ ./kiwi/ + +# Remove the gitignored .env file that may exist locally — defense-in-depth. +# The parent .dockerignore should exclude it, but an explicit rm guarantees +# it never ends up in the cloud image regardless of .dockerignore placement. 
+RUN rm -f /app/kiwi/.env + # Install cf-core into the kiwi env BEFORE installing kiwi (kiwi lists it as a dep) RUN conda run -n kiwi pip install --no-cache-dir -e /app/circuitforge-core WORKDIR /app/kiwi diff --git a/README.md b/README.md index 52894ac..a7d06c0 100644 --- a/README.md +++ b/README.md @@ -10,6 +10,8 @@ Scan barcodes, photograph receipts, and get recipe ideas based on what you alrea **Status:** Beta · CircuitForge LLC +**[Documentation](https://docs.circuitforge.tech/kiwi/)** · [circuitforge.tech](https://circuitforge.tech) + --- ## What it does @@ -21,7 +23,7 @@ Scan barcodes, photograph receipts, and get recipe ideas based on what you alrea - **Receipt OCR** — extract line items from receipt photos automatically (Paid tier, BYOK-unlockable) - **Recipe suggestions** — four levels from pantry-match to full LLM generation (Paid tier, BYOK-unlockable) - **Style auto-classifier** — LLM suggests style tags (comforting, hands-off, quick, etc.) for saved recipes (Paid tier, BYOK-unlockable) -- **Leftover mode** — prioritize nearly-expired items in recipe ranking (Premium tier) +- **Leftover mode** — prioritize nearly-expired items in recipe ranking (Free, 5/day; unlimited at Paid+) - **LLM backend config** — configure inference via `circuitforge-core` env-var system; BYOK unlocks Paid AI features at any tier - **Feedback FAB** — in-app feedback button; status probed on load, hidden if CF feedback endpoint unreachable @@ -68,7 +70,7 @@ cp .env.example .env | LLM style auto-classifier | — | BYOK | ✓ | | Meal planning | — | ✓ | ✓ | | Multi-household | — | — | ✓ | -| Leftover mode | — | — | ✓ | +| Leftover mode (5/day) | ✓ | ✓ | ✓ | BYOK = bring your own LLM backend (configure `~/.config/circuitforge/llm.yaml`) diff --git a/app/api/endpoints/community.py b/app/api/endpoints/community.py new file mode 100644 index 0000000..72a5592 --- /dev/null +++ b/app/api/endpoints/community.py @@ -0,0 +1,351 @@ +# app/api/endpoints/community.py +# MIT License + +from 
__future__ import annotations + +import asyncio +import logging +import re +import sqlite3 +from datetime import datetime, timezone + +from fastapi import APIRouter, Depends, HTTPException, Request, Response + +from app.cloud_session import CloudUser, get_session +from app.core.config import settings +from app.db.store import Store +from app.services.community.feed import posts_to_rss + +logger = logging.getLogger(__name__) + +router = APIRouter(prefix="/community", tags=["community"]) + +_community_store = None + + +def _get_community_store(): + return _community_store + + +def init_community_store(community_db_url: str | None) -> None: + global _community_store + if not community_db_url: + logger.info( + "COMMUNITY_DB_URL not set — community write features disabled. " + "Browse still works via cloud feed." + ) + return + from circuitforge_core.community import CommunityDB + from app.services.community.community_store import KiwiCommunityStore + db = CommunityDB(dsn=community_db_url) + db.run_migrations() + _community_store = KiwiCommunityStore(db) + logger.info("Community store initialized.") + + +def _visible(post, session=None) -> bool: + """Return False for premium-tier posts when the session is not paid/premium.""" + tier = getattr(post, "tier", None) + if tier == "premium": + if session is None or getattr(session, "tier", None) not in ("paid", "premium"): + return False + return True + + +@router.get("/posts") +async def list_posts( + post_type: str | None = None, + dietary_tags: str | None = None, + allergen_exclude: str | None = None, + page: int = 1, + page_size: int = 20, +): + store = _get_community_store() + if store is None: + return {"posts": [], "total": 0, "note": "Community DB not available on this instance."} + + dietary = [t.strip() for t in dietary_tags.split(",")] if dietary_tags else None + allergen_ex = [t.strip() for t in allergen_exclude.split(",")] if allergen_exclude else None + offset = (page - 1) * min(page_size, 100) + + posts = await 
asyncio.to_thread( + store.list_posts, + limit=min(page_size, 100), + offset=offset, + post_type=post_type, + dietary_tags=dietary, + allergen_exclude=allergen_ex, + ) + return {"posts": [_post_to_dict(p) for p in posts if _visible(p)], "page": page, "page_size": page_size} + + +@router.get("/posts/{slug}") +async def get_post(slug: str, request: Request): + store = _get_community_store() + if store is None: + raise HTTPException(status_code=503, detail="Community DB not available on this instance.") + + post = await asyncio.to_thread(store.get_post_by_slug, slug) + if post is None: + raise HTTPException(status_code=404, detail="Post not found.") + + accept = request.headers.get("accept", "") + if "application/activity+json" in accept or "application/ld+json" in accept: + from app.services.community.ap_compat import post_to_ap_json_ld + base_url = str(request.base_url).rstrip("/") + return post_to_ap_json_ld(_post_to_dict(post), base_url=base_url) + + return _post_to_dict(post) + + +@router.get("/feed.rss") +async def get_rss_feed(request: Request): + store = _get_community_store() + posts_data: list[dict] = [] + if store is not None: + posts = await asyncio.to_thread(store.list_posts, limit=50) + posts_data = [_post_to_dict(p) for p in posts] + + base_url = str(request.base_url).rstrip("/") + rss = posts_to_rss(posts_data, base_url=base_url) + return Response(content=rss, media_type="application/rss+xml; charset=utf-8") + + +@router.get("/local-feed") +async def local_feed(): + store = _get_community_store() + if store is None: + return [] + posts = await asyncio.to_thread(store.list_posts, limit=50) + return [_post_to_dict(p) for p in posts] + + +@router.get("/hall-of-chaos") +async def hall_of_chaos(): + """Hidden easter egg endpoint -- returns the 10 most chaotic bloopers.""" + store = _get_community_store() + if store is None: + return {"posts": [], "chaos_level": 0} + posts = await asyncio.to_thread( + store.list_posts, limit=10, post_type="recipe_blooper" + 
) + return { + "posts": [_post_to_dict(p) for p in posts], + "chaos_level": len(posts), + } + + +_VALID_POST_TYPES = {"plan", "recipe_success", "recipe_blooper"} +_MAX_TITLE_LEN = 200 +_MAX_TEXT_LEN = 2000 + + +def _validate_publish_body(body: dict) -> None: + """Raise HTTPException(422) for any invalid fields in a publish request.""" + post_type = body.get("post_type", "plan") + if post_type not in _VALID_POST_TYPES: + raise HTTPException( + status_code=422, + detail=f"post_type must be one of: {', '.join(sorted(_VALID_POST_TYPES))}", + ) + title = body.get("title") or "" + if len(title) > _MAX_TITLE_LEN: + raise HTTPException(status_code=422, detail=f"title exceeds {_MAX_TITLE_LEN} character limit.") + for field in ("description", "outcome_notes", "recipe_name"): + value = body.get(field) + if value and len(str(value)) > _MAX_TEXT_LEN: + raise HTTPException(status_code=422, detail=f"{field} exceeds {_MAX_TEXT_LEN} character limit.") + photo_url = body.get("photo_url") + if photo_url and not str(photo_url).startswith("https://"): + raise HTTPException(status_code=422, detail="photo_url must be an https:// URL.") + + +@router.post("/posts", status_code=201) +async def publish_post(body: dict, session: CloudUser = Depends(get_session)): + from app.tiers import can_use + if not can_use("community_publish", session.tier, session.has_byok): + raise HTTPException(status_code=402, detail="Community publishing requires Paid tier.") + + _validate_publish_body(body) + + store = _get_community_store() + if store is None: + raise HTTPException( + status_code=503, + detail="This Kiwi instance is not connected to a community database. 
" + "Publishing is only available on cloud instances.", + ) + + from app.services.community.community_store import get_or_create_pseudonym + def _get_pseudonym(): + s = Store(session.db) + try: + return get_or_create_pseudonym( + store=s, + directus_user_id=session.user_id, + requested_name=body.get("pseudonym_name"), + ) + finally: + s.close() + try: + pseudonym = await asyncio.to_thread(_get_pseudonym) + except ValueError as exc: + raise HTTPException(status_code=422, detail=str(exc)) from exc + + recipe_ids = [slot["recipe_id"] for slot in body.get("slots", []) if slot.get("recipe_id")] + from app.services.community.element_snapshot import compute_snapshot + def _snapshot(): + s = Store(session.db) + try: + return compute_snapshot(recipe_ids=recipe_ids, store=s) + finally: + s.close() + snapshot = await asyncio.to_thread(_snapshot) + + post_type = body.get("post_type", "plan") + slug_title = re.sub(r"[^a-z0-9]+", "-", (body.get("title") or "plan").lower()).strip("-") + today = datetime.now(timezone.utc).strftime("%Y-%m-%d") + slug = f"kiwi-{_post_type_prefix(post_type)}-{pseudonym.lower().replace(' ', '')}-{today}-{slug_title}"[:120] + + from circuitforge_core.community.models import CommunityPost + post = CommunityPost( + slug=slug, + pseudonym=pseudonym, + post_type=post_type, + published=datetime.now(timezone.utc), + title=(body.get("title") or "Untitled")[:_MAX_TITLE_LEN], + description=body.get("description"), + photo_url=body.get("photo_url"), + slots=body.get("slots", []), + recipe_id=body.get("recipe_id"), + recipe_name=body.get("recipe_name"), + level=body.get("level"), + outcome_notes=body.get("outcome_notes"), + seasoning_score=snapshot.seasoning_score, + richness_score=snapshot.richness_score, + brightness_score=snapshot.brightness_score, + depth_score=snapshot.depth_score, + aroma_score=snapshot.aroma_score, + structure_score=snapshot.structure_score, + texture_profile=snapshot.texture_profile, + dietary_tags=list(snapshot.dietary_tags), + 
allergen_flags=list(snapshot.allergen_flags), + flavor_molecules=list(snapshot.flavor_molecules), + fat_pct=snapshot.fat_pct, + protein_pct=snapshot.protein_pct, + moisture_pct=snapshot.moisture_pct, + ) + + try: + inserted = await asyncio.to_thread(store.insert_post, post) + except sqlite3.IntegrityError as exc: + raise HTTPException( + status_code=409, + detail="A post with this title already exists today. Try a different title.", + ) from exc + return _post_to_dict(inserted) + + +@router.delete("/posts/{slug}", status_code=204) +async def delete_post(slug: str, session: CloudUser = Depends(get_session)): + store = _get_community_store() + if store is None: + raise HTTPException(status_code=503, detail="Community DB not available.") + + def _get_pseudonym(): + s = Store(session.db) + try: + return s.get_current_pseudonym(session.user_id) + finally: + s.close() + pseudonym = await asyncio.to_thread(_get_pseudonym) + if not pseudonym: + raise HTTPException(status_code=400, detail="No pseudonym set. 
Cannot delete posts.") + + deleted = await asyncio.to_thread(store.delete_post, slug=slug, pseudonym=pseudonym) + if not deleted: + raise HTTPException(status_code=404, detail="Post not found or you are not the author.") + + +@router.post("/posts/{slug}/fork", status_code=201) +async def fork_post(slug: str, session: CloudUser = Depends(get_session)): + store = _get_community_store() + if store is None: + raise HTTPException(status_code=503, detail="Community DB not available.") + + post = await asyncio.to_thread(store.get_post_by_slug, slug) + if post is None: + raise HTTPException(status_code=404, detail="Post not found.") + if post.post_type != "plan": + raise HTTPException(status_code=400, detail="Only plan posts can be forked as a meal plan.") + + required_slot_keys = {"day", "meal_type", "recipe_id"} + if any(not required_slot_keys.issubset(slot) for slot in post.slots): + raise HTTPException(status_code=400, detail="Post contains malformed slots and cannot be forked.") + + from datetime import date + week_start = date.today().strftime("%Y-%m-%d") + + def _create_plan(): + s = Store(session.db) + try: + meal_types = list({slot["meal_type"] for slot in post.slots}) + plan = s.create_meal_plan(week_start=week_start, meal_types=meal_types or ["dinner"]) + for slot in post.slots: + s.assign_recipe_to_slot( + plan_id=plan["id"], + day_of_week=slot["day"], + meal_type=slot["meal_type"], + recipe_id=slot["recipe_id"], + ) + return plan + finally: + s.close() + + plan = await asyncio.to_thread(_create_plan) + return {"plan_id": plan["id"], "week_start": plan["week_start"], "forked_from": slug} + + +@router.post("/posts/{slug}/fork-adapt", status_code=201) +async def fork_adapt_post(slug: str, session: CloudUser = Depends(get_session)): + from app.tiers import can_use + if not can_use("community_fork_adapt", session.tier, session.has_byok): + raise HTTPException(status_code=402, detail="Fork with adaptation requires Paid tier or BYOK.") + # Stub: full LLM adaptation 
deferred + raise HTTPException(status_code=501, detail="Fork-adapt not yet implemented.") + + +def _post_to_dict(post) -> dict: + return { + "slug": post.slug, + "pseudonym": post.pseudonym, + "post_type": post.post_type, + "published": post.published.isoformat() if hasattr(post.published, "isoformat") else str(post.published), + "title": post.title, + "description": post.description, + "photo_url": post.photo_url, + "slots": list(post.slots), + "recipe_id": post.recipe_id, + "recipe_name": post.recipe_name, + "level": post.level, + "outcome_notes": post.outcome_notes, + "element_profiles": { + "seasoning_score": post.seasoning_score, + "richness_score": post.richness_score, + "brightness_score": post.brightness_score, + "depth_score": post.depth_score, + "aroma_score": post.aroma_score, + "structure_score": post.structure_score, + "texture_profile": post.texture_profile, + }, + "dietary_tags": list(post.dietary_tags), + "allergen_flags": list(post.allergen_flags), + "flavor_molecules": list(post.flavor_molecules), + "fat_pct": post.fat_pct, + "protein_pct": post.protein_pct, + "moisture_pct": post.moisture_pct, + } + + +def _post_type_prefix(post_type: str) -> str: + return {"plan": "plan", "recipe_success": "success", "recipe_blooper": "blooper"}.get(post_type, "post") diff --git a/app/api/endpoints/imitate.py b/app/api/endpoints/imitate.py new file mode 100644 index 0000000..5b08731 --- /dev/null +++ b/app/api/endpoints/imitate.py @@ -0,0 +1,185 @@ +"""Kiwi — /api/v1/imitate/samples endpoint for Avocet Imitate tab. + +Returns the actual assembled prompt Kiwi sends to its LLM for recipe generation, +including the full pantry context (expiry-first ordering), dietary constraints +(from user_settings if present), and the Level 3 format instructions. 
+""" +from __future__ import annotations + +from fastapi import APIRouter, Depends + +from app.cloud_session import get_session, CloudUser +from app.db.store import Store + +router = APIRouter() + +_LEVEL3_FORMAT = [ + "", + "Reply using EXACTLY this plain-text format — no markdown, no bold, no extra commentary:", + "Title: ", + "Ingredients: ", + "Directions:", + "1. ", + "2. ", + "3. ", + "Notes: ", +] + +_LEVEL4_FORMAT = [ + "", + "Reply using EXACTLY this plain-text format — no markdown, no bold:", + "Title: ", + "Ingredients: ", + "Directions:", + "1. ", + "2. ", + "Notes: ", +] + + +def _read_user_settings(store: Store) -> dict: + """Read all key/value pairs from user_settings table.""" + try: + rows = store.conn.execute("SELECT key, value FROM user_settings").fetchall() + return {r["key"]: r["value"] for r in rows} + except Exception: + return {} + + +def _build_recipe_prompt( + pantry_names: list[str], + expiring_names: list[str], + constraints: list[str], + allergies: list[str], + level: int = 3, +) -> str: + """Assemble the recipe generation prompt matching Kiwi's Level 3/4 format.""" + # Expiring items first, then remaining pantry items (deduped) + expiring_set = set(expiring_names) + ordered = list(expiring_names) + [n for n in pantry_names if n not in expiring_set] + + if not ordered: + ordered = pantry_names + + if level == 4: + lines = [ + "Surprise me with a creative, unexpected recipe.", + "Only use ingredients that make culinary sense together. " + "Do not force flavoured/sweetened items (vanilla yoghurt, flavoured syrups, jam) into savoury dishes.", + f"Ingredients available: {', '.join(ordered)}", + ] + if constraints: + lines.append(f"Constraints: {', '.join(constraints)}") + if allergies: + lines.append(f"Must NOT contain: {', '.join(allergies)}") + lines.append("Treat any mystery ingredient as a wildcard — use your imagination.") + lines += _LEVEL4_FORMAT + else: + lines = [ + "You are a creative chef. 
Generate a recipe using the ingredients below.", + "IMPORTANT: When you use a pantry item, list it in Ingredients using its exact name " + "from the pantry list. Do not add adjectives, quantities, or cooking states " + "(e.g. use 'butter', not 'unsalted butter' or '2 tbsp butter').", + "IMPORTANT: Only use pantry items that make culinary sense for the dish. " + "Do NOT force flavoured/sweetened items (vanilla yoghurt, fruit yoghurt, jam, " + "dessert sauces, flavoured syrups) into savoury dishes.", + "IMPORTANT: Do not default to the same ingredient repeatedly across dishes. " + "If a pantry item does not genuinely improve this specific dish, leave it out.", + "", + f"Pantry items: {', '.join(ordered)}", + ] + if expiring_names: + lines.append( + f"Priority — use these soon (expiring): {', '.join(expiring_names)}" + ) + if constraints: + lines.append(f"Dietary constraints: {', '.join(constraints)}") + if allergies: + lines.append(f"IMPORTANT — must NOT contain: {', '.join(allergies)}") + lines += _LEVEL3_FORMAT + + return "\n".join(lines) + + +@router.get("/samples") +async def imitate_samples( + limit: int = 5, + level: int = 3, + session: CloudUser = Depends(get_session), +): + """Return assembled recipe generation prompts for Avocet's Imitate tab. 
+ + Each sample includes: + system_prompt empty (Kiwi uses no system context) + input_text full Level 3/4 prompt with pantry items, expiring items, + dietary constraints, and format instructions + output_text empty (no prior LLM output stored per-request) + + level: 3 (structured with element biasing context) or 4 (wildcard creative) + limit: max number of distinct prompt variants to return (varies by pantry state) + """ + limit = max(1, min(limit, 10)) + store = Store(session.db) + + # Full pantry for context + all_items = store.list_inventory() + pantry_names = [r["product_name"] for r in all_items if r.get("product_name")] + + # Expiring items as priority ingredients + expiring = store.expiring_soon(days=14) + expiring_names = [r["product_name"] for r in expiring if r.get("product_name")] + + # Dietary constraints from user_settings (keys: dietary_constraints, dietary_allergies) + settings = _read_user_settings(store) + import json as _json + try: + constraints = _json.loads(settings.get("dietary_constraints", "[]")) or [] + except Exception: + constraints = [] + try: + allergies = _json.loads(settings.get("dietary_allergies", "[]")) or [] + except Exception: + allergies = [] + + if not pantry_names: + return {"samples": [], "total": 0, "type": f"recipe_level{level}"} + + # Build prompt variants: one per expiring item as the "anchor" ingredient, + # plus one general pantry prompt. Cap at limit. 
+ samples = [] + seen_anchors: set[str] = set() + + for item in (expiring[:limit - 1] if expiring else []): + anchor = item.get("product_name", "") + if not anchor or anchor in seen_anchors: + continue + seen_anchors.add(anchor) + + # Put this item first in the list for the prompt + ordered_expiring = [anchor] + [n for n in expiring_names if n != anchor] + prompt = _build_recipe_prompt(pantry_names, ordered_expiring, constraints, allergies, level) + + samples.append({ + "id": item.get("id", 0), + "anchor_item": anchor, + "expiring_count": len(expiring_names), + "pantry_count": len(pantry_names), + "system_prompt": "", + "input_text": prompt, + "output_text": "", + }) + + # One general prompt using all expiring as priority + if len(samples) < limit: + prompt = _build_recipe_prompt(pantry_names, expiring_names, constraints, allergies, level) + samples.append({ + "id": 0, + "anchor_item": "full pantry", + "expiring_count": len(expiring_names), + "pantry_count": len(pantry_names), + "system_prompt": "", + "input_text": prompt, + "output_text": "", + }) + + return {"samples": samples, "total": len(samples), "type": f"recipe_level{level}"} diff --git a/app/api/endpoints/recipes.py b/app/api/endpoints/recipes.py index dd190f7..557500c 100644 --- a/app/api/endpoints/recipes.py +++ b/app/api/endpoints/recipes.py @@ -9,7 +9,19 @@ from fastapi import APIRouter, Depends, HTTPException, Query from app.cloud_session import CloudUser, get_session from app.db.store import Store -from app.models.schemas.recipe import RecipeRequest, RecipeResult +from app.models.schemas.recipe import ( + AssemblyTemplateOut, + BuildRequest, + RecipeRequest, + RecipeResult, + RecipeSuggestion, + RoleCandidatesResponse, +) +from app.services.recipe.assembly_recipes import ( + build_from_selection, + get_role_candidates, + get_templates_for_api, +) from app.services.recipe.browser_domains import ( DOMAINS, get_category_names, @@ -143,6 +155,96 @@ async def browse_recipes( return await 
asyncio.to_thread(_browse, session.db) +@router.get("/templates", response_model=list[AssemblyTemplateOut]) +async def list_assembly_templates() -> list[dict]: + """Return all 13 assembly templates with ordered role sequences. + + Cache-friendly: static data, no per-user state. + """ + return get_templates_for_api() + + +@router.get("/template-candidates", response_model=RoleCandidatesResponse) +async def get_template_role_candidates( + template_id: str = Query(..., description="Template slug, e.g. 'burrito_taco'"), + role: str = Query(..., description="Role display name, e.g. 'protein'"), + prior_picks: str = Query(default="", description="Comma-separated prior selections"), + session: CloudUser = Depends(get_session), +) -> dict: + """Return pantry-matched candidates for one wizard step.""" + def _get(db_path: Path) -> dict: + store = Store(db_path) + try: + items = store.list_inventory(status="available") + pantry_set = { + item["product_name"] + for item in items + if item.get("product_name") + } + pantry_list = list(pantry_set) + prior = [p.strip() for p in prior_picks.split(",") if p.strip()] + profile_index = store.get_element_profiles(pantry_list + prior) + return get_role_candidates( + template_slug=template_id, + role_display=role, + pantry_set=pantry_set, + prior_picks=prior, + profile_index=profile_index, + ) + finally: + store.close() + + return await asyncio.to_thread(_get, session.db) + + +@router.post("/build", response_model=RecipeSuggestion) +async def build_recipe( + req: BuildRequest, + session: CloudUser = Depends(get_session), +) -> RecipeSuggestion: + """Build a recipe from explicit role selections.""" + def _build(db_path: Path) -> RecipeSuggestion | None: + store = Store(db_path) + try: + items = store.list_inventory(status="available") + pantry_set = { + item["product_name"] + for item in items + if item.get("product_name") + } + suggestion = build_from_selection( + template_slug=req.template_id, + role_overrides=req.role_overrides, + 
pantry_set=pantry_set, + ) + if suggestion is None: + return None + # Persist to recipes table so the result can be saved/bookmarked. + # external_id encodes template + selections for stable dedup. + import hashlib as _hl, json as _js + sel_hash = _hl.md5( + _js.dumps(req.role_overrides, sort_keys=True).encode() + ).hexdigest()[:8] + external_id = f"assembly:{req.template_id}:{sel_hash}" + real_id = store.upsert_built_recipe( + external_id=external_id, + title=suggestion.title, + ingredients=suggestion.matched_ingredients, + directions=suggestion.directions, + ) + return suggestion.model_copy(update={"id": real_id}) + finally: + store.close() + + result = await asyncio.to_thread(_build, session.db) + if result is None: + raise HTTPException( + status_code=404, + detail="Template not found or required ingredient missing.", + ) + return result + + @router.get("/{recipe_id}") async def get_recipe(recipe_id: int, session: CloudUser = Depends(get_session)) -> dict: def _get(db_path: Path, rid: int) -> dict | None: diff --git a/app/api/routes.py b/app/api/routes.py index e0ea172..69b5190 100644 --- a/app/api/routes.py +++ b/app/api/routes.py @@ -1,17 +1,20 @@ from fastapi import APIRouter -from app.api.endpoints import health, receipts, export, inventory, ocr, recipes, settings, staples, feedback, household, saved_recipes, meal_plans +from app.api.endpoints import health, receipts, export, inventory, ocr, recipes, settings, staples, feedback, household, saved_recipes, imitate, meal_plans +from app.api.endpoints.community import router as community_router api_router = APIRouter() -api_router.include_router(health.router, prefix="/health", tags=["health"]) -api_router.include_router(receipts.router, prefix="/receipts", tags=["receipts"]) -api_router.include_router(ocr.router, prefix="/receipts", tags=["ocr"]) -api_router.include_router(export.router, tags=["export"]) -api_router.include_router(inventory.router, prefix="/inventory", tags=["inventory"]) 
-api_router.include_router(recipes.router, prefix="/recipes", tags=["recipes"]) -api_router.include_router(settings.router, prefix="/settings", tags=["settings"]) -api_router.include_router(staples.router, prefix="/staples", tags=["staples"]) -api_router.include_router(feedback.router, prefix="/feedback", tags=["feedback"]) -api_router.include_router(household.router, prefix="/household", tags=["household"]) -api_router.include_router(saved_recipes.router, prefix="/recipes/saved", tags=["saved-recipes"]) -api_router.include_router(meal_plans.router, prefix="/meal-plans", tags=["meal-plans"]) \ No newline at end of file +api_router.include_router(health.router, prefix="/health", tags=["health"]) +api_router.include_router(receipts.router, prefix="/receipts", tags=["receipts"]) +api_router.include_router(ocr.router, prefix="/receipts", tags=["ocr"]) +api_router.include_router(export.router, tags=["export"]) +api_router.include_router(inventory.router, prefix="/inventory", tags=["inventory"]) +api_router.include_router(saved_recipes.router, prefix="/recipes/saved", tags=["saved-recipes"]) +api_router.include_router(recipes.router, prefix="/recipes", tags=["recipes"]) +api_router.include_router(settings.router, prefix="/settings", tags=["settings"]) +api_router.include_router(staples.router, prefix="/staples", tags=["staples"]) +api_router.include_router(feedback.router, prefix="/feedback", tags=["feedback"]) +api_router.include_router(household.router, prefix="/household", tags=["household"]) +api_router.include_router(imitate.router, prefix="/imitate", tags=["imitate"]) +api_router.include_router(meal_plans.router, prefix="/meal-plans", tags=["meal-plans"]) +api_router.include_router(community_router) diff --git a/app/cloud_session.py b/app/cloud_session.py index ee6c583..7649408 100644 --- a/app/cloud_session.py +++ b/app/cloud_session.py @@ -92,6 +92,7 @@ class CloudUser: has_byok: bool # True if a configured LLM backend is present in llm.yaml household_id: str | 
None = None is_household_owner: bool = False + license_key: str | None = None # key_display for lifetime/founders keys; None for subscription/free # ── JWT validation ───────────────────────────────────────────────────────────── @@ -132,16 +133,16 @@ def _ensure_provisioned(user_id: str) -> None: log.warning("Heimdall provision failed for user %s: %s", user_id, exc) -def _fetch_cloud_tier(user_id: str) -> tuple[str, str | None, bool]: - """Returns (tier, household_id | None, is_household_owner).""" +def _fetch_cloud_tier(user_id: str) -> tuple[str, str | None, bool, str | None]: + """Returns (tier, household_id | None, is_household_owner, license_key | None).""" now = time.monotonic() cached = _TIER_CACHE.get(user_id) if cached and (now - cached[1]) < _TIER_CACHE_TTL: entry = cached[0] - return entry["tier"], entry.get("household_id"), entry.get("is_household_owner", False) + return entry["tier"], entry.get("household_id"), entry.get("is_household_owner", False), entry.get("license_key") if not HEIMDALL_ADMIN_TOKEN: - return "free", None, False + return "free", None, False, None try: resp = requests.post( f"{HEIMDALL_URL}/admin/cloud/resolve", @@ -153,12 +154,13 @@ def _fetch_cloud_tier(user_id: str) -> tuple[str, str | None, bool]: tier = data.get("tier", "free") household_id = data.get("household_id") is_owner = data.get("is_household_owner", False) + license_key = data.get("key_display") except Exception as exc: log.warning("Heimdall tier resolve failed for user %s: %s", user_id, exc) - tier, household_id, is_owner = "free", None, False + tier, household_id, is_owner, license_key = "free", None, False, None - _TIER_CACHE[user_id] = ({"tier": tier, "household_id": household_id, "is_household_owner": is_owner}, now) - return tier, household_id, is_owner + _TIER_CACHE[user_id] = ({"tier": tier, "household_id": household_id, "is_household_owner": is_owner, "license_key": license_key}, now) + return tier, household_id, is_owner, license_key def _user_db_path(user_id: 
str, household_id: str | None = None) -> Path: @@ -170,6 +172,13 @@ def _user_db_path(user_id: str, household_id: str | None = None) -> Path: return path + +def _anon_db_path() -> Path: + """Shared guest DB for unauthenticated visitors (Free tier). NOTE(review): this single on-disk file persists and is shared by ALL anonymous visitors — confirm cross-visitor data exposure is acceptable, or key it per session.""" + path = CLOUD_DATA_ROOT / "anonymous" / "kiwi.db" + path.parent.mkdir(parents=True, exist_ok=True) + return path + + # ── BYOK detection ──────────────────────────────────────────────────────────── _LLM_CONFIG_PATH = Path.home() / ".config" / "circuitforge" / "llm.yaml" @@ -225,15 +234,25 @@ def get_session(request: Request) -> CloudUser: or request.headers.get("cookie", "") ) if not raw_header: - raise HTTPException(status_code=401, detail="Not authenticated") + return CloudUser( + user_id="anonymous", + tier="free", + db=_anon_db_path(), + has_byok=has_byok, + ) token = _extract_session_token(raw_header) # gitleaks:allow — function name, not a secret if not token: - raise HTTPException(status_code=401, detail="Not authenticated") + return CloudUser( + user_id="anonymous", + tier="free", + db=_anon_db_path(), + has_byok=has_byok, + ) user_id = validate_session_jwt(token) _ensure_provisioned(user_id) - tier, household_id, is_household_owner = _fetch_cloud_tier(user_id) + tier, household_id, is_household_owner, license_key = _fetch_cloud_tier(user_id) return CloudUser( user_id=user_id, tier=tier, @@ -241,6 +260,7 @@ has_byok=has_byok, household_id=household_id, is_household_owner=is_household_owner, + license_key=license_key, ) diff --git a/app/core/config.py b/app/core/config.py index 091b574..fa55bc1 100644 --- a/app/core/config.py +++ b/app/core/config.py @@ -35,6 +35,16 @@ class Settings: # Database DB_PATH: Path = Path(os.environ.get("DB_PATH", str(DATA_DIR / "kiwi.db"))) + # Community feature settings + COMMUNITY_DB_URL: str | None = os.environ.get("COMMUNITY_DB_URL") or None + COMMUNITY_PSEUDONYM_SALT: str = os.environ.get( 
"COMMUNITY_PSEUDONYM_SALT", "kiwi-default-salt-change-in-prod" + ) + COMMUNITY_CLOUD_FEED_URL: str = os.environ.get( + "COMMUNITY_CLOUD_FEED_URL", + "https://menagerie.circuitforge.tech/kiwi/api/v1/community/posts", + ) + # Processing MAX_CONCURRENT_JOBS: int = int(os.environ.get("MAX_CONCURRENT_JOBS", "4")) USE_GPU: bool = os.environ.get("USE_GPU", "true").lower() in ("1", "true", "yes") diff --git a/app/db/migrations/022_recipe_generic_flag.sql b/app/db/migrations/022_recipe_generic_flag.sql new file mode 100644 index 0000000..a1600fb --- /dev/null +++ b/app/db/migrations/022_recipe_generic_flag.sql @@ -0,0 +1,5 @@ +-- Migration 022: Add is_generic flag to recipes +-- Generic recipes are catch-all/dump recipes with loose ingredient lists +-- that should not appear in Level 1 (deterministic "use what I have") results. +-- Admins can mark recipes via the recipe editor or a bulk backfill script. +ALTER TABLE recipes ADD COLUMN is_generic INTEGER NOT NULL DEFAULT 0; diff --git a/app/db/migrations/028_community_pseudonyms.sql b/app/db/migrations/028_community_pseudonyms.sql new file mode 100644 index 0000000..d5fede0 --- /dev/null +++ b/app/db/migrations/028_community_pseudonyms.sql @@ -0,0 +1,21 @@ +-- 028_community_pseudonyms.sql +-- Per-user pseudonym store: maps the user's chosen community display name +-- to their Directus user ID. This table lives in per-user kiwi.db only. +-- It is NEVER replicated to the community PostgreSQL — pseudonym isolation is by design. +-- +-- A user may have one active pseudonym. Old pseudonyms are retained for reference +-- (posts published under them keep their pseudonym attribution) but only one is +-- flagged as current (is_current = 1). 
+ +CREATE TABLE IF NOT EXISTS community_pseudonyms ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + pseudonym TEXT NOT NULL, + directus_user_id TEXT NOT NULL, + is_current INTEGER NOT NULL DEFAULT 1 CHECK (is_current IN (0, 1)), + created_at TEXT NOT NULL DEFAULT (datetime('now')) +); + +-- Only one pseudonym can be current at a time per user +CREATE UNIQUE INDEX IF NOT EXISTS idx_community_pseudonyms_current + ON community_pseudonyms (directus_user_id) + WHERE is_current = 1; diff --git a/app/db/migrations/029_inferred_tags.sql b/app/db/migrations/029_inferred_tags.sql new file mode 100644 index 0000000..bf70d5f --- /dev/null +++ b/app/db/migrations/029_inferred_tags.sql @@ -0,0 +1,49 @@ +-- Migration 029: Add inferred_tags column and update FTS index to include it. +-- +-- inferred_tags holds a JSON array of normalized tag strings derived by +-- scripts/pipeline/infer_recipe_tags.py (e.g. ["cuisine:Italian", +-- "dietary:Low-Carb", "flavor:Umami", "can_be:Gluten-Free"]). +-- +-- The FTS5 browser table is rebuilt to index inferred_tags alongside +-- category and keywords so browse domain queries match against all signals. + +-- 1. Add inferred_tags column (empty array default; populated by pipeline run) +ALTER TABLE recipes ADD COLUMN inferred_tags TEXT NOT NULL DEFAULT '[]'; + +-- 2. Drop old FTS table and triggers that only covered category + keywords +DROP TRIGGER IF EXISTS recipes_ai; +DROP TRIGGER IF EXISTS recipes_ad; +DROP TRIGGER IF EXISTS recipes_au; +DROP TABLE IF EXISTS recipe_browser_fts; + +-- 3. Recreate FTS5 table: now indexes category, keywords, AND inferred_tags +CREATE VIRTUAL TABLE recipe_browser_fts USING fts5( + category, + keywords, + inferred_tags, + content=recipes, + content_rowid=id +); + +-- 4. 
Triggers to keep FTS in sync with recipes table changes +CREATE TRIGGER recipes_ai AFTER INSERT ON recipes BEGIN + INSERT INTO recipe_browser_fts(rowid, category, keywords, inferred_tags) + VALUES (new.id, new.category, new.keywords, new.inferred_tags); +END; + +CREATE TRIGGER recipes_ad AFTER DELETE ON recipes BEGIN + INSERT INTO recipe_browser_fts(recipe_browser_fts, rowid, category, keywords, inferred_tags) + VALUES ('delete', old.id, old.category, old.keywords, old.inferred_tags); +END; + +CREATE TRIGGER recipes_au AFTER UPDATE ON recipes BEGIN + INSERT INTO recipe_browser_fts(recipe_browser_fts, rowid, category, keywords, inferred_tags) + VALUES ('delete', old.id, old.category, old.keywords, old.inferred_tags); + INSERT INTO recipe_browser_fts(rowid, category, keywords, inferred_tags) + VALUES (new.id, new.category, new.keywords, new.inferred_tags); +END; + +-- 5. Populate FTS from current table state +-- (inferred_tags is '[]' for all rows at this point; run infer_recipe_tags.py +-- to populate, then the FTS will be rebuilt as part of that script.) +INSERT INTO recipe_browser_fts(recipe_browser_fts) VALUES('rebuild'); diff --git a/app/db/store.py b/app/db/store.py index 576ac55..de838c0 100644 --- a/app/db/store.py +++ b/app/db/store.py @@ -573,6 +573,7 @@ class Store: max_carbs_g: float | None = None, max_sodium_mg: float | None = None, excluded_ids: list[int] | None = None, + exclude_generic: bool = False, ) -> list[dict]: """Find recipes containing any of the given ingredient names. Scores by match count and returns highest-scoring first. @@ -582,6 +583,9 @@ class Store: Nutrition filters use NULL-passthrough: rows without nutrition data always pass (they may be estimated or absent entirely). + + exclude_generic: when True, skips recipes marked is_generic=1. + Pass True for Level 1 ("Use What I Have") to suppress catch-all recipes. """ if not ingredient_names: return [] @@ -607,6 +611,8 @@ class Store: placeholders = ",".join("?" 
* len(excluded_ids)) extra_clauses.append(f"r.id NOT IN ({placeholders})") extra_params.extend(excluded_ids) + if exclude_generic: + extra_clauses.append("r.is_generic = 0") where_extra = (" AND " + " AND ".join(extra_clauses)) if extra_clauses else "" if self._fts_ready(): @@ -682,6 +688,67 @@ class Store: def get_recipe(self, recipe_id: int) -> dict | None: return self._fetch_one("SELECT * FROM recipes WHERE id = ?", (recipe_id,)) + def upsert_built_recipe( + self, + external_id: str, + title: str, + ingredients: list[str], + directions: list[str], + ) -> int: + """Persist an assembly-built recipe and return its DB id. + + Uses external_id as a stable dedup key so the same build slug doesn't + accumulate duplicate rows across multiple user sessions. + """ + import json as _json + self.conn.execute( + """ + INSERT OR IGNORE INTO recipes + (external_id, title, ingredients, ingredient_names, directions, source) + VALUES (?, ?, ?, ?, ?, 'assembly') + """, + ( + external_id, + title, + _json.dumps(ingredients), + _json.dumps(ingredients), + _json.dumps(directions), + ), + ) + # Update title in case the build was re-run with tweaked selections + self.conn.execute( + "UPDATE recipes SET title = ? WHERE external_id = ?", + (title, external_id), + ) + self.conn.commit() + row = self._fetch_one( + "SELECT id FROM recipes WHERE external_id = ?", (external_id,) + ) + return row["id"] # type: ignore[index] + + def get_element_profiles(self, names: list[str]) -> dict[str, list[str]]: + """Return {ingredient_name: [element_tag, ...]} for the given names. + + Only names present in ingredient_profiles are returned -- missing names + are silently omitted so callers can distinguish "no profile" from "empty + elements list". + """ + if not names: + return {} + placeholders = ",".join("?" 
* len(names)) + rows = self._fetch_all( + f"SELECT name, elements FROM ingredient_profiles WHERE name IN ({placeholders})", + tuple(names), + ) + result: dict[str, list[str]] = {} + for row in rows: + try: + elements = json.loads(row["elements"]) if row["elements"] else [] + except (json.JSONDecodeError, TypeError): + elements = [] + result[row["name"]] = elements + return result + # ── rate limits ─────────────────────────────────────────────────────── def check_and_increment_rate_limit( @@ -1128,3 +1195,31 @@ class Store: ) self.conn.commit() return self._fetch_one("SELECT * FROM prep_tasks WHERE id = ?", (task_id,)) + + # ── community ───────────────────────────────────────────────────────── + + def get_current_pseudonym(self, directus_user_id: str) -> str | None: + """Return the current community pseudonym for this user, or None if not set.""" + cur = self.conn.execute( + "SELECT pseudonym FROM community_pseudonyms " + "WHERE directus_user_id = ? AND is_current = 1 LIMIT 1", + (directus_user_id,), + ) + row = cur.fetchone() + return row["pseudonym"] if row else None + + def set_pseudonym(self, directus_user_id: str, pseudonym: str) -> None: + """Set the current community pseudonym for this user. + + Marks any previous pseudonym as non-current (retains history for attribution). 
+ """ + self.conn.execute( + "UPDATE community_pseudonyms SET is_current = 0 WHERE directus_user_id = ?", + (directus_user_id,), + ) + self.conn.execute( + "INSERT INTO community_pseudonyms (pseudonym, directus_user_id, is_current) " + "VALUES (?, ?, 1)", + (pseudonym, directus_user_id), + ) + self.conn.commit() diff --git a/app/main.py b/app/main.py index c5ccec3..0e79e63 100644 --- a/app/main.py +++ b/app/main.py @@ -25,6 +25,10 @@ async def lifespan(app: FastAPI): get_scheduler(settings.DB_PATH) logger.info("Task scheduler started.") + # Initialize community store (no-op if COMMUNITY_DB_URL is not set) + from app.api.endpoints.community import init_community_store + init_community_store(settings.COMMUNITY_DB_URL) + yield # Graceful scheduler shutdown diff --git a/app/models/schemas/recipe.py b/app/models/schemas/recipe.py index 873f893..80d2129 100644 --- a/app/models/schemas/recipe.py +++ b/app/models/schemas/recipe.py @@ -56,6 +56,7 @@ class RecipeResult(BaseModel): grocery_links: list[GroceryLink] = Field(default_factory=list) rate_limited: bool = False rate_limit_count: int = 0 + orch_fallback: bool = False # True when orch budget exhausted; fell back to local LLM class NutritionFilters(BaseModel): @@ -82,3 +83,48 @@ class RecipeRequest(BaseModel): nutrition_filters: NutritionFilters = Field(default_factory=NutritionFilters) excluded_ids: list[int] = Field(default_factory=list) shopping_mode: bool = False + + +# ── Build Your Own schemas ────────────────────────────────────────────────── + + +class AssemblyRoleOut(BaseModel): + """One role slot in a template, as returned by GET /api/recipes/templates.""" + + display: str + required: bool + keywords: list[str] + hint: str = "" + + +class AssemblyTemplateOut(BaseModel): + """One assembly template, as returned by GET /api/recipes/templates.""" + + id: str # slug, e.g. 
"burrito_taco" + title: str + icon: str + descriptor: str + role_sequence: list[AssemblyRoleOut] + + +class RoleCandidateItem(BaseModel): + """One candidate ingredient for a wizard picker step.""" + + name: str + in_pantry: bool + tags: list[str] = Field(default_factory=list) + + +class RoleCandidatesResponse(BaseModel): + """Response from GET /api/recipes/template-candidates.""" + + compatible: list[RoleCandidateItem] = Field(default_factory=list) + other: list[RoleCandidateItem] = Field(default_factory=list) + available_tags: list[str] = Field(default_factory=list) + + +class BuildRequest(BaseModel): + """Request body for POST /api/recipes/build.""" + + template_id: str + role_overrides: dict[str, str] = Field(default_factory=dict) diff --git a/app/services/community/__init__.py b/app/services/community/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/services/community/ap_compat.py b/app/services/community/ap_compat.py new file mode 100644 index 0000000..4b5bf59 --- /dev/null +++ b/app/services/community/ap_compat.py @@ -0,0 +1,44 @@ +# app/services/community/ap_compat.py +# MIT License — AP scaffold only (no actor, inbox, outbox) + +from __future__ import annotations + +from datetime import datetime, timezone + + +def post_to_ap_json_ld(post: dict, base_url: str) -> dict: + """Serialize a community post dict to an ActivityPub-compatible JSON-LD Note. + + This is a read-only scaffold. No AP actor, inbox, or outbox. + The slug URI is stable so a future full AP implementation can reuse posts + without a DB migration. 
+ """ + slug = post["slug"] + published = post.get("published") + if isinstance(published, datetime): + published_str = published.astimezone(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ") + else: + published_str = str(published) + + dietary_tags: list[str] = post.get("dietary_tags") or [] + tags = [{"type": "Hashtag", "name": "#kiwi"}] + for tag in dietary_tags: + tags.append({"type": "Hashtag", "name": f"#{tag.replace('-', '').replace(' ', '')}"}) + + return { + "@context": "https://www.w3.org/ns/activitystreams", + "type": "Note", + "id": f"{base_url}/api/v1/community/posts/{slug}", + "attributedTo": post.get("pseudonym", "anonymous"), + "content": _build_content(post), + "published": published_str, + "tag": tags, + } + + +def _build_content(post: dict) -> str: + title = post.get("title") or "Untitled" + desc = post.get("description") + if desc: + return f"{title} — {desc}" + return title diff --git a/app/services/community/community_store.py b/app/services/community/community_store.py new file mode 100644 index 0000000..bd8eeea --- /dev/null +++ b/app/services/community/community_store.py @@ -0,0 +1,90 @@ +# app/services/community/community_store.py +# MIT License + +from __future__ import annotations + +import logging + +from circuitforge_core.community import CommunityPost, SharedStore + +logger = logging.getLogger(__name__) + + +class KiwiCommunityStore(SharedStore): + """Kiwi-specific community store: adds kiwi-domain query methods on top of SharedStore.""" + + def list_meal_plans( + self, + limit: int = 20, + offset: int = 0, + dietary_tags: list[str] | None = None, + allergen_exclude: list[str] | None = None, + ) -> list[CommunityPost]: + return self.list_posts( + limit=limit, + offset=offset, + post_type="plan", + dietary_tags=dietary_tags, + allergen_exclude=allergen_exclude, + source_product="kiwi", + ) + + def list_outcomes( + self, + limit: int = 20, + offset: int = 0, + post_type: str | None = None, + ) -> list[CommunityPost]: + if post_type in 
("recipe_success", "recipe_blooper"): + return self.list_posts( + limit=limit, + offset=offset, + post_type=post_type, + source_product="kiwi", + ) + success = self.list_posts( + limit=limit, + offset=0, + post_type="recipe_success", + source_product="kiwi", + ) + bloopers = self.list_posts( + limit=limit, + offset=0, + post_type="recipe_blooper", + source_product="kiwi", + ) + merged = sorted(success + bloopers, key=lambda p: p.published, reverse=True) + return merged[:limit] + + +def get_or_create_pseudonym( + store, + directus_user_id: str, + requested_name: str | None, +) -> str: + """Return the user's current pseudonym, creating it if it doesn't exist. + + If the user has an existing pseudonym, return it (ignore requested_name). + If not, create using requested_name (must be provided for first-time setup). + + Raises ValueError if no existing pseudonym and requested_name is None or blank. + """ + existing = store.get_current_pseudonym(directus_user_id) + if existing: + return existing + + if not requested_name or not requested_name.strip(): + raise ValueError( + "A pseudonym is required for first publish. " + "Pass requested_name with the user's chosen display name." + ) + + name = requested_name.strip() + if "@" in name: + raise ValueError( + "Pseudonym must not contain '@' — use a display name, not an email address." 
+ ) + + store.set_pseudonym(directus_user_id, name) + return name diff --git a/app/services/community/element_snapshot.py b/app/services/community/element_snapshot.py new file mode 100644 index 0000000..1d850ac --- /dev/null +++ b/app/services/community/element_snapshot.py @@ -0,0 +1,138 @@ +# app/services/community/element_snapshot.py +# MIT License + +from __future__ import annotations + +from dataclasses import dataclass + +# Ingredient name substrings → allergen flag +_ALLERGEN_MAP: dict[str, str] = { + "milk": "dairy", "cream": "dairy", "cheese": "dairy", "butter": "dairy", + "yogurt": "dairy", "whey": "dairy", + "egg": "eggs", + "wheat": "gluten", "pasta": "gluten", "flour": "gluten", "bread": "gluten", + "barley": "gluten", "rye": "gluten", + "peanut": "nuts", "almond": "nuts", "cashew": "nuts", "walnut": "nuts", + "pecan": "nuts", "hazelnut": "nuts", "pistachio": "nuts", "macadamia": "nuts", + "soy": "soy", "tofu": "soy", "edamame": "soy", "miso": "soy", "tempeh": "soy", + "shrimp": "shellfish", "crab": "shellfish", "lobster": "shellfish", + "clam": "shellfish", "mussel": "shellfish", "scallop": "shellfish", + "fish": "fish", "salmon": "fish", "tuna": "fish", "cod": "fish", + "tilapia": "fish", "halibut": "fish", + "sesame": "sesame", +} + +_MEAT_KEYWORDS = frozenset([ + "chicken", "beef", "pork", "lamb", "turkey", "bacon", "ham", "sausage", + "salami", "prosciutto", "guanciale", "pancetta", "steak", "ground meat", + "mince", "veal", "duck", "venison", "bison", "lard", +]) +_SEAFOOD_KEYWORDS = frozenset([ + "fish", "shrimp", "crab", "lobster", "tuna", "salmon", "clam", "mussel", + "scallop", "anchovy", "sardine", "cod", "tilapia", +]) +_ANIMAL_PRODUCT_KEYWORDS = frozenset([ + "milk", "cream", "cheese", "butter", "egg", "honey", "yogurt", "whey", +]) + + +def _detect_allergens(ingredient_names: list[str]) -> list[str]: + found: set[str] = set() + lowered = [n.lower() for n in ingredient_names] + for ingredient in lowered: + for keyword, flag in 
_ALLERGEN_MAP.items(): + if keyword in ingredient: + found.add(flag) + return sorted(found) + + +def _detect_dietary_tags(ingredient_names: list[str]) -> list[str]: + lowered = [n.lower() for n in ingredient_names] + all_text = " ".join(lowered) + + has_meat = any(k in all_text for k in _MEAT_KEYWORDS) + has_seafood = any(k in all_text for k in _SEAFOOD_KEYWORDS) + has_animal_products = any(k in all_text for k in _ANIMAL_PRODUCT_KEYWORDS) + + tags: list[str] = [] + if not has_meat and not has_seafood: + tags.append("vegetarian") + if not has_meat and not has_seafood and not has_animal_products: + tags.append("vegan") + return tags + + +@dataclass(frozen=True) +class ElementSnapshot: + seasoning_score: float + richness_score: float + brightness_score: float + depth_score: float + aroma_score: float + structure_score: float + texture_profile: str + dietary_tags: tuple + allergen_flags: tuple + flavor_molecules: tuple + fat_pct: float | None + protein_pct: float | None + moisture_pct: float | None + + +def compute_snapshot(recipe_ids: list[int], store) -> ElementSnapshot: + """Compute an element snapshot from a list of recipe IDs. + + Pulls SFAH scores, ingredient lists, and USDA FDC macros from the corpus. + Averages numeric scores across all recipes. Unions allergen flags and dietary tags. + Call at publish time only — snapshot is stored denormalized in community_posts. 
+ """ + if not recipe_ids: + return ElementSnapshot( + seasoning_score=0.0, richness_score=0.0, brightness_score=0.0, + depth_score=0.0, aroma_score=0.0, structure_score=0.0, + texture_profile="", dietary_tags=(), allergen_flags=(), + flavor_molecules=(), fat_pct=None, protein_pct=None, moisture_pct=None, + ) + + rows = store.get_recipes_by_ids(recipe_ids) + if not rows: + return ElementSnapshot( + seasoning_score=0.0, richness_score=0.0, brightness_score=0.0, + depth_score=0.0, aroma_score=0.0, structure_score=0.0, + texture_profile="", dietary_tags=(), allergen_flags=(), + flavor_molecules=(), fat_pct=None, protein_pct=None, moisture_pct=None, + ) + + def _avg(field: str) -> float: + vals = [r.get(field) or 0.0 for r in rows] + return sum(vals) / len(vals) + + all_ingredients: list[str] = [] + for r in rows: + names = r.get("ingredient_names") or [] + all_ingredients.extend(names if isinstance(names, list) else []) + + allergens = _detect_allergens(all_ingredients) + dietary = _detect_dietary_tags(all_ingredients) + + texture = rows[0].get("texture_profile") or "" + + fat_vals = [r.get("fat") for r in rows if r.get("fat") is not None] + prot_vals = [r.get("protein") for r in rows if r.get("protein") is not None] + moist_vals = [r.get("moisture") for r in rows if r.get("moisture") is not None] + + return ElementSnapshot( + seasoning_score=_avg("seasoning_score"), + richness_score=_avg("richness_score"), + brightness_score=_avg("brightness_score"), + depth_score=_avg("depth_score"), + aroma_score=_avg("aroma_score"), + structure_score=_avg("structure_score"), + texture_profile=texture, + dietary_tags=tuple(dietary), + allergen_flags=tuple(allergens), + flavor_molecules=(), + fat_pct=(sum(fat_vals) / len(fat_vals)) if fat_vals else None, + protein_pct=(sum(prot_vals) / len(prot_vals)) if prot_vals else None, + moisture_pct=(sum(moist_vals) / len(moist_vals)) if moist_vals else None, + ) diff --git a/app/services/community/feed.py b/app/services/community/feed.py new 
file mode 100644 index 0000000..36000b9 --- /dev/null +++ b/app/services/community/feed.py @@ -0,0 +1,43 @@ +# app/services/community/feed.py +# MIT License + +from __future__ import annotations + +from datetime import datetime, timezone +from email.utils import format_datetime +from xml.etree.ElementTree import Element, SubElement, tostring + + +def posts_to_rss(posts: list[dict], base_url: str) -> str: + """Generate an RSS 2.0 feed from a list of community post dicts. + + base_url: the root URL of this Kiwi instance (no trailing slash). + Returns UTF-8 XML string. + """ + rss = Element("rss", version="2.0") + channel = SubElement(rss, "channel") + + _sub(channel, "title", "Kiwi Community Feed") + _sub(channel, "link", f"{base_url}/community") + _sub(channel, "description", "Meal plans and recipe outcomes from the Kiwi community") + _sub(channel, "language", "en") + _sub(channel, "lastBuildDate", format_datetime(datetime.now(timezone.utc))) + + for post in posts: + item = SubElement(channel, "item") + _sub(item, "title", post.get("title") or "Untitled") + _sub(item, "link", f"{base_url}/api/v1/community/posts/{post['slug']}") + _sub(item, "guid", f"{base_url}/api/v1/community/posts/{post['slug']}") + if post.get("description"): + _sub(item, "description", post["description"]) + published = post.get("published") + if isinstance(published, datetime): + _sub(item, "pubDate", format_datetime(published)) + + return '\n' + tostring(rss, encoding="unicode") + + +def _sub(parent: Element, tag: str, text: str) -> Element: + el = SubElement(parent, tag) + el.text = text + return el diff --git a/app/services/community/mdns.py b/app/services/community/mdns.py new file mode 100644 index 0000000..148780a --- /dev/null +++ b/app/services/community/mdns.py @@ -0,0 +1,72 @@ +# app/services/community/mdns.py +# MIT License + +from __future__ import annotations + +import logging +import socket + +logger = logging.getLogger(__name__) + +# Import deferred to avoid hard failure when 
zeroconf is not installed +try: + from zeroconf import ServiceInfo, Zeroconf + _ZEROCONF_AVAILABLE = True +except ImportError: + _ZEROCONF_AVAILABLE = False + + +class KiwiMDNS: + """Advertise this Kiwi instance on the LAN via mDNS (_kiwi._tcp.local). + + Defaults to disabled (enabled=False). User must explicitly opt in via the + Settings page. This matches the CF a11y requirement: no surprise broadcasting. + + Usage: + mdns = KiwiMDNS(enabled=settings.MDNS_ENABLED, port=settings.PORT, + feed_url=f"http://{hostname}:{settings.PORT}/api/v1/community/local-feed") + mdns.start() # in lifespan startup + mdns.stop() # in lifespan shutdown + """ + + SERVICE_TYPE = "_kiwi._tcp.local." + + def __init__(self, enabled: bool, port: int, feed_url: str) -> None: + self._enabled = enabled + self._port = port + self._feed_url = feed_url + self._zc: "Zeroconf | None" = None + self._info: "ServiceInfo | None" = None + + def start(self) -> None: + if not self._enabled: + logger.debug("mDNS advertisement disabled (user has not opted in)") + return + if not _ZEROCONF_AVAILABLE: + logger.warning("zeroconf package not installed — mDNS advertisement unavailable") + return + + hostname = socket.gethostname() + service_name = f"kiwi-{hostname}.{self.SERVICE_TYPE}" + self._info = ServiceInfo( + type_=self.SERVICE_TYPE, + name=service_name, + port=self._port, + properties={ + b"feed_url": self._feed_url.encode(), + b"version": b"1", + }, + addresses=[socket.inet_aton("127.0.0.1")], + ) + self._zc = Zeroconf() + self._zc.register_service(self._info) + logger.info("mDNS: advertising %s on port %d", service_name, self._port) + + def stop(self) -> None: + if self._zc is None or self._info is None: + return + self._zc.unregister_service(self._info) + self._zc.close() + self._zc = None + self._info = None + logger.info("mDNS: advertisement stopped") diff --git a/app/services/recipe/assembly_recipes.py b/app/services/recipe/assembly_recipes.py index 3635d2a..c68e6ca 100644 --- 
a/app/services/recipe/assembly_recipes.py +++ b/app/services/recipe/assembly_recipes.py @@ -42,11 +42,21 @@ class AssemblyRole: class AssemblyTemplate: """A template assembly dish.""" id: int + slug: str # URL-safe identifier, e.g. "burrito_taco" + icon: str # emoji + descriptor: str # one-line description shown in template grid title: str required: list[AssemblyRole] optional: list[AssemblyRole] directions: list[str] notes: str = "" + # Per-role hints shown in the wizard picker header + # keys match role.display values; missing keys fall back to "" + role_hints: dict[str, str] = None # type: ignore[assignment] + + def __post_init__(self) -> None: + if self.role_hints is None: + self.role_hints = {} def _matches_role(role: AssemblyRole, pantry_set: set[str]) -> list[str]: @@ -138,6 +148,9 @@ def _personalized_title(tmpl: AssemblyTemplate, pantry_set: set[str], seed: int) ASSEMBLY_TEMPLATES: list[AssemblyTemplate] = [ AssemblyTemplate( id=-1, + slug="burrito_taco", + icon="🌯", + descriptor="Protein, veg, and sauce in a tortilla or over rice", title="Burrito / Taco", required=[ AssemblyRole("tortilla or wrap", [ @@ -170,9 +183,21 @@ ASSEMBLY_TEMPLATES: list[AssemblyTemplate] = [ "Fold in the sides and roll tightly. 
Optionally toast seam-side down 1-2 minutes.", ], notes="Works as a burrito (rolled), taco (folded), or quesadilla (cheese only, pressed flat).", + role_hints={ + "tortilla or wrap": "The foundation -- what holds everything", + "protein": "The main filling", + "rice or starch": "Optional base layer", + "cheese": "Optional -- melts into the filling", + "salsa or sauce": "Optional -- adds moisture and heat", + "sour cream or yogurt": "Optional -- cool contrast to heat", + "vegetables": "Optional -- adds texture and colour", + }, ), AssemblyTemplate( id=-2, + slug="fried_rice", + icon="🍳", + descriptor="Rice + egg + whatever's in the fridge", title="Fried Rice", required=[ AssemblyRole("cooked rice", [ @@ -205,9 +230,21 @@ ASSEMBLY_TEMPLATES: list[AssemblyTemplate] = [ "Season with soy sauce and any other sauces. Toss to combine.", ], notes="Add a fried egg on top. A drizzle of sesame oil at the end adds a lot.", + role_hints={ + "cooked rice": "Day-old cold rice works best", + "protein": "Pre-cooked or raw -- cook before adding rice", + "soy sauce or seasoning": "The primary flavour driver", + "oil": "High smoke-point oil for high heat", + "egg": "Scrambled in the same pan", + "vegetables": "Add crunch and colour", + "garlic or ginger": "Aromatic base -- add first", + }, ), AssemblyTemplate( id=-3, + slug="omelette_scramble", + icon="🥚", + descriptor="Eggs with fillings, pan-cooked", title="Omelette / Scramble", required=[ AssemblyRole("eggs", ["egg"]), @@ -238,9 +275,19 @@ ASSEMBLY_TEMPLATES: list[AssemblyTemplate] = [ "Season and serve immediately.", ], notes="Works for breakfast, lunch, or a quick dinner. 
Any leftover vegetables work well.", + role_hints={ + "eggs": "The base -- beat with a splash of water", + "cheese": "Fold in just before serving", + "vegetables": "Saute first, then add eggs", + "protein": "Cook through before adding eggs", + "herbs or seasoning": "Season at the end", + }, ), AssemblyTemplate( id=-4, + slug="stir_fry", + icon="🥢", + descriptor="High-heat protein + veg in sauce", title="Stir Fry", required=[ AssemblyRole("vegetables", [ @@ -271,9 +318,20 @@ ASSEMBLY_TEMPLATES: list[AssemblyTemplate] = [ "Serve over rice or noodles.", ], notes="High heat is the key. Do not crowd the pan -- cook in batches if needed.", + role_hints={ + "vegetables": "Cut to similar size for even cooking", + "starch base": "Serve under or toss with the stir fry", + "protein": "Cook first, remove, add back at end", + "sauce": "Add last -- toss for 1-2 minutes only", + "garlic or ginger": "Add early for aromatic base", + "oil": "High smoke-point oil only", + }, ), AssemblyTemplate( id=-5, + slug="pasta", + icon="🍝", + descriptor="Pantry pasta with flexible sauce", title="Pasta with Whatever You Have", required=[ AssemblyRole("pasta", [ @@ -307,9 +365,20 @@ ASSEMBLY_TEMPLATES: list[AssemblyTemplate] = [ "Toss cooked pasta with sauce. 
Finish with cheese if using.", ], notes="Pasta water is the secret -- the starch thickens and binds any sauce.", + role_hints={ + "pasta": "The base -- cook al dente, reserve pasta water", + "sauce base": "Simmer 5 min; pasta water loosens it", + "protein": "Cook through before adding sauce", + "cheese": "Finish off heat to avoid graininess", + "vegetables": "Saute until tender before adding sauce", + "garlic": "Saute in oil first -- the flavour foundation", + }, ), AssemblyTemplate( id=-6, + slug="sandwich_wrap", + icon="🥪", + descriptor="Protein + veg between bread or in a wrap", title="Sandwich / Wrap", required=[ AssemblyRole("bread or wrap", [ @@ -341,9 +410,19 @@ ASSEMBLY_TEMPLATES: list[AssemblyTemplate] = [ "Press together and cut diagonally.", ], notes="Leftovers, deli meat, canned fish -- nearly anything works between bread.", + role_hints={ + "bread or wrap": "Toast for better texture", + "protein": "Layer on first after condiments", + "cheese": "Goes on top of protein", + "condiment": "Spread on both inner surfaces", + "vegetables": "Top layer -- keeps bread from getting soggy", + }, ), AssemblyTemplate( id=-7, + slug="grain_bowl", + icon="🥗", + descriptor="Grain base + protein + toppings + dressing", title="Grain Bowl", required=[ AssemblyRole("grain base", [ @@ -377,9 +456,19 @@ ASSEMBLY_TEMPLATES: list[AssemblyTemplate] = [ "Drizzle with dressing and add toppings.", ], notes="Great for meal prep -- cook grains and proteins in bulk, assemble bowls all week.", + role_hints={ + "grain base": "Season while cooking -- bland grains sink the bowl", + "protein": "Slice or shred; arrange on top", + "vegetables": "Roast or saute for best flavour", + "dressing or sauce": "Drizzle last -- ties everything together", + "toppings": "Add crunch and contrast", + }, ), AssemblyTemplate( id=-8, + slug="soup_stew", + icon="🥣", + descriptor="Liquid-based, flexible ingredients", title="Soup / Stew", required=[ # Narrow to dedicated soup bases — tomato sauce and coconut 
milk are @@ -415,9 +504,19 @@ ASSEMBLY_TEMPLATES: list[AssemblyTemplate] = [ "Season to taste and simmer at least 20 minutes for flavors to develop.", ], notes="Soups and stews improve overnight in the fridge. Almost any combination works.", + role_hints={ + "broth or stock": "The liquid base -- determines overall flavour", + "protein": "Brown first for deeper flavour", + "vegetables": "Dense veg first; quick-cooking veg last", + "starch thickener": "Adds body and turns soup into stew", + "seasoning": "Taste and adjust after 20 min simmer", + }, ), AssemblyTemplate( id=-9, + slug="casserole_bake", + icon="🫙", + descriptor="Oven bake with protein, veg, starch", title="Casserole / Bake", required=[ AssemblyRole("starch or base", [ @@ -457,9 +556,20 @@ ASSEMBLY_TEMPLATES: list[AssemblyTemplate] = [ "Bake covered 25 minutes, then uncovered 15 minutes until golden and bubbly.", ], notes="Classic pantry dump dinner. Cream of anything soup is the universal binder.", + role_hints={ + "starch or base": "Cook slightly underdone -- finishes in oven", + "binder or sauce": "Coats everything and holds the bake together", + "protein": "Pre-cook before mixing in", + "vegetables": "Chop small for even distribution", + "cheese topping": "Goes on last -- browns in the final 15 min", + "seasoning": "Casseroles need more salt than you think", + }, ), AssemblyTemplate( id=-10, + slug="pancakes_quickbread", + icon="🥞", + descriptor="Batter-based; sweet or savory", title="Pancakes / Waffles / Quick Bread", required=[ AssemblyRole("flour or baking mix", [ @@ -495,9 +605,20 @@ ASSEMBLY_TEMPLATES: list[AssemblyTemplate] = [ "For muffins or quick bread: pour into greased pan, bake at 375 F until a toothpick comes out clean.", ], notes="Overmixing develops gluten and makes pancakes tough. 
Stop when just combined.", + role_hints={ + "flour or baking mix": "Whisk dry ingredients together first", + "leavening or egg": "Activates rise -- don't skip", + "liquid": "Add to dry ingredients; lumps are fine", + "fat": "Adds richness and prevents sticking", + "sweetener": "Mix into wet ingredients", + "mix-ins": "Fold in last -- gently", + }, ), AssemblyTemplate( id=-11, + slug="porridge_oatmeal", + icon="🌾", + descriptor="Oat or grain base with toppings", title="Porridge / Oatmeal", required=[ AssemblyRole("oats or grain porridge", [ @@ -520,9 +641,20 @@ ASSEMBLY_TEMPLATES: list[AssemblyTemplate] = [ "Top with fruit, nuts, or seeds and serve immediately.", ], notes="Overnight oats: skip cooking — soak oats in cold milk overnight in the fridge.", + role_hints={ + "oats or grain porridge": "1 part oats to 2 parts liquid", + "liquid": "Use milk for creamier result", + "sweetener": "Stir in after cooking", + "fruit": "Add fresh on top or simmer dried fruit in", + "toppings": "Add last for crunch", + "spice": "Stir in with sweetener", + }, ), AssemblyTemplate( id=-12, + slug="pie_pot_pie", + icon="🥧", + descriptor="Pastry or biscuit crust with filling", title="Pie / Pot Pie", required=[ AssemblyRole("pastry or crust", [ @@ -561,9 +693,20 @@ ASSEMBLY_TEMPLATES: list[AssemblyTemplate] = [ "For sweet pie: fill unbaked crust with fruit filling, top with second crust or crumble, bake similarly.", ], notes="Puff pastry from the freezer is the shortcut to impressive pot pies. 
Thaw in the fridge overnight.", + role_hints={ + "pastry or crust": "Thaw puff pastry overnight in fridge", + "protein filling": "Cook through before adding to filling", + "vegetables": "Chop small; cook until just tender", + "sauce or binder": "Holds the filling together in the crust", + "seasoning": "Fillings need generous seasoning", + "sweet filling": "For dessert pies -- fruit + sugar", + }, ), AssemblyTemplate( id=-13, + slug="pudding_custard", + icon="🍮", + descriptor="Dairy-based set dessert", title="Pudding / Custard", required=[ AssemblyRole("dairy or dairy-free milk", [ @@ -601,10 +744,58 @@ ASSEMBLY_TEMPLATES: list[AssemblyTemplate] = [ "Pour into dishes and refrigerate at least 2 hours to set.", ], notes="UK-style pudding is broad — bread pudding, rice pudding, spotted dick, treacle sponge all count.", + role_hints={ + "dairy or dairy-free milk": "Heat until steaming before adding to eggs", + "thickener or set": "Cornstarch for stovetop; eggs for baked custard", + "sweetener or flavouring": "Signals dessert intent -- required", + "sweetener": "Adjust to taste", + "flavouring": "Add off-heat to preserve aroma", + "starchy base": "For bread pudding or rice pudding", + "fruit": "Layer in or fold through before setting", + }, ), ] +# Slug to template lookup (built once at import time) +_TEMPLATE_BY_SLUG: dict[str, AssemblyTemplate] = { + t.slug: t for t in ASSEMBLY_TEMPLATES +} + + +def get_templates_for_api() -> list[dict]: + """Serialise all 13 templates for GET /api/recipes/templates. + + Combines required and optional roles into a single ordered role_sequence + with required roles first. 
+ """ + out = [] + for tmpl in ASSEMBLY_TEMPLATES: + roles = [] + for role in tmpl.required: + roles.append({ + "display": role.display, + "required": True, + "keywords": role.keywords, + "hint": tmpl.role_hints.get(role.display, ""), + }) + for role in tmpl.optional: + roles.append({ + "display": role.display, + "required": False, + "keywords": role.keywords, + "hint": tmpl.role_hints.get(role.display, ""), + }) + out.append({ + "id": tmpl.slug, + "title": tmpl.title, + "icon": tmpl.icon, + "descriptor": tmpl.descriptor, + "role_sequence": roles, + }) + return out + + # --------------------------------------------------------------------------- # Public API # --------------------------------------------------------------------------- @@ -679,3 +870,148 @@ def match_assembly_templates( # Sort by optional coverage descending — best-matched templates first results.sort(key=lambda s: s.match_count, reverse=True) return results + + +def get_role_candidates( + template_slug: str, + role_display: str, + pantry_set: set[str], + prior_picks: list[str], + profile_index: dict[str, list[str]], +) -> dict: + """Return ingredient candidates for one wizard step. + + Splits candidates into 'compatible' (element overlap with prior picks) + and 'other' (valid for role but no overlap). + + profile_index: {ingredient_name: [element_tag, ...]} -- pre-loaded from + Store.get_element_profiles() by the caller so this function stays DB-free. + + Returns {"compatible": [...], "other": [...], "available_tags": [...]} + where each item is {"name": str, "in_pantry": bool, "tags": [str]}. 
+ """ + tmpl = _TEMPLATE_BY_SLUG.get(template_slug) + if tmpl is None: + return {"compatible": [], "other": [], "available_tags": []} + + # Find the AssemblyRole for this display name + target_role: AssemblyRole | None = None + for role in tmpl.required + tmpl.optional: + if role.display == role_display: + target_role = role + break + if target_role is None: + return {"compatible": [], "other": [], "available_tags": []} + + # Build prior-pick element set for compatibility scoring + prior_elements: set[str] = set() + for pick in prior_picks: + prior_elements.update(profile_index.get(pick, [])) + + # Find pantry items that match this role + pantry_matches = _matches_role(target_role, pantry_set) + + # Build keyword-based "other" candidates from role keywords not in pantry + pantry_lower = {p.lower() for p in pantry_set} + other_names: list[str] = [] + for kw in target_role.keywords: + if not any(kw in item.lower() for item in pantry_lower): + if len(kw) >= 4: + other_names.append(kw.title()) + + def _make_item(name: str, in_pantry: bool) -> dict: + tags = profile_index.get(name, profile_index.get(name.lower(), [])) + return {"name": name, "in_pantry": in_pantry, "tags": tags} + + # Score: compatible if shares any element with prior picks (or no prior picks yet) + compatible: list[dict] = [] + other: list[dict] = [] + for name in pantry_matches: + item_elements = set(profile_index.get(name, [])) + item = _make_item(name, in_pantry=True) + if not prior_elements or item_elements & prior_elements: + compatible.append(item) + else: + other.append(item) + + for name in other_names: + other.append(_make_item(name, in_pantry=False)) + + # available_tags: union of all tags in the full candidate set + all_tags: set[str] = set() + for item in compatible + other: + all_tags.update(item["tags"]) + + return { + "compatible": compatible, + "other": other, + "available_tags": sorted(all_tags), + } + + +def build_from_selection( + template_slug: str, + role_overrides: dict[str, str], 
def build_from_selection(
    template_slug: str,
    role_overrides: dict[str, str],
    pantry_set: set[str],
) -> "RecipeSuggestion | None":
    """Build a RecipeSuggestion from explicit role selections.

    role_overrides maps role.display -> the pantry item the user chose for
    that role.

    Returns None when the template slug is unknown, or when a required role
    is neither overridden nor coverable from the pantry.
    """
    tmpl = _TEMPLATE_BY_SLUG.get(template_slug)
    if tmpl is None:
        return None

    seed = _pantry_hash(pantry_set)

    def _resolve(role):
        # Explicit user choice wins; otherwise fall back to a deterministic
        # pick among pantry items matching the role's keywords.
        chosen = role_overrides.get(role.display)
        if chosen:
            return chosen
        hits = _matches_role(role, pantry_set)
        return _pick_one(hits, seed + tmpl.id) if hits else None

    # Every required role must resolve, or the template is not buildable.
    matched_required: list[str] = []
    for role in tmpl.required:
        resolved = _resolve(role)
        if resolved is None:
            return None
        matched_required.append(resolved)

    # Optional roles contribute only when they resolve.
    matched_optional = [
        resolved
        for role in tmpl.optional
        if (resolved := _resolve(role)) is not None
    ]

    all_matched = matched_required + matched_optional

    # Personalise the title against pantry + overrides so explicit user
    # choices show up in the suggestion name.
    effective_pantry = pantry_set | set(role_overrides.values())
    title = _personalized_title(tmpl, effective_pantry, seed + tmpl.id)

    # Overridden items the user doesn't own become the shopping list.
    missing = [
        item for item in role_overrides.values()
        if item and item not in pantry_set
    ]

    return RecipeSuggestion(
        id=tmpl.id,
        title=title,
        match_count=len(all_matched),
        element_coverage={},
        swap_candidates=[],
        matched_ingredients=all_matched,
        missing_ingredients=missing,
        directions=tmpl.directions,
        notes=tmpl.notes,
        level=1,
        is_wildcard=False,
        nutrition=None,
    )
b/app/services/recipe/recipe_engine.py @@ -21,7 +21,6 @@ if TYPE_CHECKING: from app.db.store import Store from app.models.schemas.recipe import GroceryLink, NutritionPanel, RecipeRequest, RecipeResult, RecipeSuggestion, SwapCandidate -from app.services.recipe.assembly_recipes import match_assembly_templates from app.services.recipe.element_classifier import ElementClassifier from app.services.recipe.grocery_links import GroceryLinkBuilder from app.services.recipe.substitution_engine import SubstitutionEngine @@ -517,13 +516,6 @@ def _build_source_url(row: dict) -> str | None: return None -_ASSEMBLY_TIER_LIMITS: dict[str, int] = { - "free": 2, - "paid": 4, - "premium": 6, -} - - # Method complexity classification patterns _EASY_METHODS = re.compile( r"\b(microwave|mix|stir|blend|toast|assemble|heat)\b", re.IGNORECASE @@ -637,6 +629,11 @@ class RecipeEngine: return gen.generate(req, profiles, gaps) # Level 1 & 2: deterministic path + # L1 ("Use What I Have") applies strict quality gates: + # - exclude_generic: filter catch-all recipes at the DB level + # - effective_max_missing: default to 2 when user hasn't set a cap + # - match ratio: require ≥60% ingredient coverage to avoid low-signal results + _l1 = req.level == 1 and not req.shopping_mode nf = req.nutrition_filters rows = self._store.search_recipes_by_ingredients( req.pantry_items, @@ -647,7 +644,16 @@ class RecipeEngine: max_carbs_g=nf.max_carbs_g, max_sodium_mg=nf.max_sodium_mg, excluded_ids=req.excluded_ids or [], + exclude_generic=_l1, ) + + # L1 strict defaults: cap missing ingredients and require a minimum ratio. 
+ _L1_MAX_MISSING_DEFAULT = 2 + _L1_MIN_MATCH_RATIO = 0.6 + effective_max_missing = req.max_missing + if _l1 and effective_max_missing is None: + effective_max_missing = _L1_MAX_MISSING_DEFAULT + suggestions = [] hard_day_tier_map: dict[int, int] = {} # recipe_id → tier when hard_day_mode @@ -690,9 +696,17 @@ class RecipeEngine: missing.append(n) # Filter by max_missing — skipped in shopping mode (user is willing to buy) - if not req.shopping_mode and req.max_missing is not None and len(missing) > req.max_missing: + if not req.shopping_mode and effective_max_missing is not None and len(missing) > effective_max_missing: continue + # L1 match ratio gate: drop results where less than 60% of the recipe's + # ingredients are in the pantry. Prevents low-signal results like a + # 10-ingredient recipe matching on only one common item. + if _l1 and ingredient_names: + match_ratio = len(matched) / len(ingredient_names) + if match_ratio < _L1_MIN_MATCH_RATIO: + continue + # Filter and tier-rank by hard_day_mode if req.hard_day_mode: directions: list[str] = row.get("directions") or [] @@ -761,39 +775,17 @@ class RecipeEngine: source_url=_build_source_url(row), )) - # Assembly-dish templates (burrito, fried rice, pasta, etc.) - # Expiry boost: when expiry_first, the pantry_items list is already sorted - # by expiry urgency — treat the first slice as the "expiring" set so templates - # that use those items bubble up in the merged ranking. - expiring_set: set[str] = set() - if req.expiry_first: - expiring_set = _expand_pantry_set(req.pantry_items[:10]) - - assembly = match_assembly_templates( - pantry_items=req.pantry_items, - pantry_set=pantry_set, - excluded_ids=req.excluded_ids or [], - expiring_set=expiring_set, - ) - - # Cap by tier — lifted in shopping mode since missing-ingredient templates - # are desirable there (each fires an affiliate link opportunity). 
- if not req.shopping_mode: - assembly_limit = _ASSEMBLY_TIER_LIMITS.get(req.tier, 3) - assembly = assembly[:assembly_limit] - - # Interleave: sort templates and corpus recipes together. + # Sort corpus results — assembly templates are now served from a dedicated tab. # Hard day mode: primary sort by tier (0=premade, 1=simple, 2=moderate), - # then by match_count within each tier. Assembly templates are inherently - # simple so they default to tier 1 when not in the tier map. - # Normal mode: sort by match_count only. + # then by match_count within each tier. + # Normal mode: sort by match_count descending. if req.hard_day_mode and hard_day_tier_map: suggestions = sorted( - assembly + suggestions, + suggestions, key=lambda s: (hard_day_tier_map.get(s.id, 1), -s.match_count), ) else: - suggestions = sorted(assembly + suggestions, key=lambda s: s.match_count, reverse=True) + suggestions = sorted(suggestions, key=lambda s: -s.match_count) # Build grocery list — deduplicated union of all missing ingredients seen: set[str] = set() diff --git a/app/services/recipe/tag_inferrer.py b/app/services/recipe/tag_inferrer.py new file mode 100644 index 0000000..6302ed4 --- /dev/null +++ b/app/services/recipe/tag_inferrer.py @@ -0,0 +1,300 @@ +""" +Recipe tag inference engine. + +Derives normalized tags from a recipe's title, ingredient names, existing corpus +tags (category + keywords), enriched ingredient profile data, and optional +nutrition data. + +Tags are organized into five namespaces: + cuisine:* -- cuisine/region classification + dietary:* -- dietary restriction / nutrition profile + flavor:* -- flavor profile (spicy, smoky, sweet, etc.) + time:* -- effort / time signals + meal:* -- meal type + can_be:* -- achievable with substitutions (e.g. 
can_be:Gluten-Free) + +Output is a flat sorted list of strings, e.g.: + ["can_be:Gluten-Free", "cuisine:Italian", "dietary:Low-Carb", + "flavor:Savory", "flavor:Umami", "time:Quick"] + +These populate recipes.inferred_tags and are FTS5-indexed so browse domain +queries find recipes the food.com corpus tags alone would miss. +""" +from __future__ import annotations + + +# --------------------------------------------------------------------------- +# Text-signal tables +# (tag, [case-insensitive substrings to search in combined title+ingredient text]) +# --------------------------------------------------------------------------- + +_CUISINE_SIGNALS: list[tuple[str, list[str]]] = [ + ("cuisine:Japanese", ["miso", "dashi", "ramen", "sushi", "teriyaki", "sake", "mirin", + "wasabi", "panko", "edamame", "tonkatsu", "yakitori", "ponzu"]), + ("cuisine:Korean", ["gochujang", "kimchi", "doenjang", "gochugaru", + "bulgogi", "bibimbap", "japchae"]), + ("cuisine:Thai", ["fish sauce", "lemongrass", "galangal", "pad thai", "thai basil", + "kaffir lime", "tom yum", "green curry", "red curry", "nam pla"]), + ("cuisine:Chinese", ["hoisin", "oyster sauce", "five spice", "bok choy", "chow mein", + "dumpling", "wonton", "mapo", "char siu", "sichuan"]), + ("cuisine:Vietnamese", ["pho", "banh mi", "nuoc cham", "rice paper", "vietnamese"]), + ("cuisine:Indian", ["garam masala", "turmeric", "cardamom", "fenugreek", "paneer", + "tikka", "masala", "biryani", "dal", "naan", "tandoori", + "curry leaf", "tamarind", "chutney"]), + ("cuisine:Middle Eastern", ["tahini", "harissa", "za'atar", "sumac", "baharat", "rose water", + "pomegranate molasses", "freekeh", "fattoush", "shakshuka"]), + ("cuisine:Greek", ["feta", "tzatziki", "moussaka", "spanakopita", "orzo", + "kalamata", "gyro", "souvlaki", "dolma"]), + ("cuisine:Mediterranean", ["hummus", "pita", "couscous", "preserved lemon"]), + ("cuisine:Italian", ["pasta", "pizza", "risotto", "lasagna", "carbonara", "gnocchi", + "parmesan", "mozzarella", 
"ricotta", "prosciutto", "pancetta", + "arancini", "osso buco", "tiramisu", "pesto", "bolognese", + "cannoli", "polenta", "bruschetta", "focaccia"]), + ("cuisine:French", ["croissant", "quiche", "crepe", "coq au vin", + "ratatouille", "bearnaise", "hollandaise", "bouillabaisse", + "herbes de provence", "dijon", "gruyere", "brie", "cassoulet"]), + ("cuisine:Spanish", ["paella", "chorizo", "gazpacho", "tapas", "patatas bravas", + "sofrito", "manchego", "albondigas"]), + ("cuisine:German", ["sauerkraut", "bratwurst", "schnitzel", "pretzel", "strudel", + "spaetzle", "sauerbraten"]), + ("cuisine:Mexican", ["taco", "burrito", "enchilada", "salsa", "guacamole", "chipotle", + "queso", "tamale", "mole", "jalapeno", "tortilla", "carnitas", + "chile verde", "posole", "tostada", "quesadilla"]), + ("cuisine:Latin American", ["plantain", "yuca", "chimichurri", "ceviche", "adobo", "empanada"]), + ("cuisine:American", ["bbq sauce", "buffalo sauce", "ranch dressing", "coleslaw", + "cornbread", "mac and cheese", "brisket", "cheeseburger"]), + ("cuisine:Southern", ["collard greens", "black-eyed peas", "okra", "grits", "catfish", + "hush puppies", "pecan pie"]), + ("cuisine:Cajun", ["cajun", "creole", "gumbo", "jambalaya", "andouille", "etouffee"]), + ("cuisine:African", ["injera", "berbere", "jollof", "suya", "egusi", "fufu", "tagine"]), + ("cuisine:Caribbean", ["jerk", "scotch bonnet", "callaloo", "ackee"]), +] + +_DIETARY_SIGNALS: list[tuple[str, list[str]]] = [ + ("dietary:Vegan", ["vegan", "plant-based", "plant based"]), + ("dietary:Vegetarian", ["vegetarian", "meatless"]), + ("dietary:Gluten-Free", ["gluten-free", "gluten free", "celiac"]), + ("dietary:Dairy-Free", ["dairy-free", "dairy free", "lactose free", "non-dairy"]), + ("dietary:Low-Carb", ["low-carb", "low carb", "keto", "ketogenic", "very low carbs"]), + ("dietary:High-Protein", ["high protein", "high-protein"]), + ("dietary:Low-Fat", ["low-fat", "low fat", "fat-free", "reduced fat"]), + ("dietary:Paleo", ["paleo", 
"whole30"]), + ("dietary:Nut-Free", ["nut-free", "nut free", "peanut free"]), + ("dietary:Egg-Free", ["egg-free", "egg free"]), + ("dietary:Low-Sodium", ["low sodium", "no salt"]), + ("dietary:Healthy", ["healthy", "low cholesterol", "heart healthy", "wholesome"]), +] + +_FLAVOR_SIGNALS: list[tuple[str, list[str]]] = [ + ("flavor:Spicy", ["jalapeno", "habanero", "ghost pepper", "sriracha", + "chili flake", "red pepper flake", "cayenne", "hot sauce", + "gochujang", "harissa", "scotch bonnet", "szechuan pepper", "spicy"]), + ("flavor:Smoky", ["smoked", "liquid smoke", "smoked paprika", + "bbq sauce", "barbecue", "hickory", "mesquite"]), + ("flavor:Sweet", ["honey", "maple syrup", "brown sugar", "caramel", "chocolate", + "vanilla", "condensed milk", "molasses", "agave"]), + ("flavor:Savory", ["soy sauce", "fish sauce", "miso", "worcestershire", "anchovy", + "parmesan", "blue cheese", "bone broth"]), + ("flavor:Tangy", ["lemon juice", "lime juice", "vinegar", "balsamic", "buttermilk", + "sour cream", "fermented", "pickled", "tamarind", "sumac"]), + ("flavor:Herby", ["fresh basil", "fresh cilantro", "fresh dill", "fresh mint", + "fresh tarragon", "fresh thyme", "herbes de provence"]), + ("flavor:Rich", ["heavy cream", "creme fraiche", "mascarpone", "double cream", + "ghee", "coconut cream", "cream cheese"]), + ("flavor:Umami", ["mushroom", "nutritional yeast", "tomato paste", + "parmesan rind", "bonito", "kombu"]), +] + +_TIME_SIGNALS: list[tuple[str, list[str]]] = [ + ("time:Quick", ["< 15 mins", "< 30 mins", "weeknight", "easy"]), + ("time:Under 1 Hour", ["< 60 mins"]), + ("time:Make-Ahead", ["freezer", "overnight", "refrigerator", "make-ahead", "make ahead"]), + ("time:Slow Cook", ["slow cooker", "crockpot", "< 4 hours", "braise"]), +] + +# food.com corpus tag -> normalized tags +_CORPUS_TAG_MAP: dict[str, list[str]] = { + "european": ["cuisine:Italian", "cuisine:French", "cuisine:German", + "cuisine:Spanish"], + "asian": ["cuisine:Chinese", "cuisine:Japanese", 
"cuisine:Thai", + "cuisine:Korean", "cuisine:Vietnamese"], + "chinese": ["cuisine:Chinese"], + "japanese": ["cuisine:Japanese"], + "thai": ["cuisine:Thai"], + "vietnamese": ["cuisine:Vietnamese"], + "indian": ["cuisine:Indian"], + "greek": ["cuisine:Greek"], + "mexican": ["cuisine:Mexican"], + "african": ["cuisine:African"], + "caribbean": ["cuisine:Caribbean"], + "vegan": ["dietary:Vegan", "dietary:Vegetarian"], + "vegetarian": ["dietary:Vegetarian"], + "healthy": ["dietary:Healthy"], + "low cholesterol": ["dietary:Healthy"], + "very low carbs": ["dietary:Low-Carb"], + "high in...": ["dietary:High-Protein"], + "lactose free": ["dietary:Dairy-Free"], + "egg free": ["dietary:Egg-Free"], + "< 15 mins": ["time:Quick"], + "< 30 mins": ["time:Quick"], + "< 60 mins": ["time:Under 1 Hour"], + "< 4 hours": ["time:Slow Cook"], + "weeknight": ["time:Quick"], + "freezer": ["time:Make-Ahead"], + "dessert": ["meal:Dessert"], + "breakfast": ["meal:Breakfast"], + "lunch/snacks": ["meal:Lunch", "meal:Snack"], + "beverages": ["meal:Beverage"], + "cookie & brownie": ["meal:Dessert"], + "breads": ["meal:Bread"], +} + +# ingredient_profiles.elements value -> flavor tag +_ELEMENT_TO_FLAVOR: dict[str, str] = { + "Aroma": "flavor:Herby", + "Richness": "flavor:Rich", + "Structure": "", # no flavor tag + "Binding": "", + "Crust": "flavor:Smoky", + "Lift": "", + "Emulsion": "flavor:Rich", + "Acid": "flavor:Tangy", +} + + +def _build_text(title: str, ingredient_names: list[str]) -> str: + parts = [title.lower()] + parts.extend(i.lower() for i in ingredient_names) + return " ".join(parts) + + +def _match_signals(text: str, table: list[tuple[str, list[str]]]) -> list[str]: + return [tag for tag, pats in table if any(p in text for p in pats)] + + +def infer_tags( + title: str, + ingredient_names: list[str], + corpus_keywords: list[str], + corpus_category: str = "", + # Enriched ingredient profile signals (from ingredient_profiles cross-ref) + element_coverage: dict[str, float] | None = None, + 
fermented_count: int = 0, + glutamate_total: float = 0.0, + ph_min: float | None = None, + available_sub_constraints: list[str] | None = None, + # Nutrition data for macro-based tags + calories: float | None = None, + protein_g: float | None = None, + fat_g: float | None = None, + carbs_g: float | None = None, + servings: float | None = None, +) -> list[str]: + """ + Derive normalized tags for a recipe. + + Parameters + ---------- + title, ingredient_names, corpus_keywords, corpus_category + : Primary recipe data. + element_coverage + : Dict from recipes.element_coverage -- element name to coverage ratio + (e.g. {"Aroma": 0.6, "Richness": 0.4}). Derived from ingredient_profiles. + fermented_count + : Number of fermented ingredients (from ingredient_profiles.is_fermented). + glutamate_total + : Sum of glutamate_mg across all profiled ingredients. High values signal umami. + ph_min + : Minimum ph_estimate across profiled ingredients. Low values signal acidity. + available_sub_constraints + : Substitution constraint labels achievable for this recipe + (e.g. ["gluten_free", "low_carb"]). From substitution_pairs cross-ref. + These become can_be:* tags. + calories, protein_g, fat_g, carbs_g, servings + : Nutrition data for macro-based dietary tags. + + Returns + ------- + Sorted list of unique normalized tag strings. + """ + tags: set[str] = set() + + # 1. Map corpus tags to normalized vocabulary + for kw in corpus_keywords: + for t in _CORPUS_TAG_MAP.get(kw.lower(), []): + tags.add(t) + if corpus_category: + for t in _CORPUS_TAG_MAP.get(corpus_category.lower(), []): + tags.add(t) + + # 2. Text-signal matching + text = _build_text(title, ingredient_names) + tags.update(_match_signals(text, _CUISINE_SIGNALS)) + tags.update(_match_signals(text, _DIETARY_SIGNALS)) + tags.update(_match_signals(text, _FLAVOR_SIGNALS)) + + # 3. 
Time signals from corpus keywords + text + corpus_text = " ".join(kw.lower() for kw in corpus_keywords) + tags.update(_match_signals(corpus_text, _TIME_SIGNALS)) + tags.update(_match_signals(text, _TIME_SIGNALS)) + + # 4. Enriched profile signals + if element_coverage: + for element, coverage in element_coverage.items(): + if coverage > 0.2: # >20% of ingredients carry this element + flavor_tag = _ELEMENT_TO_FLAVOR.get(element, "") + if flavor_tag: + tags.add(flavor_tag) + + if glutamate_total > 50: + tags.add("flavor:Umami") + + if fermented_count > 0: + tags.add("flavor:Tangy") + + if ph_min is not None and ph_min < 4.5: + tags.add("flavor:Tangy") + + # 5. Achievable-via-substitution tags + if available_sub_constraints: + label_to_tag = { + "gluten_free": "can_be:Gluten-Free", + "low_calorie": "can_be:Low-Calorie", + "low_carb": "can_be:Low-Carb", + "vegan": "can_be:Vegan", + "dairy_free": "can_be:Dairy-Free", + "low_sodium": "can_be:Low-Sodium", + } + for label in available_sub_constraints: + tag = label_to_tag.get(label) + if tag: + tags.add(tag) + + # 6. Macro-based dietary tags + if servings and servings > 0 and any( + v is not None for v in (protein_g, fat_g, carbs_g, calories) + ): + def _per(v: float | None) -> float | None: + return v / servings if v is not None else None + + prot_s = _per(protein_g) + fat_s = _per(fat_g) + carb_s = _per(carbs_g) + cal_s = _per(calories) + + if prot_s is not None and prot_s >= 20: + tags.add("dietary:High-Protein") + if fat_s is not None and fat_s <= 5: + tags.add("dietary:Low-Fat") + if carb_s is not None and carb_s <= 10: + tags.add("dietary:Low-Carb") + if cal_s is not None and cal_s <= 250: + tags.add("dietary:Light") + elif protein_g is not None and protein_g >= 20: + tags.add("dietary:High-Protein") + + # 7. 
Vegan implies vegetarian + if "dietary:Vegan" in tags: + tags.add("dietary:Vegetarian") + + return sorted(tags) diff --git a/app/tiers.py b/app/tiers.py index c3257ce..1ce348c 100644 --- a/app/tiers.py +++ b/app/tiers.py @@ -18,8 +18,19 @@ KIWI_BYOK_UNLOCKABLE: frozenset[str] = frozenset({ "style_classifier", "meal_plan_llm", "meal_plan_llm_timing", + "community_fork_adapt", }) +# Sources subject to monthly cf-orch call caps. Subscription-based sources are uncapped. +LIFETIME_SOURCES: frozenset[str] = frozenset({"lifetime", "founders"}) + +# (source, tier) → monthly cf-orch call allowance +LIFETIME_ORCH_CAPS: dict[tuple[str, str], int] = { + ("lifetime", "paid"): 60, + ("lifetime", "premium"): 180, + ("founders", "premium"): 300, +} + # Feature → minimum tier required KIWI_FEATURES: dict[str, str] = { # Free tier @@ -43,6 +54,8 @@ KIWI_FEATURES: dict[str, str] = { "style_picker": "paid", "recipe_collections": "paid", "style_classifier": "paid", # LLM auto-tag for saved recipe style tags; BYOK-unlockable + "community_publish": "paid", # Publish plans/outcomes to community feed + "community_fork_adapt": "paid", # Fork with LLM pantry adaptation (BYOK-unlockable) # Premium tier "multi_household": "premium", diff --git a/compose.cloud.yml b/compose.cloud.yml index 29ef534..ae903ab 100644 --- a/compose.cloud.yml +++ b/compose.cloud.yml @@ -20,6 +20,9 @@ services: CLOUD_AUTH_BYPASS_IPS: ${CLOUD_AUTH_BYPASS_IPS:-} # cf-orch: route LLM calls through the coordinator for managed GPU inference CF_ORCH_URL: http://host.docker.internal:7700 + # Community PostgreSQL — shared across CF products; unset = community features unavailable (fail soft) + COMMUNITY_DB_URL: ${COMMUNITY_DB_URL:-} + COMMUNITY_PSEUDONYM_SALT: ${COMMUNITY_PSEUDONYM_SALT:-} extra_hosts: - "host.docker.internal:host-gateway" volumes: diff --git a/frontend/src/App.vue b/frontend/src/App.vue index 94a822d..cde0d78 100644 --- a/frontend/src/App.vue +++ b/frontend/src/App.vue @@ -197,7 +197,7 @@ import { 
householdAPI } from './services/api' type Tab = 'inventory' | 'receipts' | 'recipes' | 'settings' | 'mealplan' -const currentTab = ref('inventory') +const currentTab = ref('recipes') const sidebarCollapsed = ref(false) const inventoryStore = useInventoryStore() const { kiwiVisible, kiwiDirection } = useEasterEggs() @@ -225,6 +225,11 @@ async function switchTab(tab: Tab) { } onMounted(async () => { + // Pre-fetch inventory so Recipes tab has data on first load + if (inventoryStore.items.length === 0) { + await inventoryStore.fetchItems() + } + // Handle household invite links: /#/join?household_id=xxx&token=yyy const hash = window.location.hash if (hash.includes('/join')) { diff --git a/frontend/src/components/BuildYourOwnTab.vue b/frontend/src/components/BuildYourOwnTab.vue new file mode 100644 index 0000000..0c63e02 --- /dev/null +++ b/frontend/src/components/BuildYourOwnTab.vue @@ -0,0 +1,586 @@ + + + + + diff --git a/frontend/src/components/CommunityFeedPanel.vue b/frontend/src/components/CommunityFeedPanel.vue new file mode 100644 index 0000000..f744cc8 --- /dev/null +++ b/frontend/src/components/CommunityFeedPanel.vue @@ -0,0 +1,337 @@ + + + + + diff --git a/frontend/src/components/CommunityPostCard.vue b/frontend/src/components/CommunityPostCard.vue new file mode 100644 index 0000000..64c83b5 --- /dev/null +++ b/frontend/src/components/CommunityPostCard.vue @@ -0,0 +1,178 @@ + + + + + diff --git a/frontend/src/components/FeedbackButton.vue b/frontend/src/components/FeedbackButton.vue index 9373256..800ab25 100644 --- a/frontend/src/components/FeedbackButton.vue +++ b/frontend/src/components/FeedbackButton.vue @@ -140,11 +140,13 @@ import { ref, computed, onMounted } from 'vue' const props = defineProps<{ currentTab?: string }>() +const apiBase = (import.meta.env.VITE_API_BASE as string) ?? 
'' + // Probe once on mount — hidden until confirmed enabled so button never flashes const enabled = ref(false) onMounted(async () => { try { - const res = await fetch('/api/v1/feedback/status') + const res = await fetch(`${apiBase}/api/v1/feedback/status`) if (res.ok) { const data = await res.json() enabled.value = data.enabled === true @@ -205,7 +207,7 @@ async function submit() { loading.value = true submitError.value = '' try { - const res = await fetch('/api/v1/feedback', { + const res = await fetch(`${apiBase}/api/v1/feedback`, { method: 'POST', headers: { 'Content-Type': 'application/json' }, body: JSON.stringify({ @@ -407,6 +409,114 @@ async function submit() { .mt-md { margin-top: var(--spacing-md); } .mt-xs { margin-top: var(--spacing-xs); } +/* ── Form elements ────────────────────────────────────────────────────── */ +.form-group { + display: flex; + flex-direction: column; + gap: var(--spacing-xs); +} + +.form-label { + font-size: var(--font-size-sm); + font-weight: 600; + color: var(--color-text-muted); + text-transform: uppercase; + letter-spacing: 0.06em; +} + +.form-input { + width: 100%; + padding: var(--spacing-xs) var(--spacing-sm); + background: var(--color-bg-secondary); + border: 1px solid var(--color-border); + border-radius: var(--radius-md); + color: var(--color-text-primary); + font-family: var(--font-body); + font-size: var(--font-size-sm); + line-height: 1.5; + transition: border-color 0.15s; + box-sizing: border-box; +} +.form-input:focus { + outline: none; + border-color: var(--color-border-focus); +} +.form-input::placeholder { color: var(--color-text-muted); opacity: 0.7; } + +/* ── Buttons ──────────────────────────────────────────────────────────── */ +.btn { + display: inline-flex; + align-items: center; + justify-content: center; + gap: var(--spacing-xs); + padding: var(--spacing-xs) var(--spacing-md); + border-radius: var(--radius-md); + font-family: var(--font-body); + font-size: var(--font-size-sm); + font-weight: 500; + 
cursor: pointer; + transition: background 0.15s, color 0.15s, border-color 0.15s; + white-space: nowrap; +} +.btn:disabled { opacity: 0.5; cursor: not-allowed; } + +.btn-primary { + background: var(--color-primary); + color: #fff; + border: 1px solid var(--color-primary); +} +.btn-primary:hover:not(:disabled) { filter: brightness(1.1); } + +.btn-ghost { + background: transparent; + color: var(--color-text-secondary); + border: 1px solid var(--color-border); +} +.btn-ghost:hover:not(:disabled) { + background: var(--color-bg-secondary); + color: var(--color-text-primary); + border-color: var(--color-border-focus); +} + +/* ── Filter chips ─────────────────────────────────────────────────────── */ +.filter-chip-row { + display: flex; + flex-wrap: wrap; + gap: var(--spacing-xs); +} + +.btn-chip { + padding: 5px var(--spacing-sm); + background: var(--color-bg-secondary); + border: 1px solid var(--color-border); + border-radius: 999px; + font-family: var(--font-body); + font-size: var(--font-size-sm); + font-weight: 500; + color: var(--color-text-secondary); + cursor: pointer; + transition: background 0.15s, color 0.15s, border-color 0.15s; +} +.btn-chip.active, +.btn-chip:hover { + background: color-mix(in srgb, var(--color-primary) 15%, transparent); + border-color: var(--color-primary); + color: var(--color-primary); +} + +/* ── Card ─────────────────────────────────────────────────────────────── */ +.card { + background: var(--color-bg-card); + border: 1px solid var(--color-border); + border-radius: var(--radius-md); +} + +/* ── Text utilities ───────────────────────────────────────────────────── */ +.text-muted { color: var(--color-text-muted); } +.text-sm { font-size: var(--font-size-sm); line-height: 1.5; } +.text-xs { font-size: 0.75rem; line-height: 1.5; } +.font-semibold { font-weight: 600; } + /* Transition */ .modal-fade-enter-active, .modal-fade-leave-active { transition: opacity 0.2s ease; } .modal-fade-enter-from, .modal-fade-leave-to { opacity: 0; } diff 
--git a/frontend/src/components/HallOfChaosView.vue b/frontend/src/components/HallOfChaosView.vue new file mode 100644 index 0000000..cce3bd5 --- /dev/null +++ b/frontend/src/components/HallOfChaosView.vue @@ -0,0 +1,182 @@ + + + + + diff --git a/frontend/src/components/PublishOutcomeModal.vue b/frontend/src/components/PublishOutcomeModal.vue new file mode 100644 index 0000000..0154581 --- /dev/null +++ b/frontend/src/components/PublishOutcomeModal.vue @@ -0,0 +1,365 @@ +