Compare commits
No commits in common. "main" and "feature/shared-task-scheduler" have entirely different histories.
105 changed files with 1364 additions and 9940 deletions
40
.env.example
40
.env.example
|
|
@ -11,33 +11,6 @@ DATA_DIR=./data
|
|||
# Database (defaults to DATA_DIR/kiwi.db)
|
||||
# DB_PATH=./data/kiwi.db
|
||||
|
||||
# Pipeline data directory for downloaded parquets (used by download_datasets.py)
|
||||
# Override to store large datasets on a separate drive or NAS
|
||||
# KIWI_PIPELINE_DATA_DIR=./data/pipeline
|
||||
|
||||
# CF-core resource coordinator (VRAM lease management)
|
||||
# Set to the coordinator URL when running alongside cf-core orchestration
|
||||
# COORDINATOR_URL=http://localhost:7700
|
||||
# IP this machine advertises to the coordinator (must be reachable from coordinator host)
|
||||
# CF_ORCH_ADVERTISE_HOST=10.1.10.71
|
||||
|
||||
# CF-core hosted coordinator (managed cloud GPU inference — Paid+ tier)
|
||||
# Set CF_ORCH_URL to use a hosted cf-orch coordinator instead of self-hosting.
|
||||
# CF_LICENSE_KEY is read automatically by CFOrchClient for bearer auth.
|
||||
# CF_ORCH_URL=https://orch.circuitforge.tech
|
||||
# CF_LICENSE_KEY=CFG-KIWI-xxxx-xxxx-xxxx
|
||||
|
||||
# LLM backend — env-var auto-config (no llm.yaml needed for bare-metal users)
|
||||
# LLMRouter checks these in priority order:
|
||||
# 1. Anthropic cloud — set ANTHROPIC_API_KEY
|
||||
# 2. OpenAI cloud — set OPENAI_API_KEY
|
||||
# 3. Local Ollama — set OLLAMA_HOST (+ optionally OLLAMA_MODEL)
|
||||
# All three are optional; leave unset to rely on a local llm.yaml instead.
|
||||
# ANTHROPIC_API_KEY=sk-ant-...
|
||||
# OPENAI_API_KEY=sk-...
|
||||
# OLLAMA_HOST=http://localhost:11434
|
||||
# OLLAMA_MODEL=llama3.2
|
||||
|
||||
# Processing
|
||||
USE_GPU=true
|
||||
GPU_MEMORY_LIMIT=6144
|
||||
|
|
@ -55,14 +28,6 @@ DEMO_MODE=false
|
|||
# Cloud mode (set in compose.cloud.yml; also set here for reference)
|
||||
# CLOUD_DATA_ROOT=/devl/kiwi-cloud-data
|
||||
# KIWI_DB=data/kiwi.db # local-mode DB path override
|
||||
# DEV ONLY: bypass JWT auth for these IPs/CIDRs (LAN testing without Caddy in the path).
|
||||
# NEVER set in production.
|
||||
# IMPORTANT: Docker port mapping NATs source IPs to the bridge gateway. When hitting
|
||||
# localhost:8515 (host → Docker → nginx → API), nginx sees 192.168.80.1, not 127.0.0.1.
|
||||
# Include the Docker bridge CIDR to allow localhost and LAN access through nginx.
|
||||
# Run: docker network inspect kiwi-cloud_kiwi-cloud-net | grep Subnet
|
||||
# Example: CLOUD_AUTH_BYPASS_IPS=10.1.10.0/24,127.0.0.1,::1,192.168.80.0/20
|
||||
# CLOUD_AUTH_BYPASS_IPS=
|
||||
|
||||
# Heimdall license server (required for cloud tier resolution)
|
||||
# HEIMDALL_URL=https://license.circuitforge.tech
|
||||
|
|
@ -70,8 +35,3 @@ DEMO_MODE=false
|
|||
|
||||
# Directus JWT (must match cf-directus SECRET env var)
|
||||
# DIRECTUS_JWT_SECRET=
|
||||
|
||||
# In-app feedback → Forgejo issue creation
|
||||
# FORGEJO_API_TOKEN=
|
||||
# FORGEJO_REPO=Circuit-Forge/kiwi
|
||||
# FORGEJO_API_URL=https://git.opensourcesolarpunk.com/api/v1
|
||||
|
|
|
|||
6
.gitignore
vendored
6
.gitignore
vendored
|
|
@ -1,7 +1,4 @@
|
|||
|
||||
# CLAUDE.md — gitignored per BSL 1.1 commercial policy
|
||||
CLAUDE.md
|
||||
|
||||
# Superpowers brainstorming artifacts
|
||||
.superpowers/
|
||||
|
||||
|
|
@ -22,6 +19,3 @@ dist/
|
|||
|
||||
# Data directories
|
||||
data/
|
||||
|
||||
# Test artifacts (MagicMock sqlite files from pytest)
|
||||
<MagicMock*
|
||||
|
|
|
|||
|
|
@ -1,7 +0,0 @@
|
|||
# Findings suppressed here are historical false positives or already-rotated secrets.
|
||||
# .env was accidentally included in the initial commit; it is now gitignored.
|
||||
# Rotate DIRECTUS_JWT_SECRET if it has not been changed since 2026-03-30.
|
||||
|
||||
# c166e5216 (chore: initial commit) — .env included by mistake
|
||||
c166e5216af532a08112ef87e8542cd51c184115:.env:generic-api-key:25
|
||||
c166e5216af532a08112ef87e8542cd51c184115:.env:cf-generic-env-token:25
|
||||
28
LICENSE-BSL
28
LICENSE-BSL
|
|
@ -1,28 +0,0 @@
|
|||
Business Source License 1.1
|
||||
|
||||
Licensor: Circuit Forge LLC
|
||||
Licensed Work: Kiwi — Pantry tracking and leftover recipe suggestions
|
||||
Copyright (c) 2026 Circuit Forge LLC
|
||||
Additional Use Grant: You may use the Licensed Work for personal,
|
||||
non-commercial pantry tracking and recipe suggestion
|
||||
purposes only.
|
||||
Change Date: 2030-01-01
|
||||
Change License: MIT License
|
||||
|
||||
For the full Business Source License 1.1 text, see:
|
||||
https://mariadb.com/bsl11/
|
||||
|
||||
---
|
||||
|
||||
This license applies to the following components of Kiwi:
|
||||
|
||||
- app/services/recipe/recipe_engine.py
|
||||
- app/services/recipe/assembly_recipes.py
|
||||
- app/services/recipe/llm_recipe.py
|
||||
- app/services/expiration_predictor.py
|
||||
- app/tasks/scheduler.py
|
||||
- app/tasks/runner.py
|
||||
- app/tiers.py
|
||||
- app/cloud_session.py
|
||||
- frontend/src/components/RecipesView.vue
|
||||
- frontend/src/stores/recipes.ts
|
||||
34
LICENSE-MIT
34
LICENSE-MIT
|
|
@ -1,34 +0,0 @@
|
|||
MIT License
|
||||
|
||||
Copyright (c) 2026 Circuit Forge LLC
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
|
||||
---
|
||||
|
||||
This license applies to the following components of Kiwi:
|
||||
|
||||
- app/api/endpoints/inventory.py
|
||||
- app/api/endpoints/ocr.py
|
||||
- app/db/store.py
|
||||
- app/db/migrations/
|
||||
- app/core/config.py
|
||||
- scripts/pipeline/
|
||||
- scripts/download_datasets.py
|
||||
- scripts/backfill_texture_profiles.py
|
||||
|
|
@ -1,169 +0,0 @@
|
|||
"""
|
||||
Feedback endpoint — creates Forgejo issues from in-app feedback.
|
||||
Ported from peregrine/scripts/feedback_api.py; adapted for Kiwi context.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import platform
|
||||
import subprocess
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
from typing import Literal
|
||||
|
||||
import requests
|
||||
from fastapi import APIRouter, HTTPException
|
||||
from pydantic import BaseModel
|
||||
|
||||
from app.core.config import settings
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
_ROOT = Path(__file__).resolve().parents[3]
|
||||
|
||||
# ── Forgejo helpers ────────────────────────────────────────────────────────────
|
||||
|
||||
_LABEL_COLORS = {
|
||||
"beta-feedback": "#0075ca",
|
||||
"needs-triage": "#e4e669",
|
||||
"bug": "#d73a4a",
|
||||
"feature-request": "#a2eeef",
|
||||
"question": "#d876e3",
|
||||
}
|
||||
|
||||
|
||||
def _forgejo_headers() -> dict:
|
||||
token = os.environ.get("FORGEJO_API_TOKEN", "")
|
||||
return {"Authorization": f"token {token}", "Content-Type": "application/json"}
|
||||
|
||||
|
||||
def _ensure_labels(label_names: list[str]) -> list[int]:
    """Resolve label names to Forgejo label IDs, creating any that are missing.

    Labels that cannot be fetched or created are silently skipped, so the
    returned list may be shorter than *label_names*.
    """
    base = os.environ.get("FORGEJO_API_URL", "https://git.opensourcesolarpunk.com/api/v1")
    repo = os.environ.get("FORGEJO_REPO", "Circuit-Forge/kiwi")
    headers = _forgejo_headers()
    labels_url = f"{base}/repos/{repo}/labels"

    listing = requests.get(labels_url, headers=headers, timeout=10)
    known = {lb["name"]: lb["id"] for lb in listing.json()} if listing.ok else {}

    resolved: list[int] = []
    for name in label_names:
        if name in known:
            resolved.append(known[name])
            continue
        created = requests.post(
            labels_url,
            headers=headers,
            json={"name": name, "color": _LABEL_COLORS.get(name, "#ededed")},
            timeout=10,
        )
        if created.ok:
            resolved.append(created.json()["id"])
    return resolved
|
||||
|
||||
|
||||
def _collect_context(tab: str) -> dict:
    """Collect lightweight app context: tab, version, platform, timestamp."""
    # `git describe` yields a human-readable version when running from a
    # checkout; anything that goes wrong (no git, no repo, timeout) falls
    # back to "dev".
    try:
        described = subprocess.check_output(
            ["git", "describe", "--tags", "--always"],
            cwd=_ROOT, text=True, timeout=5,
        )
        version = described.strip()
    except Exception:
        version = "dev"

    now = datetime.now(timezone.utc)
    return {
        "tab": tab,
        "version": version,
        "demo_mode": settings.DEMO_MODE,
        "cloud_mode": settings.CLOUD_MODE,
        "platform": platform.platform(),
        # ISO-8601 with a "Z" suffix instead of "+00:00".
        "timestamp": now.isoformat().replace("+00:00", "Z"),
    }
|
||||
|
||||
|
||||
def _build_issue_body(form: dict, context: dict) -> str:
|
||||
_TYPE_LABELS = {"bug": "🐛 Bug", "feature": "✨ Feature Request", "other": "💬 Other"}
|
||||
lines: list[str] = [
|
||||
f"## {_TYPE_LABELS.get(form.get('type', 'other'), '💬 Other')}",
|
||||
"",
|
||||
form.get("description", ""),
|
||||
"",
|
||||
]
|
||||
if form.get("type") == "bug" and form.get("repro"):
|
||||
lines += ["### Reproduction Steps", "", form["repro"], ""]
|
||||
|
||||
lines += ["### Context", ""]
|
||||
for k, v in context.items():
|
||||
lines.append(f"- **{k}:** {v}")
|
||||
lines.append("")
|
||||
|
||||
if form.get("submitter"):
|
||||
lines += ["---", f"*Submitted by: {form['submitter']}*"]
|
||||
|
||||
return "\n".join(lines)
|
||||
|
||||
|
||||
# ── Schemas ────────────────────────────────────────────────────────────────────
|
||||
|
||||
class FeedbackRequest(BaseModel):
    """Payload for in-app feedback submission."""

    # Issue title as filed in Forgejo.
    title: str
    # Free-form feedback body (becomes the issue description section).
    description: str
    # Feedback category; selects the type-specific Forgejo label.
    type: Literal["bug", "feature", "other"] = "other"
    # Reproduction steps; only rendered for bug reports.
    repro: str = ""
    # UI tab the feedback was sent from (recorded in issue context).
    tab: str = "unknown"
    submitter: str = ""  # optional "Name <email>" attribution


class FeedbackResponse(BaseModel):
    """Reference to the created Forgejo issue, returned to the client."""

    # Forgejo issue number within the repo.
    issue_number: int
    # html_url of the created issue.
    issue_url: str
|
||||
|
||||
|
||||
# ── Routes ─────────────────────────────────────────────────────────────────────
|
||||
|
||||
@router.get("/status")
def feedback_status() -> dict:
    """Return whether feedback submission is configured on this instance."""
    token_configured = bool(os.environ.get("FORGEJO_API_TOKEN"))
    return {"enabled": token_configured and not settings.DEMO_MODE}
|
||||
|
||||
|
||||
@router.post("", response_model=FeedbackResponse)
def submit_feedback(payload: FeedbackRequest) -> FeedbackResponse:
    """
    File a Forgejo issue from in-app feedback.
    Silently disabled when FORGEJO_API_TOKEN is not set (demo/offline mode).

    Raises:
        HTTPException(503): FORGEJO_API_TOKEN is not configured.
        HTTPException(403): instance is running in demo mode.
        HTTPException(502): Forgejo rejected the issue-creation request.
    """
    token = os.environ.get("FORGEJO_API_TOKEN", "")
    if not token:
        raise HTTPException(
            status_code=503,
            detail="Feedback disabled: FORGEJO_API_TOKEN not configured.",
        )
    if settings.DEMO_MODE:
        raise HTTPException(status_code=403, detail="Feedback disabled in demo mode.")

    context = _collect_context(payload.tab)
    form = {
        "type": payload.type,
        "description": payload.description,
        "repro": payload.repro,
        "submitter": payload.submitter,
    }
    body = _build_issue_body(form, context)
    # Base labels applied to every feedback issue, plus one type-specific label.
    labels = ["beta-feedback", "needs-triage"]
    labels.append({"bug": "bug", "feature": "feature-request"}.get(payload.type, "question"))

    base = os.environ.get("FORGEJO_API_URL", "https://git.opensourcesolarpunk.com/api/v1")
    repo = os.environ.get("FORGEJO_REPO", "Circuit-Forge/kiwi")
    headers = _forgejo_headers()

    # Labels must exist server-side before the issue can reference them by ID.
    label_ids = _ensure_labels(labels)
    resp = requests.post(
        f"{base}/repos/{repo}/issues",
        headers=headers,
        json={"title": payload.title, "body": body, "labels": label_ids},
        timeout=15,
    )
    if not resp.ok:
        # Truncate upstream error text so large HTML error pages don't leak through.
        raise HTTPException(status_code=502, detail=f"Forgejo error: {resp.text[:200]}")

    data = resp.json()
    return FeedbackResponse(issue_number=data["number"], issue_url=data["html_url"])
|
||||
|
|
@ -369,23 +369,6 @@ async def list_tags(
|
|||
|
||||
# ── Stats ─────────────────────────────────────────────────────────────────────
|
||||
|
||||
@router.post("/recalculate-expiry")
|
||||
async def recalculate_expiry(
|
||||
session: CloudUser = Depends(get_session),
|
||||
store: Store = Depends(get_store),
|
||||
) -> dict:
|
||||
"""Re-run the expiration predictor over all available inventory items.
|
||||
|
||||
Uses each item's stored purchase_date and current location. Safe to call
|
||||
multiple times — idempotent per session.
|
||||
"""
|
||||
def _run(s: Store) -> tuple[int, int]:
|
||||
return s.recalculate_expiry(tier=session.tier, has_byok=session.has_byok)
|
||||
|
||||
updated, skipped = await asyncio.to_thread(_run, store)
|
||||
return {"updated": updated, "skipped": skipped}
|
||||
|
||||
|
||||
@router.get("/stats", response_model=InventoryStats)
|
||||
async def get_inventory_stats(store: Store = Depends(get_store)):
|
||||
def _stats():
|
||||
|
|
|
|||
|
|
@ -1,67 +0,0 @@
|
|||
"""Recipe suggestion endpoints."""
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from pathlib import Path
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException
|
||||
|
||||
from app.cloud_session import CloudUser, get_session
|
||||
from app.db.store import Store
|
||||
from app.models.schemas.recipe import RecipeRequest, RecipeResult
|
||||
from app.services.recipe.recipe_engine import RecipeEngine
|
||||
from app.tiers import can_use
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
def _suggest_in_thread(db_path: Path, req: RecipeRequest) -> RecipeResult:
    """Run recipe suggestion in a worker thread with its own Store connection.

    SQLite connections are not shareable across threads, so a fresh Store
    (and therefore a fresh sqlite3.Connection) is created in the thread that
    will use it; otherwise sqlite3 raises "ProgrammingError: SQLite objects
    created in a thread can only be used in that same thread".
    """
    thread_store = Store(db_path)
    try:
        engine = RecipeEngine(thread_store)
        return engine.suggest(req)
    finally:
        thread_store.close()
|
||||
|
||||
|
||||
@router.post("/suggest", response_model=RecipeResult)
async def suggest_recipes(
    req: RecipeRequest,
    session: CloudUser = Depends(get_session),
) -> RecipeResult:
    """Suggest recipes, gating LLM-backed levels behind tier/BYOK checks."""
    # Tier/BYOK always come from the authenticated session — any
    # client-supplied values in the request body are overwritten.
    req = req.model_copy(update={"tier": session.tier, "has_byok": session.has_byok})

    if req.level == 4 and not req.wildcard_confirmed:
        raise HTTPException(
            status_code=400,
            detail="Level 4 (Wildcard) requires wildcard_confirmed=true.",
        )

    needs_llm = req.level in (3, 4)
    if needs_llm and not can_use("recipe_suggestions", req.tier, req.has_byok):
        raise HTTPException(
            status_code=403,
            detail="LLM recipe levels require Paid tier or a configured LLM backend.",
        )

    if req.style_id and not can_use("style_picker", req.tier):
        raise HTTPException(status_code=403, detail="Style picker requires Paid tier.")

    # Off-load the SQLite-backed suggestion to a worker thread.
    return await asyncio.to_thread(_suggest_in_thread, session.db, req)
|
||||
|
||||
|
||||
@router.get("/{recipe_id}")
async def get_recipe(recipe_id: int, session: CloudUser = Depends(get_session)) -> dict:
    """Fetch a stored recipe by ID, or 404 if it does not exist."""
    def _fetch(db_path: Path, rid: int) -> dict | None:
        # Fresh Store per worker thread — SQLite connections are thread-bound.
        thread_store = Store(db_path)
        try:
            return thread_store.get_recipe(rid)
        finally:
            thread_store.close()

    found = await asyncio.to_thread(_fetch, session.db, recipe_id)
    if not found:
        raise HTTPException(status_code=404, detail="Recipe not found.")
    return found
|
||||
|
|
@ -1,46 +0,0 @@
|
|||
"""User settings endpoints."""
|
||||
from __future__ import annotations
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException
|
||||
from pydantic import BaseModel
|
||||
|
||||
from app.cloud_session import CloudUser, get_session
|
||||
from app.db.session import get_store
|
||||
from app.db.store import Store
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
_ALLOWED_KEYS = frozenset({"cooking_equipment"})
|
||||
|
||||
|
||||
class SettingBody(BaseModel):
    """Request body for PUT /settings/{key}."""

    # Raw string value stored as-is for the key.
    value: str
|
||||
|
||||
|
||||
@router.get("/{key}")
async def get_setting(
    key: str,
    session: CloudUser = Depends(get_session),
    store: Store = Depends(get_store),
) -> dict:
    """Return the stored value for a settings key."""
    # Whitelist keys so arbitrary strings cannot probe the settings table.
    if key not in _ALLOWED_KEYS:
        raise HTTPException(status_code=422, detail=f"Unknown settings key: '{key}'.")
    stored = store.get_setting(key)
    if stored is None:
        raise HTTPException(status_code=404, detail=f"Setting '{key}' not found.")
    return {"key": key, "value": stored}
|
||||
|
||||
|
||||
@router.put("/{key}")
async def set_setting(
    key: str,
    body: SettingBody,
    session: CloudUser = Depends(get_session),
    store: Store = Depends(get_store),
) -> dict:
    """Upsert a settings key-value pair."""
    # Unknown keys are rejected up front, mirroring the GET whitelist check.
    if key not in _ALLOWED_KEYS:
        raise HTTPException(status_code=422, detail=f"Unknown settings key: '{key}'.")
    store.set_setting(key, body.value)
    return {"key": key, "value": body.value}
|
||||
|
|
@ -1,42 +0,0 @@
|
|||
"""Staple library endpoints."""
|
||||
from __future__ import annotations
|
||||
|
||||
from fastapi import APIRouter, HTTPException
|
||||
|
||||
from app.services.recipe.staple_library import StapleLibrary
|
||||
|
||||
router = APIRouter()
|
||||
_lib = StapleLibrary()
|
||||
|
||||
|
||||
@router.get("/")
async def list_staples(dietary: str | None = None) -> list[dict]:
    """List staples as summary dicts, optionally filtered by dietary label."""
    if dietary:
        selection = _lib.filter_by_dietary(dietary)
    else:
        selection = _lib.list_all()

    summaries: list[dict] = []
    for staple in selection:
        summaries.append(
            {
                "slug": staple.slug,
                "name": staple.name,
                "description": staple.description,
                "dietary_labels": staple.dietary_labels,
                # Only the format names — full format details come from GET /{slug}.
                "yield_formats": list(staple.yield_formats.keys()),
            }
        )
    return summaries
|
||||
|
||||
|
||||
@router.get("/{slug}")
async def get_staple(slug: str) -> dict:
    """Return the full record for one staple, or 404 if the slug is unknown."""
    found = _lib.get(slug)
    if not found:
        raise HTTPException(status_code=404, detail=f"Staple '{slug}' not found.")
    fields = (
        "slug",
        "name",
        "description",
        "dietary_labels",
        "base_ingredients",
        "base_method",
        "base_time_minutes",
        "yield_formats",
        "compatible_styles",
    )
    return {field: getattr(found, field) for field in fields}
|
||||
|
|
@ -1,14 +1,10 @@
|
|||
from fastapi import APIRouter
|
||||
from app.api.endpoints import health, receipts, export, inventory, ocr, recipes, settings, staples, feedback
|
||||
from app.api.endpoints import health, receipts, export, inventory, ocr
|
||||
|
||||
api_router = APIRouter()
|
||||
|
||||
api_router.include_router(health.router, prefix="/health", tags=["health"])
|
||||
api_router.include_router(receipts.router, prefix="/receipts", tags=["receipts"])
|
||||
api_router.include_router(ocr.router, prefix="/receipts", tags=["ocr"])
|
||||
api_router.include_router(export.router, tags=["export"])
|
||||
api_router.include_router(inventory.router, prefix="/inventory", tags=["inventory"])
|
||||
api_router.include_router(recipes.router, prefix="/recipes", tags=["recipes"])
|
||||
api_router.include_router(settings.router, prefix="/settings", tags=["settings"])
|
||||
api_router.include_router(staples.router, prefix="/staples", tags=["staples"])
|
||||
api_router.include_router(feedback.router, prefix="/feedback", tags=["feedback"])
|
||||
api_router.include_router(health.router, prefix="/health", tags=["health"])
|
||||
api_router.include_router(receipts.router, prefix="/receipts", tags=["receipts"])
|
||||
api_router.include_router(ocr.router, prefix="/receipts", tags=["ocr"]) # OCR endpoints under /receipts
|
||||
api_router.include_router(export.router, tags=["export"]) # No prefix, uses /export in the router
|
||||
api_router.include_router(inventory.router, prefix="/inventory", tags=["inventory"])
|
||||
|
|
@ -37,43 +37,6 @@ DIRECTUS_JWT_SECRET: str = os.environ.get("DIRECTUS_JWT_SECRET", "")
|
|||
HEIMDALL_URL: str = os.environ.get("HEIMDALL_URL", "https://license.circuitforge.tech")
|
||||
HEIMDALL_ADMIN_TOKEN: str = os.environ.get("HEIMDALL_ADMIN_TOKEN", "")
|
||||
|
||||
# Dev bypass: comma-separated IPs or CIDR ranges that skip JWT auth.
|
||||
# NEVER set this in production. Intended only for LAN developer testing when
|
||||
# the request doesn't pass through Caddy (which normally injects X-CF-Session).
|
||||
# Example: CLOUD_AUTH_BYPASS_IPS=10.1.10.0/24,127.0.0.1
|
||||
import ipaddress as _ipaddress
|
||||
|
||||
_BYPASS_RAW: list[str] = [
|
||||
e.strip()
|
||||
for e in os.environ.get("CLOUD_AUTH_BYPASS_IPS", "").split(",")
|
||||
if e.strip()
|
||||
]
|
||||
|
||||
_BYPASS_NETS: list[_ipaddress.IPv4Network | _ipaddress.IPv6Network] = []
|
||||
_BYPASS_IPS: frozenset[str] = frozenset()
|
||||
|
||||
if _BYPASS_RAW:
|
||||
_nets, _ips = [], set()
|
||||
for entry in _BYPASS_RAW:
|
||||
try:
|
||||
_nets.append(_ipaddress.ip_network(entry, strict=False))
|
||||
except ValueError:
|
||||
_ips.add(entry) # treat non-parseable entries as bare IPs
|
||||
_BYPASS_NETS = _nets
|
||||
_BYPASS_IPS = frozenset(_ips)
|
||||
|
||||
|
||||
def _is_bypass_ip(ip: str) -> bool:
    """True when *ip* matches CLOUD_AUTH_BYPASS_IPS (exact IP or CIDR range)."""
    if not ip:
        return False
    # Exact-match fast path — covers bare IPs and unparseable entries that
    # were kept verbatim during config parsing.
    if ip in _BYPASS_IPS:
        return True
    try:
        parsed = _ipaddress.ip_address(ip)
    except ValueError:
        return False
    return any(parsed in net for net in _BYPASS_NETS)
|
||||
|
||||
_LOCAL_KIWI_DB: Path = Path(os.environ.get("KIWI_DB", "data/kiwi.db"))
|
||||
|
||||
_TIER_CACHE: dict[str, tuple[str, float]] = {}
|
||||
|
|
@ -190,28 +153,12 @@ def get_session(request: Request) -> CloudUser:
|
|||
|
||||
Local mode: fully-privileged "local" user pointing at local DB.
|
||||
Cloud mode: validates X-CF-Session JWT, provisions license, resolves tier.
|
||||
Dev bypass: if CLOUD_AUTH_BYPASS_IPS is set and the client IP matches,
|
||||
returns a "local" session without JWT validation (dev/LAN use only).
|
||||
"""
|
||||
has_byok = _detect_byok()
|
||||
|
||||
if not CLOUD_MODE:
|
||||
return CloudUser(user_id="local", tier="local", db=_LOCAL_KIWI_DB, has_byok=has_byok)
|
||||
|
||||
# Prefer X-Real-IP (set by nginx from the actual client address) over the
|
||||
# TCP peer address (which is nginx's container IP when behind the proxy).
|
||||
# Prefer X-Real-IP (set by nginx from the actual client address) over the
|
||||
# TCP peer address (which is nginx's container IP when behind the proxy).
|
||||
client_ip = (
|
||||
request.headers.get("x-real-ip", "")
|
||||
or (request.client.host if request.client else "")
|
||||
)
|
||||
if (_BYPASS_IPS or _BYPASS_NETS) and _is_bypass_ip(client_ip):
|
||||
log.debug("CLOUD_AUTH_BYPASS_IPS match for %s — returning local session", client_ip)
|
||||
# Use a dev DB under CLOUD_DATA_ROOT so the container has a writable path.
|
||||
dev_db = _user_db_path("local-dev")
|
||||
return CloudUser(user_id="local-dev", tier="local", db=dev_db, has_byok=has_byok)
|
||||
|
||||
raw_header = (
|
||||
request.headers.get("x-cf-session", "")
|
||||
or request.headers.get("cookie", "")
|
||||
|
|
@ -219,7 +166,7 @@ def get_session(request: Request) -> CloudUser:
|
|||
if not raw_header:
|
||||
raise HTTPException(status_code=401, detail="Not authenticated")
|
||||
|
||||
token = _extract_session_token(raw_header) # gitleaks:allow — function name, not a secret
|
||||
token = _extract_session_token(raw_header)
|
||||
if not token:
|
||||
raise HTTPException(status_code=401, detail="Not authenticated")
|
||||
|
||||
|
|
|
|||
|
|
@ -43,13 +43,6 @@ class Settings:
|
|||
# Quality
|
||||
MIN_QUALITY_SCORE: float = float(os.environ.get("MIN_QUALITY_SCORE", "50.0"))
|
||||
|
||||
# CF-core resource coordinator (VRAM lease management)
|
||||
COORDINATOR_URL: str = os.environ.get("COORDINATOR_URL", "http://localhost:7700")
|
||||
|
||||
# Hosted cf-orch coordinator — bearer token for managed cloud GPU inference (Paid+)
|
||||
# CFOrchClient reads CF_LICENSE_KEY automatically; exposed here for startup validation.
|
||||
CF_LICENSE_KEY: str | None = os.environ.get("CF_LICENSE_KEY")
|
||||
|
||||
# Feature flags
|
||||
ENABLE_OCR: bool = os.environ.get("ENABLE_OCR", "false").lower() in ("1", "true", "yes")
|
||||
|
||||
|
|
|
|||
|
|
@ -9,7 +9,6 @@ CREATE TABLE receipts_new (
|
|||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
filename TEXT NOT NULL,
|
||||
original_path TEXT NOT NULL,
|
||||
processed_path TEXT,
|
||||
status TEXT NOT NULL DEFAULT 'uploaded'
|
||||
CHECK (status IN (
|
||||
'uploaded',
|
||||
|
|
|
|||
|
|
@ -1,48 +0,0 @@
|
|||
-- Migration 006: Ingredient element profiles + FlavorGraph molecule index.

-- One row per canonical ingredient; list/map-valued columns are stored as
-- JSON text with SQL defaults of '[]' / '{}'.
CREATE TABLE ingredient_profiles (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    name TEXT NOT NULL,
    name_variants TEXT NOT NULL DEFAULT '[]', -- JSON array of aliases/alternate spellings
    elements TEXT NOT NULL DEFAULT '[]', -- JSON array: ["Richness","Depth"]
    -- Functional submetadata (from USDA FDC)
    fat_pct REAL DEFAULT 0.0,
    fat_saturated_pct REAL DEFAULT 0.0,
    moisture_pct REAL DEFAULT 0.0,
    protein_pct REAL DEFAULT 0.0,
    starch_pct REAL DEFAULT 0.0,
    binding_score INTEGER DEFAULT 0 CHECK (binding_score BETWEEN 0 AND 3),
    glutamate_mg REAL DEFAULT 0.0,
    ph_estimate REAL,
    sodium_mg_per_100g REAL DEFAULT 0.0,
    smoke_point_c REAL,
    is_fermented INTEGER NOT NULL DEFAULT 0,
    is_emulsifier INTEGER NOT NULL DEFAULT 0,
    -- Aroma submetadata
    flavor_molecule_ids TEXT NOT NULL DEFAULT '[]', -- JSON array of FlavorGraph compound IDs
    heat_stable INTEGER NOT NULL DEFAULT 1,
    add_timing TEXT NOT NULL DEFAULT 'any'
        CHECK (add_timing IN ('early','finish','any')),
    -- Brightness submetadata
    -- NOTE(review): the NULL in IN (...) never matches via equality; NULL values
    -- pass the CHECK anyway because a NULL comparison is neither true nor false.
    -- Listing NULL here is redundant — confirm intent.
    acid_type TEXT CHECK (acid_type IN ('citric','acetic','lactic',NULL)),
    -- Texture submetadata
    texture_profile TEXT NOT NULL DEFAULT 'neutral',
    water_activity REAL,
    -- Source
    usda_fdc_id TEXT,
    source TEXT NOT NULL DEFAULT 'usda',
    created_at TEXT NOT NULL DEFAULT (datetime('now'))
);

CREATE UNIQUE INDEX idx_ingredient_profiles_name ON ingredient_profiles (name);
CREATE INDEX idx_ingredient_profiles_elements ON ingredient_profiles (elements);

-- Inverted mapping from a FlavorGraph compound to the ingredients containing it.
CREATE TABLE flavor_molecules (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    compound_id TEXT NOT NULL UNIQUE, -- FlavorGraph node ID
    compound_name TEXT NOT NULL,
    ingredient_names TEXT NOT NULL DEFAULT '[]', -- JSON array of ingredient names
    created_at TEXT NOT NULL DEFAULT (datetime('now'))
);

CREATE INDEX idx_flavor_molecules_compound_id ON flavor_molecules (compound_id);
|
||||
|
|
@ -1,24 +0,0 @@
|
|||
-- Migration 007: Recipe corpus index (food.com dataset).

-- One row per imported recipe; list/map-valued columns are JSON text.
CREATE TABLE recipes (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    external_id TEXT, -- source-dataset identifier (unique when present)
    title TEXT NOT NULL,
    ingredients TEXT NOT NULL DEFAULT '[]', -- JSON array of raw ingredient strings
    ingredient_names TEXT NOT NULL DEFAULT '[]', -- JSON array of normalized names
    directions TEXT NOT NULL DEFAULT '[]', -- JSON array of step strings
    category TEXT,
    keywords TEXT NOT NULL DEFAULT '[]', -- JSON array
    calories REAL,
    fat_g REAL,
    protein_g REAL,
    sodium_mg REAL,
    -- Element coverage scores computed at import time
    element_coverage TEXT NOT NULL DEFAULT '{}', -- JSON {element: 0.0-1.0}
    source TEXT NOT NULL DEFAULT 'foodcom',
    created_at TEXT NOT NULL DEFAULT (datetime('now'))
);

CREATE INDEX idx_recipes_title ON recipes (title);
CREATE INDEX idx_recipes_category ON recipes (category);
-- In SQLite a UNIQUE index permits multiple NULL external_id rows.
CREATE UNIQUE INDEX idx_recipes_external_id ON recipes (external_id);
|
||||
|
|
@ -1,22 +0,0 @@
|
|||
-- Migration 008: Derived substitution pairs.
-- Source: diff of lishuyang/recipepairs (GPL-3.0 derivation — raw data not shipped).

-- One row per (original, substitute, constraint) triple; the *_delta columns
-- record how the substitute shifts each functional property.
CREATE TABLE substitution_pairs (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    original_name TEXT NOT NULL,
    substitute_name TEXT NOT NULL,
    constraint_label TEXT NOT NULL, -- 'vegan'|'vegetarian'|'dairy_free'|'gluten_free'|'low_fat'|'low_sodium'
    fat_delta REAL DEFAULT 0.0,
    moisture_delta REAL DEFAULT 0.0,
    glutamate_delta REAL DEFAULT 0.0,
    protein_delta REAL DEFAULT 0.0,
    occurrence_count INTEGER DEFAULT 1,
    compensation_hints TEXT NOT NULL DEFAULT '[]', -- JSON [{ingredient, reason, element}]
    source TEXT NOT NULL DEFAULT 'derived',
    created_at TEXT NOT NULL DEFAULT (datetime('now'))
);

CREATE INDEX idx_substitution_pairs_original ON substitution_pairs (original_name);
CREATE INDEX idx_substitution_pairs_constraint ON substitution_pairs (constraint_label);
-- Deduplicate pairs: at most one row per (original, substitute, constraint).
CREATE UNIQUE INDEX idx_substitution_pairs_pair
    ON substitution_pairs (original_name, substitute_name, constraint_label);
|
||||
|
|
@ -1,27 +0,0 @@
|
|||
-- Migration 009: Staple library (bulk-preparable base components).

-- Catalog of staples; JSON-text columns hold lists/maps with '[]'/'{}' defaults.
CREATE TABLE staples (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    slug TEXT NOT NULL UNIQUE, -- stable identifier referenced by user_staples
    name TEXT NOT NULL,
    description TEXT,
    base_ingredients TEXT NOT NULL DEFAULT '[]', -- JSON array of ingredient strings
    base_method TEXT,
    base_time_minutes INTEGER,
    yield_formats TEXT NOT NULL DEFAULT '{}', -- JSON {format_name: {elements, shelf_days, methods, texture}}
    dietary_labels TEXT NOT NULL DEFAULT '[]', -- JSON ['vegan','high-protein']
    compatible_styles TEXT NOT NULL DEFAULT '[]', -- JSON [style_id]
    created_at TEXT NOT NULL DEFAULT (datetime('now'))
);

-- A user's prepared batches of staples; rows cascade away when the
-- referenced staple is deleted.
CREATE TABLE user_staples (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    staple_slug TEXT NOT NULL REFERENCES staples(slug) ON DELETE CASCADE,
    active_format TEXT NOT NULL,
    quantity_g REAL,
    prepared_at TEXT,
    notes TEXT,
    created_at TEXT NOT NULL DEFAULT (datetime('now'))
);

CREATE INDEX idx_user_staples_slug ON user_staples (staple_slug);
|
||||
|
|
@ -1,15 +0,0 @@
|
|||
-- Migration 010: User substitution approval log (opt-in dataset moat).

-- One row per substitution the user accepted or rejected; only rows with
-- opted_in = 1 are eligible for anonymized sharing.
CREATE TABLE substitution_feedback (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    original_name TEXT NOT NULL,
    substitute_name TEXT NOT NULL,
    constraint_label TEXT,
    compensation_used TEXT NOT NULL DEFAULT '[]', -- JSON array of compensation ingredient names
    approved INTEGER NOT NULL DEFAULT 0,
    opted_in INTEGER NOT NULL DEFAULT 0, -- user consented to anonymized sharing
    created_at TEXT NOT NULL DEFAULT (datetime('now'))
);

CREATE INDEX idx_substitution_feedback_original ON substitution_feedback (original_name);
CREATE INDEX idx_substitution_feedback_opted_in ON substitution_feedback (opted_in);
|
||||
|
|
@ -1,11 +0,0 @@
|
|||
-- Migration 011: Daily rate limits (leftover mode: 5/day free tier).
|
||||
|
||||
CREATE TABLE rate_limits (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
feature TEXT NOT NULL,
|
||||
window_date TEXT NOT NULL, -- YYYY-MM-DD
|
||||
count INTEGER NOT NULL DEFAULT 0,
|
||||
UNIQUE (feature, window_date)
|
||||
);
|
||||
|
||||
CREATE INDEX idx_rate_limits_feature_date ON rate_limits (feature, window_date);
|
||||
|
|
@ -1,6 +0,0 @@
|
|||
-- Migration 012: User settings key-value store.
|
||||
|
||||
CREATE TABLE IF NOT EXISTS user_settings (
|
||||
key TEXT PRIMARY KEY,
|
||||
value TEXT NOT NULL
|
||||
);
|
||||
|
|
@ -1,18 +0,0 @@
|
|||
-- Migration 014: Add macro nutrition columns to recipes and ingredient_profiles.
|
||||
--
|
||||
-- recipes: sugar, carbs, fiber, servings, and an estimated flag.
|
||||
-- ingredient_profiles: carbs, fiber, calories, sugar per 100g (for estimation fallback).
|
||||
|
||||
ALTER TABLE recipes ADD COLUMN sugar_g REAL;
|
||||
ALTER TABLE recipes ADD COLUMN carbs_g REAL;
|
||||
ALTER TABLE recipes ADD COLUMN fiber_g REAL;
|
||||
ALTER TABLE recipes ADD COLUMN servings REAL;
|
||||
ALTER TABLE recipes ADD COLUMN nutrition_estimated INTEGER NOT NULL DEFAULT 0;
|
||||
|
||||
ALTER TABLE ingredient_profiles ADD COLUMN carbs_g_per_100g REAL DEFAULT 0.0;
|
||||
ALTER TABLE ingredient_profiles ADD COLUMN fiber_g_per_100g REAL DEFAULT 0.0;
|
||||
ALTER TABLE ingredient_profiles ADD COLUMN calories_per_100g REAL DEFAULT 0.0;
|
||||
ALTER TABLE ingredient_profiles ADD COLUMN sugar_g_per_100g REAL DEFAULT 0.0;
|
||||
|
||||
CREATE INDEX idx_recipes_sugar_g ON recipes (sugar_g);
|
||||
CREATE INDEX idx_recipes_carbs_g ON recipes (carbs_g);
|
||||
|
|
@ -1,38 +0,0 @@
|
|||
-- Migration 015: FTS5 inverted index for recipe ingredient lookup.
|
||||
--
|
||||
-- Content table backed by `recipes` — stores only the inverted index, no text duplication.
|
||||
-- MATCH queries replace O(N) LIKE scans with O(log N) token lookups.
|
||||
--
|
||||
-- One-time rebuild cost on 3.2M rows: ~15-30 seconds at startup.
|
||||
-- Subsequent startups skip this migration entirely.
|
||||
|
||||
CREATE VIRTUAL TABLE IF NOT EXISTS recipes_fts USING fts5(
|
||||
ingredient_names,
|
||||
content=recipes,
|
||||
content_rowid=id,
|
||||
tokenize="unicode61"
|
||||
);
|
||||
|
||||
INSERT INTO recipes_fts(recipes_fts) VALUES('rebuild');
|
||||
|
||||
-- Triggers to keep the FTS index in sync with the recipes table.
|
||||
-- Without these, rows inserted after the initial rebuild are invisible to FTS queries.
|
||||
CREATE TRIGGER IF NOT EXISTS recipes_fts_ai
|
||||
AFTER INSERT ON recipes BEGIN
|
||||
INSERT INTO recipes_fts(rowid, ingredient_names)
|
||||
VALUES (new.id, new.ingredient_names);
|
||||
END;
|
||||
|
||||
CREATE TRIGGER IF NOT EXISTS recipes_fts_ad
|
||||
AFTER DELETE ON recipes BEGIN
|
||||
INSERT INTO recipes_fts(recipes_fts, rowid, ingredient_names)
|
||||
VALUES ('delete', old.id, old.ingredient_names);
|
||||
END;
|
||||
|
||||
CREATE TRIGGER IF NOT EXISTS recipes_fts_au
|
||||
AFTER UPDATE ON recipes BEGIN
|
||||
INSERT INTO recipes_fts(recipes_fts, rowid, ingredient_names)
|
||||
VALUES ('delete', old.id, old.ingredient_names);
|
||||
INSERT INTO recipes_fts(rowid, ingredient_names)
|
||||
VALUES (new.id, new.ingredient_names);
|
||||
END;
|
||||
|
|
@ -1,27 +0,0 @@
|
|||
-- Migration 016: Add FTS5 sync triggers for the recipes_fts content table.
|
||||
--
|
||||
-- Migration 015 created recipes_fts and did a one-time rebuild, but omitted
|
||||
-- triggers. Without them, INSERT/UPDATE/DELETE on recipes does not update the
|
||||
-- FTS index, so new rows are invisible to MATCH queries.
|
||||
--
|
||||
-- CREATE TRIGGER IF NOT EXISTS is idempotent — safe to re-run.
|
||||
|
||||
CREATE TRIGGER IF NOT EXISTS recipes_fts_ai
|
||||
AFTER INSERT ON recipes BEGIN
|
||||
INSERT INTO recipes_fts(rowid, ingredient_names)
|
||||
VALUES (new.id, new.ingredient_names);
|
||||
END;
|
||||
|
||||
CREATE TRIGGER IF NOT EXISTS recipes_fts_ad
|
||||
AFTER DELETE ON recipes BEGIN
|
||||
INSERT INTO recipes_fts(recipes_fts, rowid, ingredient_names)
|
||||
VALUES ('delete', old.id, old.ingredient_names);
|
||||
END;
|
||||
|
||||
CREATE TRIGGER IF NOT EXISTS recipes_fts_au
|
||||
AFTER UPDATE ON recipes BEGIN
|
||||
INSERT INTO recipes_fts(recipes_fts, rowid, ingredient_names)
|
||||
VALUES ('delete', old.id, old.ingredient_names);
|
||||
INSERT INTO recipes_fts(rowid, ingredient_names)
|
||||
VALUES (new.id, new.ingredient_names);
|
||||
END;
|
||||
477
app/db/store.py
477
app/db/store.py
|
|
@ -32,10 +32,7 @@ class Store:
|
|||
# Deserialise any TEXT columns that contain JSON
|
||||
for key in ("metadata", "nutrition_data", "source_data", "items",
|
||||
"metrics", "improvement_suggestions", "confidence_scores",
|
||||
"warnings",
|
||||
# recipe columns
|
||||
"ingredients", "ingredient_names", "directions",
|
||||
"keywords", "element_coverage"):
|
||||
"warnings"):
|
||||
if key in d and isinstance(d[key], str):
|
||||
try:
|
||||
d[key] = json.loads(d[key])
|
||||
|
|
@ -232,72 +229,6 @@ class Store:
|
|||
(str(days),),
|
||||
)
|
||||
|
||||
def recalculate_expiry(
|
||||
self,
|
||||
tier: str = "local",
|
||||
has_byok: bool = False,
|
||||
) -> tuple[int, int]:
|
||||
"""Re-run the expiration predictor over all available inventory items.
|
||||
|
||||
Uses each item's existing purchase_date (falls back to today if NULL)
|
||||
and its current location. Skips items that have an explicit
|
||||
expiration_date from a source other than auto-prediction (i.e. items
|
||||
whose expiry was found on a receipt or entered by the user) cannot be
|
||||
distinguished — all available items are recalculated.
|
||||
|
||||
Returns (updated_count, skipped_count).
|
||||
"""
|
||||
from datetime import date
|
||||
from app.services.expiration_predictor import ExpirationPredictor
|
||||
|
||||
predictor = ExpirationPredictor()
|
||||
rows = self._fetch_all(
|
||||
"""SELECT i.id, i.location, i.purchase_date,
|
||||
p.name AS product_name, p.category AS product_category
|
||||
FROM inventory_items i
|
||||
JOIN products p ON p.id = i.product_id
|
||||
WHERE i.status = 'available'""",
|
||||
(),
|
||||
)
|
||||
|
||||
updated = skipped = 0
|
||||
for row in rows:
|
||||
cat = predictor.get_category_from_product(
|
||||
row["product_name"] or "",
|
||||
product_category=row.get("product_category"),
|
||||
location=row.get("location"),
|
||||
)
|
||||
purchase_date_raw = row.get("purchase_date")
|
||||
try:
|
||||
purchase_date = (
|
||||
date.fromisoformat(purchase_date_raw)
|
||||
if purchase_date_raw
|
||||
else date.today()
|
||||
)
|
||||
except (ValueError, TypeError):
|
||||
purchase_date = date.today()
|
||||
|
||||
exp = predictor.predict_expiration(
|
||||
cat,
|
||||
row["location"] or "pantry",
|
||||
purchase_date=purchase_date,
|
||||
product_name=row["product_name"],
|
||||
tier=tier,
|
||||
has_byok=has_byok,
|
||||
)
|
||||
if exp is None:
|
||||
skipped += 1
|
||||
continue
|
||||
|
||||
self.conn.execute(
|
||||
"UPDATE inventory_items SET expiration_date = ?, updated_at = datetime('now') WHERE id = ?",
|
||||
(str(exp), row["id"]),
|
||||
)
|
||||
updated += 1
|
||||
|
||||
self.conn.commit()
|
||||
return updated, skipped
|
||||
|
||||
# ── receipt_data ──────────────────────────────────────────────────────
|
||||
|
||||
def upsert_receipt_data(self, receipt_id: int, data: dict) -> dict[str, Any]:
|
||||
|
|
@ -329,409 +260,3 @@ class Store:
|
|||
return self._fetch_one(
|
||||
"SELECT * FROM receipt_data WHERE receipt_id = ?", (receipt_id,)
|
||||
)
|
||||
|
||||
# ── recipes ───────────────────────────────────────────────────────────
|
||||
|
||||
def _fts_ready(self) -> bool:
|
||||
"""Return True if the recipes_fts virtual table exists."""
|
||||
row = self._fetch_one(
|
||||
"SELECT 1 FROM sqlite_master WHERE type='table' AND name='recipes_fts'"
|
||||
)
|
||||
return row is not None
|
||||
|
||||
# Words that carry no recipe-ingredient signal and should be filtered
|
||||
# out when tokenising multi-word product names for FTS expansion.
|
||||
_FTS_TOKEN_STOPWORDS: frozenset[str] = frozenset({
|
||||
# Common English stopwords
|
||||
"a", "an", "the", "of", "in", "for", "with", "and", "or", "to",
|
||||
"from", "at", "by", "as", "on", "into",
|
||||
# Brand / marketing words that appear in product names
|
||||
"lean", "cuisine", "healthy", "choice", "stouffer", "original",
|
||||
"classic", "deluxe", "homestyle", "family", "style", "grade",
|
||||
"premium", "select", "natural", "organic", "fresh", "lite",
|
||||
"ready", "quick", "easy", "instant", "microwave", "frozen",
|
||||
"brand", "size", "large", "small", "medium", "extra",
|
||||
# Plant-based / alt-meat brand names
|
||||
"daring", "gardein", "morningstar", "lightlife", "tofurky",
|
||||
"quorn", "omni", "nuggs", "simulate", "simulate",
|
||||
# Preparation states — "cut up chicken" is still chicken
|
||||
"cut", "diced", "sliced", "chopped", "minced", "shredded",
|
||||
"cooked", "raw", "whole", "boneless", "skinless", "trimmed",
|
||||
"pre", "prepared", "marinated", "seasoned", "breaded", "battered",
|
||||
"grilled", "roasted", "smoked", "canned", "dried", "dehydrated",
|
||||
"pieces", "piece", "strips", "strip", "chunks", "chunk",
|
||||
"fillets", "fillet", "cutlets", "cutlet", "tenders", "nuggets",
|
||||
# Units / packaging
|
||||
"oz", "lb", "lbs", "pkg", "pack", "box", "can", "bag", "jar",
|
||||
})
|
||||
|
||||
# Maps substrings found in product-label names to canonical recipe-corpus
|
||||
# ingredient terms. Checked as substring matches against the lower-cased
|
||||
# full product name, then against each individual token.
|
||||
_FTS_SYNONYMS: dict[str, str] = {
|
||||
# Ground / minced beef
|
||||
"burger patt": "hamburger",
|
||||
"beef patt": "hamburger",
|
||||
"ground beef": "hamburger",
|
||||
"ground chuck": "hamburger",
|
||||
"ground round": "hamburger",
|
||||
"mince": "hamburger",
|
||||
"veggie burger": "hamburger",
|
||||
"beyond burger": "hamburger",
|
||||
"impossible burger": "hamburger",
|
||||
"plant burger": "hamburger",
|
||||
"chicken patt": "hamburger", # FTS match only — recipe scoring still works
|
||||
# Sausages
|
||||
"kielbasa": "sausage",
|
||||
"bratwurst": "sausage",
|
||||
"brat ": "sausage",
|
||||
"frankfurter": "hotdog",
|
||||
"wiener": "hotdog",
|
||||
# Chicken cuts + plant-based chicken → generic chicken for broader matching
|
||||
"chicken breast": "chicken",
|
||||
"chicken thigh": "chicken",
|
||||
"chicken drumstick": "chicken",
|
||||
"chicken wing": "chicken",
|
||||
"rotisserie chicken": "chicken",
|
||||
"chicken tender": "chicken",
|
||||
"chicken strip": "chicken",
|
||||
"chicken piece": "chicken",
|
||||
"fake chicken": "chicken",
|
||||
"plant chicken": "chicken",
|
||||
"vegan chicken": "chicken",
|
||||
"daring": "chicken", # Daring Foods brand
|
||||
"gardein chick": "chicken",
|
||||
"quorn chick": "chicken",
|
||||
"chick'n": "chicken",
|
||||
"chikn": "chicken",
|
||||
"not-chicken": "chicken",
|
||||
"no-chicken": "chicken",
|
||||
# Plant-based beef subs — map to broad "beef" not "hamburger"
|
||||
# (texture varies: strips ≠ ground; let corpus handle the specific form)
|
||||
"not-beef": "beef",
|
||||
"no-beef": "beef",
|
||||
"plant beef": "beef",
|
||||
"vegan beef": "beef",
|
||||
# Plant-based pork subs
|
||||
"not-pork": "pork",
|
||||
"no-pork": "pork",
|
||||
"plant pork": "pork",
|
||||
"vegan pork": "pork",
|
||||
"omnipork": "pork",
|
||||
"omni pork": "pork",
|
||||
# Generic alt-meat catch-alls → broad "beef" (safer than hamburger)
|
||||
"fake meat": "beef",
|
||||
"plant meat": "beef",
|
||||
"vegan meat": "beef",
|
||||
"meat-free": "beef",
|
||||
"meatless": "beef",
|
||||
# Pork cuts
|
||||
"pork chop": "pork",
|
||||
"pork loin": "pork",
|
||||
"pork tenderloin": "pork",
|
||||
# Tomato-based sauces
|
||||
"marinara": "tomato sauce",
|
||||
"pasta sauce": "tomato sauce",
|
||||
"spaghetti sauce": "tomato sauce",
|
||||
"pizza sauce": "tomato sauce",
|
||||
# Pasta shapes — map to generic "pasta" so FTS finds any pasta recipe
|
||||
"macaroni": "pasta",
|
||||
"noodles": "pasta",
|
||||
"spaghetti": "pasta",
|
||||
"penne": "pasta",
|
||||
"fettuccine": "pasta",
|
||||
"rigatoni": "pasta",
|
||||
"linguine": "pasta",
|
||||
"rotini": "pasta",
|
||||
"farfalle": "pasta",
|
||||
# Cheese variants → "cheese" for broad matching
|
||||
"shredded cheese": "cheese",
|
||||
"sliced cheese": "cheese",
|
||||
"american cheese": "cheese",
|
||||
"cheddar": "cheese",
|
||||
"mozzarella": "cheese",
|
||||
# Cream variants
|
||||
"heavy cream": "cream",
|
||||
"whipping cream": "cream",
|
||||
"half and half": "cream",
|
||||
# Buns / rolls
|
||||
"burger bun": "buns",
|
||||
"hamburger bun": "buns",
|
||||
"hot dog bun": "buns",
|
||||
"bread roll": "buns",
|
||||
"dinner roll": "buns",
|
||||
# Tortillas / wraps
|
||||
"flour tortilla": "tortillas",
|
||||
"corn tortilla": "tortillas",
|
||||
"tortilla wrap": "tortillas",
|
||||
"soft taco shell": "tortillas",
|
||||
"taco shell": "taco shells",
|
||||
"pita bread": "pita",
|
||||
"flatbread": "flatbread",
|
||||
# Canned beans
|
||||
"black bean": "beans",
|
||||
"pinto bean": "beans",
|
||||
"kidney bean": "beans",
|
||||
"refried bean": "beans",
|
||||
"chickpea": "beans",
|
||||
"garbanzo": "beans",
|
||||
# Rice variants
|
||||
"white rice": "rice",
|
||||
"brown rice": "rice",
|
||||
"jasmine rice": "rice",
|
||||
"basmati rice": "rice",
|
||||
"instant rice": "rice",
|
||||
"microwavable rice": "rice",
|
||||
# Salsa / hot sauce
|
||||
"hot sauce": "salsa",
|
||||
"taco sauce": "salsa",
|
||||
"enchilada sauce": "salsa",
|
||||
# Sour cream substitute
|
||||
"greek yogurt": "sour cream",
|
||||
# Prepackaged meals
|
||||
"lean cuisine": "casserole",
|
||||
"stouffer": "casserole",
|
||||
"healthy choice": "casserole",
|
||||
"marie callender": "casserole",
|
||||
}
|
||||
|
||||
@staticmethod
|
||||
def _normalize_for_fts(name: str) -> list[str]:
|
||||
"""Expand one pantry item to all FTS search terms it should contribute.
|
||||
|
||||
Returns the original name plus:
|
||||
- Any synonym-map canonical terms (handles product-label → corpus name)
|
||||
- Individual significant tokens from multi-word product names
|
||||
(handles packaged meals like "Lean Cuisine Chicken Alfredo" → also
|
||||
searches for "chicken" and "alfredo" independently)
|
||||
"""
|
||||
lower = name.lower().strip()
|
||||
if not lower:
|
||||
return []
|
||||
|
||||
terms: list[str] = [lower]
|
||||
|
||||
# Substring synonym check on full name
|
||||
for pattern, canonical in Store._FTS_SYNONYMS.items():
|
||||
if pattern in lower:
|
||||
terms.append(canonical)
|
||||
|
||||
# For multi-word product names, also add individual significant tokens
|
||||
if " " in lower:
|
||||
for token in lower.split():
|
||||
if len(token) <= 3 or token in Store._FTS_TOKEN_STOPWORDS:
|
||||
continue
|
||||
if token not in terms:
|
||||
terms.append(token)
|
||||
# Synonym-expand individual tokens too
|
||||
if token in Store._FTS_SYNONYMS:
|
||||
canonical = Store._FTS_SYNONYMS[token]
|
||||
if canonical not in terms:
|
||||
terms.append(canonical)
|
||||
|
||||
return terms
|
||||
|
||||
@staticmethod
|
||||
def _build_fts_query(ingredient_names: list[str]) -> str:
|
||||
"""Build an FTS5 MATCH expression ORing all ingredient terms.
|
||||
|
||||
Each pantry item is expanded via _normalize_for_fts so that
|
||||
product-label names (e.g. "burger patties") also search for their
|
||||
recipe-corpus equivalents (e.g. "hamburger"), and multi-word packaged
|
||||
product names contribute their individual ingredient tokens.
|
||||
"""
|
||||
parts: list[str] = []
|
||||
seen: set[str] = set()
|
||||
for name in ingredient_names:
|
||||
for term in Store._normalize_for_fts(name):
|
||||
# Strip characters that break FTS5 query syntax
|
||||
clean = term.replace('"', "").replace("'", "")
|
||||
if not clean or clean in seen:
|
||||
continue
|
||||
seen.add(clean)
|
||||
parts.append(f'"{clean}"')
|
||||
return " OR ".join(parts)
|
||||
|
||||
def search_recipes_by_ingredients(
|
||||
self,
|
||||
ingredient_names: list[str],
|
||||
limit: int = 20,
|
||||
category: str | None = None,
|
||||
max_calories: float | None = None,
|
||||
max_sugar_g: float | None = None,
|
||||
max_carbs_g: float | None = None,
|
||||
max_sodium_mg: float | None = None,
|
||||
excluded_ids: list[int] | None = None,
|
||||
) -> list[dict]:
|
||||
"""Find recipes containing any of the given ingredient names.
|
||||
Scores by match count and returns highest-scoring first.
|
||||
|
||||
Uses FTS5 index (migration 015) when available — O(log N) per query.
|
||||
Falls back to LIKE scans on older databases.
|
||||
|
||||
Nutrition filters use NULL-passthrough: rows without nutrition data
|
||||
always pass (they may be estimated or absent entirely).
|
||||
"""
|
||||
if not ingredient_names:
|
||||
return []
|
||||
|
||||
extra_clauses: list[str] = []
|
||||
extra_params: list = []
|
||||
if category:
|
||||
extra_clauses.append("r.category = ?")
|
||||
extra_params.append(category)
|
||||
if max_calories is not None:
|
||||
extra_clauses.append("(r.calories IS NULL OR r.calories <= ?)")
|
||||
extra_params.append(max_calories)
|
||||
if max_sugar_g is not None:
|
||||
extra_clauses.append("(r.sugar_g IS NULL OR r.sugar_g <= ?)")
|
||||
extra_params.append(max_sugar_g)
|
||||
if max_carbs_g is not None:
|
||||
extra_clauses.append("(r.carbs_g IS NULL OR r.carbs_g <= ?)")
|
||||
extra_params.append(max_carbs_g)
|
||||
if max_sodium_mg is not None:
|
||||
extra_clauses.append("(r.sodium_mg IS NULL OR r.sodium_mg <= ?)")
|
||||
extra_params.append(max_sodium_mg)
|
||||
if excluded_ids:
|
||||
placeholders = ",".join("?" * len(excluded_ids))
|
||||
extra_clauses.append(f"r.id NOT IN ({placeholders})")
|
||||
extra_params.extend(excluded_ids)
|
||||
where_extra = (" AND " + " AND ".join(extra_clauses)) if extra_clauses else ""
|
||||
|
||||
if self._fts_ready():
|
||||
return self._search_recipes_fts(
|
||||
ingredient_names, limit, where_extra, extra_params
|
||||
)
|
||||
return self._search_recipes_like(
|
||||
ingredient_names, limit, where_extra, extra_params
|
||||
)
|
||||
|
||||
def _search_recipes_fts(
|
||||
self,
|
||||
ingredient_names: list[str],
|
||||
limit: int,
|
||||
where_extra: str,
|
||||
extra_params: list,
|
||||
) -> list[dict]:
|
||||
"""FTS5-backed ingredient search. Candidates fetched via inverted index;
|
||||
match_count computed in Python over the small candidate set."""
|
||||
fts_query = self._build_fts_query(ingredient_names)
|
||||
if not fts_query:
|
||||
return []
|
||||
|
||||
# Pull up to 10× limit candidates so ranking has enough headroom.
|
||||
sql = f"""
|
||||
SELECT r.*
|
||||
FROM recipes_fts
|
||||
JOIN recipes r ON r.id = recipes_fts.rowid
|
||||
WHERE recipes_fts MATCH ?
|
||||
{where_extra}
|
||||
LIMIT ?
|
||||
"""
|
||||
rows = self._fetch_all(sql, (fts_query, *extra_params, limit * 10))
|
||||
|
||||
pantry_set = {n.lower().strip() for n in ingredient_names}
|
||||
scored: list[dict] = []
|
||||
for row in rows:
|
||||
raw = row.get("ingredient_names") or []
|
||||
names: list[str] = raw if isinstance(raw, list) else json.loads(raw or "[]")
|
||||
match_count = sum(1 for n in names if n.lower() in pantry_set)
|
||||
scored.append({**row, "match_count": match_count})
|
||||
|
||||
scored.sort(key=lambda r: (-r["match_count"], r["id"]))
|
||||
return scored[:limit]
|
||||
|
||||
def _search_recipes_like(
|
||||
self,
|
||||
ingredient_names: list[str],
|
||||
limit: int,
|
||||
where_extra: str,
|
||||
extra_params: list,
|
||||
) -> list[dict]:
|
||||
"""Legacy LIKE-based ingredient search (O(N×rows) — slow on large corpora)."""
|
||||
like_params = [f'%"{n}"%' for n in ingredient_names]
|
||||
like_clauses = " OR ".join(
|
||||
"r.ingredient_names LIKE ?" for _ in ingredient_names
|
||||
)
|
||||
match_score = " + ".join(
|
||||
"CASE WHEN r.ingredient_names LIKE ? THEN 1 ELSE 0 END"
|
||||
for _ in ingredient_names
|
||||
)
|
||||
sql = f"""
|
||||
SELECT r.*, ({match_score}) AS match_count
|
||||
FROM recipes r
|
||||
WHERE ({like_clauses})
|
||||
{where_extra}
|
||||
ORDER BY match_count DESC, r.id ASC
|
||||
LIMIT ?
|
||||
"""
|
||||
all_params = like_params + like_params + extra_params + [limit]
|
||||
return self._fetch_all(sql, tuple(all_params))
|
||||
|
||||
def get_recipe(self, recipe_id: int) -> dict | None:
|
||||
return self._fetch_one("SELECT * FROM recipes WHERE id = ?", (recipe_id,))
|
||||
|
||||
# ── rate limits ───────────────────────────────────────────────────────
|
||||
|
||||
def check_and_increment_rate_limit(
|
||||
self, feature: str, daily_max: int
|
||||
) -> tuple[bool, int]:
|
||||
"""Check daily counter for feature; only increment if under the limit.
|
||||
Returns (allowed, current_count). Rejected calls do not consume quota."""
|
||||
from datetime import date
|
||||
today = date.today().isoformat()
|
||||
row = self._fetch_one(
|
||||
"SELECT count FROM rate_limits WHERE feature = ? AND window_date = ?",
|
||||
(feature, today),
|
||||
)
|
||||
current = row["count"] if row else 0
|
||||
if current >= daily_max:
|
||||
return (False, current)
|
||||
self.conn.execute("""
|
||||
INSERT INTO rate_limits (feature, window_date, count)
|
||||
VALUES (?, ?, 1)
|
||||
ON CONFLICT(feature, window_date) DO UPDATE SET count = count + 1
|
||||
""", (feature, today))
|
||||
self.conn.commit()
|
||||
return (True, current + 1)
|
||||
|
||||
# ── user settings ────────────────────────────────────────────────────
|
||||
|
||||
def get_setting(self, key: str) -> str | None:
|
||||
"""Return the value for a settings key, or None if not set."""
|
||||
row = self._fetch_one(
|
||||
"SELECT value FROM user_settings WHERE key = ?", (key,)
|
||||
)
|
||||
return row["value"] if row else None
|
||||
|
||||
def set_setting(self, key: str, value: str) -> None:
|
||||
"""Upsert a settings key-value pair."""
|
||||
self.conn.execute(
|
||||
"INSERT INTO user_settings (key, value) VALUES (?, ?)"
|
||||
" ON CONFLICT(key) DO UPDATE SET value = excluded.value",
|
||||
(key, value),
|
||||
)
|
||||
self.conn.commit()
|
||||
|
||||
# ── substitution feedback ─────────────────────────────────────────────
|
||||
|
||||
def log_substitution_feedback(
|
||||
self,
|
||||
original: str,
|
||||
substitute: str,
|
||||
constraint: str | None,
|
||||
compensation_used: list[str],
|
||||
approved: bool,
|
||||
opted_in: bool,
|
||||
) -> None:
|
||||
self.conn.execute("""
|
||||
INSERT INTO substitution_feedback
|
||||
(original_name, substitute_name, constraint_label,
|
||||
compensation_used, approved, opted_in)
|
||||
VALUES (?,?,?,?,?,?)
|
||||
""", (
|
||||
original, substitute, constraint,
|
||||
self._dump(compensation_used),
|
||||
int(approved), int(opted_in),
|
||||
))
|
||||
self.conn.commit()
|
||||
|
|
|
|||
|
|
@ -1,81 +0,0 @@
|
|||
"""Pydantic schemas for the recipe engine API."""
|
||||
from __future__ import annotations
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
|
||||
class SwapCandidate(BaseModel):
|
||||
original_name: str
|
||||
substitute_name: str
|
||||
constraint_label: str
|
||||
explanation: str
|
||||
compensation_hints: list[dict] = Field(default_factory=list)
|
||||
|
||||
|
||||
class NutritionPanel(BaseModel):
|
||||
"""Per-recipe macro summary. All values are per-serving when servings is known,
|
||||
otherwise for the full recipe. None means data is unavailable."""
|
||||
calories: float | None = None
|
||||
fat_g: float | None = None
|
||||
protein_g: float | None = None
|
||||
carbs_g: float | None = None
|
||||
fiber_g: float | None = None
|
||||
sugar_g: float | None = None
|
||||
sodium_mg: float | None = None
|
||||
servings: float | None = None
|
||||
estimated: bool = False # True when nutrition was inferred from ingredient profiles
|
||||
|
||||
|
||||
class RecipeSuggestion(BaseModel):
|
||||
id: int
|
||||
title: str
|
||||
match_count: int
|
||||
element_coverage: dict[str, float] = Field(default_factory=dict)
|
||||
swap_candidates: list[SwapCandidate] = Field(default_factory=list)
|
||||
missing_ingredients: list[str] = Field(default_factory=list)
|
||||
directions: list[str] = Field(default_factory=list)
|
||||
prep_notes: list[str] = Field(default_factory=list)
|
||||
notes: str = ""
|
||||
level: int = 1
|
||||
is_wildcard: bool = False
|
||||
nutrition: NutritionPanel | None = None
|
||||
|
||||
|
||||
class GroceryLink(BaseModel):
|
||||
ingredient: str
|
||||
retailer: str
|
||||
url: str
|
||||
|
||||
|
||||
class RecipeResult(BaseModel):
|
||||
suggestions: list[RecipeSuggestion]
|
||||
element_gaps: list[str]
|
||||
grocery_list: list[str] = Field(default_factory=list)
|
||||
grocery_links: list[GroceryLink] = Field(default_factory=list)
|
||||
rate_limited: bool = False
|
||||
rate_limit_count: int = 0
|
||||
|
||||
|
||||
class NutritionFilters(BaseModel):
|
||||
"""Optional per-serving upper bounds for macro filtering. None = no filter."""
|
||||
max_calories: float | None = None
|
||||
max_sugar_g: float | None = None
|
||||
max_carbs_g: float | None = None
|
||||
max_sodium_mg: float | None = None
|
||||
|
||||
|
||||
class RecipeRequest(BaseModel):
|
||||
pantry_items: list[str]
|
||||
level: int = Field(default=1, ge=1, le=4)
|
||||
constraints: list[str] = Field(default_factory=list)
|
||||
expiry_first: bool = False
|
||||
hard_day_mode: bool = False
|
||||
max_missing: int | None = None
|
||||
style_id: str | None = None
|
||||
category: str | None = None
|
||||
tier: str = "free"
|
||||
has_byok: bool = False
|
||||
wildcard_confirmed: bool = False
|
||||
allergies: list[str] = Field(default_factory=list)
|
||||
nutrition_filters: NutritionFilters = Field(default_factory=NutritionFilters)
|
||||
excluded_ids: list[int] = Field(default_factory=list)
|
||||
|
|
@ -5,8 +5,6 @@ This module provides functionality to detect and decode barcodes
|
|||
from images (UPC, EAN, QR codes, etc.).
|
||||
"""
|
||||
|
||||
import io
|
||||
|
||||
import cv2
|
||||
import numpy as np
|
||||
from pyzbar import pyzbar
|
||||
|
|
@ -14,12 +12,6 @@ from pathlib import Path
|
|||
from typing import List, Dict, Any, Optional
|
||||
import logging
|
||||
|
||||
try:
|
||||
from PIL import Image as _PILImage
|
||||
_HAS_PIL = True
|
||||
except ImportError:
|
||||
_HAS_PIL = False
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
|
|
@ -84,7 +76,9 @@ class BarcodeScanner:
|
|||
# 4. Try rotations if still no barcodes found (handles tilted/rotated barcodes)
|
||||
if not barcodes:
|
||||
logger.info("No barcodes found in standard orientation, trying rotations...")
|
||||
for angle in [90, 180, 270, 45, 135]:
|
||||
# Try incremental angles: 30°, 60°, 90° (covers 0-90° range)
|
||||
# 0° already tried, 180° is functionally same as 0°, 90°/270° are same axis
|
||||
for angle in [30, 60, 90]:
|
||||
rotated_gray = self._rotate_image(gray, angle)
|
||||
rotated_color = self._rotate_image(image, angle)
|
||||
detected = self._detect_barcodes(rotated_gray, rotated_color)
|
||||
|
|
@ -270,26 +264,6 @@ class BarcodeScanner:
|
|||
|
||||
return list(seen.values())
|
||||
|
||||
def _fix_exif_orientation(self, image_bytes: bytes) -> bytes:
|
||||
"""Apply EXIF orientation correction so cv2 sees an upright image.
|
||||
|
||||
Phone cameras embed rotation in EXIF; cv2.imdecode ignores it,
|
||||
so a photo taken in portrait may arrive physically sideways in memory.
|
||||
"""
|
||||
if not _HAS_PIL:
|
||||
return image_bytes
|
||||
try:
|
||||
pil = _PILImage.open(io.BytesIO(image_bytes))
|
||||
pil = _PILImage.fromarray(np.array(pil)) # strips EXIF but applies orientation via PIL
|
||||
# Use ImageOps.exif_transpose for proper EXIF-aware rotation
|
||||
import PIL.ImageOps
|
||||
pil = PIL.ImageOps.exif_transpose(pil)
|
||||
buf = io.BytesIO()
|
||||
pil.save(buf, format="JPEG")
|
||||
return buf.getvalue()
|
||||
except Exception:
|
||||
return image_bytes
|
||||
|
||||
def scan_from_bytes(self, image_bytes: bytes) -> List[Dict[str, Any]]:
|
||||
"""
|
||||
Scan barcodes from image bytes (uploaded file).
|
||||
|
|
@ -301,10 +275,6 @@ class BarcodeScanner:
|
|||
List of detected barcodes
|
||||
"""
|
||||
try:
|
||||
# Apply EXIF orientation correction first (phone cameras embed rotation in EXIF;
|
||||
# cv2.imdecode ignores it, causing sideways barcodes to appear rotated in memory).
|
||||
image_bytes = self._fix_exif_orientation(image_bytes)
|
||||
|
||||
# Convert bytes to numpy array
|
||||
nparr = np.frombuffer(image_bytes, np.uint8)
|
||||
image = cv2.imdecode(nparr, cv2.IMREAD_COLOR)
|
||||
|
|
@ -330,12 +300,11 @@ class BarcodeScanner:
|
|||
)
|
||||
barcodes.extend(self._detect_barcodes(thresh, image))
|
||||
|
||||
# 3. Try all 90° rotations + common tilt angles
|
||||
# 90/270 catches truly sideways barcodes; 180 catches upside-down;
|
||||
# 45/135 catches tilted barcodes on flat surfaces.
|
||||
# 3. Try rotations if still no barcodes found
|
||||
if not barcodes:
|
||||
logger.info("No barcodes found in uploaded image, trying rotations...")
|
||||
for angle in [90, 180, 270, 45, 135]:
|
||||
# Try incremental angles: 30°, 60°, 90° (covers 0-90° range)
|
||||
for angle in [30, 60, 90]:
|
||||
rotated_gray = self._rotate_image(gray, angle)
|
||||
rotated_color = self._rotate_image(image, angle)
|
||||
detected = self._detect_barcodes(rotated_gray, rotated_color)
|
||||
|
|
|
|||
|
|
@ -21,29 +21,6 @@ logger = logging.getLogger(__name__)
|
|||
class ExpirationPredictor:
|
||||
"""Predict expiration dates based on product category and storage location."""
|
||||
|
||||
# Canonical location names and their aliases.
|
||||
# All location strings are normalised through this before table lookup.
|
||||
LOCATION_ALIASES: dict[str, str] = {
|
||||
'garage_freezer': 'freezer',
|
||||
'chest_freezer': 'freezer',
|
||||
'deep_freezer': 'freezer',
|
||||
'upright_freezer': 'freezer',
|
||||
'refrigerator': 'fridge',
|
||||
'frig': 'fridge',
|
||||
'cupboard': 'cabinet',
|
||||
'shelf': 'pantry',
|
||||
'counter': 'pantry',
|
||||
}
|
||||
|
||||
# When a category has no entry for the requested location, try these
|
||||
# alternatives in order — prioritising same-temperature storage first.
|
||||
LOCATION_FALLBACK: dict[str, tuple[str, ...]] = {
|
||||
'freezer': ('freezer', 'fridge', 'pantry', 'cabinet'),
|
||||
'fridge': ('fridge', 'pantry', 'cabinet', 'freezer'),
|
||||
'pantry': ('pantry', 'cabinet', 'fridge', 'freezer'),
|
||||
'cabinet': ('cabinet', 'pantry', 'fridge', 'freezer'),
|
||||
}
|
||||
|
||||
# Default shelf life in days by category and location
|
||||
# Sources: USDA FoodKeeper app, FDA guidelines
|
||||
SHELF_LIFE = {
|
||||
|
|
@ -62,8 +39,6 @@ class ExpirationPredictor:
|
|||
'poultry': {'fridge': 2, 'freezer': 270},
|
||||
'chicken': {'fridge': 2, 'freezer': 270},
|
||||
'turkey': {'fridge': 2, 'freezer': 270},
|
||||
'tempeh': {'fridge': 10, 'freezer': 365},
|
||||
'tofu': {'fridge': 5, 'freezer': 180},
|
||||
'ground_meat': {'fridge': 2, 'freezer': 120},
|
||||
# Seafood
|
||||
'fish': {'fridge': 2, 'freezer': 180},
|
||||
|
|
@ -84,9 +59,9 @@ class ExpirationPredictor:
|
|||
'bread': {'pantry': 5, 'freezer': 90},
|
||||
'bakery': {'pantry': 3, 'fridge': 7, 'freezer': 90},
|
||||
# Frozen
|
||||
'frozen_foods': {'freezer': 180, 'fridge': 3},
|
||||
'frozen_vegetables': {'freezer': 270, 'fridge': 4},
|
||||
'frozen_fruit': {'freezer': 365, 'fridge': 4},
|
||||
'frozen_foods': {'freezer': 180},
|
||||
'frozen_vegetables': {'freezer': 270},
|
||||
'frozen_fruit': {'freezer': 365},
|
||||
'ice_cream': {'freezer': 60},
|
||||
# Pantry Staples
|
||||
'canned_goods': {'pantry': 730, 'cabinet': 730},
|
||||
|
|
@ -116,127 +91,44 @@ class ExpirationPredictor:
|
|||
'prepared_foods': {'fridge': 4, 'freezer': 90},
|
||||
}
|
||||
|
||||
# Keyword lists are checked in declaration order — most specific first.
|
||||
# Rules:
|
||||
# - canned/processed goods BEFORE raw-meat terms (canned chicken != raw chicken)
|
||||
# - frozen prepared foods BEFORE generic protein terms
|
||||
# - multi-word phrases before single words where ambiguity exists
|
||||
CATEGORY_KEYWORDS = {
|
||||
# ── Frozen prepared foods ─────────────────────────────────────────────
|
||||
# Before raw protein entries so plant-based frozen products don't
|
||||
# inherit 2–3 day raw-meat shelf lives.
|
||||
'ice_cream': ['ice cream', 'gelato', 'frozen yogurt', 'sorbet', 'sherbet'],
|
||||
'frozen_fruit': [
|
||||
'frozen berries', 'frozen mango', 'frozen strawberries',
|
||||
'frozen blueberries', 'frozen raspberries', 'frozen peaches',
|
||||
'frozen fruit', 'frozen cherries',
|
||||
],
|
||||
'frozen_vegetables': [
|
||||
'frozen veg', 'frozen corn', 'frozen peas', 'frozen broccoli',
|
||||
'frozen spinach', 'frozen edamame', 'frozen green beans',
|
||||
'frozen mixed vegetables', 'frozen carrots',
|
||||
'peas & carrots', 'peas and carrots', 'mixed vegetables',
|
||||
'spring rolls', 'vegetable spring rolls',
|
||||
],
|
||||
'frozen_foods': [
|
||||
'plant-based', 'plant based', 'meatless', 'impossible',
|
||||
"chik'n", 'chikn', 'veggie burger', 'veggie patty',
|
||||
'nugget', 'tater tot', 'waffle fries', 'hash brown',
|
||||
'onion ring', 'fish stick', 'fish fillet', 'potsticker',
|
||||
'dumpling', 'egg roll', 'empanada', 'tamale', 'falafel',
|
||||
'mac & cheese bite', 'cauliflower wing', 'ranchero potato',
|
||||
],
|
||||
# ── Canned / shelf-stable processed goods ─────────────────────────────
|
||||
# Before raw protein keywords so "canned chicken", "cream of chicken",
|
||||
# and "lentil soup" resolve here rather than to raw chicken/cream.
|
||||
'canned_goods': [
|
||||
'canned', 'can of', 'tin of', 'tinned',
|
||||
'cream of ', 'condensed soup', 'condensed cream',
|
||||
'baked beans', 'refried beans',
|
||||
'canned beans', 'canned tomatoes', 'canned corn', 'canned peas',
|
||||
'canned soup', 'canned tuna', 'canned salmon', 'canned chicken',
|
||||
'canned fruit', 'canned peaches', 'canned pears',
|
||||
'enchilada sauce', 'tomato sauce', 'tomato paste',
|
||||
'lentil soup', 'bean soup', 'chicken noodle soup',
|
||||
],
|
||||
# ── Condiments & brined items ─────────────────────────────────────────
|
||||
# Before produce/protein terms so brined olives, jarred peppers, etc.
|
||||
# don't inherit raw vegetable shelf lives.
|
||||
'ketchup': ['ketchup', 'catsup'],
|
||||
'mustard': ['mustard', 'dijon', 'dijion', 'stoneground mustard'],
|
||||
'mayo': ['mayo', 'mayonnaise', 'miracle whip'],
|
||||
'soy_sauce': ['soy sauce', 'tamari', 'shoyu'],
|
||||
'salad_dressing': ['salad dressing', 'ranch', 'italian dressing', 'vinaigrette'],
|
||||
'condiments': [
|
||||
# brined / jarred items
|
||||
'dill chips', 'hamburger chips', 'gherkin',
|
||||
'olive', 'capers', 'jalapeño', 'jalapeno', 'pepperoncini',
|
||||
'pimiento', 'banana pepper', 'cornichon',
|
||||
# sauces
|
||||
'hot sauce', 'hot pepper sauce', 'sriracha', 'cholula',
|
||||
'worcestershire', 'barbecue sauce', 'bbq sauce',
|
||||
'chipotle sauce', 'chipotle mayo', 'chipotle creamy',
|
||||
'salsa', 'chutney', 'relish',
|
||||
'teriyaki', 'hoisin', 'oyster sauce', 'fish sauce',
|
||||
'miso', 'ssamjang', 'gochujang', 'doenjang',
|
||||
'soybean paste', 'fermented soybean',
|
||||
# nut butters / spreads
|
||||
'peanut butter', 'almond butter', 'tahini', 'hummus',
|
||||
# seasoning mixes
|
||||
'seasoning', 'spice blend', 'borracho',
|
||||
# other shelf-stable sauces
|
||||
'yuzu', 'ponzu', 'lizano',
|
||||
],
|
||||
# ── Soy / fermented proteins ──────────────────────────────────────────
|
||||
'tempeh': ['tempeh'],
|
||||
'tofu': ['tofu', 'bean curd'],
|
||||
# ── Dairy ─────────────────────────────────────────────────────────────
|
||||
'milk': ['milk', 'whole milk', '2% milk', 'skim milk', 'almond milk', 'oat milk', 'soy milk'],
|
||||
'cheese': ['cheese', 'cheddar', 'mozzarella', 'swiss', 'parmesan', 'feta', 'gouda', 'velveeta'],
|
||||
'cheese': ['cheese', 'cheddar', 'mozzarella', 'swiss', 'parmesan', 'feta', 'gouda'],
|
||||
'yogurt': ['yogurt', 'greek yogurt', 'yoghurt'],
|
||||
'butter': ['butter', 'margarine'],
|
||||
# Bare 'cream' removed — "cream of X" is canned_goods (matched above).
|
||||
'cream': ['heavy cream', 'whipping cream', 'sour cream', 'crème fraîche',
|
||||
'cream cheese', 'whipped topping', 'whipped cream'],
|
||||
'cream': ['cream', 'heavy cream', 'whipping cream', 'sour cream'],
|
||||
'eggs': ['eggs', 'egg'],
|
||||
# ── Raw proteins ──────────────────────────────────────────────────────
|
||||
# After canned/frozen so "canned chicken" is already resolved above.
|
||||
'beef': ['beef', 'steak', 'roast', 'brisket', 'ribeye', 'sirloin'],
|
||||
'pork': ['pork', 'bacon', 'ham', 'sausage', 'pork chop'],
|
||||
'chicken': ['chicken', 'chicken breast', 'chicken thigh', 'chicken wings'],
|
||||
'turkey': ['turkey', 'turkey breast', 'ground turkey'],
|
||||
'ground_meat': ['ground beef', 'ground pork', 'ground chicken', 'hamburger'],
|
||||
'fish': ['fish', 'cod', 'tilapia', 'halibut'],
|
||||
'salmon': ['salmon'],
|
||||
'shrimp': ['shrimp', 'prawns'],
|
||||
'fish': ['fish', 'cod', 'tilapia', 'halibut', 'pollock'],
|
||||
# Specific chicken cuts only — bare 'chicken' handled in generic fallback
|
||||
'chicken': ['chicken breast', 'chicken thigh', 'chicken wings', 'chicken leg',
|
||||
'whole chicken', 'rotisserie chicken', 'raw chicken'],
|
||||
'turkey': ['turkey breast', 'whole turkey'],
|
||||
'ground_meat': ['ground beef', 'ground pork', 'ground chicken', 'ground turkey',
|
||||
'ground lamb', 'ground bison'],
|
||||
'pork': ['pork', 'bacon', 'ham', 'pork chop', 'pork loin'],
|
||||
'beef': ['beef', 'steak', 'brisket', 'ribeye', 'sirloin', 'roast beef'],
|
||||
'deli_meat': ['deli', 'sliced turkey', 'sliced ham', 'lunch meat', 'cold cuts',
|
||||
'prosciutto', 'salami', 'pepperoni'],
|
||||
# ── Produce ───────────────────────────────────────────────────────────
|
||||
'leafy_greens': ['lettuce', 'spinach', 'kale', 'arugula', 'mixed greens'],
|
||||
'leafy_greens': ['lettuce', 'spinach', 'kale', 'arugula', 'mixed greens', 'salad'],
|
||||
'berries': ['strawberries', 'blueberries', 'raspberries', 'blackberries'],
|
||||
'apples': ['apple', 'apples'],
|
||||
'bananas': ['banana', 'bananas'],
|
||||
'citrus': ['orange', 'lemon', 'lime', 'grapefruit', 'tangerine'],
|
||||
# ── Bakery ────────────────────────────────────────────────────────────
|
||||
'bakery': [
|
||||
'muffin', 'croissant', 'donut', 'danish', 'puff pastry', 'pastry puff',
|
||||
'cinnamon roll', 'dinner roll', 'parkerhouse roll', 'scone',
|
||||
],
|
||||
'bread': ['bread', 'loaf', 'baguette', 'bagel', 'bun', 'pita', 'naan',
|
||||
'english muffin', 'sourdough'],
|
||||
# ── Dry pantry staples ────────────────────────────────────────────────
|
||||
'pasta': ['pasta', 'spaghetti', 'penne', 'macaroni', 'noodles', 'couscous', 'orzo'],
|
||||
'rice': ['rice', 'brown rice', 'white rice', 'jasmine rice', 'basmati',
|
||||
'spanish rice', 'rice mix'],
|
||||
'bread': ['bread', 'loaf', 'baguette', 'roll', 'bagel', 'bun'],
|
||||
'bakery': ['muffin', 'croissant', 'donut', 'danish', 'pastry'],
|
||||
'deli_meat': ['deli', 'sliced turkey', 'sliced ham', 'lunch meat', 'cold cuts'],
|
||||
'frozen_vegetables': ['frozen veg', 'frozen corn', 'frozen peas', 'frozen broccoli'],
|
||||
'frozen_fruit': ['frozen berries', 'frozen mango', 'frozen strawberries'],
|
||||
'ice_cream': ['ice cream', 'gelato', 'frozen yogurt'],
|
||||
'pasta': ['pasta', 'spaghetti', 'penne', 'macaroni', 'noodles'],
|
||||
'rice': ['rice', 'brown rice', 'white rice', 'jasmine'],
|
||||
'cereal': ['cereal', 'granola', 'oatmeal'],
|
||||
'chips': ['chips', 'crisps', 'tortilla chips', 'pretzel', 'popcorn'],
|
||||
'cookies': ['cookies', 'biscuits', 'crackers', 'graham cracker', 'wafer'],
|
||||
# ── Beverages ─────────────────────────────────────────────────────────
|
||||
'juice': ['juice', 'orange juice', 'apple juice', 'lemonade'],
|
||||
'soda': ['soda', 'cola', 'sprite', 'pepsi', 'coke', 'carbonated soft drink'],
|
||||
'chips': ['chips', 'crisps', 'tortilla chips'],
|
||||
'cookies': ['cookies', 'biscuits', 'crackers'],
|
||||
'ketchup': ['ketchup', 'catsup'],
|
||||
'mustard': ['mustard'],
|
||||
'mayo': ['mayo', 'mayonnaise', 'miracle whip'],
|
||||
'salad_dressing': ['salad dressing', 'ranch', 'italian dressing', 'vinaigrette'],
|
||||
'soy_sauce': ['soy sauce', 'tamari'],
|
||||
'juice': ['juice', 'orange juice', 'apple juice'],
|
||||
'soda': ['soda', 'pop', 'cola', 'sprite', 'pepsi', 'coke'],
|
||||
}
|
||||
|
||||
def __init__(self) -> None:
|
||||
|
|
@ -284,13 +176,8 @@ class ExpirationPredictor:
|
|||
product_name: str,
|
||||
product_category: Optional[str] = None,
|
||||
tags: Optional[List[str]] = None,
|
||||
location: Optional[str] = None,
|
||||
) -> Optional[str]:
|
||||
"""Determine category from product name, existing category, and tags.
|
||||
|
||||
location is used as a last-resort hint: unknown items in the freezer
|
||||
default to frozen_foods rather than dry_goods.
|
||||
"""
|
||||
"""Determine category from product name, existing category, and tags."""
|
||||
if product_category:
|
||||
cat = product_category.lower().strip()
|
||||
if cat in self.SHELF_LIFE:
|
||||
|
|
@ -310,36 +197,21 @@ class ExpirationPredictor:
|
|||
if any(kw in name for kw in keywords):
|
||||
return category
|
||||
|
||||
# Generic single-word fallbacks — checked after the keyword dict so
|
||||
# multi-word phrases (e.g. "canned chicken") already matched above.
|
||||
for words, fallback in [
|
||||
(['frozen'], 'frozen_foods'),
|
||||
(['canned', 'tinned'], 'canned_goods'),
|
||||
# bare 'chicken' / 'sausage' / 'ham' kept here so raw-meat names
|
||||
# that don't appear in the specific keyword lists still resolve.
|
||||
(['chicken', 'turkey'], 'poultry'),
|
||||
(['sausage', 'ham', 'bacon'], 'pork'),
|
||||
(['beef', 'steak'], 'beef'),
|
||||
(['meat', 'pork'], 'meat'),
|
||||
(['meat', 'beef', 'pork', 'chicken'], 'meat'),
|
||||
(['vegetable', 'veggie', 'produce'], 'vegetables'),
|
||||
(['fruit'], 'fruits'),
|
||||
(['dairy'], 'dairy'),
|
||||
(['frozen'], 'frozen_foods'),
|
||||
]:
|
||||
if any(w in name for w in words):
|
||||
return fallback
|
||||
|
||||
# Location-aware final fallback: unknown item in a freezer → frozen_foods.
|
||||
# This handles unlabelled frozen products (e.g. "Birthday Littles",
|
||||
# "Pulled BBQ Crumbles") without requiring every brand name to be listed.
|
||||
canon_loc = self._normalize_location(location or '')
|
||||
if canon_loc == 'freezer':
|
||||
return 'frozen_foods'
|
||||
|
||||
return 'dry_goods'
|
||||
|
||||
def get_shelf_life_info(self, category: str, location: str) -> Optional[int]:
|
||||
"""Shelf life in days for a given category + location, or None."""
|
||||
return self._lookup_days(category, location)
|
||||
return self.SHELF_LIFE.get(category.lower().strip(), {}).get(location)
|
||||
|
||||
def list_categories(self) -> List[str]:
|
||||
return list(self.SHELF_LIFE.keys())
|
||||
|
|
@ -352,18 +224,8 @@ class ExpirationPredictor:
|
|||
|
||||
# ── Private helpers ───────────────────────────────────────────────────────
|
||||
|
||||
def _normalize_location(self, location: str) -> str:
|
||||
"""Resolve location aliases to canonical names."""
|
||||
loc = location.lower().strip()
|
||||
return self.LOCATION_ALIASES.get(loc, loc)
|
||||
|
||||
def _lookup_days(self, category: Optional[str], location: str) -> Optional[int]:
|
||||
"""Pure deterministic lookup — no I/O.
|
||||
|
||||
Normalises location aliases (e.g. garage_freezer → freezer) and uses
|
||||
a context-aware fallback order so pantry items don't accidentally get
|
||||
fridge shelf-life and vice versa.
|
||||
"""
|
||||
"""Pure deterministic lookup — no I/O."""
|
||||
if not category:
|
||||
return None
|
||||
cat = category.lower().strip()
|
||||
|
|
@ -375,19 +237,13 @@ class ExpirationPredictor:
|
|||
else:
|
||||
return None
|
||||
|
||||
canon_loc = self._normalize_location(location)
|
||||
shelf = self.SHELF_LIFE[cat]
|
||||
|
||||
# Try the canonical location first, then work through the
|
||||
# context-aware fallback chain for that location type.
|
||||
fallback_order = self.LOCATION_FALLBACK.get(
|
||||
canon_loc, (canon_loc, 'pantry', 'fridge', 'cabinet', 'freezer')
|
||||
)
|
||||
for loc in fallback_order:
|
||||
days = shelf.get(loc)
|
||||
if days is not None:
|
||||
return days
|
||||
return None
|
||||
days = self.SHELF_LIFE[cat].get(location)
|
||||
if days is None:
|
||||
for loc in ('fridge', 'pantry', 'freezer', 'cabinet'):
|
||||
days = self.SHELF_LIFE[cat].get(loc)
|
||||
if days is not None:
|
||||
break
|
||||
return days
|
||||
|
||||
def _llm_predict_days(
|
||||
self,
|
||||
|
|
|
|||
|
|
@ -1,60 +0,0 @@
|
|||
"""Thin HTTP client for the cf-docuvision document vision service."""
|
||||
from __future__ import annotations
|
||||
|
||||
import base64
|
||||
from dataclasses import dataclass
|
||||
from pathlib import Path
|
||||
|
||||
import httpx
|
||||
|
||||
|
||||
@dataclass
|
||||
class DocuvisionResult:
|
||||
text: str
|
||||
confidence: float | None = None
|
||||
raw: dict | None = None
|
||||
|
||||
|
||||
class DocuvisionClient:
|
||||
"""Thin client for the cf-docuvision service."""
|
||||
|
||||
def __init__(self, base_url: str) -> None:
|
||||
self._base_url = base_url.rstrip("/")
|
||||
|
||||
def extract_text(self, image_path: str | Path) -> DocuvisionResult:
|
||||
"""Send an image to docuvision and return extracted text."""
|
||||
image_bytes = Path(image_path).read_bytes()
|
||||
b64 = base64.b64encode(image_bytes).decode()
|
||||
|
||||
with httpx.Client(timeout=30.0) as client:
|
||||
resp = client.post(
|
||||
f"{self._base_url}/extract",
|
||||
json={"image": b64},
|
||||
)
|
||||
resp.raise_for_status()
|
||||
data = resp.json()
|
||||
|
||||
return DocuvisionResult(
|
||||
text=data.get("text", ""),
|
||||
confidence=data.get("confidence"),
|
||||
raw=data,
|
||||
)
|
||||
|
||||
async def extract_text_async(self, image_path: str | Path) -> DocuvisionResult:
|
||||
"""Async version."""
|
||||
image_bytes = Path(image_path).read_bytes()
|
||||
b64 = base64.b64encode(image_bytes).decode()
|
||||
|
||||
async with httpx.AsyncClient(timeout=30.0) as client:
|
||||
resp = await client.post(
|
||||
f"{self._base_url}/extract",
|
||||
json={"image": b64},
|
||||
)
|
||||
resp.raise_for_status()
|
||||
data = resp.json()
|
||||
|
||||
return DocuvisionResult(
|
||||
text=data.get("text", ""),
|
||||
confidence=data.get("confidence"),
|
||||
raw=data,
|
||||
)
|
||||
|
|
@ -8,7 +8,6 @@ OCR with understanding of receipt structure to extract structured JSON data.
|
|||
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
from pathlib import Path
|
||||
from typing import Dict, Any, Optional, List
|
||||
|
|
@ -27,32 +26,6 @@ from app.core.config import settings
|
|||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _try_docuvision(image_path: str | Path) -> str | None:
|
||||
"""Try to extract text via cf-docuvision. Returns None if unavailable."""
|
||||
cf_orch_url = os.environ.get("CF_ORCH_URL")
|
||||
if not cf_orch_url:
|
||||
return None
|
||||
try:
|
||||
from circuitforge_core.resources import CFOrchClient
|
||||
from app.services.ocr.docuvision_client import DocuvisionClient
|
||||
|
||||
client = CFOrchClient(cf_orch_url)
|
||||
with client.allocate(
|
||||
service="cf-docuvision",
|
||||
model_candidates=["cf-docuvision"],
|
||||
ttl_s=60.0,
|
||||
caller="kiwi-ocr",
|
||||
) as alloc:
|
||||
if alloc is None:
|
||||
return None
|
||||
doc_client = DocuvisionClient(alloc.url)
|
||||
result = doc_client.extract_text(image_path)
|
||||
return result.text if result.text else None
|
||||
except Exception as exc:
|
||||
logger.debug("cf-docuvision fast-path failed, falling back: %s", exc)
|
||||
return None
|
||||
|
||||
|
||||
class VisionLanguageOCR:
|
||||
"""Vision-Language Model for receipt OCR and structured extraction."""
|
||||
|
||||
|
|
@ -67,7 +40,7 @@ class VisionLanguageOCR:
|
|||
self.processor = None
|
||||
self.device = "cuda" if torch.cuda.is_available() and settings.USE_GPU else "cpu"
|
||||
self.use_quantization = use_quantization
|
||||
self.model_name = "Qwen/Qwen2.5-VL-7B-Instruct"
|
||||
self.model_name = "Qwen/Qwen2-VL-2B-Instruct"
|
||||
|
||||
logger.info(f"Initializing VisionLanguageOCR with device: {self.device}")
|
||||
|
||||
|
|
@ -139,18 +112,6 @@ class VisionLanguageOCR:
|
|||
"warnings": [...]
|
||||
}
|
||||
"""
|
||||
# Try docuvision fast path first (skips heavy local VLM if available)
|
||||
docuvision_text = _try_docuvision(image_path)
|
||||
if docuvision_text is not None:
|
||||
parsed = self._parse_json_from_text(docuvision_text)
|
||||
# Only accept the docuvision result if it yielded meaningful content;
|
||||
# an empty-skeleton dict (no items, no merchant) means the text was
|
||||
# garbled and we should fall through to the local VLM instead.
|
||||
if parsed.get("items") or parsed.get("merchant"):
|
||||
parsed["raw_text"] = docuvision_text
|
||||
return self._validate_result(parsed)
|
||||
# Parsed result has no meaningful content — fall through to local VLM
|
||||
|
||||
self._load_model()
|
||||
|
||||
try:
|
||||
|
|
|
|||
|
|
@ -1,647 +0,0 @@
|
|||
"""
|
||||
Assembly-dish template matcher for Level 1/2.
|
||||
|
||||
Assembly dishes (burritos, stir fry, fried rice, omelettes, sandwiches, etc.)
|
||||
are defined by structural roles -- container + filler + sauce -- not by a fixed
|
||||
ingredient list. The corpus can never fully cover them.
|
||||
|
||||
This module fires when the pantry covers all *required* roles of a template.
|
||||
Results are injected at the top of the Level 1/2 suggestion list with negative
|
||||
ids (client displays them identically to corpus recipes).
|
||||
|
||||
Templates define:
|
||||
- required: list of role sets -- ALL must have at least one pantry match
|
||||
- optional: role sets whose matched items are shown as extras
|
||||
- directions: short cooking instructions
|
||||
- notes: serving suggestions / variations
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import hashlib
|
||||
from dataclasses import dataclass
|
||||
|
||||
from app.models.schemas.recipe import RecipeSuggestion
|
||||
|
||||
|
||||
# IDs in range -100..-1 are reserved for assembly-generated suggestions
|
||||
_ASSEMBLY_ID_START = -1
|
||||
|
||||
|
||||
@dataclass
|
||||
class AssemblyRole:
|
||||
"""One role in a template (e.g. 'protein').
|
||||
|
||||
display: human-readable role label
|
||||
keywords: substrings matched against pantry item (lowercased)
|
||||
"""
|
||||
display: str
|
||||
keywords: list[str]
|
||||
|
||||
|
||||
@dataclass
|
||||
class AssemblyTemplate:
|
||||
"""A template assembly dish."""
|
||||
id: int
|
||||
title: str
|
||||
required: list[AssemblyRole]
|
||||
optional: list[AssemblyRole]
|
||||
directions: list[str]
|
||||
notes: str = ""
|
||||
|
||||
|
||||
def _matches_role(role: AssemblyRole, pantry_set: set[str]) -> list[str]:
|
||||
"""Return pantry items that satisfy this role.
|
||||
|
||||
Single-word keywords use whole-word matching (word must appear as a
|
||||
discrete token) so short words like 'pea', 'ham', 'egg' don't false-match
|
||||
inside longer words like 'peanut', 'hamburger', 'eggnog'.
|
||||
Multi-word keywords (e.g. 'burger patt') use substring matching.
|
||||
"""
|
||||
hits: list[str] = []
|
||||
for item in pantry_set:
|
||||
item_lower = item.lower()
|
||||
item_words = set(item_lower.split())
|
||||
for kw in role.keywords:
|
||||
if " " in kw:
|
||||
# Multi-word: substring match
|
||||
if kw in item_lower:
|
||||
hits.append(item)
|
||||
break
|
||||
else:
|
||||
# Single-word: whole-word match only
|
||||
if kw in item_words:
|
||||
hits.append(item)
|
||||
break
|
||||
return hits
|
||||
|
||||
|
||||
def _pick_one(items: list[str], seed: int) -> str:
|
||||
"""Deterministically pick one item from a list using a seed."""
|
||||
return sorted(items)[seed % len(items)]
|
||||
|
||||
|
||||
def _pantry_hash(pantry_set: set[str]) -> int:
|
||||
"""Stable integer derived from pantry contents — used for deterministic picks."""
|
||||
key = ",".join(sorted(pantry_set))
|
||||
return int(hashlib.md5(key.encode()).hexdigest(), 16) # noqa: S324 — non-crypto use
|
||||
|
||||
|
||||
def _keyword_label(item: str, role: AssemblyRole) -> str:
|
||||
"""Return a short, clean label derived from the keyword that matched.
|
||||
|
||||
Uses the longest matching keyword (most specific) as the base label,
|
||||
then title-cases it. This avoids pasting full raw pantry names like
|
||||
'Organic Extra Firm Tofu' into titles — just 'Tofu' instead.
|
||||
"""
|
||||
lower = item.lower()
|
||||
best_kw = ""
|
||||
for kw in role.keywords:
|
||||
if kw in lower and len(kw) > len(best_kw):
|
||||
best_kw = kw
|
||||
label = (best_kw or item).strip().title()
|
||||
# Drop trailing 's' from keywords like "beans" → "Bean" when it reads better
|
||||
return label
|
||||
|
||||
|
||||
def _personalized_title(tmpl: AssemblyTemplate, pantry_set: set[str], seed: int) -> str:
|
||||
"""Build a specific title using actual pantry items, e.g. 'Chicken & Broccoli Burrito'.
|
||||
|
||||
Uses the matched keyword as the label (not the full pantry item name) so
|
||||
'Organic Extra Firm Tofu Block' → 'Tofu' in the title.
|
||||
Picks at most two roles; prefers protein then vegetable.
|
||||
"""
|
||||
priority_displays = ["protein", "vegetables", "sauce base", "cheese"]
|
||||
|
||||
picked: list[str] = []
|
||||
for display in priority_displays:
|
||||
for role in tmpl.optional:
|
||||
if role.display != display:
|
||||
continue
|
||||
hits = _matches_role(role, pantry_set)
|
||||
if hits:
|
||||
item = _pick_one(hits, seed)
|
||||
label = _keyword_label(item, role)
|
||||
if label not in picked:
|
||||
picked.append(label)
|
||||
if len(picked) >= 2:
|
||||
break
|
||||
|
||||
if not picked:
|
||||
return tmpl.title
|
||||
return f"{' & '.join(picked)} {tmpl.title}"
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Template definitions
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
ASSEMBLY_TEMPLATES: list[AssemblyTemplate] = [
|
||||
AssemblyTemplate(
|
||||
id=-1,
|
||||
title="Burrito / Taco",
|
||||
required=[
|
||||
AssemblyRole("tortilla or wrap", [
|
||||
"tortilla", "wrap", "taco shell", "flatbread", "pita",
|
||||
]),
|
||||
],
|
||||
optional=[
|
||||
AssemblyRole("protein", [
|
||||
"chicken", "beef", "steak", "pork", "sausage", "hamburger",
|
||||
"burger patt", "shrimp", "egg", "tofu", "beans", "bean",
|
||||
]),
|
||||
AssemblyRole("rice or starch", ["rice", "quinoa", "potato"]),
|
||||
AssemblyRole("cheese", [
|
||||
"cheese", "cheddar", "mozzarella", "monterey", "queso",
|
||||
]),
|
||||
AssemblyRole("salsa or sauce", [
|
||||
"salsa", "hot sauce", "taco sauce", "enchilada", "guacamole",
|
||||
]),
|
||||
AssemblyRole("sour cream or yogurt", ["sour cream", "greek yogurt", "crema"]),
|
||||
AssemblyRole("vegetables", [
|
||||
"pepper", "onion", "tomato", "lettuce", "corn", "avocado",
|
||||
"spinach", "broccoli", "zucchini",
|
||||
]),
|
||||
],
|
||||
directions=[
|
||||
"Warm the tortilla in a dry skillet or microwave for 20 seconds.",
|
||||
"Heat any proteins or vegetables in a pan until cooked through.",
|
||||
"Layer ingredients down the center: rice first, then protein, then vegetables.",
|
||||
"Add cheese, salsa, and sour cream last so they stay cool.",
|
||||
"Fold in the sides and roll tightly. Optionally toast seam-side down 1-2 minutes.",
|
||||
],
|
||||
notes="Works as a burrito (rolled), taco (folded), or quesadilla (cheese only, pressed flat).",
|
||||
),
|
||||
AssemblyTemplate(
|
||||
id=-2,
|
||||
title="Fried Rice",
|
||||
required=[
|
||||
AssemblyRole("cooked rice", [
|
||||
"rice", "leftover rice", "instant rice", "microwavable rice",
|
||||
]),
|
||||
],
|
||||
optional=[
|
||||
AssemblyRole("protein", [
|
||||
"chicken", "beef", "pork", "shrimp", "egg", "tofu",
|
||||
"sausage", "ham", "spam",
|
||||
]),
|
||||
AssemblyRole("soy sauce or seasoning", [
|
||||
"soy sauce", "tamari", "teriyaki", "oyster sauce", "fish sauce",
|
||||
]),
|
||||
AssemblyRole("oil", ["oil", "butter", "sesame"]),
|
||||
AssemblyRole("egg", ["egg"]),
|
||||
AssemblyRole("vegetables", [
|
||||
"carrot", "peas", "corn", "onion", "scallion", "green onion",
|
||||
"broccoli", "bok choy", "bean sprout", "zucchini", "spinach",
|
||||
]),
|
||||
AssemblyRole("garlic or ginger", ["garlic", "ginger"]),
|
||||
],
|
||||
directions=[
|
||||
"Use day-old cold rice if available -- it fries better than fresh.",
|
||||
"Heat oil in a large skillet or wok over high heat.",
|
||||
"Add garlic/ginger and any raw vegetables; stir fry 2-3 minutes.",
|
||||
"Push to the side, scramble eggs in the same pan if using.",
|
||||
"Add protein (pre-cooked or raw) and cook through.",
|
||||
"Add rice, breaking up clumps. Stir fry until heated and lightly toasted.",
|
||||
"Season with soy sauce and any other sauces. Toss to combine.",
|
||||
],
|
||||
notes="Add a fried egg on top. A drizzle of sesame oil at the end adds a lot.",
|
||||
),
|
||||
AssemblyTemplate(
|
||||
id=-3,
|
||||
title="Omelette / Scramble",
|
||||
required=[
|
||||
AssemblyRole("eggs", ["egg"]),
|
||||
],
|
||||
optional=[
|
||||
AssemblyRole("cheese", [
|
||||
"cheese", "cheddar", "mozzarella", "feta", "parmesan",
|
||||
]),
|
||||
AssemblyRole("vegetables", [
|
||||
"pepper", "onion", "tomato", "spinach", "mushroom",
|
||||
"broccoli", "zucchini", "scallion", "avocado",
|
||||
]),
|
||||
AssemblyRole("protein", [
|
||||
"ham", "bacon", "sausage", "chicken", "turkey",
|
||||
"smoked salmon",
|
||||
]),
|
||||
AssemblyRole("herbs or seasoning", [
|
||||
"herb", "basil", "chive", "parsley", "salt", "pepper",
|
||||
"hot sauce", "salsa",
|
||||
]),
|
||||
],
|
||||
directions=[
|
||||
"Beat eggs with a splash of water or milk and a pinch of salt.",
|
||||
"Saute any vegetables and proteins in butter or oil over medium heat until softened.",
|
||||
"Pour eggs over fillings (scramble) or pour into a clean buttered pan (omelette).",
|
||||
"For omelette: cook until nearly set, add fillings to one side, fold over.",
|
||||
"For scramble: stir gently over medium-low heat until just set.",
|
||||
"Season and serve immediately.",
|
||||
],
|
||||
notes="Works for breakfast, lunch, or a quick dinner. Any leftover vegetables work well.",
|
||||
),
|
||||
AssemblyTemplate(
|
||||
id=-4,
|
||||
title="Stir Fry",
|
||||
required=[
|
||||
AssemblyRole("vegetables", [
|
||||
"pepper", "broccoli", "carrot", "snap pea", "bok choy",
|
||||
"zucchini", "mushroom", "corn", "onion", "bean sprout",
|
||||
"cabbage", "spinach", "asparagus",
|
||||
]),
|
||||
],
|
||||
optional=[
|
||||
AssemblyRole("protein", [
|
||||
"chicken", "beef", "pork", "shrimp", "tofu", "egg",
|
||||
]),
|
||||
AssemblyRole("sauce", [
|
||||
"soy sauce", "teriyaki", "oyster sauce", "hoisin",
|
||||
"stir fry sauce", "sesame",
|
||||
]),
|
||||
AssemblyRole("starch base", ["rice", "noodle", "pasta", "ramen"]),
|
||||
AssemblyRole("garlic or ginger", ["garlic", "ginger"]),
|
||||
AssemblyRole("oil", ["oil", "sesame"]),
|
||||
],
|
||||
directions=[
|
||||
"Cut all proteins and vegetables into similar-sized pieces for even cooking.",
|
||||
"Heat oil in a wok or large skillet over the highest heat your stove allows.",
|
||||
"Cook protein first until nearly done; remove and set aside.",
|
||||
"Add dense vegetables (carrots, broccoli) first; quick-cooking veg last.",
|
||||
"Return protein, add sauce, and toss everything together for 1-2 minutes.",
|
||||
"Serve over rice or noodles.",
|
||||
],
|
||||
notes="High heat is the key. Do not crowd the pan -- cook in batches if needed.",
|
||||
),
|
||||
AssemblyTemplate(
|
||||
id=-5,
|
||||
title="Pasta with Whatever You Have",
|
||||
required=[
|
||||
AssemblyRole("pasta", [
|
||||
"pasta", "spaghetti", "penne", "fettuccine", "rigatoni",
|
||||
"linguine", "rotini", "farfalle", "macaroni", "noodle",
|
||||
]),
|
||||
],
|
||||
optional=[
|
||||
AssemblyRole("sauce base", [
|
||||
"tomato", "marinara", "pasta sauce", "cream", "butter",
|
||||
"olive oil", "pesto",
|
||||
]),
|
||||
AssemblyRole("protein", [
|
||||
"chicken", "beef", "pork", "shrimp", "sausage", "bacon",
|
||||
"ham", "tuna", "canned fish",
|
||||
]),
|
||||
AssemblyRole("cheese", [
|
||||
"parmesan", "romano", "mozzarella", "ricotta", "feta",
|
||||
]),
|
||||
AssemblyRole("vegetables", [
|
||||
"tomato", "spinach", "mushroom", "pepper", "zucchini",
|
||||
"broccoli", "artichoke", "olive", "onion",
|
||||
]),
|
||||
AssemblyRole("garlic", ["garlic"]),
|
||||
],
|
||||
directions=[
|
||||
"Cook pasta in well-salted boiling water until al dente. Reserve 1 cup pasta water.",
|
||||
"While pasta cooks, saute garlic in olive oil over medium heat.",
|
||||
"Add proteins and cook through; add vegetables until tender.",
|
||||
"Add sauce base and simmer 5 minutes. Add pasta water to loosen if needed.",
|
||||
"Toss cooked pasta with sauce. Finish with cheese if using.",
|
||||
],
|
||||
notes="Pasta water is the secret -- the starch thickens and binds any sauce.",
|
||||
),
|
||||
AssemblyTemplate(
|
||||
id=-6,
|
||||
title="Sandwich / Wrap",
|
||||
required=[
|
||||
AssemblyRole("bread or wrap", [
|
||||
"bread", "roll", "bun", "wrap", "tortilla", "pita",
|
||||
"bagel", "english muffin", "croissant", "flatbread",
|
||||
]),
|
||||
],
|
||||
optional=[
|
||||
AssemblyRole("protein", [
|
||||
"chicken", "turkey", "ham", "roast beef", "tuna", "egg",
|
||||
"bacon", "salami", "pepperoni", "tofu", "tempeh",
|
||||
]),
|
||||
AssemblyRole("cheese", [
|
||||
"cheese", "cheddar", "swiss", "provolone", "mozzarella",
|
||||
]),
|
||||
AssemblyRole("condiment", [
|
||||
"mayo", "mustard", "ketchup", "hot sauce", "ranch",
|
||||
"hummus", "pesto", "aioli",
|
||||
]),
|
||||
AssemblyRole("vegetables", [
|
||||
"lettuce", "tomato", "onion", "cucumber", "avocado",
|
||||
"pepper", "sprout", "arugula",
|
||||
]),
|
||||
],
|
||||
directions=[
|
||||
"Toast bread if desired.",
|
||||
"Spread condiments on both inner surfaces.",
|
||||
"Layer protein first, then cheese, then vegetables.",
|
||||
"Press together and cut diagonally.",
|
||||
],
|
||||
notes="Leftovers, deli meat, canned fish -- nearly anything works between bread.",
|
||||
),
|
||||
AssemblyTemplate(
|
||||
id=-7,
|
||||
title="Grain Bowl",
|
||||
required=[
|
||||
AssemblyRole("grain base", [
|
||||
"rice", "quinoa", "farro", "barley", "couscous",
|
||||
"bulgur", "freekeh", "polenta",
|
||||
]),
|
||||
],
|
||||
optional=[
|
||||
AssemblyRole("protein", [
|
||||
"chicken", "beef", "pork", "tofu", "egg", "shrimp",
|
||||
"beans", "bean", "lentil", "chickpea",
|
||||
]),
|
||||
AssemblyRole("vegetables", [
|
||||
"roasted", "broccoli", "carrot", "kale", "spinach",
|
||||
"cucumber", "tomato", "corn", "edamame", "avocado",
|
||||
"beet", "sweet potato",
|
||||
]),
|
||||
AssemblyRole("dressing or sauce", [
|
||||
"dressing", "tahini", "vinaigrette", "sauce",
|
||||
"olive oil", "lemon", "soy sauce",
|
||||
]),
|
||||
AssemblyRole("toppings", [
|
||||
"nut", "seed", "feta", "parmesan", "herb",
|
||||
]),
|
||||
],
|
||||
directions=[
|
||||
"Cook grain base according to package directions; season with salt.",
|
||||
"Roast or saute vegetables with oil, salt, and pepper until tender.",
|
||||
"Cook or slice protein.",
|
||||
"Arrange grain in a bowl, top with protein and vegetables.",
|
||||
"Drizzle with dressing and add toppings.",
|
||||
],
|
||||
notes="Great for meal prep -- cook grains and proteins in bulk, assemble bowls all week.",
|
||||
),
|
||||
AssemblyTemplate(
|
||||
id=-8,
|
||||
title="Soup / Stew",
|
||||
required=[
|
||||
AssemblyRole("broth or liquid base", [
|
||||
"broth", "stock", "bouillon",
|
||||
"tomato sauce", "coconut milk", "cream of",
|
||||
]),
|
||||
],
|
||||
optional=[
|
||||
AssemblyRole("protein", [
|
||||
"chicken", "beef", "pork", "sausage", "shrimp",
|
||||
"beans", "bean", "lentil", "tofu",
|
||||
]),
|
||||
AssemblyRole("vegetables", [
|
||||
"carrot", "celery", "onion", "potato", "tomato",
|
||||
"spinach", "kale", "corn", "pea", "zucchini",
|
||||
]),
|
||||
AssemblyRole("starch thickener", [
|
||||
"potato", "pasta", "noodle", "rice", "barley",
|
||||
"flour", "cornstarch",
|
||||
]),
|
||||
AssemblyRole("seasoning", [
|
||||
"garlic", "herb", "bay leaf", "thyme", "rosemary",
|
||||
"cumin", "paprika", "chili",
|
||||
]),
|
||||
],
|
||||
directions=[
|
||||
"Saute onion, celery, and garlic in oil until softened, about 5 minutes.",
|
||||
"Add any raw proteins and cook until browned.",
|
||||
"Add broth or liquid base and bring to a simmer.",
|
||||
"Add dense vegetables (carrots, potatoes) first; quick-cooking veg in the last 10 minutes.",
|
||||
"Add starches and cook until tender.",
|
||||
"Season to taste and simmer at least 20 minutes for flavors to develop.",
|
||||
],
|
||||
notes="Soups and stews improve overnight in the fridge. Almost any combination works.",
|
||||
),
|
||||
AssemblyTemplate(
|
||||
id=-9,
|
||||
title="Casserole / Bake",
|
||||
required=[
|
||||
AssemblyRole("starch or base", [
|
||||
"pasta", "rice", "potato", "noodle", "bread",
|
||||
"tortilla", "polenta", "grits", "macaroni",
|
||||
]),
|
||||
AssemblyRole("binder or sauce", [
|
||||
"cream of", "cheese", "cream cheese", "sour cream",
|
||||
"soup mix", "gravy", "tomato sauce", "marinara",
|
||||
"broth", "stock", "milk", "cream",
|
||||
]),
|
||||
],
|
||||
optional=[
|
||||
AssemblyRole("protein", [
|
||||
"chicken", "beef", "pork", "tuna", "ham", "sausage",
|
||||
"ground", "shrimp", "beans", "bean", "lentil",
|
||||
]),
|
||||
AssemblyRole("vegetables", [
|
||||
"broccoli", "corn", "pea", "onion", "mushroom",
|
||||
"spinach", "zucchini", "tomato", "pepper", "carrot",
|
||||
]),
|
||||
AssemblyRole("cheese topping", [
|
||||
"cheddar", "mozzarella", "parmesan", "swiss",
|
||||
"cheese", "breadcrumb",
|
||||
]),
|
||||
AssemblyRole("seasoning", [
|
||||
"garlic", "herb", "thyme", "rosemary", "paprika",
|
||||
"onion powder", "salt", "pepper",
|
||||
]),
|
||||
],
|
||||
directions=[
|
||||
"Preheat oven to 375 F (190 C). Grease a 9x13 baking dish.",
|
||||
"Cook starch base (pasta, rice, potato) until just underdone -- it finishes in the oven.",
|
||||
"Mix cooked starch with sauce/binder, protein, and vegetables in the dish.",
|
||||
"Season generously -- casseroles need salt.",
|
||||
"Top with cheese or breadcrumbs if using.",
|
||||
"Bake covered 25 minutes, then uncovered 15 minutes until golden and bubbly.",
|
||||
],
|
||||
notes="Classic pantry dump dinner. Cream of anything soup is the universal binder.",
|
||||
),
|
||||
AssemblyTemplate(
|
||||
id=-10,
|
||||
title="Pancakes / Waffles / Quick Bread",
|
||||
required=[
|
||||
AssemblyRole("flour or baking mix", [
|
||||
"flour", "bisquick", "pancake mix", "waffle mix",
|
||||
"baking mix", "cornmeal", "oats",
|
||||
]),
|
||||
AssemblyRole("leavening or egg", [
|
||||
"egg", "baking powder", "baking soda", "yeast",
|
||||
]),
|
||||
],
|
||||
optional=[
|
||||
AssemblyRole("liquid", [
|
||||
"milk", "buttermilk", "water", "juice",
|
||||
"almond milk", "oat milk", "sour cream",
|
||||
]),
|
||||
AssemblyRole("fat", [
|
||||
"butter", "oil", "margarine",
|
||||
]),
|
||||
AssemblyRole("sweetener", [
|
||||
"sugar", "honey", "maple syrup", "brown sugar",
|
||||
]),
|
||||
AssemblyRole("mix-ins", [
|
||||
"blueberr", "banana", "apple", "chocolate chip",
|
||||
"nut", "berry", "cinnamon", "vanilla",
|
||||
]),
|
||||
],
|
||||
directions=[
|
||||
"Whisk dry ingredients (flour, leavening, sugar, salt) together in a bowl.",
|
||||
"Whisk wet ingredients (egg, milk, melted butter) in a separate bowl.",
|
||||
"Fold wet into dry until just combined -- lumps are fine, do not overmix.",
|
||||
"For pancakes: cook on a buttered griddle over medium heat, flip when bubbles form.",
|
||||
"For waffles: pour into preheated waffle iron according to manufacturer instructions.",
|
||||
"For muffins or quick bread: pour into greased pan, bake at 375 F until a toothpick comes out clean.",
|
||||
],
|
||||
notes="Overmixing develops gluten and makes pancakes tough. Stop when just combined.",
|
||||
),
|
||||
AssemblyTemplate(
|
||||
id=-11,
|
||||
title="Porridge / Oatmeal",
|
||||
required=[
|
||||
AssemblyRole("oats or grain porridge", [
|
||||
"oat", "porridge", "grits", "semolina", "cream of wheat",
|
||||
"polenta", "congee", "rice porridge",
|
||||
]),
|
||||
],
|
||||
optional=[
|
||||
AssemblyRole("liquid", ["milk", "water", "almond milk", "oat milk", "coconut milk"]),
|
||||
AssemblyRole("sweetener", ["sugar", "honey", "maple syrup", "brown sugar", "agave"]),
|
||||
AssemblyRole("fruit", ["banana", "berry", "apple", "raisin", "date", "mango"]),
|
||||
AssemblyRole("toppings", ["nut", "seed", "granola", "coconut", "chocolate"]),
|
||||
AssemblyRole("spice", ["cinnamon", "nutmeg", "vanilla", "cardamom"]),
|
||||
],
|
||||
directions=[
|
||||
"Combine oats with liquid in a pot — typically 1 part oats to 2 parts liquid.",
|
||||
"Bring to a gentle simmer over medium heat, stirring occasionally.",
|
||||
"Cook 5 minutes (rolled oats) or 2 minutes (quick oats) until thickened to your liking.",
|
||||
"Stir in sweetener and spices.",
|
||||
"Top with fruit, nuts, or seeds and serve immediately.",
|
||||
],
|
||||
notes="Overnight oats: skip cooking — soak oats in cold milk overnight in the fridge.",
|
||||
),
|
||||
AssemblyTemplate(
|
||||
id=-12,
|
||||
title="Pie / Pot Pie",
|
||||
required=[
|
||||
AssemblyRole("pastry or crust", [
|
||||
"pastry", "puff pastry", "pie crust", "shortcrust",
|
||||
"pie shell", "phyllo", "filo", "biscuit",
|
||||
]),
|
||||
],
|
||||
optional=[
|
||||
AssemblyRole("protein filling", [
|
||||
"chicken", "beef", "pork", "lamb", "turkey", "tofu",
|
||||
"mushroom", "beans", "bean", "lentil", "tuna", "salmon",
|
||||
]),
|
||||
AssemblyRole("vegetables", [
|
||||
"carrot", "pea", "corn", "potato", "onion", "leek",
|
||||
"broccoli", "spinach", "mushroom", "parsnip", "swede",
|
||||
]),
|
||||
AssemblyRole("sauce or binder", [
|
||||
"gravy", "cream of", "stock", "broth", "cream",
|
||||
"white sauce", "bechamel", "cheese sauce",
|
||||
]),
|
||||
AssemblyRole("seasoning", [
|
||||
"thyme", "rosemary", "sage", "garlic", "herb",
|
||||
"mustard", "worcestershire",
|
||||
]),
|
||||
AssemblyRole("sweet filling", [
|
||||
"apple", "berry", "cherry", "pear", "peach",
|
||||
"rhubarb", "plum", "custard",
|
||||
]),
|
||||
],
|
||||
directions=[
|
||||
"For pot pie: make a sauce by combining stock or cream-of-something with cooked vegetables and protein.",
|
||||
"Season generously — fillings need more salt than you think.",
|
||||
"Pour filling into a baking dish and top with pastry, pressing edges to seal.",
|
||||
"Cut a few slits in the top to release steam. Brush with egg wash or milk if available.",
|
||||
"Bake at 400 F (200 C) for 25-35 minutes until pastry is golden brown.",
|
||||
"For sweet pie: fill unbaked crust with fruit filling, top with second crust or crumble, bake similarly.",
|
||||
],
|
||||
notes="Puff pastry from the freezer is the shortcut to impressive pot pies. Thaw in the fridge overnight.",
|
||||
),
|
||||
AssemblyTemplate(
|
||||
id=-13,
|
||||
title="Pudding / Custard",
|
||||
required=[
|
||||
AssemblyRole("dairy or dairy-free milk", [
|
||||
"milk", "cream", "oat milk", "almond milk",
|
||||
"soy milk", "coconut milk",
|
||||
]),
|
||||
AssemblyRole("thickener or set", [
|
||||
"egg", "cornstarch", "custard powder", "gelatin",
|
||||
"agar", "tapioca", "arrowroot",
|
||||
]),
|
||||
],
|
||||
optional=[
|
||||
AssemblyRole("sweetener", ["sugar", "honey", "maple syrup", "condensed milk"]),
|
||||
AssemblyRole("flavouring", [
|
||||
"vanilla", "chocolate", "cocoa", "caramel",
|
||||
"lemon", "orange", "cinnamon", "nutmeg",
|
||||
]),
|
||||
AssemblyRole("starchy base", [
|
||||
"rice", "bread", "sponge", "cake", "biscuit",
|
||||
]),
|
||||
AssemblyRole("fruit", ["raisin", "sultana", "berry", "banana", "apple"]),
|
||||
],
|
||||
directions=[
|
||||
"For stovetop custard: whisk eggs and sugar together, heat milk until steaming.",
|
||||
"Slowly pour hot milk into egg mixture while whisking constantly (tempering).",
|
||||
"Return to low heat and stir until mixture coats the back of a spoon.",
|
||||
"For cornstarch pudding: whisk cornstarch into cold milk first, then heat while stirring.",
|
||||
"Add flavourings (vanilla, cocoa) once off heat.",
|
||||
"Pour into dishes and refrigerate at least 2 hours to set.",
|
||||
],
|
||||
notes="UK-style pudding is broad — bread pudding, rice pudding, spotted dick, treacle sponge all count.",
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Public API
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def match_assembly_templates(
    pantry_items: list[str],
    pantry_set: set[str],
    excluded_ids: list[int],
) -> list[RecipeSuggestion]:
    """Return suggestions for every assembly template whose required roles
    are all covered by the pantry.

    Titles are personalized deterministically: the seed is derived from the
    pantry contents, so an unchanged pantry always yields the same titles.
    Templates whose id appears in excluded_ids (dismiss / load-more support)
    are skipped. Results come back best-covered first.
    """
    skip_ids = set(excluded_ids)
    title_seed = _pantry_hash(pantry_set)
    suggestions: list[RecipeSuggestion] = []

    for template in ASSEMBLY_TEMPLATES:
        if template.id in skip_ids:
            continue

        # Every required role must be satisfied by the pantry.
        if not all(_matches_role(role, pantry_set) for role in template.required):
            continue

        optional_hits = sum(
            1 for role in template.optional if _matches_role(role, pantry_set)
        )

        suggestions.append(RecipeSuggestion(
            id=template.id,
            title=_personalized_title(template, pantry_set, title_seed + template.id),
            match_count=len(template.required) + optional_hits,
            element_coverage={},
            swap_candidates=[],
            missing_ingredients=[],
            directions=template.directions,
            notes=template.notes,
            level=1,
            is_wildcard=False,
            nutrition=None,
        ))

    # Best coverage (required count + optional hits) first.
    suggestions.sort(key=lambda s: s.match_count, reverse=True)
    return suggestions
|
||||
|
|
@ -1,135 +0,0 @@
|
|||
"""
|
||||
ElementClassifier -- classify pantry items into culinary element tags.
|
||||
|
||||
Lookup order:
|
||||
1. ingredient_profiles table (pre-computed from USDA FDC)
|
||||
2. Keyword heuristic fallback (for unlisted ingredients)
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
from dataclasses import dataclass, field
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from app.db.store import Store
|
||||
|
||||
# All valid ingredient-level element labels (Method is recipe-level, not ingredient-level)
ELEMENTS = frozenset({
    "Seasoning", "Richness", "Brightness", "Depth",
    "Aroma", "Structure", "Texture",
})

# Ordered keyword -> element fallback rules used by
# ElementClassifier._heuristic_profile when an ingredient is missing from the
# ingredient_profiles table. A keyword matches as a substring of the
# lowercased ingredient name; stems like "anchov" deliberately cover plural
# and derived forms. An ingredient can collect several elements, at most one
# per group, assigned in this list's order.
_HEURISTIC: list[tuple[list[str], str]] = [
    (["vinegar", "lemon", "lime", "citrus", "wine", "yogurt", "kefir",
      "buttermilk", "tomato", "tamarind"], "Brightness"),
    (["oil", "butter", "cream", "lard", "fat", "avocado", "coconut milk",
      "ghee", "shortening", "crisco"], "Richness"),
    (["salt", "soy", "miso", "tamari", "fish sauce", "worcestershire",
      "anchov", "capers", "olive", "brine"], "Seasoning"),
    (["mushroom", "parmesan", "miso", "nutritional yeast", "bouillon",
      "broth", "umami", "anchov", "dried tomato", "soy"], "Depth"),
    (["garlic", "onion", "shallot", "herb", "basil", "oregano", "thyme",
      "rosemary", "spice", "cumin", "coriander", "paprika", "chili",
      "ginger", "cinnamon", "pepper", "cilantro", "dill", "fennel",
      "cardamom", "turmeric", "smoke"], "Aroma"),
    (["flour", "starch", "cornstarch", "arrowroot", "egg", "gelatin",
      "agar", "breadcrumb", "panko", "roux"], "Structure"),
    (["nut", "seed", "cracker", "crisp", "wafer", "chip", "crouton",
      "granola", "tofu", "tempeh"], "Texture"),
]
|
||||
|
||||
|
||||
def _safe_json_list(val) -> list:
|
||||
if isinstance(val, list):
|
||||
return val
|
||||
if isinstance(val, str):
|
||||
try:
|
||||
return json.loads(val)
|
||||
except Exception:
|
||||
return []
|
||||
return []
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class IngredientProfile:
    """Element tags plus physical/chemical attributes for one ingredient.

    source is "db" when loaded from the ingredient_profiles table and
    "heuristic" when produced by the keyword fallback, which fills only
    name/elements and leaves every other field at its default.
    """

    name: str  # normalized (lowercased, stripped) ingredient name
    elements: list[str]  # element tags, drawn from ELEMENTS
    # Composition fields — presumably per 100 g as in the USDA FDC source
    # data; TODO confirm units against the profile importer.
    fat_pct: float = 0.0
    fat_saturated_pct: float = 0.0
    moisture_pct: float = 0.0
    protein_pct: float = 0.0
    starch_pct: float = 0.0
    binding_score: int = 0  # NOTE(review): semantics inferred from name — confirm at use site
    glutamate_mg: float = 0.0
    ph_estimate: float | None = None  # None when unknown
    flavor_molecule_ids: list[str] = field(default_factory=list)
    heat_stable: bool = True
    add_timing: str = "any"  # when to add during cooking; "any" = no constraint
    acid_type: str | None = None
    sodium_mg_per_100g: float = 0.0
    is_fermented: bool = False
    texture_profile: str = "neutral"
    smoke_point_c: float | None = None  # None when not applicable/unknown
    is_emulsifier: bool = False
    source: str = "heuristic"  # "db" or "heuristic"
|
||||
|
||||
|
||||
class ElementClassifier:
    """Classify pantry ingredients into culinary element tags.

    Lookup order: the pre-computed ingredient_profiles table first, then the
    keyword heuristic in _HEURISTIC for names the table does not cover.
    """

    def __init__(self, store: "Store") -> None:
        self._store = store

    def classify(self, ingredient_name: str) -> IngredientProfile:
        """Return element profile for a single ingredient name."""
        # Normalize to the lowercase/stripped form used as the table key.
        name = ingredient_name.lower().strip()
        if not name:
            return IngredientProfile(name="", elements=[], source="heuristic")
        # NOTE(review): reaches into Store's private _fetch_one — consider
        # exposing a public query helper on Store instead.
        row = self._store._fetch_one(
            "SELECT * FROM ingredient_profiles WHERE name = ?", (name,)
        )
        if row:
            return self._row_to_profile(row)
        return self._heuristic_profile(name)

    def classify_batch(self, names: list[str]) -> list[IngredientProfile]:
        """Classify each name in order; returns one profile per input name."""
        return [self.classify(n) for n in names]

    def identify_gaps(self, profiles: list[IngredientProfile]) -> list[str]:
        """Return element names that have no coverage in the given profile list."""
        covered = set()
        for p in profiles:
            covered.update(p.elements)
        # Sorted for deterministic output.
        return sorted(ELEMENTS - covered)

    def _row_to_profile(self, row: dict) -> IngredientProfile:
        # Map a DB row onto the dataclass. The `or` fallbacks coerce NULLs to
        # the field defaults (they also coerce a legitimate 0/0.0, which is
        # harmless because the fallback equals the default).
        return IngredientProfile(
            name=row["name"],
            elements=_safe_json_list(row.get("elements")),
            fat_pct=row.get("fat_pct") or 0.0,
            fat_saturated_pct=row.get("fat_saturated_pct") or 0.0,
            moisture_pct=row.get("moisture_pct") or 0.0,
            protein_pct=row.get("protein_pct") or 0.0,
            starch_pct=row.get("starch_pct") or 0.0,
            binding_score=row.get("binding_score") or 0,
            glutamate_mg=row.get("glutamate_mg") or 0.0,
            ph_estimate=row.get("ph_estimate"),
            flavor_molecule_ids=_safe_json_list(row.get("flavor_molecule_ids")),
            # A missing column defaults to heat-stable / not fermented /
            # not an emulsifier.
            heat_stable=bool(row.get("heat_stable", 1)),
            add_timing=row.get("add_timing") or "any",
            acid_type=row.get("acid_type"),
            sodium_mg_per_100g=row.get("sodium_mg_per_100g") or 0.0,
            is_fermented=bool(row.get("is_fermented", 0)),
            texture_profile=row.get("texture_profile") or "neutral",
            smoke_point_c=row.get("smoke_point_c"),
            is_emulsifier=bool(row.get("is_emulsifier", 0)),
            source="db",
        )

    def _heuristic_profile(self, name: str) -> IngredientProfile:
        """Keyword fallback: substring-match each _HEURISTIC group against
        *name*; each element is assigned at most once, in group order."""
        seen: set[str] = set()
        elements: list[str] = []
        for keywords, element in _HEURISTIC:
            if element not in seen and any(kw in name for kw in keywords):
                elements.append(element)
                seen.add(element)
        return IngredientProfile(name=name, elements=elements, source="heuristic")
|
||||
|
|
@ -1,75 +0,0 @@
|
|||
"""
|
||||
GroceryLinkBuilder — affiliate deeplinks for missing ingredient grocery lists.
|
||||
|
||||
Free tier: URL construction only (Amazon Fresh, Walmart, Instacart).
|
||||
Paid+: live product search API (stubbed — future task).
|
||||
|
||||
Config (env vars, all optional — missing = retailer disabled):
|
||||
AMAZON_AFFILIATE_TAG — e.g. "circuitforge-20"
|
||||
INSTACART_AFFILIATE_ID — e.g. "circuitforge"
|
||||
WALMART_AFFILIATE_ID — e.g. "circuitforge" (Impact affiliate network)
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
from urllib.parse import quote_plus
|
||||
|
||||
from app.models.schemas.recipe import GroceryLink
|
||||
|
||||
|
||||
def _amazon_link(ingredient: str, tag: str) -> GroceryLink:
    """Amazon Fresh search deeplink carrying the affiliate *tag*."""
    search_url = (
        f"https://www.amazon.com/s?k={quote_plus(ingredient)}"
        f"&i=amazonfresh&tag={tag}"
    )
    return GroceryLink(ingredient=ingredient, retailer="Amazon Fresh", url=search_url)
|
||||
|
||||
|
||||
def _walmart_link(ingredient: str, affiliate_id: str) -> GroceryLink:
    """Walmart Grocery deeplink via the Impact affiliate network.

    The Impact pattern wraps a destination URL in the ``u=`` query parameter.
    Bug fix: the destination must itself be URL-encoded — previously its raw
    ``?q=`` query was embedded verbatim, so it parsed as a parameter of the
    outer goto.walmart.com URL and the search term was lost.
    """
    destination = f"https://www.walmart.com/search?q={quote_plus(ingredient)}"
    url = f"https://goto.walmart.com/c/{affiliate_id}/walmart?u={quote_plus(destination)}"
    return GroceryLink(ingredient=ingredient, retailer="Walmart Grocery", url=url)
|
||||
|
||||
|
||||
def _instacart_link(ingredient: str, affiliate_id: str) -> GroceryLink:
    """Instacart store-search deeplink tagged with the affiliate id."""
    encoded = quote_plus(ingredient)
    return GroceryLink(
        ingredient=ingredient,
        retailer="Instacart",
        url=f"https://www.instacart.com/store/s?k={encoded}&aff={affiliate_id}",
    )
|
||||
|
||||
|
||||
class GroceryLinkBuilder:
    """Builds affiliate grocery deeplinks for missing-ingredient lists.

    Each retailer is enabled by its affiliate-ID environment variable,
    read once at construction; retailers without an ID are skipped.
    Free tier constructs search URLs only.
    """

    def __init__(self, tier: str = "free", has_byok: bool = False) -> None:
        self._tier = tier
        self._has_byok = has_byok
        self._amazon_tag = os.environ.get("AMAZON_AFFILIATE_TAG", "")
        self._instacart_id = os.environ.get("INSTACART_AFFILIATE_ID", "")
        self._walmart_id = os.environ.get("WALMART_AFFILIATE_ID", "")

    def build_links(self, ingredient: str) -> list[GroceryLink]:
        """Build affiliate deeplinks for a single ingredient.

        Free tier: URL construction only.
        Paid+: would call live product search APIs (stubbed).
        """
        if not ingredient.strip():
            return []

        out: list[GroceryLink] = []
        if self._amazon_tag:
            out.append(_amazon_link(ingredient, self._amazon_tag))
        if self._walmart_id:
            out.append(_walmart_link(ingredient, self._walmart_id))
        if self._instacart_id:
            out.append(_instacart_link(ingredient, self._instacart_id))

        # Paid+: live product-search API stub (future task)
        # if self._tier in ("paid", "premium") and not self._has_byok:
        #     links.extend(self._search_kroger_api(ingredient))

        return out

    def build_all(self, ingredients: list[str]) -> list[GroceryLink]:
        """Build links for every ingredient, flattened into a single list."""
        return [
            link
            for ingredient in ingredients
            for link in self.build_links(ingredient)
        ]
|
||||
|
|
@ -1,313 +0,0 @@
|
|||
"""LLM-driven recipe generator for Levels 3 and 4."""
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
from contextlib import nullcontext
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from openai import OpenAI
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from app.db.store import Store
|
||||
|
||||
from app.models.schemas.recipe import RecipeRequest, RecipeResult, RecipeSuggestion
|
||||
from app.services.recipe.element_classifier import IngredientProfile
|
||||
from app.services.recipe.style_adapter import StyleAdapter
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _filter_allergies(pantry_items: list[str], allergies: list[str]) -> list[str]:
|
||||
"""Return pantry items with allergy matches removed (bidirectional substring)."""
|
||||
if not allergies:
|
||||
return list(pantry_items)
|
||||
return [
|
||||
item for item in pantry_items
|
||||
if not any(
|
||||
allergy.lower() in item.lower() or item.lower() in allergy.lower()
|
||||
for allergy in allergies
|
||||
)
|
||||
]
|
||||
|
||||
|
||||
class LLMRecipeGenerator:
    """Generate Level 3/4 recipe suggestions by prompting an LLM.

    Level 3 builds a constrained element-scaffold prompt from classified
    pantry profiles; Level 4 builds a minimal wildcard prompt. The model's
    plain-text reply is parsed back into a single RecipeSuggestion.
    """

    def __init__(self, store: "Store") -> None:
        # NOTE(review): self._store is not used by any method visible in this
        # class — confirm before removing.
        self._store = store
        self._style_adapter = StyleAdapter()

    def build_level3_prompt(
        self,
        req: RecipeRequest,
        profiles: list[IngredientProfile],
        gaps: list[str],
    ) -> str:
        """Build a structured element-scaffold prompt for Level 3."""
        allergy_list = req.allergies
        safe_pantry = _filter_allergies(req.pantry_items, allergy_list)

        # Collect covered elements in first-seen order so the prompt text is
        # deterministic for a given profile list.
        covered_elements: list[str] = []
        for profile in profiles:
            for element in profile.elements:
                if element not in covered_elements:
                    covered_elements.append(element)

        lines: list[str] = [
            "You are a creative chef. Generate a recipe using the ingredients below.",
            "IMPORTANT: When you use a pantry item, list it in Ingredients using its exact name from the pantry list. Do not add adjectives, quantities, or cooking states (e.g. use 'butter', not 'unsalted butter' or '2 tbsp butter').",
            "IMPORTANT: Only use pantry items that make culinary sense for the dish. Do NOT force flavoured/sweetened items (vanilla yoghurt, fruit yoghurt, jam, dessert sauces, flavoured syrups) into savoury dishes. Plain yoghurt, plain cream, and plain dairy are fine in savoury cooking.",
            "IMPORTANT: Do not default to the same ingredient repeatedly across dishes. If a pantry item does not genuinely improve this specific dish, leave it out.",
            "",
            f"Pantry items: {', '.join(safe_pantry)}",
        ]

        if req.constraints:
            lines.append(f"Dietary constraints: {', '.join(req.constraints)}")

        if allergy_list:
            lines.append(f"IMPORTANT — must NOT contain: {', '.join(allergy_list)}")

        lines.append("")
        lines.append(f"Covered culinary elements: {', '.join(covered_elements) or 'none'}")

        if gaps:
            lines.append(
                f"Missing elements to address: {', '.join(gaps)}. "
                "Incorporate ingredients or techniques to fill these gaps."
            )

        if req.style_id:
            template = self._style_adapter.get(req.style_id)
            if template:
                lines.append(f"Cuisine style: {template.name}")
                if template.aromatics:
                    # Cap at four aromatics to keep the prompt focused.
                    lines.append(f"Preferred aromatics: {', '.join(template.aromatics[:4])}")

        # Output-format contract that _parse_response expects.
        lines += [
            "",
            "Reply using EXACTLY this plain-text format — no markdown, no bold, no extra commentary:",
            "Title: <name of the dish>",
            "Ingredients: <comma-separated list>",
            "Directions:",
            "1. <first step>",
            "2. <second step>",
            "3. <continue for each step>",
            "Notes: <optional tips>",
        ]

        return "\n".join(lines)

    def build_level4_prompt(
        self,
        req: RecipeRequest,
    ) -> str:
        """Build a minimal wildcard prompt for Level 4."""
        allergy_list = req.allergies
        safe_pantry = _filter_allergies(req.pantry_items, allergy_list)

        lines: list[str] = [
            "Surprise me with a creative, unexpected recipe.",
            "Only use ingredients that make culinary sense together. Do not force flavoured/sweetened items (vanilla yoghurt, flavoured syrups, jam) into savoury dishes.",
            f"Ingredients available: {', '.join(safe_pantry)}",
        ]

        if req.constraints:
            lines.append(f"Constraints: {', '.join(req.constraints)}")

        if allergy_list:
            lines.append(f"Must NOT contain: {', '.join(allergy_list)}")

        # Same output-format contract as Level 3, minus the third step line.
        lines += [
            "Treat any mystery ingredient as a wildcard — use your imagination.",
            "Reply using EXACTLY this plain-text format — no markdown, no bold:",
            "Title: <name of the dish>",
            "Ingredients: <comma-separated list>",
            "Directions:",
            "1. <first step>",
            "2. <second step>",
            "Notes: <optional tips>",
        ]

        return "\n".join(lines)

    # Candidate models handed to CFOrchClient.allocate; presumably tried in
    # preference order by the coordinator — confirm against cf-orch docs.
    _MODEL_CANDIDATES: list[str] = ["Ouro-2.6B-Thinking", "Ouro-1.4B"]

    def _get_llm_context(self):
        """Return a sync context manager that yields an Allocation or None.

        When CF_ORCH_URL is set, uses CFOrchClient to acquire a vLLM allocation
        (which handles service lifecycle and VRAM). Falls back to nullcontext(None)
        when the env var is absent or CFOrchClient raises on construction.
        """
        cf_orch_url = os.environ.get("CF_ORCH_URL")
        if cf_orch_url:
            try:
                # Imported lazily so the coordinator dependency is optional.
                from circuitforge_core.resources import CFOrchClient
                client = CFOrchClient(cf_orch_url)
                return client.allocate(
                    service="vllm",
                    model_candidates=self._MODEL_CANDIDATES,
                    ttl_s=300.0,
                    caller="kiwi-recipe",
                )
            except Exception as exc:
                logger.debug("CFOrchClient init failed, falling back to direct URL: %s", exc)
        return nullcontext(None)

    def _call_llm(self, prompt: str) -> str:
        """Call the LLM, using CFOrchClient allocation when CF_ORCH_URL is set.

        With CF_ORCH_URL set: acquires a vLLM allocation via CFOrchClient and
        calls the OpenAI-compatible API directly against the allocated service URL.
        Without CF_ORCH_URL: falls back to LLMRouter using its configured backends.

        Returns the raw completion text, or "" on any failure (callers treat
        an empty string as "no recipe generated").
        """
        try:
            with self._get_llm_context() as alloc:
                if alloc is not None:
                    base_url = alloc.url.rstrip("/") + "/v1"
                    # vLLM's OpenAI-compatible endpoint ignores the API key.
                    client = OpenAI(base_url=base_url, api_key="any")
                    model = alloc.model or "__auto__"
                    if model == "__auto__":
                        # Allocation did not pin a model: ask the server which
                        # model it actually serves and use the first one.
                        model = client.models.list().data[0].id
                    resp = client.chat.completions.create(
                        model=model,
                        messages=[{"role": "user", "content": prompt}],
                    )
                    return resp.choices[0].message.content or ""
                else:
                    # Imported lazily; only needed on the non-coordinator path.
                    from circuitforge_core.llm.router import LLMRouter
                    router = LLMRouter()
                    return router.complete(prompt)
        except Exception as exc:
            logger.error("LLM call failed: %s", exc)
            return ""

    # Strips markdown bold/italic markers so "**Directions:**" parses like "Directions:"
    _MD_BOLD = re.compile(r"\*{1,2}([^*]+)\*{1,2}")

    def _strip_md(self, text: str) -> str:
        """Remove *italic* / **bold** markers and surrounding whitespace."""
        return self._MD_BOLD.sub(r"\1", text).strip()

    def _parse_response(self, response: str) -> dict[str, str | list[str]]:
        """Parse LLM response text into structured recipe fields.

        Handles both plain-text and markdown-formatted responses. Directions are
        preserved as newline-separated text so the caller can split on step numbers.
        """
        result: dict[str, str | list[str]] = {
            "title": "",
            "ingredients": [],
            "directions": "",
            "notes": "",
        }

        current_key: str | None = None
        buffer: list[str] = []

        def _flush(key: str | None, buf: list[str]) -> None:
            # Commit the buffered lines for the section that just ended.
            if key is None or not buf:
                return
            if key == "directions":
                result["directions"] = "\n".join(buf)
            elif key == "ingredients":
                text = " ".join(buf)
                result["ingredients"] = [i.strip() for i in text.split(",") if i.strip()]
            else:
                result[key] = " ".join(buf).strip()

        for raw_line in response.splitlines():
            line = self._strip_md(raw_line)
            lower = line.lower()
            if lower.startswith("title:"):
                _flush(current_key, buffer)
                current_key, buffer = "title", [line.split(":", 1)[1].strip()]
            elif lower.startswith("ingredients:"):
                _flush(current_key, buffer)
                current_key, buffer = "ingredients", [line.split(":", 1)[1].strip()]
            elif lower.startswith("directions:"):
                _flush(current_key, buffer)
                # Directions may start on the header line itself.
                rest = line.split(":", 1)[1].strip()
                current_key, buffer = "directions", ([rest] if rest else [])
            elif lower.startswith("notes:"):
                _flush(current_key, buffer)
                current_key, buffer = "notes", [line.split(":", 1)[1].strip()]
            elif current_key and line.strip():
                # Continuation line of the current section.
                buffer.append(line.strip())
            elif current_key is None and line.strip() and ":" not in line:
                # Before any section header: a 2-10 word colon-free line is the dish name
                words = line.split()
                if 2 <= len(words) <= 10 and not result["title"]:
                    result["title"] = line.strip()

        _flush(current_key, buffer)
        return result

    def generate(
        self,
        req: RecipeRequest,
        profiles: list[IngredientProfile],
        gaps: list[str],
    ) -> RecipeResult:
        """Generate a recipe via LLM and return a RecipeResult."""
        if req.level == 4:
            prompt = self.build_level4_prompt(req)
        else:
            prompt = self.build_level3_prompt(req, profiles, gaps)

        response = self._call_llm(prompt)

        # Empty string means the LLM call failed or returned nothing.
        if not response:
            return RecipeResult(suggestions=[], element_gaps=gaps)

        parsed = self._parse_response(response)

        raw_directions = parsed.get("directions", "")
        if isinstance(raw_directions, str):
            # Split on newlines; strip leading step numbers ("1.", "2.", "- ", "* ")
            _step_prefix = re.compile(r"^\s*(?:\d+[.)]\s*|[-*]\s+)")
            directions_list = [
                _step_prefix.sub("", s).strip()
                for s in raw_directions.splitlines()
                if s.strip()
            ]
        else:
            directions_list = list(raw_directions)
        raw_notes = parsed.get("notes", "")
        notes_str: str = raw_notes if isinstance(raw_notes, str) else ""

        all_ingredients: list[str] = list(parsed.get("ingredients", []))
        pantry_set = {item.lower() for item in (req.pantry_items or [])}

        # Strip leading quantities/units (e.g. "2 cups rice" → "rice") before
        # checking against pantry, since LLMs return formatted ingredient strings.
        _qty_re = re.compile(
            r"^\s*[\d½¼¾⅓⅔]+[\s/\-]*"  # leading digits or fractions
            r"(?:cup|cups|tbsp|tsp|tablespoon|teaspoon|oz|lb|lbs|g|kg|"
            r"can|cans|clove|cloves|bunch|package|pkg|slice|slices|"
            r"piece|pieces|pinch|dash|handful|head|heads|large|small|medium"
            r")s?\b[,\s]*",
            re.IGNORECASE,
        )
        missing = []
        for ing in all_ingredients:
            # An ingredient counts as present if either its bare form or its
            # verbatim form appears in the pantry.
            bare = _qty_re.sub("", ing).strip().lower()
            if bare not in pantry_set and ing.lower() not in pantry_set:
                missing.append(bare or ing)

        suggestion = RecipeSuggestion(
            id=0,
            title=parsed.get("title") or "LLM Recipe",
            # NOTE(review): match_count is the full pantry size, not the
            # number of pantry items the recipe actually uses — confirm intent.
            match_count=len(req.pantry_items),
            element_coverage={},
            missing_ingredients=missing,
            directions=directions_list,
            notes=notes_str,
            level=req.level,
            is_wildcard=(req.level == 4),
        )

        return RecipeResult(
            suggestions=[suggestion],
            element_gaps=gaps,
        )
|
||||
|
|
@ -1,583 +0,0 @@
|
|||
"""
|
||||
RecipeEngine — orchestrates the four creativity levels.
|
||||
|
||||
Level 1: corpus lookup ranked by ingredient match + expiry urgency
|
||||
Level 2: Level 1 + deterministic substitution swaps
|
||||
Level 3: element scaffold → LLM constrained prompt (see llm_recipe.py)
|
||||
Level 4: wildcard LLM (see llm_recipe.py)
|
||||
|
||||
Amendments:
|
||||
- max_missing: filter to recipes missing ≤ N pantry items
|
||||
- hard_day_mode: filter to easy-method recipes only
|
||||
- grocery_list: aggregated missing ingredients across suggestions
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import re
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from app.db.store import Store
|
||||
|
||||
from app.models.schemas.recipe import GroceryLink, NutritionPanel, RecipeRequest, RecipeResult, RecipeSuggestion, SwapCandidate
|
||||
from app.services.recipe.assembly_recipes import match_assembly_templates
|
||||
from app.services.recipe.element_classifier import ElementClassifier
|
||||
from app.services.recipe.grocery_links import GroceryLinkBuilder
|
||||
from app.services.recipe.substitution_engine import SubstitutionEngine
|
||||
|
||||
# Free-tier cap on leftover-mode (expiry_first) requests per day.
_LEFTOVER_DAILY_MAX_FREE = 5

# Words that carry no ingredient-identity signal — stripped before overlap scoring
_SWAP_STOPWORDS = frozenset({
    "a", "an", "the", "of", "in", "for", "with", "and", "or",
    "to", "from", "at", "by", "as", "on",
})
|
||||
|
||||
# Maps product-label substrings to recipe-corpus canonical terms.
# Kept in sync with Store._FTS_SYNONYMS — both must agree on canonical names.
# Used to expand pantry_set so single-word recipe ingredients can match
# multi-word product names (e.g. "hamburger" satisfied by "burger patties").
# NOTE: keys are plain substrings matched against the lowercased pantry label,
# so truncated stems like "burger patt" cover both "patty" and "patties".
_PANTRY_LABEL_SYNONYMS: dict[str, str] = {
    # Burger / ground-beef products → canonical "hamburger"
    "burger patt": "hamburger",
    "beef patt": "hamburger",
    "ground beef": "hamburger",
    "ground chuck": "hamburger",
    "ground round": "hamburger",
    "mince": "hamburger",
    "veggie burger": "hamburger",
    "beyond burger": "hamburger",
    "impossible burger": "hamburger",
    "plant burger": "hamburger",
    "chicken patt": "chicken patty",
    # Sausage / hotdog family
    "kielbasa": "sausage",
    "bratwurst": "sausage",
    "frankfurter": "hotdog",
    "wiener": "hotdog",
    # Chicken cuts and plant-based analogues → canonical "chicken"
    "chicken breast": "chicken",
    "chicken thigh": "chicken",
    "chicken drumstick": "chicken",
    "chicken wing": "chicken",
    "rotisserie chicken": "chicken",
    "chicken tender": "chicken",
    "chicken strip": "chicken",
    "chicken piece": "chicken",
    "fake chicken": "chicken",
    "plant chicken": "chicken",
    "vegan chicken": "chicken",
    "daring": "chicken",
    "gardein chick": "chicken",
    "quorn chick": "chicken",
    "chick'n": "chicken",
    "chikn": "chicken",
    "not-chicken": "chicken",
    "no-chicken": "chicken",
    # Plant-based beef subs → broad "beef" (strips ≠ ground; texture matters)
    "not-beef": "beef",
    "no-beef": "beef",
    "plant beef": "beef",
    "vegan beef": "beef",
    # Plant-based pork subs
    "not-pork": "pork",
    "no-pork": "pork",
    "plant pork": "pork",
    "vegan pork": "pork",
    "omnipork": "pork",
    "omni pork": "pork",
    # Generic alt-meat catch-alls → broad "beef"
    "fake meat": "beef",
    "plant meat": "beef",
    "vegan meat": "beef",
    "meat-free": "beef",
    "meatless": "beef",
    "pork chop": "pork",
    "pork loin": "pork",
    "pork tenderloin": "pork",
    # Tomato-based sauces
    "marinara": "tomato sauce",
    "pasta sauce": "tomato sauce",
    "spaghetti sauce": "tomato sauce",
    "pizza sauce": "tomato sauce",
    # Pasta shapes → canonical "pasta"
    "macaroni": "pasta",
    "noodles": "pasta",
    "spaghetti": "pasta",
    "penne": "pasta",
    "fettuccine": "pasta",
    "rigatoni": "pasta",
    "linguine": "pasta",
    "rotini": "pasta",
    "farfalle": "pasta",
    # Cheese products
    "shredded cheese": "cheese",
    "sliced cheese": "cheese",
    "american cheese": "cheese",
    "cheddar": "cheese",
    "mozzarella": "cheese",
    # Cream products
    "heavy cream": "cream",
    "whipping cream": "cream",
    "half and half": "cream",
    # Bread rolls / buns
    "burger bun": "buns",
    "hamburger bun": "buns",
    "hot dog bun": "buns",
    "bread roll": "buns",
    "dinner roll": "buns",
    # Tortillas / wraps — assembly dishes (burritos, tacos, quesadillas)
    "flour tortilla": "tortillas",
    "corn tortilla": "tortillas",
    "tortilla wrap": "tortillas",
    "soft taco shell": "tortillas",
    "taco shell": "taco shells",
    "pita bread": "pita",
    "flatbread": "flatbread",
    # Canned beans — extremely interchangeable in assembly dishes
    "black bean": "beans",
    "pinto bean": "beans",
    "kidney bean": "beans",
    "refried bean": "beans",
    "chickpea": "beans",
    "garbanzo": "beans",
    # Rice variants
    "white rice": "rice",
    "brown rice": "rice",
    "jasmine rice": "rice",
    "basmati rice": "rice",
    "instant rice": "rice",
    "microwavable rice": "rice",
    # Salsa / hot sauce
    "hot sauce": "salsa",
    "taco sauce": "salsa",
    "enchilada sauce": "salsa",
    # Sour cream / Greek yogurt — functional substitutes
    "greek yogurt": "sour cream",
    # Frozen/prepackaged meal token extraction — handled by individual token
    # fallback in _normalize_for_fts; these are the most common single-serve meal types
    "lean cuisine": "casserole",
    "stouffer": "casserole",
    "healthy choice": "casserole",
    "marie callender": "casserole",
}
|
||||
|
||||
|
||||
# Matches leading quantity/unit prefixes in recipe ingredient strings,
# e.g. "2 cups flour" → "flour", "1/2 c. ketchup" → "ketchup",
# "3 oz. butter" → "butter"
# NOTE(review): the unit alternation is NOT optional, so a bare count with
# no unit ("3 eggs") is not matched here — _strip_quantity handles that
# case with a second leading-digit strip.
_QUANTITY_PREFIX = re.compile(
    r"^\s*(?:\d+(?:[./]\d+)?\s*)?"  # optional leading number (1, 1/2, 2.5)
    r"(?:to\s+\d+\s*)?"  # optional "to N" range
    r"(?:c\.|cup|cups|tbsp|tsp|oz|lb|lbs|g|kg|ml|l|"
    r"can|cans|pkg|pkg\.|package|slice|slices|clove|cloves|"
    r"small|medium|large|bunch|head|piece|pieces|"
    r"pinch|dash|handful|sprig|sprigs)\s*\b",
    re.IGNORECASE,
)
|
||||
|
||||
|
||||
# Preparation-state words that modify an ingredient without changing what it is.
|
||||
# Stripped from both ends so "melted butter", "butter, melted" both → "butter".
|
||||
_PREP_STATES = re.compile(
|
||||
r"\b(melted|softened|cold|warm|hot|room.temperature|"
|
||||
r"diced|sliced|chopped|minced|grated|shredded|shredded|beaten|whipped|"
|
||||
r"cooked|raw|frozen|canned|dried|dehydrated|marinated|seasoned|"
|
||||
r"roasted|toasted|ground|crushed|pressed|peeled|seeded|pitted|"
|
||||
r"boneless|skinless|trimmed|halved|quartered|julienned|"
|
||||
r"thinly|finely|roughly|coarsely|freshly|lightly|"
|
||||
r"packed|heaping|level|sifted|divided|optional)\b",
|
||||
re.IGNORECASE,
|
||||
)
|
||||
# Trailing comma + optional prep state (e.g. "butter, melted")
|
||||
_TRAILING_PREP = re.compile(r",\s*\w+$")
|
||||
|
||||
|
||||
# Maps prep-state words to human-readable instruction templates.
# {ingredient} is replaced with the actual ingredient name.
# None means the state is passive (frozen, canned) — no note needed.
_PREP_INSTRUCTIONS: dict[str, str | None] = {
    "melted": "Melt the {ingredient} before starting.",
    "softened": "Let the {ingredient} soften to room temperature before using.",
    "room temperature": "Bring the {ingredient} to room temperature before using.",
    "beaten": "Beat the {ingredient} lightly before adding.",
    "whipped": "Whip the {ingredient} until soft peaks form.",
    "sifted": "Sift the {ingredient} before measuring.",
    "toasted": "Toast the {ingredient} in a dry pan until fragrant.",
    "roasted": "Roast the {ingredient} before using.",
    "pressed": "Press the {ingredient} to remove excess moisture.",
    "diced": "Dice the {ingredient} into small pieces.",
    "sliced": "Slice the {ingredient} thinly.",
    "chopped": "Chop the {ingredient} roughly.",
    "minced": "Mince the {ingredient} finely.",
    "grated": "Grate the {ingredient}.",
    "shredded": "Shred the {ingredient}.",
    "ground": "Grind the {ingredient}.",
    "crushed": "Crush the {ingredient}.",
    "peeled": "Peel the {ingredient} before use.",
    "seeded": "Remove seeds from the {ingredient}.",
    "pitted": "Pit the {ingredient} before use.",
    "trimmed": "Trim any excess from the {ingredient}.",
    "julienned": "Cut the {ingredient} into thin matchstick strips.",
    "cooked": "Pre-cook the {ingredient} before adding.",
    # Passive states — ingredient is used as-is, no prep note needed
    "cold": None,
    "warm": None,
    "hot": None,
    "raw": None,
    "frozen": None,
    "canned": None,
    "dried": None,
    "dehydrated": None,
    "marinated": None,
    "seasoned": None,
    "boneless": None,
    "skinless": None,
    "divided": None,
    "optional": None,
    "fresh": None,
    "freshly": None,
    "thinly": None,
    "finely": None,
    "roughly": None,
    "coarsely": None,
    "lightly": None,
    "packed": None,
    "heaping": None,
    "level": None,
}

# Finds the first actionable prep state in an ingredient string.
# Built from every _PREP_INSTRUCTIONS key (word-bounded, case-insensitive);
# a hit that maps to None is later discarded by _prep_note_for.
_PREP_STATE_SEARCH = re.compile(
    r"\b(" + "|".join(re.escape(k) for k in _PREP_INSTRUCTIONS) + r")\b",
    re.IGNORECASE,
)
|
||||
|
||||
|
||||
def _strip_quantity(ingredient: str) -> str:
    """Reduce a formatted recipe ingredient to its bare ingredient name.

    Drops the leading quantity/unit prefix, any residual leading count,
    a trailing ", <word>" clause, and embedded preparation-state words,
    then collapses whitespace.

    e.g. "2 tbsp melted butter" → "butter"
         "butter, melted" → "butter"
         "1/4 cup flour, sifted" → "flour"
    """
    bare = _QUANTITY_PREFIX.sub("", ingredient).strip()
    # Leading number with no unit (e.g. "3 eggs" → "eggs")
    bare = re.sub(r"^\d+\s+", "", bare)
    # Trailing ", prep_state" suffix
    bare = _TRAILING_PREP.sub("", bare).strip()
    # Prep-state words anywhere in the remainder
    bare = _PREP_STATES.sub("", bare).strip()
    # Collapse any double spaces left by the removals
    bare = re.sub(r"\s{2,}", " ", bare).strip()
    return bare if bare else ingredient
|
||||
|
||||
|
||||
def _prep_note_for(ingredient: str) -> str | None:
    """Translate a prep-state word in *ingredient* into an instruction, or None.

    e.g. "2 tbsp melted butter" → "Melt the butter before starting."
         "onion, diced" → "Dice the onion into small pieces."
         "frozen peas" → None (passive state, no action needed)
    """
    found = _PREP_STATE_SEARCH.search(ingredient)
    if found is None:
        return None
    template = _PREP_INSTRUCTIONS.get(found.group(1).lower())
    if not template:
        # Passive state (maps to None) — no instruction to emit.
        return None
    # Subject of the sentence is the bare ingredient name.
    return template.format(ingredient=_strip_quantity(ingredient))
|
||||
|
||||
|
||||
def _expand_pantry_set(pantry_items: list[str]) -> set[str]:
    """Lowercased pantry items plus canonical synonyms from _PANTRY_LABEL_SYNONYMS.

    Each pantry label is substring-matched against the synonym table, and the
    canonical term is added on a hit. This lets single-word recipe ingredients
    ("hamburger", "chicken") match product-label pantry entries
    ("burger patties", "rotisserie chicken").
    """
    result: set[str] = set()
    for raw in pantry_items:
        label = raw.lower().strip()
        result.add(label)
        result.update(
            canonical
            for pattern, canonical in _PANTRY_LABEL_SYNONYMS.items()
            if pattern in label
        )
    return result
|
||||
|
||||
|
||||
def _ingredient_in_pantry(ingredient: str, pantry_set: set[str]) -> bool:
    """Return True if the recipe ingredient is satisfied by the pantry.

    Three escalating layers:
    1. Exact match after quantity/prep stripping.
    2. Synonym lookup: ingredient → canonical → in pantry_set
       (handles "ground beef" matched by "burger patties" via shared canonical).
    3. Token subset: every content token of the ingredient appears in pantry
       (handles "diced onions" when "onions" is in pantry).
    """
    clean = _strip_quantity(ingredient).lower()
    if clean in pantry_set:
        return True

    # Layer 2: does this ingredient map to a canonical the pantry holds?
    if any(
        pattern in clean and canonical in pantry_set
        for pattern, canonical in _PANTRY_LABEL_SYNONYMS.items()
    ):
        return True

    # Layer 3: all content tokens (len > 2, non-stopword) present in pantry,
    # e.g. "ketchup" inside "c. ketchup".
    content = [tok for tok in clean.split() if len(tok) > 2 and tok not in _SWAP_STOPWORDS]
    return bool(content) and all(tok in pantry_set for tok in content)
|
||||
|
||||
|
||||
def _content_tokens(text: str) -> frozenset[str]:
    """Lowercased words of *text*, minus stopwords and single-character tokens."""
    words = text.lower().split()
    return frozenset(w for w in words if len(w) > 1 and w not in _SWAP_STOPWORDS)
|
||||
|
||||
|
||||
def _pantry_creative_swap(required: str, pantry_items: set[str]) -> str | None:
    """Return a pantry item that's a plausible creative substitute, or None.

    A candidate needs ≥2 shared content tokens AND ≥50% bidirectional overlap,
    so near-identical multi-word items qualify (cream-of-mushroom vs
    cream-of-potato) while single-word ingredients (butter, flour) never
    accidentally match supersets (peanut butter, bread flour).
    """
    need = _content_tokens(required)
    if len(need) < 2:
        return None  # single-word ingredients must already be in pantry_set

    winner: str | None = None
    winner_score = 0.0
    for candidate in pantry_items:
        if candidate.lower() == required.lower():
            continue
        have = _content_tokens(candidate)
        if not have:
            continue
        shared = len(need & have)
        if shared < 2:
            continue
        # Bidirectional overlap: worst of the two coverage ratios.
        score = min(shared / len(need), shared / len(have))
        if score >= 0.5 and score > winner_score:
            winner_score = score
            winner = candidate
    return winner
|
||||
|
||||
|
||||
# Method complexity classification patterns.
# Easy: low-effort verbs (no sustained stove attention).
_EASY_METHODS = re.compile(
    r"\b(microwave|mix|stir|blend|toast|assemble|heat)\b", re.IGNORECASE
)
# Involved: techniques requiring active cooking — checked FIRST by
# _classify_method_complexity, so any involved verb outranks an easy one.
_INVOLVED_METHODS = re.compile(
    r"\b(braise|roast|knead|deep.?fry|fry|sauté|saute|bake|boil)\b", re.IGNORECASE
)
|
||||
|
||||
|
||||
def _classify_method_complexity(
    directions: list[str],
    available_equipment: list[str] | None = None,
) -> str:
    """Classify recipe method complexity from direction strings.

    Returns 'easy', 'moderate', or 'involved'. An involved technique wins
    over an easy one; user-owned equipment mentioned in the directions
    (e.g. ['toaster', 'air fryer']) can also mark a recipe easy.
    """
    joined = " ".join(directions).lower()

    if _INVOLVED_METHODS.search(joined):
        return "involved"
    if _EASY_METHODS.search(joined):
        return "easy"
    # Equipment-specific easy methods: substring hit on the user's gear.
    if any(equip.lower() in joined for equip in (available_equipment or [])):
        return "easy"
    return "moderate"
|
||||
|
||||
|
||||
class RecipeEngine:
    """Orchestrates the four creativity levels for recipe suggestion.

    Levels 1–2 take the deterministic corpus path below; levels 3–4 are
    delegated to LLMRecipeGenerator. See the module docstring for the
    level semantics and amendment flags.
    """

    def __init__(self, store: "Store") -> None:
        self._store = store
        self._classifier = ElementClassifier(store)
        self._substitution = SubstitutionEngine(store)

    def suggest(
        self,
        req: RecipeRequest,
        available_equipment: list[str] | None = None,
    ) -> RecipeResult:
        """Return ranked suggestions for *req*.

        available_equipment overrides the user's stored cooking equipment
        (only consulted when req.hard_day_mode is set). May rate-limit
        free-tier leftover-mode requests (returns rate_limited=True).
        """
        # Load cooking equipment from user settings when hard_day_mode is active
        if req.hard_day_mode and available_equipment is None:
            equipment_json = self._store.get_setting("cooking_equipment")
            if equipment_json:
                try:
                    available_equipment = json.loads(equipment_json)
                except (json.JSONDecodeError, TypeError):
                    available_equipment = []
            else:
                available_equipment = []
        # Rate-limit leftover mode for free tier
        if req.expiry_first and req.tier == "free":
            allowed, count = self._store.check_and_increment_rate_limit(
                "leftover_mode", _LEFTOVER_DAILY_MAX_FREE
            )
            if not allowed:
                return RecipeResult(
                    suggestions=[], element_gaps=[], rate_limited=True, rate_limit_count=count
                )

        profiles = self._classifier.classify_batch(req.pantry_items)
        gaps = self._classifier.identify_gaps(profiles)
        pantry_set = _expand_pantry_set(req.pantry_items)

        # Levels 3 & 4: hand off to the LLM path (lazy import avoids loading
        # the LLM stack for deterministic requests).
        if req.level >= 3:
            from app.services.recipe.llm_recipe import LLMRecipeGenerator
            gen = LLMRecipeGenerator(self._store)
            return gen.generate(req, profiles, gaps)

        # Level 1 & 2: deterministic path
        nf = req.nutrition_filters
        rows = self._store.search_recipes_by_ingredients(
            req.pantry_items,
            limit=20,
            category=req.category or None,
            max_calories=nf.max_calories,
            max_sugar_g=nf.max_sugar_g,
            max_carbs_g=nf.max_carbs_g,
            max_sodium_mg=nf.max_sodium_mg,
            excluded_ids=req.excluded_ids or [],
        )
        suggestions = []

        for row in rows:
            # ingredient_names may be stored as a JSON string or a list.
            ingredient_names: list[str] = row.get("ingredient_names") or []
            if isinstance(ingredient_names, str):
                try:
                    ingredient_names = json.loads(ingredient_names)
                except Exception:
                    ingredient_names = []

            # Compute missing ingredients, detecting pantry coverage first.
            # When covered, collect any prep-state annotations (e.g. "melted butter"
            # → note "Melt the butter before starting.") to surface separately.
            swap_candidates: list[SwapCandidate] = []
            missing: list[str] = []
            prep_note_set: set[str] = set()
            for n in ingredient_names:
                if _ingredient_in_pantry(n, pantry_set):
                    note = _prep_note_for(n)
                    if note:
                        prep_note_set.add(note)
                    continue
                # Not in pantry — try a token-overlap creative swap before
                # declaring it missing.
                swap_item = _pantry_creative_swap(n, pantry_set)
                if swap_item:
                    swap_candidates.append(SwapCandidate(
                        original_name=n,
                        substitute_name=swap_item,
                        constraint_label="pantry_swap",
                        explanation=f"You have {swap_item} — use it in place of {n}.",
                        compensation_hints=[],
                    ))
                else:
                    missing.append(n)

            # Filter by max_missing (pantry swaps don't count as missing)
            if req.max_missing is not None and len(missing) > req.max_missing:
                continue

            # Filter by hard_day_mode
            if req.hard_day_mode:
                directions: list[str] = row.get("directions") or []
                if isinstance(directions, str):
                    try:
                        directions = json.loads(directions)
                    except Exception:
                        directions = [directions]
                complexity = _classify_method_complexity(directions, available_equipment)
                if complexity == "involved":
                    continue

            # Level 2: also add dietary constraint swaps from substitution_pairs
            if req.level == 2 and req.constraints:
                for ing in ingredient_names:
                    for constraint in req.constraints:
                        swaps = self._substitution.find_substitutes(ing, constraint)
                        # Only the top (most-seen) swap per ingredient/constraint.
                        for swap in swaps[:1]:
                            swap_candidates.append(SwapCandidate(
                                original_name=swap.original_name,
                                substitute_name=swap.substitute_name,
                                constraint_label=swap.constraint_label,
                                explanation=swap.explanation,
                                compensation_hints=swap.compensation_hints,
                            ))

            # element_coverage may also arrive JSON-encoded.
            coverage_raw = row.get("element_coverage") or {}
            if isinstance(coverage_raw, str):
                try:
                    coverage_raw = json.loads(coverage_raw)
                except Exception:
                    coverage_raw = {}

            servings = row.get("servings") or None
            nutrition = NutritionPanel(
                calories=row.get("calories"),
                fat_g=row.get("fat_g"),
                protein_g=row.get("protein_g"),
                carbs_g=row.get("carbs_g"),
                fiber_g=row.get("fiber_g"),
                sugar_g=row.get("sugar_g"),
                sodium_mg=row.get("sodium_mg"),
                servings=servings,
                estimated=bool(row.get("nutrition_estimated", 0)),
            )
            # Only attach a panel when at least one headline figure exists.
            has_nutrition = any(
                v is not None
                for v in (nutrition.calories, nutrition.sugar_g, nutrition.carbs_g)
            )
            suggestions.append(RecipeSuggestion(
                id=row["id"],
                title=row["title"],
                match_count=int(row.get("match_count") or 0),
                element_coverage=coverage_raw,
                swap_candidates=swap_candidates,
                missing_ingredients=missing,
                prep_notes=sorted(prep_note_set),
                level=req.level,
                nutrition=nutrition if has_nutrition else None,
            ))

        # Prepend assembly-dish templates (burrito, stir fry, omelette, etc.)
        # These fire regardless of corpus coverage — any pantry can make a burrito.
        assembly = match_assembly_templates(
            pantry_items=req.pantry_items,
            pantry_set=pantry_set,
            excluded_ids=req.excluded_ids or [],
        )
        suggestions = assembly + suggestions

        # Build grocery list — deduplicated union of all missing ingredients
        seen: set[str] = set()
        grocery_list: list[str] = []
        for s in suggestions:
            for item in s.missing_ingredients:
                if item not in seen:
                    grocery_list.append(item)
                    seen.add(item)

        # Build grocery links — affiliate deeplinks for each missing ingredient
        link_builder = GroceryLinkBuilder(tier=req.tier, has_byok=req.has_byok)
        grocery_links = link_builder.build_all(grocery_list)

        return RecipeResult(
            suggestions=suggestions,
            element_gaps=gaps,
            grocery_list=grocery_list,
            grocery_links=grocery_links,
        )
|
||||
|
|
@ -1,60 +0,0 @@
|
|||
"""
|
||||
StapleLibrary -- bulk-preparable base component reference data.
|
||||
Loaded from YAML files in app/staples/.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
import yaml
|
||||
|
||||
# Directory holding the staple *.yaml definitions (two levels up: app/staples/).
_STAPLES_DIR = Path(__file__).parents[2] / "staples"
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class StapleEntry:
    """One bulk-preparable base component, loaded from a YAML file."""

    slug: str  # stable identifier; also the registry key in StapleLibrary
    name: str  # human-readable display name
    description: str
    dietary_labels: list[str]  # e.g. ["vegan", "high-protein"]
    base_ingredients: list[str]
    base_method: str  # cooking technique, e.g. "simmer"
    base_time_minutes: int
    yield_formats: dict[str, Any]  # presumably format-name → details; verify against staple YAMLs
    compatible_styles: list[str]  # style ids this staple pairs with
|
||||
|
||||
|
||||
class StapleLibrary:
    """In-memory registry of StapleEntry records loaded from *.yaml files."""

    def __init__(self, staples_dir: Path = _STAPLES_DIR) -> None:
        self._staples: dict[str, StapleEntry] = {}
        # Sorted for deterministic load order (last duplicate slug wins).
        for yaml_path in sorted(staples_dir.glob("*.yaml")):
            entry = self._load(yaml_path)
            self._staples[entry.slug] = entry

    def get(self, slug: str) -> StapleEntry | None:
        """Return the staple registered under *slug*, or None if unknown."""
        return self._staples.get(slug)

    def list_all(self) -> list[StapleEntry]:
        """Return every loaded staple."""
        return list(self._staples.values())

    def filter_by_dietary(self, label: str) -> list[StapleEntry]:
        """Return staples carrying the given dietary label (e.g. 'vegan')."""
        return [s for s in self._staples.values() if label in s.dietary_labels]

    def _load(self, path: Path) -> StapleEntry:
        """Parse one YAML file into a StapleEntry.

        Raises:
            ValueError: when the file is missing required keys, is not
                valid YAML, or is empty / not a mapping.
        """
        try:
            data = yaml.safe_load(path.read_text())
            return StapleEntry(
                slug=data["slug"],
                name=data["name"],
                description=data.get("description", ""),
                dietary_labels=data.get("dietary_labels", []),
                base_ingredients=data.get("base_ingredients", []),
                base_method=data.get("base_method", ""),
                base_time_minutes=int(data.get("base_time_minutes", 0)),
                yield_formats=data.get("yield_formats", {}),
                compatible_styles=data.get("compatible_styles", []),
            )
        # TypeError added: safe_load returns None for an empty document, so
        # data["slug"] raises TypeError, not KeyError — previously this
        # escaped uncaught. Matches the handling in StyleAdapter.__init__.
        except (KeyError, TypeError, yaml.YAMLError) as exc:
            raise ValueError(f"Failed to load staple from {path}: {exc}") from exc
|
||||
|
|
@ -1,132 +0,0 @@
|
|||
"""
|
||||
StyleAdapter — cuisine-mode overlay that biases element dimensions.
|
||||
YAML templates in app/styles/.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
from pathlib import Path
|
||||
|
||||
import yaml
|
||||
|
||||
# Directory holding the style *.yaml templates (two levels up: app/styles/).
_STYLES_DIR = Path(__file__).parents[2] / "styles"
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class StyleTemplate:
    """Cuisine-style overlay: preferred ingredients per element dimension.

    The three pantry-matching accessors previously triplicated the same
    bidirectional-substring loop; it is now factored into one helper.
    """

    style_id: str
    name: str
    aromatics: tuple[str, ...]
    depth_sources: tuple[str, ...]
    brightness_sources: tuple[str, ...]
    method_bias: dict[str, float]
    structure_forms: tuple[str, ...]
    seasoning_bias: str
    finishing_fat_str: str

    @staticmethod
    def _present_in_pantry(candidates: tuple[str, ...], pantry_items: list[str]) -> list[str]:
        """Return candidates matched by any pantry item (bidirectional substring)."""
        matched = []
        for cand in candidates:
            cand_l = cand.lower()
            for item in pantry_items:
                item_l = item.lower()
                if cand_l in item_l or item_l in cand_l:
                    matched.append(cand)
                    break
        return matched

    def bias_aroma_selection(self, pantry_items: list[str]) -> list[str]:
        """Return aromatics present in pantry (bidirectional substring match)."""
        return self._present_in_pantry(self.aromatics, pantry_items)

    def preferred_depth_sources(self, pantry_items: list[str]) -> list[str]:
        """Return depth_sources present in pantry."""
        return self._present_in_pantry(self.depth_sources, pantry_items)

    def preferred_structure_forms(self, pantry_items: list[str]) -> list[str]:
        """Return structure_forms present in pantry."""
        return self._present_in_pantry(self.structure_forms, pantry_items)

    def method_weights(self) -> dict[str, float]:
        """Return a copy of the method bias weights."""
        return dict(self.method_bias)

    def seasoning_vector(self) -> str:
        """Return the seasoning bias descriptor."""
        return self.seasoning_bias

    def finishing_fat(self) -> str:
        """Return the finishing fat."""
        return self.finishing_fat_str
|
||||
|
||||
|
||||
class StyleAdapter:
    """Loads StyleTemplate YAML files and exposes style-biased guidance."""

    def __init__(self, styles_dir: Path = _STYLES_DIR) -> None:
        self._styles: dict[str, StyleTemplate] = {}
        # Sorted for deterministic load order.
        for yaml_path in sorted(styles_dir.glob("*.yaml")):
            try:
                tmpl = self._load(yaml_path)
                self._styles[tmpl.style_id] = tmpl
            except (KeyError, yaml.YAMLError, TypeError) as exc:
                raise ValueError(f"Failed to load style from {yaml_path}: {exc}") from exc

    @property
    def styles(self) -> dict[str, StyleTemplate]:
        """Mapping of style_id → StyleTemplate."""
        return self._styles

    def get(self, style_id: str) -> StyleTemplate | None:
        """Return the template for *style_id*, or None when unknown."""
        return self._styles.get(style_id)

    def list_all(self) -> list[StyleTemplate]:
        """Return every loaded template."""
        return list(self._styles.values())

    def bias_aroma_selection(self, style_id: str, pantry_items: list[str]) -> list[str]:
        """Return pantry items that match the style's preferred aromatics.

        Falls back to the full pantry when the style is unknown or no
        item matches, so callers always get a usable candidate list.
        """
        template = self._styles.get(style_id)
        if template is None:
            return pantry_items
        hits = []
        for item in pantry_items:
            item_l = item.lower()
            for aroma in template.aromatics:
                aroma_l = aroma.lower()
                if aroma_l in item_l or item_l in aroma_l:
                    hits.append(item)
                    break
        return hits or pantry_items

    def apply(self, style_id: str, pantry_items: list[str]) -> dict:
        """Return style-biased ingredient guidance for each element dimension.

        An unknown style_id yields an empty dict.
        """
        template = self._styles.get(style_id)
        if template is None:
            return {}
        return {
            "aroma_candidates": self.bias_aroma_selection(style_id, pantry_items),
            "depth_suggestions": list(template.depth_sources),
            "brightness_suggestions": list(template.brightness_sources),
            "method_bias": template.method_bias,
            "structure_forms": list(template.structure_forms),
            "seasoning_bias": template.seasoning_bias,
            "finishing_fat": template.finishing_fat_str,
        }

    def _load(self, path: Path) -> StyleTemplate:
        """Parse one YAML style file into a StyleTemplate."""
        raw = yaml.safe_load(path.read_text())
        return StyleTemplate(
            style_id=raw["style_id"],
            name=raw["name"],
            aromatics=tuple(raw.get("aromatics", [])),
            depth_sources=tuple(raw.get("depth_sources", [])),
            brightness_sources=tuple(raw.get("brightness_sources", [])),
            method_bias=dict(raw.get("method_bias", {})),
            structure_forms=tuple(raw.get("structure_forms", [])),
            seasoning_bias=raw.get("seasoning_bias", ""),
            finishing_fat_str=raw.get("finishing_fat", ""),
        )
|
||||
|
|
@ -1,126 +0,0 @@
|
|||
"""
|
||||
SubstitutionEngine — deterministic ingredient swap candidates with compensation hints.
|
||||
|
||||
Powered by:
|
||||
- substitution_pairs table (derived from lishuyang/recipepairs)
|
||||
- ingredient_profiles functional metadata (USDA FDC)
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
from dataclasses import dataclass, field
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from app.db.store import Store
|
||||
|
||||
# Compensation threshold — if |delta| exceeds this, surface a hint
_FAT_THRESHOLD = 5.0  # grams per 100g
_GLUTAMATE_THRESHOLD = 1.0  # mg per 100g
_MOISTURE_THRESHOLD = 15.0  # grams per 100g

# Suggested add-ins; only the first entry of each list is surfaced as a hint.
_RICHNESS_COMPENSATORS = ["olive oil", "coconut oil", "butter", "shortening", "full-fat coconut milk"]
_DEPTH_COMPENSATORS = ["nutritional yeast", "soy sauce", "miso", "mushroom powder",
                       "better than bouillon not-beef", "smoked paprika"]
_MOISTURE_BINDERS = ["cornstarch", "flour", "arrowroot", "breadcrumbs"]
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class CompensationHint:
    """A suggested add-in that restores an element weakened by a substitution."""

    ingredient: str  # what to add (first entry of the matching compensator list)
    reason: str  # human-readable rationale, embedded in the swap explanation
    element: str  # affected dimension: "Richness", "Depth", or "Structure"
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class SubstitutionSwap:
    """One original→substitute pairing with nutrient deltas and hints.

    Deltas are substitute-minus-original per 100g: a strongly negative
    fat/glutamate delta or strongly positive moisture delta triggers a
    CompensationHint (see SubstitutionEngine._build_hints).
    """

    original_name: str
    substitute_name: str
    constraint_label: str
    fat_delta: float
    moisture_delta: float
    glutamate_delta: float
    protein_delta: float
    occurrence_count: int  # how many corpus recipes exhibit this swap
    compensation_hints: list[dict] = field(default_factory=list)
    explanation: str = ""  # prose summary built by _build_explanation
|
||||
|
||||
|
||||
class SubstitutionEngine:
    """Looks up deterministic ingredient swaps from the substitution_pairs table."""

    def __init__(self, store: "Store") -> None:
        self._store = store

    def find_substitutes(
        self,
        ingredient_name: str,
        constraint: str,
    ) -> list[SubstitutionSwap]:
        """Return swaps for *ingredient_name* under *constraint*, most-seen first."""
        rows = self._store._fetch_all("""
            SELECT substitute_name, constraint_label,
                   fat_delta, moisture_delta, glutamate_delta, protein_delta,
                   occurrence_count, compensation_hints
            FROM substitution_pairs
            WHERE original_name = ? AND constraint_label = ?
            ORDER BY occurrence_count DESC
        """, (ingredient_name.lower(), constraint))

        return [self._row_to_swap(ingredient_name, row) for row in rows]

    def _row_to_swap(self, original: str, row: dict) -> SubstitutionSwap:
        """Materialize one DB row into a SubstitutionSwap with hints and prose."""
        hints = self._build_hints(row)
        return SubstitutionSwap(
            original_name=original,
            substitute_name=row["substitute_name"],
            constraint_label=row["constraint_label"],
            fat_delta=row.get("fat_delta") or 0.0,
            moisture_delta=row.get("moisture_delta") or 0.0,
            glutamate_delta=row.get("glutamate_delta") or 0.0,
            protein_delta=row.get("protein_delta") or 0.0,
            occurrence_count=row.get("occurrence_count") or 1,
            compensation_hints=[
                {"ingredient": h.ingredient, "reason": h.reason, "element": h.element}
                for h in hints
            ],
            explanation=self._build_explanation(original, row, hints),
        )

    def _build_hints(self, row: dict) -> list[CompensationHint]:
        """Derive compensation hints from nutrient deltas crossing thresholds."""
        fat = row.get("fat_delta") or 0.0
        umami = row.get("glutamate_delta") or 0.0
        moisture = row.get("moisture_delta") or 0.0
        hints: list[CompensationHint] = []

        # Substitute is noticeably leaner — restore Richness.
        if fat < -_FAT_THRESHOLD:
            sugg = _RICHNESS_COMPENSATORS[0]
            hints.append(CompensationHint(
                ingredient=sugg,
                reason=f"substitute has ~{abs(fat):.0f}g/100g less fat — add {sugg} to restore Richness",
                element="Richness",
            ))

        # Substitute carries less glutamate — restore Depth (umami).
        if umami < -_GLUTAMATE_THRESHOLD:
            sugg = _DEPTH_COMPENSATORS[0]
            hints.append(CompensationHint(
                ingredient=sugg,
                reason=f"substitute is lower in umami — add {sugg} to restore Depth",
                element="Depth",
            ))

        # Substitute is markedly wetter — bind the extra moisture.
        if moisture > _MOISTURE_THRESHOLD:
            sugg = _MOISTURE_BINDERS[0]
            hints.append(CompensationHint(
                ingredient=sugg,
                reason=f"substitute adds ~{moisture:.0f}g/100g more moisture — add {sugg} to maintain Structure",
                element="Structure",
            ))

        return hints

    def _build_explanation(
        self, original: str, row: dict, hints: list[CompensationHint]
    ) -> str:
        """One-sentence human explanation, appending compensation advice if any."""
        sub = row["substitute_name"]
        count = row.get("occurrence_count") or 1
        text = f"Replace {original} with {sub} (seen in {count} recipes)."
        if hints:
            text += " To compensate: " + "; ".join(h.reason for h in hints) + "."
        return text
|
||||
|
|
@ -1,38 +0,0 @@
|
|||
slug: seitan
|
||||
name: Seitan (Wheat Meat)
|
||||
description: High-protein wheat gluten that mimics the texture of meat. Can be made in bulk and stored in multiple formats.
|
||||
dietary_labels: [vegan, high-protein]
|
||||
base_ingredients:
|
||||
- vital wheat gluten
|
||||
- nutritional yeast
|
||||
- soy sauce
|
||||
- garlic powder
|
||||
- vegetable broth
|
||||
base_method: simmer
|
||||
base_time_minutes: 45
|
||||
yield_formats:
|
||||
fresh:
|
||||
elements: [Structure, Depth, Richness]
|
||||
shelf_days: 5
|
||||
storage: airtight container, refrigerated in broth
|
||||
methods: [saute, braise, grill, stir-fry]
|
||||
texture: chewy, meaty
|
||||
frozen:
|
||||
elements: [Structure, Depth]
|
||||
shelf_days: 90
|
||||
storage: vacuum-sealed freezer bag
|
||||
methods: [thaw then any method]
|
||||
texture: slightly softer after thaw
|
||||
braised:
|
||||
elements: [Structure, Depth, Seasoning]
|
||||
shelf_days: 4
|
||||
storage: covered in braising liquid, refrigerated
|
||||
methods: [serve directly, slice for sandwiches]
|
||||
texture: tender, falling-apart
|
||||
grilled:
|
||||
elements: [Structure, Aroma, Texture]
|
||||
shelf_days: 3
|
||||
storage: refrigerated, uncovered to maintain crust
|
||||
methods: [slice cold, reheat in pan]
|
||||
texture: crisp exterior, chewy interior
|
||||
compatible_styles: [italian, latin, east_asian, eastern_european]
|
||||
|
|
@ -1,28 +0,0 @@
|
|||
slug: tempeh
|
||||
name: Tempeh
|
||||
description: Fermented soybean cake. Dense, nutty, high in protein. Excellent at absorbing marinades.
|
||||
dietary_labels: [vegan, high-protein, fermented]
|
||||
base_ingredients:
|
||||
- tempeh block (store-bought or homemade from soybeans + starter)
|
||||
base_method: steam then marinate
|
||||
base_time_minutes: 20
|
||||
yield_formats:
|
||||
raw:
|
||||
elements: [Structure, Depth, Richness]
|
||||
shelf_days: 7
|
||||
storage: refrigerated in original packaging or wrapped
|
||||
methods: [steam, crumble, slice]
|
||||
texture: dense, firm
|
||||
marinated:
|
||||
elements: [Structure, Depth, Seasoning, Aroma]
|
||||
shelf_days: 5
|
||||
storage: submerged in marinade, refrigerated
|
||||
methods: [bake, pan-fry, grill]
|
||||
texture: chewy, flavor-dense
|
||||
crumbled:
|
||||
elements: [Structure, Depth, Texture]
|
||||
shelf_days: 3
|
||||
storage: refrigerated, use quickly
|
||||
methods: [saute as ground meat substitute, add to tacos or pasta]
|
||||
texture: crumbly, browned bits
|
||||
compatible_styles: [latin, east_asian, mediterranean]
|
||||
|
|
@ -1,34 +0,0 @@
|
|||
slug: tofu_firm
|
||||
name: Firm Tofu
|
||||
description: Pressed soybean curd. Neutral flavor, excellent at absorbing surrounding flavors. Freeze-thaw cycle creates meatier texture.
|
||||
dietary_labels: [vegan, high-protein]
|
||||
base_ingredients:
|
||||
- firm or extra-firm tofu block
|
||||
base_method: press (30 min) then prepare
|
||||
base_time_minutes: 30
|
||||
yield_formats:
|
||||
pressed_raw:
|
||||
elements: [Structure]
|
||||
shelf_days: 5
|
||||
storage: submerged in water, refrigerated, change water daily
|
||||
methods: [cube, slice, crumble]
|
||||
texture: dense, uniform
|
||||
freeze_thawed:
|
||||
elements: [Structure, Texture]
|
||||
shelf_days: 5
|
||||
storage: refrigerated after thawing
|
||||
methods: [squeeze dry, saute, bake]
|
||||
texture: chewy, porous, absorbs marinades deeply
|
||||
baked:
|
||||
elements: [Structure, Texture, Aroma]
|
||||
shelf_days: 4
|
||||
storage: refrigerated, uncovered
|
||||
methods: [add to stir-fry, bowl, salad]
|
||||
texture: crisp exterior, chewy interior
|
||||
silken:
|
||||
elements: [Richness, Structure]
|
||||
shelf_days: 3
|
||||
storage: refrigerated, use within days of opening
|
||||
methods: [blend into sauces, custards, dressings]
|
||||
texture: silky, smooth
|
||||
compatible_styles: [east_asian, mediterranean]
|
||||
|
|
@ -1,13 +0,0 @@
|
|||
style_id: east_asian
|
||||
name: East Asian
|
||||
aromatics: [ginger, scallion, sesame, star anise, five spice, sichuan pepper, lemongrass]
|
||||
depth_sources: [soy sauce, miso, oyster sauce, shiitake, fish sauce, bonito]
|
||||
brightness_sources: [rice vinegar, mirin, citrus zest, ponzu]
|
||||
method_bias:
|
||||
stir_fry: 0.35
|
||||
steam: 0.25
|
||||
braise: 0.20
|
||||
boil: 0.20
|
||||
structure_forms: [dumpling wrapper, thin noodle, rice, bao]
|
||||
seasoning_bias: soy sauce
|
||||
finishing_fat: toasted sesame oil
|
||||
|
|
@ -1,13 +0,0 @@
|
|||
style_id: eastern_european
|
||||
name: Eastern European
|
||||
aromatics: [dill, caraway, marjoram, parsley, horseradish, bay leaf]
|
||||
depth_sources: [sour cream, smoked meats, bacon, dried mushrooms]
|
||||
brightness_sources: [sauerkraut brine, apple cider vinegar, sour cream]
|
||||
method_bias:
|
||||
braise: 0.35
|
||||
boil: 0.30
|
||||
bake: 0.25
|
||||
roast: 0.10
|
||||
structure_forms: [dumpling wrapper, bread dough, stuffed cabbage]
|
||||
seasoning_bias: kosher salt
|
||||
finishing_fat: butter or lard
|
||||
|
|
@ -1,13 +0,0 @@
|
|||
style_id: italian
|
||||
name: Italian
|
||||
aromatics: [basil, oregano, garlic, onion, fennel, rosemary, thyme, sage, marjoram]
|
||||
depth_sources: [parmesan, pecorino, anchovies, canned tomato, porcini mushrooms]
|
||||
brightness_sources: [lemon, white wine, tomato, red wine vinegar]
|
||||
method_bias:
|
||||
braise: 0.30
|
||||
roast: 0.30
|
||||
saute: 0.25
|
||||
simmer: 0.15
|
||||
structure_forms: [pasta, wrapped, layered, risotto]
|
||||
seasoning_bias: sea salt
|
||||
finishing_fat: olive oil
|
||||
|
|
@ -1,13 +0,0 @@
|
|||
style_id: latin
|
||||
name: Latin
|
||||
aromatics: [cumin, chili, cilantro, epazote, mexican oregano, ancho, chipotle, smoked paprika]
|
||||
depth_sources: [dried chilis, smoked peppers, chocolate, achiote]
|
||||
brightness_sources: [lime, tomatillo, brined jalapeño, orange]
|
||||
method_bias:
|
||||
roast: 0.30
|
||||
braise: 0.30
|
||||
fry: 0.25
|
||||
grill: 0.15
|
||||
structure_forms: [wrapped in masa, pastry, stuffed, bowl]
|
||||
seasoning_bias: kosher salt
|
||||
finishing_fat: lard or neutral oil
|
||||
|
|
@ -1,13 +0,0 @@
|
|||
style_id: mediterranean
|
||||
name: Mediterranean
|
||||
aromatics: [oregano, thyme, rosemary, mint, sumac, za'atar, preserved lemon]
|
||||
depth_sources: [tahini, feta, halloumi, dried olives, harissa]
|
||||
brightness_sources: [lemon, pomegranate molasses, yogurt, sumac]
|
||||
method_bias:
|
||||
roast: 0.35
|
||||
grill: 0.30
|
||||
braise: 0.25
|
||||
saute: 0.10
|
||||
structure_forms: [flatbread, stuffed vegetables, grain bowl, mezze plate]
|
||||
seasoning_bias: sea salt
|
||||
finishing_fat: olive oil
|
||||
|
|
@ -27,9 +27,6 @@ LLM_TASK_TYPES: frozenset[str] = frozenset({"expiry_llm_fallback"})
|
|||
VRAM_BUDGETS: dict[str, float] = {
|
||||
# ExpirationPredictor uses a small LLM (16 tokens out, single pass).
|
||||
"expiry_llm_fallback": 2.0,
|
||||
# Recipe LLM (levels 3-4): full recipe generation, ~200-500 tokens out.
|
||||
# Budget assumes a quantized 7B-class model.
|
||||
"recipe_llm": 4.0,
|
||||
}
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -10,7 +10,6 @@ from circuitforge_core.tasks.scheduler import (
|
|||
reset_scheduler, # re-export for tests
|
||||
)
|
||||
|
||||
from app.core.config import settings
|
||||
from app.tasks.runner import LLM_TASK_TYPES, VRAM_BUDGETS, run_task
|
||||
|
||||
|
||||
|
|
@ -21,6 +20,4 @@ def get_scheduler(db_path: Path) -> TaskScheduler:
|
|||
run_task_fn=run_task,
|
||||
task_types=LLM_TASK_TYPES,
|
||||
vram_budgets=VRAM_BUDGETS,
|
||||
coordinator_url=settings.COORDINATOR_URL,
|
||||
service_name="kiwi",
|
||||
)
|
||||
|
|
|
|||
20
app/tiers.py
20
app/tiers.py
|
|
@ -25,8 +25,6 @@ KIWI_FEATURES: dict[str, str] = {
|
|||
"receipt_upload": "free",
|
||||
"expiry_alerts": "free",
|
||||
"export_csv": "free",
|
||||
"leftover_mode": "free", # Rate-limited at API layer, not tier-gated
|
||||
"staple_library": "free",
|
||||
|
||||
# Paid tier
|
||||
"receipt_ocr": "paid", # BYOK-unlockable
|
||||
|
|
@ -34,28 +32,21 @@ KIWI_FEATURES: dict[str, str] = {
|
|||
"expiry_llm_matching": "paid", # BYOK-unlockable
|
||||
"meal_planning": "paid",
|
||||
"dietary_profiles": "paid",
|
||||
"style_picker": "paid",
|
||||
|
||||
# Premium tier
|
||||
"multi_household": "premium",
|
||||
"background_monitoring": "premium",
|
||||
"leftover_mode": "premium",
|
||||
}
|
||||
|
||||
|
||||
def can_use(feature: str, tier: str, has_byok: bool = False) -> bool:
|
||||
"""Return True if the given tier can access the feature.
|
||||
|
||||
The 'local' tier is assigned to dev-bypass and non-cloud sessions —
|
||||
it has unrestricted access to all features.
|
||||
"""
|
||||
if tier == "local":
|
||||
return True
|
||||
"""Return True if the given tier can access the feature."""
|
||||
return _can_use(
|
||||
feature,
|
||||
tier,
|
||||
has_byok=has_byok,
|
||||
_features=KIWI_FEATURES,
|
||||
_byok_unlockable=KIWI_BYOK_UNLOCKABLE,
|
||||
)
|
||||
|
||||
|
||||
|
|
@ -63,12 +54,7 @@ def require_feature(feature: str, tier: str, has_byok: bool = False) -> None:
|
|||
"""Raise ValueError if the tier cannot access the feature."""
|
||||
if not can_use(feature, tier, has_byok):
|
||||
from circuitforge_core.tiers.tiers import tier_label
|
||||
needed = tier_label(
|
||||
feature,
|
||||
has_byok=has_byok,
|
||||
_features=KIWI_FEATURES,
|
||||
_byok_unlockable=KIWI_BYOK_UNLOCKABLE,
|
||||
)
|
||||
needed = tier_label(feature, has_byok=has_byok, _features=KIWI_FEATURES)
|
||||
raise ValueError(
|
||||
f"Feature '{feature}' requires {needed} tier. "
|
||||
f"Current tier: {tier}."
|
||||
|
|
|
|||
|
|
@ -14,9 +14,6 @@ services:
|
|||
CLOUD_MODE: "true"
|
||||
CLOUD_DATA_ROOT: /devl/kiwi-cloud-data
|
||||
# DIRECTUS_JWT_SECRET, HEIMDALL_URL, HEIMDALL_ADMIN_TOKEN — set in .env
|
||||
# DEV ONLY: comma-separated IPs that bypass JWT auth (LAN testing without Caddy).
|
||||
# Production deployments must NOT set this. Leave blank or omit entirely.
|
||||
CLOUD_AUTH_BYPASS_IPS: ${CLOUD_AUTH_BYPASS_IPS:-}
|
||||
volumes:
|
||||
- /devl/kiwi-cloud-data:/devl/kiwi-cloud-data
|
||||
# LLM config — shared with other CF products; read-only in container
|
||||
|
|
|
|||
|
|
@ -1,24 +0,0 @@
|
|||
# compose.override.yml — local dev additions (auto-merged by docker compose)
|
||||
# Not used in cloud or demo stacks (those use compose.cloud.yml / compose.demo.yml directly).
|
||||
|
||||
services:
|
||||
# cf-orch agent sidecar: registers kiwi as a GPU node with the coordinator.
|
||||
# The API scheduler uses COORDINATOR_URL to lease VRAM cooperatively; this
|
||||
# agent makes kiwi's VRAM usage visible on the orchestrator dashboard.
|
||||
cf-orch-agent:
|
||||
image: kiwi-api # reuse local api image — cf-core already installed there
|
||||
network_mode: host
|
||||
env_file: .env
|
||||
environment:
|
||||
# Override coordinator URL here or via .env
|
||||
COORDINATOR_URL: ${COORDINATOR_URL:-http://10.1.10.71:7700}
|
||||
command: >
|
||||
conda run -n kiwi cf-orch agent
|
||||
--coordinator ${COORDINATOR_URL:-http://10.1.10.71:7700}
|
||||
--node-id kiwi
|
||||
--host 0.0.0.0
|
||||
--port 7702
|
||||
--advertise-host ${CF_ORCH_ADVERTISE_HOST:-10.1.10.71}
|
||||
restart: unless-stopped
|
||||
depends_on:
|
||||
- api
|
||||
|
|
@ -14,17 +14,6 @@ server {
|
|||
proxy_set_header X-Forwarded-Proto $http_x_forwarded_proto;
|
||||
# Forward the session header injected by Caddy from cf_session cookie.
|
||||
proxy_set_header X-CF-Session $http_x_cf_session;
|
||||
# Allow image uploads (barcode/receipt photos from phone cameras).
|
||||
client_max_body_size 20m;
|
||||
}
|
||||
|
||||
# When accessed directly (localhost:8515) instead of via Caddy (/kiwi path-strip),
|
||||
# Vite's /kiwi base URL means assets are requested at /kiwi/assets/... but stored
|
||||
# at /assets/... in nginx's root. Alias /kiwi/ → root so direct port access works.
|
||||
# ^~ prevents regex locations from overriding this prefix match for /kiwi/ paths.
|
||||
location ^~ /kiwi/ {
|
||||
alias /usr/share/nginx/html/;
|
||||
try_files $uri $uri/ /index.html;
|
||||
}
|
||||
|
||||
location = /index.html {
|
||||
|
|
|
|||
|
|
@ -9,8 +9,6 @@ server {
|
|||
proxy_pass http://172.17.0.1:8512;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
# Allow image uploads (barcode/receipt photos from phone cameras).
|
||||
client_max_body_size 20m;
|
||||
}
|
||||
|
||||
location = /index.html {
|
||||
|
|
|
|||
|
|
@ -14,14 +14,5 @@ dependencies:
|
|||
- numpy>=1.25
|
||||
- pyzbar>=0.1.9
|
||||
- httpx>=0.27
|
||||
- psutil>=5.9
|
||||
- pydantic>=2.5
|
||||
- PyJWT>=2.8
|
||||
- datasets
|
||||
- huggingface_hub
|
||||
- transformers
|
||||
- sentence-transformers
|
||||
- torch
|
||||
- pyyaml
|
||||
- pandas
|
||||
- pyarrow
|
||||
|
|
|
|||
|
|
@ -3,26 +3,8 @@
|
|||
<head>
|
||||
<meta charset="UTF-8" />
|
||||
<link rel="icon" type="image/svg+xml" href="/vite.svg" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0, viewport-fit=cover" />
|
||||
<title>Kiwi — Pantry Tracker</title>
|
||||
<link rel="preconnect" href="https://fonts.googleapis.com" />
|
||||
<link rel="preconnect" href="https://fonts.gstatic.com" crossorigin />
|
||||
<link
|
||||
href="https://fonts.googleapis.com/css2?family=Fraunces:ital,opsz,wght@0,9..144,300..900;1,9..144,300..900&family=DM+Mono:ital,wght@0,300;0,400;0,500;1,300;1,400;1,500&family=DM+Sans:ital,opsz,wght@0,9..40,300..700;1,9..40,300..700&display=swap"
|
||||
rel="stylesheet"
|
||||
/>
|
||||
<!-- Anti-FOUC: critical layout CSS inline so it's available before the JS bundle.
|
||||
Without this, the sidebar flashes visible on mobile for ~100ms while the
|
||||
bundle hydrates and injects component styles. -->
|
||||
<style>
|
||||
.sidebar { display: none; }
|
||||
.bottom-nav { display: flex; }
|
||||
@media (min-width: 769px) {
|
||||
.sidebar { display: flex; flex-direction: column; }
|
||||
.bottom-nav { display: none; }
|
||||
.app-body { display: flex; flex-direction: column; flex: 1; }
|
||||
}
|
||||
</style>
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||
<title>frontend</title>
|
||||
</head>
|
||||
<body>
|
||||
<div id="app"></div>
|
||||
|
|
|
|||
5
frontend/package-lock.json
generated
5
frontend/package-lock.json
generated
|
|
@ -844,6 +844,7 @@
|
|||
"integrity": "sha512-uWN8YqxXxqFMX2RqGOrumsKeti4LlmIMIyV0lgut4jx7KQBcBiW6vkDtIBvHnHIquwNfJhk8v2OtmO8zXWHfPA==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"undici-types": "~7.16.0"
|
||||
}
|
||||
|
|
@ -1556,6 +1557,7 @@
|
|||
"integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
},
|
||||
|
|
@ -1719,6 +1721,7 @@
|
|||
"integrity": "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==",
|
||||
"devOptional": true,
|
||||
"license": "Apache-2.0",
|
||||
"peer": true,
|
||||
"bin": {
|
||||
"tsc": "bin/tsc",
|
||||
"tsserver": "bin/tsserver"
|
||||
|
|
@ -1740,6 +1743,7 @@
|
|||
"integrity": "sha512-ZWyE8YXEXqJrrSLvYgrRP7p62OziLW7xI5HYGWFzOvupfAlrLvURSzv/FyGyy0eidogEM3ujU+kUG1zuHgb6Ug==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"esbuild": "^0.25.0",
|
||||
"fdir": "^6.5.0",
|
||||
|
|
@ -1821,6 +1825,7 @@
|
|||
"resolved": "https://registry.npmjs.org/vue/-/vue-3.5.22.tgz",
|
||||
"integrity": "sha512-toaZjQ3a/G/mYaLSbV+QsQhIdMo9x5rrqIpYRObsJ6T/J+RyCSFwN2LHNVH9v8uIcljDNa3QzPVdv3Y6b9hAJQ==",
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"@vue/compiler-dom": "3.5.22",
|
||||
"@vue/compiler-sfc": "3.5.22",
|
||||
|
|
|
|||
|
|
@ -1,152 +1,46 @@
|
|||
<template>
|
||||
<div id="app" :class="{ 'sidebar-collapsed': sidebarCollapsed }">
|
||||
|
||||
<!-- Desktop sidebar (hidden on mobile) -->
|
||||
<aside class="sidebar" role="navigation" aria-label="Main navigation">
|
||||
<!-- Wordmark + collapse toggle -->
|
||||
<div class="sidebar-header">
|
||||
<span class="wordmark-kiwi" @click="onWordmarkClick" style="cursor:pointer">Kiwi</span>
|
||||
<button class="sidebar-toggle" @click="sidebarCollapsed = !sidebarCollapsed" :aria-label="sidebarCollapsed ? 'Expand sidebar' : 'Collapse sidebar'">
|
||||
<svg viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" width="18" height="18">
|
||||
<line x1="3" y1="6" x2="21" y2="6"/>
|
||||
<line x1="3" y1="12" x2="21" y2="12"/>
|
||||
<line x1="3" y1="18" x2="21" y2="18"/>
|
||||
</svg>
|
||||
</button>
|
||||
<div id="app">
|
||||
<header class="app-header">
|
||||
<div class="container">
|
||||
<h1>🥝 Kiwi</h1>
|
||||
<p class="tagline">Smart Pantry Tracking & Recipe Suggestions</p>
|
||||
</div>
|
||||
</header>
|
||||
|
||||
<nav class="sidebar-nav">
|
||||
<button :class="['sidebar-item', { active: currentTab === 'inventory' }]" @click="switchTab('inventory')">
|
||||
<svg class="nav-icon" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="1.8" stroke-linecap="round" stroke-linejoin="round">
|
||||
<rect x="3" y="4" width="18" height="4" rx="1"/>
|
||||
<rect x="3" y="11" width="18" height="4" rx="1"/>
|
||||
<rect x="3" y="18" width="18" height="3" rx="1"/>
|
||||
</svg>
|
||||
<span class="sidebar-label">Pantry</span>
|
||||
</button>
|
||||
|
||||
<button :class="['sidebar-item', { active: currentTab === 'receipts' }]" @click="switchTab('receipts')">
|
||||
<svg class="nav-icon" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="1.8" stroke-linecap="round" stroke-linejoin="round">
|
||||
<path d="M4 4v16l2-1.5 2 1.5 2-1.5 2 1.5 2-1.5 2 1.5 2-1.5V4"/>
|
||||
<line x1="8" y1="9" x2="16" y2="9"/>
|
||||
<line x1="8" y1="13" x2="14" y2="13"/>
|
||||
</svg>
|
||||
<span class="sidebar-label">Receipts</span>
|
||||
</button>
|
||||
|
||||
<button :class="['sidebar-item', { active: currentTab === 'recipes' }]" @click="switchTab('recipes')">
|
||||
<svg class="nav-icon" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="1.8" stroke-linecap="round" stroke-linejoin="round">
|
||||
<path d="M12 2C9 2 7 5 7 8c0 2.5 1 4.5 3 5.5V20h4v-6.5c2-1 3-3 3-5.5 0-3-2-6-5-6z"/>
|
||||
<line x1="9" y1="12" x2="15" y2="12"/>
|
||||
</svg>
|
||||
<span class="sidebar-label">Recipes</span>
|
||||
</button>
|
||||
|
||||
<button :class="['sidebar-item', { active: currentTab === 'settings' }]" @click="switchTab('settings')">
|
||||
<svg class="nav-icon" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="1.8" stroke-linecap="round" stroke-linejoin="round">
|
||||
<circle cx="12" cy="12" r="3"/>
|
||||
<path d="M12 1v3M12 20v3M4.22 4.22l2.12 2.12M17.66 17.66l2.12 2.12M1 12h3M20 12h3M4.22 19.78l2.12-2.12M17.66 6.34l2.12-2.12"/>
|
||||
</svg>
|
||||
<span class="sidebar-label">Settings</span>
|
||||
</button>
|
||||
</nav>
|
||||
</aside>
|
||||
|
||||
<!-- Main area: header + content -->
|
||||
<div class="app-body">
|
||||
<!-- Mobile-only header -->
|
||||
<header class="app-header">
|
||||
<div class="header-inner">
|
||||
<span class="wordmark-kiwi">Kiwi</span>
|
||||
<main class="app-main">
|
||||
<div class="container">
|
||||
<!-- Tabs -->
|
||||
<div class="tabs">
|
||||
<button
|
||||
:class="['tab', { active: currentTab === 'inventory' }]"
|
||||
@click="switchTab('inventory')"
|
||||
>
|
||||
🏪 Inventory
|
||||
</button>
|
||||
<button
|
||||
:class="['tab', { active: currentTab === 'receipts' }]"
|
||||
@click="switchTab('receipts')"
|
||||
>
|
||||
🧾 Receipts
|
||||
</button>
|
||||
</div>
|
||||
</header>
|
||||
|
||||
<main class="app-main">
|
||||
<div class="container">
|
||||
<div v-show="currentTab === 'inventory'" class="tab-content fade-in">
|
||||
<InventoryList />
|
||||
</div>
|
||||
<div v-show="currentTab === 'receipts'" class="tab-content fade-in">
|
||||
<ReceiptsView />
|
||||
</div>
|
||||
<div v-show="currentTab === 'recipes'" class="tab-content fade-in">
|
||||
<RecipesView />
|
||||
</div>
|
||||
<div v-show="currentTab === 'settings'" class="tab-content fade-in">
|
||||
<SettingsView />
|
||||
</div>
|
||||
<!-- Tab Content -->
|
||||
<div v-show="currentTab === 'inventory'" class="tab-content">
|
||||
<InventoryList />
|
||||
</div>
|
||||
</main>
|
||||
</div>
|
||||
|
||||
<!-- Mobile bottom nav only -->
|
||||
<nav class="bottom-nav" role="navigation" aria-label="Main navigation">
|
||||
<button :class="['nav-item', { active: currentTab === 'inventory' }]" @click="switchTab('inventory')" aria-label="Pantry">
|
||||
<svg class="nav-icon" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="1.8" stroke-linecap="round" stroke-linejoin="round">
|
||||
<rect x="3" y="4" width="18" height="4" rx="1"/>
|
||||
<rect x="3" y="11" width="18" height="4" rx="1"/>
|
||||
<rect x="3" y="18" width="18" height="3" rx="1"/>
|
||||
</svg>
|
||||
<span class="nav-label">Pantry</span>
|
||||
</button>
|
||||
<button :class="['nav-item', { active: currentTab === 'receipts' }]" @click="switchTab('receipts')" aria-label="Receipts">
|
||||
<svg class="nav-icon" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="1.8" stroke-linecap="round" stroke-linejoin="round">
|
||||
<path d="M4 4v16l2-1.5 2 1.5 2-1.5 2 1.5 2-1.5 2 1.5 2-1.5V4"/>
|
||||
<line x1="8" y1="9" x2="16" y2="9"/>
|
||||
<line x1="8" y1="13" x2="14" y2="13"/>
|
||||
</svg>
|
||||
<span class="nav-label">Receipts</span>
|
||||
</button>
|
||||
<button :class="['nav-item', { active: currentTab === 'recipes' }]" @click="switchTab('recipes')" aria-label="Recipes">
|
||||
<svg class="nav-icon" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="1.8" stroke-linecap="round" stroke-linejoin="round">
|
||||
<path d="M12 2C9 2 7 5 7 8c0 2.5 1 4.5 3 5.5V20h4v-6.5c2-1 3-3 3-5.5 0-3-2-6-5-6z"/>
|
||||
<line x1="9" y1="12" x2="15" y2="12"/>
|
||||
</svg>
|
||||
<span class="nav-label">Recipes</span>
|
||||
</button>
|
||||
<button :class="['nav-item', { active: currentTab === 'settings' }]" @click="switchTab('settings')" aria-label="Settings">
|
||||
<svg class="nav-icon" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="1.8" stroke-linecap="round" stroke-linejoin="round">
|
||||
<circle cx="12" cy="12" r="3"/>
|
||||
<path d="M12 1v3M12 20v3M4.22 4.22l2.12 2.12M17.66 17.66l2.12 2.12M1 12h3M20 12h3M4.22 19.78l2.12-2.12M17.66 6.34l2.12-2.12"/>
|
||||
</svg>
|
||||
<span class="nav-label">Settings</span>
|
||||
</button>
|
||||
</nav>
|
||||
|
||||
<!-- Feedback FAB — hidden when FORGEJO_API_TOKEN not configured -->
|
||||
<FeedbackButton :current-tab="currentTab" />
|
||||
|
||||
<!-- Easter egg: Kiwi bird sprite — triggered by typing "kiwi" -->
|
||||
<Transition name="kiwi-fade">
|
||||
<div v-if="kiwiVisible" class="kiwi-bird-stage" aria-hidden="true">
|
||||
<div class="kiwi-bird" :class="kiwiDirection">
|
||||
<!-- Kiwi bird SVG — side profile, facing left by default (rtl walk) -->
|
||||
<svg class="kiwi-svg" viewBox="0 0 64 64" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<!-- Body — plump oval -->
|
||||
<ellipse cx="30" cy="38" rx="18" ry="15" fill="#8B6914" />
|
||||
<!-- Head -->
|
||||
<ellipse cx="46" cy="26" rx="10" ry="9" fill="#6B4F10" />
|
||||
<!-- Long beak -->
|
||||
<path d="M54 25 Q66 24 70 25 Q66 27 54 27Z" fill="#C8A96E" />
|
||||
<!-- Eye -->
|
||||
<circle cx="49" cy="23" r="2" fill="#1a1a1a" />
|
||||
<circle cx="49.7" cy="22.3" r="0.6" fill="white" />
|
||||
<!-- Wing texture lines -->
|
||||
<path d="M18 32 Q24 28 34 30" stroke="#6B4F10" stroke-width="1.2" stroke-linecap="round" />
|
||||
<path d="M16 37 Q22 33 32 35" stroke="#6B4F10" stroke-width="1.2" stroke-linecap="round" />
|
||||
<!-- Legs -->
|
||||
<line x1="24" y1="52" x2="22" y2="60" stroke="#A07820" stroke-width="2.5" stroke-linecap="round" />
|
||||
<line x1="34" y1="52" x2="36" y2="60" stroke="#A07820" stroke-width="2.5" stroke-linecap="round" />
|
||||
<!-- Feet -->
|
||||
<path d="M18 60 L22 60 L24 57" stroke="#A07820" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" fill="none" />
|
||||
<path d="M32 60 L36 60 L38 57" stroke="#A07820" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" fill="none" />
|
||||
<!-- Feather texture -->
|
||||
<path d="M22 38 Q28 34 36 36" stroke="#A07820" stroke-width="0.8" stroke-linecap="round" />
|
||||
<path d="M20 43 Q26 39 34 41" stroke="#A07820" stroke-width="0.8" stroke-linecap="round" />
|
||||
</svg>
|
||||
<div v-show="currentTab === 'receipts'" class="tab-content">
|
||||
<ReceiptsView />
|
||||
</div>
|
||||
</div>
|
||||
</Transition>
|
||||
</main>
|
||||
|
||||
<footer class="app-footer">
|
||||
<div class="container">
|
||||
<p>© 2026 CircuitForge LLC</p>
|
||||
</div>
|
||||
</footer>
|
||||
</div>
|
||||
</template>
|
||||
|
||||
|
|
@ -154,39 +48,11 @@
|
|||
import { ref } from 'vue'
|
||||
import InventoryList from './components/InventoryList.vue'
|
||||
import ReceiptsView from './components/ReceiptsView.vue'
|
||||
import RecipesView from './components/RecipesView.vue'
|
||||
import SettingsView from './components/SettingsView.vue'
|
||||
import FeedbackButton from './components/FeedbackButton.vue'
|
||||
import { useInventoryStore } from './stores/inventory'
|
||||
import { useEasterEggs } from './composables/useEasterEggs'
|
||||
|
||||
type Tab = 'inventory' | 'receipts' | 'recipes' | 'settings'
|
||||
const currentTab = ref<'inventory' | 'receipts'>('inventory')
|
||||
|
||||
const currentTab = ref<Tab>('inventory')
|
||||
const sidebarCollapsed = ref(false)
|
||||
const inventoryStore = useInventoryStore()
|
||||
const { kiwiVisible, kiwiDirection } = useEasterEggs()
|
||||
|
||||
// Wordmark click counter for chef mode easter egg
|
||||
const wordmarkClicks = ref(0)
|
||||
let wordmarkTimer: ReturnType<typeof setTimeout> | null = null
|
||||
function onWordmarkClick() {
|
||||
wordmarkClicks.value++
|
||||
if (wordmarkTimer) clearTimeout(wordmarkTimer)
|
||||
if (wordmarkClicks.value >= 5) {
|
||||
wordmarkClicks.value = 0
|
||||
document.querySelector('.wordmark-kiwi')?.classList.add('chef-spin')
|
||||
setTimeout(() => document.querySelector('.wordmark-kiwi')?.classList.remove('chef-spin'), 800)
|
||||
} else {
|
||||
wordmarkTimer = setTimeout(() => { wordmarkClicks.value = 0 }, 1200)
|
||||
}
|
||||
}
|
||||
|
||||
async function switchTab(tab: Tab) {
|
||||
function switchTab(tab: 'inventory' | 'receipts') {
|
||||
currentTab.value = tab
|
||||
if (tab === 'recipes' && inventoryStore.items.length === 0) {
|
||||
await inventoryStore.fetchItems()
|
||||
}
|
||||
}
|
||||
</script>
|
||||
|
||||
|
|
@ -198,326 +64,136 @@ async function switchTab(tab: Tab) {
|
|||
}
|
||||
|
||||
body {
|
||||
font-family: var(--font-body);
|
||||
font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', 'Roboto', 'Oxygen',
|
||||
'Ubuntu', 'Cantarell', 'Fira Sans', 'Droid Sans', 'Helvetica Neue',
|
||||
sans-serif;
|
||||
-webkit-font-smoothing: antialiased;
|
||||
-moz-osx-font-smoothing: grayscale;
|
||||
background: var(--color-bg-primary);
|
||||
color: var(--color-text-primary);
|
||||
}
|
||||
|
||||
.wordmark-kiwi {
|
||||
font-family: var(--font-display);
|
||||
font-style: italic;
|
||||
font-weight: 700;
|
||||
color: var(--color-primary);
|
||||
letter-spacing: -0.01em;
|
||||
line-height: 1;
|
||||
white-space: nowrap;
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
/* ============================================
|
||||
MOBILE LAYOUT (< 769px)
|
||||
sidebar hidden, bottom nav visible
|
||||
============================================ */
|
||||
#app {
|
||||
min-height: 100vh;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
}
|
||||
|
||||
.sidebar { display: none; }
|
||||
.app-body { display: contents; }
|
||||
.container {
|
||||
max-width: 1400px;
|
||||
margin: 0 auto;
|
||||
padding: 0 20px;
|
||||
}
|
||||
|
||||
.app-header {
|
||||
background: var(--gradient-header);
|
||||
border-bottom: 1px solid var(--color-border);
|
||||
padding: var(--spacing-sm) var(--spacing-md);
|
||||
position: sticky;
|
||||
top: 0;
|
||||
z-index: 100;
|
||||
backdrop-filter: blur(8px);
|
||||
background: var(--gradient-primary);
|
||||
color: white;
|
||||
padding: var(--spacing-xl) 0;
|
||||
box-shadow: var(--shadow-md);
|
||||
}
|
||||
|
||||
.header-inner {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
min-height: 44px;
|
||||
.app-header h1 {
|
||||
font-size: 32px;
|
||||
margin-bottom: 5px;
|
||||
}
|
||||
|
||||
.header-inner .wordmark-kiwi { font-size: 24px; }
|
||||
.app-header .tagline {
|
||||
font-size: 16px;
|
||||
opacity: 0.9;
|
||||
}
|
||||
|
||||
.app-main {
|
||||
flex: 1;
|
||||
padding: var(--spacing-md) 0 var(--spacing-xl);
|
||||
/* Clear fixed bottom nav — env() gives extra room for iPhone home bar */
|
||||
padding-bottom: calc(68px + env(safe-area-inset-bottom, 0px));
|
||||
padding: 20px 0;
|
||||
}
|
||||
|
||||
.container {
|
||||
margin: 0 auto;
|
||||
padding: 0 var(--spacing-md);
|
||||
}
|
||||
|
||||
.tab-content { min-height: 0; }
|
||||
|
||||
/* ---- Bottom nav ---- */
|
||||
.bottom-nav {
|
||||
position: fixed;
|
||||
bottom: 0;
|
||||
left: 0;
|
||||
right: 0;
|
||||
z-index: 200;
|
||||
.app-footer {
|
||||
background: var(--color-bg-elevated);
|
||||
border-top: 1px solid var(--color-border);
|
||||
display: flex;
|
||||
align-items: stretch;
|
||||
padding-bottom: env(safe-area-inset-bottom, 0);
|
||||
box-shadow: 0 -4px 16px rgba(0, 0, 0, 0.25);
|
||||
}
|
||||
|
||||
.nav-item {
|
||||
flex: 1;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
gap: 3px;
|
||||
padding: 8px 4px 10px;
|
||||
border: none;
|
||||
background: transparent;
|
||||
color: var(--color-text-muted);
|
||||
cursor: pointer;
|
||||
transition: color 0.18s ease, background 0.18s ease;
|
||||
border-radius: 0;
|
||||
position: relative;
|
||||
}
|
||||
|
||||
.nav-item::before {
|
||||
content: '';
|
||||
position: absolute;
|
||||
top: 0;
|
||||
left: 20%;
|
||||
right: 20%;
|
||||
height: 2px;
|
||||
background: var(--color-primary);
|
||||
border-radius: 0 0 2px 2px;
|
||||
transform: scaleX(0);
|
||||
transition: transform 0.18s ease;
|
||||
}
|
||||
|
||||
.nav-item:hover {
|
||||
color: var(--color-text-secondary);
|
||||
background: rgba(232, 168, 32, 0.06);
|
||||
transform: none;
|
||||
border-color: transparent;
|
||||
padding: var(--spacing-lg) 0;
|
||||
text-align: center;
|
||||
margin-top: var(--spacing-xl);
|
||||
border-top: 1px solid var(--color-border);
|
||||
}
|
||||
|
||||
.nav-item.active { color: var(--color-primary); }
|
||||
.nav-item.active::before { transform: scaleX(1); }
|
||||
.app-footer p {
|
||||
font-size: var(--font-size-sm);
|
||||
opacity: 0.8;
|
||||
}
|
||||
|
||||
.nav-icon { width: 22px; height: 22px; flex-shrink: 0; }
|
||||
/* Tabs */
|
||||
.tabs {
|
||||
display: flex;
|
||||
gap: 10px;
|
||||
margin-bottom: 20px;
|
||||
}
|
||||
|
||||
.nav-label {
|
||||
font-family: var(--font-body);
|
||||
font-size: 10px;
|
||||
.tab {
|
||||
background: rgba(255, 255, 255, 0.2);
|
||||
color: white;
|
||||
border: none;
|
||||
padding: 15px 30px;
|
||||
font-size: 16px;
|
||||
border-radius: 8px;
|
||||
cursor: pointer;
|
||||
transition: all 0.3s;
|
||||
}
|
||||
|
||||
.tab:hover {
|
||||
background: rgba(255, 255, 255, 0.3);
|
||||
}
|
||||
|
||||
.tab.active {
|
||||
background: var(--color-bg-card);
|
||||
color: var(--color-primary);
|
||||
font-weight: 600;
|
||||
letter-spacing: 0.04em;
|
||||
text-transform: uppercase;
|
||||
line-height: 1;
|
||||
}
|
||||
|
||||
.tab-content {
|
||||
animation: fadeIn 0.3s;
|
||||
}
|
||||
|
||||
@keyframes fadeIn {
|
||||
from { opacity: 0; }
|
||||
to { opacity: 1; }
|
||||
}
|
||||
|
||||
/* Mobile Responsive Breakpoints */
|
||||
@media (max-width: 480px) {
|
||||
.container { padding: 0 var(--spacing-sm); }
|
||||
.app-main {
|
||||
padding: var(--spacing-sm) 0 var(--spacing-lg);
|
||||
padding-bottom: calc(68px + env(safe-area-inset-bottom, 0px));
|
||||
}
|
||||
}
|
||||
|
||||
/* ============================================
|
||||
DESKTOP LAYOUT (≥ 769px)
|
||||
sidebar visible, bottom nav hidden
|
||||
============================================ */
|
||||
@media (min-width: 769px) {
|
||||
.bottom-nav { display: none; }
|
||||
|
||||
#app {
|
||||
flex-direction: row;
|
||||
padding-bottom: 0;
|
||||
min-height: 100vh;
|
||||
}
|
||||
|
||||
/* ---- Sidebar ---- */
|
||||
.sidebar {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
width: 200px;
|
||||
min-height: 100vh;
|
||||
background: var(--color-bg-elevated);
|
||||
border-right: 1px solid var(--color-border);
|
||||
position: sticky;
|
||||
top: 0;
|
||||
flex-shrink: 0;
|
||||
transition: width 0.22s cubic-bezier(0.4, 0, 0.2, 1);
|
||||
overflow: hidden;
|
||||
z-index: 100;
|
||||
}
|
||||
|
||||
.sidebar-collapsed .sidebar {
|
||||
width: 56px;
|
||||
}
|
||||
|
||||
.sidebar-header {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: space-between;
|
||||
padding: var(--spacing-md) var(--spacing-md) var(--spacing-sm);
|
||||
min-height: 56px;
|
||||
gap: var(--spacing-sm);
|
||||
}
|
||||
|
||||
.sidebar-header .wordmark-kiwi {
|
||||
font-size: 22px;
|
||||
opacity: 1;
|
||||
transition: opacity 0.15s ease, width 0.22s ease;
|
||||
flex-shrink: 0;
|
||||
}
|
||||
|
||||
.sidebar-collapsed .sidebar-header .wordmark-kiwi {
|
||||
opacity: 0;
|
||||
width: 0;
|
||||
pointer-events: none;
|
||||
}
|
||||
|
||||
.sidebar-toggle {
|
||||
background: transparent;
|
||||
border: none;
|
||||
color: var(--color-text-muted);
|
||||
cursor: pointer;
|
||||
padding: 6px;
|
||||
border-radius: var(--radius-md);
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
flex-shrink: 0;
|
||||
transition: color 0.15s, background 0.15s;
|
||||
}
|
||||
|
||||
.sidebar-toggle:hover {
|
||||
color: var(--color-text-primary);
|
||||
background: var(--color-bg-secondary);
|
||||
transform: none;
|
||||
border-color: transparent;
|
||||
}
|
||||
|
||||
.sidebar-nav {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 2px;
|
||||
padding: var(--spacing-sm);
|
||||
}
|
||||
|
||||
.sidebar-item {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: var(--spacing-sm);
|
||||
padding: 10px var(--spacing-sm);
|
||||
border: none;
|
||||
border-radius: var(--radius-md);
|
||||
background: transparent;
|
||||
color: var(--color-text-muted);
|
||||
cursor: pointer;
|
||||
transition: color 0.15s, background 0.15s;
|
||||
white-space: nowrap;
|
||||
width: 100%;
|
||||
text-align: left;
|
||||
}
|
||||
|
||||
.sidebar-item:hover {
|
||||
color: var(--color-text-primary);
|
||||
background: var(--color-bg-secondary);
|
||||
transform: none;
|
||||
border-color: transparent;
|
||||
}
|
||||
|
||||
.sidebar-item.active {
|
||||
color: var(--color-primary);
|
||||
background: color-mix(in srgb, var(--color-primary) 10%, transparent);
|
||||
}
|
||||
|
||||
.sidebar-item .nav-icon { width: 20px; height: 20px; flex-shrink: 0; }
|
||||
|
||||
.sidebar-label {
|
||||
font-size: var(--font-size-sm);
|
||||
font-weight: 600;
|
||||
opacity: 1;
|
||||
transition: opacity 0.12s ease;
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
.sidebar-collapsed .sidebar-label {
|
||||
opacity: 0;
|
||||
width: 0;
|
||||
pointer-events: none;
|
||||
}
|
||||
|
||||
/* ---- Main body ---- */
|
||||
.app-body {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
flex: 1;
|
||||
min-width: 0; /* prevent overflow */
|
||||
contents: unset;
|
||||
}
|
||||
|
||||
.app-header { display: none; } /* wordmark lives in sidebar on desktop */
|
||||
|
||||
/* Override style.css #app max-width so sidebar spans full viewport */
|
||||
#app {
|
||||
max-width: none;
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
.app-main {
|
||||
flex: 1;
|
||||
padding: var(--spacing-xl) 0;
|
||||
}
|
||||
|
||||
.container {
|
||||
max-width: 860px;
|
||||
padding: 0 var(--spacing-lg);
|
||||
padding: 0 12px;
|
||||
}
|
||||
|
||||
.app-header h1 {
|
||||
font-size: 24px;
|
||||
}
|
||||
|
||||
.app-header .tagline {
|
||||
font-size: 14px;
|
||||
}
|
||||
|
||||
.tabs {
|
||||
gap: 8px;
|
||||
}
|
||||
|
||||
.tab {
|
||||
padding: 12px 20px;
|
||||
font-size: 14px;
|
||||
flex: 1;
|
||||
}
|
||||
}
|
||||
|
||||
@media (min-width: 1200px) {
|
||||
@media (min-width: 481px) and (max-width: 768px) {
|
||||
.container {
|
||||
max-width: 960px;
|
||||
padding: 0 var(--spacing-xl);
|
||||
padding: 0 16px;
|
||||
}
|
||||
}
|
||||
|
||||
/* Easter egg: wordmark spin on 5× click */
|
||||
@keyframes chefSpin {
|
||||
0% { transform: rotate(0deg) scale(1); }
|
||||
30% { transform: rotate(180deg) scale(1.3); }
|
||||
60% { transform: rotate(340deg) scale(1.1); }
|
||||
100% { transform: rotate(360deg) scale(1); }
|
||||
}
|
||||
.app-header h1 {
|
||||
font-size: 28px;
|
||||
}
|
||||
|
||||
.wordmark-kiwi.chef-spin {
|
||||
display: inline-block;
|
||||
animation: chefSpin 0.8s cubic-bezier(0.34, 1.56, 0.64, 1) forwards;
|
||||
}
|
||||
|
||||
/* Kiwi bird transition */
|
||||
.kiwi-fade-enter-active,
|
||||
.kiwi-fade-leave-active {
|
||||
transition: opacity 0.4s ease;
|
||||
}
|
||||
|
||||
.kiwi-fade-enter-from,
|
||||
.kiwi-fade-leave-to {
|
||||
opacity: 0;
|
||||
.tab {
|
||||
padding: 14px 25px;
|
||||
}
|
||||
}
|
||||
</style>
|
||||
|
|
|
|||
|
|
@ -10,8 +10,8 @@
|
|||
<div class="form-group">
|
||||
<label>Product</label>
|
||||
<div class="product-info">
|
||||
<strong>{{ item.product_name || 'Unknown Product' }}</strong>
|
||||
<span v-if="item.category" class="brand">{{ item.category }}</span>
|
||||
<strong>{{ item.product.name }}</strong>
|
||||
<span v-if="item.product.brand" class="brand">({{ item.product.brand }})</span>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
|
|
@ -228,183 +228,160 @@ function getExpiryHint(): string {
|
|||
left: 0;
|
||||
right: 0;
|
||||
bottom: 0;
|
||||
background: rgba(0, 0, 0, 0.6);
|
||||
background: rgba(0, 0, 0, 0.5);
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
z-index: 1000;
|
||||
backdrop-filter: blur(4px);
|
||||
}
|
||||
|
||||
.modal-content {
|
||||
background: var(--color-bg-card);
|
||||
border-radius: var(--radius-xl);
|
||||
border-radius: var(--radius-lg);
|
||||
width: 90%;
|
||||
max-width: 600px;
|
||||
max-height: 90vh;
|
||||
overflow-y: auto;
|
||||
box-shadow: var(--shadow-xl);
|
||||
border: 1px solid var(--color-border);
|
||||
box-shadow: 0 4px 16px rgba(0, 0, 0, 0.2);
|
||||
}
|
||||
|
||||
.modal-header {
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
align-items: center;
|
||||
padding: var(--spacing-lg) var(--spacing-lg) var(--spacing-md);
|
||||
border-bottom: 1px solid var(--color-border);
|
||||
padding: 20px;
|
||||
border-bottom: 1px solid #eee;
|
||||
}
|
||||
|
||||
.modal-header h2 {
|
||||
margin: 0;
|
||||
font-size: var(--font-size-xl);
|
||||
font-family: var(--font-display);
|
||||
font-style: italic;
|
||||
color: var(--color-text-primary);
|
||||
}
|
||||
|
||||
.close-btn {
|
||||
background: none;
|
||||
border: none;
|
||||
font-size: 28px;
|
||||
color: var(--color-text-muted);
|
||||
font-size: 32px;
|
||||
color: #999;
|
||||
cursor: pointer;
|
||||
padding: 0;
|
||||
width: 32px;
|
||||
height: 32px;
|
||||
line-height: 1;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
border-radius: var(--radius-md);
|
||||
transition: color 0.18s, background 0.18s;
|
||||
}
|
||||
|
||||
.close-btn:hover {
|
||||
color: var(--color-text-primary);
|
||||
background: var(--color-bg-elevated);
|
||||
}
|
||||
|
||||
.edit-form {
|
||||
padding: var(--spacing-lg);
|
||||
padding: 20px;
|
||||
}
|
||||
|
||||
.form-group {
|
||||
margin-bottom: var(--spacing-md);
|
||||
margin-bottom: 20px;
|
||||
}
|
||||
|
||||
/* Using .form-row from theme.css */
|
||||
|
||||
.form-group label {
|
||||
display: block;
|
||||
margin-bottom: var(--spacing-xs);
|
||||
margin-bottom: 8px;
|
||||
font-weight: 600;
|
||||
color: var(--color-text-secondary);
|
||||
font-size: var(--font-size-xs);
|
||||
text-transform: uppercase;
|
||||
letter-spacing: 0.06em;
|
||||
color: var(--color-text-primary);
|
||||
font-size: var(--font-size-sm);
|
||||
}
|
||||
|
||||
.form-input {
|
||||
width: 100%;
|
||||
padding: var(--spacing-sm) var(--spacing-md);
|
||||
padding: 10px;
|
||||
border: 1px solid var(--color-border);
|
||||
border-radius: var(--radius-md);
|
||||
border-radius: var(--radius-sm);
|
||||
font-size: var(--font-size-sm);
|
||||
background: var(--color-bg-input);
|
||||
color: var(--color-text-primary);
|
||||
font-family: var(--font-body);
|
||||
transition: border-color 0.18s, box-shadow 0.18s;
|
||||
box-sizing: border-box;
|
||||
}
|
||||
|
||||
.form-input:focus {
|
||||
outline: none;
|
||||
border-color: var(--color-primary);
|
||||
box-shadow: 0 0 0 3px var(--color-warning-bg);
|
||||
border-color: #2196F3;
|
||||
box-shadow: 0 0 0 2px rgba(33, 150, 243, 0.1);
|
||||
}
|
||||
|
||||
.form-input.expiry-expired {
|
||||
border-color: var(--color-error);
|
||||
border-color: #f44336;
|
||||
}
|
||||
|
||||
.form-input.expiry-soon {
|
||||
border-color: var(--color-error-light);
|
||||
border-color: #ff5722;
|
||||
}
|
||||
|
||||
.form-input.expiry-warning {
|
||||
border-color: var(--color-warning);
|
||||
border-color: #ff9800;
|
||||
}
|
||||
|
||||
.form-input.expiry-good {
|
||||
border-color: var(--color-success);
|
||||
border-color: #4CAF50;
|
||||
}
|
||||
|
||||
textarea.form-input {
|
||||
resize: vertical;
|
||||
font-family: var(--font-body);
|
||||
font-family: inherit;
|
||||
}
|
||||
|
||||
.product-info {
|
||||
padding: var(--spacing-sm) var(--spacing-md);
|
||||
background: var(--color-bg-secondary);
|
||||
border-radius: var(--radius-md);
|
||||
padding: 10px;
|
||||
background: #f5f5f5;
|
||||
border-radius: var(--radius-sm);
|
||||
font-size: var(--font-size-sm);
|
||||
border: 1px solid var(--color-border);
|
||||
}
|
||||
|
||||
.product-info .brand {
|
||||
color: var(--color-text-secondary);
|
||||
margin-left: var(--spacing-sm);
|
||||
margin-left: 8px;
|
||||
}
|
||||
|
||||
.expiry-hint {
|
||||
display: block;
|
||||
margin-top: var(--spacing-xs);
|
||||
margin-top: 5px;
|
||||
font-size: var(--font-size-xs);
|
||||
color: var(--color-text-secondary);
|
||||
}
|
||||
|
||||
.error-message {
|
||||
background: var(--color-error-bg);
|
||||
color: var(--color-error-light);
|
||||
border: 1px solid var(--color-error-border);
|
||||
padding: var(--spacing-sm) var(--spacing-md);
|
||||
border-radius: var(--radius-md);
|
||||
margin-bottom: var(--spacing-md);
|
||||
background: #ffebee;
|
||||
color: #c62828;
|
||||
padding: 12px;
|
||||
border-radius: var(--radius-sm);
|
||||
margin-bottom: 15px;
|
||||
font-size: var(--font-size-sm);
|
||||
}
|
||||
|
||||
.form-actions {
|
||||
display: flex;
|
||||
gap: var(--spacing-sm);
|
||||
gap: 10px;
|
||||
justify-content: flex-end;
|
||||
margin-top: var(--spacing-lg);
|
||||
padding-top: var(--spacing-md);
|
||||
border-top: 1px solid var(--color-border);
|
||||
margin-top: 25px;
|
||||
padding-top: 20px;
|
||||
border-top: 1px solid #eee;
|
||||
}
|
||||
|
||||
.btn-cancel,
|
||||
.btn-save {
|
||||
padding: var(--spacing-sm) var(--spacing-lg);
|
||||
padding: 10px 24px;
|
||||
border: none;
|
||||
border-radius: var(--radius-md);
|
||||
border-radius: var(--radius-sm);
|
||||
font-size: var(--font-size-sm);
|
||||
font-weight: 600;
|
||||
font-family: var(--font-body);
|
||||
cursor: pointer;
|
||||
transition: all 0.18s;
|
||||
transition: background 0.2s;
|
||||
}
|
||||
|
||||
.btn-cancel {
|
||||
background: var(--color-bg-elevated);
|
||||
color: var(--color-text-secondary);
|
||||
border: 1px solid var(--color-border);
|
||||
background: #f5f5f5;
|
||||
color: var(--color-text-primary);
|
||||
}
|
||||
|
||||
.btn-cancel:hover {
|
||||
background: var(--color-bg-primary);
|
||||
color: var(--color-text-primary);
|
||||
background: #e0e0e0;
|
||||
}
|
||||
|
||||
.btn-save {
|
||||
|
|
@ -417,7 +394,7 @@ textarea.form-input {
|
|||
}
|
||||
|
||||
.btn-save:disabled {
|
||||
opacity: 0.45;
|
||||
background: var(--color-text-muted);
|
||||
cursor: not-allowed;
|
||||
}
|
||||
|
||||
|
|
@ -431,7 +408,7 @@ textarea.form-input {
|
|||
}
|
||||
|
||||
.modal-header {
|
||||
padding: var(--spacing-md);
|
||||
padding: 15px;
|
||||
}
|
||||
|
||||
.modal-header h2 {
|
||||
|
|
@ -439,24 +416,23 @@ textarea.form-input {
|
|||
}
|
||||
|
||||
.edit-form {
|
||||
padding: var(--spacing-md);
|
||||
padding: 15px;
|
||||
}
|
||||
|
||||
.form-group {
|
||||
margin-bottom: var(--spacing-sm);
|
||||
margin-bottom: 15px;
|
||||
}
|
||||
|
||||
/* Form actions stack on very small screens */
|
||||
.form-actions {
|
||||
flex-direction: column-reverse;
|
||||
gap: var(--spacing-sm);
|
||||
gap: 10px;
|
||||
}
|
||||
|
||||
.btn-cancel,
|
||||
.btn-save {
|
||||
width: 100%;
|
||||
padding: var(--spacing-md);
|
||||
text-align: center;
|
||||
padding: 12px;
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -464,5 +440,13 @@ textarea.form-input {
|
|||
.modal-content {
|
||||
width: 92%;
|
||||
}
|
||||
|
||||
.modal-header {
|
||||
padding: 18px;
|
||||
}
|
||||
|
||||
.edit-form {
|
||||
padding: 18px;
|
||||
}
|
||||
}
|
||||
</style>
|
||||
|
|
|
|||
|
|
@ -1,413 +0,0 @@
|
|||
<template>
|
||||
<!-- Floating trigger button -->
|
||||
<button
|
||||
v-if="enabled"
|
||||
class="feedback-fab"
|
||||
@click="open = true"
|
||||
aria-label="Send feedback or report a bug"
|
||||
title="Send feedback or report a bug"
|
||||
>
|
||||
<svg class="feedback-fab-icon" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="1.8" stroke-linecap="round" stroke-linejoin="round">
|
||||
<path d="M21 15a2 2 0 01-2 2H7l-4 4V5a2 2 0 012-2h14a2 2 0 012 2z"/>
|
||||
</svg>
|
||||
<span class="feedback-fab-label">Feedback</span>
|
||||
</button>
|
||||
|
||||
<!-- Modal — teleported to body to avoid z-index / overflow clipping -->
|
||||
<Teleport to="body">
|
||||
<Transition name="modal-fade">
|
||||
<div v-if="open" class="feedback-overlay" @click.self="close">
|
||||
<div class="feedback-modal" role="dialog" aria-modal="true" aria-label="Send Feedback">
|
||||
|
||||
<!-- Header -->
|
||||
<div class="feedback-header">
|
||||
<h2 class="feedback-title">{{ step === 1 ? "What's on your mind?" : "Review & submit" }}</h2>
|
||||
<button class="feedback-close" @click="close" aria-label="Close">
|
||||
<svg viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" width="18" height="18">
|
||||
<line x1="18" y1="6" x2="6" y2="18"/><line x1="6" y1="6" x2="18" y2="18"/>
|
||||
</svg>
|
||||
</button>
|
||||
</div>
|
||||
|
||||
<!-- ── Step 1: Form ─────────────────────────────────────────── -->
|
||||
<div v-if="step === 1" class="feedback-body">
|
||||
<div class="form-group">
|
||||
<label class="form-label">Type</label>
|
||||
<div class="filter-chip-row">
|
||||
<button
|
||||
v-for="t in types"
|
||||
:key="t.value"
|
||||
:class="['btn-chip', { active: form.type === t.value }]"
|
||||
@click="form.type = t.value"
|
||||
type="button"
|
||||
>{{ t.label }}</button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="form-group">
|
||||
<label class="form-label">Title <span class="form-required">*</span></label>
|
||||
<input
|
||||
v-model="form.title"
|
||||
class="form-input"
|
||||
type="text"
|
||||
placeholder="Short summary of the issue or idea"
|
||||
maxlength="120"
|
||||
/>
|
||||
</div>
|
||||
|
||||
<div class="form-group">
|
||||
<label class="form-label">Description <span class="form-required">*</span></label>
|
||||
<textarea
|
||||
v-model="form.description"
|
||||
class="form-input feedback-textarea"
|
||||
placeholder="Describe what happened or what you'd like to see…"
|
||||
rows="4"
|
||||
/>
|
||||
</div>
|
||||
|
||||
<div v-if="form.type === 'bug'" class="form-group">
|
||||
<label class="form-label">Reproduction steps</label>
|
||||
<textarea
|
||||
v-model="form.repro"
|
||||
class="form-input feedback-textarea"
|
||||
placeholder="1. Go to… 2. Tap… 3. See error"
|
||||
rows="3"
|
||||
/>
|
||||
</div>
|
||||
|
||||
<p v-if="stepError" class="feedback-error">{{ stepError }}</p>
|
||||
</div>
|
||||
|
||||
<!-- ── Step 2: Attribution + confirm ──────────────────────────── -->
|
||||
<div v-if="step === 2" class="feedback-body">
|
||||
<div class="feedback-summary card">
|
||||
<div class="feedback-summary-row">
|
||||
<span class="text-muted text-sm">Type</span>
|
||||
<span class="text-sm font-semibold">{{ typeLabel }}</span>
|
||||
</div>
|
||||
<div class="feedback-summary-row">
|
||||
<span class="text-muted text-sm">Title</span>
|
||||
<span class="text-sm">{{ form.title }}</span>
|
||||
</div>
|
||||
<div class="feedback-summary-row">
|
||||
<span class="text-muted text-sm">Description</span>
|
||||
<span class="text-sm feedback-summary-desc">{{ form.description }}</span>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="form-group mt-md">
|
||||
<label class="form-label">Attribution (optional)</label>
|
||||
<input
|
||||
v-model="form.submitter"
|
||||
class="form-input"
|
||||
type="text"
|
||||
placeholder="Your name <email@example.com>"
|
||||
/>
|
||||
<p class="text-muted text-xs mt-xs">Include your name and email in the issue if you'd like a response. Never required.</p>
|
||||
</div>
|
||||
|
||||
<p v-if="submitError" class="feedback-error">{{ submitError }}</p>
|
||||
<div v-if="submitted" class="feedback-success">
|
||||
Issue filed! <a :href="issueUrl" target="_blank" rel="noopener" class="feedback-link">View on Forgejo →</a>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Footer nav -->
|
||||
<div class="feedback-footer">
|
||||
<button v-if="step === 2 && !submitted" class="btn btn-ghost" @click="step = 1" :disabled="loading">← Back</button>
|
||||
<button v-if="!submitted" class="btn btn-ghost" @click="close" :disabled="loading">Cancel</button>
|
||||
<button
|
||||
v-if="step === 1"
|
||||
class="btn btn-primary"
|
||||
@click="nextStep"
|
||||
>Next →</button>
|
||||
<button
|
||||
v-if="step === 2 && !submitted"
|
||||
class="btn btn-primary"
|
||||
@click="submit"
|
||||
:disabled="loading"
|
||||
>{{ loading ? 'Filing…' : 'Submit' }}</button>
|
||||
<button v-if="submitted" class="btn btn-primary" @click="close">Done</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</Transition>
|
||||
</Teleport>
|
||||
</template>
|
||||
|
||||
<script setup lang="ts">
|
||||
import { ref, computed, onMounted } from 'vue'
|
||||
|
||||
const props = defineProps<{ currentTab?: string }>()
|
||||
|
||||
// Probe once on mount — hidden until confirmed enabled so button never flashes
|
||||
const enabled = ref(false)
|
||||
onMounted(async () => {
|
||||
try {
|
||||
const res = await fetch('/api/v1/feedback/status')
|
||||
if (res.ok) {
|
||||
const data = await res.json()
|
||||
enabled.value = data.enabled === true
|
||||
}
|
||||
} catch { /* network error — stay hidden */ }
|
||||
})
|
||||
|
||||
const open = ref(false)
|
||||
const step = ref(1)
|
||||
const loading = ref(false)
|
||||
const stepError = ref('')
|
||||
const submitError = ref('')
|
||||
const submitted = ref(false)
|
||||
const issueUrl = ref('')
|
||||
|
||||
const types: { value: 'bug' | 'feature' | 'other'; label: string }[] = [
|
||||
{ value: 'bug', label: '🐛 Bug' },
|
||||
{ value: 'feature', label: '✨ Feature request' },
|
||||
{ value: 'other', label: '💬 Other' },
|
||||
]
|
||||
|
||||
const form = ref({
|
||||
type: 'bug' as 'bug' | 'feature' | 'other',
|
||||
title: '',
|
||||
description: '',
|
||||
repro: '',
|
||||
submitter: '',
|
||||
})
|
||||
|
||||
const typeLabel = computed(() => types.find(t => t.value === form.value.type)?.label ?? '')
|
||||
|
||||
function close() {
|
||||
open.value = false
|
||||
// reset after transition
|
||||
setTimeout(reset, 300)
|
||||
}
|
||||
|
||||
function reset() {
|
||||
step.value = 1
|
||||
loading.value = false
|
||||
stepError.value = ''
|
||||
submitError.value = ''
|
||||
submitted.value = false
|
||||
issueUrl.value = ''
|
||||
form.value = { type: 'bug', title: '', description: '', repro: '', submitter: '' }
|
||||
}
|
||||
|
||||
function nextStep() {
|
||||
stepError.value = ''
|
||||
if (!form.value.title.trim() || !form.value.description.trim()) {
|
||||
stepError.value = 'Please fill in both Title and Description.'
|
||||
return
|
||||
}
|
||||
step.value = 2
|
||||
}
|
||||
|
||||
async function submit() {
|
||||
loading.value = true
|
||||
submitError.value = ''
|
||||
try {
|
||||
const res = await fetch('/api/v1/feedback', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({
|
||||
title: form.value.title.trim(),
|
||||
description: form.value.description.trim(),
|
||||
type: form.value.type,
|
||||
repro: form.value.repro.trim(),
|
||||
tab: props.currentTab ?? 'unknown',
|
||||
submitter: form.value.submitter.trim(),
|
||||
}),
|
||||
})
|
||||
if (!res.ok) {
|
||||
const err = await res.json().catch(() => ({ detail: res.statusText }))
|
||||
submitError.value = err.detail ?? 'Submission failed.'
|
||||
return
|
||||
}
|
||||
const data = await res.json()
|
||||
issueUrl.value = data.issue_url
|
||||
submitted.value = true
|
||||
} catch (e) {
|
||||
submitError.value = 'Network error — please try again.'
|
||||
} finally {
|
||||
loading.value = false
|
||||
}
|
||||
}
|
||||
</script>
|
||||
|
||||
<style scoped>
|
||||
/* ── Floating action button ─────────────────────────────────────────── */
|
||||
.feedback-fab {
|
||||
position: fixed;
|
||||
right: var(--spacing-md);
|
||||
bottom: calc(68px + var(--spacing-md)); /* above mobile bottom nav */
|
||||
z-index: 190;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: var(--spacing-xs);
|
||||
padding: 9px var(--spacing-md);
|
||||
background: var(--color-bg-elevated);
|
||||
border: 1px solid var(--color-border);
|
||||
border-radius: 999px;
|
||||
color: var(--color-text-secondary);
|
||||
font-size: var(--font-size-sm);
|
||||
font-family: var(--font-body);
|
||||
font-weight: 500;
|
||||
cursor: pointer;
|
||||
box-shadow: var(--shadow-md);
|
||||
transition: background 0.15s, color 0.15s, box-shadow 0.15s, border-color 0.15s;
|
||||
}
|
||||
.feedback-fab:hover {
|
||||
background: var(--color-bg-card);
|
||||
color: var(--color-text-primary);
|
||||
border-color: var(--color-border-focus);
|
||||
box-shadow: var(--shadow-lg);
|
||||
}
|
||||
.feedback-fab-icon { width: 15px; height: 15px; flex-shrink: 0; }
|
||||
.feedback-fab-label { white-space: nowrap; }
|
||||
|
||||
/* On desktop, bottom nav is gone — drop to standard corner */
|
||||
@media (min-width: 769px) {
|
||||
.feedback-fab {
|
||||
bottom: var(--spacing-lg);
|
||||
}
|
||||
}
|
||||
|
||||
/* ── Overlay ──────────────────────────────────────────────────────────── */
|
||||
.feedback-overlay {
|
||||
position: fixed;
|
||||
inset: 0;
|
||||
background: rgba(0, 0, 0, 0.55);
|
||||
z-index: 1000;
|
||||
display: flex;
|
||||
align-items: flex-end;
|
||||
justify-content: center;
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
@media (min-width: 500px) {
|
||||
.feedback-overlay {
|
||||
align-items: center;
|
||||
padding: var(--spacing-md);
|
||||
}
|
||||
}
|
||||
|
||||
/* ── Modal ────────────────────────────────────────────────────────────── */
|
||||
.feedback-modal {
|
||||
background: var(--color-bg-elevated);
|
||||
border: 1px solid var(--color-border);
|
||||
border-radius: var(--radius-lg) var(--radius-lg) 0 0;
|
||||
width: 100%;
|
||||
max-height: 90vh;
|
||||
overflow-y: auto;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
box-shadow: var(--shadow-xl);
|
||||
}
|
||||
|
||||
@media (min-width: 500px) {
|
||||
.feedback-modal {
|
||||
border-radius: var(--radius-lg);
|
||||
width: 100%;
|
||||
max-width: 520px;
|
||||
max-height: 85vh;
|
||||
}
|
||||
}
|
||||
|
||||
.feedback-header {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: space-between;
|
||||
padding: var(--spacing-md) var(--spacing-md) var(--spacing-sm);
|
||||
border-bottom: 1px solid var(--color-border);
|
||||
flex-shrink: 0;
|
||||
}
|
||||
.feedback-title {
|
||||
font-family: var(--font-display);
|
||||
font-size: var(--font-size-lg);
|
||||
font-weight: 600;
|
||||
margin: 0;
|
||||
}
|
||||
.feedback-close {
|
||||
background: transparent;
|
||||
border: none;
|
||||
color: var(--color-text-muted);
|
||||
cursor: pointer;
|
||||
padding: 4px;
|
||||
border-radius: var(--radius-sm);
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
}
|
||||
.feedback-close:hover { color: var(--color-text-primary); }
|
||||
|
||||
.feedback-body {
|
||||
padding: var(--spacing-md);
|
||||
flex: 1;
|
||||
overflow-y: auto;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: var(--spacing-md);
|
||||
}
|
||||
|
||||
.feedback-footer {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: flex-end;
|
||||
gap: var(--spacing-sm);
|
||||
padding: var(--spacing-sm) var(--spacing-md);
|
||||
border-top: 1px solid var(--color-border);
|
||||
flex-shrink: 0;
|
||||
}
|
||||
|
||||
.feedback-textarea {
|
||||
resize: vertical;
|
||||
min-height: 80px;
|
||||
font-family: var(--font-body);
|
||||
font-size: var(--font-size-sm);
|
||||
}
|
||||
|
||||
.form-required { color: var(--color-error); margin-left: 2px; }
|
||||
|
||||
.feedback-error {
|
||||
color: var(--color-error);
|
||||
font-size: var(--font-size-sm);
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
.feedback-success {
|
||||
color: var(--color-success);
|
||||
font-size: var(--font-size-sm);
|
||||
padding: var(--spacing-sm) var(--spacing-md);
|
||||
background: var(--color-success-bg);
|
||||
border: 1px solid var(--color-success-border);
|
||||
border-radius: var(--radius-md);
|
||||
}
|
||||
.feedback-link { color: var(--color-success); font-weight: 600; text-decoration: underline; }
|
||||
|
||||
/* Summary card (step 2) */
|
||||
.feedback-summary {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: var(--spacing-xs);
|
||||
padding: var(--spacing-sm) var(--spacing-md);
|
||||
background: var(--color-bg-secondary);
|
||||
border-radius: var(--radius-md);
|
||||
border: 1px solid var(--color-border);
|
||||
}
|
||||
.feedback-summary-row {
|
||||
display: flex;
|
||||
gap: var(--spacing-md);
|
||||
align-items: flex-start;
|
||||
}
|
||||
.feedback-summary-row > :first-child { min-width: 72px; flex-shrink: 0; }
|
||||
.feedback-summary-desc {
|
||||
white-space: pre-wrap;
|
||||
word-break: break-word;
|
||||
}
|
||||
|
||||
.mt-md { margin-top: var(--spacing-md); }
|
||||
.mt-xs { margin-top: var(--spacing-xs); }
|
||||
|
||||
/* Transition */
|
||||
.modal-fade-enter-active, .modal-fade-leave-active { transition: opacity 0.2s ease; }
|
||||
.modal-fade-enter-from, .modal-fade-leave-to { opacity: 0; }
|
||||
</style>
|
||||
File diff suppressed because it is too large
Load diff
|
|
@ -2,7 +2,7 @@
|
|||
<div class="receipts-view">
|
||||
<!-- Upload Section -->
|
||||
<div class="card">
|
||||
<h2 class="section-title mb-md">Upload Receipt</h2>
|
||||
<h2>📸 Upload Receipt</h2>
|
||||
<div
|
||||
class="upload-area"
|
||||
@click="triggerFileInput"
|
||||
|
|
@ -21,9 +21,9 @@
|
|||
@change="handleFileSelect"
|
||||
/>
|
||||
|
||||
<div v-if="uploading" class="loading-inline mt-md">
|
||||
<div v-if="uploading" class="loading">
|
||||
<div class="spinner"></div>
|
||||
<span class="text-sm text-muted">Processing receipt…</span>
|
||||
<p>Processing receipt...</p>
|
||||
</div>
|
||||
|
||||
<div v-if="uploadResults.length > 0" class="results">
|
||||
|
|
@ -39,8 +39,8 @@
|
|||
|
||||
<!-- Receipts List Section -->
|
||||
<div class="card">
|
||||
<h2 class="section-title mb-md">Recent Receipts</h2>
|
||||
<div v-if="receipts.length === 0" class="text-center text-secondary p-lg">
|
||||
<h2>📋 Recent Receipts</h2>
|
||||
<div v-if="receipts.length === 0" style="text-align: center; color: var(--color-text-secondary)">
|
||||
<p>No receipts yet. Upload one above!</p>
|
||||
</div>
|
||||
<div v-else>
|
||||
|
|
@ -89,9 +89,9 @@
|
|||
</div>
|
||||
</div>
|
||||
|
||||
<div class="flex gap-sm mt-md">
|
||||
<button class="btn btn-secondary" @click="exportCSV">Download CSV</button>
|
||||
<button class="btn btn-secondary" @click="exportExcel">Download Excel</button>
|
||||
<div style="margin-top: 20px">
|
||||
<button class="button" @click="exportCSV">📊 Download CSV</button>
|
||||
<button class="button" @click="exportExcel">📈 Download Excel</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
|
@ -225,117 +225,157 @@ onMounted(() => {
|
|||
.receipts-view {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: var(--spacing-md);
|
||||
gap: 20px;
|
||||
}
|
||||
|
||||
.card {
|
||||
background: var(--color-bg-card);
|
||||
border-radius: var(--radius-xl);
|
||||
padding: 30px;
|
||||
box-shadow: 0 10px 40px rgba(0, 0, 0, 0.2);
|
||||
}
|
||||
|
||||
.card h2 {
|
||||
margin-bottom: 20px;
|
||||
color: var(--color-text-primary);
|
||||
}
|
||||
|
||||
.upload-area {
|
||||
border: 2px dashed var(--color-border-focus);
|
||||
border: 3px dashed var(--color-primary);
|
||||
border-radius: var(--radius-lg);
|
||||
padding: var(--spacing-xl) var(--spacing-lg);
|
||||
padding: 40px;
|
||||
text-align: center;
|
||||
cursor: pointer;
|
||||
transition: all 0.2s ease;
|
||||
transition: all 0.3s;
|
||||
background: var(--color-bg-secondary);
|
||||
}
|
||||
|
||||
.upload-area:hover {
|
||||
border-color: var(--color-primary);
|
||||
border-color: var(--color-secondary);
|
||||
background: var(--color-bg-elevated);
|
||||
}
|
||||
|
||||
.upload-icon {
|
||||
font-size: 40px;
|
||||
margin-bottom: var(--spacing-md);
|
||||
line-height: 1;
|
||||
font-size: 48px;
|
||||
margin-bottom: 20px;
|
||||
}
|
||||
|
||||
.upload-text {
|
||||
font-size: var(--font-size-base);
|
||||
font-weight: 600;
|
||||
font-size: var(--font-size-lg);
|
||||
color: var(--color-text-primary);
|
||||
margin-bottom: var(--spacing-xs);
|
||||
margin-bottom: 10px;
|
||||
}
|
||||
|
||||
.upload-hint {
|
||||
font-size: var(--font-size-sm);
|
||||
color: var(--color-text-muted);
|
||||
color: var(--color-text-secondary);
|
||||
}
|
||||
|
||||
.loading-inline {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: var(--spacing-sm);
|
||||
padding: var(--spacing-sm) 0;
|
||||
.loading {
|
||||
text-align: center;
|
||||
padding: 20px;
|
||||
margin-top: 20px;
|
||||
}
|
||||
|
||||
.spinner {
|
||||
border: 4px solid #f3f3f3;
|
||||
border-top: 4px solid #667eea;
|
||||
border-radius: 50%;
|
||||
width: 40px;
|
||||
height: 40px;
|
||||
animation: spin 1s linear infinite;
|
||||
margin: 0 auto 10px;
|
||||
}
|
||||
|
||||
@keyframes spin {
|
||||
0% {
|
||||
transform: rotate(0deg);
|
||||
}
|
||||
100% {
|
||||
transform: rotate(360deg);
|
||||
}
|
||||
}
|
||||
|
||||
.results {
|
||||
margin-top: var(--spacing-md);
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: var(--spacing-xs);
|
||||
margin-top: 20px;
|
||||
}
|
||||
|
||||
.result-item {
|
||||
padding: var(--spacing-sm) var(--spacing-md);
|
||||
padding: 15px;
|
||||
border-radius: var(--radius-md);
|
||||
font-size: var(--font-size-sm);
|
||||
margin-bottom: 10px;
|
||||
}
|
||||
|
||||
.result-success {
|
||||
background: var(--color-success-bg);
|
||||
color: var(--color-success-light);
|
||||
color: var(--color-success-dark);
|
||||
border: 1px solid var(--color-success-border);
|
||||
}
|
||||
|
||||
.result-error {
|
||||
background: var(--color-error-bg);
|
||||
color: var(--color-error-light);
|
||||
color: var(--color-error-dark);
|
||||
border: 1px solid var(--color-error-border);
|
||||
}
|
||||
|
||||
.result-info {
|
||||
background: var(--color-info-bg);
|
||||
color: var(--color-info-light);
|
||||
color: var(--color-info-dark);
|
||||
border: 1px solid var(--color-info-border);
|
||||
}
|
||||
|
||||
/* Stat cards */
|
||||
/* Using .grid-stats from theme.css */
|
||||
|
||||
.stat-card {
|
||||
background: var(--color-bg-secondary);
|
||||
padding: var(--spacing-md);
|
||||
padding: 20px;
|
||||
border-radius: var(--radius-lg);
|
||||
text-align: center;
|
||||
border: 1px solid var(--color-border);
|
||||
}
|
||||
|
||||
.stat-value {
|
||||
font-family: var(--font-mono);
|
||||
font-size: var(--font-size-2xl);
|
||||
font-weight: 500;
|
||||
font-weight: bold;
|
||||
color: var(--color-primary);
|
||||
margin-bottom: var(--spacing-xs);
|
||||
line-height: 1.1;
|
||||
margin-bottom: 5px;
|
||||
}
|
||||
|
||||
.stat-label {
|
||||
font-size: var(--font-size-xs);
|
||||
color: var(--color-text-muted);
|
||||
text-transform: uppercase;
|
||||
letter-spacing: 0.05em;
|
||||
font-size: var(--font-size-sm);
|
||||
color: var(--color-text-secondary);
|
||||
}
|
||||
|
||||
.button {
|
||||
background: var(--gradient-primary);
|
||||
color: white;
|
||||
border: none;
|
||||
padding: 12px 30px;
|
||||
font-size: var(--font-size-base);
|
||||
border-radius: var(--radius-md);
|
||||
cursor: pointer;
|
||||
transition: transform 0.2s;
|
||||
margin-right: 10px;
|
||||
}
|
||||
|
||||
.button:hover {
|
||||
transform: translateY(-2px);
|
||||
}
|
||||
|
||||
.button:disabled {
|
||||
opacity: 0.5;
|
||||
cursor: not-allowed;
|
||||
transform: none;
|
||||
}
|
||||
|
||||
.receipts-list {
|
||||
margin-top: var(--spacing-md);
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: var(--spacing-xs);
|
||||
margin-top: 20px;
|
||||
}
|
||||
|
||||
.receipt-item {
|
||||
background: var(--color-bg-secondary);
|
||||
padding: var(--spacing-sm) var(--spacing-md);
|
||||
padding: 15px;
|
||||
border-radius: var(--radius-md);
|
||||
border: 1px solid var(--color-border);
|
||||
margin-bottom: 10px;
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
align-items: center;
|
||||
|
|
@ -348,7 +388,7 @@ onMounted(() => {
|
|||
.receipt-merchant {
|
||||
font-weight: 600;
|
||||
font-size: var(--font-size-base);
|
||||
margin-bottom: var(--spacing-xs);
|
||||
margin-bottom: 5px;
|
||||
color: var(--color-text-primary);
|
||||
}
|
||||
|
||||
|
|
@ -356,7 +396,7 @@ onMounted(() => {
|
|||
font-size: var(--font-size-sm);
|
||||
color: var(--color-text-secondary);
|
||||
display: flex;
|
||||
gap: var(--spacing-md);
|
||||
gap: 15px;
|
||||
flex-wrap: wrap;
|
||||
}
|
||||
|
||||
|
|
@ -379,17 +419,20 @@ onMounted(() => {
|
|||
color: var(--color-text-secondary);
|
||||
}
|
||||
|
||||
/* Mobile */
|
||||
/* Mobile Responsive - Handled by theme.css
|
||||
Component-specific overrides only below */
|
||||
|
||||
@media (max-width: 480px) {
|
||||
.stat-card {
|
||||
padding: var(--spacing-sm);
|
||||
padding: 15px;
|
||||
}
|
||||
|
||||
/* Receipt items stack content vertically */
|
||||
.receipt-item {
|
||||
flex-direction: column;
|
||||
align-items: flex-start;
|
||||
gap: var(--spacing-sm);
|
||||
padding: var(--spacing-sm);
|
||||
gap: 12px;
|
||||
padding: 12px;
|
||||
}
|
||||
|
||||
.receipt-info {
|
||||
|
|
@ -397,8 +440,15 @@ onMounted(() => {
|
|||
}
|
||||
|
||||
.receipt-details {
|
||||
gap: var(--spacing-sm);
|
||||
gap: 10px;
|
||||
font-size: var(--font-size-xs);
|
||||
}
|
||||
|
||||
/* Buttons full width on mobile */
|
||||
.button {
|
||||
width: 100%;
|
||||
margin-right: 0;
|
||||
margin-bottom: 10px;
|
||||
}
|
||||
}
|
||||
</style>
|
||||
|
|
|
|||
|
|
@ -1,813 +0,0 @@
|
|||
<template>
|
||||
<div class="recipes-view">
|
||||
<!-- Controls Panel -->
|
||||
<div class="card mb-controls">
|
||||
<h2 class="section-title text-xl mb-md">Find Recipes</h2>
|
||||
|
||||
<!-- Level Selector -->
|
||||
<div class="form-group">
|
||||
<label class="form-label">Creativity Level</label>
|
||||
<div class="flex flex-wrap gap-sm">
|
||||
<button
|
||||
v-for="lvl in levels"
|
||||
:key="lvl.value"
|
||||
:class="['btn', 'btn-secondary', { active: recipesStore.level === lvl.value }]"
|
||||
@click="recipesStore.level = lvl.value"
|
||||
>
|
||||
{{ lvl.label }}
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Wildcard warning -->
|
||||
<div v-if="recipesStore.level === 4" class="status-badge status-warning wildcard-warning">
|
||||
Wildcard mode uses LLM to generate creative recipes with whatever you have. Results may be
|
||||
unusual.
|
||||
<label class="flex-start gap-sm mt-xs">
|
||||
<input type="checkbox" v-model="recipesStore.wildcardConfirmed" />
|
||||
<span>I understand, go for it</span>
|
||||
</label>
|
||||
</div>
|
||||
|
||||
<!-- Dietary Constraints Tags -->
|
||||
<div class="form-group">
|
||||
<label class="form-label">Dietary Constraints</label>
|
||||
<div class="tags-wrap flex flex-wrap gap-xs mb-xs">
|
||||
<span
|
||||
v-for="tag in recipesStore.constraints"
|
||||
:key="tag"
|
||||
class="tag-chip status-badge status-info"
|
||||
>
|
||||
{{ tag }}
|
||||
<button class="chip-remove" @click="removeConstraint(tag)" aria-label="Remove">×</button>
|
||||
</span>
|
||||
</div>
|
||||
<input
|
||||
class="form-input"
|
||||
v-model="constraintInput"
|
||||
placeholder="e.g. vegetarian, vegan, gluten-free — press Enter or comma"
|
||||
@keydown="onConstraintKey"
|
||||
@blur="commitConstraintInput"
|
||||
/>
|
||||
</div>
|
||||
|
||||
<!-- Allergies Tags -->
|
||||
<div class="form-group">
|
||||
<label class="form-label">Allergies (hard exclusions)</label>
|
||||
<div class="tags-wrap flex flex-wrap gap-xs mb-xs">
|
||||
<span
|
||||
v-for="tag in recipesStore.allergies"
|
||||
:key="tag"
|
||||
class="tag-chip status-badge status-error"
|
||||
>
|
||||
{{ tag }}
|
||||
<button class="chip-remove" @click="removeAllergy(tag)" aria-label="Remove">×</button>
|
||||
</span>
|
||||
</div>
|
||||
<input
|
||||
class="form-input"
|
||||
v-model="allergyInput"
|
||||
placeholder="e.g. peanuts, shellfish, dairy — press Enter or comma"
|
||||
@keydown="onAllergyKey"
|
||||
@blur="commitAllergyInput"
|
||||
/>
|
||||
</div>
|
||||
|
||||
<!-- Hard Day Mode -->
|
||||
<div class="form-group">
|
||||
<label class="flex-start gap-sm hard-day-toggle">
|
||||
<input type="checkbox" v-model="recipesStore.hardDayMode" />
|
||||
<span class="form-label" style="margin-bottom: 0;">Hard Day Mode</span>
|
||||
</label>
|
||||
<p v-if="recipesStore.hardDayMode" class="text-sm text-secondary mt-xs">
|
||||
Only suggests quick, simple recipes based on your saved equipment.
|
||||
</p>
|
||||
</div>
|
||||
|
||||
<!-- Max Missing -->
|
||||
<div class="form-group">
|
||||
<label class="form-label">Max Missing Ingredients (optional)</label>
|
||||
<input
|
||||
type="number"
|
||||
class="form-input"
|
||||
min="0"
|
||||
max="5"
|
||||
placeholder="Leave blank for no limit"
|
||||
:value="recipesStore.maxMissing ?? ''"
|
||||
@input="onMaxMissingInput"
|
||||
/>
|
||||
</div>
|
||||
|
||||
<!-- Nutrition Filters -->
|
||||
<details class="collapsible form-group">
|
||||
<summary class="form-label collapsible-summary nutrition-summary">
|
||||
Nutrition Filters <span class="text-muted text-xs">(per recipe, optional)</span>
|
||||
</summary>
|
||||
<div class="nutrition-filters-grid mt-xs">
|
||||
<div class="form-group">
|
||||
<label class="form-label">Max Calories</label>
|
||||
<input
|
||||
type="number"
|
||||
class="form-input"
|
||||
min="0"
|
||||
placeholder="e.g. 600"
|
||||
:value="recipesStore.nutritionFilters.max_calories ?? ''"
|
||||
@input="onNutritionInput('max_calories', $event)"
|
||||
/>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label class="form-label">Max Sugar (g)</label>
|
||||
<input
|
||||
type="number"
|
||||
class="form-input"
|
||||
min="0"
|
||||
placeholder="e.g. 10"
|
||||
:value="recipesStore.nutritionFilters.max_sugar_g ?? ''"
|
||||
@input="onNutritionInput('max_sugar_g', $event)"
|
||||
/>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label class="form-label">Max Carbs (g)</label>
|
||||
<input
|
||||
type="number"
|
||||
class="form-input"
|
||||
min="0"
|
||||
placeholder="e.g. 50"
|
||||
:value="recipesStore.nutritionFilters.max_carbs_g ?? ''"
|
||||
@input="onNutritionInput('max_carbs_g', $event)"
|
||||
/>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label class="form-label">Max Sodium (mg)</label>
|
||||
<input
|
||||
type="number"
|
||||
class="form-input"
|
||||
min="0"
|
||||
placeholder="e.g. 800"
|
||||
:value="recipesStore.nutritionFilters.max_sodium_mg ?? ''"
|
||||
@input="onNutritionInput('max_sodium_mg', $event)"
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
<p class="text-xs text-muted mt-xs">
|
||||
Recipes without nutrition data always appear. Filters apply to food.com and estimated values.
|
||||
</p>
|
||||
</details>
|
||||
|
||||
<!-- Cuisine Style (Level 3+ only) -->
|
||||
<div v-if="recipesStore.level >= 3" class="form-group">
|
||||
<label class="form-label">Cuisine Style <span class="text-muted text-xs">(Level 3+)</span></label>
|
||||
<div class="flex flex-wrap gap-xs">
|
||||
<button
|
||||
v-for="style in cuisineStyles"
|
||||
:key="style.id"
|
||||
:class="['btn', 'btn-secondary', 'btn-sm', { active: recipesStore.styleId === style.id }]"
|
||||
@click="recipesStore.styleId = recipesStore.styleId === style.id ? null : style.id"
|
||||
>{{ style.label }}</button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Category Filter (Level 1–2 only) -->
|
||||
<div v-if="recipesStore.level <= 2" class="form-group">
|
||||
<label class="form-label">Category <span class="text-muted text-xs">(optional)</span></label>
|
||||
<input
|
||||
class="form-input"
|
||||
v-model="categoryInput"
|
||||
placeholder="e.g. Breakfast, Asian, Chicken, < 30 Mins"
|
||||
@blur="recipesStore.category = categoryInput.trim() || null"
|
||||
@keydown.enter="recipesStore.category = categoryInput.trim() || null"
|
||||
/>
|
||||
</div>
|
||||
|
||||
<!-- Suggest Button -->
|
||||
<div class="suggest-row">
|
||||
<button
|
||||
class="btn btn-primary btn-lg flex-1"
|
||||
:disabled="recipesStore.loading || pantryItems.length === 0 || (recipesStore.level === 4 && !recipesStore.wildcardConfirmed)"
|
||||
@click="handleSuggest"
|
||||
>
|
||||
<span v-if="recipesStore.loading && !isLoadingMore">
|
||||
<span class="spinner spinner-sm inline-spinner"></span> Finding recipes…
|
||||
</span>
|
||||
<span v-else>Suggest Recipes</span>
|
||||
</button>
|
||||
<button
|
||||
v-if="recipesStore.dismissedCount > 0"
|
||||
class="btn btn-ghost btn-sm"
|
||||
@click="recipesStore.clearDismissed()"
|
||||
title="Show all dismissed recipes again"
|
||||
>Clear dismissed ({{ recipesStore.dismissedCount }})</button>
|
||||
</div>
|
||||
|
||||
<!-- Empty pantry nudge -->
|
||||
<p v-if="pantryItems.length === 0 && !recipesStore.loading" class="text-sm text-muted text-center mt-xs">
|
||||
Add items to your pantry first, then tap Suggest to find recipes.
|
||||
</p>
|
||||
</div>
|
||||
|
||||
<!-- Error -->
|
||||
<div v-if="recipesStore.error" class="status-badge status-error mb-md">
|
||||
{{ recipesStore.error }}
|
||||
</div>
|
||||
|
||||
<!-- Results -->
|
||||
<div v-if="recipesStore.result" class="results-section fade-in">
|
||||
<!-- Rate limit warning -->
|
||||
<div
|
||||
v-if="recipesStore.result.rate_limited"
|
||||
class="status-badge status-warning rate-limit-banner mb-md"
|
||||
>
|
||||
You've used your {{ recipesStore.result.rate_limit_count }} free suggestions today. Upgrade for
|
||||
unlimited.
|
||||
</div>
|
||||
|
||||
<!-- Element gaps -->
|
||||
<div v-if="recipesStore.result.element_gaps.length > 0" class="card card-warning mb-md">
|
||||
<p class="text-sm font-semibold">Your pantry is missing some flavor elements:</p>
|
||||
<div class="flex flex-wrap gap-xs mt-xs">
|
||||
<span
|
||||
v-for="gap in recipesStore.result.element_gaps"
|
||||
:key="gap"
|
||||
class="status-badge status-warning"
|
||||
>{{ gap }}</span>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- No suggestions -->
|
||||
<div
|
||||
v-if="recipesStore.result.suggestions.length === 0"
|
||||
class="card text-center text-muted"
|
||||
>
|
||||
<p>No recipes found for your current pantry and settings. Try lowering the creativity level or adding more items.</p>
|
||||
</div>
|
||||
|
||||
<!-- Recipe Cards -->
|
||||
<div class="grid-auto mb-md">
|
||||
<div
|
||||
v-for="recipe in recipesStore.result.suggestions"
|
||||
:key="recipe.id"
|
||||
class="card slide-up"
|
||||
>
|
||||
<!-- Header row -->
|
||||
<div class="flex-between mb-sm">
|
||||
<h3 class="text-lg font-bold recipe-title">{{ recipe.title }}</h3>
|
||||
<div class="flex flex-wrap gap-xs" style="align-items:center">
|
||||
<span class="status-badge status-success">{{ recipe.match_count }} matched</span>
|
||||
<span class="status-badge status-info">Level {{ recipe.level }}</span>
|
||||
<span v-if="recipe.is_wildcard" class="status-badge status-warning">Wildcard</span>
|
||||
<button
|
||||
v-if="recipe.id"
|
||||
class="btn-dismiss"
|
||||
@click="recipesStore.dismiss(recipe.id)"
|
||||
title="Hide this recipe"
|
||||
aria-label="Dismiss recipe"
|
||||
>✕</button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Notes -->
|
||||
<p v-if="recipe.notes" class="text-sm text-secondary mb-sm">{{ recipe.notes }}</p>
|
||||
|
||||
<!-- Nutrition chips -->
|
||||
<div v-if="recipe.nutrition" class="nutrition-chips mb-sm">
|
||||
<span v-if="recipe.nutrition.calories != null" class="nutrition-chip">
|
||||
🔥 {{ Math.round(recipe.nutrition.calories) }} kcal
|
||||
</span>
|
||||
<span v-if="recipe.nutrition.fat_g != null" class="nutrition-chip">
|
||||
🧈 {{ recipe.nutrition.fat_g.toFixed(1) }}g fat
|
||||
</span>
|
||||
<span v-if="recipe.nutrition.protein_g != null" class="nutrition-chip">
|
||||
💪 {{ recipe.nutrition.protein_g.toFixed(1) }}g protein
|
||||
</span>
|
||||
<span v-if="recipe.nutrition.carbs_g != null" class="nutrition-chip">
|
||||
🌾 {{ recipe.nutrition.carbs_g.toFixed(1) }}g carbs
|
||||
</span>
|
||||
<span v-if="recipe.nutrition.fiber_g != null" class="nutrition-chip">
|
||||
🌿 {{ recipe.nutrition.fiber_g.toFixed(1) }}g fiber
|
||||
</span>
|
||||
<span v-if="recipe.nutrition.sugar_g != null" class="nutrition-chip nutrition-chip-sugar">
|
||||
🍬 {{ recipe.nutrition.sugar_g.toFixed(1) }}g sugar
|
||||
</span>
|
||||
<span v-if="recipe.nutrition.sodium_mg != null" class="nutrition-chip">
|
||||
🧂 {{ Math.round(recipe.nutrition.sodium_mg) }}mg sodium
|
||||
</span>
|
||||
<span v-if="recipe.nutrition.servings != null" class="nutrition-chip nutrition-chip-servings">
|
||||
🍽️ {{ recipe.nutrition.servings }} serving{{ recipe.nutrition.servings !== 1 ? 's' : '' }}
|
||||
</span>
|
||||
<span v-if="recipe.nutrition.estimated" class="nutrition-chip nutrition-chip-estimated" title="Estimated from ingredient profiles">
|
||||
~ estimated
|
||||
</span>
|
||||
</div>
|
||||
|
||||
<!-- Missing ingredients -->
|
||||
<div v-if="recipe.missing_ingredients.length > 0" class="mb-sm">
|
||||
<p class="text-sm font-semibold text-warning">You'd need:</p>
|
||||
<div class="flex flex-wrap gap-xs mt-xs">
|
||||
<span
|
||||
v-for="ing in recipe.missing_ingredients"
|
||||
:key="ing"
|
||||
class="status-badge status-warning"
|
||||
>{{ ing }}</span>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Grocery links for this recipe's missing ingredients -->
|
||||
<div v-if="groceryLinksForRecipe(recipe).length > 0" class="mb-sm">
|
||||
<p class="text-sm font-semibold">Buy online:</p>
|
||||
<div class="flex flex-wrap gap-xs mt-xs">
|
||||
<a
|
||||
v-for="link in groceryLinksForRecipe(recipe)"
|
||||
:key="link.ingredient + link.retailer"
|
||||
:href="link.url"
|
||||
target="_blank"
|
||||
rel="noopener noreferrer"
|
||||
class="grocery-link status-badge status-info"
|
||||
>
|
||||
{{ link.ingredient }} @ {{ link.retailer }} ↗
|
||||
</a>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Swap candidates collapsible -->
|
||||
<details v-if="recipe.swap_candidates.length > 0" class="collapsible mb-sm">
|
||||
<summary class="text-sm font-semibold collapsible-summary">
|
||||
Possible swaps ({{ recipe.swap_candidates.length }})
|
||||
</summary>
|
||||
<div class="card-secondary mt-xs">
|
||||
<div
|
||||
v-for="swap in recipe.swap_candidates"
|
||||
:key="swap.original_name + swap.substitute_name"
|
||||
class="swap-row text-sm"
|
||||
>
|
||||
<span class="font-semibold">{{ swap.original_name }}</span>
|
||||
<span class="text-muted"> → </span>
|
||||
<span class="font-semibold">{{ swap.substitute_name }}</span>
|
||||
<span v-if="swap.constraint_label" class="status-badge status-info ml-xs">{{ swap.constraint_label }}</span>
|
||||
<p v-if="swap.explanation" class="text-muted mt-xs">{{ swap.explanation }}</p>
|
||||
</div>
|
||||
</div>
|
||||
</details>
|
||||
|
||||
<!-- Prep notes -->
|
||||
<div v-if="recipe.prep_notes && recipe.prep_notes.length > 0" class="prep-notes mb-sm">
|
||||
<p class="text-sm font-semibold">Before you start:</p>
|
||||
<ul class="prep-notes-list mt-xs">
|
||||
<li v-for="note in recipe.prep_notes" :key="note" class="text-sm prep-note-item">
|
||||
{{ note }}
|
||||
</li>
|
||||
</ul>
|
||||
</div>
|
||||
|
||||
<!-- Directions collapsible -->
|
||||
<details v-if="recipe.directions.length > 0" class="collapsible">
|
||||
<summary class="text-sm font-semibold collapsible-summary">
|
||||
Directions ({{ recipe.directions.length }} steps)
|
||||
</summary>
|
||||
<ol class="directions-list mt-xs">
|
||||
<li v-for="(step, idx) in recipe.directions" :key="idx" class="text-sm direction-step">
|
||||
{{ step }}
|
||||
</li>
|
||||
</ol>
|
||||
</details>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Load More -->
|
||||
<div v-if="recipesStore.result.suggestions.length > 0" class="load-more-row">
|
||||
<button
|
||||
class="btn btn-secondary"
|
||||
:disabled="recipesStore.loading"
|
||||
@click="handleLoadMore"
|
||||
>
|
||||
<span v-if="recipesStore.loading && isLoadingMore">
|
||||
<span class="spinner spinner-sm inline-spinner"></span> Loading…
|
||||
</span>
|
||||
<span v-else>Load more recipes</span>
|
||||
</button>
|
||||
</div>
|
||||
|
||||
<!-- Grocery list summary -->
|
||||
<div v-if="recipesStore.result.grocery_list.length > 0" class="card card-info">
|
||||
<h3 class="text-lg font-bold mb-sm">Shopping List</h3>
|
||||
<ul class="grocery-list">
|
||||
<li
|
||||
v-for="item in recipesStore.result.grocery_list"
|
||||
:key="item"
|
||||
class="text-sm grocery-item"
|
||||
>
|
||||
{{ item }}
|
||||
</li>
|
||||
</ul>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Empty state when no results yet and pantry has items -->
|
||||
<div
|
||||
v-if="!recipesStore.result && !recipesStore.loading && pantryItems.length > 0"
|
||||
class="card text-center text-muted"
|
||||
>
|
||||
<svg viewBox="0 0 48 48" fill="none" stroke="currentColor" stroke-width="1.5" style="width:40px;height:40px;color:var(--color-text-muted);margin-bottom:var(--spacing-sm)">
|
||||
<path d="M12 8c0 0 4-4 12-4s12 4 12 4v8H12V8z"/>
|
||||
<path d="M10 16h28v4l-2 20H12L10 20v-4z"/>
|
||||
<line x1="20" y1="24" x2="28" y2="24"/>
|
||||
</svg>
|
||||
<p class="mt-xs">Tap "Suggest Recipes" to find recipes using your pantry items.</p>
|
||||
</div>
|
||||
</div>
|
||||
</template>
|
||||
|
||||
<script setup lang="ts">
|
||||
import { ref, computed, onMounted } from 'vue'
|
||||
import { useRecipesStore } from '../stores/recipes'
|
||||
import { useInventoryStore } from '../stores/inventory'
|
||||
import type { RecipeSuggestion, GroceryLink } from '../services/api'
|
||||
|
||||
const recipesStore = useRecipesStore()
|
||||
const inventoryStore = useInventoryStore()
|
||||
|
||||
// Local input state for tags
|
||||
const constraintInput = ref('')
|
||||
const allergyInput = ref('')
|
||||
const categoryInput = ref('')
|
||||
const isLoadingMore = ref(false)
|
||||
|
||||
const levels = [
|
||||
{ value: 1, label: '1 — From Pantry' },
|
||||
{ value: 2, label: '2 — Creative Swaps' },
|
||||
{ value: 3, label: '3 — AI Scaffold' },
|
||||
{ value: 4, label: '4 — Wildcard 🎲' },
|
||||
]
|
||||
|
||||
const cuisineStyles = [
|
||||
{ id: 'italian', label: 'Italian' },
|
||||
{ id: 'mediterranean', label: 'Mediterranean' },
|
||||
{ id: 'east_asian', label: 'East Asian' },
|
||||
{ id: 'latin', label: 'Latin' },
|
||||
{ id: 'eastern_european', label: 'Eastern European' },
|
||||
]
|
||||
|
||||
// Pantry items sorted expiry-first (available items only)
|
||||
const pantryItems = computed(() => {
|
||||
const sorted = [...inventoryStore.items]
|
||||
.filter((item) => item.status === 'available')
|
||||
.sort((a, b) => {
|
||||
if (!a.expiration_date && !b.expiration_date) return 0
|
||||
if (!a.expiration_date) return 1
|
||||
if (!b.expiration_date) return -1
|
||||
return new Date(a.expiration_date).getTime() - new Date(b.expiration_date).getTime()
|
||||
})
|
||||
return sorted.map((item) => item.product_name).filter(Boolean) as string[]
|
||||
})
|
||||
|
||||
// Grocery links relevant to a specific recipe's missing ingredients
|
||||
function groceryLinksForRecipe(recipe: RecipeSuggestion): GroceryLink[] {
|
||||
if (!recipesStore.result) return []
|
||||
return recipesStore.result.grocery_links.filter((link) =>
|
||||
recipe.missing_ingredients.includes(link.ingredient)
|
||||
)
|
||||
}
|
||||
|
||||
// Tag input helpers — constraints
|
||||
function addConstraint(value: string) {
|
||||
const tag = value.trim().toLowerCase()
|
||||
if (tag && !recipesStore.constraints.includes(tag)) {
|
||||
recipesStore.constraints = [...recipesStore.constraints, tag]
|
||||
}
|
||||
constraintInput.value = ''
|
||||
}
|
||||
|
||||
function removeConstraint(tag: string) {
|
||||
recipesStore.constraints = recipesStore.constraints.filter((c) => c !== tag)
|
||||
}
|
||||
|
||||
function onConstraintKey(e: KeyboardEvent) {
|
||||
if (e.key === 'Enter' || e.key === ',') {
|
||||
e.preventDefault()
|
||||
addConstraint(constraintInput.value)
|
||||
}
|
||||
}
|
||||
|
||||
function commitConstraintInput() {
|
||||
if (constraintInput.value.trim()) {
|
||||
addConstraint(constraintInput.value)
|
||||
}
|
||||
}
|
||||
|
||||
// Tag input helpers — allergies
|
||||
function addAllergy(value: string) {
|
||||
const tag = value.trim().toLowerCase()
|
||||
if (tag && !recipesStore.allergies.includes(tag)) {
|
||||
recipesStore.allergies = [...recipesStore.allergies, tag]
|
||||
}
|
||||
allergyInput.value = ''
|
||||
}
|
||||
|
||||
function removeAllergy(tag: string) {
|
||||
recipesStore.allergies = recipesStore.allergies.filter((a) => a !== tag)
|
||||
}
|
||||
|
||||
function onAllergyKey(e: KeyboardEvent) {
|
||||
if (e.key === 'Enter' || e.key === ',') {
|
||||
e.preventDefault()
|
||||
addAllergy(allergyInput.value)
|
||||
}
|
||||
}
|
||||
|
||||
function commitAllergyInput() {
|
||||
if (allergyInput.value.trim()) {
|
||||
addAllergy(allergyInput.value)
|
||||
}
|
||||
}
|
||||
|
||||
// Max missing number input
|
||||
function onMaxMissingInput(e: Event) {
|
||||
const target = e.target as HTMLInputElement
|
||||
const val = parseInt(target.value)
|
||||
recipesStore.maxMissing = isNaN(val) ? null : val
|
||||
}
|
||||
|
||||
// Nutrition filter inputs
|
||||
type NutritionKey = 'max_calories' | 'max_sugar_g' | 'max_carbs_g' | 'max_sodium_mg'
|
||||
function onNutritionInput(key: NutritionKey, e: Event) {
|
||||
const target = e.target as HTMLInputElement
|
||||
const val = parseFloat(target.value)
|
||||
recipesStore.nutritionFilters[key] = isNaN(val) ? null : val
|
||||
}
|
||||
|
||||
// Suggest handler
|
||||
async function handleSuggest() {
|
||||
isLoadingMore.value = false
|
||||
await recipesStore.suggest(pantryItems.value)
|
||||
}
|
||||
|
||||
async function handleLoadMore() {
|
||||
isLoadingMore.value = true
|
||||
await recipesStore.loadMore(pantryItems.value)
|
||||
isLoadingMore.value = false
|
||||
}
|
||||
|
||||
onMounted(async () => {
|
||||
if (inventoryStore.items.length === 0) {
|
||||
await inventoryStore.fetchItems()
|
||||
}
|
||||
})
|
||||
</script>
|
||||
|
||||
<style scoped>
|
||||
.mb-controls {
|
||||
margin-bottom: var(--spacing-md);
|
||||
}
|
||||
|
||||
.mb-md {
|
||||
margin-bottom: var(--spacing-md);
|
||||
}
|
||||
|
||||
.mb-sm {
|
||||
margin-bottom: var(--spacing-sm);
|
||||
}
|
||||
|
||||
.mt-xs {
|
||||
margin-top: var(--spacing-xs);
|
||||
}
|
||||
|
||||
.ml-xs {
|
||||
margin-left: var(--spacing-xs);
|
||||
}
|
||||
|
||||
.wildcard-warning {
|
||||
display: block;
|
||||
margin-bottom: var(--spacing-md);
|
||||
padding: var(--spacing-sm) var(--spacing-md);
|
||||
}
|
||||
|
||||
.hard-day-toggle {
|
||||
cursor: pointer;
|
||||
user-select: none;
|
||||
}
|
||||
|
||||
.tag-chip {
|
||||
display: inline-flex;
|
||||
align-items: center;
|
||||
gap: var(--spacing-xs);
|
||||
}
|
||||
|
||||
.chip-remove {
|
||||
background: transparent;
|
||||
border: none;
|
||||
cursor: pointer;
|
||||
padding: 0;
|
||||
font-size: 14px;
|
||||
line-height: 1;
|
||||
color: inherit;
|
||||
opacity: 0.7;
|
||||
transition: opacity 0.15s;
|
||||
}
|
||||
|
||||
.chip-remove:hover {
|
||||
opacity: 1;
|
||||
transform: none;
|
||||
}
|
||||
|
||||
.inline-spinner {
|
||||
display: inline-block;
|
||||
vertical-align: middle;
|
||||
margin-right: var(--spacing-xs);
|
||||
}
|
||||
|
||||
.rate-limit-banner {
|
||||
display: block;
|
||||
padding: var(--spacing-sm) var(--spacing-md);
|
||||
}
|
||||
|
||||
.recipe-title {
|
||||
flex: 1;
|
||||
margin-right: var(--spacing-sm);
|
||||
}
|
||||
|
||||
.btn-dismiss {
|
||||
background: transparent;
|
||||
border: none;
|
||||
cursor: pointer;
|
||||
padding: 2px 6px;
|
||||
font-size: 12px;
|
||||
line-height: 1;
|
||||
color: var(--color-text-muted);
|
||||
border-radius: 4px;
|
||||
transition: color 0.15s, background 0.15s;
|
||||
flex-shrink: 0;
|
||||
}
|
||||
|
||||
.btn-dismiss:hover {
|
||||
color: var(--color-error, #dc2626);
|
||||
background: var(--color-error-bg, #fee2e2);
|
||||
transform: none;
|
||||
}
|
||||
|
||||
.suggest-row {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: var(--spacing-sm);
|
||||
}
|
||||
|
||||
.btn-ghost {
|
||||
background: transparent;
|
||||
border: none;
|
||||
color: var(--color-text-muted);
|
||||
font-size: var(--font-size-sm);
|
||||
cursor: pointer;
|
||||
padding: var(--spacing-xs) var(--spacing-sm);
|
||||
white-space: nowrap;
|
||||
}
|
||||
|
||||
.btn-ghost:hover {
|
||||
color: var(--color-primary);
|
||||
background: transparent;
|
||||
transform: none;
|
||||
}
|
||||
|
||||
.btn-sm {
|
||||
padding: var(--spacing-xs) var(--spacing-sm);
|
||||
font-size: var(--font-size-sm);
|
||||
}
|
||||
|
||||
.load-more-row {
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
margin-bottom: var(--spacing-md);
|
||||
}
|
||||
|
||||
.collapsible {
|
||||
border-top: 1px solid var(--color-border);
|
||||
padding-top: var(--spacing-sm);
|
||||
}
|
||||
|
||||
.collapsible-summary {
|
||||
cursor: pointer;
|
||||
list-style: none;
|
||||
padding: var(--spacing-xs) 0;
|
||||
color: var(--color-primary);
|
||||
}
|
||||
|
||||
.collapsible-summary::-webkit-details-marker {
|
||||
display: none;
|
||||
}
|
||||
|
||||
.collapsible-summary::before {
|
||||
content: '▶ ';
|
||||
font-size: 10px;
|
||||
}
|
||||
|
||||
details[open] .collapsible-summary::before {
|
||||
content: '▼ ';
|
||||
}
|
||||
|
||||
.swap-row {
|
||||
padding: var(--spacing-xs) 0;
|
||||
border-bottom: 1px solid var(--color-border);
|
||||
}
|
||||
|
||||
.swap-row:last-child {
|
||||
border-bottom: none;
|
||||
}
|
||||
|
||||
.prep-notes-list {
|
||||
padding-left: var(--spacing-lg);
|
||||
list-style-type: disc;
|
||||
}
|
||||
|
||||
.prep-note-item {
|
||||
margin-bottom: var(--spacing-xs);
|
||||
line-height: 1.5;
|
||||
color: var(--color-text-secondary);
|
||||
}
|
||||
|
||||
.directions-list {
|
||||
padding-left: var(--spacing-lg);
|
||||
}
|
||||
|
||||
.direction-step {
|
||||
margin-bottom: var(--spacing-xs);
|
||||
line-height: 1.5;
|
||||
}
|
||||
|
||||
.grocery-link {
|
||||
text-decoration: none;
|
||||
cursor: pointer;
|
||||
transition: opacity 0.2s;
|
||||
}
|
||||
|
||||
.grocery-link:hover {
|
||||
opacity: 0.8;
|
||||
}
|
||||
|
||||
.grocery-list {
|
||||
padding-left: var(--spacing-lg);
|
||||
}
|
||||
|
||||
.grocery-item {
|
||||
margin-bottom: var(--spacing-xs);
|
||||
}
|
||||
|
||||
.results-section {
|
||||
margin-top: var(--spacing-md);
|
||||
}
|
||||
|
||||
.nutrition-summary {
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
.nutrition-filters-grid {
|
||||
display: grid;
|
||||
grid-template-columns: repeat(2, 1fr);
|
||||
gap: var(--spacing-sm);
|
||||
}
|
||||
|
||||
.nutrition-chips {
|
||||
display: flex;
|
||||
flex-wrap: wrap;
|
||||
gap: var(--spacing-xs);
|
||||
}
|
||||
|
||||
.nutrition-chip {
|
||||
display: inline-flex;
|
||||
align-items: center;
|
||||
padding: 2px 8px;
|
||||
border-radius: 12px;
|
||||
font-size: var(--font-size-xs);
|
||||
background: var(--color-bg-secondary, #f5f5f5);
|
||||
color: var(--color-text-secondary);
|
||||
white-space: nowrap;
|
||||
}
|
||||
|
||||
.nutrition-chip-sugar {
|
||||
background: var(--color-warning-bg);
|
||||
color: var(--color-warning);
|
||||
}
|
||||
|
||||
.nutrition-chip-servings {
|
||||
background: var(--color-info-bg);
|
||||
color: var(--color-info-light);
|
||||
}
|
||||
|
||||
.nutrition-chip-estimated {
|
||||
font-style: italic;
|
||||
opacity: 0.7;
|
||||
}
|
||||
|
||||
/* Mobile adjustments */
|
||||
@media (max-width: 480px) {
|
||||
.flex-between {
|
||||
flex-direction: column;
|
||||
align-items: flex-start;
|
||||
gap: var(--spacing-xs);
|
||||
}
|
||||
|
||||
.recipe-title {
|
||||
margin-right: 0;
|
||||
}
|
||||
|
||||
.nutrition-filters-grid {
|
||||
grid-template-columns: 1fr;
|
||||
}
|
||||
}
|
||||
</style>
|
||||
|
|
@ -1,162 +0,0 @@
|
|||
<template>
|
||||
<div class="settings-view">
|
||||
<div class="card">
|
||||
<h2 class="section-title text-xl mb-md">Settings</h2>
|
||||
|
||||
<!-- Cooking Equipment -->
|
||||
<section>
|
||||
<h3 class="text-lg font-semibold mb-xs">Cooking Equipment</h3>
|
||||
<p class="text-sm text-secondary mb-md">
|
||||
Tell Kiwi what you have — used when Hard Day Mode is on to filter out recipes requiring
|
||||
equipment you don't own.
|
||||
</p>
|
||||
|
||||
<!-- Current equipment tags -->
|
||||
<div class="tags-wrap flex flex-wrap gap-xs mb-sm">
|
||||
<span
|
||||
v-for="item in settingsStore.cookingEquipment"
|
||||
:key="item"
|
||||
class="tag-chip status-badge status-info"
|
||||
>
|
||||
{{ item }}
|
||||
<button class="chip-remove" @click="removeEquipment(item)" aria-label="Remove">×</button>
|
||||
</span>
|
||||
</div>
|
||||
|
||||
<!-- Custom input -->
|
||||
<div class="form-group">
|
||||
<label class="form-label">Add equipment</label>
|
||||
<input
|
||||
class="form-input"
|
||||
v-model="equipmentInput"
|
||||
placeholder="Type equipment name, press Enter or comma"
|
||||
@keydown="onEquipmentKey"
|
||||
@blur="commitEquipmentInput"
|
||||
/>
|
||||
</div>
|
||||
|
||||
<!-- Quick-add chips -->
|
||||
<div class="form-group">
|
||||
<label class="form-label">Quick-add</label>
|
||||
<div class="flex flex-wrap gap-xs">
|
||||
<button
|
||||
v-for="eq in quickAddOptions"
|
||||
:key="eq"
|
||||
:class="['btn', 'btn-sm', 'btn-secondary', { active: settingsStore.cookingEquipment.includes(eq) }]"
|
||||
@click="toggleEquipment(eq)"
|
||||
>
|
||||
{{ eq }}
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Save button -->
|
||||
<div class="flex-start gap-sm">
|
||||
<button
|
||||
class="btn btn-primary"
|
||||
:disabled="settingsStore.loading"
|
||||
@click="settingsStore.save()"
|
||||
>
|
||||
<span v-if="settingsStore.loading">Saving…</span>
|
||||
<span v-else-if="settingsStore.saved">✓ Saved!</span>
|
||||
<span v-else>Save Settings</span>
|
||||
</button>
|
||||
</div>
|
||||
</section>
|
||||
</div>
|
||||
</div>
|
||||
</template>
|
||||
|
||||
<script setup lang="ts">
|
||||
import { ref, onMounted } from 'vue'
|
||||
import { useSettingsStore } from '../stores/settings'
|
||||
|
||||
const settingsStore = useSettingsStore()
|
||||
|
||||
const equipmentInput = ref('')
|
||||
|
||||
const quickAddOptions = [
|
||||
'Oven',
|
||||
'Stovetop',
|
||||
'Microwave',
|
||||
'Air Fryer',
|
||||
'Instant Pot',
|
||||
'Slow Cooker',
|
||||
'Grill',
|
||||
'Blender',
|
||||
]
|
||||
|
||||
function addEquipment(value: string) {
|
||||
const item = value.trim()
|
||||
if (item && !settingsStore.cookingEquipment.includes(item)) {
|
||||
settingsStore.cookingEquipment = [...settingsStore.cookingEquipment, item]
|
||||
}
|
||||
equipmentInput.value = ''
|
||||
}
|
||||
|
||||
function removeEquipment(item: string) {
|
||||
settingsStore.cookingEquipment = settingsStore.cookingEquipment.filter((e) => e !== item)
|
||||
}
|
||||
|
||||
function toggleEquipment(item: string) {
|
||||
if (settingsStore.cookingEquipment.includes(item)) {
|
||||
removeEquipment(item)
|
||||
} else {
|
||||
addEquipment(item)
|
||||
}
|
||||
}
|
||||
|
||||
function onEquipmentKey(e: KeyboardEvent) {
|
||||
if (e.key === 'Enter' || e.key === ',') {
|
||||
e.preventDefault()
|
||||
addEquipment(equipmentInput.value)
|
||||
}
|
||||
}
|
||||
|
||||
function commitEquipmentInput() {
|
||||
if (equipmentInput.value.trim()) {
|
||||
addEquipment(equipmentInput.value)
|
||||
}
|
||||
}
|
||||
|
||||
onMounted(async () => {
|
||||
await settingsStore.load()
|
||||
})
|
||||
</script>
|
||||
|
||||
<style scoped>
|
||||
.mb-md {
|
||||
margin-bottom: var(--spacing-md);
|
||||
}
|
||||
|
||||
.mb-sm {
|
||||
margin-bottom: var(--spacing-sm);
|
||||
}
|
||||
|
||||
.mb-xs {
|
||||
margin-bottom: var(--spacing-xs);
|
||||
}
|
||||
|
||||
.tag-chip {
|
||||
display: inline-flex;
|
||||
align-items: center;
|
||||
gap: var(--spacing-xs);
|
||||
}
|
||||
|
||||
.chip-remove {
|
||||
background: transparent;
|
||||
border: none;
|
||||
cursor: pointer;
|
||||
padding: 0;
|
||||
font-size: 14px;
|
||||
line-height: 1;
|
||||
color: inherit;
|
||||
opacity: 0.7;
|
||||
transition: opacity 0.15s;
|
||||
}
|
||||
|
||||
.chip-remove:hover {
|
||||
opacity: 1;
|
||||
transform: none;
|
||||
}
|
||||
</style>
|
||||
|
|
@ -1,132 +0,0 @@
|
|||
import { ref, onMounted, onUnmounted } from 'vue'
|
||||
|
||||
const KONAMI = ['ArrowUp','ArrowUp','ArrowDown','ArrowDown','ArrowLeft','ArrowRight','ArrowLeft','ArrowRight','b','a']
|
||||
const KIWI_WORD = ['k','i','w','i']
|
||||
|
||||
// Module-level shared state — single instance across all component uses
|
||||
const neonMode = ref(false)
|
||||
const kiwiVisible = ref(false)
|
||||
const kiwiDirection = ref<'ltr' | 'rtl'>('rtl') // bird enters from right by default
|
||||
|
||||
const NEON_VARS: Record<string, string> = {
|
||||
'--color-bg-primary': '#070011',
|
||||
'--color-bg-secondary': '#0f001f',
|
||||
'--color-bg-elevated': '#160028',
|
||||
'--color-bg-card': '#160028',
|
||||
'--color-bg-input': '#0f001f',
|
||||
'--color-primary': '#ff006e',
|
||||
'--color-text-primary': '#f0e6ff',
|
||||
'--color-text-secondary': '#c090ff',
|
||||
'--color-text-muted': '#7040a0',
|
||||
'--color-border': 'rgba(255, 0, 110, 0.22)',
|
||||
'--color-border-focus': '#ff006e',
|
||||
'--color-info': '#00f5ff',
|
||||
'--color-info-bg': 'rgba(0, 245, 255, 0.10)',
|
||||
'--color-info-border': 'rgba(0, 245, 255, 0.30)',
|
||||
'--color-info-light': '#00f5ff',
|
||||
'--color-success': '#39ff14',
|
||||
'--color-success-bg': 'rgba(57, 255, 20, 0.10)',
|
||||
'--color-success-border': 'rgba(57, 255, 20, 0.30)',
|
||||
'--color-success-light': '#39ff14',
|
||||
'--color-warning': '#ffbe0b',
|
||||
'--color-warning-bg': 'rgba(255, 190, 11, 0.10)',
|
||||
'--color-warning-border': 'rgba(255, 190, 11, 0.30)',
|
||||
'--color-warning-light': '#ffbe0b',
|
||||
'--shadow-amber': '0 0 18px rgba(255, 0, 110, 0.55)',
|
||||
'--shadow-md': '0 2px 16px rgba(255, 0, 110, 0.18)',
|
||||
'--shadow-lg': '0 4px 28px rgba(255, 0, 110, 0.25)',
|
||||
'--gradient-primary': 'linear-gradient(135deg, #ff006e 0%, #8338ec 100%)',
|
||||
'--gradient-header': 'linear-gradient(135deg, #070011 0%, #160028 100%)',
|
||||
'--color-loc-fridge': '#00f5ff',
|
||||
'--color-loc-freezer': '#8338ec',
|
||||
'--color-loc-pantry': '#ff006e',
|
||||
'--color-loc-cabinet': '#ffbe0b',
|
||||
'--color-loc-garage-freezer': '#39ff14',
|
||||
}
|
||||
|
||||
function applyNeon() {
|
||||
const root = document.documentElement
|
||||
for (const [prop, val] of Object.entries(NEON_VARS)) {
|
||||
root.style.setProperty(prop, val)
|
||||
}
|
||||
document.body.classList.add('neon-mode')
|
||||
}
|
||||
|
||||
function removeNeon() {
|
||||
const root = document.documentElement
|
||||
for (const prop of Object.keys(NEON_VARS)) {
|
||||
root.style.removeProperty(prop)
|
||||
}
|
||||
document.body.classList.remove('neon-mode')
|
||||
}
|
||||
|
||||
function toggleNeon() {
|
||||
neonMode.value = !neonMode.value
|
||||
if (neonMode.value) {
|
||||
applyNeon()
|
||||
localStorage.setItem('kiwi-neon-mode', '1')
|
||||
} else {
|
||||
removeNeon()
|
||||
localStorage.removeItem('kiwi-neon-mode')
|
||||
}
|
||||
}
|
||||
|
||||
function spawnKiwi() {
|
||||
kiwiDirection.value = Math.random() > 0.5 ? 'ltr' : 'rtl'
|
||||
kiwiVisible.value = true
|
||||
setTimeout(() => { kiwiVisible.value = false }, 5500)
|
||||
}
|
||||
|
||||
export function useEasterEggs() {
|
||||
const konamiBuffer: string[] = []
|
||||
const kiwiBuffer: string[] = []
|
||||
|
||||
function onKeyDown(e: KeyboardEvent) {
|
||||
// Skip when user is typing in a form input
|
||||
const tag = (e.target as HTMLElement)?.tagName
|
||||
const isInput = tag === 'INPUT' || tag === 'TEXTAREA' || tag === 'SELECT'
|
||||
|
||||
// Konami code — works even in inputs
|
||||
konamiBuffer.push(e.key)
|
||||
if (konamiBuffer.length > KONAMI.length) konamiBuffer.shift()
|
||||
if (konamiBuffer.join(',') === KONAMI.join(',')) {
|
||||
toggleNeon()
|
||||
konamiBuffer.length = 0
|
||||
}
|
||||
|
||||
// KIWI word — only when not in a form input
|
||||
if (!isInput) {
|
||||
const key = e.key.toLowerCase()
|
||||
if ('kiwi'.includes(key) && key.length === 1) {
|
||||
kiwiBuffer.push(key)
|
||||
if (kiwiBuffer.length > KIWI_WORD.length) kiwiBuffer.shift()
|
||||
if (kiwiBuffer.join('') === 'kiwi') {
|
||||
spawnKiwi()
|
||||
kiwiBuffer.length = 0
|
||||
}
|
||||
} else {
|
||||
kiwiBuffer.length = 0
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
onMounted(() => {
|
||||
if (localStorage.getItem('kiwi-neon-mode')) {
|
||||
neonMode.value = true
|
||||
applyNeon()
|
||||
}
|
||||
window.addEventListener('keydown', onKeyDown)
|
||||
})
|
||||
|
||||
onUnmounted(() => {
|
||||
window.removeEventListener('keydown', onKeyDown)
|
||||
})
|
||||
|
||||
return {
|
||||
neonMode,
|
||||
kiwiVisible,
|
||||
kiwiDirection,
|
||||
toggleNeon,
|
||||
spawnKiwi,
|
||||
}
|
||||
}
|
||||
|
|
@ -80,11 +80,9 @@ export interface Tag {
|
|||
}
|
||||
|
||||
export interface InventoryItem {
|
||||
id: number
|
||||
product_id: number
|
||||
product_name: string | null
|
||||
barcode: string | null
|
||||
category: string | null
|
||||
id: string
|
||||
product_id: string
|
||||
product: Product
|
||||
quantity: number
|
||||
unit: string
|
||||
location: string
|
||||
|
|
@ -111,10 +109,11 @@ export interface InventoryItemUpdate {
|
|||
|
||||
export interface InventoryStats {
|
||||
total_items: number
|
||||
available_items: number
|
||||
total_products: number
|
||||
expiring_soon: number
|
||||
expired_items: number
|
||||
locations: Record<string, number>
|
||||
expired: number
|
||||
items_by_location: Record<string, number>
|
||||
items_by_status: Record<string, number>
|
||||
}
|
||||
|
||||
export interface Receipt {
|
||||
|
|
@ -186,7 +185,7 @@ export const inventoryAPI = {
|
|||
/**
|
||||
* Update an inventory item
|
||||
*/
|
||||
async updateItem(itemId: number, update: InventoryItemUpdate): Promise<InventoryItem> {
|
||||
async updateItem(itemId: string, update: InventoryItemUpdate): Promise<InventoryItem> {
|
||||
const response = await api.patch(`/inventory/items/${itemId}`, update)
|
||||
return response.data
|
||||
},
|
||||
|
|
@ -194,7 +193,7 @@ export const inventoryAPI = {
|
|||
/**
|
||||
* Delete an inventory item
|
||||
*/
|
||||
async deleteItem(itemId: number): Promise<void> {
|
||||
async deleteItem(itemId: string): Promise<void> {
|
||||
await api.delete(`/inventory/items/${itemId}`)
|
||||
},
|
||||
|
||||
|
|
@ -235,7 +234,7 @@ export const inventoryAPI = {
|
|||
/**
|
||||
* Mark item as consumed
|
||||
*/
|
||||
async consumeItem(itemId: number): Promise<void> {
|
||||
async consumeItem(itemId: string): Promise<void> {
|
||||
await api.post(`/inventory/items/${itemId}/consume`)
|
||||
},
|
||||
|
||||
|
|
@ -405,118 +404,4 @@ export const exportAPI = {
|
|||
},
|
||||
}
|
||||
|
||||
// ========== Recipes & Settings Types ==========
|
||||
|
||||
export interface SwapCandidate {
|
||||
original_name: string
|
||||
substitute_name: string
|
||||
constraint_label: string
|
||||
explanation: string
|
||||
compensation_hints: Record<string, string>[]
|
||||
}
|
||||
|
||||
export interface NutritionPanel {
|
||||
calories: number | null
|
||||
fat_g: number | null
|
||||
protein_g: number | null
|
||||
carbs_g: number | null
|
||||
fiber_g: number | null
|
||||
sugar_g: number | null
|
||||
sodium_mg: number | null
|
||||
servings: number | null
|
||||
estimated: boolean
|
||||
}
|
||||
|
||||
export interface RecipeSuggestion {
|
||||
id: number
|
||||
title: string
|
||||
match_count: number
|
||||
element_coverage: Record<string, number>
|
||||
swap_candidates: SwapCandidate[]
|
||||
missing_ingredients: string[]
|
||||
directions: string[]
|
||||
prep_notes: string[]
|
||||
notes: string
|
||||
level: number
|
||||
is_wildcard: boolean
|
||||
nutrition: NutritionPanel | null
|
||||
}
|
||||
|
||||
export interface NutritionFilters {
|
||||
max_calories: number | null
|
||||
max_sugar_g: number | null
|
||||
max_carbs_g: number | null
|
||||
max_sodium_mg: number | null
|
||||
}
|
||||
|
||||
export interface GroceryLink {
|
||||
ingredient: string
|
||||
retailer: string
|
||||
url: string
|
||||
}
|
||||
|
||||
export interface RecipeResult {
|
||||
suggestions: RecipeSuggestion[]
|
||||
element_gaps: string[]
|
||||
grocery_list: string[]
|
||||
grocery_links: GroceryLink[]
|
||||
rate_limited: boolean
|
||||
rate_limit_count: number
|
||||
}
|
||||
|
||||
export interface RecipeRequest {
|
||||
pantry_items: string[]
|
||||
level: number
|
||||
constraints: string[]
|
||||
allergies: string[]
|
||||
expiry_first: boolean
|
||||
hard_day_mode: boolean
|
||||
max_missing: number | null
|
||||
style_id: string | null
|
||||
category: string | null
|
||||
wildcard_confirmed: boolean
|
||||
nutrition_filters: NutritionFilters
|
||||
excluded_ids: number[]
|
||||
}
|
||||
|
||||
export interface Staple {
|
||||
slug: string
|
||||
name: string
|
||||
category: string
|
||||
dietary_tags: string[]
|
||||
}
|
||||
|
||||
// ========== Recipes API ==========
|
||||
|
||||
export const recipesAPI = {
|
||||
async suggest(req: RecipeRequest): Promise<RecipeResult> {
|
||||
const response = await api.post('/recipes/suggest', req)
|
||||
return response.data
|
||||
},
|
||||
async getRecipe(id: number): Promise<RecipeSuggestion> {
|
||||
const response = await api.get(`/recipes/${id}`)
|
||||
return response.data
|
||||
},
|
||||
async listStaples(dietary?: string): Promise<Staple[]> {
|
||||
const response = await api.get('/staples/', { params: dietary ? { dietary } : undefined })
|
||||
return response.data
|
||||
},
|
||||
}
|
||||
|
||||
// ========== Settings API ==========
|
||||
|
||||
export const settingsAPI = {
|
||||
async getSetting(key: string): Promise<string | null> {
|
||||
try {
|
||||
const response = await api.get(`/settings/${key}`)
|
||||
return response.data.value
|
||||
} catch {
|
||||
return null
|
||||
}
|
||||
},
|
||||
async setSetting(key: string, value: string): Promise<void> {
|
||||
await api.put(`/settings/${key}`, { value })
|
||||
},
|
||||
}
|
||||
|
||||
export default api
|
||||
|
|
|
|||
|
|
@ -76,7 +76,7 @@ export const useInventoryStore = defineStore('inventory', () => {
|
|||
}
|
||||
}
|
||||
|
||||
async function updateItem(itemId: number, update: InventoryItemUpdate) {
|
||||
async function updateItem(itemId: string, update: InventoryItemUpdate) {
|
||||
loading.value = true
|
||||
error.value = null
|
||||
|
||||
|
|
@ -99,7 +99,7 @@ export const useInventoryStore = defineStore('inventory', () => {
|
|||
}
|
||||
}
|
||||
|
||||
async function deleteItem(itemId: number) {
|
||||
async function deleteItem(itemId: string) {
|
||||
loading.value = true
|
||||
error.value = null
|
||||
|
||||
|
|
|
|||
|
|
@ -1,174 +0,0 @@
|
|||
/**
|
||||
* Recipes Store
|
||||
*
|
||||
* Manages recipe suggestion state and request parameters using Pinia.
|
||||
* Dismissed recipe IDs are persisted to localStorage with a 7-day TTL.
|
||||
*/
|
||||
|
||||
import { defineStore } from 'pinia'
|
||||
import { ref, computed } from 'vue'
|
||||
import { recipesAPI, type RecipeResult, type RecipeSuggestion, type RecipeRequest, type NutritionFilters } from '../services/api'
|
||||
|
||||
const DISMISSED_KEY = 'kiwi:dismissed_recipes'
|
||||
const DISMISS_TTL_MS = 7 * 24 * 60 * 60 * 1000
|
||||
|
||||
// [id, dismissedAtMs]
|
||||
type DismissEntry = [number, number]
|
||||
|
||||
function loadDismissed(): Set<number> {
|
||||
try {
|
||||
const raw = localStorage.getItem(DISMISSED_KEY)
|
||||
if (!raw) return new Set()
|
||||
const entries: DismissEntry[] = JSON.parse(raw)
|
||||
const cutoff = Date.now() - DISMISS_TTL_MS
|
||||
return new Set(entries.filter(([, ts]) => ts > cutoff).map(([id]) => id))
|
||||
} catch {
|
||||
return new Set()
|
||||
}
|
||||
}
|
||||
|
||||
function saveDismissed(ids: Set<number>) {
|
||||
const now = Date.now()
|
||||
const entries: DismissEntry[] = [...ids].map((id) => [id, now])
|
||||
localStorage.setItem(DISMISSED_KEY, JSON.stringify(entries))
|
||||
}
|
||||
|
||||
export const useRecipesStore = defineStore('recipes', () => {
|
||||
// Suggestion result state
|
||||
const result = ref<RecipeResult | null>(null)
|
||||
const loading = ref(false)
|
||||
const error = ref<string | null>(null)
|
||||
|
||||
// Request parameters
|
||||
const level = ref(1)
|
||||
const constraints = ref<string[]>([])
|
||||
const allergies = ref<string[]>([])
|
||||
const hardDayMode = ref(false)
|
||||
const maxMissing = ref<number | null>(null)
|
||||
const styleId = ref<string | null>(null)
|
||||
const category = ref<string | null>(null)
|
||||
const wildcardConfirmed = ref(false)
|
||||
const nutritionFilters = ref<NutritionFilters>({
|
||||
max_calories: null,
|
||||
max_sugar_g: null,
|
||||
max_carbs_g: null,
|
||||
max_sodium_mg: null,
|
||||
})
|
||||
|
||||
// Dismissed IDs: persisted to localStorage, 7-day TTL
|
||||
const dismissedIds = ref<Set<number>>(loadDismissed())
|
||||
// Seen IDs: session-only, used by Load More to avoid repeating results
|
||||
const seenIds = ref<Set<number>>(new Set())
|
||||
|
||||
const dismissedCount = computed(() => dismissedIds.value.size)
|
||||
|
||||
function _buildRequest(pantryItems: string[], extraExcluded: number[] = []): RecipeRequest {
|
||||
const excluded = new Set([...dismissedIds.value, ...extraExcluded])
|
||||
return {
|
||||
pantry_items: pantryItems,
|
||||
level: level.value,
|
||||
constraints: constraints.value,
|
||||
allergies: allergies.value,
|
||||
expiry_first: true,
|
||||
hard_day_mode: hardDayMode.value,
|
||||
max_missing: maxMissing.value,
|
||||
style_id: styleId.value,
|
||||
category: category.value,
|
||||
wildcard_confirmed: wildcardConfirmed.value,
|
||||
nutrition_filters: nutritionFilters.value,
|
||||
excluded_ids: [...excluded],
|
||||
}
|
||||
}
|
||||
|
||||
function _trackSeen(suggestions: RecipeSuggestion[]) {
|
||||
for (const s of suggestions) {
|
||||
if (s.id) seenIds.value = new Set([...seenIds.value, s.id])
|
||||
}
|
||||
}
|
||||
|
||||
async function suggest(pantryItems: string[]) {
|
||||
loading.value = true
|
||||
error.value = null
|
||||
seenIds.value = new Set()
|
||||
|
||||
try {
|
||||
result.value = await recipesAPI.suggest(_buildRequest(pantryItems))
|
||||
_trackSeen(result.value.suggestions)
|
||||
} catch (err: unknown) {
|
||||
error.value = err instanceof Error ? err.message : 'Failed to get recipe suggestions'
|
||||
} finally {
|
||||
loading.value = false
|
||||
}
|
||||
}
|
||||
|
||||
async function loadMore(pantryItems: string[]) {
|
||||
if (!result.value || loading.value) return
|
||||
loading.value = true
|
||||
error.value = null
|
||||
|
||||
try {
|
||||
// Exclude everything already shown (dismissed + all seen this session)
|
||||
const more = await recipesAPI.suggest(_buildRequest(pantryItems, [...seenIds.value]))
|
||||
if (more.suggestions.length === 0) {
|
||||
error.value = 'No more recipes found — try clearing dismissed or adjusting filters.'
|
||||
} else {
|
||||
result.value = {
|
||||
...result.value,
|
||||
suggestions: [...result.value.suggestions, ...more.suggestions],
|
||||
grocery_list: [...new Set([...result.value.grocery_list, ...more.grocery_list])],
|
||||
grocery_links: [...result.value.grocery_links, ...more.grocery_links],
|
||||
}
|
||||
_trackSeen(more.suggestions)
|
||||
}
|
||||
} catch (err: unknown) {
|
||||
error.value = err instanceof Error ? err.message : 'Failed to load more recipes'
|
||||
} finally {
|
||||
loading.value = false
|
||||
}
|
||||
}
|
||||
|
||||
function dismiss(id: number) {
|
||||
dismissedIds.value = new Set([...dismissedIds.value, id])
|
||||
saveDismissed(dismissedIds.value)
|
||||
// Remove from current results immediately
|
||||
if (result.value) {
|
||||
result.value = {
|
||||
...result.value,
|
||||
suggestions: result.value.suggestions.filter((s) => s.id !== id),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function clearDismissed() {
|
||||
dismissedIds.value = new Set()
|
||||
localStorage.removeItem(DISMISSED_KEY)
|
||||
}
|
||||
|
||||
function clearResult() {
|
||||
result.value = null
|
||||
error.value = null
|
||||
wildcardConfirmed.value = false
|
||||
}
|
||||
|
||||
return {
|
||||
result,
|
||||
loading,
|
||||
error,
|
||||
level,
|
||||
constraints,
|
||||
allergies,
|
||||
hardDayMode,
|
||||
maxMissing,
|
||||
styleId,
|
||||
category,
|
||||
wildcardConfirmed,
|
||||
nutritionFilters,
|
||||
dismissedIds,
|
||||
dismissedCount,
|
||||
suggest,
|
||||
loadMore,
|
||||
dismiss,
|
||||
clearDismissed,
|
||||
clearResult,
|
||||
}
|
||||
})
|
||||
|
|
@ -1,57 +0,0 @@
|
|||
/**
|
||||
* Settings Store
|
||||
*
|
||||
* Manages user settings (cooking equipment, preferences) using Pinia.
|
||||
*/
|
||||
|
||||
import { defineStore } from 'pinia'
|
||||
import { ref } from 'vue'
|
||||
import { settingsAPI } from '../services/api'
|
||||
|
||||
export const useSettingsStore = defineStore('settings', () => {
|
||||
// State
|
||||
const cookingEquipment = ref<string[]>([])
|
||||
const loading = ref(false)
|
||||
const saved = ref(false)
|
||||
|
||||
// Actions
|
||||
async function load() {
|
||||
loading.value = true
|
||||
try {
|
||||
const raw = await settingsAPI.getSetting('cooking_equipment')
|
||||
if (raw) {
|
||||
cookingEquipment.value = JSON.parse(raw)
|
||||
}
|
||||
} catch (err: unknown) {
|
||||
console.error('Failed to load settings:', err)
|
||||
} finally {
|
||||
loading.value = false
|
||||
}
|
||||
}
|
||||
|
||||
async function save() {
|
||||
loading.value = true
|
||||
try {
|
||||
await settingsAPI.setSetting('cooking_equipment', JSON.stringify(cookingEquipment.value))
|
||||
saved.value = true
|
||||
setTimeout(() => {
|
||||
saved.value = false
|
||||
}, 2000)
|
||||
} catch (err: unknown) {
|
||||
console.error('Failed to save settings:', err)
|
||||
} finally {
|
||||
loading.value = false
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
// State
|
||||
cookingEquipment,
|
||||
loading,
|
||||
saved,
|
||||
|
||||
// Actions
|
||||
load,
|
||||
save,
|
||||
}
|
||||
})
|
||||
|
|
@ -1,14 +1,9 @@
|
|||
:root {
|
||||
/* Typography */
|
||||
--font-display: 'Fraunces', Georgia, serif;
|
||||
--font-mono: 'DM Mono', 'Courier New', monospace;
|
||||
--font-body: 'DM Sans', system-ui, sans-serif;
|
||||
|
||||
font-family: var(--font-body);
|
||||
font-family: system-ui, Avenir, Helvetica, Arial, sans-serif;
|
||||
line-height: 1.5;
|
||||
font-weight: 400;
|
||||
|
||||
color-scheme: dark;
|
||||
color-scheme: light dark;
|
||||
|
||||
font-synthesis: none;
|
||||
text-rendering: optimizeLegibility;
|
||||
|
|
@ -16,79 +11,66 @@
|
|||
-moz-osx-font-smoothing: grayscale;
|
||||
|
||||
/* Theme Colors - Dark Mode (Default) */
|
||||
--color-text-primary: rgba(255, 248, 235, 0.92);
|
||||
--color-text-secondary: rgba(255, 248, 235, 0.60);
|
||||
--color-text-muted: rgba(255, 248, 235, 0.38);
|
||||
--color-text-primary: rgba(255, 255, 255, 0.87);
|
||||
--color-text-secondary: rgba(255, 255, 255, 0.6);
|
||||
--color-text-muted: rgba(255, 255, 255, 0.4);
|
||||
|
||||
--color-bg-primary: #1e1c1a;
|
||||
--color-bg-secondary: #161412;
|
||||
--color-bg-elevated: #2a2724;
|
||||
--color-bg-card: #2a2724;
|
||||
--color-bg-input: #161412;
|
||||
--color-bg-primary: #242424;
|
||||
--color-bg-secondary: #1a1a1a;
|
||||
--color-bg-elevated: #2d2d2d;
|
||||
--color-bg-card: #2d2d2d;
|
||||
--color-bg-input: #1a1a1a;
|
||||
|
||||
--color-border: rgba(232, 168, 32, 0.12);
|
||||
--color-border-focus: rgba(232, 168, 32, 0.35);
|
||||
--color-border: rgba(255, 255, 255, 0.1);
|
||||
--color-border-focus: rgba(255, 255, 255, 0.2);
|
||||
|
||||
/* Brand Colors — Saffron amber + forest green */
|
||||
--color-primary: #e8a820;
|
||||
--color-primary-dark: #c88c10;
|
||||
--color-primary-light: #f0bc48;
|
||||
--color-secondary: #2d5a27;
|
||||
--color-secondary-light: #3d7a35;
|
||||
--color-secondary-dark: #1e3d1a;
|
||||
/* Brand Colors */
|
||||
--color-primary: #667eea;
|
||||
--color-primary-dark: #5568d3;
|
||||
--color-primary-light: #7d8ff0;
|
||||
--color-secondary: #764ba2;
|
||||
|
||||
/* Status Colors */
|
||||
--color-success: #4a8c40;
|
||||
--color-success-dark: #3a7030;
|
||||
--color-success-light: #6aac60;
|
||||
--color-success-bg: rgba(74, 140, 64, 0.12);
|
||||
--color-success-border: rgba(74, 140, 64, 0.30);
|
||||
--color-success: #4CAF50;
|
||||
--color-success-dark: #45a049;
|
||||
--color-success-light: #66bb6a;
|
||||
--color-success-bg: rgba(76, 175, 80, 0.1);
|
||||
--color-success-border: rgba(76, 175, 80, 0.3);
|
||||
|
||||
--color-warning: #e8a820;
|
||||
--color-warning-dark: #c88c10;
|
||||
--color-warning-light: #f0bc48;
|
||||
--color-warning-bg: rgba(232, 168, 32, 0.12);
|
||||
--color-warning-border: rgba(232, 168, 32, 0.30);
|
||||
--color-warning: #ff9800;
|
||||
--color-warning-dark: #f57c00;
|
||||
--color-warning-light: #ffb74d;
|
||||
--color-warning-bg: rgba(255, 152, 0, 0.1);
|
||||
--color-warning-border: rgba(255, 152, 0, 0.3);
|
||||
|
||||
--color-error: #c0392b;
|
||||
--color-error-dark: #96281b;
|
||||
--color-error-light: #e74c3c;
|
||||
--color-error-bg: rgba(192, 57, 43, 0.12);
|
||||
--color-error-border: rgba(192, 57, 43, 0.30);
|
||||
--color-error: #f44336;
|
||||
--color-error-dark: #d32f2f;
|
||||
--color-error-light: #ff6b6b;
|
||||
--color-error-bg: rgba(244, 67, 54, 0.1);
|
||||
--color-error-border: rgba(244, 67, 54, 0.3);
|
||||
|
||||
--color-info: #2980b9;
|
||||
--color-info-dark: #1a5f8a;
|
||||
--color-info-light: #5dade2;
|
||||
--color-info-bg: rgba(41, 128, 185, 0.12);
|
||||
--color-info-border: rgba(41, 128, 185, 0.30);
|
||||
|
||||
/* Location dot colors */
|
||||
--color-loc-fridge: #5dade2;
|
||||
--color-loc-freezer: #48d1cc;
|
||||
--color-loc-garage-freezer: #7fb3d3;
|
||||
--color-loc-pantry: #e8a820;
|
||||
--color-loc-cabinet: #a0855b;
|
||||
--color-info: #2196F3;
|
||||
--color-info-dark: #1976D2;
|
||||
--color-info-light: #64b5f6;
|
||||
--color-info-bg: rgba(33, 150, 243, 0.1);
|
||||
--color-info-border: rgba(33, 150, 243, 0.3);
|
||||
|
||||
/* Gradient */
|
||||
--gradient-primary: linear-gradient(135deg, var(--color-primary) 0%, #c88c10 100%);
|
||||
--gradient-secondary: linear-gradient(135deg, var(--color-secondary) 0%, var(--color-secondary-light) 100%);
|
||||
--gradient-header: linear-gradient(160deg, #2a2724 0%, #1e1c1a 100%);
|
||||
--gradient-primary: linear-gradient(135deg, var(--color-primary) 0%, var(--color-secondary) 100%);
|
||||
|
||||
/* Shadows */
|
||||
--shadow-sm: 0 1px 3px rgba(0, 0, 0, 0.4);
|
||||
--shadow-md: 0 4px 8px rgba(0, 0, 0, 0.4);
|
||||
--shadow-lg: 0 10px 24px rgba(0, 0, 0, 0.5);
|
||||
--shadow-xl: 0 20px 48px rgba(0, 0, 0, 0.6);
|
||||
--shadow-amber: 0 4px 16px rgba(232, 168, 32, 0.20);
|
||||
--shadow-sm: 0 1px 3px rgba(0, 0, 0, 0.3);
|
||||
--shadow-md: 0 4px 6px rgba(0, 0, 0, 0.3);
|
||||
--shadow-lg: 0 10px 20px rgba(0, 0, 0, 0.4);
|
||||
--shadow-xl: 0 20px 40px rgba(0, 0, 0, 0.5);
|
||||
|
||||
/* Typography Scale */
|
||||
--font-size-xs: 11px;
|
||||
--font-size-sm: 13px;
|
||||
--font-size-base: 15px;
|
||||
--font-size-lg: 17px;
|
||||
--font-size-xl: 22px;
|
||||
--font-size-2xl: 30px;
|
||||
--font-size-display: 28px;
|
||||
/* Typography */
|
||||
--font-size-xs: 12px;
|
||||
--font-size-sm: 14px;
|
||||
--font-size-base: 16px;
|
||||
--font-size-lg: 18px;
|
||||
--font-size-xl: 24px;
|
||||
--font-size-2xl: 32px;
|
||||
|
||||
/* Spacing */
|
||||
--spacing-xs: 4px;
|
||||
|
|
@ -98,155 +80,176 @@
|
|||
--spacing-xl: 32px;
|
||||
|
||||
/* Border Radius */
|
||||
--radius-sm: 6px;
|
||||
--radius-md: 8px;
|
||||
--radius-lg: 12px;
|
||||
--radius-xl: 16px;
|
||||
--radius-pill: 999px;
|
||||
--radius-sm: 4px;
|
||||
--radius-md: 6px;
|
||||
--radius-lg: 8px;
|
||||
--radius-xl: 12px;
|
||||
|
||||
color: var(--color-text-primary);
|
||||
background-color: var(--color-bg-primary);
|
||||
}
|
||||
|
||||
/* Light mode overrides */
|
||||
@media (prefers-color-scheme: light) {
|
||||
:root {
|
||||
--color-text-primary: #2c1a06;
|
||||
--color-text-secondary: #6b4c1e;
|
||||
--color-text-muted: #a0845a;
|
||||
|
||||
--color-bg-primary: #fdf8f0;
|
||||
--color-bg-secondary: #ffffff;
|
||||
--color-bg-elevated: #fff9ed;
|
||||
--color-bg-card: #ffffff;
|
||||
--color-bg-input: #fef9ef;
|
||||
|
||||
--color-border: rgba(168, 100, 20, 0.15);
|
||||
--color-border-focus: rgba(168, 100, 20, 0.40);
|
||||
|
||||
--color-success-bg: #e8f5e2;
|
||||
--color-success-border: #c3e0bb;
|
||||
--color-warning-bg: #fff8e1;
|
||||
--color-warning-border: #ffe08a;
|
||||
--color-error-bg: #fdecea;
|
||||
--color-error-border: #f5c6c2;
|
||||
--color-info-bg: #e3f2fd;
|
||||
--color-info-border: #b3d8f5;
|
||||
|
||||
--gradient-header: linear-gradient(160deg, #fff9ed 0%, #fdf8f0 100%);
|
||||
|
||||
--shadow-sm: 0 1px 3px rgba(0, 0, 0, 0.08);
|
||||
--shadow-md: 0 4px 8px rgba(0, 0, 0, 0.10);
|
||||
--shadow-lg: 0 10px 24px rgba(0, 0, 0, 0.12);
|
||||
--shadow-xl: 0 20px 48px rgba(0, 0, 0, 0.16);
|
||||
--shadow-amber: 0 4px 16px rgba(168, 100, 20, 0.15);
|
||||
}
|
||||
}
|
||||
|
||||
a {
|
||||
font-weight: 500;
|
||||
color: var(--color-primary);
|
||||
color: #646cff;
|
||||
text-decoration: inherit;
|
||||
}
|
||||
a:hover {
|
||||
color: var(--color-primary-light);
|
||||
color: #535bf2;
|
||||
}
|
||||
|
||||
body {
|
||||
margin: 0;
|
||||
display: flex;
|
||||
place-items: center;
|
||||
min-width: 320px;
|
||||
min-height: 100vh;
|
||||
overflow-x: hidden; /* prevent any element from expanding the mobile viewport */
|
||||
background-color: var(--color-bg-primary);
|
||||
color: var(--color-text-primary);
|
||||
}
|
||||
|
||||
h1, h2, h3 {
|
||||
font-family: var(--font-display);
|
||||
font-weight: 600;
|
||||
line-height: 1.2;
|
||||
h1 {
|
||||
font-size: 3.2em;
|
||||
line-height: 1.1;
|
||||
}
|
||||
|
||||
button {
|
||||
border-radius: var(--radius-md);
|
||||
border-radius: 8px;
|
||||
border: 1px solid transparent;
|
||||
padding: 0.5em 1.1em;
|
||||
font-size: var(--font-size-sm);
|
||||
padding: 0.6em 1.2em;
|
||||
font-size: 1em;
|
||||
font-weight: 500;
|
||||
font-family: var(--font-body);
|
||||
background-color: var(--color-bg-elevated);
|
||||
color: var(--color-text-primary);
|
||||
font-family: inherit;
|
||||
background-color: #1a1a1a;
|
||||
cursor: pointer;
|
||||
transition: all 0.2s ease;
|
||||
transition: border-color 0.25s;
|
||||
}
|
||||
button:hover {
|
||||
border-color: var(--color-primary);
|
||||
border-color: #646cff;
|
||||
}
|
||||
button:focus,
|
||||
button:focus-visible {
|
||||
outline: 2px solid var(--color-primary);
|
||||
outline-offset: 2px;
|
||||
outline: 4px auto -webkit-focus-ring-color;
|
||||
}
|
||||
|
||||
.card {
|
||||
padding: var(--spacing-lg);
|
||||
padding: 2em;
|
||||
}
|
||||
|
||||
#app {
|
||||
max-width: 1280px;
|
||||
margin: 0 auto;
|
||||
text-align: left;
|
||||
padding: 2rem;
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
@media (prefers-color-scheme: light) {
|
||||
:root {
|
||||
/* Theme Colors - Light Mode */
|
||||
--color-text-primary: #213547;
|
||||
--color-text-secondary: #666;
|
||||
--color-text-muted: #999;
|
||||
|
||||
--color-bg-primary: #f5f5f5;
|
||||
--color-bg-secondary: #ffffff;
|
||||
--color-bg-elevated: #ffffff;
|
||||
--color-bg-card: #ffffff;
|
||||
--color-bg-input: #ffffff;
|
||||
|
||||
--color-border: #ddd;
|
||||
--color-border-focus: #ccc;
|
||||
|
||||
/* Status colors stay the same in light mode */
|
||||
/* But we adjust backgrounds for better contrast */
|
||||
--color-success-bg: #d4edda;
|
||||
--color-success-border: #c3e6cb;
|
||||
|
||||
--color-warning-bg: #fff3cd;
|
||||
--color-warning-border: #ffeaa7;
|
||||
|
||||
--color-error-bg: #f8d7da;
|
||||
--color-error-border: #f5c6cb;
|
||||
|
||||
--color-info-bg: #d1ecf1;
|
||||
--color-info-border: #bee5eb;
|
||||
|
||||
/* Shadows for light mode (lighter) */
|
||||
--shadow-sm: 0 1px 3px rgba(0, 0, 0, 0.1);
|
||||
--shadow-md: 0 4px 6px rgba(0, 0, 0, 0.1);
|
||||
--shadow-lg: 0 10px 20px rgba(0, 0, 0, 0.15);
|
||||
--shadow-xl: 0 20px 40px rgba(0, 0, 0, 0.2);
|
||||
|
||||
color: var(--color-text-primary);
|
||||
background-color: var(--color-bg-primary);
|
||||
}
|
||||
|
||||
a:hover {
|
||||
color: #747bff;
|
||||
}
|
||||
|
||||
button {
|
||||
background-color: #f9f9f9;
|
||||
}
|
||||
}
|
||||
|
||||
/* Mobile Responsive Typography and Spacing */
|
||||
@media (max-width: 480px) {
|
||||
:root {
|
||||
/* Reduce font sizes for mobile */
|
||||
--font-size-xs: 11px;
|
||||
--font-size-sm: 12px;
|
||||
--font-size-sm: 13px;
|
||||
--font-size-base: 14px;
|
||||
--font-size-lg: 16px;
|
||||
--font-size-xl: 19px;
|
||||
--font-size-xl: 20px;
|
||||
--font-size-2xl: 24px;
|
||||
--font-size-display: 22px;
|
||||
|
||||
/* Reduce spacing for mobile */
|
||||
--spacing-xs: 4px;
|
||||
--spacing-sm: 6px;
|
||||
--spacing-md: 12px;
|
||||
--spacing-lg: 16px;
|
||||
--spacing-xl: 20px;
|
||||
|
||||
--shadow-sm: 0 1px 2px rgba(0, 0, 0, 0.3);
|
||||
--shadow-md: 0 2px 6px rgba(0, 0, 0, 0.35);
|
||||
--shadow-lg: 0 6px 12px rgba(0, 0, 0, 0.40);
|
||||
--shadow-xl: 0 12px 24px rgba(0, 0, 0, 0.50);
|
||||
/* Reduce shadows for mobile */
|
||||
--shadow-sm: 0 1px 2px rgba(0, 0, 0, 0.2);
|
||||
--shadow-md: 0 2px 4px rgba(0, 0, 0, 0.2);
|
||||
--shadow-lg: 0 4px 8px rgba(0, 0, 0, 0.3);
|
||||
--shadow-xl: 0 8px 16px rgba(0, 0, 0, 0.4);
|
||||
}
|
||||
|
||||
h1 {
|
||||
font-size: 2em;
|
||||
}
|
||||
|
||||
.card {
|
||||
padding: var(--spacing-md);
|
||||
padding: 1em;
|
||||
}
|
||||
|
||||
#app {
|
||||
padding: 0;
|
||||
padding: 1rem;
|
||||
}
|
||||
}
|
||||
|
||||
@media (min-width: 481px) and (max-width: 768px) {
|
||||
:root {
|
||||
--font-size-base: 14px;
|
||||
--font-size-lg: 16px;
|
||||
--font-size-xl: 20px;
|
||||
--font-size-2xl: 26px;
|
||||
/* Slightly reduced sizes for tablets */
|
||||
--font-size-base: 15px;
|
||||
--font-size-lg: 17px;
|
||||
--font-size-xl: 22px;
|
||||
--font-size-2xl: 28px;
|
||||
|
||||
--spacing-md: 14px;
|
||||
--spacing-lg: 20px;
|
||||
--spacing-xl: 28px;
|
||||
}
|
||||
|
||||
h1 {
|
||||
font-size: 2.5em;
|
||||
}
|
||||
|
||||
.card {
|
||||
padding: var(--spacing-md) var(--spacing-lg);
|
||||
padding: 1.5em;
|
||||
}
|
||||
|
||||
#app {
|
||||
padding: 0;
|
||||
padding: 1.5rem;
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
/**
|
||||
* Central Theme System for Kiwi
|
||||
* Central Theme System for Project Thoth
|
||||
*
|
||||
* This file contains all reusable, theme-aware, responsive CSS classes.
|
||||
* Components should use these classes instead of custom styles where possible.
|
||||
|
|
@ -9,42 +9,24 @@
|
|||
LAYOUT UTILITIES - RESPONSIVE GRIDS
|
||||
============================================ */
|
||||
|
||||
/* Responsive Grid - Automatically adjusts columns based on screen size */
|
||||
.grid-responsive {
|
||||
display: grid;
|
||||
gap: var(--spacing-md);
|
||||
}
|
||||
|
||||
/* Mobile: 1 column, Tablet: 2 columns, Desktop: 3+ columns */
|
||||
.grid-auto {
|
||||
display: grid;
|
||||
gap: var(--spacing-md);
|
||||
grid-template-columns: 1fr;
|
||||
grid-template-columns: 1fr; /* Default to single column */
|
||||
}
|
||||
|
||||
/* Stats grid — horizontal strip of compact stats */
|
||||
/* Stats grid - always fills available space */
|
||||
.grid-stats {
|
||||
display: grid;
|
||||
gap: var(--spacing-md);
|
||||
grid-template-columns: 1fr;
|
||||
}
|
||||
|
||||
.grid-stats-strip {
|
||||
display: flex;
|
||||
gap: 0;
|
||||
overflow: hidden;
|
||||
border-radius: var(--radius-lg);
|
||||
background: var(--color-bg-secondary);
|
||||
border: 1px solid var(--color-border);
|
||||
}
|
||||
|
||||
.grid-stats-strip .stat-strip-item {
|
||||
flex: 1;
|
||||
text-align: center;
|
||||
padding: var(--spacing-sm) var(--spacing-xs);
|
||||
border-right: 1px solid var(--color-border);
|
||||
}
|
||||
|
||||
.grid-stats-strip .stat-strip-item:last-child {
|
||||
border-right: none;
|
||||
grid-template-columns: 1fr; /* Default to single column */
|
||||
}
|
||||
|
||||
/* Force specific column counts */
|
||||
|
|
@ -54,7 +36,7 @@
|
|||
.grid-4 { grid-template-columns: repeat(4, 1fr); }
|
||||
|
||||
/* ============================================
|
||||
FLEXBOX UTILITIES
|
||||
FLEXBOX UTILITIES - RESPONSIVE
|
||||
============================================ */
|
||||
|
||||
.flex { display: flex; }
|
||||
|
|
@ -81,6 +63,7 @@
|
|||
align-items: center;
|
||||
}
|
||||
|
||||
/* Stack on mobile, horizontal on desktop */
|
||||
.flex-responsive {
|
||||
display: flex;
|
||||
gap: var(--spacing-md);
|
||||
|
|
@ -91,12 +74,14 @@
|
|||
SPACING UTILITIES
|
||||
============================================ */
|
||||
|
||||
/* Gaps */
|
||||
.gap-xs { gap: var(--spacing-xs); }
|
||||
.gap-sm { gap: var(--spacing-sm); }
|
||||
.gap-md { gap: var(--spacing-md); }
|
||||
.gap-lg { gap: var(--spacing-lg); }
|
||||
.gap-xl { gap: var(--spacing-xl); }
|
||||
|
||||
/* Padding */
|
||||
.p-0 { padding: 0; }
|
||||
.p-xs { padding: var(--spacing-xs); }
|
||||
.p-sm { padding: var(--spacing-sm); }
|
||||
|
|
@ -104,6 +89,7 @@
|
|||
.p-lg { padding: var(--spacing-lg); }
|
||||
.p-xl { padding: var(--spacing-xl); }
|
||||
|
||||
/* Margin */
|
||||
.m-0 { margin: 0; }
|
||||
.m-xs { margin: var(--spacing-xs); }
|
||||
.m-sm { margin: var(--spacing-sm); }
|
||||
|
|
@ -111,14 +97,9 @@
|
|||
.m-lg { margin: var(--spacing-lg); }
|
||||
.m-xl { margin: var(--spacing-xl); }
|
||||
|
||||
.mt-xs { margin-top: var(--spacing-xs); }
|
||||
.mt-sm { margin-top: var(--spacing-sm); }
|
||||
/* Margin/Padding specific sides */
|
||||
.mt-md { margin-top: var(--spacing-md); }
|
||||
.mb-xs { margin-bottom: var(--spacing-xs); }
|
||||
.mb-sm { margin-bottom: var(--spacing-sm); }
|
||||
.mb-md { margin-bottom: var(--spacing-md); }
|
||||
.mb-lg { margin-bottom: var(--spacing-lg); }
|
||||
.ml-xs { margin-left: var(--spacing-xs); }
|
||||
.ml-md { margin-left: var(--spacing-md); }
|
||||
.mr-md { margin-right: var(--spacing-md); }
|
||||
|
||||
|
|
@ -134,9 +115,8 @@
|
|||
.card {
|
||||
background: var(--color-bg-card);
|
||||
border-radius: var(--radius-xl);
|
||||
padding: var(--spacing-lg);
|
||||
padding: var(--spacing-xl);
|
||||
box-shadow: var(--shadow-md);
|
||||
border: 1px solid var(--color-border);
|
||||
transition: box-shadow 0.2s ease;
|
||||
}
|
||||
|
||||
|
|
@ -149,22 +129,20 @@
|
|||
border-radius: var(--radius-lg);
|
||||
padding: var(--spacing-md);
|
||||
box-shadow: var(--shadow-sm);
|
||||
border: 1px solid var(--color-border);
|
||||
}
|
||||
|
||||
.card-secondary {
|
||||
background: var(--color-bg-secondary);
|
||||
border-radius: var(--radius-lg);
|
||||
padding: var(--spacing-md);
|
||||
padding: var(--spacing-lg);
|
||||
box-shadow: var(--shadow-sm);
|
||||
border: 1px solid var(--color-border);
|
||||
}
|
||||
|
||||
/* Status border variants */
|
||||
.card-success { border-left: 3px solid var(--color-success); }
|
||||
.card-warning { border-left: 3px solid var(--color-warning); }
|
||||
.card-error { border-left: 3px solid var(--color-error); }
|
||||
.card-info { border-left: 3px solid var(--color-info); }
|
||||
.card-success { border-left: 4px solid var(--color-success); }
|
||||
.card-warning { border-left: 4px solid var(--color-warning); }
|
||||
.card-error { border-left: 4px solid var(--color-error); }
|
||||
.card-info { border-left: 4px solid var(--color-info); }
|
||||
|
||||
/* ============================================
|
||||
BUTTON COMPONENTS - THEME AWARE
|
||||
|
|
@ -172,18 +150,13 @@
|
|||
|
||||
.btn {
|
||||
padding: var(--spacing-sm) var(--spacing-md);
|
||||
border: 1px solid transparent;
|
||||
border: none;
|
||||
border-radius: var(--radius-md);
|
||||
font-size: var(--font-size-sm);
|
||||
font-weight: 600;
|
||||
font-family: var(--font-body);
|
||||
cursor: pointer;
|
||||
transition: all 0.18s ease;
|
||||
transition: all 0.2s ease;
|
||||
white-space: nowrap;
|
||||
display: inline-flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
gap: var(--spacing-xs);
|
||||
}
|
||||
|
||||
.btn:hover {
|
||||
|
|
@ -195,7 +168,7 @@
|
|||
}
|
||||
|
||||
.btn:disabled {
|
||||
opacity: 0.45;
|
||||
opacity: 0.5;
|
||||
cursor: not-allowed;
|
||||
transform: none;
|
||||
}
|
||||
|
|
@ -203,14 +176,8 @@
|
|||
/* Button variants */
|
||||
.btn-primary {
|
||||
background: var(--gradient-primary);
|
||||
color: #1e1c1a;
|
||||
color: white;
|
||||
border: none;
|
||||
font-weight: 700;
|
||||
box-shadow: var(--shadow-amber);
|
||||
}
|
||||
|
||||
.btn-primary:hover:not(:disabled) {
|
||||
box-shadow: 0 6px 20px rgba(232, 168, 32, 0.35);
|
||||
}
|
||||
|
||||
.btn-success {
|
||||
|
|
@ -241,49 +208,20 @@
|
|||
}
|
||||
|
||||
.btn-secondary {
|
||||
background: var(--color-bg-elevated);
|
||||
color: var(--color-text-secondary);
|
||||
border: 1px solid var(--color-border);
|
||||
background: var(--color-bg-secondary);
|
||||
color: var(--color-text-primary);
|
||||
border: 2px solid var(--color-border);
|
||||
}
|
||||
|
||||
.btn-secondary:hover:not(:disabled) {
|
||||
background: var(--color-bg-primary);
|
||||
border-color: var(--color-primary);
|
||||
color: var(--color-primary);
|
||||
}
|
||||
|
||||
.btn-secondary.active {
|
||||
background: var(--color-primary);
|
||||
color: #1e1c1a;
|
||||
background: var(--gradient-primary);
|
||||
color: white;
|
||||
border-color: var(--color-primary);
|
||||
font-weight: 700;
|
||||
}
|
||||
|
||||
/* Pill chip button — for filter chips */
|
||||
.btn-chip {
|
||||
padding: var(--spacing-xs) var(--spacing-sm);
|
||||
border: 1px solid var(--color-border);
|
||||
border-radius: var(--radius-pill);
|
||||
font-size: var(--font-size-xs);
|
||||
font-weight: 500;
|
||||
font-family: var(--font-body);
|
||||
background: var(--color-bg-elevated);
|
||||
color: var(--color-text-secondary);
|
||||
cursor: pointer;
|
||||
transition: all 0.18s ease;
|
||||
white-space: nowrap;
|
||||
}
|
||||
|
||||
.btn-chip:hover {
|
||||
border-color: var(--color-primary);
|
||||
color: var(--color-primary);
|
||||
}
|
||||
|
||||
.btn-chip.active {
|
||||
background: var(--color-primary);
|
||||
color: #1e1c1a;
|
||||
border-color: var(--color-primary);
|
||||
font-weight: 700;
|
||||
}
|
||||
|
||||
/* Button sizes */
|
||||
|
|
@ -294,38 +232,7 @@
|
|||
|
||||
.btn-lg {
|
||||
padding: var(--spacing-md) var(--spacing-xl);
|
||||
font-size: var(--font-size-base);
|
||||
}
|
||||
|
||||
/* Icon-only action button */
|
||||
.btn-icon {
|
||||
width: 32px;
|
||||
height: 32px;
|
||||
padding: 0;
|
||||
border: none;
|
||||
border-radius: var(--radius-md);
|
||||
background: transparent;
|
||||
color: var(--color-text-muted);
|
||||
cursor: pointer;
|
||||
transition: all 0.18s ease;
|
||||
display: inline-flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
flex-shrink: 0;
|
||||
}
|
||||
|
||||
.btn-icon:hover {
|
||||
background: var(--color-bg-primary);
|
||||
color: var(--color-text-primary);
|
||||
transform: none;
|
||||
}
|
||||
|
||||
.btn-icon.btn-icon-danger:hover {
|
||||
color: var(--color-error);
|
||||
}
|
||||
|
||||
.btn-icon.btn-icon-success:hover {
|
||||
color: var(--color-success);
|
||||
font-size: var(--font-size-lg);
|
||||
}
|
||||
|
||||
/* ============================================
|
||||
|
|
@ -338,13 +245,10 @@
|
|||
|
||||
.form-label {
|
||||
display: block;
|
||||
margin-bottom: var(--spacing-xs);
|
||||
margin-bottom: var(--spacing-sm);
|
||||
font-weight: 600;
|
||||
color: var(--color-text-secondary);
|
||||
font-size: var(--font-size-xs);
|
||||
text-transform: uppercase;
|
||||
letter-spacing: 0.06em;
|
||||
font-family: var(--font-body);
|
||||
color: var(--color-text-primary);
|
||||
font-size: var(--font-size-sm);
|
||||
}
|
||||
|
||||
.form-input,
|
||||
|
|
@ -357,9 +261,7 @@
|
|||
background: var(--color-bg-input);
|
||||
color: var(--color-text-primary);
|
||||
font-size: var(--font-size-sm);
|
||||
font-family: var(--font-body);
|
||||
transition: border-color 0.18s ease, box-shadow 0.18s ease;
|
||||
box-sizing: border-box;
|
||||
transition: border-color 0.2s ease, box-shadow 0.2s ease;
|
||||
}
|
||||
|
||||
.form-input:focus,
|
||||
|
|
@ -367,36 +269,22 @@
|
|||
.form-textarea:focus {
|
||||
outline: none;
|
||||
border-color: var(--color-primary);
|
||||
box-shadow: 0 0 0 3px var(--color-warning-bg);
|
||||
box-shadow: 0 0 0 3px rgba(102, 126, 234, 0.1);
|
||||
}
|
||||
|
||||
.form-textarea {
|
||||
resize: vertical;
|
||||
min-height: 80px;
|
||||
font-family: var(--font-body);
|
||||
font-family: inherit;
|
||||
}
|
||||
|
||||
/* Form layouts */
|
||||
.form-row {
|
||||
display: grid;
|
||||
gap: var(--spacing-md);
|
||||
grid-template-columns: 1fr;
|
||||
}
|
||||
|
||||
/* Chip row filter bar — horizontal scroll */
|
||||
.filter-chip-row {
|
||||
display: flex;
|
||||
gap: var(--spacing-xs);
|
||||
overflow-x: auto;
|
||||
padding-bottom: var(--spacing-xs);
|
||||
scrollbar-width: none;
|
||||
min-width: 0; /* allow flex item to shrink below content; lets overflow-x scroll internally */
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
.filter-chip-row::-webkit-scrollbar {
|
||||
display: none;
|
||||
}
|
||||
|
||||
/* ============================================
|
||||
TEXT UTILITIES
|
||||
============================================ */
|
||||
|
|
@ -408,17 +296,6 @@
|
|||
.text-xl { font-size: var(--font-size-xl); }
|
||||
.text-2xl { font-size: var(--font-size-2xl); }
|
||||
|
||||
/* Display font */
|
||||
.text-display {
|
||||
font-family: var(--font-display);
|
||||
font-style: italic;
|
||||
}
|
||||
|
||||
/* Mono font */
|
||||
.text-mono {
|
||||
font-family: var(--font-mono);
|
||||
}
|
||||
|
||||
.text-primary { color: var(--color-text-primary); }
|
||||
.text-secondary { color: var(--color-text-secondary); }
|
||||
.text-muted { color: var(--color-text-muted); }
|
||||
|
|
@ -427,7 +304,6 @@
|
|||
.text-warning { color: var(--color-warning); }
|
||||
.text-error { color: var(--color-error); }
|
||||
.text-info { color: var(--color-info); }
|
||||
.text-amber { color: var(--color-primary); }
|
||||
|
||||
.text-center { text-align: center; }
|
||||
.text-left { text-align: left; }
|
||||
|
|
@ -437,76 +313,59 @@
|
|||
.font-semibold { font-weight: 600; }
|
||||
.font-normal { font-weight: 400; }
|
||||
|
||||
/* ============================================
|
||||
LOCATION DOT INDICATORS
|
||||
============================================ */
|
||||
|
||||
.loc-dot {
|
||||
width: 10px;
|
||||
height: 10px;
|
||||
border-radius: 50%;
|
||||
flex-shrink: 0;
|
||||
display: inline-block;
|
||||
}
|
||||
|
||||
.loc-dot-fridge { background: var(--color-loc-fridge); }
|
||||
.loc-dot-freezer { background: var(--color-loc-freezer); }
|
||||
.loc-dot-garage_freezer { background: var(--color-loc-garage-freezer); }
|
||||
.loc-dot-pantry { background: var(--color-loc-pantry); }
|
||||
.loc-dot-cabinet { background: var(--color-loc-cabinet); }
|
||||
|
||||
/* Location left-border strip on inventory rows */
|
||||
.inv-row-fridge { border-left-color: var(--color-loc-fridge) !important; }
|
||||
.inv-row-freezer { border-left-color: var(--color-loc-freezer) !important; }
|
||||
.inv-row-garage_freezer { border-left-color: var(--color-loc-garage-freezer) !important; }
|
||||
.inv-row-pantry { border-left-color: var(--color-loc-pantry) !important; }
|
||||
.inv-row-cabinet { border-left-color: var(--color-loc-cabinet) !important; }
|
||||
|
||||
/* ============================================
|
||||
RESPONSIVE UTILITIES
|
||||
============================================ */
|
||||
|
||||
/* Show/Hide based on screen size */
|
||||
.mobile-only { display: none; }
|
||||
.desktop-only { display: block; }
|
||||
|
||||
/* Width utilities */
|
||||
.w-full { width: 100%; }
|
||||
.w-auto { width: auto; }
|
||||
|
||||
/* Height utilities */
|
||||
.h-full { height: 100%; }
|
||||
.h-auto { height: auto; }
|
||||
|
||||
/* ============================================
|
||||
MOBILE BREAKPOINTS (<=480px)
|
||||
MOBILE BREAKPOINTS (≤480px)
|
||||
============================================ */
|
||||
|
||||
@media (max-width: 480px) {
|
||||
/* Show/Hide */
|
||||
.mobile-only { display: block; }
|
||||
.desktop-only { display: none; }
|
||||
|
||||
/* Grids already default to 1fr, just ensure it stays that way */
|
||||
.grid-2,
|
||||
.grid-3,
|
||||
.grid-4 {
|
||||
grid-template-columns: 1fr !important;
|
||||
}
|
||||
|
||||
/* Stack flex items vertically */
|
||||
.flex-responsive {
|
||||
flex-direction: column;
|
||||
}
|
||||
|
||||
/* Buttons take full width */
|
||||
.btn-mobile-full {
|
||||
width: 100%;
|
||||
min-width: 100%;
|
||||
}
|
||||
|
||||
/* Reduce card padding on mobile */
|
||||
.card {
|
||||
padding: var(--spacing-md);
|
||||
border-radius: var(--radius-lg);
|
||||
}
|
||||
|
||||
.card-sm {
|
||||
padding: var(--spacing-sm);
|
||||
}
|
||||
|
||||
/* Allow text wrapping on mobile */
|
||||
.btn {
|
||||
white-space: normal;
|
||||
text-align: center;
|
||||
|
|
@ -518,6 +377,7 @@
|
|||
============================================ */
|
||||
|
||||
@media (min-width: 481px) and (max-width: 768px) {
|
||||
/* 2-column layouts on tablets */
|
||||
.grid-3,
|
||||
.grid-4 {
|
||||
grid-template-columns: repeat(2, 1fr);
|
||||
|
|
@ -542,11 +402,11 @@
|
|||
|
||||
@media (min-width: 769px) and (max-width: 1024px) {
|
||||
.grid-auto {
|
||||
grid-template-columns: repeat(2, 1fr);
|
||||
grid-template-columns: repeat(3, 1fr);
|
||||
}
|
||||
|
||||
.grid-stats {
|
||||
grid-template-columns: repeat(4, 1fr);
|
||||
grid-template-columns: repeat(3, 1fr);
|
||||
}
|
||||
|
||||
.grid-4 {
|
||||
|
|
@ -555,16 +415,16 @@
|
|||
}
|
||||
|
||||
/* ============================================
|
||||
LARGE DESKTOP (>=1025px)
|
||||
LARGE DESKTOP (≥1025px)
|
||||
============================================ */
|
||||
|
||||
@media (min-width: 1025px) {
|
||||
.grid-auto {
|
||||
grid-template-columns: repeat(auto-fill, minmax(280px, 1fr));
|
||||
grid-template-columns: repeat(auto-fill, minmax(300px, 1fr));
|
||||
}
|
||||
|
||||
.grid-stats {
|
||||
grid-template-columns: repeat(auto-fit, minmax(160px, 1fr));
|
||||
grid-template-columns: repeat(auto-fit, minmax(200px, 1fr));
|
||||
}
|
||||
|
||||
.form-row {
|
||||
|
|
@ -577,37 +437,34 @@
|
|||
============================================ */
|
||||
|
||||
.status-badge {
|
||||
display: inline-flex;
|
||||
align-items: center;
|
||||
padding: 3px var(--spacing-sm);
|
||||
border-radius: var(--radius-pill);
|
||||
display: inline-block;
|
||||
padding: var(--spacing-xs) var(--spacing-sm);
|
||||
border-radius: var(--radius-sm);
|
||||
font-size: var(--font-size-xs);
|
||||
font-weight: 600;
|
||||
font-family: var(--font-mono);
|
||||
letter-spacing: 0.02em;
|
||||
}
|
||||
|
||||
.status-success {
|
||||
background: var(--color-success-bg);
|
||||
color: var(--color-success-light);
|
||||
color: var(--color-success-dark);
|
||||
border: 1px solid var(--color-success-border);
|
||||
}
|
||||
|
||||
.status-warning {
|
||||
background: var(--color-warning-bg);
|
||||
color: var(--color-warning-light);
|
||||
color: var(--color-warning-dark);
|
||||
border: 1px solid var(--color-warning-border);
|
||||
}
|
||||
|
||||
.status-error {
|
||||
background: var(--color-error-bg);
|
||||
color: var(--color-error-light);
|
||||
color: var(--color-error-dark);
|
||||
border: 1px solid var(--color-error-border);
|
||||
}
|
||||
|
||||
.status-info {
|
||||
background: var(--color-info-bg);
|
||||
color: var(--color-info-light);
|
||||
color: var(--color-info-dark);
|
||||
border: 1px solid var(--color-info-border);
|
||||
}
|
||||
|
||||
|
|
@ -631,7 +488,7 @@
|
|||
@keyframes slideUp {
|
||||
from {
|
||||
opacity: 0;
|
||||
transform: translateY(16px);
|
||||
transform: translateY(20px);
|
||||
}
|
||||
to {
|
||||
opacity: 1;
|
||||
|
|
@ -639,33 +496,23 @@
|
|||
}
|
||||
}
|
||||
|
||||
/* Urgency pulse — for items expiring very soon */
|
||||
@keyframes urgencyPulse {
|
||||
0%, 100% { opacity: 1; }
|
||||
50% { opacity: 0.6; }
|
||||
}
|
||||
|
||||
.pulse-urgent {
|
||||
animation: urgencyPulse 1.8s ease-in-out infinite;
|
||||
}
|
||||
|
||||
/* ============================================
|
||||
LOADING UTILITIES
|
||||
============================================ */
|
||||
|
||||
.spinner {
|
||||
border: 2px solid var(--color-border);
|
||||
border-top: 2px solid var(--color-primary);
|
||||
border: 3px solid var(--color-border);
|
||||
border-top: 3px solid var(--color-primary);
|
||||
border-radius: 50%;
|
||||
width: 36px;
|
||||
height: 36px;
|
||||
animation: spin 0.9s linear infinite;
|
||||
width: 40px;
|
||||
height: 40px;
|
||||
animation: spin 1s linear infinite;
|
||||
margin: 0 auto;
|
||||
}
|
||||
|
||||
.spinner-sm {
|
||||
width: 18px;
|
||||
height: 18px;
|
||||
width: 20px;
|
||||
height: 20px;
|
||||
border-width: 2px;
|
||||
}
|
||||
|
||||
|
|
@ -687,160 +534,3 @@
|
|||
.divider-md {
|
||||
margin: var(--spacing-md) 0;
|
||||
}
|
||||
|
||||
/* ============================================
|
||||
SECTION HEADERS (display font)
|
||||
============================================ */
|
||||
|
||||
.section-title {
|
||||
font-family: var(--font-display);
|
||||
font-style: italic;
|
||||
font-weight: 600;
|
||||
color: var(--color-text-primary);
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
/* ============================================
|
||||
EASTER EGG — GRID KITCHEN NEON MODE
|
||||
Activated via Konami code
|
||||
============================================ */
|
||||
|
||||
body.neon-mode .card,
|
||||
body.neon-mode .card-sm,
|
||||
body.neon-mode .card-secondary {
|
||||
box-shadow:
|
||||
0 0 0 1px rgba(255, 0, 110, 0.35),
|
||||
0 0 12px rgba(255, 0, 110, 0.18),
|
||||
0 2px 20px rgba(131, 56, 236, 0.15);
|
||||
}
|
||||
|
||||
body.neon-mode .btn-primary {
|
||||
box-shadow: 0 0 18px rgba(255, 0, 110, 0.55), 0 0 36px rgba(131, 56, 236, 0.25);
|
||||
color: #fff;
|
||||
}
|
||||
|
||||
body.neon-mode .wordmark-kiwi {
|
||||
text-shadow: 0 0 10px rgba(255, 0, 110, 0.7), 0 0 24px rgba(131, 56, 236, 0.5);
|
||||
}
|
||||
|
||||
body.neon-mode .sidebar,
|
||||
body.neon-mode .bottom-nav {
|
||||
border-color: rgba(255, 0, 110, 0.3);
|
||||
box-shadow: 4px 0 20px rgba(255, 0, 110, 0.12);
|
||||
}
|
||||
|
||||
body.neon-mode .sidebar-item.active,
|
||||
body.neon-mode .nav-item.active {
|
||||
text-shadow: 0 0 8px currentColor;
|
||||
}
|
||||
|
||||
/* Scanline overlay */
|
||||
body.neon-mode::after {
|
||||
content: '';
|
||||
position: fixed;
|
||||
inset: 0;
|
||||
background: repeating-linear-gradient(
|
||||
0deg,
|
||||
transparent,
|
||||
transparent 3px,
|
||||
rgba(0, 0, 0, 0.08) 3px,
|
||||
rgba(0, 0, 0, 0.08) 4px
|
||||
);
|
||||
pointer-events: none;
|
||||
z-index: 9998;
|
||||
animation: scanlineScroll 8s linear infinite;
|
||||
}
|
||||
|
||||
@keyframes scanlineScroll {
|
||||
0% { background-position: 0 0; }
|
||||
100% { background-position: 0 80px; }
|
||||
}
|
||||
|
||||
/* CRT flicker on wordmark */
|
||||
body.neon-mode .wordmark-kiwi {
|
||||
animation: crtFlicker 6s ease-in-out infinite;
|
||||
}
|
||||
|
||||
@keyframes crtFlicker {
|
||||
0%, 94%, 100% { opacity: 1; }
|
||||
95% { opacity: 0.88; }
|
||||
97% { opacity: 0.95; }
|
||||
98% { opacity: 0.82; }
|
||||
}
|
||||
|
||||
/* ============================================
|
||||
EASTER EGG — KIWI BIRD SPRITE
|
||||
============================================ */
|
||||
|
||||
.kiwi-bird-stage {
|
||||
position: fixed;
|
||||
bottom: 72px; /* above bottom nav */
|
||||
left: 0;
|
||||
right: 0;
|
||||
height: 72px;
|
||||
pointer-events: none;
|
||||
z-index: 9999;
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
@media (min-width: 769px) {
|
||||
.kiwi-bird-stage {
|
||||
bottom: 0;
|
||||
left: 200px; /* clear the sidebar */
|
||||
}
|
||||
}
|
||||
|
||||
.kiwi-bird {
|
||||
position: absolute;
|
||||
bottom: 8px;
|
||||
width: 64px;
|
||||
height: 64px;
|
||||
will-change: transform;
|
||||
}
|
||||
|
||||
/* Enters from right, walks left */
|
||||
.kiwi-bird.rtl {
|
||||
animation: kiwiWalkRtl 5.5s ease-in-out forwards;
|
||||
}
|
||||
.kiwi-bird.rtl .kiwi-svg {
|
||||
transform: scaleX(1); /* faces left */
|
||||
}
|
||||
|
||||
/* Enters from left, walks right */
|
||||
.kiwi-bird.ltr {
|
||||
animation: kiwiWalkLtr 5.5s ease-in-out forwards;
|
||||
}
|
||||
.kiwi-bird.ltr .kiwi-svg {
|
||||
transform: scaleX(-1); /* faces right */
|
||||
}
|
||||
|
||||
/* Bob on each step */
|
||||
.kiwi-svg {
|
||||
display: block;
|
||||
animation: kiwiBob 0.38s steps(1) infinite;
|
||||
}
|
||||
|
||||
@keyframes kiwiWalkRtl {
|
||||
0% { right: -80px; }
|
||||
15% { right: 35%; } /* enter and slow */
|
||||
40% { right: 35%; } /* pause — sniffing */
|
||||
55% { right: 38%; } /* tiny shuffle */
|
||||
60% { right: 35%; }
|
||||
85% { right: 35%; }
|
||||
100% { right: calc(100% + 80px); } /* exit left */
|
||||
}
|
||||
|
||||
@keyframes kiwiWalkLtr {
|
||||
0% { left: -80px; }
|
||||
15% { left: 35%; }
|
||||
40% { left: 35%; }
|
||||
55% { left: 38%; }
|
||||
60% { left: 35%; }
|
||||
85% { left: 35%; }
|
||||
100% { left: calc(100% + 80px); }
|
||||
}
|
||||
|
||||
@keyframes kiwiBob {
|
||||
0% { transform: translateY(0) scaleX(var(--bird-flip, 1)); }
|
||||
50% { transform: translateY(-4px) scaleX(var(--bird-flip, 1)); }
|
||||
}
|
||||
|
|
|
|||
|
|
@ -18,12 +18,10 @@ dependencies = [
|
|||
"opencv-python>=4.8",
|
||||
"numpy>=1.25",
|
||||
"pyzbar>=0.1.9",
|
||||
"Pillow>=10.0",
|
||||
# HTTP clients
|
||||
# HTTP client
|
||||
"httpx>=0.27",
|
||||
"requests>=2.31",
|
||||
# CircuitForge shared scaffold
|
||||
"circuitforge-core>=0.6.0",
|
||||
"circuitforge-core",
|
||||
]
|
||||
|
||||
[tool.setuptools.packages.find]
|
||||
|
|
|
|||
|
|
@ -1,134 +0,0 @@
|
|||
#!/usr/bin/env python3
|
||||
"""
|
||||
Backfill texture_profile in ingredient_profiles from existing macro data.
|
||||
|
||||
Texture categories and their macro signatures (all values g/100g):
|
||||
fatty - fat > 60 (oils, lard, pure butter)
|
||||
creamy - fat 15-60 (cream, cheese, fatty meats, nut butter)
|
||||
firm - protein > 15, fat < 15 (lean meats, fish, legumes, firm tofu)
|
||||
starchy - carbs > 40, fat < 10 (flour, oats, rice, bread, potatoes)
|
||||
fibrous - fiber > 4, carbs < 40 (brassicas, leafy greens, whole grains)
|
||||
tender - protein 2-15, fat < 10, (soft veg, eggs, soft tofu, cooked beans)
|
||||
carbs < 40
|
||||
liquid - calories < 25, fat < 1, (broth, juice, dilute sauces)
|
||||
protein < 3
|
||||
neutral - fallthrough default
|
||||
|
||||
Rules are applied in priority order: fatty → creamy → firm → starchy →
|
||||
fibrous → tender → liquid → neutral.
|
||||
|
||||
Run:
|
||||
python scripts/backfill_texture_profiles.py [path/to/kiwi.db]
|
||||
|
||||
Or inside the container:
|
||||
docker exec kiwi-cloud-api-1 python /app/kiwi/scripts/backfill_texture_profiles.py
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import sqlite3
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
# Default DB paths to try
|
||||
_DEFAULT_PATHS = [
|
||||
"/devl/kiwi-cloud-data/local-dev/kiwi.db",
|
||||
"/devl/kiwi-data/kiwi.db",
|
||||
]
|
||||
|
||||
BATCH_SIZE = 5_000
|
||||
|
||||
|
||||
def _classify(fat: float, protein: float, carbs: float,
|
||||
fiber: float, calories: float) -> str:
|
||||
# Cap runaway values — data quality issue in some branded entries
|
||||
fat = min(fat or 0.0, 100.0)
|
||||
protein = min(protein or 0.0, 100.0)
|
||||
carbs = min(carbs or 0.0, 100.0)
|
||||
fiber = min(fiber or 0.0, 50.0)
|
||||
calories = min(calories or 0.0, 900.0)
|
||||
|
||||
if fat > 60:
|
||||
return "fatty"
|
||||
if fat > 15:
|
||||
return "creamy"
|
||||
# Starchy before firm: oats/legumes have high protein AND high carbs — carbs win
|
||||
if carbs > 40 and fat < 10:
|
||||
return "starchy"
|
||||
# Firm: lean proteins with low carbs (meats, fish, hard tofu)
|
||||
# Lower protein threshold (>7) catches tofu (9%) and similar plant proteins
|
||||
if protein > 7 and fat < 12 and carbs < 20:
|
||||
return "firm"
|
||||
if fiber > 4 and carbs < 40:
|
||||
return "fibrous"
|
||||
if 2 < protein <= 15 and fat < 10 and carbs < 40:
|
||||
return "tender"
|
||||
if calories < 25 and fat < 1 and protein < 3:
|
||||
return "liquid"
|
||||
return "neutral"
|
||||
|
||||
|
||||
def backfill(db_path: str) -> None:
    """Recompute texture_profile for every row of ingredient_profiles.

    Pages through the table in BATCH_SIZE chunks (LIMIT/OFFSET),
    classifies each row from its stored macros via _classify, and writes
    the label back, committing once per chunk. Prints rolling progress
    and a final category distribution.
    """
    conn = sqlite3.connect(db_path)
    conn.row_factory = sqlite3.Row

    total = conn.execute("SELECT COUNT(*) FROM ingredient_profiles").fetchone()[0]
    print(f"Total rows: {total:,}")

    distribution: dict[str, int] = {}
    done = 0
    offset = 0

    while True:
        chunk = conn.execute(
            """SELECT id, fat_pct, protein_pct, carbs_g_per_100g,
                      fiber_g_per_100g, calories_per_100g
               FROM ingredient_profiles
               LIMIT ? OFFSET ?""",
            (BATCH_SIZE, offset),
        ).fetchall()
        if not chunk:
            break

        updates: list[tuple[str, int]] = []
        for record in chunk:
            label = _classify(
                record["fat_pct"],
                record["protein_pct"],
                record["carbs_g_per_100g"],
                record["fiber_g_per_100g"],
                record["calories_per_100g"],
            )
            distribution[label] = distribution.get(label, 0) + 1
            updates.append((label, record["id"]))

        conn.executemany(
            "UPDATE ingredient_profiles SET texture_profile = ? WHERE id = ?",
            updates,
        )
        conn.commit()

        done += len(updates)
        offset += BATCH_SIZE
        print(f" {done:,} / {total:,} updated...", end="\r")

    print(f"\nDone. {done:,} rows updated.\n")
    print("Texture distribution:")
    # Largest category first.
    for label, count in sorted(distribution.items(), key=lambda kv: -kv[1]):
        share = count / done * 100
        print(f" {label:10s} {count:8,} ({share:.1f}%)")

    conn.close()
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # CLI entry: an explicit argument wins; otherwise probe the default
    # locations in order and use the first DB that exists.
    path = sys.argv[1] if len(sys.argv) > 1 else None
    if path is None:
        for candidate in _DEFAULT_PATHS:
            if Path(candidate).exists():
                path = candidate
                break
        if not path:
            print(f"No DB found. Pass path as argument or create one of: {_DEFAULT_PATHS}")
            sys.exit(1)

    print(f"Backfilling texture profiles in: {path}")
    backfill(path)
|
||||
|
|
@ -1,89 +0,0 @@
|
|||
"""
|
||||
Import FlavorGraph compound->ingredient map into flavor_molecules table.
|
||||
|
||||
FlavorGraph GitHub: https://github.com/lamypark/FlavorGraph
|
||||
Download: git clone https://github.com/lamypark/FlavorGraph /tmp/flavorgraph
|
||||
|
||||
Usage:
|
||||
conda run -n cf python scripts/pipeline/build_flavorgraph_index.py \
|
||||
--db data/kiwi.db \
|
||||
--flavorgraph-dir /tmp/flavorgraph/input
|
||||
"""
|
||||
from __future__ import annotations
|
||||
import argparse
|
||||
import json
|
||||
import sqlite3
|
||||
from collections import defaultdict
|
||||
from pathlib import Path
|
||||
|
||||
import pandas as pd
|
||||
|
||||
|
||||
def parse_ingredient_nodes(
    nodes_path: Path, edges_path: Path
) -> tuple[dict[str, list[str]], dict[str, str]]:
    """Load the FlavorGraph node/edge CSVs.

    Returns a pair of mappings:
      * ingredient name -> list of compound node ids it links to
      * compound node id -> human-readable compound name

    Node names are normalised to lowercase with underscores turned into
    spaces. An edge touching an ingredient on either end contributes the
    opposite endpoint to that ingredient's compound list.
    """
    node_table = pd.read_csv(nodes_path, dtype=str).fillna("")
    edge_table = pd.read_csv(edges_path, dtype=str).fillna("")

    id_to_ingredient: dict[str, str] = {}  # node_id -> ingredient_name
    compound_names: dict[str, str] = {}    # node_id -> compound_name
    for _, node in node_table.iterrows():
        label = node["name"].lower().replace("_", " ").strip()
        if node["node_type"] == "ingredient":
            id_to_ingredient[node["node_id"]] = label
        else:
            compound_names[node["node_id"]] = label

    ingredient_compounds: dict[str, list[str]] = defaultdict(list)
    for _, edge in edge_table.iterrows():
        a, b = edge["id_1"], edge["id_2"]
        if a in id_to_ingredient:
            ingredient_compounds[id_to_ingredient[a]].append(b)
        if b in id_to_ingredient:
            ingredient_compounds[id_to_ingredient[b]].append(a)

    return dict(ingredient_compounds), compound_names
|
||||
|
||||
|
||||
def build(db_path: Path, flavorgraph_dir: Path) -> None:
    """Index FlavorGraph compound data into the SQLite database.

    Two writes happen:
      * ingredient_profiles.flavor_molecule_ids — JSON list of compound ids
        per ingredient (UPDATE only, so rows must already exist).
      * flavor_molecules — one row per compound with its name and the JSON
        list of ingredient names it occurs in (INSERT OR IGNORE, so existing
        rows are never overwritten).
    """
    # FlavorGraph ships dated CSVs; these are the 2019-11-20 release names.
    nodes_path = flavorgraph_dir / "nodes_191120.csv"
    edges_path = flavorgraph_dir / "edges_191120.csv"

    ingredient_map, compound_names = parse_ingredient_nodes(nodes_path, edges_path)

    # Invert ingredient→compounds into compound→ingredients for the
    # flavor_molecules table.
    compound_ingredients: dict[str, list[str]] = defaultdict(list)
    for ingredient, compounds in ingredient_map.items():
        for cid in compounds:
            compound_ingredients[cid].append(ingredient)

    conn = sqlite3.connect(db_path)
    try:
        for ingredient, compounds in ingredient_map.items():
            conn.execute(
                "UPDATE ingredient_profiles SET flavor_molecule_ids = ? WHERE name = ?",
                (json.dumps(compounds), ingredient),
            )

        for cid, ingredients in compound_ingredients.items():
            conn.execute(
                "INSERT OR IGNORE INTO flavor_molecules (compound_id, compound_name, ingredient_names)"
                " VALUES (?, ?, ?)",
                (cid, compound_names.get(cid, cid), json.dumps(ingredients)),
            )

        # Single commit: either the whole index lands or none of it does.
        conn.commit()
    finally:
        conn.close()

    print(f"Indexed {len(ingredient_map)} ingredients, {len(compound_ingredients)} compounds")
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # CLI entry point — see the module docstring for the expected invocation.
    parser = argparse.ArgumentParser()
    parser.add_argument("--db", required=True, type=Path)
    parser.add_argument("--flavorgraph-dir", required=True, type=Path)
    args = parser.parse_args()
    build(args.db, args.flavorgraph_dir)
|
||||
|
|
@ -1,170 +0,0 @@
|
|||
"""
|
||||
Build ingredient_profiles table from USDA FDC (Food Data Central) data.
|
||||
|
||||
Usage:
|
||||
conda run -n job-seeker python scripts/pipeline/build_ingredient_index.py \
|
||||
--db /path/to/kiwi.db \
|
||||
--usda-fdc data/usda_fdc_cleaned.parquet \
|
||||
--usda-branded data/usda_branded.parquet
|
||||
"""
|
||||
from __future__ import annotations
|
||||
import argparse
|
||||
import json
|
||||
import re
|
||||
import sqlite3
|
||||
from pathlib import Path
|
||||
|
||||
import pandas as pd
|
||||
|
||||
|
||||
# ── Element derivation rules (threshold-based) ────────────────────────────

# Each rule is (element_label, predicate over a nutrient-dict row). A row is
# tagged with every label whose predicate fires; thresholds are per-100g.
_ELEMENT_RULES: list[tuple[str, callable]] = [
    ("Richness", lambda r: r.get("fat_pct", 0) > 5.0),
    ("Seasoning", lambda r: r.get("sodium_mg_per_100g", 0) > 200),
    ("Depth", lambda r: r.get("glutamate_mg", 0) > 1.0),
    ("Structure", lambda r: r.get("starch_pct", 0) > 10.0 or r.get("binding_score", 0) >= 2),
    ("Texture", lambda r: r.get("water_activity", 1.0) < 0.6),  # low water = likely crunchy/dry
]

# Name substrings that signal acidity → the "Brightness" element.
_ACID_KEYWORDS = ["vinegar", "lemon", "lime", "citric", "tartaric", "kombucha", "kefir",
                  "yogurt", "buttermilk", "wine", "tomato"]
# Name substrings that signal aromatics → the "Aroma" element.
_AROMA_KEYWORDS = ["garlic", "onion", "herb", "spice", "basil", "oregano", "cumin",
                   "ginger", "cinnamon", "pepper", "chili", "paprika", "thyme", "rosemary",
                   "cilantro", "parsley", "dill", "fennel", "cardamom", "turmeric"]
# Name substrings used to set the is_fermented flag on ingredient profiles.
_FERMENTED_KEYWORDS = ["miso", "soy sauce", "kimchi", "sauerkraut", "kefir", "yogurt",
                       "kombucha", "tempeh", "natto", "vinegar", "nutritional yeast"]
|
||||
|
||||
|
||||
def normalize_name(raw: str) -> str:
    """Normalize an ingredient name for lookup.

    Lowercases, drops "(...)" parentheticals (e.g. "(85% lean)"), cuts
    everything after the first comma (e.g. ", shredded"), and collapses
    whitespace runs to single spaces.
    """
    cleaned = raw.lower().strip()
    for pattern, replacement in ((r"\(.*?\)", ""), (r",.*$", "")):
        cleaned = re.sub(pattern, replacement, cleaned)
    return " ".join(cleaned.split())
|
||||
|
||||
|
||||
def derive_elements(row: dict) -> list[str]:
    """Tag a nutrient row with flavor elements via threshold + keyword rules.

    Threshold rules (_ELEMENT_RULES) fire on macro values; "Brightness" and
    "Aroma" fire on name substrings. Order is preserved, duplicates dropped.
    """
    tagged = [label for label, predicate in _ELEMENT_RULES if predicate(row)]
    lowered = row.get("name", "").lower()
    for keywords, label in ((_ACID_KEYWORDS, "Brightness"), (_AROMA_KEYWORDS, "Aroma")):
        if any(kw in lowered for kw in keywords):
            tagged.append(label)
    return list(dict.fromkeys(tagged))
|
||||
|
||||
|
||||
def derive_binding_score(row: dict) -> int:
    """Score 0–3 for how strongly an ingredient binds a dish together.

    Driven entirely by starch and protein percentages; higher = stronger
    binder (e.g. flour 3, egg 2, cooked rice 1, lettuce 0).
    """
    starch = row.get("starch_pct", 0)
    protein = row.get("protein_pct", 0)
    is_strong = starch > 50 or (protein > 10 and starch > 20)
    is_medium = starch > 20 or protein > 12
    is_weak = starch > 5 or protein > 6
    if is_strong:
        return 3
    if is_medium:
        return 2
    if is_weak:
        return 1
    return 0
|
||||
|
||||
|
||||
def build(db_path: Path, usda_fdc_path: Path, usda_branded_path: Path) -> None:
    """Populate ingredient_profiles from the USDA FDC parquet.

    The FDC file supplies macros (fat/protein/carbs/fiber/sodium/water/energy);
    the branded file is used only to build a name→sugar lookup, since FDC
    lacks sugar data. Rows are upserted: new names insert a full profile,
    existing names only refresh the four macro columns added in a later
    migration.
    """
    conn = sqlite3.connect(db_path)
    conn.execute("PRAGMA foreign_keys=ON")

    df_fdc = pd.read_parquet(usda_fdc_path)
    df_branded = pd.read_parquet(usda_branded_path)

    # Rename FDC columns to the unified schema (skip any that are absent).
    fdc_col_map = {
        "food_item": "name",
        "Total lipid (fat)": "fat_pct",
        "Protein": "protein_pct",
        "Carbohydrate, by difference": "carb_pct",
        "Fiber, total dietary": "fiber_pct",
        "Sodium, Na": "sodium_mg_per_100g",
        "Water": "moisture_pct",
        "Energy": "calories_per_100g",
    }
    df = df_fdc.rename(columns={k: v for k, v in fdc_col_map.items() if k in df_fdc.columns})

    # Build a sugar lookup from the branded parquet (keyed by normalized name).
    # usda_branded has SUGARS, TOTAL (G) for processed/packaged foods.
    branded_col_map = {
        "FOOD_NAME": "name",
        "SUGARS, TOTAL (G)": "sugar_g_per_100g",
    }
    # Rename, then keep only the mapped columns that actually materialized.
    df_branded_slim = df_branded.rename(
        columns={k: v for k, v in branded_col_map.items() if k in df_branded.columns}
    )[list(set(branded_col_map.values()) & set(df_branded.rename(columns=branded_col_map).columns))]
    sugar_lookup: dict[str, float] = {}
    for _, brow in df_branded_slim.iterrows():
        bname = normalize_name(str(brow.get("name", "")))
        val = brow.get("sugar_g_per_100g")
        try:
            fval = float(val)  # type: ignore[arg-type]
            # First positive value per name wins; zeros are treated as missing.
            if fval > 0 and bname not in sugar_lookup:
                sugar_lookup[bname] = fval
        except (TypeError, ValueError):
            pass

    inserted = 0
    for _, row in df.iterrows():
        name = normalize_name(str(row.get("name", "")))
        if not name or len(name) < 2:
            continue
        # Assemble the unified per-100g row; FDC has no starch data, so
        # starch_pct is fixed at 0 and only influences derived scores.
        r = {
            "name": name,
            "fat_pct": float(row.get("fat_pct") or 0),
            "protein_pct": float(row.get("protein_pct") or 0),
            "moisture_pct": float(row.get("moisture_pct") or 0),
            "sodium_mg_per_100g": float(row.get("sodium_mg_per_100g") or 0),
            "starch_pct": 0.0,
            "carbs_g_per_100g": float(row.get("carb_pct") or 0),
            "fiber_g_per_100g": float(row.get("fiber_pct") or 0),
            "calories_per_100g": float(row.get("calories_per_100g") or 0),
            "sugar_g_per_100g": sugar_lookup.get(name, 0.0),
        }
        r["binding_score"] = derive_binding_score(r)
        r["elements"] = derive_elements(r)
        r["is_fermented"] = int(any(k in name for k in _FERMENTED_KEYWORDS))

        try:
            # Insert new profile or update macro columns on existing one.
            conn.execute("""
                INSERT INTO ingredient_profiles
                    (name, elements, fat_pct, fat_saturated_pct, moisture_pct,
                     protein_pct, starch_pct, binding_score, sodium_mg_per_100g,
                     is_fermented,
                     carbs_g_per_100g, fiber_g_per_100g, calories_per_100g, sugar_g_per_100g,
                     source)
                VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)
                ON CONFLICT(name) DO UPDATE SET
                    carbs_g_per_100g = excluded.carbs_g_per_100g,
                    fiber_g_per_100g = excluded.fiber_g_per_100g,
                    calories_per_100g = excluded.calories_per_100g,
                    sugar_g_per_100g = excluded.sugar_g_per_100g
            """, (
                r["name"], json.dumps(r["elements"]),
                r["fat_pct"], 0.0, r["moisture_pct"],
                r["protein_pct"], r["starch_pct"], r["binding_score"],
                r["sodium_mg_per_100g"], r["is_fermented"],
                r["carbs_g_per_100g"], r["fiber_g_per_100g"],
                r["calories_per_100g"], r["sugar_g_per_100g"],
                "usda_fdc",
            ))
            inserted += 1
        except Exception:
            # NOTE(review): any per-row failure is silently skipped; a bad
            # schema would be invisible. Consider logging before continuing.
            continue

    conn.commit()
    conn.close()
    print(f"Inserted {inserted} ingredient profiles from USDA FDC")
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # CLI entry point — see the module docstring for the expected invocation.
    parser = argparse.ArgumentParser()
    parser.add_argument("--db", required=True, type=Path)
    parser.add_argument("--usda-fdc", required=True, type=Path)
    parser.add_argument("--usda-branded", required=True, type=Path)
    args = parser.parse_args()
    build(args.db, args.usda_fdc, args.usda_branded)
|
||||
|
|
@ -1,225 +0,0 @@
|
|||
"""
|
||||
Import food.com recipe corpus into recipes table.
|
||||
|
||||
Usage:
|
||||
conda run -n job-seeker python scripts/pipeline/build_recipe_index.py \
|
||||
--db /path/to/kiwi.db \
|
||||
--recipes data/recipes_foodcom.parquet \
|
||||
--batch-size 10000
|
||||
"""
|
||||
from __future__ import annotations
|
||||
import argparse
|
||||
import json
|
||||
import re
|
||||
import sqlite3
|
||||
from pathlib import Path
|
||||
|
||||
import pandas as pd
|
||||
|
||||
# Leading "2 cups", "1 tbsp", etc. — a quantity followed by a known unit word.
# The character class covers ASCII digits plus unicode vulgar fractions
# (¼ ½ ¾ ⅓ ⅔) so strings like "1½ cups" strip cleanly.
_MEASURE_PATTERN = re.compile(
    r"^\d[\d\s/\u00bc\u00bd\u00be\u2153\u2154]*\s*(cup|tbsp|tsp|oz|lb|g|kg|ml|l|clove|slice|piece|can|pkg|package|bunch|head|stalk|sprig|pinch|dash|to taste|as needed)s?\b",
    re.IGNORECASE,
)
# Bare leading quantity with no recognized unit ("3 eggs" → "eggs").
_LEAD_NUMBER = re.compile(r"^\d[\d\s/\u00bc\u00bd\u00be\u2153\u2154]*\s*")
# Trailing hedge phrases that are not part of the ingredient name.
_TRAILING_QUALIFIER = re.compile(
    r"\s*(to taste|as needed|or more|or less|optional|if desired|if needed)\s*$",
    re.IGNORECASE,
)
# One double-quoted item inside an R character vector literal: c("a", "b").
_QUOTED = re.compile(r'"([^"]*)"')
|
||||
|
||||
def _float_or_none(val: object) -> float | None:
|
||||
"""Return float > 0, or None for missing / zero values."""
|
||||
try:
|
||||
v = float(val) # type: ignore[arg-type]
|
||||
return v if v > 0 else None
|
||||
except (TypeError, ValueError):
|
||||
return None
|
||||
|
||||
|
||||
def _safe_list(val: object) -> list:
|
||||
"""Convert a value to a list, handling NaN/float/None gracefully."""
|
||||
if val is None:
|
||||
return []
|
||||
try:
|
||||
import math
|
||||
if isinstance(val, float) and math.isnan(val):
|
||||
return []
|
||||
except Exception:
|
||||
pass
|
||||
if isinstance(val, list):
|
||||
return val
|
||||
return []
|
||||
|
||||
|
||||
def _parse_r_vector(s: str) -> list[str]:
|
||||
"""Parse R character vector format: c("a", "b") -> ["a", "b"]."""
|
||||
return _QUOTED.findall(s)
|
||||
|
||||
|
||||
def extract_ingredient_names(raw_list: list[str]) -> list[str]:
    """Strip quantities, units, and qualifiers from ingredient strings.

    Each entry is lowercased, then quantity/unit prefixes, parentheticals,
    post-comma descriptors, and trailing hedges ("to taste", "optional")
    are removed. Entries that reduce to one character or less are dropped.
    """
    names: list[str] = []
    for entry in raw_list:
        text = entry.lower().strip()
        text = _MEASURE_PATTERN.sub("", text)
        text = _LEAD_NUMBER.sub("", text)
        text = re.sub(r"\(.*?\)", "", text)
        text = re.sub(r",.*$", "", text)
        text = _TRAILING_QUALIFIER.sub("", text)
        text = text.strip(" -.,")
        if len(text) > 1:
            names.append(text)
    return names
|
||||
|
||||
|
||||
def compute_element_coverage(profiles: list[dict]) -> dict[str, float]:
    """Fraction of *profiles* tagged with each element, rounded to 3 places.

    Returns {} for an empty profile list.
    """
    if not profiles:
        return {}
    tally: dict[str, int] = {}
    for profile in profiles:
        for element in profile.get("elements", []):
            tally[element] = tally.get(element, 0) + 1
    denom = len(profiles)
    return {element: round(count / denom, 3) for element, count in tally.items()}
|
||||
|
||||
|
||||
def _parse_allrecipes_text(text: str) -> tuple[str, list[str], list[str]]:
|
||||
"""Parse corbt/all-recipes text format into (title, ingredients, directions)."""
|
||||
lines = text.strip().split('\n')
|
||||
title = lines[0].strip()
|
||||
ingredients: list[str] = []
|
||||
directions: list[str] = []
|
||||
section: str | None = None
|
||||
for line in lines[1:]:
|
||||
stripped = line.strip()
|
||||
if stripped.lower() == 'ingredients:':
|
||||
section = 'ingredients'
|
||||
elif stripped.lower() in ('directions:', 'steps:', 'instructions:'):
|
||||
section = 'directions'
|
||||
elif stripped.startswith('- ') and section == 'ingredients':
|
||||
ingredients.append(stripped[2:].strip())
|
||||
elif stripped.startswith('- ') and section == 'directions':
|
||||
directions.append(stripped[2:].strip())
|
||||
return title, ingredients, directions
|
||||
|
||||
|
||||
def _row_to_fields(row: pd.Series) -> tuple[str, str, list[str], list[str]]:
    """Extract (external_id, title, raw_ingredients, directions) from a parquet row.

    Handles both corbt/all-recipes (single 'input' text column) and the
    food.com columnar format (RecipeId, Name, RecipeIngredientParts, ...).
    """
    import zlib  # local import: only needed for the synthetic all-recipes id

    if "input" in row.index and pd.notna(row.get("input")):
        title, raw_ingredients, directions = _parse_allrecipes_text(str(row["input"]))
        # BUG FIX: builtin hash() on str is salted per process
        # (PYTHONHASHSEED), so the synthetic id changed on every run and
        # defeated INSERT OR REPLACE dedup across re-ingests. CRC32 is
        # deterministic and keeps the same 32-bit width. (Existing rows keyed
        # by the old hash will be re-inserted once under the stable id.)
        external_id = f"ar_{zlib.crc32(title.encode('utf-8'))}"
    else:
        raw_parts = row.get("RecipeIngredientParts", [])
        if isinstance(raw_parts, str):
            # food.com stores lists as R vectors: c("a", "b"); fall back to
            # treating the whole string as one ingredient if parsing fails.
            parsed = _parse_r_vector(raw_parts)
            raw_parts = parsed if parsed else [raw_parts]
        raw_ingredients = [str(i) for i in (_safe_list(raw_parts))]

        raw_dirs = row.get("RecipeInstructions", [])
        if isinstance(raw_dirs, str):
            parsed_dirs = _parse_r_vector(raw_dirs)
            directions = parsed_dirs if parsed_dirs else [raw_dirs]
        else:
            directions = [str(d) for d in (_safe_list(raw_dirs))]

        title = str(row.get("Name", ""))[:500]  # schema caps title length
        external_id = str(row.get("RecipeId", ""))

    return external_id, title, raw_ingredients, directions
|
||||
|
||||
|
||||
def build(db_path: Path, recipes_path: Path, batch_size: int = 10000) -> None:
    """Import a recipe parquet into the recipes table in batches.

    For each row: extract fields (both supported corpus formats), normalize
    ingredient names, compute element coverage from pre-loaded ingredient
    profiles, and INSERT OR REPLACE keyed on external_id. Progress is
    printed every *batch_size* rows.
    """
    conn = sqlite3.connect(db_path)
    try:
        conn.execute("PRAGMA journal_mode=WAL")

        # Pre-load ingredient element profiles to avoid N+1 queries
        profile_index: dict[str, list[str]] = {}
        for row in conn.execute("SELECT name, elements FROM ingredient_profiles"):
            try:
                profile_index[row[0]] = json.loads(row[1])
            except Exception:
                # Malformed JSON in a profile row: skip it, coverage just
                # won't count that ingredient.
                pass

        df = pd.read_parquet(recipes_path)
        inserted = 0
        batch = []

        for _, row in df.iterrows():
            external_id, title, raw_ingredients, directions = _row_to_fields(row)
            if not title:
                continue
            ingredient_names = extract_ingredient_names(raw_ingredients)

            # Coverage is computed only over ingredients we have profiles for.
            profiles = []
            for name in ingredient_names:
                if name in profile_index:
                    profiles.append({"elements": profile_index[name]})
            coverage = compute_element_coverage(profiles)

            batch.append((
                external_id,
                title,
                json.dumps(raw_ingredients),
                json.dumps(ingredient_names),
                json.dumps(directions),
                str(row.get("RecipeCategory", "") or ""),
                json.dumps(_safe_list(row.get("Keywords"))),
                _float_or_none(row.get("Calories")),
                _float_or_none(row.get("FatContent")),
                _float_or_none(row.get("ProteinContent")),
                _float_or_none(row.get("SodiumContent")),
                json.dumps(coverage),
                # New macro columns (migration 014)
                _float_or_none(row.get("SugarContent")),
                _float_or_none(row.get("CarbohydrateContent")),
                _float_or_none(row.get("FiberContent")),
                _float_or_none(row.get("RecipeServings")),
                0,  # nutrition_estimated — food.com direct data is authoritative
            ))

            # NOTE(review): this flush block is duplicated below for the tail
            # batch — a candidate for extraction into a helper.
            if len(batch) >= batch_size:
                before = conn.total_changes
                conn.executemany("""
                    INSERT OR REPLACE INTO recipes
                        (external_id, title, ingredients, ingredient_names, directions,
                         category, keywords, calories, fat_g, protein_g, sodium_mg,
                         element_coverage,
                         sugar_g, carbs_g, fiber_g, servings, nutrition_estimated)
                    VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)
                """, batch)
                conn.commit()
                inserted += conn.total_changes - before
                print(f"  {inserted} recipes inserted...")
                batch = []

        # Flush the final partial batch.
        if batch:
            before = conn.total_changes
            conn.executemany("""
                INSERT OR REPLACE INTO recipes
                    (external_id, title, ingredients, ingredient_names, directions,
                     category, keywords, calories, fat_g, protein_g, sodium_mg,
                     element_coverage,
                     sugar_g, carbs_g, fiber_g, servings, nutrition_estimated)
                VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)
            """, batch)
            conn.commit()
            inserted += conn.total_changes - before

        conn.commit()
    finally:
        conn.close()
    print(f"Total: {inserted} recipes inserted")
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # CLI entry point — see the module docstring for the expected invocation.
    parser = argparse.ArgumentParser()
    parser.add_argument("--db", required=True, type=Path)
    parser.add_argument("--recipes", required=True, type=Path)
    parser.add_argument("--batch-size", type=int, default=10000)
    args = parser.parse_args()
    build(args.db, args.recipes, args.batch_size)
|
||||
|
|
@ -1,134 +0,0 @@
|
|||
"""
|
||||
Derive substitution pairs by diffing lishuyang/recipepairs.
|
||||
GPL-3.0 source -- derived annotations only, raw pairs not shipped.
|
||||
|
||||
Usage:
|
||||
PYTHONPATH=/path/to/kiwi conda run -n cf python scripts/pipeline/derive_substitutions.py \
|
||||
--db /path/to/kiwi.db \
|
||||
--recipepairs data/pipeline/recipepairs.parquet \
|
||||
--recipepairs-recipes data/pipeline/recipepairs_recipes.parquet
|
||||
"""
|
||||
from __future__ import annotations
|
||||
import argparse
|
||||
import json
|
||||
import re
|
||||
import sqlite3
|
||||
from collections import defaultdict
|
||||
from pathlib import Path
|
||||
|
||||
import pandas as pd
|
||||
|
||||
|
||||
def diff_ingredients(base: list[str], target: list[str]) -> tuple[list[str], list[str]]:
    """Set-diff two ingredient lists -> (removed_from_base, added_in_target)."""
    removed_items = set(base).difference(target)
    added_items = set(target).difference(base)
    return list(removed_items), list(added_items)
|
||||
|
||||
|
||||
def _parse_categories(val: object) -> list[str]:
|
||||
"""Parse categories field which may be a list, str-repr list, or bare string."""
|
||||
if isinstance(val, list):
|
||||
return [str(v) for v in val]
|
||||
if isinstance(val, str):
|
||||
val = val.strip()
|
||||
if val.startswith("["):
|
||||
# parse list repr: ['a', 'b'] — use json after converting single quotes
|
||||
try:
|
||||
fixed = re.sub(r"'", '"', val)
|
||||
return json.loads(fixed)
|
||||
except Exception:
|
||||
pass
|
||||
return [val] if val else []
|
||||
return []
|
||||
|
||||
|
||||
def build(db_path: Path, recipepairs_path: Path, recipes_path: Path) -> None:
    """Derive substitution pairs by diffing recipepairs recipe variants.

    A pair counts as a substitution only when exactly one ingredient was
    removed and exactly one added between base and target. Pairs seen fewer
    than 3 times are discarded; survivors are written to substitution_pairs
    with macro deltas looked up from ingredient_profiles.
    """
    conn = sqlite3.connect(db_path)
    try:
        # Load ingredient lists from the bundled recipepairs recipe corpus.
        # This is GPL-3.0 data — we only use it for diffing; raw data is not persisted.
        print("Loading recipe ingredient index from recipepairs corpus...")
        recipes_df = pd.read_parquet(recipes_path, columns=["id", "ingredients"])
        recipe_ingredients: dict[str, list[str]] = {}
        for _, r in recipes_df.iterrows():
            ings = r["ingredients"]
            # Accept any non-string iterable (list / numpy array); skip scalars.
            if ings is not None and hasattr(ings, "__iter__") and not isinstance(ings, str):
                recipe_ingredients[str(int(r["id"]))] = [str(i) for i in ings]
        print(f"  {len(recipe_ingredients)} recipes loaded")

        pairs_df = pd.read_parquet(recipepairs_path)
        # (original, substitute, constraint) -> occurrence counter
        pair_counts: dict[tuple, dict] = defaultdict(lambda: {"count": 0})

        print("Diffing recipe pairs...")
        for _, row in pairs_df.iterrows():
            base_id = str(int(row["base"]))
            target_id = str(int(row["target"]))
            base_ings = recipe_ingredients.get(base_id, [])
            target_ings = recipe_ingredients.get(target_id, [])
            if not base_ings or not target_ings:
                continue

            removed, added = diff_ingredients(base_ings, target_ings)
            # Only a clean 1-for-1 swap is an unambiguous substitution.
            if len(removed) != 1 or len(added) != 1:
                continue

            original = removed[0]
            substitute = added[0]
            constraints = _parse_categories(row.get("categories", []))
            if not constraints:
                continue
            for constraint in constraints:
                key = (original, substitute, constraint)
                pair_counts[key]["count"] += 1

        def get_profile(name: str) -> dict:
            # Macro lookup for delta computation; missing profiles read as 0s.
            row = conn.execute(
                "SELECT fat_pct, moisture_pct, glutamate_mg, protein_pct "
                "FROM ingredient_profiles WHERE name = ?", (name,)
            ).fetchone()
            if row:
                return {"fat": row[0] or 0, "moisture": row[1] or 0,
                        "glutamate": row[2] or 0, "protein": row[3] or 0}
            return {"fat": 0, "moisture": 0, "glutamate": 0, "protein": 0}

        print("Writing substitution pairs...")
        inserted = 0
        for (original, substitute, constraint), data in pair_counts.items():
            if data["count"] < 3:  # noise floor: require 3+ observations
                continue
            p_orig = get_profile(original)
            p_sub = get_profile(substitute)
            conn.execute("""
                INSERT OR REPLACE INTO substitution_pairs
                    (original_name, substitute_name, constraint_label,
                     fat_delta, moisture_delta, glutamate_delta, protein_delta,
                     occurrence_count, source)
                VALUES (?,?,?,?,?,?,?,?,?)
            """, (
                original, substitute, constraint,
                round(p_sub["fat"] - p_orig["fat"], 2),
                round(p_sub["moisture"] - p_orig["moisture"], 2),
                round(p_sub["glutamate"] - p_orig["glutamate"], 2),
                round(p_sub["protein"] - p_orig["protein"], 2),
                data["count"], "derived",
            ))
            inserted += 1

        conn.commit()
    finally:
        conn.close()
    print(f"Inserted {inserted} substitution pairs (min 3 occurrences)")
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # CLI entry point — see the module docstring for the expected invocation.
    parser = argparse.ArgumentParser()
    parser.add_argument("--db", required=True, type=Path)
    parser.add_argument("--recipepairs", required=True, type=Path,
                        help="pairs.parquet from lishuyang/recipepairs")
    parser.add_argument("--recipepairs-recipes", required=True, type=Path,
                        dest="recipepairs_recipes",
                        help="recipes.parquet from lishuyang/recipepairs (ingredient lookup)")
    args = parser.parse_args()
    build(args.db, args.recipepairs, args.recipepairs_recipes)
|
||||
|
|
@ -1,76 +0,0 @@
|
|||
"""
|
||||
Download recipe engine datasets from HuggingFace.
|
||||
|
||||
Usage:
|
||||
conda run -n cf python scripts/pipeline/download_datasets.py --data-dir data/pipeline
|
||||
|
||||
Downloads:
|
||||
- corbt/all-recipes (no license) → data/pipeline/recipes_allrecipes.parquet [2.1M recipes]
|
||||
- omid5/usda-fdc-foods-cleaned (CC0) → data/pipeline/usda_fdc_cleaned.parquet
|
||||
- jacktol/usda-branded-food-data (MIT) → data/pipeline/usda_branded.parquet
|
||||
- lishuyang/recipepairs (GPL-3.0 ⚠) → data/pipeline/recipepairs.parquet [derive only, don't ship]
|
||||
"""
|
||||
from __future__ import annotations
|
||||
import argparse
|
||||
import os
|
||||
import shutil
|
||||
from pathlib import Path
|
||||
|
||||
from datasets import load_dataset
|
||||
from huggingface_hub import hf_hub_download
|
||||
|
||||
|
||||
# Standard HuggingFace datasets: (hf_path, split, output_filename)
# These have a dataset builder, so a split can be materialized to parquet.
HF_DATASETS = [
    ("corbt/all-recipes", "train", "recipes_allrecipes.parquet"),
    ("omid5/usda-fdc-foods-cleaned", "train", "usda_fdc_cleaned.parquet"),
    ("jacktol/usda-branded-food-data","train", "usda_branded.parquet"),
]

# Datasets that expose raw parquet files directly (no HF dataset builder)
HF_PARQUET_FILES = [
    # (repo_id, repo_filename, output_filename)
    # lishuyang/recipepairs: GPL-3.0 ⚠ — derive only, don't ship
    ("lishuyang/recipepairs", "pairs.parquet", "recipepairs.parquet"),
]
|
||||
|
||||
|
||||
def download_all(data_dir: Path) -> None:
    """Download every configured dataset into *data_dir* (idempotent).

    Outputs that already exist are skipped, so the script can be re-run
    after a partial failure without re-downloading completed datasets.
    """
    data_dir.mkdir(parents=True, exist_ok=True)

    # Datasets with a HF builder: load the split, materialize it as parquet.
    for hf_path, split, filename in HF_DATASETS:
        out = data_dir / filename
        if out.exists():
            # FIX: the old f-string had no placeholder and printed the
            # literal "skip (unknown)" — name the file so reruns are auditable.
            print(f"  skip {out.name} (already exists)")
            continue
        print(f"  downloading {hf_path} ...")
        ds = load_dataset(hf_path, split=split)
        ds.to_parquet(str(out))
        print(f"  saved → {out}")

    # Repos exposing raw parquet files: fetch into the hub cache, then copy.
    for repo_id, repo_file, filename in HF_PARQUET_FILES:
        out = data_dir / filename
        if out.exists():
            print(f"  skip {out.name} (already exists)")
            continue
        print(f"  downloading {repo_id}/{repo_file} ...")
        cached = hf_hub_download(repo_id=repo_id, filename=repo_file, repo_type="dataset")
        shutil.copy2(cached, out)
        print(f"  saved → {out}")
|
||||
|
||||
|
||||
# Default output directory; override with the KIWI_PIPELINE_DATA_DIR env var
# (e.g. to keep large parquets on a separate drive).
_DEFAULT_DATA_DIR = Path(
    os.environ.get("KIWI_PIPELINE_DATA_DIR", "data/pipeline")
)


if __name__ == "__main__":
    # CLI entry point — see the module docstring for the expected invocation.
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--data-dir",
        type=Path,
        default=_DEFAULT_DATA_DIR,
        help="Directory for downloaded parquets (default: $KIWI_PIPELINE_DATA_DIR or data/pipeline)",
    )
    args = parser.parse_args()
    download_all(args.data_dir)
|
||||
|
|
@ -1,109 +0,0 @@
|
|||
"""
|
||||
Estimate macro nutrition for recipes that have no direct data.
|
||||
|
||||
For each recipe where sugar_g / carbs_g / fiber_g / calories are NULL,
|
||||
look up the matched ingredient_profiles and average their per-100g values,
|
||||
then scale by a rough 150g-per-ingredient portion assumption.
|
||||
|
||||
Mark such rows with nutrition_estimated=1 so the UI can display a disclaimer.
|
||||
Recipes with food.com direct data (nutrition_estimated=0 and values set) are untouched.
|
||||
|
||||
Usage:
|
||||
conda run -n job-seeker python scripts/pipeline/estimate_recipe_nutrition.py \
|
||||
--db /path/to/kiwi.db
|
||||
"""
|
||||
from __future__ import annotations
|
||||
import argparse
|
||||
import json
|
||||
import sqlite3
|
||||
from pathlib import Path
|
||||
|
||||
# Rough grams per ingredient when no quantity data is available.
# NOTE(review): one flat portion over-counts spices and under-counts staples;
# acceptable for a coarse estimate flagged with nutrition_estimated=1.
_GRAMS_PER_INGREDIENT = 150.0
|
||||
|
||||
|
||||
def estimate(db_path: Path) -> None:
    """Back-fill estimated macros for recipes lacking direct nutrition data.

    For every recipe where sugar_g, carbs_g, and fiber_g are all NULL,
    average the per-100g macros of its matched ingredient profiles, scale by
    the flat _GRAMS_PER_INGREDIENT portion, and write the totals with
    nutrition_estimated=1. Recipes with any direct value are untouched.
    """
    conn = sqlite3.connect(db_path)
    conn.execute("PRAGMA journal_mode=WAL")

    # Load ingredient_profiles macro data into memory for fast lookup.
    profile_macros: dict[str, dict[str, float]] = {}
    for row in conn.execute(
        "SELECT name, calories_per_100g, carbs_g_per_100g, fiber_g_per_100g, sugar_g_per_100g "
        "FROM ingredient_profiles"
    ):
        name, cal, carbs, fiber, sugar = row
        if name:
            profile_macros[name] = {
                "calories": float(cal or 0),
                "carbs": float(carbs or 0),
                "fiber": float(fiber or 0),
                "sugar": float(sugar or 0),
            }

    # Select recipes with no direct nutrition data.
    rows = conn.execute(
        "SELECT id, ingredient_names FROM recipes "
        "WHERE sugar_g IS NULL AND carbs_g IS NULL AND fiber_g IS NULL"
    ).fetchall()

    updated = 0
    batch: list[tuple] = []

    for recipe_id, ingredient_names_json in rows:
        try:
            names: list[str] = json.loads(ingredient_names_json or "[]")
        except Exception:
            names = []

        matched = [profile_macros[n] for n in names if n in profile_macros]
        if not matched:
            continue

        # Average per-100g macros across matched ingredients,
        # then multiply by assumed portion weight per ingredient.
        n = len(matched)
        portion_factor = _GRAMS_PER_INGREDIENT / 100.0

        # NOTE(review): "/ n ... * n" cancels algebraically — each total is
        # simply sum(...) * portion_factor. Left as-is to keep float results
        # byte-identical; simplify on the next functional change.
        total_cal = sum(m["calories"] for m in matched) / n * portion_factor * n
        total_carbs = sum(m["carbs"] for m in matched) / n * portion_factor * n
        total_fiber = sum(m["fiber"] for m in matched) / n * portion_factor * n
        total_sugar = sum(m["sugar"] for m in matched) / n * portion_factor * n

        batch.append((
            # "or None" maps an estimate of exactly 0.0 back to NULL, so a
            # zero estimate is stored as "unknown" rather than "zero".
            round(total_cal, 1) or None,
            round(total_carbs, 2) or None,
            round(total_fiber, 2) or None,
            round(total_sugar, 2) or None,
            recipe_id,
        ))

        if len(batch) >= 5000:
            conn.executemany(
                "UPDATE recipes SET calories=?, carbs_g=?, fiber_g=?, sugar_g=?, "
                "nutrition_estimated=1 WHERE id=?",
                batch,
            )
            conn.commit()
            updated += len(batch)
            print(f"  {updated} recipes estimated...")
            batch = []

    # Flush the final partial batch.
    if batch:
        conn.executemany(
            "UPDATE recipes SET calories=?, carbs_g=?, fiber_g=?, sugar_g=?, "
            "nutrition_estimated=1 WHERE id=?",
            batch,
        )
        conn.commit()
        updated += len(batch)

    conn.close()
    print(f"Total: {updated} recipes received estimated nutrition")
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # CLI entry point — see the module docstring for the expected invocation.
    parser = argparse.ArgumentParser()
    parser.add_argument("--db", required=True, type=Path)
    args = parser.parse_args()
    estimate(args.db)
||||
estimate(args.db)
|
||||
|
|
@ -1,111 +0,0 @@
|
|||
"""Tests for the /feedback endpoints."""
|
||||
from __future__ import annotations
|
||||
|
||||
from unittest.mock import MagicMock, patch
|
||||
|
||||
import pytest
|
||||
from fastapi.testclient import TestClient
|
||||
|
||||
from app.main import app
|
||||
|
||||
client = TestClient(app)
|
||||
|
||||
|
||||
# ── /feedback/status ──────────────────────────────────────────────────────────
|
||||
|
||||
def test_status_disabled_when_no_token(monkeypatch):
    """/feedback/status reports enabled=False when FORGEJO_API_TOKEN is unset."""
    monkeypatch.delenv("FORGEJO_API_TOKEN", raising=False)
    monkeypatch.setattr("app.core.config.settings.DEMO_MODE", False)
    res = client.get("/api/v1/feedback/status")
    assert res.status_code == 200
    assert res.json() == {"enabled": False}
|
||||
|
||||
|
||||
def test_status_enabled_when_token_set(monkeypatch):
    """/feedback/status reports enabled=True when a token is set outside demo mode."""
    monkeypatch.setenv("FORGEJO_API_TOKEN", "test-token")
    monkeypatch.setattr("app.core.config.settings.DEMO_MODE", False)
    res = client.get("/api/v1/feedback/status")
    assert res.status_code == 200
    assert res.json() == {"enabled": True}
|
||||
|
||||
|
||||
def test_status_disabled_in_demo_mode(monkeypatch):
    """Demo mode disables feedback even when a valid token is configured."""
    monkeypatch.setenv("FORGEJO_API_TOKEN", "test-token")
    monkeypatch.setattr("app.core.config.settings.DEMO_MODE", True)
    res = client.get("/api/v1/feedback/status")
    assert res.status_code == 200
    assert res.json() == {"enabled": False}
|
||||
|
||||
|
||||
# ── POST /feedback ────────────────────────────────────────────────────────────
|
||||
|
||||
def test_submit_returns_503_when_no_token(monkeypatch):
    """POST /feedback returns 503 Service Unavailable when no token is configured."""
    monkeypatch.delenv("FORGEJO_API_TOKEN", raising=False)
    res = client.post("/api/v1/feedback", json={
        "title": "Test", "description": "desc", "type": "bug",
    })
    assert res.status_code == 503
|
||||
|
||||
|
||||
def test_submit_returns_403_in_demo_mode(monkeypatch):
    """POST /feedback is forbidden (403) in demo mode even with a token set."""
    monkeypatch.setenv("FORGEJO_API_TOKEN", "test-token")
    monkeypatch.setattr("app.core.config.settings.DEMO_MODE", True)
    res = client.post("/api/v1/feedback", json={
        "title": "Test", "description": "desc", "type": "bug",
    })
    assert res.status_code == 403
|
||||
|
||||
|
||||
def test_submit_creates_issue(monkeypatch):
    """Happy path: POST /feedback creates a Forgejo issue and echoes number/URL."""
    monkeypatch.setenv("FORGEJO_API_TOKEN", "test-token")
    monkeypatch.setenv("FORGEJO_REPO", "Circuit-Forge/kiwi")
    monkeypatch.setattr("app.core.config.settings.DEMO_MODE", False)

    # Mock the two Forgejo HTTP calls: label fetch + issue create
    label_response = MagicMock()
    label_response.ok = True
    label_response.json.return_value = [
        {"id": 1, "name": "beta-feedback"},
        {"id": 2, "name": "needs-triage"},
        {"id": 3, "name": "bug"},
    ]

    issue_response = MagicMock()
    issue_response.ok = True
    issue_response.json.return_value = {"number": 42, "html_url": "https://example.com/issues/42"}

    with patch("app.api.endpoints.feedback.requests.get", return_value=label_response), \
         patch("app.api.endpoints.feedback.requests.post", return_value=issue_response):
        res = client.post("/api/v1/feedback", json={
            "title": "Something broke",
            "description": "It broke when I tapped X",
            "type": "bug",
            "repro": "1. Open app\n2. Tap X",
            "tab": "pantry",
        })

    assert res.status_code == 200
    data = res.json()
    assert data["issue_number"] == 42
    assert data["issue_url"] == "https://example.com/issues/42"
|
||||
|
||||
|
||||
def test_submit_returns_502_on_forgejo_error(monkeypatch):
|
||||
monkeypatch.setenv("FORGEJO_API_TOKEN", "test-token")
|
||||
monkeypatch.setattr("app.core.config.settings.DEMO_MODE", False)
|
||||
|
||||
label_response = MagicMock()
|
||||
label_response.ok = True
|
||||
label_response.json.return_value = []
|
||||
|
||||
bad_response = MagicMock()
|
||||
bad_response.ok = False
|
||||
bad_response.text = "forbidden"
|
||||
|
||||
with patch("app.api.endpoints.feedback.requests.get", return_value=label_response), \
|
||||
patch("app.api.endpoints.feedback.requests.post", return_value=bad_response):
|
||||
res = client.post("/api/v1/feedback", json={
|
||||
"title": "Oops", "description": "desc", "type": "other",
|
||||
})
|
||||
|
||||
assert res.status_code == 502
|
||||
|
|
@ -1,78 +0,0 @@
|
|||
import pytest
|
||||
from fastapi.testclient import TestClient
|
||||
from unittest.mock import MagicMock
|
||||
|
||||
from app.main import app
|
||||
from app.cloud_session import get_session
|
||||
from app.db.session import get_store
|
||||
|
||||
client = TestClient(app)
|
||||
|
||||
|
||||
def _make_session(tier: str = "free", has_byok: bool = False) -> MagicMock:
|
||||
mock = MagicMock()
|
||||
mock.tier = tier
|
||||
mock.has_byok = has_byok
|
||||
return mock
|
||||
|
||||
|
||||
def _make_store() -> MagicMock:
|
||||
mock = MagicMock()
|
||||
mock.search_recipes_by_ingredients.return_value = [
|
||||
{
|
||||
"id": 1,
|
||||
"title": "Butter Pasta",
|
||||
"ingredient_names": ["butter", "pasta"],
|
||||
"element_coverage": {"Richness": 0.5},
|
||||
"match_count": 2,
|
||||
"directions": ["mix and heat"],
|
||||
}
|
||||
]
|
||||
mock.check_and_increment_rate_limit.return_value = (True, 1)
|
||||
return mock
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
def override_deps():
|
||||
session_mock = _make_session()
|
||||
store_mock = _make_store()
|
||||
app.dependency_overrides[get_session] = lambda: session_mock
|
||||
app.dependency_overrides[get_store] = lambda: store_mock
|
||||
yield session_mock, store_mock
|
||||
app.dependency_overrides.clear()
|
||||
|
||||
|
||||
def test_suggest_returns_200():
|
||||
resp = client.post("/api/v1/recipes/suggest", json={
|
||||
"pantry_items": ["butter", "pasta"],
|
||||
"level": 1,
|
||||
"constraints": [],
|
||||
})
|
||||
assert resp.status_code == 200
|
||||
data = resp.json()
|
||||
assert "suggestions" in data
|
||||
assert "element_gaps" in data
|
||||
assert "grocery_list" in data
|
||||
assert "grocery_links" in data
|
||||
|
||||
|
||||
def test_suggest_level4_requires_wildcard_confirmed():
|
||||
resp = client.post("/api/v1/recipes/suggest", json={
|
||||
"pantry_items": ["butter"],
|
||||
"level": 4,
|
||||
"constraints": [],
|
||||
"wildcard_confirmed": False,
|
||||
})
|
||||
assert resp.status_code == 400
|
||||
|
||||
|
||||
def test_suggest_level3_requires_paid_tier(override_deps):
|
||||
session_mock, _ = override_deps
|
||||
session_mock.tier = "free"
|
||||
session_mock.has_byok = False
|
||||
resp = client.post("/api/v1/recipes/suggest", json={
|
||||
"pantry_items": ["butter"],
|
||||
"level": 3,
|
||||
"constraints": [],
|
||||
})
|
||||
assert resp.status_code == 403
|
||||
|
|
@ -1,110 +0,0 @@
|
|||
"""Tests for user settings endpoints."""
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
from unittest.mock import MagicMock
|
||||
|
||||
import pytest
|
||||
from fastapi.testclient import TestClient
|
||||
|
||||
from app.cloud_session import get_session
|
||||
from app.db.session import get_store
|
||||
from app.main import app
|
||||
from app.models.schemas.recipe import RecipeRequest
|
||||
from app.services.recipe.recipe_engine import RecipeEngine
|
||||
|
||||
client = TestClient(app)
|
||||
|
||||
|
||||
def _make_session(tier: str = "free", has_byok: bool = False) -> MagicMock:
|
||||
mock = MagicMock()
|
||||
mock.tier = tier
|
||||
mock.has_byok = has_byok
|
||||
return mock
|
||||
|
||||
|
||||
def _make_store() -> MagicMock:
|
||||
mock = MagicMock()
|
||||
mock.get_setting.return_value = None
|
||||
mock.set_setting.return_value = None
|
||||
mock.search_recipes_by_ingredients.return_value = []
|
||||
mock.check_and_increment_rate_limit.return_value = (True, 1)
|
||||
return mock
|
||||
|
||||
|
||||
@pytest.fixture()
|
||||
def tmp_store() -> MagicMock:
|
||||
session_mock = _make_session()
|
||||
store_mock = _make_store()
|
||||
app.dependency_overrides[get_session] = lambda: session_mock
|
||||
app.dependency_overrides[get_store] = lambda: store_mock
|
||||
yield store_mock
|
||||
app.dependency_overrides.clear()
|
||||
|
||||
|
||||
def test_set_and_get_cooking_equipment(tmp_store: MagicMock) -> None:
|
||||
"""PUT then GET round-trips the cooking_equipment value."""
|
||||
equipment_json = '["oven", "stovetop"]'
|
||||
|
||||
# PUT stores the value
|
||||
put_resp = client.put(
|
||||
"/api/v1/settings/cooking_equipment",
|
||||
json={"value": equipment_json},
|
||||
)
|
||||
assert put_resp.status_code == 200
|
||||
assert put_resp.json()["key"] == "cooking_equipment"
|
||||
assert put_resp.json()["value"] == equipment_json
|
||||
tmp_store.set_setting.assert_called_once_with("cooking_equipment", equipment_json)
|
||||
|
||||
# GET returns the stored value
|
||||
tmp_store.get_setting.return_value = equipment_json
|
||||
get_resp = client.get("/api/v1/settings/cooking_equipment")
|
||||
assert get_resp.status_code == 200
|
||||
assert get_resp.json()["value"] == equipment_json
|
||||
|
||||
|
||||
def test_get_missing_setting_returns_404(tmp_store: MagicMock) -> None:
|
||||
"""GET an allowed key that was never set returns 404."""
|
||||
tmp_store.get_setting.return_value = None
|
||||
resp = client.get("/api/v1/settings/cooking_equipment")
|
||||
assert resp.status_code == 404
|
||||
|
||||
|
||||
def test_hard_day_mode_uses_equipment_setting(tmp_store: MagicMock) -> None:
|
||||
"""RecipeEngine.suggest() respects cooking_equipment from store when hard_day_mode=True."""
|
||||
equipment_json = '["microwave"]'
|
||||
tmp_store.get_setting.return_value = equipment_json
|
||||
|
||||
engine = RecipeEngine(store=tmp_store)
|
||||
req = RecipeRequest(
|
||||
pantry_items=["rice", "water"],
|
||||
level=1,
|
||||
constraints=[],
|
||||
hard_day_mode=True,
|
||||
)
|
||||
|
||||
result = engine.suggest(req)
|
||||
|
||||
# Engine should have read the equipment setting
|
||||
tmp_store.get_setting.assert_called_with("cooking_equipment")
|
||||
# Result is a valid RecipeResult (no crash)
|
||||
assert result is not None
|
||||
assert hasattr(result, "suggestions")
|
||||
|
||||
|
||||
def test_put_unknown_key_returns_422(tmp_store: MagicMock) -> None:
|
||||
"""PUT to an unknown settings key returns 422."""
|
||||
resp = client.put(
|
||||
"/api/v1/settings/nonexistent_key",
|
||||
json={"value": "something"},
|
||||
)
|
||||
assert resp.status_code == 422
|
||||
|
||||
|
||||
def test_put_null_value_returns_422(tmp_store: MagicMock) -> None:
|
||||
"""PUT with a null value returns 422 (Pydantic validation)."""
|
||||
resp = client.put(
|
||||
"/api/v1/settings/cooking_equipment",
|
||||
json={"value": None},
|
||||
)
|
||||
assert resp.status_code == 422
|
||||
|
|
@ -1,44 +0,0 @@
|
|||
import json, pytest
|
||||
from tests.services.recipe.test_element_classifier import store_with_profiles
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def store_with_recipes(store_with_profiles):
|
||||
store_with_profiles.conn.executemany("""
|
||||
INSERT INTO recipes (external_id, title, ingredients, ingredient_names,
|
||||
directions, category, keywords, element_coverage)
|
||||
VALUES (?,?,?,?,?,?,?,?)
|
||||
""", [
|
||||
("1", "Butter Pasta", '["butter","pasta","parmesan"]',
|
||||
'["butter","pasta","parmesan"]', '["boil pasta","toss with butter"]',
|
||||
"Italian", '["quick","pasta"]',
|
||||
'{"Richness":0.5,"Depth":0.3,"Structure":0.2}'),
|
||||
("2", "Lentil Soup", '["lentils","carrots","onion","broth"]',
|
||||
'["lentils","carrots","onion","broth"]', '["simmer all"]',
|
||||
"Soup", '["vegan","hearty"]',
|
||||
'{"Depth":0.4,"Seasoning":0.3}'),
|
||||
])
|
||||
store_with_profiles.conn.commit()
|
||||
return store_with_profiles
|
||||
|
||||
|
||||
def test_search_recipes_by_ingredient_names(store_with_recipes):
|
||||
results = store_with_recipes.search_recipes_by_ingredients(["butter", "parmesan"])
|
||||
assert len(results) >= 1
|
||||
assert any(r["title"] == "Butter Pasta" for r in results)
|
||||
|
||||
def test_search_recipes_respects_limit(store_with_recipes):
|
||||
results = store_with_recipes.search_recipes_by_ingredients(["butter"], limit=1)
|
||||
assert len(results) <= 1
|
||||
|
||||
def test_check_rate_limit_first_call(store_with_recipes):
|
||||
allowed, count = store_with_recipes.check_and_increment_rate_limit("leftover_mode", daily_max=5)
|
||||
assert allowed is True
|
||||
assert count == 1
|
||||
|
||||
def test_check_rate_limit_exceeded(store_with_recipes):
|
||||
for _ in range(5):
|
||||
store_with_recipes.check_and_increment_rate_limit("leftover_mode", daily_max=5)
|
||||
allowed, count = store_with_recipes.check_and_increment_rate_limit("leftover_mode", daily_max=5)
|
||||
assert allowed is False
|
||||
assert count == 5
|
||||
|
|
@ -1,39 +0,0 @@
|
|||
import csv
|
||||
import tempfile
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
def _write_csv(path: Path, rows: list[dict], fieldnames: list[str]) -> None:
|
||||
with open(path, "w", newline="") as f:
|
||||
w = csv.DictWriter(f, fieldnames=fieldnames)
|
||||
w.writeheader()
|
||||
w.writerows(rows)
|
||||
|
||||
|
||||
def test_parse_flavorgraph_node():
|
||||
from scripts.pipeline.build_flavorgraph_index import parse_ingredient_nodes
|
||||
|
||||
with tempfile.TemporaryDirectory() as tmp:
|
||||
nodes_path = Path(tmp) / "nodes.csv"
|
||||
edges_path = Path(tmp) / "edges.csv"
|
||||
|
||||
_write_csv(nodes_path, [
|
||||
{"node_id": "1", "name": "beef", "node_type": "ingredient"},
|
||||
{"node_id": "2", "name": "pyrazine", "node_type": "compound"},
|
||||
{"node_id": "3", "name": "mushroom", "node_type": "ingredient"},
|
||||
], ["node_id", "name", "node_type"])
|
||||
|
||||
_write_csv(edges_path, [
|
||||
{"id_1": "1", "id_2": "2", "score": "0.8"},
|
||||
{"id_1": "3", "id_2": "2", "score": "0.7"},
|
||||
], ["id_1", "id_2", "score"])
|
||||
|
||||
ingredient_to_compounds, compound_names = parse_ingredient_nodes(nodes_path, edges_path)
|
||||
|
||||
assert "beef" in ingredient_to_compounds
|
||||
assert "mushroom" in ingredient_to_compounds
|
||||
# compound node_id "2" maps to name "pyrazine"
|
||||
beef_compounds = ingredient_to_compounds["beef"]
|
||||
assert any(compound_names.get(c) == "pyrazine" for c in beef_compounds)
|
||||
mushroom_compounds = ingredient_to_compounds["mushroom"]
|
||||
assert any(compound_names.get(c) == "pyrazine" for c in mushroom_compounds)
|
||||
|
|
@ -1,23 +0,0 @@
|
|||
import pytest
|
||||
from pathlib import Path
|
||||
import sys
|
||||
sys.path.insert(0, str(Path(__file__).parents[2]))
|
||||
|
||||
def test_normalize_ingredient_name():
|
||||
from scripts.pipeline.build_ingredient_index import normalize_name
|
||||
assert normalize_name("Ground Beef (85% lean)") == "ground beef"
|
||||
assert normalize_name(" Olive Oil ") == "olive oil"
|
||||
assert normalize_name("Cheddar Cheese, shredded") == "cheddar cheese"
|
||||
|
||||
def test_derive_elements_from_usda_row():
|
||||
from scripts.pipeline.build_ingredient_index import derive_elements
|
||||
row = {"fat_pct": 20.0, "protein_pct": 17.0, "moisture_pct": 60.0,
|
||||
"sodium_mg_per_100g": 65.0, "glutamate_mg": 2.8, "starch_pct": 0.0}
|
||||
elements = derive_elements(row)
|
||||
assert "Richness" in elements # high fat
|
||||
assert "Depth" in elements # notable glutamate
|
||||
|
||||
def test_derive_binding_score():
|
||||
from scripts.pipeline.build_ingredient_index import derive_binding_score
|
||||
assert derive_binding_score({"protein_pct": 12.0, "starch_pct": 68.0}) == 3 # flour
|
||||
assert derive_binding_score({"protein_pct": 1.0, "starch_pct": 0.5}) == 0 # water
|
||||
|
|
@ -1,19 +0,0 @@
|
|||
def test_extract_ingredient_names():
|
||||
from scripts.pipeline.build_recipe_index import extract_ingredient_names
|
||||
raw = ["2 cups all-purpose flour", "1 lb ground beef (85/15)", "salt to taste"]
|
||||
names = extract_ingredient_names(raw)
|
||||
assert "flour" in names or "all-purpose flour" in names
|
||||
assert "ground beef" in names
|
||||
assert "salt" in names
|
||||
|
||||
def test_compute_element_coverage():
|
||||
from scripts.pipeline.build_recipe_index import compute_element_coverage
|
||||
profiles = [
|
||||
{"elements": ["Richness", "Depth"]},
|
||||
{"elements": ["Brightness"]},
|
||||
{"elements": ["Seasoning"]},
|
||||
]
|
||||
coverage = compute_element_coverage(profiles)
|
||||
assert coverage["Richness"] > 0
|
||||
assert coverage["Brightness"] > 0
|
||||
assert coverage.get("Aroma", 0) == 0
|
||||
|
|
@ -1,10 +0,0 @@
|
|||
def test_diff_ingredient_lists():
|
||||
from scripts.pipeline.derive_substitutions import diff_ingredients
|
||||
base = ["ground beef", "chicken broth", "olive oil", "onion"]
|
||||
target = ["lentils", "vegetable broth", "olive oil", "onion"]
|
||||
removed, added = diff_ingredients(base, target)
|
||||
assert "ground beef" in removed
|
||||
assert "chicken broth" in removed
|
||||
assert "lentils" in added
|
||||
assert "vegetable broth" in added
|
||||
assert "olive oil" not in removed # unchanged
|
||||
|
|
@ -1,69 +0,0 @@
|
|||
import pytest
|
||||
import sqlite3
|
||||
import json
|
||||
import tempfile
|
||||
from pathlib import Path
|
||||
|
||||
from app.db.store import Store
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def store_with_profiles(tmp_path):
|
||||
db_path = tmp_path / "test.db"
|
||||
store = Store(db_path)
|
||||
# Seed ingredient_profiles
|
||||
store.conn.execute("""
|
||||
INSERT INTO ingredient_profiles
|
||||
(name, elements, fat_pct, moisture_pct, glutamate_mg, binding_score,
|
||||
sodium_mg_per_100g, is_fermented, texture_profile)
|
||||
VALUES (?,?,?,?,?,?,?,?,?)
|
||||
""", ("butter", json.dumps(["Richness"]), 81.0, 16.0, 0.1, 0, 11.0, 0, "creamy"))
|
||||
store.conn.execute("""
|
||||
INSERT INTO ingredient_profiles
|
||||
(name, elements, fat_pct, moisture_pct, glutamate_mg, binding_score,
|
||||
sodium_mg_per_100g, is_fermented, texture_profile)
|
||||
VALUES (?,?,?,?,?,?,?,?,?)
|
||||
""", ("parmesan", json.dumps(["Depth", "Seasoning"]), 29.0, 29.0, 1.2, 1, 1600.0, 0, "neutral"))
|
||||
store.conn.commit()
|
||||
return store
|
||||
|
||||
|
||||
def test_classify_known_ingredient(store_with_profiles):
|
||||
from app.services.recipe.element_classifier import ElementClassifier
|
||||
clf = ElementClassifier(store_with_profiles)
|
||||
profile = clf.classify("butter")
|
||||
assert "Richness" in profile.elements
|
||||
assert profile.fat_pct == pytest.approx(81.0)
|
||||
assert profile.name == "butter"
|
||||
assert profile.source == "db"
|
||||
|
||||
|
||||
def test_classify_unknown_ingredient_uses_heuristic(store_with_profiles):
|
||||
from app.services.recipe.element_classifier import ElementClassifier
|
||||
clf = ElementClassifier(store_with_profiles)
|
||||
profile = clf.classify("ghost pepper hot sauce")
|
||||
# Heuristic should detect acid / aroma
|
||||
assert "Aroma" in profile.elements # "pepper" in name matches Aroma heuristic
|
||||
assert profile.name == "ghost pepper hot sauce"
|
||||
|
||||
|
||||
def test_classify_batch(store_with_profiles):
|
||||
from app.services.recipe.element_classifier import ElementClassifier
|
||||
clf = ElementClassifier(store_with_profiles)
|
||||
results = clf.classify_batch(["butter", "parmesan", "unknown herb"])
|
||||
assert len(results) == 3
|
||||
assert results[0].name == "butter"
|
||||
assert results[1].name == "parmesan"
|
||||
|
||||
|
||||
def test_identify_gaps(store_with_profiles):
|
||||
from app.services.recipe.element_classifier import ElementClassifier
|
||||
clf = ElementClassifier(store_with_profiles)
|
||||
profiles = [
|
||||
clf.classify("butter"),
|
||||
clf.classify("parmesan"),
|
||||
]
|
||||
gaps = clf.identify_gaps(profiles)
|
||||
# We have Richness + Depth + Seasoning; should flag Brightness, Aroma, Structure, Texture
|
||||
assert "Brightness" in gaps
|
||||
assert "Richness" not in gaps
|
||||
|
|
@ -1,229 +0,0 @@
|
|||
"""Tests for LLMRecipeGenerator — prompt builders and allergy filtering."""
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
from contextlib import contextmanager
|
||||
from dataclasses import dataclass
|
||||
from unittest.mock import MagicMock, patch
|
||||
|
||||
import pytest
|
||||
|
||||
from app.models.schemas.recipe import RecipeRequest
|
||||
from app.services.recipe.element_classifier import IngredientProfile
|
||||
|
||||
|
||||
def _make_store():
|
||||
"""Create a minimal in-memory Store."""
|
||||
from app.db.store import Store
|
||||
import sqlite3
|
||||
|
||||
conn = sqlite3.connect(":memory:")
|
||||
conn.row_factory = sqlite3.Row
|
||||
store = Store.__new__(Store)
|
||||
store.conn = conn
|
||||
return store
|
||||
|
||||
|
||||
def test_build_level3_prompt_contains_element_scaffold():
|
||||
"""Level 3 prompt includes element coverage, pantry items, and constraints."""
|
||||
from app.services.recipe.llm_recipe import LLMRecipeGenerator
|
||||
|
||||
store = _make_store()
|
||||
gen = LLMRecipeGenerator(store)
|
||||
|
||||
req = RecipeRequest(
|
||||
pantry_items=["butter", "mushrooms"],
|
||||
level=3,
|
||||
constraints=["vegetarian"],
|
||||
)
|
||||
profiles = [
|
||||
IngredientProfile(name="butter", elements=["Richness"]),
|
||||
IngredientProfile(name="mushrooms", elements=["Depth"]),
|
||||
]
|
||||
gaps = ["Brightness", "Aroma"]
|
||||
|
||||
prompt = gen.build_level3_prompt(req, profiles, gaps)
|
||||
|
||||
assert "Richness" in prompt
|
||||
assert "Depth" in prompt
|
||||
assert "Brightness" in prompt
|
||||
assert "butter" in prompt
|
||||
assert "vegetarian" in prompt
|
||||
|
||||
|
||||
def test_build_level4_prompt_contains_pantry_and_constraints():
|
||||
"""Level 4 prompt is concise and includes key context."""
|
||||
from app.services.recipe.llm_recipe import LLMRecipeGenerator
|
||||
|
||||
store = _make_store()
|
||||
gen = LLMRecipeGenerator(store)
|
||||
|
||||
req = RecipeRequest(
|
||||
pantry_items=["pasta", "eggs", "mystery ingredient"],
|
||||
level=4,
|
||||
constraints=["no gluten"],
|
||||
allergies=["gluten"],
|
||||
wildcard_confirmed=True,
|
||||
)
|
||||
|
||||
prompt = gen.build_level4_prompt(req)
|
||||
|
||||
assert "mystery" in prompt.lower()
|
||||
assert "gluten" in prompt.lower()
|
||||
assert len(prompt) < 1500
|
||||
|
||||
|
||||
def test_allergy_items_excluded_from_prompt():
|
||||
"""Allergy items are listed as forbidden AND filtered from pantry shown to LLM."""
|
||||
from app.services.recipe.llm_recipe import LLMRecipeGenerator
|
||||
|
||||
store = _make_store()
|
||||
gen = LLMRecipeGenerator(store)
|
||||
|
||||
req = RecipeRequest(
|
||||
pantry_items=["olive oil", "peanuts", "garlic"],
|
||||
level=3,
|
||||
constraints=[],
|
||||
allergies=["peanuts"],
|
||||
)
|
||||
profiles = [
|
||||
IngredientProfile(name="olive oil", elements=["Richness"]),
|
||||
IngredientProfile(name="peanuts", elements=["Texture"]),
|
||||
IngredientProfile(name="garlic", elements=["Aroma"]),
|
||||
]
|
||||
gaps: list[str] = []
|
||||
|
||||
prompt = gen.build_level3_prompt(req, profiles, gaps)
|
||||
|
||||
# Check peanuts are in the exclusion section but NOT in the pantry section
|
||||
lines = prompt.split("\n")
|
||||
pantry_line = next((l for l in lines if l.startswith("Pantry")), "")
|
||||
exclusion_line = next(
|
||||
(l for l in lines if "must not" in l.lower()),
|
||||
"",
|
||||
)
|
||||
assert "peanuts" not in pantry_line.lower()
|
||||
assert "peanuts" in exclusion_line.lower()
|
||||
assert "olive oil" in prompt.lower()
|
||||
|
||||
|
||||
def test_generate_returns_result_when_llm_responds(monkeypatch):
|
||||
"""generate() returns RecipeResult with title when LLM returns a valid response."""
|
||||
from app.services.recipe.llm_recipe import LLMRecipeGenerator
|
||||
from app.models.schemas.recipe import RecipeResult
|
||||
|
||||
store = _make_store()
|
||||
gen = LLMRecipeGenerator(store)
|
||||
|
||||
canned_response = (
|
||||
"Title: Mushroom Butter Pasta\n"
|
||||
"Ingredients: butter, mushrooms, pasta\n"
|
||||
"Directions: Cook pasta. Sauté mushrooms in butter. Combine.\n"
|
||||
"Notes: Add parmesan to taste.\n"
|
||||
)
|
||||
monkeypatch.setattr(gen, "_call_llm", lambda prompt: canned_response)
|
||||
|
||||
req = RecipeRequest(
|
||||
pantry_items=["butter", "mushrooms", "pasta"],
|
||||
level=3,
|
||||
constraints=["vegetarian"],
|
||||
)
|
||||
profiles = [
|
||||
IngredientProfile(name="butter", elements=["Richness"]),
|
||||
IngredientProfile(name="mushrooms", elements=["Depth"]),
|
||||
]
|
||||
gaps = ["Brightness"]
|
||||
|
||||
result = gen.generate(req, profiles, gaps)
|
||||
|
||||
assert isinstance(result, RecipeResult)
|
||||
assert len(result.suggestions) == 1
|
||||
suggestion = result.suggestions[0]
|
||||
assert suggestion.title == "Mushroom Butter Pasta"
|
||||
# All LLM ingredients (butter, mushrooms, pasta) are in the pantry, so none are missing
|
||||
assert suggestion.missing_ingredients == []
|
||||
assert len(suggestion.directions) > 0
|
||||
assert "parmesan" in suggestion.notes.lower()
|
||||
assert result.element_gaps == ["Brightness"]
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# CFOrchClient integration tests
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
@dataclass
|
||||
class _FakeAllocation:
|
||||
allocation_id: str = "alloc-test-1"
|
||||
service: str = "vllm"
|
||||
node_id: str = "node-1"
|
||||
gpu_id: int = 0
|
||||
model: str | None = "Ouro-2.6B-Thinking"
|
||||
url: str = "http://test:8000"
|
||||
started: bool = True
|
||||
warm: bool = True
|
||||
|
||||
|
||||
def test_recipe_gen_uses_cf_orch_when_env_set(monkeypatch):
|
||||
"""When CF_ORCH_URL is set, _call_llm uses alloc.url+/v1 as the OpenAI base_url."""
|
||||
from app.services.recipe.llm_recipe import LLMRecipeGenerator
|
||||
|
||||
store = _make_store()
|
||||
gen = LLMRecipeGenerator(store)
|
||||
|
||||
fake_alloc = _FakeAllocation()
|
||||
|
||||
@contextmanager
|
||||
def _fake_llm_context():
|
||||
yield fake_alloc
|
||||
|
||||
captured = {}
|
||||
|
||||
# Fake OpenAI that records the base_url it was constructed with
|
||||
class _FakeOpenAI:
|
||||
def __init__(self, *, base_url, api_key):
|
||||
captured["base_url"] = base_url
|
||||
msg = MagicMock()
|
||||
msg.content = "Title: Test\nIngredients: a\nDirections: do it.\nNotes: none."
|
||||
choice = MagicMock()
|
||||
choice.message = msg
|
||||
completion = MagicMock()
|
||||
completion.choices = [choice]
|
||||
self.chat = MagicMock()
|
||||
self.chat.completions = MagicMock()
|
||||
self.chat.completions.create = MagicMock(return_value=completion)
|
||||
|
||||
# Patch _get_llm_context directly so no real HTTP call is made
|
||||
monkeypatch.setattr(gen, "_get_llm_context", _fake_llm_context)
|
||||
|
||||
with patch("app.services.recipe.llm_recipe.OpenAI", _FakeOpenAI):
|
||||
gen._call_llm("make me a recipe")
|
||||
|
||||
assert captured.get("base_url") == "http://test:8000/v1"
|
||||
|
||||
|
||||
def test_recipe_gen_falls_back_without_cf_orch(monkeypatch):
|
||||
"""When CF_ORCH_URL is not set, _call_llm falls back to LLMRouter."""
|
||||
from app.services.recipe.llm_recipe import LLMRecipeGenerator
|
||||
|
||||
store = _make_store()
|
||||
gen = LLMRecipeGenerator(store)
|
||||
|
||||
monkeypatch.delenv("CF_ORCH_URL", raising=False)
|
||||
|
||||
router_called = {}
|
||||
|
||||
def _fake_complete(prompt, **_kwargs):
|
||||
router_called["prompt"] = prompt
|
||||
return "Title: Direct\nIngredients: x\nDirections: go.\nNotes: ok."
|
||||
|
||||
fake_router = MagicMock()
|
||||
fake_router.complete.side_effect = _fake_complete
|
||||
|
||||
# LLMRouter is imported locally inside _call_llm, so patch it at its source module.
|
||||
# new_callable=MagicMock makes the class itself a MagicMock; set return_value so
|
||||
# that LLMRouter() (instantiation) yields fake_router rather than a new MagicMock.
|
||||
with patch("circuitforge_core.llm.router.LLMRouter", new_callable=MagicMock) as mock_router_cls:
|
||||
mock_router_cls.return_value = fake_router
|
||||
gen._call_llm("direct path prompt")
|
||||
|
||||
assert router_called.get("prompt") == "direct path prompt"
|
||||
|
|
@ -1,121 +0,0 @@
|
|||
import pytest, json
|
||||
from tests.services.recipe.test_element_classifier import store_with_profiles
|
||||
from tests.db.test_store_recipes import store_with_recipes
|
||||
|
||||
|
||||
def test_level1_returns_ranked_suggestions(store_with_recipes):
|
||||
from app.services.recipe.recipe_engine import RecipeEngine, RecipeRequest
|
||||
engine = RecipeEngine(store_with_recipes)
|
||||
req = RecipeRequest(
|
||||
pantry_items=["butter", "parmesan"],
|
||||
level=1,
|
||||
constraints=[],
|
||||
)
|
||||
result = engine.suggest(req)
|
||||
assert len(result.suggestions) > 0
|
||||
assert result.suggestions[0].title == "Butter Pasta"
|
||||
|
||||
|
||||
def test_level1_expiry_first_requires_rate_limit_free(store_with_recipes):
|
||||
from app.services.recipe.recipe_engine import RecipeEngine, RecipeRequest
|
||||
engine = RecipeEngine(store_with_recipes)
|
||||
for _ in range(5):
|
||||
req = RecipeRequest(
|
||||
pantry_items=["butter"],
|
||||
level=1,
|
||||
constraints=[],
|
||||
expiry_first=True,
|
||||
tier="free",
|
||||
)
|
||||
result = engine.suggest(req)
|
||||
assert result.rate_limited is False
|
||||
req = RecipeRequest(
|
||||
pantry_items=["butter"],
|
||||
level=1,
|
||||
constraints=[],
|
||||
expiry_first=True,
|
||||
tier="free",
|
||||
)
|
||||
result = engine.suggest(req)
|
||||
assert result.rate_limited is True
|
||||
|
||||
|
||||
def test_level2_returns_swap_candidates(store_with_recipes):
|
||||
from app.services.recipe.recipe_engine import RecipeEngine, RecipeRequest
|
||||
store_with_recipes.conn.execute("""
|
||||
INSERT INTO substitution_pairs
|
||||
(original_name, substitute_name, constraint_label, fat_delta, occurrence_count)
|
||||
VALUES (?,?,?,?,?)
|
||||
""", ("butter", "coconut oil", "vegan", -1.0, 12))
|
||||
store_with_recipes.conn.commit()
|
||||
|
||||
engine = RecipeEngine(store_with_recipes)
|
||||
req = RecipeRequest(
|
||||
pantry_items=["butter", "parmesan"],
|
||||
level=2,
|
||||
constraints=["vegan"],
|
||||
)
|
||||
result = engine.suggest(req)
|
||||
swapped = [s for s in result.suggestions if s.swap_candidates]
|
||||
assert len(swapped) > 0
|
||||
|
||||
|
||||
def test_element_gaps_reported(store_with_recipes):
|
||||
from app.services.recipe.recipe_engine import RecipeEngine, RecipeRequest
|
||||
engine = RecipeEngine(store_with_recipes)
|
||||
req = RecipeRequest(pantry_items=["butter"], level=1, constraints=[])
|
||||
result = engine.suggest(req)
|
||||
assert isinstance(result.element_gaps, list)
|
||||
|
||||
|
||||
def test_grocery_list_max_missing(store_with_recipes):
|
||||
from app.services.recipe.recipe_engine import RecipeEngine, RecipeRequest
|
||||
engine = RecipeEngine(store_with_recipes)
|
||||
# Butter Pasta needs butter, pasta, parmesan. We have only butter → missing 2
|
||||
req = RecipeRequest(
|
||||
pantry_items=["butter"],
|
||||
level=1,
|
||||
constraints=[],
|
||||
max_missing=2,
|
||||
)
|
||||
result = engine.suggest(req)
|
||||
assert all(len(s.missing_ingredients) <= 2 for s in result.suggestions)
|
||||
assert isinstance(result.grocery_list, list)
|
||||
|
||||
|
||||
def test_hard_day_mode_filters_complex_methods(store_with_recipes):
|
||||
from app.services.recipe.recipe_engine import RecipeEngine, RecipeRequest, _classify_method_complexity
|
||||
# Test the classifier directly
|
||||
assert _classify_method_complexity(["mix all ingredients", "stir to combine"]) == "easy"
|
||||
assert _classify_method_complexity(["sauté onions", "braise for 2 hours"]) == "involved"
|
||||
|
||||
# With hard_day_mode, involved recipes should be filtered out
|
||||
# Seed a hard recipe into the store
|
||||
store_with_recipes.conn.execute("""
|
||||
INSERT INTO recipes (external_id, title, ingredients, ingredient_names,
|
||||
directions, category, keywords, element_coverage)
|
||||
VALUES (?,?,?,?,?,?,?,?)
|
||||
""", ("99", "Braised Short Ribs",
|
||||
'["butter","beef ribs"]', '["butter","beef ribs"]',
|
||||
'["braise short ribs for 3 hours","reduce sauce"]',
|
||||
"Meat", '[]', '{"Richness":0.8}'))
|
||||
store_with_recipes.conn.commit()
|
||||
|
||||
engine = RecipeEngine(store_with_recipes)
|
||||
req_hard = RecipeRequest(pantry_items=["butter"], level=1, constraints=[], hard_day_mode=True)
|
||||
result = engine.suggest(req_hard)
|
||||
titles = [s.title for s in result.suggestions]
|
||||
assert "Braised Short Ribs" not in titles
|
||||
|
||||
|
||||
def test_grocery_links_free_tier(store_with_recipes):
|
||||
from app.services.recipe.recipe_engine import RecipeEngine, RecipeRequest
|
||||
engine = RecipeEngine(store_with_recipes)
|
||||
req = RecipeRequest(pantry_items=["butter"], level=1, constraints=[], max_missing=5)
|
||||
result = engine.suggest(req)
|
||||
# Links may be empty if no retailer env vars set, but structure must be correct
|
||||
assert isinstance(result.grocery_links, list)
|
||||
for link in result.grocery_links:
|
||||
assert hasattr(link, "ingredient")
|
||||
assert hasattr(link, "retailer")
|
||||
assert hasattr(link, "url")
|
||||
|
|
@ -1,48 +0,0 @@
|
|||
def test_seitan_staple_has_yield_formats():
|
||||
from app.services.recipe.staple_library import StapleLibrary
|
||||
lib = StapleLibrary()
|
||||
seitan = lib.get("seitan")
|
||||
assert seitan is not None
|
||||
assert "fresh" in seitan.yield_formats
|
||||
assert "frozen" in seitan.yield_formats
|
||||
|
||||
|
||||
def test_staple_yield_format_has_elements():
|
||||
from app.services.recipe.staple_library import StapleLibrary
|
||||
lib = StapleLibrary()
|
||||
seitan = lib.get("seitan")
|
||||
fresh = seitan.yield_formats["fresh"]
|
||||
assert "Structure" in fresh["elements"]
|
||||
|
||||
|
||||
def test_list_all_staples():
|
||||
from app.services.recipe.staple_library import StapleLibrary
|
||||
lib = StapleLibrary()
|
||||
all_staples = lib.list_all()
|
||||
slugs = [s.slug for s in all_staples]
|
||||
assert "seitan" in slugs
|
||||
assert "tempeh" in slugs
|
||||
|
||||
|
||||
def test_tofu_firm_is_loadable():
|
||||
from app.services.recipe.staple_library import StapleLibrary
|
||||
lib = StapleLibrary()
|
||||
tofu = lib.get("tofu_firm")
|
||||
assert tofu is not None
|
||||
assert tofu.slug == "tofu_firm"
|
||||
|
||||
|
||||
def test_filter_by_dietary_vegan():
|
||||
from app.services.recipe.staple_library import StapleLibrary
|
||||
lib = StapleLibrary()
|
||||
vegan = lib.filter_by_dietary("vegan")
|
||||
assert len(vegan) > 0
|
||||
assert all("vegan" in s.dietary_labels for s in vegan)
|
||||
|
||||
|
||||
def test_list_all_returns_all_three():
|
||||
from app.services.recipe.staple_library import StapleLibrary
|
||||
lib = StapleLibrary()
|
||||
all_staples = lib.list_all()
|
||||
slugs = {s.slug for s in all_staples}
|
||||
assert {"seitan", "tempeh", "tofu_firm"} == slugs
|
||||
Some files were not shown because too many files have changed in this diff Show more
Loading…
Reference in a new issue