Compare commits
14 commits
| Author | SHA1 | Date | |
|---|---|---|---|
| 0bef082ff0 | |||
| c6f45be1ba | |||
| be050f5492 | |||
| e2658f743f | |||
| dbc4aa3c68 | |||
| ed4595d960 | |||
| eba536070c | |||
| 79f345aae6 | |||
| 5385adc52a | |||
| e7ba305e63 | |||
| b2c546e86a | |||
| 8fd77bd1f2 | |||
| 22a3da61c3 | |||
| bea61054fa |
27 changed files with 1222 additions and 107 deletions
|
|
@ -51,6 +51,12 @@ ENABLE_OCR=false
|
|||
DEBUG=false
|
||||
CLOUD_MODE=false
|
||||
DEMO_MODE=false
|
||||
# Product identifier reported in cf-orch coordinator analytics for per-app breakdown
|
||||
CF_APP_NAME=kiwi
|
||||
# USE_ORCH_SCHEDULER: use coordinator-aware multi-GPU scheduler instead of local FIFO.
|
||||
# Unset = auto-detect: true if CLOUD_MODE or circuitforge_orch is installed (paid+ local).
|
||||
# Set false to force LocalScheduler even when cf-orch is present.
|
||||
# USE_ORCH_SCHEDULER=false
|
||||
|
||||
# Cloud mode (set in compose.cloud.yml; also set here for reference)
|
||||
# CLOUD_DATA_ROOT=/devl/kiwi-cloud-data
|
||||
|
|
|
|||
|
|
@ -11,6 +11,9 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
|
|||
COPY circuitforge-core/ ./circuitforge-core/
|
||||
RUN conda run -n base pip install --no-cache-dir -e ./circuitforge-core
|
||||
|
||||
# Install circuitforge-orch — needed for the cf-orch-agent sidecar (compose.override.yml)
|
||||
COPY circuitforge-orch/ ./circuitforge-orch/
|
||||
|
||||
# Create kiwi conda env and install app
|
||||
COPY kiwi/environment.yml .
|
||||
RUN conda env create -f environment.yml
|
||||
|
|
@ -22,8 +25,9 @@ COPY kiwi/ ./kiwi/
|
|||
# they never end up in the cloud image regardless of .dockerignore placement.
|
||||
RUN rm -f /app/kiwi/.env
|
||||
|
||||
# Install cf-core into the kiwi env BEFORE installing kiwi (kiwi lists it as a dep)
|
||||
# Install cf-core and cf-orch into the kiwi env BEFORE installing kiwi
|
||||
RUN conda run -n kiwi pip install --no-cache-dir -e /app/circuitforge-core
|
||||
RUN conda run -n kiwi pip install --no-cache-dir -e /app/circuitforge-orch
|
||||
WORKDIR /app/kiwi
|
||||
RUN conda run -n kiwi pip install --no-cache-dir -e .
|
||||
|
||||
|
|
|
|||
|
|
@ -171,7 +171,10 @@ async def create_inventory_item(
|
|||
notes=body.notes,
|
||||
source=body.source,
|
||||
)
|
||||
return InventoryItemResponse.model_validate(item)
|
||||
# RETURNING * omits joined columns (product_name, barcode, category).
|
||||
# Re-fetch with the products JOIN so the response is fully populated (#99).
|
||||
full_item = await asyncio.to_thread(store.get_inventory_item, item["id"])
|
||||
return InventoryItemResponse.model_validate(full_item)
|
||||
|
||||
|
||||
@router.post("/items/bulk-add-by-name", response_model=BulkAddByNameResponse)
|
||||
|
|
|
|||
|
|
@ -16,6 +16,7 @@ from app.db.store import Store
|
|||
from app.models.schemas.recipe import (
|
||||
AssemblyTemplateOut,
|
||||
BuildRequest,
|
||||
RecipeJobStatus,
|
||||
RecipeRequest,
|
||||
RecipeResult,
|
||||
RecipeSuggestion,
|
||||
|
|
@ -28,9 +29,12 @@ from app.services.recipe.assembly_recipes import (
|
|||
)
|
||||
from app.services.recipe.browser_domains import (
|
||||
DOMAINS,
|
||||
category_has_subcategories,
|
||||
get_category_names,
|
||||
get_domain_labels,
|
||||
get_keywords_for_category,
|
||||
get_keywords_for_subcategory,
|
||||
get_subcategory_names,
|
||||
)
|
||||
from app.services.recipe.recipe_engine import RecipeEngine
|
||||
from app.services.heimdall_orch import check_orch_budget
|
||||
|
|
@ -54,12 +58,50 @@ def _suggest_in_thread(db_path: Path, req: RecipeRequest) -> RecipeResult:
|
|||
store.close()
|
||||
|
||||
|
||||
@router.post("/suggest", response_model=RecipeResult)
|
||||
async def _enqueue_recipe_job(session: CloudUser, req: RecipeRequest):
|
||||
"""Queue an async recipe_llm job and return 202 with job_id.
|
||||
|
||||
Falls back to synchronous generation in CLOUD_MODE (scheduler polls only
|
||||
the shared settings DB, not per-user DBs — see snipe#45 / kiwi backlog).
|
||||
"""
|
||||
import json
|
||||
import uuid
|
||||
from fastapi.responses import JSONResponse
|
||||
from app.cloud_session import CLOUD_MODE
|
||||
from app.tasks.runner import insert_task
|
||||
|
||||
if CLOUD_MODE:
|
||||
log.warning("recipe_llm async jobs not supported in CLOUD_MODE — falling back to sync")
|
||||
result = await asyncio.to_thread(_suggest_in_thread, session.db, req)
|
||||
return result
|
||||
|
||||
job_id = f"rec_{uuid.uuid4().hex}"
|
||||
|
||||
def _create(db_path: Path) -> int:
|
||||
store = Store(db_path)
|
||||
try:
|
||||
row = store.create_recipe_job(job_id, session.user_id, req.model_dump_json())
|
||||
return row["id"]
|
||||
finally:
|
||||
store.close()
|
||||
|
||||
int_id = await asyncio.to_thread(_create, session.db)
|
||||
params_json = json.dumps({"job_id": job_id})
|
||||
task_id, is_new = insert_task(session.db, "recipe_llm", int_id, params=params_json)
|
||||
if is_new:
|
||||
from app.tasks.scheduler import get_scheduler
|
||||
get_scheduler(session.db).enqueue(task_id, "recipe_llm", int_id, params_json)
|
||||
|
||||
return JSONResponse(content={"job_id": job_id, "status": "queued"}, status_code=202)
|
||||
|
||||
|
||||
@router.post("/suggest")
|
||||
async def suggest_recipes(
|
||||
req: RecipeRequest,
|
||||
async_mode: bool = Query(default=False, alias="async"),
|
||||
session: CloudUser = Depends(get_session),
|
||||
store: Store = Depends(get_store),
|
||||
) -> RecipeResult:
|
||||
):
|
||||
log.info("recipes auth=%s tier=%s level=%s", _auth_label(session.user_id), session.tier, req.level)
|
||||
# Inject session-authoritative tier/byok immediately — client-supplied values are ignored.
|
||||
# Also read stored unit_system preference; default to metric if not set.
|
||||
|
|
@ -92,12 +134,49 @@ async def suggest_recipes(
|
|||
req = req.model_copy(update={"level": 2})
|
||||
orch_fallback = True
|
||||
|
||||
if req.level in (3, 4) and async_mode:
|
||||
return await _enqueue_recipe_job(session, req)
|
||||
|
||||
result = await asyncio.to_thread(_suggest_in_thread, session.db, req)
|
||||
if orch_fallback:
|
||||
result = result.model_copy(update={"orch_fallback": True})
|
||||
return result
|
||||
|
||||
|
||||
@router.get("/jobs/{job_id}", response_model=RecipeJobStatus)
|
||||
async def get_recipe_job_status(
|
||||
job_id: str,
|
||||
session: CloudUser = Depends(get_session),
|
||||
) -> RecipeJobStatus:
|
||||
"""Poll the status of an async recipe generation job.
|
||||
|
||||
Returns 404 when job_id is unknown or belongs to a different user.
|
||||
On status='done' with suggestions=[], the LLM returned empty — client
|
||||
should show a 'no recipe generated, try again' message.
|
||||
"""
|
||||
def _get(db_path: Path) -> dict | None:
|
||||
store = Store(db_path)
|
||||
try:
|
||||
return store.get_recipe_job(job_id, session.user_id)
|
||||
finally:
|
||||
store.close()
|
||||
|
||||
row = await asyncio.to_thread(_get, session.db)
|
||||
if row is None:
|
||||
raise HTTPException(status_code=404, detail="Job not found.")
|
||||
|
||||
result = None
|
||||
if row["status"] == "done" and row["result"]:
|
||||
result = RecipeResult.model_validate_json(row["result"])
|
||||
|
||||
return RecipeJobStatus(
|
||||
job_id=row["job_id"],
|
||||
status=row["status"],
|
||||
result=result,
|
||||
error=row["error"],
|
||||
)
|
||||
|
||||
|
||||
@router.get("/browse/domains")
|
||||
async def list_browse_domains(
|
||||
session: CloudUser = Depends(get_session),
|
||||
|
|
@ -115,15 +194,42 @@ async def list_browse_categories(
|
|||
if domain not in DOMAINS:
|
||||
raise HTTPException(status_code=404, detail=f"Unknown domain '{domain}'.")
|
||||
|
||||
keywords_by_category = {
|
||||
cat: get_keywords_for_category(domain, cat)
|
||||
for cat in get_category_names(domain)
|
||||
cat_names = get_category_names(domain)
|
||||
keywords_by_category = {cat: get_keywords_for_category(domain, cat) for cat in cat_names}
|
||||
has_subs = {cat: category_has_subcategories(domain, cat) for cat in cat_names}
|
||||
|
||||
def _get(db_path: Path) -> list[dict]:
|
||||
store = Store(db_path)
|
||||
try:
|
||||
return store.get_browser_categories(domain, keywords_by_category, has_subs)
|
||||
finally:
|
||||
store.close()
|
||||
|
||||
return await asyncio.to_thread(_get, session.db)
|
||||
|
||||
|
||||
@router.get("/browse/{domain}/{category}/subcategories")
|
||||
async def list_browse_subcategories(
|
||||
domain: str,
|
||||
category: str,
|
||||
session: CloudUser = Depends(get_session),
|
||||
) -> list[dict]:
|
||||
"""Return [{subcategory, recipe_count}] for a category that supports subcategories."""
|
||||
if domain not in DOMAINS:
|
||||
raise HTTPException(status_code=404, detail=f"Unknown domain '{domain}'.")
|
||||
if not category_has_subcategories(domain, category):
|
||||
return []
|
||||
|
||||
subcat_names = get_subcategory_names(domain, category)
|
||||
keywords_by_subcat = {
|
||||
sub: get_keywords_for_subcategory(domain, category, sub)
|
||||
for sub in subcat_names
|
||||
}
|
||||
|
||||
def _get(db_path: Path) -> list[dict]:
|
||||
store = Store(db_path)
|
||||
try:
|
||||
return store.get_browser_categories(domain, keywords_by_category)
|
||||
return store.get_browser_subcategories(domain, keywords_by_subcat)
|
||||
finally:
|
||||
store.close()
|
||||
|
||||
|
|
@ -137,22 +243,36 @@ async def browse_recipes(
|
|||
page: Annotated[int, Query(ge=1)] = 1,
|
||||
page_size: Annotated[int, Query(ge=1, le=100)] = 20,
|
||||
pantry_items: Annotated[str | None, Query()] = None,
|
||||
subcategory: Annotated[str | None, Query()] = None,
|
||||
q: Annotated[str | None, Query(max_length=200)] = None,
|
||||
sort: Annotated[str, Query(pattern="^(default|alpha|alpha_desc)$")] = "default",
|
||||
session: CloudUser = Depends(get_session),
|
||||
) -> dict:
|
||||
"""Return a paginated list of recipes for a domain/category.
|
||||
|
||||
Pass pantry_items as a comma-separated string to receive match_pct
|
||||
badges on each result.
|
||||
Pass pantry_items as a comma-separated string to receive match_pct badges.
|
||||
Pass subcategory to narrow within a category that has subcategories.
|
||||
Pass q to filter by title substring. Pass sort for ordering (default/alpha/alpha_desc).
|
||||
"""
|
||||
if domain not in DOMAINS:
|
||||
raise HTTPException(status_code=404, detail=f"Unknown domain '{domain}'.")
|
||||
|
||||
keywords = get_keywords_for_category(domain, category)
|
||||
if not keywords:
|
||||
raise HTTPException(
|
||||
status_code=404,
|
||||
detail=f"Unknown category '{category}' in domain '{domain}'.",
|
||||
)
|
||||
if category == "_all":
|
||||
keywords = None # unfiltered browse
|
||||
elif subcategory:
|
||||
keywords = get_keywords_for_subcategory(domain, category, subcategory)
|
||||
if not keywords:
|
||||
raise HTTPException(
|
||||
status_code=404,
|
||||
detail=f"Unknown subcategory '{subcategory}' in '{category}'.",
|
||||
)
|
||||
else:
|
||||
keywords = get_keywords_for_category(domain, category)
|
||||
if not keywords:
|
||||
raise HTTPException(
|
||||
status_code=404,
|
||||
detail=f"Unknown category '{category}' in domain '{domain}'.",
|
||||
)
|
||||
|
||||
pantry_list = (
|
||||
[p.strip() for p in pantry_items.split(",") if p.strip()]
|
||||
|
|
@ -168,6 +288,8 @@ async def browse_recipes(
|
|||
page=page,
|
||||
page_size=page_size,
|
||||
pantry_items=pantry_list,
|
||||
q=q or None,
|
||||
sort=sort,
|
||||
)
|
||||
store.log_browser_telemetry(
|
||||
domain=domain,
|
||||
|
|
|
|||
|
|
@ -10,6 +10,7 @@ import logging
|
|||
from fastapi import APIRouter, Depends
|
||||
|
||||
from app.cloud_session import CloudUser, _auth_label, get_session
|
||||
from app.core.config import settings
|
||||
|
||||
router = APIRouter()
|
||||
log = logging.getLogger(__name__)
|
||||
|
|
@ -22,8 +23,13 @@ def session_bootstrap(session: CloudUser = Depends(get_session)) -> dict:
|
|||
Expected log output:
|
||||
INFO:app.api.endpoints.session: session auth=authed tier=paid
|
||||
INFO:app.api.endpoints.session: session auth=anon tier=free
|
||||
|
||||
E2E test sessions (E2E_TEST_USER_ID) are logged at DEBUG so they don't
|
||||
pollute analytics counts while still being visible when DEBUG=true.
|
||||
"""
|
||||
log.info("session auth=%s tier=%s", _auth_label(session.user_id), session.tier)
|
||||
is_test = bool(settings.E2E_TEST_USER_ID and session.user_id == settings.E2E_TEST_USER_ID)
|
||||
logger = log.debug if is_test else log.info
|
||||
logger("session auth=%s tier=%s%s", _auth_label(session.user_id), session.tier, " e2e=true" if is_test else "")
|
||||
return {
|
||||
"auth": _auth_label(session.user_id),
|
||||
"tier": session.tier,
|
||||
|
|
|
|||
|
|
@ -60,8 +60,19 @@ class Settings:
|
|||
# CFOrchClient reads CF_LICENSE_KEY automatically; exposed here for startup validation.
|
||||
CF_LICENSE_KEY: str | None = os.environ.get("CF_LICENSE_KEY")
|
||||
|
||||
# E2E test account — analytics logging is suppressed for this user_id so test
|
||||
# runs don't pollute session counts. Set to the Directus UUID of the test user.
|
||||
E2E_TEST_USER_ID: str | None = os.environ.get("E2E_TEST_USER_ID") or None
|
||||
|
||||
# Feature flags
|
||||
ENABLE_OCR: bool = os.environ.get("ENABLE_OCR", "false").lower() in ("1", "true", "yes")
|
||||
# Use OrchestratedScheduler (coordinator-aware, multi-GPU fan-out) instead of
|
||||
# LocalScheduler. Defaults to true in CLOUD_MODE; can be set independently
|
||||
# for multi-GPU local rigs that don't need full cloud auth.
|
||||
USE_ORCH_SCHEDULER: bool | None = (
|
||||
None if os.environ.get("USE_ORCH_SCHEDULER") is None
|
||||
else os.environ.get("USE_ORCH_SCHEDULER", "").lower() in ("1", "true", "yes")
|
||||
)
|
||||
|
||||
# Runtime
|
||||
DEBUG: bool = os.environ.get("DEBUG", "false").lower() in ("1", "true", "yes")
|
||||
|
|
|
|||
14
app/db/migrations/034_recipe_jobs.sql
Normal file
14
app/db/migrations/034_recipe_jobs.sql
Normal file
|
|
@ -0,0 +1,14 @@
|
|||
-- Migration 034: async recipe generation job queue
|
||||
CREATE TABLE IF NOT EXISTS recipe_jobs (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
job_id TEXT NOT NULL UNIQUE,
|
||||
user_id TEXT NOT NULL,
|
||||
status TEXT NOT NULL DEFAULT 'queued',
|
||||
request TEXT NOT NULL,
|
||||
result TEXT,
|
||||
error TEXT,
|
||||
created_at TEXT NOT NULL DEFAULT (datetime('now')),
|
||||
updated_at TEXT NOT NULL DEFAULT (datetime('now'))
|
||||
);
|
||||
CREATE INDEX IF NOT EXISTS idx_recipe_jobs_job_id ON recipe_jobs (job_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_recipe_jobs_user_id ON recipe_jobs (user_id, created_at DESC);
|
||||
140
app/db/store.py
140
app/db/store.py
|
|
@ -736,6 +736,41 @@ class Store:
|
|||
row = self._fetch_one("SELECT * FROM recipes WHERE id = ?", (recipe_id,))
|
||||
return row
|
||||
|
||||
# --- Async recipe jobs ---
|
||||
|
||||
def create_recipe_job(self, job_id: str, user_id: str, request_json: str) -> sqlite3.Row:
|
||||
return self._insert_returning(
|
||||
"INSERT INTO recipe_jobs (job_id, user_id, status, request) VALUES (?,?,?,?) RETURNING *",
|
||||
(job_id, user_id, "queued", request_json),
|
||||
)
|
||||
|
||||
def get_recipe_job(self, job_id: str, user_id: str) -> sqlite3.Row | None:
|
||||
return self._fetch_one(
|
||||
"SELECT * FROM recipe_jobs WHERE job_id=? AND user_id=?",
|
||||
(job_id, user_id),
|
||||
)
|
||||
|
||||
def update_recipe_job_running(self, job_id: str) -> None:
|
||||
self.conn.execute(
|
||||
"UPDATE recipe_jobs SET status='running', updated_at=datetime('now') WHERE job_id=?",
|
||||
(job_id,),
|
||||
)
|
||||
self.conn.commit()
|
||||
|
||||
def complete_recipe_job(self, job_id: str, result_json: str) -> None:
|
||||
self.conn.execute(
|
||||
"UPDATE recipe_jobs SET status='done', result=?, updated_at=datetime('now') WHERE job_id=?",
|
||||
(result_json, job_id),
|
||||
)
|
||||
self.conn.commit()
|
||||
|
||||
def fail_recipe_job(self, job_id: str, error: str) -> None:
|
||||
self.conn.execute(
|
||||
"UPDATE recipe_jobs SET status='failed', error=?, updated_at=datetime('now') WHERE job_id=?",
|
||||
(error, job_id),
|
||||
)
|
||||
self.conn.commit()
|
||||
|
||||
def upsert_built_recipe(
|
||||
self,
|
||||
external_id: str,
|
||||
|
|
@ -1051,17 +1086,38 @@ class Store:
|
|||
# ── recipe browser ────────────────────────────────────────────────────
|
||||
|
||||
def get_browser_categories(
|
||||
self, domain: str, keywords_by_category: dict[str, list[str]]
|
||||
self,
|
||||
domain: str,
|
||||
keywords_by_category: dict[str, list[str]],
|
||||
has_subcategories_by_category: dict[str, bool] | None = None,
|
||||
) -> list[dict]:
|
||||
"""Return [{category, recipe_count}] for each category in the domain.
|
||||
"""Return [{category, recipe_count, has_subcategories}] for each category.
|
||||
|
||||
keywords_by_category maps category name to the keyword list used to
|
||||
match against recipes.category and recipes.keywords.
|
||||
keywords_by_category maps category name → keyword list for counting.
|
||||
has_subcategories_by_category maps category name → bool (optional;
|
||||
defaults to False for all categories when omitted).
|
||||
"""
|
||||
results = []
|
||||
for category, keywords in keywords_by_category.items():
|
||||
count = self._count_recipes_for_keywords(keywords)
|
||||
results.append({"category": category, "recipe_count": count})
|
||||
results.append({
|
||||
"category": category,
|
||||
"recipe_count": count,
|
||||
"has_subcategories": (has_subcategories_by_category or {}).get(category, False),
|
||||
})
|
||||
return results
|
||||
|
||||
def get_browser_subcategories(
|
||||
self, domain: str, keywords_by_subcategory: dict[str, list[str]]
|
||||
) -> list[dict]:
|
||||
"""Return [{subcategory, recipe_count}] for each subcategory.
|
||||
|
||||
Mirrors get_browser_categories but for the second level.
|
||||
"""
|
||||
results = []
|
||||
for subcat, keywords in keywords_by_subcategory.items():
|
||||
count = self._count_recipes_for_keywords(keywords)
|
||||
results.append({"subcategory": subcat, "recipe_count": count})
|
||||
return results
|
||||
|
||||
@staticmethod
|
||||
|
|
@ -1091,42 +1147,76 @@ class Store:
|
|||
|
||||
def browse_recipes(
|
||||
self,
|
||||
keywords: list[str],
|
||||
keywords: list[str] | None,
|
||||
page: int,
|
||||
page_size: int,
|
||||
pantry_items: list[str] | None = None,
|
||||
q: str | None = None,
|
||||
sort: str = "default",
|
||||
) -> dict:
|
||||
"""Return a page of recipes matching the keyword set.
|
||||
|
||||
Pass keywords=None to browse all recipes without category filtering.
|
||||
Each recipe row includes match_pct (float | None) when pantry_items
|
||||
is provided. match_pct is the fraction of ingredient_names covered by
|
||||
the pantry set — computed deterministically, no LLM needed.
|
||||
|
||||
q: optional title substring filter (case-insensitive LIKE).
|
||||
sort: "default" (corpus order) | "alpha" (A→Z) | "alpha_desc" (Z→A).
|
||||
"""
|
||||
if not keywords:
|
||||
if keywords is not None and not keywords:
|
||||
return {"recipes": [], "total": 0, "page": page}
|
||||
|
||||
match_expr = self._browser_fts_query(keywords)
|
||||
offset = (page - 1) * page_size
|
||||
|
||||
# Reuse cached count — avoids a second index scan on every page turn.
|
||||
total = self._count_recipes_for_keywords(keywords)
|
||||
|
||||
c = self._cp
|
||||
rows = self._fetch_all(
|
||||
f"""
|
||||
SELECT id, title, category, keywords, ingredient_names,
|
||||
calories, fat_g, protein_g, sodium_mg
|
||||
FROM {c}recipes
|
||||
WHERE id IN (
|
||||
SELECT rowid FROM {c}recipe_browser_fts
|
||||
WHERE recipe_browser_fts MATCH ?
|
||||
)
|
||||
ORDER BY id ASC
|
||||
LIMIT ? OFFSET ?
|
||||
""",
|
||||
(match_expr, page_size, offset),
|
||||
|
||||
order_clause = {
|
||||
"alpha": "ORDER BY title ASC",
|
||||
"alpha_desc": "ORDER BY title DESC",
|
||||
}.get(sort, "ORDER BY id ASC")
|
||||
|
||||
q_param = f"%{q.strip()}%" if q and q.strip() else None
|
||||
cols = (
|
||||
f"SELECT id, title, category, keywords, ingredient_names,"
|
||||
f" calories, fat_g, protein_g, sodium_mg FROM {c}recipes"
|
||||
)
|
||||
|
||||
if keywords is None:
|
||||
if q_param:
|
||||
total = self.conn.execute(
|
||||
f"SELECT COUNT(*) FROM {c}recipes WHERE LOWER(title) LIKE LOWER(?)",
|
||||
(q_param,),
|
||||
).fetchone()[0]
|
||||
rows = self._fetch_all(
|
||||
f"{cols} WHERE LOWER(title) LIKE LOWER(?) {order_clause} LIMIT ? OFFSET ?",
|
||||
(q_param, page_size, offset),
|
||||
)
|
||||
else:
|
||||
total = self.conn.execute(f"SELECT COUNT(*) FROM {c}recipes").fetchone()[0]
|
||||
rows = self._fetch_all(
|
||||
f"{cols} {order_clause} LIMIT ? OFFSET ?",
|
||||
(page_size, offset),
|
||||
)
|
||||
else:
|
||||
match_expr = self._browser_fts_query(keywords)
|
||||
fts_sub = f"id IN (SELECT rowid FROM {c}recipe_browser_fts WHERE recipe_browser_fts MATCH ?)"
|
||||
if q_param:
|
||||
total = self.conn.execute(
|
||||
f"SELECT COUNT(*) FROM {c}recipes WHERE {fts_sub} AND LOWER(title) LIKE LOWER(?)",
|
||||
(match_expr, q_param),
|
||||
).fetchone()[0]
|
||||
rows = self._fetch_all(
|
||||
f"{cols} WHERE {fts_sub} AND LOWER(title) LIKE LOWER(?) {order_clause} LIMIT ? OFFSET ?",
|
||||
(match_expr, q_param, page_size, offset),
|
||||
)
|
||||
else:
|
||||
# Reuse cached count — avoids a second index scan on every page turn.
|
||||
total = self._count_recipes_for_keywords(keywords)
|
||||
rows = self._fetch_all(
|
||||
f"{cols} WHERE {fts_sub} {order_clause} LIMIT ? OFFSET ?",
|
||||
(match_expr, page_size, offset),
|
||||
)
|
||||
|
||||
pantry_set = {p.lower() for p in pantry_items} if pantry_items else None
|
||||
recipes = []
|
||||
for r in rows:
|
||||
|
|
|
|||
|
|
@ -61,6 +61,18 @@ class RecipeResult(BaseModel):
|
|||
orch_fallback: bool = False # True when orch budget exhausted; fell back to local LLM
|
||||
|
||||
|
||||
class RecipeJobQueued(BaseModel):
|
||||
job_id: str
|
||||
status: str = "queued"
|
||||
|
||||
|
||||
class RecipeJobStatus(BaseModel):
|
||||
job_id: str
|
||||
status: str
|
||||
result: RecipeResult | None = None
|
||||
error: str | None = None
|
||||
|
||||
|
||||
class NutritionFilters(BaseModel):
|
||||
"""Optional per-serving upper bounds for macro filtering. None = no filter."""
|
||||
max_calories: float | None = None
|
||||
|
|
@ -71,6 +83,10 @@ class NutritionFilters(BaseModel):
|
|||
|
||||
class RecipeRequest(BaseModel):
|
||||
pantry_items: list[str]
|
||||
# Maps product name → secondary state label for items past nominal expiry
|
||||
# but still within their secondary use window (e.g. {"Bread": "stale"}).
|
||||
# Used by the recipe engine to boost recipes suited to those specific states.
|
||||
secondary_pantry_items: dict[str, str] = Field(default_factory=dict)
|
||||
level: int = Field(default=1, ge=1, le=4)
|
||||
constraints: list[str] = Field(default_factory=list)
|
||||
expiry_first: bool = False
|
||||
|
|
|
|||
|
|
@ -194,6 +194,18 @@ class ExpirationPredictor:
|
|||
'uses': ['broth', 'soups', 'risotto', 'gratins'],
|
||||
'warning': None,
|
||||
},
|
||||
'rice': {
|
||||
'window_days': 2,
|
||||
'label': 'day-old',
|
||||
'uses': ['fried rice', 'rice bowls', 'rice porridge'],
|
||||
'warning': 'Refrigerate immediately after cooking — do not leave at room temp.',
|
||||
},
|
||||
'tortillas': {
|
||||
'window_days': 5,
|
||||
'label': 'stale',
|
||||
'uses': ['chilaquiles', 'migas', 'tortilla soup', 'casserole'],
|
||||
'warning': None,
|
||||
},
|
||||
}
|
||||
|
||||
def days_after_opening(self, category: str | None) -> int | None:
|
||||
|
|
|
|||
|
|
@ -5,6 +5,12 @@ Each domain provides a two-level category hierarchy for browsing the recipe corp
|
|||
Keyword matching is case-insensitive against the recipes.category column and the
|
||||
recipes.keywords JSON array. A recipe may appear in multiple categories (correct).
|
||||
|
||||
Category values are either:
|
||||
- list[str] — flat keyword list (no subcategories)
|
||||
- dict — {"keywords": list[str], "subcategories": {name: list[str]}}
|
||||
keywords covers the whole category (used for "All X" browse);
|
||||
subcategories each have their own narrower keyword list.
|
||||
|
||||
These are starter mappings based on the food.com dataset structure. Run:
|
||||
|
||||
SELECT category, count(*) FROM recipes
|
||||
|
|
@ -19,26 +25,213 @@ DOMAINS: dict[str, dict] = {
|
|||
"cuisine": {
|
||||
"label": "Cuisine",
|
||||
"categories": {
|
||||
"Italian": ["italian", "pasta", "pizza", "risotto", "lasagna", "carbonara"],
|
||||
"Mexican": ["mexican", "tex-mex", "taco", "enchilada", "burrito", "salsa", "guacamole"],
|
||||
"Asian": ["asian", "chinese", "japanese", "thai", "korean", "vietnamese", "stir fry", "stir-fry", "ramen", "sushi"],
|
||||
"American": ["american", "southern", "bbq", "barbecue", "comfort food", "cajun", "creole"],
|
||||
"Mediterranean": ["mediterranean", "greek", "middle eastern", "turkish", "moroccan", "lebanese"],
|
||||
"Indian": ["indian", "curry", "lentil", "dal", "tikka", "masala", "biryani"],
|
||||
"European": ["french", "german", "spanish", "british", "irish", "scandinavian"],
|
||||
"Latin American": ["latin american", "peruvian", "argentinian", "colombian", "cuban", "caribbean"],
|
||||
"Italian": {
|
||||
"keywords": ["italian", "pasta", "pizza", "risotto", "lasagna", "carbonara"],
|
||||
"subcategories": {
|
||||
"Sicilian": ["sicilian", "sicily", "arancini", "caponata",
|
||||
"involtini", "cannoli"],
|
||||
"Neapolitan": ["neapolitan", "naples", "pizza napoletana",
|
||||
"sfogliatelle", "ragù"],
|
||||
"Tuscan": ["tuscan", "tuscany", "ribollita", "bistecca",
|
||||
"pappardelle", "crostini"],
|
||||
"Roman": ["roman", "rome", "cacio e pepe", "carbonara",
|
||||
"amatriciana", "gricia", "supplì"],
|
||||
"Venetian": ["venetian", "venice", "risotto", "bigoli",
|
||||
"baccalà", "sarde in saor"],
|
||||
"Ligurian": ["ligurian", "liguria", "pesto", "focaccia",
|
||||
"trofie", "farinata"],
|
||||
},
|
||||
},
|
||||
"Mexican": {
|
||||
"keywords": ["mexican", "tex-mex", "taco", "enchilada", "burrito",
|
||||
"salsa", "guacamole"],
|
||||
"subcategories": {
|
||||
"Oaxacan": ["oaxacan", "oaxaca", "mole negro", "tlayuda",
|
||||
"chapulines", "mezcal"],
|
||||
"Yucatecan": ["yucatecan", "yucatan", "cochinita pibil", "poc chuc",
|
||||
"sopa de lima", "panuchos"],
|
||||
"Veracruz": ["veracruz", "huachinango", "picadas", "enfrijoladas"],
|
||||
"Street Food": ["taco", "elote", "tlacoyos", "torta",
|
||||
"tamale", "quesadilla"],
|
||||
"Mole": ["mole", "mole negro", "mole rojo", "mole verde",
|
||||
"mole poblano"],
|
||||
},
|
||||
},
|
||||
"Asian": {
|
||||
"keywords": ["asian", "chinese", "japanese", "thai", "korean", "vietnamese",
|
||||
"stir fry", "stir-fry", "ramen", "sushi"],
|
||||
"subcategories": {
|
||||
"Korean": ["korean", "kimchi", "bibimbap", "bulgogi", "japchae",
|
||||
"doenjang", "gochujang"],
|
||||
"Japanese": ["japanese", "sushi", "ramen", "tempura", "miso",
|
||||
"teriyaki", "udon", "soba", "bento", "yakitori"],
|
||||
"Chinese": ["chinese", "dim sum", "fried rice", "dumplings", "wonton",
|
||||
"spring roll", "szechuan", "sichuan", "cantonese",
|
||||
"chow mein", "mapo", "lo mein"],
|
||||
"Thai": ["thai", "pad thai", "green curry", "red curry",
|
||||
"coconut milk", "lemongrass", "satay", "tom yum"],
|
||||
"Vietnamese": ["vietnamese", "pho", "banh mi", "spring rolls",
|
||||
"vermicelli", "nuoc cham", "bun bo"],
|
||||
"Filipino": ["filipino", "adobo", "sinigang", "pancit", "lumpia",
|
||||
"kare-kare", "lechon"],
|
||||
"Indonesian": ["indonesian", "rendang", "nasi goreng", "gado-gado",
|
||||
"tempeh", "sambal"],
|
||||
},
|
||||
},
|
||||
"Indian": {
|
||||
"keywords": ["indian", "curry", "lentil", "dal", "tikka", "masala",
|
||||
"biryani", "naan", "chutney"],
|
||||
"subcategories": {
|
||||
"North Indian": ["north indian", "punjabi", "mughal", "tikka masala",
|
||||
"naan", "tandoori", "butter chicken", "palak"],
|
||||
"South Indian": ["south indian", "tamil", "kerala", "dosa", "idli",
|
||||
"sambar", "rasam", "coconut chutney"],
|
||||
"Bengali": ["bengali", "mustard fish", "hilsa", "shorshe"],
|
||||
"Gujarati": ["gujarati", "dhokla", "thepla", "undhiyu"],
|
||||
},
|
||||
},
|
||||
"Mediterranean": {
|
||||
"keywords": ["mediterranean", "greek", "middle eastern", "turkish",
|
||||
"moroccan", "lebanese"],
|
||||
"subcategories": {
|
||||
"Greek": ["greek", "feta", "tzatziki", "moussaka", "spanakopita",
|
||||
"souvlaki", "dolmades"],
|
||||
"Turkish": ["turkish", "kebab", "borek", "meze", "baklava",
|
||||
"lahmacun"],
|
||||
"Moroccan": ["moroccan", "tagine", "couscous", "harissa",
|
||||
"chermoula", "preserved lemon"],
|
||||
"Lebanese": ["lebanese", "middle eastern", "hummus", "falafel",
|
||||
"tabbouleh", "kibbeh", "fattoush"],
|
||||
"Israeli": ["israeli", "shakshuka", "sabich", "za'atar",
|
||||
"tahini"],
|
||||
},
|
||||
},
|
||||
"American": {
|
||||
"keywords": ["american", "southern", "bbq", "barbecue", "comfort food",
|
||||
"cajun", "creole"],
|
||||
"subcategories": {
|
||||
"Southern": ["southern", "soul food", "fried chicken",
|
||||
"collard greens", "cornbread", "biscuits and gravy"],
|
||||
"Cajun/Creole": ["cajun", "creole", "new orleans", "gumbo",
|
||||
"jambalaya", "etouffee", "dirty rice"],
|
||||
"BBQ": ["bbq", "barbecue", "smoked", "brisket", "pulled pork",
|
||||
"ribs", "pit"],
|
||||
"Tex-Mex": ["tex-mex", "southwestern", "chili", "fajita",
|
||||
"queso"],
|
||||
"New England": ["new england", "chowder", "lobster", "clam",
|
||||
"maple", "yankee"],
|
||||
},
|
||||
},
|
||||
"European": {
|
||||
"keywords": ["french", "german", "spanish", "british", "irish",
|
||||
"scandinavian"],
|
||||
"subcategories": {
|
||||
"French": ["french", "provencal", "beurre", "crepe",
|
||||
"ratatouille", "cassoulet", "bouillabaisse"],
|
||||
"Spanish": ["spanish", "paella", "tapas", "gazpacho",
|
||||
"tortilla espanola", "chorizo"],
|
||||
"German": ["german", "bratwurst", "sauerkraut", "schnitzel",
|
||||
"pretzel", "strudel"],
|
||||
"British/Irish": ["british", "irish", "english", "pub food",
|
||||
"shepherd's pie", "bangers", "scones"],
|
||||
"Scandinavian": ["scandinavian", "nordic", "swedish", "norwegian",
|
||||
"danish", "gravlax", "meatballs"],
|
||||
},
|
||||
},
|
||||
"Latin American": {
|
||||
"keywords": ["latin american", "peruvian", "argentinian", "colombian",
|
||||
"cuban", "caribbean", "brazilian"],
|
||||
"subcategories": {
|
||||
"Peruvian": ["peruvian", "ceviche", "lomo saltado", "anticucho",
|
||||
"aji amarillo"],
|
||||
"Brazilian": ["brazilian", "churrasco", "feijoada", "pao de queijo",
|
||||
"brigadeiro"],
|
||||
"Colombian": ["colombian", "bandeja paisa", "arepas", "empanadas",
|
||||
"sancocho"],
|
||||
"Cuban": ["cuban", "ropa vieja", "moros y cristianos",
|
||||
"picadillo", "mojito"],
|
||||
"Caribbean": ["caribbean", "jamaican", "jerk", "trinidadian",
|
||||
"plantain", "roti"],
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
"meal_type": {
|
||||
"label": "Meal Type",
|
||||
"categories": {
|
||||
"Breakfast": ["breakfast", "brunch", "eggs", "pancakes", "waffles", "oatmeal", "muffin"],
|
||||
"Lunch": ["lunch", "sandwich", "wrap", "salad", "soup", "light meal"],
|
||||
"Dinner": ["dinner", "main dish", "entree", "main course", "supper"],
|
||||
"Snack": ["snack", "appetizer", "finger food", "dip", "bite", "starter"],
|
||||
"Dessert": ["dessert", "cake", "cookie", "pie", "sweet", "pudding", "ice cream", "brownie"],
|
||||
"Beverage": ["drink", "smoothie", "cocktail", "beverage", "juice", "shake"],
|
||||
"Side Dish": ["side dish", "side", "accompaniment", "garnish"],
|
||||
"Breakfast": {
|
||||
"keywords": ["breakfast", "brunch", "eggs", "pancakes", "waffles",
|
||||
"oatmeal", "muffin"],
|
||||
"subcategories": {
|
||||
"Eggs": ["egg", "omelette", "frittata", "quiche",
|
||||
"scrambled", "benedict", "shakshuka"],
|
||||
"Pancakes & Waffles": ["pancake", "waffle", "crepe", "french toast"],
|
||||
"Baked Goods": ["muffin", "scone", "biscuit", "quick bread",
|
||||
"coffee cake", "danish"],
|
||||
"Oats & Grains": ["oatmeal", "granola", "porridge", "muesli",
|
||||
"overnight oats"],
|
||||
},
|
||||
},
|
||||
"Lunch": {
|
||||
"keywords": ["lunch", "sandwich", "wrap", "salad", "soup", "light meal"],
|
||||
"subcategories": {
|
||||
"Sandwiches": ["sandwich", "sub", "hoagie", "panini", "club",
|
||||
"grilled cheese", "blt"],
|
||||
"Salads": ["salad", "grain bowl", "chopped", "caesar",
|
||||
"niçoise", "cobb"],
|
||||
"Soups": ["soup", "bisque", "chowder", "gazpacho",
|
||||
"minestrone", "lentil soup"],
|
||||
"Wraps": ["wrap", "burrito bowl", "pita", "lettuce wrap",
|
||||
"quesadilla"],
|
||||
},
|
||||
},
|
||||
"Dinner": {
|
||||
"keywords": ["dinner", "main dish", "entree", "main course", "supper"],
|
||||
"subcategories": {
|
||||
"Casseroles": ["casserole", "bake", "gratin", "lasagna",
|
||||
"sheperd's pie", "pot pie"],
|
||||
"Stews": ["stew", "braise", "slow cooker", "pot roast",
|
||||
"daube", "ragù"],
|
||||
"Grilled": ["grilled", "grill", "barbecue", "charred",
|
||||
"kebab", "skewer"],
|
||||
"Stir-Fries": ["stir fry", "stir-fry", "wok", "sauté",
|
||||
"sauteed"],
|
||||
"Roasts": ["roast", "roasted", "oven", "baked chicken",
|
||||
"pot roast"],
|
||||
},
|
||||
},
|
||||
"Snack": {
|
||||
"keywords": ["snack", "appetizer", "finger food", "dip", "bite",
|
||||
"starter"],
|
||||
"subcategories": {
|
||||
"Dips & Spreads": ["dip", "spread", "hummus", "guacamole",
|
||||
"salsa", "pate"],
|
||||
"Finger Foods": ["finger food", "bite", "skewer", "slider",
|
||||
"wing", "nugget"],
|
||||
"Chips & Crackers": ["chip", "cracker", "crisp", "popcorn",
|
||||
"pretzel"],
|
||||
},
|
||||
},
|
||||
"Dessert": {
|
||||
"keywords": ["dessert", "cake", "cookie", "pie", "sweet", "pudding",
|
||||
"ice cream", "brownie"],
|
||||
"subcategories": {
|
||||
"Cakes": ["cake", "cupcake", "layer cake", "bundt",
|
||||
"cheesecake", "torte"],
|
||||
"Cookies & Bars": ["cookie", "brownie", "blondie", "bar",
|
||||
"biscotti", "shortbread"],
|
||||
"Pies & Tarts": ["pie", "tart", "galette", "cobbler", "crisp",
|
||||
"crumble"],
|
||||
"Frozen": ["ice cream", "gelato", "sorbet", "frozen dessert",
|
||||
"popsicle", "granita"],
|
||||
"Puddings": ["pudding", "custard", "mousse", "panna cotta",
|
||||
"flan", "creme brulee"],
|
||||
"Candy": ["candy", "fudge", "truffle", "brittle",
|
||||
"caramel", "toffee"],
|
||||
},
|
||||
},
|
||||
"Beverage": ["drink", "smoothie", "cocktail", "beverage", "juice", "shake"],
|
||||
"Side Dish": ["side dish", "side", "accompaniment", "garnish"],
|
||||
},
|
||||
},
|
||||
"dietary": {
|
||||
|
|
@ -56,33 +249,128 @@ DOMAINS: dict[str, dict] = {
|
|||
"main_ingredient": {
|
||||
"label": "Main Ingredient",
|
||||
"categories": {
|
||||
# These values match the inferred_tags written by tag_inferrer._MAIN_INGREDIENT_SIGNALS
|
||||
# and indexed into recipe_browser_fts — use exact tag strings.
|
||||
"Chicken": ["main:Chicken"],
|
||||
"Beef": ["main:Beef"],
|
||||
"Pork": ["main:Pork"],
|
||||
"Fish": ["main:Fish"],
|
||||
# keywords use exact inferred_tag strings (main:X) — indexed into recipe_browser_fts.
|
||||
"Chicken": {
|
||||
"keywords": ["main:Chicken"],
|
||||
"subcategories": {
|
||||
"Baked": ["baked chicken", "roast chicken", "chicken casserole",
|
||||
"chicken bake"],
|
||||
"Grilled": ["grilled chicken", "chicken kebab", "bbq chicken",
|
||||
"chicken skewer"],
|
||||
"Fried": ["fried chicken", "chicken cutlet", "chicken schnitzel",
|
||||
"crispy chicken"],
|
||||
"Stewed": ["chicken stew", "chicken soup", "coq au vin",
|
||||
"chicken curry", "chicken braise"],
|
||||
},
|
||||
},
|
||||
"Beef": {
|
||||
"keywords": ["main:Beef"],
|
||||
"subcategories": {
|
||||
"Ground Beef": ["ground beef", "hamburger", "meatball", "meatloaf",
|
||||
"bolognese", "burger"],
|
||||
"Steak": ["steak", "sirloin", "ribeye", "flank steak",
|
||||
"filet mignon", "t-bone"],
|
||||
"Roasts": ["beef roast", "pot roast", "brisket", "prime rib",
|
||||
"chuck roast"],
|
||||
"Stews": ["beef stew", "beef braise", "beef bourguignon",
|
||||
"short ribs"],
|
||||
},
|
||||
},
|
||||
"Pork": {
|
||||
"keywords": ["main:Pork"],
|
||||
"subcategories": {
|
||||
"Chops": ["pork chop", "pork loin", "pork cutlet"],
|
||||
"Pulled/Slow": ["pulled pork", "pork shoulder", "pork butt",
|
||||
"carnitas", "slow cooker pork"],
|
||||
"Sausage": ["sausage", "bratwurst", "chorizo", "andouille",
|
||||
"Italian sausage"],
|
||||
"Ribs": ["pork ribs", "baby back ribs", "spare ribs",
|
||||
"pork belly"],
|
||||
},
|
||||
},
|
||||
"Fish": {
|
||||
"keywords": ["main:Fish"],
|
||||
"subcategories": {
|
||||
"Salmon": ["salmon", "smoked salmon", "gravlax"],
|
||||
"Tuna": ["tuna", "albacore", "ahi"],
|
||||
"White Fish": ["cod", "tilapia", "halibut", "sole", "snapper",
|
||||
"flounder", "bass"],
|
||||
"Shellfish": ["shrimp", "prawn", "crab", "lobster", "scallop",
|
||||
"mussel", "clam", "oyster"],
|
||||
},
|
||||
},
|
||||
"Pasta": ["main:Pasta"],
|
||||
"Vegetables": ["main:Vegetables"],
|
||||
"Eggs": ["main:Eggs"],
|
||||
"Legumes": ["main:Legumes"],
|
||||
"Grains": ["main:Grains"],
|
||||
"Cheese": ["main:Cheese"],
|
||||
"Vegetables": {
|
||||
"keywords": ["main:Vegetables"],
|
||||
"subcategories": {
|
||||
"Root Veg": ["potato", "sweet potato", "carrot", "beet",
|
||||
"parsnip", "turnip"],
|
||||
"Leafy": ["spinach", "kale", "chard", "arugula",
|
||||
"collard greens", "lettuce"],
|
||||
"Brassicas": ["broccoli", "cauliflower", "brussels sprouts",
|
||||
"cabbage", "bok choy"],
|
||||
"Nightshades": ["tomato", "eggplant", "bell pepper", "zucchini",
|
||||
"squash"],
|
||||
"Mushrooms": ["mushroom", "portobello", "shiitake", "oyster mushroom",
|
||||
"chanterelle"],
|
||||
},
|
||||
},
|
||||
"Eggs": ["main:Eggs"],
|
||||
"Legumes": ["main:Legumes"],
|
||||
"Grains": ["main:Grains"],
|
||||
"Cheese": ["main:Cheese"],
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
def _get_category_def(domain: str, category: str) -> list[str] | dict | None:
    """Look up the raw definition for *category* within *domain*.

    Returns the flat keyword list or nested dict stored in DOMAINS,
    or None when either the domain or the category is unknown.
    """
    domain_entry = DOMAINS.get(domain, {})
    return domain_entry.get("categories", {}).get(category)
|
||||
|
||||
|
||||
def get_domain_labels() -> list[dict]:
    """Return [{id, label}] entries for every registered domain."""
    labels: list[dict] = []
    for domain_id, spec in DOMAINS.items():
        labels.append({"id": domain_id, "label": spec["label"]})
    return labels
|
||||
|
||||
|
||||
def get_keywords_for_category(domain: str, category: str) -> list[str]:
    """Return the keyword list for the category (top-level, covers all subcategories).

    For flat categories (a plain list) the list is returned directly.
    For nested categories (a dict with 'keywords'/'subcategories') the
    top-level 'keywords' list is returned.
    Returns [] if the domain or category is not found.
    """
    # NOTE: the previous flat-only implementation (direct categories.get())
    # was left concatenated above its nested-aware replacement, leaving the
    # second docstring and all nested handling unreachable after the first
    # return. Only the nested-aware version is kept.
    cat_def = _get_category_def(domain, category)
    if cat_def is None:
        return []
    if isinstance(cat_def, list):
        return cat_def
    return cat_def.get("keywords", [])
|
||||
|
||||
|
||||
def category_has_subcategories(domain: str, category: str) -> bool:
    """Return True when a category defines a subcategory level."""
    cat_def = _get_category_def(domain, category)
    # Flat categories are plain lists; only dict-shaped defs can carry
    # a non-empty 'subcategories' mapping.
    return isinstance(cat_def, dict) and bool(cat_def.get("subcategories"))
|
||||
|
||||
|
||||
def get_subcategory_names(domain: str, category: str) -> list[str]:
    """Return subcategory names for a category, or [] if none exist."""
    cat_def = _get_category_def(domain, category)
    if isinstance(cat_def, dict):
        return list(cat_def.get("subcategories", {}))
    # Flat (list-shaped) or missing categories have no subcategory level.
    return []
|
||||
|
||||
|
||||
def get_keywords_for_subcategory(domain: str, category: str, subcategory: str) -> list[str]:
    """Return the keyword list for one subcategory, or [] when not found."""
    cat_def = _get_category_def(domain, category)
    if not isinstance(cat_def, dict):
        # Flat or unknown category — no subcategory level to look in.
        return []
    subs = cat_def.get("subcategories", {})
    return subs.get(subcategory, [])
|
||||
|
||||
|
||||
def get_category_names(domain: str) -> list[str]:
|
||||
|
|
|
|||
|
|
@ -181,6 +181,19 @@ class LLMRecipeGenerator:
|
|||
try:
|
||||
alloc = ctx.__enter__()
|
||||
except Exception as exc:
|
||||
msg = str(exc)
|
||||
# 429 = coordinator at capacity (all nodes at max_concurrent limit).
|
||||
# Don't fall back to LLMRouter — it's also overloaded and the slow
|
||||
# fallback causes nginx 504s. Return "" fast so the caller degrades
|
||||
# gracefully (empty recipe result) rather than timing out.
|
||||
if "429" in msg or "max_concurrent" in msg.lower():
|
||||
logger.info("cf-orch at capacity — returning empty result (graceful degradation)")
|
||||
if ctx is not None:
|
||||
try:
|
||||
ctx.__exit__(None, None, None)
|
||||
except Exception:
|
||||
pass
|
||||
return ""
|
||||
logger.debug("cf-orch allocation failed, falling back to LLMRouter: %s", exc)
|
||||
ctx = None # __enter__ raised — do not call __exit__
|
||||
|
||||
|
|
|
|||
|
|
@ -155,6 +155,24 @@ _PANTRY_LABEL_SYNONYMS: dict[str, str] = {
|
|||
}
|
||||
|
||||
|
||||
# When a pantry item is in a secondary state (e.g. bread → "stale"), expand
|
||||
# the pantry set with terms that recipe ingredients commonly use to describe
|
||||
# that state. This lets "stale bread" in a recipe ingredient match a pantry
|
||||
# entry that is simply called "Bread" but is past its nominal use-by date.
|
||||
# Each key is (category_in_SECONDARY_WINDOW, label_returned_by_secondary_state).
|
||||
# Values are additional strings added to the pantry set for FTS coverage.
|
||||
_SECONDARY_STATE_SYNONYMS: dict[tuple[str, str], list[str]] = {
|
||||
("bread", "stale"): ["stale bread", "day-old bread", "old bread", "dried bread"],
|
||||
("bakery", "day-old"): ["day-old bread", "stale bread", "stale pastry"],
|
||||
("bananas", "overripe"): ["overripe bananas", "very ripe banana", "ripe bananas", "mashed banana"],
|
||||
("milk", "sour"): ["sour milk", "slightly sour milk", "buttermilk"],
|
||||
("dairy", "sour"): ["sour milk", "slightly sour milk"],
|
||||
("cheese", "well-aged"): ["parmesan rind", "cheese rind", "aged cheese"],
|
||||
("rice", "day-old"): ["day-old rice", "leftover rice", "cold rice", "cooked rice"],
|
||||
("tortillas", "stale"): ["stale tortillas", "dried tortillas", "day-old tortillas"],
|
||||
}
|
||||
|
||||
|
||||
# Matches leading quantity/unit prefixes in recipe ingredient strings,
|
||||
# e.g. "2 cups flour" → "flour", "1/2 c. ketchup" → "ketchup",
|
||||
# "3 oz. butter" → "butter"
|
||||
|
|
@ -284,14 +302,24 @@ def _prep_note_for(ingredient: str) -> str | None:
|
|||
return template.format(ingredient=ingredient_name)
|
||||
|
||||
|
||||
def _expand_pantry_set(pantry_items: list[str]) -> set[str]:
|
||||
def _expand_pantry_set(
|
||||
pantry_items: list[str],
|
||||
secondary_pantry_items: dict[str, str] | None = None,
|
||||
) -> set[str]:
|
||||
"""Return pantry_set expanded with canonical recipe-corpus synonyms.
|
||||
|
||||
For each pantry item, checks _PANTRY_LABEL_SYNONYMS for substring matches
|
||||
and adds the canonical form. This lets single-word recipe ingredients
|
||||
("hamburger", "chicken") match product-label pantry entries
|
||||
("burger patties", "rotisserie chicken").
|
||||
|
||||
If secondary_pantry_items is provided (product_name → state label), items
|
||||
in a secondary state also receive state-specific synonym expansion so that
|
||||
recipe ingredients like "stale bread" or "day-old rice" are matched.
|
||||
"""
|
||||
from app.services.expiration_predictor import ExpirationPredictor
|
||||
_predictor = ExpirationPredictor()
|
||||
|
||||
expanded: set[str] = set()
|
||||
for item in pantry_items:
|
||||
lower = item.lower().strip()
|
||||
|
|
@ -299,6 +327,15 @@ def _expand_pantry_set(pantry_items: list[str]) -> set[str]:
|
|||
for pattern, canonical in _PANTRY_LABEL_SYNONYMS.items():
|
||||
if pattern in lower:
|
||||
expanded.add(canonical)
|
||||
|
||||
# Secondary state expansion — adds terms like "stale bread", "day-old rice"
|
||||
if secondary_pantry_items and item in secondary_pantry_items:
|
||||
state_label = secondary_pantry_items[item]
|
||||
category = _predictor.get_category_from_product(item)
|
||||
if category:
|
||||
synonyms = _SECONDARY_STATE_SYNONYMS.get((category, state_label), [])
|
||||
expanded.update(synonyms)
|
||||
|
||||
return expanded
|
||||
|
||||
|
||||
|
|
@ -634,7 +671,7 @@ class RecipeEngine:
|
|||
|
||||
profiles = self._classifier.classify_batch(req.pantry_items)
|
||||
gaps = self._classifier.identify_gaps(profiles)
|
||||
pantry_set = _expand_pantry_set(req.pantry_items)
|
||||
pantry_set = _expand_pantry_set(req.pantry_items, req.secondary_pantry_items or None)
|
||||
|
||||
if req.level >= 3:
|
||||
from app.services.recipe.llm_recipe import LLMRecipeGenerator
|
||||
|
|
|
|||
|
|
@ -22,7 +22,7 @@ from app.services.expiration_predictor import ExpirationPredictor
|
|||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
LLM_TASK_TYPES: frozenset[str] = frozenset({"expiry_llm_fallback"})
|
||||
LLM_TASK_TYPES: frozenset[str] = frozenset({"expiry_llm_fallback", "recipe_llm"})
|
||||
|
||||
VRAM_BUDGETS: dict[str, float] = {
|
||||
# ExpirationPredictor uses a small LLM (16 tokens out, single pass).
|
||||
|
|
@ -88,6 +88,8 @@ def run_task(
|
|||
try:
|
||||
if task_type == "expiry_llm_fallback":
|
||||
_run_expiry_llm_fallback(db_path, job_id, params)
|
||||
elif task_type == "recipe_llm":
|
||||
_run_recipe_llm(db_path, job_id, params)
|
||||
else:
|
||||
raise ValueError(f"Unknown kiwi task type: {task_type!r}")
|
||||
_update_task_status(db_path, task_id, "completed")
|
||||
|
|
@ -143,3 +145,41 @@ def _run_expiry_llm_fallback(
|
|||
expiry,
|
||||
days,
|
||||
)
|
||||
|
||||
|
||||
def _run_recipe_llm(db_path: Path, _job_id_int: int, params: str | None) -> None:
    """Execute LLM recipe generation for one async recipe job.

    params JSON keys:
        job_id (required) — recipe_jobs.job_id string (e.g. "rec_a1b2c3...")

    Opens its own Store (same pattern as _suggest_in_thread). On any failure
    the recipe_jobs row is marked failed via store.fail_recipe_job() before
    re-raising, so recipe_jobs.status never stays 'running' while
    background_tasks reports 'failed'.
    """
    from app.db.store import Store
    from app.models.schemas.recipe import RecipeRequest
    from app.services.recipe.recipe_engine import RecipeEngine

    payload = json.loads(params or "{}")
    recipe_job_id: str = payload.get("job_id", "")
    if not recipe_job_id:
        raise ValueError("recipe_llm: 'job_id' is required in params")

    store = Store(db_path)
    try:
        store.update_recipe_job_running(recipe_job_id)
        job_row = store._fetch_one(
            "SELECT request FROM recipe_jobs WHERE job_id=?", (recipe_job_id,)
        )
        if job_row is None:
            raise ValueError(f"recipe_llm: recipe_jobs row not found: {recipe_job_id!r}")
        request = RecipeRequest.model_validate_json(job_row["request"])
        result = RecipeEngine(store).suggest(request)
        store.complete_recipe_job(recipe_job_id, result.model_dump_json())
        log.info("recipe_llm: job %s completed (%d suggestion(s))", recipe_job_id, len(result.suggestions))
    except Exception as exc:
        # Mirror the failure into the recipe_jobs row before propagating so
        # the scheduler's background_tasks status and recipe_jobs agree.
        store.fail_recipe_job(recipe_job_id, str(exc))
        raise
    finally:
        store.close()
|
||||
|
|
|
|||
|
|
@ -1,5 +1,10 @@
|
|||
# app/tasks/scheduler.py
|
||||
"""Kiwi LLM task scheduler — thin shim over circuitforge_core.tasks.scheduler."""
|
||||
"""Kiwi LLM task scheduler — thin shim over circuitforge_core.tasks.scheduler.
|
||||
|
||||
Local mode (CLOUD_MODE unset): LocalScheduler — simple FIFO, no coordinator.
|
||||
Cloud mode (CLOUD_MODE=true): OrchestratedScheduler — coordinator-aware, fans
|
||||
out concurrent jobs across all registered cf-orch GPU nodes.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from pathlib import Path
|
||||
|
|
@ -7,15 +12,68 @@ from pathlib import Path
|
|||
from circuitforge_core.tasks.scheduler import (
|
||||
TaskScheduler,
|
||||
get_scheduler as _base_get_scheduler,
|
||||
reset_scheduler, # re-export for tests
|
||||
reset_scheduler as _reset_local, # re-export for tests
|
||||
)
|
||||
|
||||
from app.cloud_session import CLOUD_MODE
|
||||
from app.core.config import settings
|
||||
from app.tasks.runner import LLM_TASK_TYPES, VRAM_BUDGETS, run_task
|
||||
|
||||
|
||||
def _orch_available() -> bool:
|
||||
"""Return True if circuitforge_orch is installed in this environment."""
|
||||
try:
|
||||
import circuitforge_orch # noqa: F401
|
||||
return True
|
||||
except ImportError:
|
||||
return False
|
||||
|
||||
|
||||
def _use_orch() -> bool:
    """Decide whether the coordinator-aware OrchestratedScheduler is active.

    Resolution order:
      1. settings.USE_ORCH_SCHEDULER — explicit override always wins.
      2. CLOUD_MODE — managed cloud deployments use the orchestrator.
      3. circuitforge_orch importable — paid+ local users with cf-orch set up
         get coordinator-aware scheduling (local GPU first) automatically.
    """
    explicit = settings.USE_ORCH_SCHEDULER
    if explicit is None:
        return CLOUD_MODE or _orch_available()
    return explicit
|
||||
|
||||
|
||||
def get_scheduler(db_path: Path) -> TaskScheduler:
|
||||
"""Return the process-level TaskScheduler singleton for Kiwi."""
|
||||
"""Return the process-level TaskScheduler singleton for Kiwi.
|
||||
|
||||
OrchestratedScheduler: coordinator-aware, fans out concurrent jobs across
|
||||
all registered cf-orch GPU nodes. Active when USE_ORCH_SCHEDULER=true,
|
||||
CLOUD_MODE=true, or circuitforge_orch is installed locally (paid+ users
|
||||
running their own cf-orch stack get this automatically; local GPU is
|
||||
preferred by the coordinator's allocation queue).
|
||||
|
||||
LocalScheduler: serial FIFO, no coordinator dependency. Free-tier local
|
||||
installs without circuitforge_orch installed use this automatically.
|
||||
"""
|
||||
if _use_orch():
|
||||
try:
|
||||
from circuitforge_orch.scheduler import get_orch_scheduler
|
||||
except ImportError:
|
||||
import logging
|
||||
logging.getLogger(__name__).warning(
|
||||
"circuitforge_orch not installed — falling back to LocalScheduler"
|
||||
)
|
||||
else:
|
||||
return get_orch_scheduler(
|
||||
db_path=db_path,
|
||||
run_task_fn=run_task,
|
||||
task_types=LLM_TASK_TYPES,
|
||||
vram_budgets=VRAM_BUDGETS,
|
||||
coordinator_url=settings.COORDINATOR_URL,
|
||||
service_name="kiwi",
|
||||
)
|
||||
|
||||
return _base_get_scheduler(
|
||||
db_path=db_path,
|
||||
run_task_fn=run_task,
|
||||
|
|
@ -24,3 +82,15 @@ def get_scheduler(db_path: Path) -> TaskScheduler:
|
|||
coordinator_url=settings.COORDINATOR_URL,
|
||||
service_name="kiwi",
|
||||
)
|
||||
|
||||
|
||||
def reset_scheduler() -> None:
    """Shut down and clear the active scheduler singleton. TEST TEARDOWN ONLY."""
    if not _use_orch():
        _reset_local()
        return
    try:
        from circuitforge_orch.scheduler import reset_orch_scheduler
        reset_orch_scheduler()
    except ImportError:
        # Orch mode selected but cf-orch is not importable — the active
        # singleton must then be the local one; clear it instead.
        _reset_local()
|
||||
|
|
|
|||
|
|
@ -8,9 +8,9 @@ services:
|
|||
# Docker can follow the symlink inside the container.
|
||||
- /Library/Assets/kiwi:/Library/Assets/kiwi:rw
|
||||
|
||||
# cf-orch agent sidecar: registers kiwi as a GPU node with the coordinator.
|
||||
# cf-orch agent sidecar: registers this machine as GPU node "sif" with the coordinator.
|
||||
# The API scheduler uses COORDINATOR_URL to lease VRAM cooperatively; this
|
||||
# agent makes kiwi's VRAM usage visible on the orchestrator dashboard.
|
||||
# agent makes the local VRAM usage visible on the orchestrator dashboard.
|
||||
cf-orch-agent:
|
||||
image: kiwi-api # reuse local api image — cf-core already installed there
|
||||
network_mode: host
|
||||
|
|
@ -21,7 +21,7 @@ services:
|
|||
command: >
|
||||
conda run -n kiwi cf-orch agent
|
||||
--coordinator ${COORDINATOR_URL:-http://10.1.10.71:7700}
|
||||
--node-id kiwi
|
||||
--node-id sif
|
||||
--host 0.0.0.0
|
||||
--port 7702
|
||||
--advertise-host ${CF_ORCH_ADVERTISE_HOST:-10.1.10.71}
|
||||
|
|
|
|||
74
config/llm.yaml.example
Normal file
74
config/llm.yaml.example
Normal file
|
|
@ -0,0 +1,74 @@
|
|||
# Kiwi — LLM backend configuration
|
||||
#
|
||||
# Copy to ~/.config/circuitforge/llm.yaml (shared across all CF products)
|
||||
# or to config/llm.yaml (Kiwi-local, takes precedence).
|
||||
#
|
||||
# Kiwi uses LLMs for:
|
||||
# - Expiry prediction fallback (unknown products not in the lookup table)
|
||||
# - Meal planning suggestions
|
||||
#
|
||||
# Local inference (Ollama / vLLM) is the default path — no API key required.
|
||||
# BYOK (bring your own key): set api_key_env to point at your API key env var.
|
||||
# cf-orch trunk: set CF_ORCH_URL env var to allocate cf-text on-demand via
|
||||
# the coordinator instead of hitting a static URL.
|
||||
|
||||
backends:
|
||||
ollama:
|
||||
type: openai_compat
|
||||
enabled: true
|
||||
base_url: http://localhost:11434/v1
|
||||
model: llama3.2:3b
|
||||
api_key: ollama
|
||||
supports_images: false
|
||||
|
||||
vllm:
|
||||
type: openai_compat
|
||||
enabled: false
|
||||
base_url: http://localhost:8000/v1
|
||||
model: __auto__ # resolved from /v1/models at runtime
|
||||
api_key: ''
|
||||
supports_images: false
|
||||
|
||||
# ── cf-orch trunk services ──────────────────────────────────────────────────
|
||||
# These allocate via cf-orch rather than connecting to a static URL.
|
||||
# cf-orch starts the service on-demand and returns its live URL.
|
||||
# Set CF_ORCH_URL env var or fill in url below; leave enabled: false if
|
||||
# cf-orch is not deployed in your environment.
|
||||
|
||||
cf_text:
|
||||
type: openai_compat
|
||||
enabled: false
|
||||
base_url: http://localhost:8008/v1 # fallback when cf-orch is not available
|
||||
model: __auto__
|
||||
api_key: any
|
||||
supports_images: false
|
||||
cf_orch:
|
||||
service: cf-text
|
||||
# model_candidates: leave empty to use the service's default_model,
|
||||
# or specify a catalog alias (e.g. "qwen2.5-3b").
|
||||
model_candidates: []
|
||||
ttl_s: 3600
|
||||
|
||||
# ── Cloud / BYOK ───────────────────────────────────────────────────────────
|
||||
|
||||
anthropic:
|
||||
type: anthropic
|
||||
enabled: false
|
||||
model: claude-haiku-4-5-20251001
|
||||
api_key_env: ANTHROPIC_API_KEY
|
||||
supports_images: false
|
||||
|
||||
openai:
|
||||
type: openai_compat
|
||||
enabled: false
|
||||
base_url: https://api.openai.com/v1
|
||||
model: gpt-4o-mini
|
||||
api_key_env: OPENAI_API_KEY
|
||||
supports_images: false
|
||||
|
||||
fallback_order:
|
||||
- cf_text
|
||||
- ollama
|
||||
- vllm
|
||||
- anthropic
|
||||
- openai
|
||||
|
|
@ -8,7 +8,7 @@ server {
|
|||
# Proxy API requests to the FastAPI container via Docker bridge network.
|
||||
location /api/ {
|
||||
proxy_pass http://api:8512;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header Host $http_host;
|
||||
# Prefer X-Real-IP set by Caddy (real client address); fall back to $remote_addr
|
||||
# when accessed directly on LAN without Caddy in the path.
|
||||
proxy_set_header X-Real-IP $http_x_real_ip;
|
||||
|
|
@ -20,6 +20,22 @@ server {
|
|||
client_max_body_size 20m;
|
||||
}
|
||||
|
||||
# Direct-port LAN access (localhost:8515): when VITE_API_BASE='/kiwi', the frontend
|
||||
# builds API calls as /kiwi/api/v1/... — proxy these to the API container.
|
||||
# Through Caddy the /kiwi prefix is stripped before reaching nginx, so this block
|
||||
# is only active for direct-port access without Caddy in the path.
|
||||
# Longer prefix (/kiwi/api/ = 10 chars) beats ^~/kiwi/ (6 chars) per nginx rules.
|
||||
location /kiwi/api/ {
|
||||
rewrite ^/kiwi(/api/.*)$ $1 break;
|
||||
proxy_pass http://api:8512;
|
||||
proxy_set_header Host $http_host;
|
||||
proxy_set_header X-Real-IP $http_x_real_ip;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $http_x_forwarded_proto;
|
||||
proxy_set_header X-CF-Session $http_x_cf_session;
|
||||
client_max_body_size 20m;
|
||||
}
|
||||
|
||||
# When accessed directly (localhost:8515) instead of via Caddy (/kiwi path-strip),
|
||||
# Vite's /kiwi base URL means assets are requested at /kiwi/assets/... but stored
|
||||
# at /assets/... in nginx's root. Alias /kiwi/ → root so direct port access works.
|
||||
|
|
|
|||
|
|
@ -88,22 +88,22 @@
|
|||
|
||||
<main class="app-main">
|
||||
<div class="container">
|
||||
<div v-show="currentTab === 'inventory'" class="tab-content fade-in">
|
||||
<div v-if="mountedTabs.has('inventory')" v-show="currentTab === 'inventory'" class="tab-content fade-in">
|
||||
<InventoryList />
|
||||
</div>
|
||||
<div v-show="currentTab === 'receipts'" class="tab-content fade-in">
|
||||
<div v-if="mountedTabs.has('receipts')" v-show="currentTab === 'receipts'" class="tab-content fade-in">
|
||||
<ReceiptsView />
|
||||
</div>
|
||||
<div v-show="currentTab === 'recipes'" class="tab-content fade-in">
|
||||
<RecipesView />
|
||||
</div>
|
||||
<div v-show="currentTab === 'settings'" class="tab-content fade-in">
|
||||
<div v-if="mountedTabs.has('settings')" v-show="currentTab === 'settings'" class="tab-content fade-in">
|
||||
<SettingsView />
|
||||
</div>
|
||||
<div v-show="currentTab === 'mealplan'" class="tab-content">
|
||||
<div v-if="mountedTabs.has('mealplan')" v-show="currentTab === 'mealplan'" class="tab-content">
|
||||
<MealPlanView />
|
||||
</div>
|
||||
<div v-show="currentTab === 'shopping'" class="tab-content fade-in">
|
||||
<div v-if="mountedTabs.has('shopping')" v-show="currentTab === 'shopping'" class="tab-content fade-in">
|
||||
<ShoppingView />
|
||||
</div>
|
||||
</div>
|
||||
|
|
@ -204,7 +204,7 @@
|
|||
</template>
|
||||
|
||||
<script setup lang="ts">
|
||||
import { ref, onMounted } from 'vue'
|
||||
import { ref, reactive, onMounted } from 'vue'
|
||||
import InventoryList from './components/InventoryList.vue'
|
||||
import ReceiptsView from './components/ReceiptsView.vue'
|
||||
import RecipesView from './components/RecipesView.vue'
|
||||
|
|
@ -220,6 +220,10 @@ type Tab = 'inventory' | 'receipts' | 'recipes' | 'settings' | 'mealplan' | 'sho
|
|||
|
||||
const currentTab = ref<Tab>('recipes')
|
||||
const sidebarCollapsed = ref(false)
|
||||
// Lazy-mount: tabs mount on first visit and stay mounted (KeepAlive-like behaviour).
|
||||
// Only 'recipes' is in the initial set so non-active tabs don't mount simultaneously
|
||||
// on page load — eliminates concurrent onMounted calls across all tab components.
|
||||
const mountedTabs = reactive(new Set<Tab>(['recipes']))
|
||||
const inventoryStore = useInventoryStore()
|
||||
const { kiwiVisible, kiwiDirection } = useEasterEggs()
|
||||
|
||||
|
|
@ -239,6 +243,7 @@ function onWordmarkClick() {
|
|||
}
|
||||
|
||||
async function switchTab(tab: Tab) {
|
||||
mountedTabs.add(tab)
|
||||
currentTab.value = tab
|
||||
if (tab === 'recipes' && inventoryStore.items.length === 0) {
|
||||
await inventoryStore.fetchItems()
|
||||
|
|
|
|||
|
|
@ -175,7 +175,8 @@ async function uploadFile(file: File) {
|
|||
|
||||
async function loadReceipts() {
|
||||
try {
|
||||
const data = await receiptsAPI.listReceipts()
|
||||
const raw = await receiptsAPI.listReceipts()
|
||||
const data = Array.isArray(raw) ? raw : []
|
||||
// Fetch OCR data for each receipt
|
||||
receipts.value = await Promise.all(
|
||||
data.map(async (receipt: any) => {
|
||||
|
|
|
|||
|
|
@ -15,8 +15,19 @@
|
|||
<div v-if="loadingDomains" class="text-secondary text-sm">Loading…</div>
|
||||
|
||||
<div v-else-if="activeDomain" class="browser-body">
|
||||
<!-- Corpus unavailable notice — shown when all category counts are 0 -->
|
||||
<div v-if="allCountsZero" class="browser-unavailable card p-md text-secondary text-sm">
|
||||
Recipe library is not available on this instance yet. Browse categories will appear once the recipe corpus is loaded.
|
||||
</div>
|
||||
|
||||
<!-- Category list + Surprise Me -->
|
||||
<div class="category-list mb-md flex flex-wrap gap-xs">
|
||||
<div v-else class="category-list mb-sm flex flex-wrap gap-xs">
|
||||
<button
|
||||
:class="['btn', 'btn-secondary', 'cat-btn', { active: activeCategory === '_all' }]"
|
||||
@click="selectCategory('_all')"
|
||||
>
|
||||
All
|
||||
</button>
|
||||
<button
|
||||
v-for="cat in categories"
|
||||
:key="cat.category"
|
||||
|
|
@ -25,6 +36,7 @@
|
|||
>
|
||||
{{ cat.category }}
|
||||
<span class="cat-count">{{ cat.recipe_count }}</span>
|
||||
<span v-if="cat.has_subcategories" class="cat-drill-indicator" title="Has subcategories">›</span>
|
||||
</button>
|
||||
<button
|
||||
v-if="categories.length > 1"
|
||||
|
|
@ -36,11 +48,64 @@
|
|||
</button>
|
||||
</div>
|
||||
|
||||
<!-- Subcategory row — shown when the active category has subcategories -->
|
||||
<div
|
||||
v-if="activeCategoryHasSubs && (subcategories.length > 0 || loadingSubcategories)"
|
||||
class="subcategory-list mb-md flex flex-wrap gap-xs"
|
||||
>
|
||||
<span v-if="loadingSubcategories" class="text-secondary text-xs">Loading…</span>
|
||||
<template v-else>
|
||||
<button
|
||||
:class="['btn', 'btn-secondary', 'subcat-btn', { active: activeSubcategory === null }]"
|
||||
@click="selectSubcategory(null)"
|
||||
>
|
||||
All {{ activeCategory }}
|
||||
</button>
|
||||
<button
|
||||
v-for="sub in subcategories"
|
||||
:key="sub.subcategory"
|
||||
:class="['btn', 'btn-secondary', 'subcat-btn', { active: activeSubcategory === sub.subcategory }]"
|
||||
@click="selectSubcategory(sub.subcategory)"
|
||||
>
|
||||
{{ sub.subcategory }}
|
||||
<span class="cat-count">{{ sub.recipe_count }}</span>
|
||||
</button>
|
||||
</template>
|
||||
</div>
|
||||
|
||||
<!-- Recipe grid -->
|
||||
<template v-if="activeCategory">
|
||||
<div v-if="loadingRecipes" class="text-secondary text-sm">Loading recipes…</div>
|
||||
|
||||
<template v-else>
|
||||
<!-- Search + sort controls -->
|
||||
<div class="browser-controls flex gap-sm mb-sm flex-wrap align-center">
|
||||
<input
|
||||
v-model="searchQuery"
|
||||
@input="onSearchInput"
|
||||
type="search"
|
||||
placeholder="Filter by title…"
|
||||
class="browser-search"
|
||||
/>
|
||||
<div class="sort-btns flex gap-xs">
|
||||
<button
|
||||
:class="['btn', 'btn-secondary', 'sort-btn', { active: sortOrder === 'default' }]"
|
||||
@click="setSort('default')"
|
||||
title="Corpus order"
|
||||
>Default</button>
|
||||
<button
|
||||
:class="['btn', 'btn-secondary', 'sort-btn', { active: sortOrder === 'alpha' }]"
|
||||
@click="setSort('alpha')"
|
||||
title="Alphabetical A→Z"
|
||||
>A→Z</button>
|
||||
<button
|
||||
:class="['btn', 'btn-secondary', 'sort-btn', { active: sortOrder === 'alpha_desc' }]"
|
||||
@click="setSort('alpha_desc')"
|
||||
title="Alphabetical Z→A"
|
||||
>Z→A</button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="results-header flex-between mb-sm">
|
||||
<span class="text-sm text-secondary">
|
||||
{{ total }} recipes
|
||||
|
|
@ -101,7 +166,7 @@
|
|||
</template>
|
||||
</template>
|
||||
|
||||
<div v-else class="text-secondary text-sm">Loading recipes…</div>
|
||||
<div v-else-if="!allCountsZero" class="text-secondary text-sm">Loading recipes…</div>
|
||||
</div>
|
||||
|
||||
<div v-else-if="!loadingDomains" class="text-secondary text-sm">Loading…</div>
|
||||
|
|
@ -120,7 +185,7 @@
|
|||
|
||||
<script setup lang="ts">
|
||||
import { ref, computed, onMounted } from 'vue'
|
||||
import { browserAPI, type BrowserDomain, type BrowserCategory, type BrowserRecipe } from '../services/api'
|
||||
import { browserAPI, type BrowserDomain, type BrowserCategory, type BrowserSubcategory, type BrowserRecipe } from '../services/api'
|
||||
import { useSavedRecipesStore } from '../stores/savedRecipes'
|
||||
import { useInventoryStore } from '../stores/inventory'
|
||||
import SaveRecipeModal from './SaveRecipeModal.vue'
|
||||
|
|
@ -136,6 +201,9 @@ const domains = ref<BrowserDomain[]>([])
|
|||
const activeDomain = ref<string | null>(null)
|
||||
const categories = ref<BrowserCategory[]>([])
|
||||
const activeCategory = ref<string | null>(null)
|
||||
const subcategories = ref<BrowserSubcategory[]>([])
|
||||
const activeSubcategory = ref<string | null>(null)
|
||||
const loadingSubcategories = ref(false)
|
||||
const recipes = ref<BrowserRecipe[]>([])
|
||||
const total = ref(0)
|
||||
const page = ref(1)
|
||||
|
|
@ -143,8 +211,18 @@ const pageSize = 20
|
|||
const loadingDomains = ref(false)
|
||||
const loadingRecipes = ref(false)
|
||||
const savingRecipe = ref<BrowserRecipe | null>(null)
|
||||
const searchQuery = ref('')
|
||||
const sortOrder = ref<'default' | 'alpha' | 'alpha_desc'>('default')
|
||||
let searchDebounce: ReturnType<typeof setTimeout> | null = null
|
||||
|
||||
const totalPages = computed(() => Math.max(1, Math.ceil(total.value / pageSize)))
|
||||
const allCountsZero = computed(() =>
|
||||
categories.value.length > 0 && categories.value.every(c => c.recipe_count === 0)
|
||||
)
|
||||
const activeCategoryHasSubs = computed(() => {
|
||||
if (!activeCategory.value || activeCategory.value === '_all') return false
|
||||
return categories.value.find(c => c.category === activeCategory.value)?.has_subcategories ?? false
|
||||
})
|
||||
|
||||
const pantryItems = computed(() =>
|
||||
inventoryStore.items
|
||||
|
|
@ -172,15 +250,34 @@ onMounted(async () => {
|
|||
if (!savedStore.savedIds.size) savedStore.load()
|
||||
})
|
||||
|
||||
function onSearchInput() {
|
||||
if (searchDebounce) clearTimeout(searchDebounce)
|
||||
searchDebounce = setTimeout(() => {
|
||||
page.value = 1
|
||||
loadRecipes()
|
||||
}, 350)
|
||||
}
|
||||
|
||||
function setSort(s: 'default' | 'alpha' | 'alpha_desc') {
|
||||
if (sortOrder.value === s) return
|
||||
sortOrder.value = s
|
||||
page.value = 1
|
||||
loadRecipes()
|
||||
}
|
||||
|
||||
async function selectDomain(domainId: string) {
|
||||
activeDomain.value = domainId
|
||||
activeCategory.value = null
|
||||
recipes.value = []
|
||||
total.value = 0
|
||||
page.value = 1
|
||||
searchQuery.value = ''
|
||||
sortOrder.value = 'default'
|
||||
categories.value = await browserAPI.listCategories(domainId)
|
||||
// Auto-select the most-populated category so content appears immediately
|
||||
if (categories.value.length > 0) {
|
||||
// Auto-select the most-populated category so content appears immediately.
|
||||
// Skip when all counts are 0 (corpus not seeded) — no point loading an empty result.
|
||||
const hasRecipes = categories.value.some(c => c.recipe_count > 0)
|
||||
if (hasRecipes) {
|
||||
const top = categories.value.reduce((best, c) =>
|
||||
c.recipe_count > best.recipe_count ? c : best, categories.value[0]!)
|
||||
selectCategory(top.category)
|
||||
|
|
@ -195,6 +292,27 @@ function surpriseMe() {
|
|||
|
||||
async function selectCategory(category: string) {
|
||||
activeCategory.value = category
|
||||
activeSubcategory.value = null
|
||||
subcategories.value = []
|
||||
page.value = 1
|
||||
searchQuery.value = ''
|
||||
sortOrder.value = 'default'
|
||||
|
||||
// Fetch subcategories in the background when the category supports them,
|
||||
// then immediately start loading recipes at the full-category level.
|
||||
const catMeta = categories.value.find(c => c.category === category)
|
||||
if (catMeta?.has_subcategories) {
|
||||
loadingSubcategories.value = true
|
||||
browserAPI.listSubcategories(activeDomain.value!, category)
|
||||
.then(subs => { subcategories.value = subs })
|
||||
.finally(() => { loadingSubcategories.value = false })
|
||||
}
|
||||
|
||||
await loadRecipes()
|
||||
}
|
||||
|
||||
async function selectSubcategory(subcat: string | null) {
|
||||
activeSubcategory.value = subcat
|
||||
page.value = 1
|
||||
await loadRecipes()
|
||||
}
|
||||
|
|
@ -217,6 +335,9 @@ async function loadRecipes() {
|
|||
pantry_items: pantryItems.value.length > 0
|
||||
? pantryItems.value.join(',')
|
||||
: undefined,
|
||||
subcategory: activeSubcategory.value ?? undefined,
|
||||
q: searchQuery.value.trim() || undefined,
|
||||
sort: sortOrder.value !== 'default' ? sortOrder.value : undefined,
|
||||
}
|
||||
)
|
||||
recipes.value = result.recipes
|
||||
|
|
@ -279,6 +400,68 @@ async function doUnsave(recipeId: number) {
|
|||
opacity: 1;
|
||||
}
|
||||
|
||||
.cat-drill-indicator {
|
||||
margin-left: var(--spacing-xs);
|
||||
opacity: 0.5;
|
||||
font-size: var(--font-size-sm);
|
||||
}
|
||||
|
||||
.subcategory-list {
|
||||
padding-left: var(--spacing-sm);
|
||||
border-left: 2px solid var(--color-border);
|
||||
margin-left: var(--spacing-xs);
|
||||
}
|
||||
|
||||
.subcat-btn {
|
||||
font-size: var(--font-size-xs, 0.78rem);
|
||||
padding: var(--spacing-xs) var(--spacing-sm);
|
||||
opacity: 0.9;
|
||||
}
|
||||
|
||||
.subcat-btn.active {
|
||||
background: var(--color-primary);
|
||||
color: white;
|
||||
border-color: var(--color-primary);
|
||||
opacity: 1;
|
||||
}
|
||||
|
||||
.subcat-btn.active .cat-count {
|
||||
background: rgba(255, 255, 255, 0.2);
|
||||
color: white;
|
||||
}
|
||||
|
||||
.browser-controls {
|
||||
align-items: center;
|
||||
}
|
||||
|
||||
.browser-search {
|
||||
flex: 1;
|
||||
min-width: 120px;
|
||||
max-width: 260px;
|
||||
padding: var(--spacing-xs) var(--spacing-sm);
|
||||
font-size: var(--font-size-sm);
|
||||
border: 1px solid var(--color-border);
|
||||
border-radius: var(--radius-sm);
|
||||
background: var(--color-bg);
|
||||
color: var(--color-text);
|
||||
}
|
||||
|
||||
.browser-search:focus {
|
||||
outline: none;
|
||||
border-color: var(--color-primary);
|
||||
}
|
||||
|
||||
.sort-btn {
|
||||
font-size: var(--font-size-xs, 0.75rem);
|
||||
padding: 2px var(--spacing-sm);
|
||||
}
|
||||
|
||||
.sort-btn.active {
|
||||
background: var(--color-primary);
|
||||
color: white;
|
||||
border-color: var(--color-primary);
|
||||
}
|
||||
|
||||
.recipe-grid {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
|
|
|
|||
|
|
@ -287,7 +287,10 @@
|
|||
@click="handleSuggest"
|
||||
>
|
||||
<span v-if="recipesStore.loading && !isLoadingMore">
|
||||
<span class="spinner spinner-sm inline-spinner"></span> Finding recipes…
|
||||
<span class="spinner spinner-sm inline-spinner"></span>
|
||||
<span v-if="recipesStore.jobStatus === 'queued'">Queued…</span>
|
||||
<span v-else-if="recipesStore.jobStatus === 'running'">Generating…</span>
|
||||
<span v-else>Finding recipes…</span>
|
||||
</span>
|
||||
<span v-else>Suggest Recipes</span>
|
||||
</button>
|
||||
|
|
@ -312,7 +315,9 @@
|
|||
|
||||
<!-- Screen reader announcement for loading + results -->
|
||||
<div aria-live="polite" aria-atomic="true" class="sr-only">
|
||||
<span v-if="recipesStore.loading">Finding recipes…</span>
|
||||
<span v-if="recipesStore.loading && recipesStore.jobStatus === 'queued'">Recipe request queued, waiting for model…</span>
|
||||
<span v-else-if="recipesStore.loading && recipesStore.jobStatus === 'running'">Generating your recipe now…</span>
|
||||
<span v-else-if="recipesStore.loading">Finding recipes…</span>
|
||||
<span v-else-if="recipesStore.result">
|
||||
{{ filteredSuggestions.length }} recipe{{ filteredSuggestions.length !== 1 ? 's' : '' }} found
|
||||
</span>
|
||||
|
|
@ -951,6 +956,19 @@ const pantryItems = computed(() => {
|
|||
return sorted.map((item) => item.product_name).filter(Boolean) as string[]
|
||||
})
|
||||
|
||||
// Secondary-state items: expired but still usable in specific recipes.
|
||||
// Maps product_name → secondary_state label (e.g. "Bread" → "stale").
|
||||
// Sent alongside pantry_items so the recipe engine can boost relevant recipes.
|
||||
const secondaryPantryItems = computed<Record<string, string>>(() => {
|
||||
const result: Record<string, string> = {}
|
||||
for (const item of inventoryStore.items) {
|
||||
if (item.secondary_state && item.product_name) {
|
||||
result[item.product_name] = item.secondary_state
|
||||
}
|
||||
}
|
||||
return result
|
||||
})
|
||||
|
||||
// Grocery links relevant to a specific recipe's missing ingredients
|
||||
function groceryLinksForRecipe(recipe: RecipeSuggestion): GroceryLink[] {
|
||||
if (!recipesStore.result) return []
|
||||
|
|
@ -1025,12 +1043,12 @@ function onNutritionInput(key: NutritionKey, e: Event) {
|
|||
// Suggest handler
|
||||
async function handleSuggest() {
|
||||
isLoadingMore.value = false
|
||||
await recipesStore.suggest(pantryItems.value)
|
||||
await recipesStore.suggest(pantryItems.value, secondaryPantryItems.value)
|
||||
}
|
||||
|
||||
async function handleLoadMore() {
|
||||
isLoadingMore.value = true
|
||||
await recipesStore.loadMore(pantryItems.value)
|
||||
await recipesStore.loadMore(pantryItems.value, secondaryPantryItems.value)
|
||||
isLoadingMore.value = false
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -524,8 +524,18 @@ export interface RecipeResult {
|
|||
rate_limit_count: number
|
||||
}
|
||||
|
||||
export type RecipeJobStatusValue = 'queued' | 'running' | 'done' | 'failed'
|
||||
|
||||
export interface RecipeJobStatus {
|
||||
job_id: string
|
||||
status: RecipeJobStatusValue
|
||||
result: RecipeResult | null
|
||||
error: string | null
|
||||
}
|
||||
|
||||
export interface RecipeRequest {
|
||||
pantry_items: string[]
|
||||
secondary_pantry_items: Record<string, string>
|
||||
level: number
|
||||
constraints: string[]
|
||||
allergies: string[]
|
||||
|
|
@ -592,6 +602,18 @@ export const recipesAPI = {
|
|||
const response = await api.post('/recipes/suggest', req, { timeout: 120000 })
|
||||
return response.data
|
||||
},
|
||||
|
||||
/** Submit an async job for L3/L4 generation. Returns job_id + initial status. */
|
||||
async suggestAsync(req: RecipeRequest): Promise<{ job_id: string; status: string }> {
|
||||
const response = await api.post('/recipes/suggest', req, { params: { async: 'true' }, timeout: 15000 })
|
||||
return response.data
|
||||
},
|
||||
|
||||
/** Poll an async job. Returns the full status including result once done. */
|
||||
async pollJob(jobId: string): Promise<RecipeJobStatus> {
|
||||
const response = await api.get(`/recipes/jobs/${jobId}`, { timeout: 10000 })
|
||||
return response.data
|
||||
},
|
||||
async getRecipe(id: number): Promise<RecipeSuggestion> {
|
||||
const response = await api.get(`/recipes/${id}`)
|
||||
return response.data
|
||||
|
|
@ -880,6 +902,12 @@ export interface BrowserDomain {
|
|||
export interface BrowserCategory {
|
||||
category: string
|
||||
recipe_count: number
|
||||
has_subcategories: boolean
|
||||
}
|
||||
|
||||
export interface BrowserSubcategory {
|
||||
subcategory: string
|
||||
recipe_count: number
|
||||
}
|
||||
|
||||
export interface BrowserRecipe {
|
||||
|
|
@ -906,10 +934,19 @@ export const browserAPI = {
|
|||
const response = await api.get(`/recipes/browse/${domain}`)
|
||||
return response.data
|
||||
},
|
||||
async listSubcategories(domain: string, category: string): Promise<BrowserSubcategory[]> {
|
||||
const response = await api.get(
|
||||
`/recipes/browse/${domain}/${encodeURIComponent(category)}/subcategories`
|
||||
)
|
||||
return response.data
|
||||
},
|
||||
async browse(domain: string, category: string, params?: {
|
||||
page?: number
|
||||
page_size?: number
|
||||
pantry_items?: string
|
||||
subcategory?: string
|
||||
q?: string
|
||||
sort?: string
|
||||
}): Promise<BrowserResult> {
|
||||
const response = await api.get(`/recipes/browse/${domain}/${encodeURIComponent(category)}`, { params })
|
||||
return response.data
|
||||
|
|
|
|||
|
|
@ -55,11 +55,12 @@ export const useInventoryStore = defineStore('inventory', () => {
|
|||
error.value = null
|
||||
|
||||
try {
|
||||
items.value = await inventoryAPI.listItems({
|
||||
const result = await inventoryAPI.listItems({
|
||||
item_status: statusFilter.value === 'all' ? undefined : statusFilter.value,
|
||||
location: locationFilter.value === 'all' ? undefined : locationFilter.value,
|
||||
limit: 1000,
|
||||
})
|
||||
items.value = Array.isArray(result) ? result : []
|
||||
} catch (err: any) {
|
||||
error.value = err.response?.data?.detail || 'Failed to fetch inventory items'
|
||||
console.error('Error fetching inventory:', err)
|
||||
|
|
|
|||
|
|
@ -34,7 +34,8 @@ export const useMealPlanStore = defineStore('mealPlan', () => {
|
|||
async function loadPlans() {
|
||||
loading.value = true
|
||||
try {
|
||||
plans.value = await mealPlanAPI.list()
|
||||
const result = await mealPlanAPI.list()
|
||||
plans.value = Array.isArray(result) ? result : []
|
||||
} finally {
|
||||
loading.value = false
|
||||
}
|
||||
|
|
|
|||
|
|
@ -7,7 +7,7 @@
|
|||
|
||||
import { defineStore } from 'pinia'
|
||||
import { ref, computed, watch } from 'vue'
|
||||
import { recipesAPI, type RecipeResult, type RecipeSuggestion, type RecipeRequest, type NutritionFilters } from '../services/api'
|
||||
import { recipesAPI, type RecipeResult, type RecipeSuggestion, type RecipeRequest, type RecipeJobStatusValue, type NutritionFilters } from '../services/api'
|
||||
|
||||
const DISMISSED_KEY = 'kiwi:dismissed_recipes'
|
||||
const DISMISS_TTL_MS = 7 * 24 * 60 * 60 * 1000
|
||||
|
|
@ -121,6 +121,7 @@ export const useRecipesStore = defineStore('recipes', () => {
|
|||
const result = ref<RecipeResult | null>(null)
|
||||
const loading = ref(false)
|
||||
const error = ref<string | null>(null)
|
||||
const jobStatus = ref<RecipeJobStatusValue | null>(null)
|
||||
|
||||
// Request parameters
|
||||
const level = ref(1)
|
||||
|
|
@ -163,10 +164,15 @@ export const useRecipesStore = defineStore('recipes', () => {
|
|||
|
||||
const dismissedCount = computed(() => dismissedIds.value.size)
|
||||
|
||||
function _buildRequest(pantryItems: string[], extraExcluded: number[] = []): RecipeRequest {
|
||||
function _buildRequest(
|
||||
pantryItems: string[],
|
||||
secondaryPantryItems: Record<string, string> = {},
|
||||
extraExcluded: number[] = [],
|
||||
): RecipeRequest {
|
||||
const excluded = new Set([...dismissedIds.value, ...extraExcluded])
|
||||
return {
|
||||
pantry_items: pantryItems,
|
||||
secondary_pantry_items: secondaryPantryItems,
|
||||
level: level.value,
|
||||
constraints: constraints.value,
|
||||
allergies: allergies.value,
|
||||
|
|
@ -191,29 +197,68 @@ export const useRecipesStore = defineStore('recipes', () => {
|
|||
}
|
||||
}
|
||||
|
||||
async function suggest(pantryItems: string[]) {
|
||||
async function suggest(pantryItems: string[], secondaryPantryItems: Record<string, string> = {}) {
|
||||
loading.value = true
|
||||
error.value = null
|
||||
jobStatus.value = null
|
||||
seenIds.value = new Set()
|
||||
|
||||
try {
|
||||
result.value = await recipesAPI.suggest(_buildRequest(pantryItems))
|
||||
_trackSeen(result.value.suggestions)
|
||||
if (level.value >= 3) {
|
||||
await _suggestAsync(pantryItems, secondaryPantryItems)
|
||||
} else {
|
||||
result.value = await recipesAPI.suggest(_buildRequest(pantryItems, secondaryPantryItems))
|
||||
_trackSeen(result.value.suggestions)
|
||||
}
|
||||
} catch (err: unknown) {
|
||||
error.value = err instanceof Error ? err.message : 'Failed to get recipe suggestions'
|
||||
} finally {
|
||||
loading.value = false
|
||||
jobStatus.value = null
|
||||
}
|
||||
}
|
||||
|
||||
async function loadMore(pantryItems: string[]) {
|
||||
async function _suggestAsync(pantryItems: string[], secondaryPantryItems: Record<string, string>) {
|
||||
const queued = await recipesAPI.suggestAsync(_buildRequest(pantryItems, secondaryPantryItems))
|
||||
|
||||
// CLOUD_MODE or future sync fallback: server returned result directly (status 200)
|
||||
if ('suggestions' in queued) {
|
||||
result.value = queued as unknown as RecipeResult
|
||||
_trackSeen(result.value.suggestions)
|
||||
return
|
||||
}
|
||||
|
||||
jobStatus.value = 'queued'
|
||||
const { job_id } = queued
|
||||
const deadline = Date.now() + 90_000
|
||||
const POLL_MS = 2_500
|
||||
|
||||
while (Date.now() < deadline) {
|
||||
await new Promise((resolve) => setTimeout(resolve, POLL_MS))
|
||||
const poll = await recipesAPI.pollJob(job_id)
|
||||
jobStatus.value = poll.status
|
||||
|
||||
if (poll.status === 'done') {
|
||||
result.value = poll.result
|
||||
if (result.value) _trackSeen(result.value.suggestions)
|
||||
return
|
||||
}
|
||||
if (poll.status === 'failed') {
|
||||
throw new Error(poll.error ?? 'Recipe generation failed')
|
||||
}
|
||||
}
|
||||
|
||||
throw new Error('Recipe generation timed out — the model may be busy. Try again.')
|
||||
}
|
||||
|
||||
async function loadMore(pantryItems: string[], secondaryPantryItems: Record<string, string> = {}) {
|
||||
if (!result.value || loading.value) return
|
||||
loading.value = true
|
||||
error.value = null
|
||||
|
||||
try {
|
||||
// Exclude everything already shown (dismissed + all seen this session)
|
||||
const more = await recipesAPI.suggest(_buildRequest(pantryItems, [...seenIds.value]))
|
||||
const more = await recipesAPI.suggest(_buildRequest(pantryItems, secondaryPantryItems, [...seenIds.value]))
|
||||
if (more.suggestions.length === 0) {
|
||||
error.value = 'No more recipes found — try clearing dismissed or adjusting filters.'
|
||||
} else {
|
||||
|
|
@ -303,6 +348,7 @@ export const useRecipesStore = defineStore('recipes', () => {
|
|||
result,
|
||||
loading,
|
||||
error,
|
||||
jobStatus,
|
||||
level,
|
||||
constraints,
|
||||
allergies,
|
||||
|
|
|
|||
|
|
@ -111,6 +111,7 @@
|
|||
justify-content: flex-end;
|
||||
align-items: center;
|
||||
}
|
||||
.align-center { align-items: center; }
|
||||
|
||||
.flex-responsive {
|
||||
display: flex;
|
||||
|
|
|
|||
Loading…
Reference in a new issue