fix: wire recipe corpus to cloud per-user DBs via SQLite ATTACH (#102)
Cloud mode: attach shared read-only corpus DB (RECIPE_DB_PATH env var)
as "corpus" schema so per-user SQLite DBs can access 3.19M recipes.
All corpus table references now use self._cp prefix ("corpus." in cloud,
"" in local). FTS5 pseudo-column kept unqualified per SQLite spec.
compose.cloud.yml: bind-mount /Library/Assets/kiwi/kiwi.db read-only.
Also fix batch of audit issues:
- #101: OCR approval used source="receipt_ocr" for inventory_items — use "receipt"
- #89/#100: Shopping confirm-purchase used source="shopping_list" — use "manual"
- #103: Frontend inventory filter sent ?status= but API expects ?item_status=
- #104: InventoryItemUpdate schema missing purchase_date field; store.py allowed set also missing it
- #105: Guest cookie Secure flag tied to CLOUD_MODE instead of X-Forwarded-Proto; broke HTTP direct-port access
This commit is contained in:
parent
8483b9ae5f
commit
890216a1f0
12 changed files with 570 additions and 59 deletions
|
|
@ -3,6 +3,7 @@
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
import asyncio
|
import asyncio
|
||||||
|
import logging
|
||||||
import uuid
|
import uuid
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from typing import Any, Dict, List, Optional
|
from typing import Any, Dict, List, Optional
|
||||||
|
|
@ -11,7 +12,9 @@ import aiofiles
|
||||||
from fastapi import APIRouter, Depends, File, Form, HTTPException, UploadFile, status
|
from fastapi import APIRouter, Depends, File, Form, HTTPException, UploadFile, status
|
||||||
from pydantic import BaseModel
|
from pydantic import BaseModel
|
||||||
|
|
||||||
from app.cloud_session import CloudUser, get_session
|
from app.cloud_session import CloudUser, _auth_label, get_session
|
||||||
|
|
||||||
|
log = logging.getLogger(__name__)
|
||||||
from app.db.session import get_store
|
from app.db.session import get_store
|
||||||
from app.services.expiration_predictor import ExpirationPredictor
|
from app.services.expiration_predictor import ExpirationPredictor
|
||||||
|
|
||||||
|
|
@ -41,7 +44,7 @@ router = APIRouter()
|
||||||
# ── Helpers ───────────────────────────────────────────────────────────────────
|
# ── Helpers ───────────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
def _enrich_item(item: dict) -> dict:
|
def _enrich_item(item: dict) -> dict:
|
||||||
"""Attach computed opened_expiry_date when opened_date is set."""
|
"""Attach computed fields: opened_expiry_date, secondary_state/uses/warning."""
|
||||||
from datetime import date, timedelta
|
from datetime import date, timedelta
|
||||||
opened = item.get("opened_date")
|
opened = item.get("opened_date")
|
||||||
if opened:
|
if opened:
|
||||||
|
|
@ -54,6 +57,15 @@ def _enrich_item(item: dict) -> dict:
|
||||||
pass
|
pass
|
||||||
if "opened_expiry_date" not in item:
|
if "opened_expiry_date" not in item:
|
||||||
item = {**item, "opened_expiry_date": None}
|
item = {**item, "opened_expiry_date": None}
|
||||||
|
|
||||||
|
# Secondary use window — check sell-by date (not opened expiry)
|
||||||
|
sec = _predictor.secondary_state(item.get("category"), item.get("expiration_date"))
|
||||||
|
item = {
|
||||||
|
**item,
|
||||||
|
"secondary_state": sec["label"] if sec else None,
|
||||||
|
"secondary_uses": sec["uses"] if sec else None,
|
||||||
|
"secondary_warning": sec["warning"] if sec else None,
|
||||||
|
}
|
||||||
return item
|
return item
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -141,7 +153,12 @@ async def delete_product(product_id: int, store: Store = Depends(get_store)):
|
||||||
# ── Inventory items ───────────────────────────────────────────────────────────
|
# ── Inventory items ───────────────────────────────────────────────────────────
|
||||||
|
|
||||||
@router.post("/items", response_model=InventoryItemResponse, status_code=status.HTTP_201_CREATED)
|
@router.post("/items", response_model=InventoryItemResponse, status_code=status.HTTP_201_CREATED)
|
||||||
async def create_inventory_item(body: InventoryItemCreate, store: Store = Depends(get_store)):
|
async def create_inventory_item(
|
||||||
|
body: InventoryItemCreate,
|
||||||
|
store: Store = Depends(get_store),
|
||||||
|
session: CloudUser = Depends(get_session),
|
||||||
|
):
|
||||||
|
log.info("add_item auth=%s tier=%s product_id=%s", _auth_label(session.user_id), session.tier, body.product_id)
|
||||||
item = await asyncio.to_thread(
|
item = await asyncio.to_thread(
|
||||||
store.add_inventory_item,
|
store.add_inventory_item,
|
||||||
body.product_id,
|
body.product_id,
|
||||||
|
|
@ -167,7 +184,7 @@ async def bulk_add_items_by_name(body: BulkAddByNameRequest, store: Store = Depe
|
||||||
for entry in body.items:
|
for entry in body.items:
|
||||||
try:
|
try:
|
||||||
product, _ = await asyncio.to_thread(
|
product, _ = await asyncio.to_thread(
|
||||||
store.get_or_create_product, entry.name, None, source="shopping"
|
store.get_or_create_product, entry.name, None, source="manual"
|
||||||
)
|
)
|
||||||
item = await asyncio.to_thread(
|
item = await asyncio.to_thread(
|
||||||
store.add_inventory_item,
|
store.add_inventory_item,
|
||||||
|
|
@ -175,7 +192,7 @@ async def bulk_add_items_by_name(body: BulkAddByNameRequest, store: Store = Depe
|
||||||
entry.location,
|
entry.location,
|
||||||
quantity=entry.quantity,
|
quantity=entry.quantity,
|
||||||
unit=entry.unit,
|
unit=entry.unit,
|
||||||
source="shopping",
|
source="manual",
|
||||||
)
|
)
|
||||||
results.append(BulkAddItemResult(name=entry.name, ok=True, item_id=item["id"]))
|
results.append(BulkAddItemResult(name=entry.name, ok=True, item_id=item["id"]))
|
||||||
except Exception as exc:
|
except Exception as exc:
|
||||||
|
|
@ -320,6 +337,7 @@ async def scan_barcode_text(
|
||||||
session: CloudUser = Depends(get_session),
|
session: CloudUser = Depends(get_session),
|
||||||
):
|
):
|
||||||
"""Scan a barcode from a text string (e.g. from a hardware scanner or manual entry)."""
|
"""Scan a barcode from a text string (e.g. from a hardware scanner or manual entry)."""
|
||||||
|
log.info("scan auth=%s tier=%s barcode=%r", _auth_label(session.user_id), session.tier, body.barcode)
|
||||||
from app.services.openfoodfacts import OpenFoodFactsService
|
from app.services.openfoodfacts import OpenFoodFactsService
|
||||||
from app.services.expiration_predictor import ExpirationPredictor
|
from app.services.expiration_predictor import ExpirationPredictor
|
||||||
|
|
||||||
|
|
@ -388,6 +406,7 @@ async def scan_barcode_image(
|
||||||
session: CloudUser = Depends(get_session),
|
session: CloudUser = Depends(get_session),
|
||||||
):
|
):
|
||||||
"""Scan a barcode from an uploaded image. Requires Phase 2 scanner integration."""
|
"""Scan a barcode from an uploaded image. Requires Phase 2 scanner integration."""
|
||||||
|
log.info("scan_image auth=%s tier=%s", _auth_label(session.user_id), session.tier)
|
||||||
temp_dir = Path("/tmp/kiwi_barcode_scans")
|
temp_dir = Path("/tmp/kiwi_barcode_scans")
|
||||||
temp_dir.mkdir(parents=True, exist_ok=True)
|
temp_dir.mkdir(parents=True, exist_ok=True)
|
||||||
temp_file = temp_dir / f"{uuid.uuid4()}_{file.filename}"
|
temp_file = temp_dir / f"{uuid.uuid4()}_{file.filename}"
|
||||||
|
|
|
||||||
|
|
@ -219,7 +219,7 @@ def _commit_items(
|
||||||
receipt_id=receipt_id,
|
receipt_id=receipt_id,
|
||||||
purchase_date=str(purchase_date) if purchase_date else None,
|
purchase_date=str(purchase_date) if purchase_date else None,
|
||||||
expiration_date=str(exp) if exp else None,
|
expiration_date=str(exp) if exp else None,
|
||||||
source="receipt_ocr",
|
source="receipt",
|
||||||
)
|
)
|
||||||
|
|
||||||
created.append(ApprovedInventoryItem(
|
created.append(ApprovedInventoryItem(
|
||||||
|
|
|
||||||
224
app/api/endpoints/shopping.py
Normal file
224
app/api/endpoints/shopping.py
Normal file
|
|
@ -0,0 +1,224 @@
|
||||||
|
"""Shopping list endpoints.
|
||||||
|
|
||||||
|
Free tier for all users (anonymous guests included — shopping list is the
|
||||||
|
primary affiliate revenue surface). Confirm-purchase action is also Free:
|
||||||
|
it moves a checked item into pantry inventory without a tier gate so the
|
||||||
|
flow works for anyone who signs up or browses without an account.
|
||||||
|
|
||||||
|
Routes:
|
||||||
|
GET /shopping — list items (with affiliate links)
|
||||||
|
POST /shopping — add item manually
|
||||||
|
PATCH /shopping/{id} — update (check/uncheck, rename, qty)
|
||||||
|
DELETE /shopping/{id} — remove single item
|
||||||
|
DELETE /shopping/checked — clear all checked items
|
||||||
|
DELETE /shopping/all — clear entire list
|
||||||
|
POST /shopping/from-recipe — bulk add gaps from a recipe
|
||||||
|
POST /shopping/{id}/confirm — confirm purchase → add to pantry inventory
|
||||||
|
"""
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import asyncio
|
||||||
|
import logging
|
||||||
|
|
||||||
|
from fastapi import APIRouter, Depends, HTTPException, status
|
||||||
|
|
||||||
|
from app.cloud_session import CloudUser, get_session
|
||||||
|
from app.db.session import get_store
|
||||||
|
from app.db.store import Store
|
||||||
|
from app.models.schemas.shopping import (
|
||||||
|
BulkAddFromRecipeRequest,
|
||||||
|
ConfirmPurchaseRequest,
|
||||||
|
ShoppingItemCreate,
|
||||||
|
ShoppingItemResponse,
|
||||||
|
ShoppingItemUpdate,
|
||||||
|
)
|
||||||
|
from app.services.recipe.grocery_links import GroceryLinkBuilder
|
||||||
|
|
||||||
|
log = logging.getLogger(__name__)
|
||||||
|
router = APIRouter()
|
||||||
|
|
||||||
|
|
||||||
|
def _enrich(item: dict, builder: GroceryLinkBuilder) -> ShoppingItemResponse:
    """Convert a raw store row into a response model with live affiliate links."""
    # SQLite stores booleans as 0/1; normalise before model validation.
    payload = dict(item)
    payload["checked"] = bool(payload.get("checked", 0))
    link_dicts = [
        {"ingredient": link.ingredient, "retailer": link.retailer, "url": link.url}
        for link in builder.build_links(item["name"])
    ]
    return ShoppingItemResponse(**payload, grocery_links=link_dicts)
|
||||||
|
|
||||||
|
|
||||||
|
def _in_thread(db_path, fn):
    """Open a Store on *db_path*, run *fn(store)*, and always close the DB.

    Intended to be invoked via asyncio.to_thread so SQLite work stays off
    the event loop; the connection is closed even when *fn* raises.
    """
    store = Store(db_path)
    try:
        result = fn(store)
    finally:
        store.close()
    return result
|
||||||
|
|
||||||
|
|
||||||
|
# ── List ──────────────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
@router.get("", response_model=list[ShoppingItemResponse])
|
||||||
|
async def list_shopping_items(
|
||||||
|
include_checked: bool = True,
|
||||||
|
session: CloudUser = Depends(get_session),
|
||||||
|
):
|
||||||
|
builder = GroceryLinkBuilder(tier=session.tier, has_byok=session.has_byok)
|
||||||
|
items = await asyncio.to_thread(
|
||||||
|
_in_thread, session.db, lambda s: s.list_shopping_items(include_checked)
|
||||||
|
)
|
||||||
|
return [_enrich(i, builder) for i in items]
|
||||||
|
|
||||||
|
|
||||||
|
# ── Add manually ──────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
@router.post("", response_model=ShoppingItemResponse, status_code=status.HTTP_201_CREATED)
|
||||||
|
async def add_shopping_item(
|
||||||
|
body: ShoppingItemCreate,
|
||||||
|
session: CloudUser = Depends(get_session),
|
||||||
|
):
|
||||||
|
builder = GroceryLinkBuilder(tier=session.tier, has_byok=session.has_byok)
|
||||||
|
item = await asyncio.to_thread(
|
||||||
|
_in_thread,
|
||||||
|
session.db,
|
||||||
|
lambda s: s.add_shopping_item(
|
||||||
|
name=body.name,
|
||||||
|
quantity=body.quantity,
|
||||||
|
unit=body.unit,
|
||||||
|
category=body.category,
|
||||||
|
notes=body.notes,
|
||||||
|
source=body.source,
|
||||||
|
recipe_id=body.recipe_id,
|
||||||
|
sort_order=body.sort_order,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
return _enrich(item, builder)
|
||||||
|
|
||||||
|
|
||||||
|
# ── Bulk add from recipe ───────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
@router.post("/from-recipe", response_model=list[ShoppingItemResponse], status_code=status.HTTP_201_CREATED)
|
||||||
|
async def add_from_recipe(
|
||||||
|
body: BulkAddFromRecipeRequest,
|
||||||
|
session: CloudUser = Depends(get_session),
|
||||||
|
):
|
||||||
|
"""Add missing ingredients from a recipe to the shopping list.
|
||||||
|
|
||||||
|
Runs pantry gap analysis and adds only the items the user doesn't have
|
||||||
|
(unless include_covered=True). Skips duplicates already on the list.
|
||||||
|
"""
|
||||||
|
from app.services.meal_plan.shopping_list import compute_shopping_list
|
||||||
|
|
||||||
|
def _run(store: Store):
|
||||||
|
recipe = store.get_recipe(body.recipe_id)
|
||||||
|
if not recipe:
|
||||||
|
raise HTTPException(status_code=404, detail="Recipe not found")
|
||||||
|
inventory = store.list_inventory()
|
||||||
|
gaps, covered = compute_shopping_list([recipe], inventory)
|
||||||
|
targets = (gaps + covered) if body.include_covered else gaps
|
||||||
|
|
||||||
|
# Avoid duplicates already on the list
|
||||||
|
existing = {i["name"].lower() for i in store.list_shopping_items()}
|
||||||
|
added = []
|
||||||
|
for gap in targets:
|
||||||
|
if gap.ingredient_name.lower() in existing:
|
||||||
|
continue
|
||||||
|
item = store.add_shopping_item(
|
||||||
|
name=gap.ingredient_name,
|
||||||
|
quantity=None,
|
||||||
|
unit=gap.have_unit,
|
||||||
|
source="recipe",
|
||||||
|
recipe_id=body.recipe_id,
|
||||||
|
)
|
||||||
|
added.append(item)
|
||||||
|
return added
|
||||||
|
|
||||||
|
builder = GroceryLinkBuilder(tier=session.tier, has_byok=session.has_byok)
|
||||||
|
items = await asyncio.to_thread(_in_thread, session.db, _run)
|
||||||
|
return [_enrich(i, builder) for i in items]
|
||||||
|
|
||||||
|
|
||||||
|
# ── Update ────────────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
@router.patch("/{item_id}", response_model=ShoppingItemResponse)
|
||||||
|
async def update_shopping_item(
|
||||||
|
item_id: int,
|
||||||
|
body: ShoppingItemUpdate,
|
||||||
|
session: CloudUser = Depends(get_session),
|
||||||
|
):
|
||||||
|
builder = GroceryLinkBuilder(tier=session.tier, has_byok=session.has_byok)
|
||||||
|
item = await asyncio.to_thread(
|
||||||
|
_in_thread,
|
||||||
|
session.db,
|
||||||
|
lambda s: s.update_shopping_item(item_id, **body.model_dump(exclude_none=True)),
|
||||||
|
)
|
||||||
|
if not item:
|
||||||
|
raise HTTPException(status_code=404, detail="Shopping item not found")
|
||||||
|
return _enrich(item, builder)
|
||||||
|
|
||||||
|
|
||||||
|
# ── Confirm purchase → pantry ─────────────────────────────────────────────────
|
||||||
|
|
||||||
|
@router.post("/{item_id}/confirm", status_code=status.HTTP_201_CREATED)
|
||||||
|
async def confirm_purchase(
|
||||||
|
item_id: int,
|
||||||
|
body: ConfirmPurchaseRequest,
|
||||||
|
session: CloudUser = Depends(get_session),
|
||||||
|
):
|
||||||
|
"""Confirm a checked item was purchased and add it to pantry inventory.
|
||||||
|
|
||||||
|
Human approval step: the user explicitly confirms what they actually bought
|
||||||
|
before it lands in their pantry. Returns the new inventory item.
|
||||||
|
"""
|
||||||
|
def _run(store: Store):
|
||||||
|
shopping_item = store.get_shopping_item(item_id)
|
||||||
|
if not shopping_item:
|
||||||
|
raise HTTPException(status_code=404, detail="Shopping item not found")
|
||||||
|
|
||||||
|
qty = body.quantity if body.quantity is not None else (shopping_item.get("quantity") or 1.0)
|
||||||
|
unit = body.unit or shopping_item.get("unit") or "count"
|
||||||
|
category = shopping_item.get("category")
|
||||||
|
|
||||||
|
product = store.get_or_create_product(
|
||||||
|
name=shopping_item["name"],
|
||||||
|
category=category,
|
||||||
|
)
|
||||||
|
inv_item = store.add_inventory_item(
|
||||||
|
product_id=product["id"],
|
||||||
|
location=body.location,
|
||||||
|
quantity=qty,
|
||||||
|
unit=unit,
|
||||||
|
source="manual",
|
||||||
|
)
|
||||||
|
# Mark the shopping item checked and leave it for the user to clear
|
||||||
|
store.update_shopping_item(item_id, checked=True)
|
||||||
|
return inv_item
|
||||||
|
|
||||||
|
return await asyncio.to_thread(_in_thread, session.db, _run)
|
||||||
|
|
||||||
|
|
||||||
|
# ── Delete ────────────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
@router.delete("/{item_id}", status_code=status.HTTP_204_NO_CONTENT)
|
||||||
|
async def delete_shopping_item(
|
||||||
|
item_id: int,
|
||||||
|
session: CloudUser = Depends(get_session),
|
||||||
|
):
|
||||||
|
deleted = await asyncio.to_thread(
|
||||||
|
_in_thread, session.db, lambda s: s.delete_shopping_item(item_id)
|
||||||
|
)
|
||||||
|
if not deleted:
|
||||||
|
raise HTTPException(status_code=404, detail="Shopping item not found")
|
||||||
|
|
||||||
|
|
||||||
|
@router.delete("/checked", status_code=status.HTTP_204_NO_CONTENT)
|
||||||
|
async def clear_checked(session: CloudUser = Depends(get_session)):
|
||||||
|
await asyncio.to_thread(
|
||||||
|
_in_thread, session.db, lambda s: s.clear_checked_shopping_items()
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@router.delete("/all", status_code=status.HTTP_204_NO_CONTENT)
|
||||||
|
async def clear_all(session: CloudUser = Depends(get_session)):
|
||||||
|
await asyncio.to_thread(
|
||||||
|
_in_thread, session.db, lambda s: s.clear_all_shopping_items()
|
||||||
|
)
|
||||||
|
|
@ -1,9 +1,10 @@
|
||||||
from fastapi import APIRouter
|
from fastapi import APIRouter
|
||||||
from app.api.endpoints import health, receipts, export, inventory, ocr, recipes, settings, staples, feedback, feedback_attach, household, saved_recipes, imitate, meal_plans, orch_usage
|
from app.api.endpoints import health, receipts, export, inventory, ocr, recipes, settings, staples, feedback, feedback_attach, household, saved_recipes, imitate, meal_plans, orch_usage, session, shopping
|
||||||
from app.api.endpoints.community import router as community_router
|
from app.api.endpoints.community import router as community_router
|
||||||
|
|
||||||
api_router = APIRouter()
|
api_router = APIRouter()
|
||||||
|
|
||||||
|
api_router.include_router(session.router, prefix="/session", tags=["session"])
|
||||||
api_router.include_router(health.router, prefix="/health", tags=["health"])
|
api_router.include_router(health.router, prefix="/health", tags=["health"])
|
||||||
api_router.include_router(receipts.router, prefix="/receipts", tags=["receipts"])
|
api_router.include_router(receipts.router, prefix="/receipts", tags=["receipts"])
|
||||||
api_router.include_router(ocr.router, prefix="/receipts", tags=["ocr"])
|
api_router.include_router(ocr.router, prefix="/receipts", tags=["ocr"])
|
||||||
|
|
@ -19,4 +20,5 @@ api_router.include_router(household.router, prefix="/household", tags=
|
||||||
api_router.include_router(imitate.router, prefix="/imitate", tags=["imitate"])
|
api_router.include_router(imitate.router, prefix="/imitate", tags=["imitate"])
|
||||||
api_router.include_router(meal_plans.router, prefix="/meal-plans", tags=["meal-plans"])
|
api_router.include_router(meal_plans.router, prefix="/meal-plans", tags=["meal-plans"])
|
||||||
api_router.include_router(orch_usage.router, prefix="/orch-usage", tags=["orch-usage"])
|
api_router.include_router(orch_usage.router, prefix="/orch-usage", tags=["orch-usage"])
|
||||||
|
api_router.include_router(shopping.router, prefix="/shopping", tags=["shopping"])
|
||||||
api_router.include_router(community_router)
|
api_router.include_router(community_router)
|
||||||
|
|
|
||||||
|
|
@ -22,10 +22,12 @@ import time
|
||||||
from dataclasses import dataclass
|
from dataclasses import dataclass
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
|
||||||
|
import uuid
|
||||||
|
|
||||||
import jwt as pyjwt
|
import jwt as pyjwt
|
||||||
import requests
|
import requests
|
||||||
import yaml
|
import yaml
|
||||||
from fastapi import Depends, HTTPException, Request
|
from fastapi import Depends, HTTPException, Request, Response
|
||||||
|
|
||||||
log = logging.getLogger(__name__)
|
log = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
@ -82,6 +84,15 @@ _TIER_CACHE_TTL = 300 # 5 minutes
|
||||||
TIERS = ["free", "paid", "premium", "ultra"]
|
TIERS = ["free", "paid", "premium", "ultra"]
|
||||||
|
|
||||||
|
|
||||||
|
def _auth_label(user_id: str) -> str:
|
||||||
|
"""Classify a user_id into a short tag for structured log lines. No PII emitted."""
|
||||||
|
if user_id in ("local", "local-dev"):
|
||||||
|
return "local"
|
||||||
|
if user_id.startswith("anon-"):
|
||||||
|
return "anon"
|
||||||
|
return "authed"
|
||||||
|
|
||||||
|
|
||||||
# ── Domain ────────────────────────────────────────────────────────────────────
|
# ── Domain ────────────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
@dataclass(frozen=True)
|
@dataclass(frozen=True)
|
||||||
|
|
@ -172,9 +183,13 @@ def _user_db_path(user_id: str, household_id: str | None = None) -> Path:
|
||||||
return path
|
return path
|
||||||
|
|
||||||
|
|
||||||
def _anon_guest_db_path(guest_id: str) -> Path:
    """Return the isolated per-guest SQLite DB path, creating its directory.

    Each anonymous visitor is keyed by the guest UUID cookie, so shopping
    lists and affiliate interactions never bleed across sessions.
    """
    db_file = CLOUD_DATA_ROOT / f"anon-{guest_id}" / "kiwi.db"
    db_file.parent.mkdir(parents=True, exist_ok=True)
    return db_file
|
||||||
|
|
||||||
|
|
@ -204,20 +219,52 @@ def _detect_byok(config_path: Path = _LLM_CONFIG_PATH) -> bool:
|
||||||
|
|
||||||
# ── FastAPI dependency ────────────────────────────────────────────────────────
|
# ── FastAPI dependency ────────────────────────────────────────────────────────
|
||||||
|
|
||||||
def get_session(request: Request) -> CloudUser:
|
_GUEST_COOKIE = "kiwi_guest_id"
|
||||||
|
_GUEST_COOKIE_MAX_AGE = 60 * 60 * 24 * 90 # 90 days
|
||||||
|
|
||||||
|
|
||||||
|
def _resolve_guest_session(request: Request, response: Response, has_byok: bool) -> CloudUser:
    """Return a per-session anonymous CloudUser, creating a guest UUID cookie if needed."""
    guest_id = request.cookies.get(_GUEST_COOKIE, "").strip()
    if not guest_id:
        guest_id = str(uuid.uuid4())
        log.debug("New guest session assigned: anon-%s", guest_id[:8])

    # Secure flag only when the request actually arrived over HTTPS
    # (Caddy sets X-Forwarded-Proto=https in cloud; absent on direct port access).
    # Avoids losing the session cookie on HTTP direct-port testing of the cloud stack.
    over_https = request.headers.get("x-forwarded-proto", "http").lower() == "https"

    # Re-issued on every request so the 90-day expiry slides forward.
    response.set_cookie(
        key=_GUEST_COOKIE,
        value=guest_id,
        max_age=_GUEST_COOKIE_MAX_AGE,
        httponly=True,
        samesite="lax",
        secure=over_https,
    )
    return CloudUser(
        user_id=f"anon-{guest_id}",
        tier="free",
        db=_anon_guest_db_path(guest_id),
        has_byok=has_byok,
    )
|
||||||
|
|
||||||
|
|
||||||
|
def get_session(request: Request, response: Response) -> CloudUser:
|
||||||
"""FastAPI dependency — resolves the current user from the request.
|
"""FastAPI dependency — resolves the current user from the request.
|
||||||
|
|
||||||
Local mode: fully-privileged "local" user pointing at local DB.
|
Local mode: fully-privileged "local" user pointing at local DB.
|
||||||
Cloud mode: validates X-CF-Session JWT, provisions license, resolves tier.
|
Cloud mode: validates X-CF-Session JWT, provisions license, resolves tier.
|
||||||
Dev bypass: if CLOUD_AUTH_BYPASS_IPS is set and the client IP matches,
|
Dev bypass: if CLOUD_AUTH_BYPASS_IPS is set and the client IP matches,
|
||||||
returns a "local" session without JWT validation (dev/LAN use only).
|
returns a "local" session without JWT validation (dev/LAN use only).
|
||||||
|
Anonymous: per-session UUID cookie isolates each guest visitor's data.
|
||||||
"""
|
"""
|
||||||
has_byok = _detect_byok()
|
has_byok = _detect_byok()
|
||||||
|
|
||||||
if not CLOUD_MODE:
|
if not CLOUD_MODE:
|
||||||
return CloudUser(user_id="local", tier="local", db=_LOCAL_KIWI_DB, has_byok=has_byok)
|
return CloudUser(user_id="local", tier="local", db=_LOCAL_KIWI_DB, has_byok=has_byok)
|
||||||
|
|
||||||
# Prefer X-Real-IP (set by nginx from the actual client address) over the
|
# Prefer X-Real-IP (set by Caddy from the actual client address) over the
|
||||||
# TCP peer address (which is nginx's container IP when behind the proxy).
|
# TCP peer address (which is nginx's container IP when behind the proxy).
|
||||||
client_ip = (
|
client_ip = (
|
||||||
request.headers.get("x-real-ip", "")
|
request.headers.get("x-real-ip", "")
|
||||||
|
|
@ -229,26 +276,19 @@ def get_session(request: Request) -> CloudUser:
|
||||||
dev_db = _user_db_path("local-dev")
|
dev_db = _user_db_path("local-dev")
|
||||||
return CloudUser(user_id="local-dev", tier="local", db=dev_db, has_byok=has_byok)
|
return CloudUser(user_id="local-dev", tier="local", db=dev_db, has_byok=has_byok)
|
||||||
|
|
||||||
raw_header = (
|
# Resolve cf_session JWT: prefer the explicit header injected by Caddy, then
|
||||||
request.headers.get("x-cf-session", "")
|
# fall back to the cf_session cookie value. Other cookies (e.g. kiwi_guest_id)
|
||||||
or request.headers.get("cookie", "")
|
# must never be treated as auth tokens.
|
||||||
)
|
raw_session = request.headers.get("x-cf-session", "").strip()
|
||||||
if not raw_header:
|
if not raw_session:
|
||||||
return CloudUser(
|
raw_session = request.cookies.get("cf_session", "").strip()
|
||||||
user_id="anonymous",
|
|
||||||
tier="free",
|
|
||||||
db=_anon_db_path(),
|
|
||||||
has_byok=has_byok,
|
|
||||||
)
|
|
||||||
|
|
||||||
token = _extract_session_token(raw_header) # gitleaks:allow — function name, not a secret
|
if not raw_session:
|
||||||
|
return _resolve_guest_session(request, response, has_byok)
|
||||||
|
|
||||||
|
token = _extract_session_token(raw_session) # gitleaks:allow — function name, not a secret
|
||||||
if not token:
|
if not token:
|
||||||
return CloudUser(
|
return _resolve_guest_session(request, response, has_byok)
|
||||||
user_id="anonymous",
|
|
||||||
tier="free",
|
|
||||||
db=_anon_db_path(),
|
|
||||||
has_byok=has_byok,
|
|
||||||
)
|
|
||||||
|
|
||||||
user_id = validate_session_jwt(token)
|
user_id = validate_session_jwt(token)
|
||||||
_ensure_provisioned(user_id)
|
_ensure_provisioned(user_id)
|
||||||
|
|
|
||||||
171
app/db/store.py
171
app/db/store.py
|
|
@ -23,12 +23,25 @@ _COUNT_CACHE: dict[tuple[str, ...], int] = {}
|
||||||
|
|
||||||
class Store:
|
class Store:
|
||||||
def __init__(self, db_path: Path, key: str = "") -> None:
    """Open the per-user database, run migrations, and wire up the recipe corpus.

    In cloud mode (RECIPE_DB_PATH set) the shared read-only corpus DB is
    ATTACHed as the "corpus" schema; self._cp holds the table prefix that
    corpus queries must use ("corpus." in cloud, "" in local mode).
    """
    import os

    self._db_path = str(db_path)
    self.conn: sqlite3.Connection = get_connection(db_path, key)
    for pragma in ("PRAGMA journal_mode=WAL", "PRAGMA foreign_keys=ON"):
        self.conn.execute(pragma)
    run_migrations(self.conn, MIGRATIONS_DIR)

    corpus_path = os.environ.get("RECIPE_DB_PATH", "")
    if corpus_path:
        # Shared corpus lives in its own file; expose it under the "corpus" schema.
        self.conn.execute("ATTACH DATABASE ? AS corpus", (corpus_path,))
        self._cp = "corpus."
        self._corpus_path = corpus_path
    else:
        # Local mode: corpus tables live in the main schema of this same DB.
        self._cp = ""
        self._corpus_path = self._db_path
|
||||||
|
|
||||||
def close(self) -> None:
|
def close(self) -> None:
|
||||||
self.conn.close()
|
self.conn.close()
|
||||||
|
|
||||||
|
|
@ -218,8 +231,8 @@ class Store:
|
||||||
|
|
||||||
def update_inventory_item(self, item_id: int, **kwargs) -> dict[str, Any] | None:
|
def update_inventory_item(self, item_id: int, **kwargs) -> dict[str, Any] | None:
|
||||||
allowed = {"quantity", "unit", "location", "sublocation",
|
allowed = {"quantity", "unit", "location", "sublocation",
|
||||||
"expiration_date", "opened_date", "status", "notes", "consumed_at",
|
"purchase_date", "expiration_date", "opened_date",
|
||||||
"disposal_reason"}
|
"status", "notes", "consumed_at", "disposal_reason"}
|
||||||
updates = {k: v for k, v in kwargs.items() if k in allowed}
|
updates = {k: v for k, v in kwargs.items() if k in allowed}
|
||||||
if not updates:
|
if not updates:
|
||||||
return self.get_inventory_item(item_id)
|
return self.get_inventory_item(item_id)
|
||||||
|
|
@ -372,8 +385,9 @@ class Store:
|
||||||
|
|
||||||
def _fts_ready(self) -> bool:
    """Return True if the recipes_fts virtual table exists.

    Checks the attached "corpus" schema when the corpus DB is attached,
    otherwise "main".
    """
    schema = "corpus" if self._cp else "main"
    row = self._fetch_one(
        f"SELECT 1 FROM {schema}.sqlite_master WHERE type='table' AND name='recipes_fts'"
    )
    return bool(row is not None)
|
||||||
|
|
||||||
|
|
@ -664,10 +678,12 @@ class Store:
|
||||||
return []
|
return []
|
||||||
|
|
||||||
# Pull up to 10× limit candidates so ranking has enough headroom.
|
# Pull up to 10× limit candidates so ranking has enough headroom.
|
||||||
|
# FTS5 pseudo-column in WHERE uses bare table name, not schema-qualified.
|
||||||
|
c = self._cp
|
||||||
sql = f"""
|
sql = f"""
|
||||||
SELECT r.*
|
SELECT r.*
|
||||||
FROM recipes_fts
|
FROM {c}recipes_fts
|
||||||
JOIN recipes r ON r.id = recipes_fts.rowid
|
JOIN {c}recipes r ON r.id = {c}recipes_fts.rowid
|
||||||
WHERE recipes_fts MATCH ?
|
WHERE recipes_fts MATCH ?
|
||||||
{where_extra}
|
{where_extra}
|
||||||
LIMIT ?
|
LIMIT ?
|
||||||
|
|
@ -701,9 +717,10 @@ class Store:
|
||||||
"CASE WHEN r.ingredient_names LIKE ? THEN 1 ELSE 0 END"
|
"CASE WHEN r.ingredient_names LIKE ? THEN 1 ELSE 0 END"
|
||||||
for _ in ingredient_names
|
for _ in ingredient_names
|
||||||
)
|
)
|
||||||
|
c = self._cp
|
||||||
sql = f"""
|
sql = f"""
|
||||||
SELECT r.*, ({match_score}) AS match_count
|
SELECT r.*, ({match_score}) AS match_count
|
||||||
FROM recipes r
|
FROM {c}recipes r
|
||||||
WHERE ({like_clauses})
|
WHERE ({like_clauses})
|
||||||
{where_extra}
|
{where_extra}
|
||||||
ORDER BY match_count DESC, r.id ASC
|
ORDER BY match_count DESC, r.id ASC
|
||||||
|
|
@ -713,7 +730,11 @@ class Store:
|
||||||
return self._fetch_all(sql, tuple(all_params))
|
return self._fetch_all(sql, tuple(all_params))
|
||||||
|
|
||||||
def get_recipe(self, recipe_id: int) -> dict | None:
    """Fetch a recipe by id — corpus schema first, then the user's own recipes.

    In local mode self._cp is "" so only the single (main-schema) lookup runs.
    """
    row = self._fetch_one(f"SELECT * FROM {self._cp}recipes WHERE id = ?", (recipe_id,))
    if row is not None or not self._cp:
        return row
    # Cloud mode: the id may belong to a user-assembled recipe in the main schema.
    return self._fetch_one("SELECT * FROM recipes WHERE id = ?", (recipe_id,))
||||||
|
|
||||||
def upsert_built_recipe(
|
def upsert_built_recipe(
|
||||||
self,
|
self,
|
||||||
|
|
@ -764,7 +785,7 @@ class Store:
|
||||||
return {}
|
return {}
|
||||||
placeholders = ",".join("?" * len(names))
|
placeholders = ",".join("?" * len(names))
|
||||||
rows = self._fetch_all(
|
rows = self._fetch_all(
|
||||||
f"SELECT name, elements FROM ingredient_profiles WHERE name IN ({placeholders})",
|
f"SELECT name, elements FROM {self._cp}ingredient_profiles WHERE name IN ({placeholders})",
|
||||||
tuple(names),
|
tuple(names),
|
||||||
)
|
)
|
||||||
result: dict[str, list[str]] = {}
|
result: dict[str, list[str]] = {}
|
||||||
|
|
@ -905,12 +926,25 @@ class Store:
|
||||||
"title": "r.title ASC",
|
"title": "r.title ASC",
|
||||||
}.get(sort_by, "sr.saved_at DESC")
|
}.get(sort_by, "sr.saved_at DESC")
|
||||||
|
|
||||||
|
c = self._cp
|
||||||
|
# In corpus-attached (cloud) mode: try corpus recipes first, fall back
|
||||||
|
# to user's own assembled recipes. In local mode: single join suffices.
|
||||||
|
if c:
|
||||||
|
recipe_join = (
|
||||||
|
f"LEFT JOIN {c}recipes rc ON rc.id = sr.recipe_id "
|
||||||
|
"LEFT JOIN recipes rm ON rm.id = sr.recipe_id"
|
||||||
|
)
|
||||||
|
title_col = "COALESCE(rc.title, rm.title) AS title"
|
||||||
|
else:
|
||||||
|
recipe_join = "JOIN recipes rc ON rc.id = sr.recipe_id"
|
||||||
|
title_col = "rc.title"
|
||||||
|
|
||||||
if collection_id is not None:
|
if collection_id is not None:
|
||||||
return self._fetch_all(
|
return self._fetch_all(
|
||||||
f"""
|
f"""
|
||||||
SELECT sr.*, r.title
|
SELECT sr.*, {title_col}
|
||||||
FROM saved_recipes sr
|
FROM saved_recipes sr
|
||||||
JOIN recipes r ON r.id = sr.recipe_id
|
{recipe_join}
|
||||||
JOIN recipe_collection_members rcm ON rcm.saved_recipe_id = sr.id
|
JOIN recipe_collection_members rcm ON rcm.saved_recipe_id = sr.id
|
||||||
WHERE rcm.collection_id = ?
|
WHERE rcm.collection_id = ?
|
||||||
ORDER BY {order}
|
ORDER BY {order}
|
||||||
|
|
@ -919,9 +953,9 @@ class Store:
|
||||||
)
|
)
|
||||||
return self._fetch_all(
|
return self._fetch_all(
|
||||||
f"""
|
f"""
|
||||||
SELECT sr.*, r.title
|
SELECT sr.*, {title_col}
|
||||||
FROM saved_recipes sr
|
FROM saved_recipes sr
|
||||||
JOIN recipes r ON r.id = sr.recipe_id
|
{recipe_join}
|
||||||
ORDER BY {order}
|
ORDER BY {order}
|
||||||
""",
|
""",
|
||||||
)
|
)
|
||||||
|
|
@ -936,10 +970,26 @@ class Store:
|
||||||
# ── recipe collections ────────────────────────────────────────────────
|
# ── recipe collections ────────────────────────────────────────────────
|
||||||
|
|
||||||
def create_collection(self, name: str, description: str | None) -> dict:
|
def create_collection(self, name: str, description: str | None) -> dict:
|
||||||
return self._insert_returning(
|
# INSERT RETURNING * omits aggregate columns (e.g. member_count); re-query
|
||||||
"INSERT INTO recipe_collections (name, description) VALUES (?, ?) RETURNING *",
|
# with the same SELECT used by get_collections() so the response shape is consistent.
|
||||||
|
cur = self.conn.execute(
|
||||||
|
"INSERT INTO recipe_collections (name, description) VALUES (?, ?)",
|
||||||
(name, description),
|
(name, description),
|
||||||
)
|
)
|
||||||
|
self.conn.commit()
|
||||||
|
new_id = cur.lastrowid
|
||||||
|
row = self._fetch_one(
|
||||||
|
"""
|
||||||
|
SELECT rc.*,
|
||||||
|
COUNT(rcm.saved_recipe_id) AS member_count
|
||||||
|
FROM recipe_collections rc
|
||||||
|
LEFT JOIN recipe_collection_members rcm ON rcm.collection_id = rc.id
|
||||||
|
WHERE rc.id = ?
|
||||||
|
GROUP BY rc.id
|
||||||
|
""",
|
||||||
|
(new_id,),
|
||||||
|
)
|
||||||
|
return row # type: ignore[return-value]
|
||||||
|
|
||||||
def delete_collection(self, collection_id: int) -> None:
|
def delete_collection(self, collection_id: int) -> None:
|
||||||
self.conn.execute(
|
self.conn.execute(
|
||||||
|
|
@ -1023,12 +1073,16 @@ class Store:
|
||||||
def _count_recipes_for_keywords(self, keywords: list[str]) -> int:
|
def _count_recipes_for_keywords(self, keywords: list[str]) -> int:
|
||||||
if not keywords:
|
if not keywords:
|
||||||
return 0
|
return 0
|
||||||
cache_key = (self._db_path, *sorted(keywords))
|
# Use corpus path as cache key so all cloud users share the same counts.
|
||||||
|
cache_key = (self._corpus_path, *sorted(keywords))
|
||||||
if cache_key in _COUNT_CACHE:
|
if cache_key in _COUNT_CACHE:
|
||||||
return _COUNT_CACHE[cache_key]
|
return _COUNT_CACHE[cache_key]
|
||||||
match_expr = self._browser_fts_query(keywords)
|
match_expr = self._browser_fts_query(keywords)
|
||||||
|
c = self._cp
|
||||||
|
# FTS5 pseudo-column in WHERE is always the bare (unqualified) table name,
|
||||||
|
# even when the table is accessed through an ATTACHed schema.
|
||||||
row = self.conn.execute(
|
row = self.conn.execute(
|
||||||
"SELECT count(*) FROM recipe_browser_fts WHERE recipe_browser_fts MATCH ?",
|
f"SELECT count(*) FROM {c}recipe_browser_fts WHERE recipe_browser_fts MATCH ?",
|
||||||
(match_expr,),
|
(match_expr,),
|
||||||
).fetchone()
|
).fetchone()
|
||||||
count = row[0] if row else 0
|
count = row[0] if row else 0
|
||||||
|
|
@ -1057,13 +1111,14 @@ class Store:
|
||||||
# Reuse cached count — avoids a second index scan on every page turn.
|
# Reuse cached count — avoids a second index scan on every page turn.
|
||||||
total = self._count_recipes_for_keywords(keywords)
|
total = self._count_recipes_for_keywords(keywords)
|
||||||
|
|
||||||
|
c = self._cp
|
||||||
rows = self._fetch_all(
|
rows = self._fetch_all(
|
||||||
"""
|
f"""
|
||||||
SELECT id, title, category, keywords, ingredient_names,
|
SELECT id, title, category, keywords, ingredient_names,
|
||||||
calories, fat_g, protein_g, sodium_mg
|
calories, fat_g, protein_g, sodium_mg
|
||||||
FROM recipes
|
FROM {c}recipes
|
||||||
WHERE id IN (
|
WHERE id IN (
|
||||||
SELECT rowid FROM recipe_browser_fts
|
SELECT rowid FROM {c}recipe_browser_fts
|
||||||
WHERE recipe_browser_fts MATCH ?
|
WHERE recipe_browser_fts MATCH ?
|
||||||
)
|
)
|
||||||
ORDER BY id ASC
|
ORDER BY id ASC
|
||||||
|
|
@ -1154,10 +1209,11 @@ class Store:
|
||||||
self.conn.commit()
|
self.conn.commit()
|
||||||
|
|
||||||
def get_plan_slots(self, plan_id: int) -> list[dict]:
|
def get_plan_slots(self, plan_id: int) -> list[dict]:
|
||||||
|
c = self._cp
|
||||||
return self._fetch_all(
|
return self._fetch_all(
|
||||||
"""SELECT s.*, r.title AS recipe_title
|
f"""SELECT s.*, r.title AS recipe_title
|
||||||
FROM meal_plan_slots s
|
FROM meal_plan_slots s
|
||||||
LEFT JOIN recipes r ON r.id = s.recipe_id
|
LEFT JOIN {c}recipes r ON r.id = s.recipe_id
|
||||||
WHERE s.plan_id = ?
|
WHERE s.plan_id = ?
|
||||||
ORDER BY s.day_of_week, s.meal_type""",
|
ORDER BY s.day_of_week, s.meal_type""",
|
||||||
(plan_id,),
|
(plan_id,),
|
||||||
|
|
@ -1165,10 +1221,11 @@ class Store:
|
||||||
|
|
||||||
def get_plan_recipes(self, plan_id: int) -> list[dict]:
|
def get_plan_recipes(self, plan_id: int) -> list[dict]:
|
||||||
"""Return full recipe rows for all recipes assigned to a plan."""
|
"""Return full recipe rows for all recipes assigned to a plan."""
|
||||||
|
c = self._cp
|
||||||
return self._fetch_all(
|
return self._fetch_all(
|
||||||
"""SELECT DISTINCT r.*
|
f"""SELECT DISTINCT r.*
|
||||||
FROM meal_plan_slots s
|
FROM meal_plan_slots s
|
||||||
JOIN recipes r ON r.id = s.recipe_id
|
JOIN {c}recipes r ON r.id = s.recipe_id
|
||||||
WHERE s.plan_id = ? AND s.recipe_id IS NOT NULL""",
|
WHERE s.plan_id = ? AND s.recipe_id IS NOT NULL""",
|
||||||
(plan_id,),
|
(plan_id,),
|
||||||
)
|
)
|
||||||
|
|
@ -1256,3 +1313,71 @@ class Store:
|
||||||
(pseudonym, directus_user_id),
|
(pseudonym, directus_user_id),
|
||||||
)
|
)
|
||||||
self.conn.commit()
|
self.conn.commit()
|
||||||
|
|
||||||
|
# ── Shopping list ─────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
def add_shopping_item(
|
||||||
|
self,
|
||||||
|
name: str,
|
||||||
|
quantity: float | None = None,
|
||||||
|
unit: str | None = None,
|
||||||
|
category: str | None = None,
|
||||||
|
notes: str | None = None,
|
||||||
|
source: str = "manual",
|
||||||
|
recipe_id: int | None = None,
|
||||||
|
sort_order: int = 0,
|
||||||
|
) -> dict:
|
||||||
|
return self._insert_returning(
|
||||||
|
"""INSERT INTO shopping_list_items
|
||||||
|
(name, quantity, unit, category, notes, source, recipe_id, sort_order)
|
||||||
|
VALUES (?, ?, ?, ?, ?, ?, ?, ?) RETURNING *""",
|
||||||
|
(name, quantity, unit, category, notes, source, recipe_id, sort_order),
|
||||||
|
)
|
||||||
|
|
||||||
|
def list_shopping_items(self, include_checked: bool = True) -> list[dict]:
|
||||||
|
where = "" if include_checked else "WHERE checked = 0"
|
||||||
|
self.conn.row_factory = sqlite3.Row
|
||||||
|
rows = self.conn.execute(
|
||||||
|
f"SELECT * FROM shopping_list_items {where} ORDER BY checked, sort_order, id",
|
||||||
|
).fetchall()
|
||||||
|
return [self._row_to_dict(r) for r in rows]
|
||||||
|
|
||||||
|
def get_shopping_item(self, item_id: int) -> dict | None:
|
||||||
|
self.conn.row_factory = sqlite3.Row
|
||||||
|
row = self.conn.execute(
|
||||||
|
"SELECT * FROM shopping_list_items WHERE id = ?", (item_id,)
|
||||||
|
).fetchone()
|
||||||
|
return self._row_to_dict(row) if row else None
|
||||||
|
|
||||||
|
def update_shopping_item(self, item_id: int, **kwargs) -> dict | None:
|
||||||
|
allowed = {"name", "quantity", "unit", "category", "checked", "notes", "sort_order"}
|
||||||
|
fields = {k: v for k, v in kwargs.items() if k in allowed and v is not None}
|
||||||
|
if not fields:
|
||||||
|
return self.get_shopping_item(item_id)
|
||||||
|
if "checked" in fields:
|
||||||
|
fields["checked"] = 1 if fields["checked"] else 0
|
||||||
|
set_clause = ", ".join(f"{k} = ?" for k in fields)
|
||||||
|
values = list(fields.values()) + [item_id]
|
||||||
|
self.conn.execute(
|
||||||
|
f"UPDATE shopping_list_items SET {set_clause}, updated_at = datetime('now') WHERE id = ?",
|
||||||
|
values,
|
||||||
|
)
|
||||||
|
self.conn.commit()
|
||||||
|
return self.get_shopping_item(item_id)
|
||||||
|
|
||||||
|
def delete_shopping_item(self, item_id: int) -> bool:
|
||||||
|
cur = self.conn.execute(
|
||||||
|
"DELETE FROM shopping_list_items WHERE id = ?", (item_id,)
|
||||||
|
)
|
||||||
|
self.conn.commit()
|
||||||
|
return cur.rowcount > 0
|
||||||
|
|
||||||
|
def clear_checked_shopping_items(self) -> int:
|
||||||
|
cur = self.conn.execute("DELETE FROM shopping_list_items WHERE checked = 1")
|
||||||
|
self.conn.commit()
|
||||||
|
return cur.rowcount
|
||||||
|
|
||||||
|
def clear_all_shopping_items(self) -> int:
|
||||||
|
cur = self.conn.execute("DELETE FROM shopping_list_items")
|
||||||
|
self.conn.commit()
|
||||||
|
return cur.rowcount
|
||||||
|
|
|
||||||
|
|
@ -89,6 +89,7 @@ class InventoryItemUpdate(BaseModel):
|
||||||
unit: Optional[str] = None
|
unit: Optional[str] = None
|
||||||
location: Optional[str] = None
|
location: Optional[str] = None
|
||||||
sublocation: Optional[str] = None
|
sublocation: Optional[str] = None
|
||||||
|
purchase_date: Optional[date] = None
|
||||||
expiration_date: Optional[date] = None
|
expiration_date: Optional[date] = None
|
||||||
opened_date: Optional[date] = None
|
opened_date: Optional[date] = None
|
||||||
status: Optional[str] = None
|
status: Optional[str] = None
|
||||||
|
|
@ -118,6 +119,9 @@ class InventoryItemResponse(BaseModel):
|
||||||
expiration_date: Optional[str]
|
expiration_date: Optional[str]
|
||||||
opened_date: Optional[str] = None
|
opened_date: Optional[str] = None
|
||||||
opened_expiry_date: Optional[str] = None
|
opened_expiry_date: Optional[str] = None
|
||||||
|
secondary_state: Optional[str] = None
|
||||||
|
secondary_uses: Optional[List[str]] = None
|
||||||
|
secondary_warning: Optional[str] = None
|
||||||
status: str
|
status: str
|
||||||
notes: Optional[str]
|
notes: Optional[str]
|
||||||
disposal_reason: Optional[str] = None
|
disposal_reason: Optional[str] = None
|
||||||
|
|
|
||||||
|
|
@ -84,8 +84,9 @@ class ElementClassifier:
|
||||||
name = ingredient_name.lower().strip()
|
name = ingredient_name.lower().strip()
|
||||||
if not name:
|
if not name:
|
||||||
return IngredientProfile(name="", elements=[], source="heuristic")
|
return IngredientProfile(name="", elements=[], source="heuristic")
|
||||||
|
c = self._store._cp
|
||||||
row = self._store._fetch_one(
|
row = self._store._fetch_one(
|
||||||
"SELECT * FROM ingredient_profiles WHERE name = ?", (name,)
|
f"SELECT * FROM {c}ingredient_profiles WHERE name = ?", (name,)
|
||||||
)
|
)
|
||||||
if row:
|
if row:
|
||||||
return self._row_to_profile(row)
|
return self._row_to_profile(row)
|
||||||
|
|
|
||||||
|
|
@ -55,11 +55,12 @@ class SubstitutionEngine:
|
||||||
ingredient_name: str,
|
ingredient_name: str,
|
||||||
constraint: str,
|
constraint: str,
|
||||||
) -> list[SubstitutionSwap]:
|
) -> list[SubstitutionSwap]:
|
||||||
rows = self._store._fetch_all("""
|
c = self._store._cp
|
||||||
|
rows = self._store._fetch_all(f"""
|
||||||
SELECT substitute_name, constraint_label,
|
SELECT substitute_name, constraint_label,
|
||||||
fat_delta, moisture_delta, glutamate_delta, protein_delta,
|
fat_delta, moisture_delta, glutamate_delta, protein_delta,
|
||||||
occurrence_count, compensation_hints
|
occurrence_count, compensation_hints
|
||||||
FROM substitution_pairs
|
FROM {c}substitution_pairs
|
||||||
WHERE original_name = ? AND constraint_label = ?
|
WHERE original_name = ? AND constraint_label = ?
|
||||||
ORDER BY occurrence_count DESC
|
ORDER BY occurrence_count DESC
|
||||||
""", (ingredient_name.lower(), constraint))
|
""", (ingredient_name.lower(), constraint))
|
||||||
|
|
|
||||||
|
|
@ -13,6 +13,7 @@ services:
|
||||||
environment:
|
environment:
|
||||||
CLOUD_MODE: "true"
|
CLOUD_MODE: "true"
|
||||||
CLOUD_DATA_ROOT: /devl/kiwi-cloud-data
|
CLOUD_DATA_ROOT: /devl/kiwi-cloud-data
|
||||||
|
RECIPE_DB_PATH: /devl/kiwi-corpus/recipes.db
|
||||||
KIWI_BASE_URL: https://menagerie.circuitforge.tech/kiwi
|
KIWI_BASE_URL: https://menagerie.circuitforge.tech/kiwi
|
||||||
# DIRECTUS_JWT_SECRET, HEIMDALL_URL, HEIMDALL_ADMIN_TOKEN — set in .env
|
# DIRECTUS_JWT_SECRET, HEIMDALL_URL, HEIMDALL_ADMIN_TOKEN — set in .env
|
||||||
# DEV ONLY: comma-separated IPs that bypass JWT auth (LAN testing without Caddy).
|
# DEV ONLY: comma-separated IPs that bypass JWT auth (LAN testing without Caddy).
|
||||||
|
|
@ -27,6 +28,8 @@ services:
|
||||||
- "host.docker.internal:host-gateway"
|
- "host.docker.internal:host-gateway"
|
||||||
volumes:
|
volumes:
|
||||||
- /devl/kiwi-cloud-data:/devl/kiwi-cloud-data
|
- /devl/kiwi-cloud-data:/devl/kiwi-cloud-data
|
||||||
|
# Recipe corpus — shared read-only NFS-backed SQLite (3.1M recipes, 2.9GB)
|
||||||
|
- /Library/Assets/kiwi/kiwi.db:/devl/kiwi-corpus/recipes.db:ro
|
||||||
# LLM config — shared with other CF products; read-only in container
|
# LLM config — shared with other CF products; read-only in container
|
||||||
- ${HOME}/.config/circuitforge:/root/.config/circuitforge:ro
|
- ${HOME}/.config/circuitforge:/root/.config/circuitforge:ro
|
||||||
networks:
|
networks:
|
||||||
|
|
|
||||||
|
|
@ -93,6 +93,9 @@ export interface InventoryItem {
|
||||||
expiration_date: string | null
|
expiration_date: string | null
|
||||||
opened_date: string | null
|
opened_date: string | null
|
||||||
opened_expiry_date: string | null
|
opened_expiry_date: string | null
|
||||||
|
secondary_state: string | null
|
||||||
|
secondary_uses: string[] | null
|
||||||
|
secondary_warning: string | null
|
||||||
status: string
|
status: string
|
||||||
source: string
|
source: string
|
||||||
notes: string | null
|
notes: string | null
|
||||||
|
|
@ -187,7 +190,7 @@ export const inventoryAPI = {
|
||||||
*/
|
*/
|
||||||
async listItems(params?: {
|
async listItems(params?: {
|
||||||
location?: string
|
location?: string
|
||||||
status?: string
|
item_status?: string
|
||||||
limit?: number
|
limit?: number
|
||||||
offset?: number
|
offset?: number
|
||||||
}): Promise<InventoryItem[]> {
|
}): Promise<InventoryItem[]> {
|
||||||
|
|
@ -913,6 +916,77 @@ export const browserAPI = {
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ── Shopping List ─────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
export interface GroceryLink {
|
||||||
|
ingredient: string
|
||||||
|
retailer: string
|
||||||
|
url: string
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ShoppingItem {
|
||||||
|
id: number
|
||||||
|
name: string
|
||||||
|
quantity: number | null
|
||||||
|
unit: string | null
|
||||||
|
category: string | null
|
||||||
|
checked: boolean
|
||||||
|
notes: string | null
|
||||||
|
source: string
|
||||||
|
recipe_id: number | null
|
||||||
|
sort_order: number
|
||||||
|
created_at: string
|
||||||
|
updated_at: string
|
||||||
|
grocery_links: GroceryLink[]
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ShoppingItemCreate {
|
||||||
|
name: string
|
||||||
|
quantity?: number
|
||||||
|
unit?: string
|
||||||
|
category?: string
|
||||||
|
notes?: string
|
||||||
|
source?: string
|
||||||
|
recipe_id?: number
|
||||||
|
sort_order?: number
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ShoppingItemUpdate {
|
||||||
|
name?: string
|
||||||
|
quantity?: number
|
||||||
|
unit?: string
|
||||||
|
category?: string
|
||||||
|
checked?: boolean
|
||||||
|
notes?: string
|
||||||
|
sort_order?: number
|
||||||
|
}
|
||||||
|
|
||||||
|
export const shoppingAPI = {
|
||||||
|
list: (includeChecked = true) =>
|
||||||
|
api.get<ShoppingItem[]>('/shopping', { params: { include_checked: includeChecked } }).then(r => r.data),
|
||||||
|
|
||||||
|
add: (item: ShoppingItemCreate) =>
|
||||||
|
api.post<ShoppingItem>('/shopping', item).then(r => r.data),
|
||||||
|
|
||||||
|
addFromRecipe: (recipeId: number, includeCovered = false) =>
|
||||||
|
api.post<ShoppingItem[]>('/shopping/from-recipe', { recipe_id: recipeId, include_covered: includeCovered }).then(r => r.data),
|
||||||
|
|
||||||
|
update: (id: number, update: ShoppingItemUpdate) =>
|
||||||
|
api.patch<ShoppingItem>(`/shopping/${id}`, update).then(r => r.data),
|
||||||
|
|
||||||
|
remove: (id: number) =>
|
||||||
|
api.delete(`/shopping/${id}`),
|
||||||
|
|
||||||
|
clearChecked: () =>
|
||||||
|
api.delete('/shopping/checked'),
|
||||||
|
|
||||||
|
clearAll: () =>
|
||||||
|
api.delete('/shopping/all'),
|
||||||
|
|
||||||
|
confirmPurchase: (id: number, location = 'pantry', quantity?: number, unit?: string) =>
|
||||||
|
api.post(`/shopping/${id}/confirm`, { location, quantity, unit }).then(r => r.data),
|
||||||
|
}
|
||||||
|
|
||||||
// ── Orch Usage ────────────────────────────────────────────────────────────────
|
// ── Orch Usage ────────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
export async function getOrchUsage(): Promise<OrchUsage | null> {
|
export async function getOrchUsage(): Promise<OrchUsage | null> {
|
||||||
|
|
@ -920,4 +994,22 @@ export async function getOrchUsage(): Promise<OrchUsage | null> {
|
||||||
return resp.data
|
return resp.data
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ── Session Bootstrap ─────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
export interface SessionInfo {
|
||||||
|
auth: 'local' | 'anon' | 'authed'
|
||||||
|
tier: string
|
||||||
|
has_byok: boolean
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Call once on app load. Logs auth= + tier= server-side for analytics. */
|
||||||
|
export async function bootstrapSession(): Promise<SessionInfo | null> {
|
||||||
|
try {
|
||||||
|
const resp = await api.get<SessionInfo>('/session/bootstrap')
|
||||||
|
return resp.data
|
||||||
|
} catch {
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
export default api
|
export default api
|
||||||
|
|
|
||||||
|
|
@ -56,7 +56,7 @@ export const useInventoryStore = defineStore('inventory', () => {
|
||||||
|
|
||||||
try {
|
try {
|
||||||
items.value = await inventoryAPI.listItems({
|
items.value = await inventoryAPI.listItems({
|
||||||
status: statusFilter.value === 'all' ? undefined : statusFilter.value,
|
item_status: statusFilter.value === 'all' ? undefined : statusFilter.value,
|
||||||
location: locationFilter.value === 'all' ? undefined : locationFilter.value,
|
location: locationFilter.value === 'all' ? undefined : locationFilter.value,
|
||||||
limit: 1000,
|
limit: 1000,
|
||||||
})
|
})
|
||||||
|
|
|
||||||
Loading…
Reference in a new issue