feat: cf-orch integration — CFOrchClient for recipe gen + Docuvision OCR fast-path #11
179 changed files with 553 additions and 20511 deletions
44  .cliff.toml
@@ -1,44 +0,0 @@
# git-cliff changelog configuration for Kiwi
# See: https://git-cliff.org/docs/configuration

[changelog]
header = """
# Changelog\n
"""
body = """
{% if version %}\
## [{{ version | trim_start_matches(pat="v") }}] - {{ timestamp | date(format="%Y-%m-%d") }}
{% else %}\
## [Unreleased]
{% endif %}\
{% for group, commits in commits | group_by(attribute="group") %}
### {{ group | upper_first }}
{% for commit in commits %}
- {% if commit.scope %}**{{ commit.scope }}:** {% endif %}{{ commit.message | upper_first }}\
{% endfor %}
{% endfor %}\n
"""
trim = true

[git]
conventional_commits = true
filter_unconventional = true
split_commits = false
commit_preprocessors = []
commit_parsers = [
  { message = "^feat", group = "Features" },
  { message = "^fix", group = "Bug Fixes" },
  { message = "^perf", group = "Performance" },
  { message = "^refactor", group = "Refactoring" },
  { message = "^docs", group = "Documentation" },
  { message = "^test", group = "Testing" },
  { message = "^chore", group = "Chores" },
  { message = "^ci", group = "CI/CD" },
  { message = "^revert", group = "Reverts" },
]
filter_commits = false
tag_pattern = "v[0-9].*"
skip_tags = ""
ignore_tags = ""
topo_order = false
sort_commits = "oldest"
50  .env.example
@@ -21,23 +21,6 @@ DATA_DIR=./data
# IP this machine advertises to the coordinator (must be reachable from coordinator host)
# CF_ORCH_ADVERTISE_HOST=10.1.10.71

# CF-core hosted coordinator (managed cloud GPU inference — Paid+ tier)
# Set CF_ORCH_URL to use a hosted cf-orch coordinator instead of self-hosting.
# CF_LICENSE_KEY is read automatically by CFOrchClient for bearer auth.
# CF_ORCH_URL=https://orch.circuitforge.tech
# CF_LICENSE_KEY=CFG-KIWI-xxxx-xxxx-xxxx

# LLM backend — env-var auto-config (no llm.yaml needed for bare-metal users)
# LLMRouter checks these in priority order:
# 1. Anthropic cloud — set ANTHROPIC_API_KEY
# 2. OpenAI cloud — set OPENAI_API_KEY
# 3. Local Ollama — set OLLAMA_HOST (+ optionally OLLAMA_MODEL)
# All three are optional; leave unset to rely on a local llm.yaml instead.
# ANTHROPIC_API_KEY=sk-ant-...
# OPENAI_API_KEY=sk-...
# OLLAMA_HOST=http://localhost:11434
# OLLAMA_MODEL=llama3.2

# Processing
USE_GPU=true
GPU_MEMORY_LIMIT=6144
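The priority order spelled out in the comments above can be sketched roughly as follows. This is an illustrative stand-in, not the actual LLMRouter from circuitforge-core (whose code is not part of this diff):

```python
# Hypothetical sketch of the documented backend priority — the real resolution
# happens inside circuitforge-core's LLMRouter, which this PR does not show.
import os

def pick_llm_backend() -> str | None:
    """Return the backend the environment selects, in the documented order."""
    if os.getenv("ANTHROPIC_API_KEY"):
        return "anthropic"  # 1. Anthropic cloud
    if os.getenv("OPENAI_API_KEY"):
        return "openai"     # 2. OpenAI cloud
    if os.getenv("OLLAMA_HOST"):
        return "ollama"     # 3. Local Ollama (OLLAMA_MODEL optional)
    return None             # fall back to a local llm.yaml, if one exists
```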
@@ -51,12 +34,6 @@ ENABLE_OCR=false
DEBUG=false
CLOUD_MODE=false
DEMO_MODE=false
# Product identifier reported in cf-orch coordinator analytics for per-app breakdown
CF_APP_NAME=kiwi
# USE_ORCH_SCHEDULER: use coordinator-aware multi-GPU scheduler instead of local FIFO.
# Unset = auto-detect: true if CLOUD_MODE or circuitforge_orch is installed (paid+ local).
# Set false to force LocalScheduler even when cf-orch is present.
# USE_ORCH_SCHEDULER=false

# Cloud mode (set in compose.cloud.yml; also set here for reference)
# CLOUD_DATA_ROOT=/devl/kiwi-cloud-data
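Read literally, the auto-detect rule described above amounts to something like the sketch below; this is illustrative only, since the real check lives in Kiwi's scheduler wiring, which is not shown in this diff:

```python
# Hypothetical sketch of the USE_ORCH_SCHEDULER auto-detect rule.
import importlib.util
import os

def use_orch_scheduler() -> bool:
    explicit = os.getenv("USE_ORCH_SCHEDULER")
    if explicit is not None:
        # An explicit setting always wins over auto-detection.
        return explicit.strip().lower() in ("1", "true", "yes")
    cloud_mode = os.getenv("CLOUD_MODE", "false").strip().lower() == "true"
    orch_installed = importlib.util.find_spec("circuitforge_orch") is not None
    return cloud_mode or orch_installed
```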
@@ -74,30 +51,5 @@ CF_APP_NAME=kiwi
# HEIMDALL_URL=https://license.circuitforge.tech
# HEIMDALL_ADMIN_TOKEN=

# Directus JWT (must match cf-directus SECRET env var exactly, including base64 == padding)
# Directus JWT (must match cf-directus SECRET env var)
# DIRECTUS_JWT_SECRET=

# E2E test account (Directus — free tier, used by automated tests)
# E2E_TEST_EMAIL=e2e@circuitforge.tech
# E2E_TEST_PASSWORD=
# E2E_TEST_USER_ID=

# In-app feedback → Forgejo issue creation
# FORGEJO_API_TOKEN=
# FORGEJO_REPO=Circuit-Forge/kiwi
# FORGEJO_API_URL=https://git.opensourcesolarpunk.com/api/v1

# Affiliate links (optional — plain URLs are shown if unset)
# Amazon Associates tag (circuitforge_core.affiliates, retailer="amazon")
# AMAZON_ASSOCIATES_TAG=circuitforge-20
# Instacart affiliate ID (circuitforge_core.affiliates, retailer="instacart")
# INSTACART_AFFILIATE_ID=circuitforge
# Walmart Impact network affiliate ID (inline, path-based redirect)
# WALMART_AFFILIATE_ID=


# Community PostgreSQL — shared across CF products (cloud only; leave unset for local dev)
# Points at cf-orch's cf-community-postgres container (port 5434 on the orch host).
# When unset, community write paths fail soft with a plain-language message.
# COMMUNITY_DB_URL=postgresql://cf_community:changeme@cf-orch-host:5434/cf_community
# COMMUNITY_PSEUDONYM_SALT=change-this-to-a-random-32-char-string
@@ -1,62 +0,0 @@
# Kiwi CI — lint, type-check, test on PR/push
# Full-stack: FastAPI (Python) + Vue 3 SPA (Node)
# Adapted from Circuit-Forge/cf-agents workflows/ci.yml (cf-agents#4 tracks the
# upstream ci-fullstack.yml variant; update this file when that lands).
#
# Note: frontend has no test suite yet — CI runs typecheck only.
# Add `npm run test` when vitest is wired (kiwi#XX).
#
# circuitforge-core is not on PyPI — installed from Forgejo git (public repo).

name: CI

on:
  push:
    branches: [main, 'feature/**', 'fix/**']
  pull_request:
    branches: [main]

jobs:
  backend:
    name: Backend (Python)
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - uses: actions/setup-python@v5
        with:
          python-version: '3.11'
          cache: pip

      - name: Install circuitforge-core
        run: pip install git+https://git.opensourcesolarpunk.com/Circuit-Forge/circuitforge-core.git@main

      - name: Install dependencies
        run: pip install -e ".[dev]" || pip install -e . pytest pytest-asyncio httpx ruff

      - name: Lint
        run: ruff check .

      - name: Test
        run: pytest tests/ -v --tb=short

  frontend:
    name: Frontend (Vue)
    runs-on: ubuntu-latest
    defaults:
      run:
        working-directory: frontend
    steps:
      - uses: actions/checkout@v4

      - uses: actions/setup-node@v4
        with:
          node-version: '20'
          cache: npm
          cache-dependency-path: frontend/package-lock.json

      - name: Install dependencies
        run: npm ci

      - name: Type check
        run: npx vue-tsc --noEmit
@@ -1,34 +0,0 @@
# Mirror push to GitHub and Codeberg on every push to main or tag.
# Copied from Circuit-Forge/cf-agents workflows/mirror.yml
# Required secrets: GITHUB_MIRROR_TOKEN, CODEBERG_MIRROR_TOKEN

name: Mirror

on:
  push:
    branches: [main]
    tags: ['v*']

jobs:
  mirror:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Mirror to GitHub
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_MIRROR_TOKEN }}
          REPO: ${{ github.event.repository.name }}
        run: |
          git remote add github "https://x-access-token:${GITHUB_TOKEN}@github.com/CircuitForgeLLC/${REPO}.git"
          git push github --mirror

      - name: Mirror to Codeberg
        env:
          CODEBERG_TOKEN: ${{ secrets.CODEBERG_MIRROR_TOKEN }}
          REPO: ${{ github.event.repository.name }}
        run: |
          git remote add codeberg "https://CircuitForge:${CODEBERG_TOKEN}@codeberg.org/CircuitForge/${REPO}.git"
          git push codeberg --mirror
@@ -1,71 +0,0 @@
# Tag-triggered release workflow.
# Generates changelog and creates Forgejo release on v* tags.
# Copied from Circuit-Forge/cf-agents workflows/release.yml
#
# Docker push is intentionally disabled — BSL 1.1 registry policy not yet resolved.
# Tracked in Circuit-Forge/cf-agents#3. Re-enable the Docker steps when that lands.
#
# Required secrets: FORGEJO_RELEASE_TOKEN
# (GHCR_TOKEN not needed until Docker push is enabled)

name: Release

on:
  push:
    tags: ['v*']

jobs:
  release:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0

      # ── Changelog ────────────────────────────────────────────────────────────
      - name: Generate changelog
        uses: orhun/git-cliff-action@v3
        id: cliff
        with:
          config: .cliff.toml
          args: --latest --strip header
        env:
          OUTPUT: CHANGES.md

      # ── Docker (disabled — BSL registry policy pending cf-agents#3) ──────────
      # - name: Set up QEMU
      #   uses: docker/setup-qemu-action@v3
      # - name: Set up Buildx
      #   uses: docker/setup-buildx-action@v3
      # - name: Log in to GHCR
      #   uses: docker/login-action@v3
      #   with:
      #     registry: ghcr.io
      #     username: ${{ github.actor }}
      #     password: ${{ secrets.GHCR_TOKEN }}
      # - name: Build and push Docker image
      #   uses: docker/build-push-action@v6
      #   with:
      #     context: .
      #     push: true
      #     platforms: linux/amd64,linux/arm64
      #     tags: |
      #       ghcr.io/circuitforgellc/kiwi:${{ github.ref_name }}
      #       ghcr.io/circuitforgellc/kiwi:latest
      #     cache-from: type=gha
      #     cache-to: type=gha,mode=max

      # ── Forgejo Release ───────────────────────────────────────────────────────
      - name: Create Forgejo release
        env:
          FORGEJO_TOKEN: ${{ secrets.FORGEJO_RELEASE_TOKEN }}
          REPO: ${{ github.event.repository.name }}
          TAG: ${{ github.ref_name }}
          NOTES: ${{ steps.cliff.outputs.content }}
        run: |
          curl -sS -X POST \
            "https://git.opensourcesolarpunk.com/api/v1/repos/Circuit-Forge/${REPO}/releases" \
            -H "Authorization: token ${FORGEJO_TOKEN}" \
            -H "Content-Type: application/json" \
            -d "$(jq -n --arg tag "$TAG" --arg body "$NOTES" \
              '{tag_name: $tag, name: $tag, body: $body}')"
59  .github/workflows/ci.yml (vendored)
@@ -1,59 +0,0 @@
# Kiwi CI — runs on GitHub mirror for public credibility badge.
# Forgejo (.forgejo/workflows/ci.yml) is the canonical CI — keep these in sync.
# No Forgejo-specific secrets used here; circuitforge-core is public on Forgejo.
#
# Note: frontend has no test suite yet — CI runs typecheck only.
# Add 'npm run test' when vitest is wired.

name: CI

on:
  push:
    branches: [main]
  pull_request:
    branches: [main]

jobs:
  backend:
    name: Backend (Python)
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - uses: actions/setup-python@v5
        with:
          python-version: '3.11'
          cache: pip

      - name: Install circuitforge-core
        run: pip install git+https://git.opensourcesolarpunk.com/Circuit-Forge/circuitforge-core.git@main

      - name: Install dependencies
        run: pip install -e . pytest pytest-asyncio httpx ruff

      - name: Lint
        run: ruff check .

      - name: Test
        run: pytest tests/ -v --tb=short

  frontend:
    name: Frontend (Vue)
    runs-on: ubuntu-latest
    defaults:
      run:
        working-directory: frontend
    steps:
      - uses: actions/checkout@v4

      - uses: actions/setup-node@v4
        with:
          node-version: '20'
          cache: npm
          cache-dependency-path: frontend/package-lock.json

      - name: Install dependencies
        run: npm ci

      - name: Type check
        run: npx vue-tsc --noEmit
6  .gitignore (vendored)
@@ -1,7 +1,4 @@

# CLAUDE.md — gitignored per BSL 1.1 commercial policy
CLAUDE.md

# Superpowers brainstorming artifacts
.superpowers/

@@ -25,6 +22,3 @@ data/

# Test artifacts (MagicMock sqlite files from pytest)
<MagicMock*

# Playwright / debug screenshots
debug-screenshots/
@@ -1,34 +0,0 @@
# Kiwi gitleaks config — extends base CircuitForge config with local rules

[extend]
path = "/Library/Development/CircuitForge/circuitforge-hooks/gitleaks.toml"

# ── Global allowlist ──────────────────────────────────────────────────────────
# Amazon grocery department IDs (rh=n:<10-digit>) false-positive as phone
# numbers. locale_config.py is a static lookup table with no secrets.

[allowlist]
# Amazon grocery dept IDs (rh=n:<digits>) false-positive as phone numbers.
regexes = [
  '''rh=n:\d{8,12}''',
]

# ── Test fixture allowlists ───────────────────────────────────────────────────

[[rules]]
id = "cf-generic-env-token"
description = "Generic KEY=<token> in env-style assignment — catches FORGEJO_API_TOKEN=hex etc."
regex = '''(?i)(token|secret|key|password|passwd|pwd|api_key)\s*[=:]\s*['"]?[A-Za-z0-9\-_]{20,}['"]?'''
[rules.allowlist]
paths = [
  '.*test.*',
]
regexes = [
  'api_key:\s*ollama',
  'api_key:\s*any',
  'your-[a-z\-]+-here',
  'replace-with-',
  'xxxx',
  'test-fixture-',
  'CFG-KIWI-TEST-',
]
@@ -1,7 +0,0 @@
# Findings suppressed here are historical false positives or already-rotated secrets.
# .env was accidentally included in the initial commit; it is now gitignored.
# Rotate DIRECTUS_JWT_SECRET if it has not been changed since 2026-03-30.

# c166e5216 (chore: initial commit) — .env included by mistake
c166e5216af532a08112ef87e8542cd51c184115:.env:generic-api-key:25
c166e5216af532a08112ef87e8542cd51c184115:.env:cf-generic-env-token:25
12  Dockerfile
@@ -11,23 +11,13 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
COPY circuitforge-core/ ./circuitforge-core/
RUN conda run -n base pip install --no-cache-dir -e ./circuitforge-core

# Install circuitforge-orch — needed for the cf-orch-agent sidecar (compose.override.yml)
COPY circuitforge-orch/ ./circuitforge-orch/

# Create kiwi conda env and install app
COPY kiwi/environment.yml .
RUN conda env create -f environment.yml

COPY kiwi/ ./kiwi/

# Remove gitignored config files that may exist locally — defense-in-depth.
# The parent .dockerignore should exclude these, but an explicit rm guarantees
# they never end up in the cloud image regardless of .dockerignore placement.
RUN rm -f /app/kiwi/.env

# Install cf-core and cf-orch into the kiwi env BEFORE installing kiwi
# Install cf-core into the kiwi env BEFORE installing kiwi (kiwi lists it as a dep)
RUN conda run -n kiwi pip install --no-cache-dir -e /app/circuitforge-core
RUN conda run -n kiwi pip install --no-cache-dir -e /app/circuitforge-orch
WORKDIR /app/kiwi
RUN conda run -n kiwi pip install --no-cache-dir -e .
28  LICENSE-BSL
@@ -1,28 +0,0 @@
Business Source License 1.1

Licensor: Circuit Forge LLC
Licensed Work: Kiwi — Pantry tracking and leftover recipe suggestions
Copyright (c) 2026 Circuit Forge LLC
Additional Use Grant: You may use the Licensed Work for personal,
                      non-commercial pantry tracking and recipe suggestion
                      purposes only.
Change Date: 2030-01-01
Change License: MIT License

For the full Business Source License 1.1 text, see:
https://mariadb.com/bsl11/

---

This license applies to the following components of Kiwi:

- app/services/recipe/recipe_engine.py
- app/services/recipe/assembly_recipes.py
- app/services/recipe/llm_recipe.py
- app/services/expiration_predictor.py
- app/tasks/scheduler.py
- app/tasks/runner.py
- app/tiers.py
- app/cloud_session.py
- frontend/src/components/RecipesView.vue
- frontend/src/stores/recipes.ts
34  LICENSE-MIT
@@ -1,34 +0,0 @@
MIT License

Copyright (c) 2026 Circuit Forge LLC

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

---

This license applies to the following components of Kiwi:

- app/api/endpoints/inventory.py
- app/api/endpoints/ocr.py
- app/db/store.py
- app/db/migrations/
- app/core/config.py
- scripts/pipeline/
- scripts/download_datasets.py
- scripts/backfill_texture_profiles.py
26  README.md
@@ -6,11 +6,7 @@

Scan barcodes, photograph receipts, and get recipe ideas based on what you already have — before it expires.

**LLM support is optional.** Inventory tracking, barcode scanning, expiry alerts, CSV export, and receipt upload all work without any LLM configured. AI features (receipt OCR, recipe suggestions, meal planning) activate when a backend is available and are BYOK-unlockable at any tier.

**Status:** Beta · CircuitForge LLC

**[Documentation](https://docs.circuitforge.tech/kiwi/)** · [circuitforge.tech](https://circuitforge.tech)
**Status:** Pre-alpha · CircuitForge LLC

---

@@ -18,14 +14,9 @@ Scan barcodes, photograph receipts, and get recipe ideas based on what you alrea

- **Inventory tracking** — add items by barcode scan, receipt upload, or manually
- **Expiry alerts** — know what's about to go bad
- **Recipe browser** — browse the full recipe corpus by cuisine, meal type, dietary preference, or main ingredient; pantry match percentage shown inline (Free)
- **Saved recipes** — bookmark any recipe with notes, a 0–5 star rating, and free-text style tags (Free); organize into named collections (Paid)
- **Receipt OCR** — extract line items from receipt photos automatically (Paid tier, BYOK-unlockable)
- **Recipe suggestions** — four levels from pantry-match to full LLM generation (Paid tier, BYOK-unlockable)
- **Style auto-classifier** — LLM suggests style tags (comforting, hands-off, quick, etc.) for saved recipes (Paid tier, BYOK-unlockable)
- **Leftover mode** — prioritize nearly-expired items in recipe ranking (Free, 5/day; unlimited at Paid+)
- **LLM backend config** — configure inference via `circuitforge-core` env-var system; BYOK unlocks Paid AI features at any tier
- **Feedback FAB** — in-app feedback button; status probed on load, hidden if CF feedback endpoint unreachable
- **Receipt OCR** — extract line items from receipt photos automatically (Paid tier)
- **Recipe suggestions** — LLM-powered ideas based on what's expiring (Paid tier, BYOK-unlockable)
- **Leftover mode** — prioritize nearly-expired items in recipe ranking (Premium tier)

## Stack

@@ -61,16 +52,11 @@ cp .env.example .env
| Receipt upload | ✓ | ✓ | ✓ |
| Expiry alerts | ✓ | ✓ | ✓ |
| CSV export | ✓ | ✓ | ✓ |
| Recipe browser (domain/category) | ✓ | ✓ | ✓ |
| Save recipes + notes + star rating | ✓ | ✓ | ✓ |
| Style tags (manual, free-text) | ✓ | ✓ | ✓ |
| Receipt OCR | BYOK | ✓ | ✓ |
| Recipe suggestions (L1–L4) | BYOK | ✓ | ✓ |
| Named recipe collections | — | ✓ | ✓ |
| LLM style auto-classifier | — | BYOK | ✓ |
| Recipe suggestions | BYOK | ✓ | ✓ |
| Meal planning | — | ✓ | ✓ |
| Multi-household | — | — | ✓ |
| Leftover mode (5/day) | ✓ | ✓ | ✓ |
| Leftover mode | — | — | ✓ |

BYOK = bring your own LLM backend (configure `~/.config/circuitforge/llm.yaml`)
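The BYOK column in the table reads as "this Paid feature also unlocks on Free when the user brings their own backend". app/tiers.py itself is not part of this diff; only its `can_use(feature, tier, has_byok)` call sites appear later (in community.py), so the sketch below is a guess at the gate's shape rather than the real implementation:

```python
# Hypothetical sketch of the tier/BYOK gate — feature names and tier labels
# are placeholders, not values confirmed by this PR.
BYOK_UNLOCKABLE = {"receipt_ocr", "recipe_suggestions"}  # assumed feature keys
PAID_TIERS = {"paid", "premium"}

def can_use(feature: str, tier: str, has_byok: bool) -> bool:
    if tier in PAID_TIERS:
        return True
    # Free tier: BYOK unlocks the AI features marked "BYOK" in the table above.
    return has_byok and feature in BYOK_UNLOCKABLE
```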
@@ -3,5 +3,5 @@
Kiwi: Pantry tracking and leftover recipe suggestions.
"""

__version__ = "0.2.0"
__version__ = "0.1.0"
__author__ = "Alan 'pyr0ball' Weinstock"

@@ -1,358 +0,0 @@
# app/api/endpoints/community.py
# MIT License

from __future__ import annotations

import asyncio
import logging
import re
import sqlite3
from datetime import datetime, timezone

from fastapi import APIRouter, Depends, HTTPException, Request, Response

from app.cloud_session import CloudUser, get_session
from app.core.config import settings
from app.db.store import Store
from app.services.community.feed import posts_to_rss

logger = logging.getLogger(__name__)

router = APIRouter(prefix="/community", tags=["community"])

_community_store = None


def _get_community_store():
    return _community_store


def init_community_store(community_db_url: str | None) -> None:
    global _community_store
    if not community_db_url:
        logger.info(
            "COMMUNITY_DB_URL not set — community write features disabled. "
            "Browse still works via cloud feed."
        )
        return
    from circuitforge_core.community import CommunityDB
    from app.services.community.community_store import KiwiCommunityStore
    db = CommunityDB(dsn=community_db_url)
    db.run_migrations()
    _community_store = KiwiCommunityStore(db)
    logger.info("Community store initialized.")


def _visible(post, session=None) -> bool:
    """Return False for premium-tier posts when the session is not paid/premium."""
    tier = getattr(post, "tier", None)
    if tier == "premium":
        if session is None or getattr(session, "tier", None) not in ("paid", "premium"):
            return False
    return True


@router.get("/posts")
async def list_posts(
    post_type: str | None = None,
    dietary_tags: str | None = None,
    allergen_exclude: str | None = None,
    page: int = 1,
    page_size: int = 20,
):
    store = _get_community_store()
    if store is None:
        return {
            "posts": [],
            "total": 0,
            "page": page,
            "page_size": page_size,
            "note": "Community DB not available on this instance.",
        }

    dietary = [t.strip() for t in dietary_tags.split(",")] if dietary_tags else None
    allergen_ex = [t.strip() for t in allergen_exclude.split(",")] if allergen_exclude else None
    offset = (page - 1) * min(page_size, 100)

    posts = await asyncio.to_thread(
        store.list_posts,
        limit=min(page_size, 100),
        offset=offset,
        post_type=post_type,
        dietary_tags=dietary,
        allergen_exclude=allergen_ex,
    )
    visible = [_post_to_dict(p) for p in posts if _visible(p)]
    return {"posts": visible, "total": len(visible), "page": page, "page_size": page_size}


@router.get("/posts/{slug}")
async def get_post(slug: str, request: Request):
    store = _get_community_store()
    if store is None:
        raise HTTPException(status_code=503, detail="Community DB not available on this instance.")

    post = await asyncio.to_thread(store.get_post_by_slug, slug)
    if post is None:
        raise HTTPException(status_code=404, detail="Post not found.")

    accept = request.headers.get("accept", "")
    if "application/activity+json" in accept or "application/ld+json" in accept:
        from app.services.community.ap_compat import post_to_ap_json_ld
        base_url = str(request.base_url).rstrip("/")
        return post_to_ap_json_ld(_post_to_dict(post), base_url=base_url)

    return _post_to_dict(post)


@router.get("/feed.rss")
async def get_rss_feed(request: Request):
    store = _get_community_store()
    posts_data: list[dict] = []
    if store is not None:
        posts = await asyncio.to_thread(store.list_posts, limit=50)
        posts_data = [_post_to_dict(p) for p in posts]

    base_url = str(request.base_url).rstrip("/")
    rss = posts_to_rss(posts_data, base_url=base_url)
    return Response(content=rss, media_type="application/rss+xml; charset=utf-8")


@router.get("/local-feed")
async def local_feed():
    store = _get_community_store()
    if store is None:
        return []
    posts = await asyncio.to_thread(store.list_posts, limit=50)
    return [_post_to_dict(p) for p in posts]


@router.get("/hall-of-chaos")
async def hall_of_chaos():
    """Hidden easter egg endpoint -- returns the 10 most chaotic bloopers."""
    store = _get_community_store()
    if store is None:
        return {"posts": [], "chaos_level": 0}
    posts = await asyncio.to_thread(
        store.list_posts, limit=10, post_type="recipe_blooper"
    )
    return {
        "posts": [_post_to_dict(p) for p in posts],
        "chaos_level": len(posts),
    }


_VALID_POST_TYPES = {"plan", "recipe_success", "recipe_blooper"}
_MAX_TITLE_LEN = 200
_MAX_TEXT_LEN = 2000


def _validate_publish_body(body: dict) -> None:
    """Raise HTTPException(422) for any invalid fields in a publish request."""
    post_type = body.get("post_type", "plan")
    if post_type not in _VALID_POST_TYPES:
        raise HTTPException(
            status_code=422,
            detail=f"post_type must be one of: {', '.join(sorted(_VALID_POST_TYPES))}",
        )
    title = body.get("title") or ""
    if len(title) > _MAX_TITLE_LEN:
        raise HTTPException(status_code=422, detail=f"title exceeds {_MAX_TITLE_LEN} character limit.")
    for field in ("description", "outcome_notes", "recipe_name"):
        value = body.get(field)
        if value and len(str(value)) > _MAX_TEXT_LEN:
            raise HTTPException(status_code=422, detail=f"{field} exceeds {_MAX_TEXT_LEN} character limit.")
    photo_url = body.get("photo_url")
    if photo_url and not str(photo_url).startswith("https://"):
        raise HTTPException(status_code=422, detail="photo_url must be an https:// URL.")


@router.post("/posts", status_code=201)
async def publish_post(body: dict, session: CloudUser = Depends(get_session)):
    from app.tiers import can_use
    if not can_use("community_publish", session.tier, session.has_byok):
        raise HTTPException(status_code=402, detail="Community publishing requires Paid tier.")

    _validate_publish_body(body)

    store = _get_community_store()
    if store is None:
        raise HTTPException(
            status_code=503,
            detail="This Kiwi instance is not connected to a community database. "
            "Publishing is only available on cloud instances.",
        )

    from app.services.community.community_store import get_or_create_pseudonym
    def _get_pseudonym():
        s = Store(session.db)
        try:
            return get_or_create_pseudonym(
                store=s,
                directus_user_id=session.user_id,
                requested_name=body.get("pseudonym_name"),
            )
        finally:
            s.close()
    try:
        pseudonym = await asyncio.to_thread(_get_pseudonym)
    except ValueError as exc:
        raise HTTPException(status_code=422, detail=str(exc)) from exc

    recipe_ids = [slot["recipe_id"] for slot in body.get("slots", []) if slot.get("recipe_id")]
    from app.services.community.element_snapshot import compute_snapshot
    def _snapshot():
        s = Store(session.db)
        try:
            return compute_snapshot(recipe_ids=recipe_ids, store=s)
        finally:
            s.close()
    snapshot = await asyncio.to_thread(_snapshot)

    post_type = body.get("post_type", "plan")
    slug_title = re.sub(r"[^a-z0-9]+", "-", (body.get("title") or "plan").lower()).strip("-")
    today = datetime.now(timezone.utc).strftime("%Y-%m-%d")
    slug = f"kiwi-{_post_type_prefix(post_type)}-{pseudonym.lower().replace(' ', '')}-{today}-{slug_title}"[:120]

    from circuitforge_core.community.models import CommunityPost
    post = CommunityPost(
        slug=slug,
        pseudonym=pseudonym,
        post_type=post_type,
        published=datetime.now(timezone.utc),
        title=(body.get("title") or "Untitled")[:_MAX_TITLE_LEN],
        description=body.get("description"),
        photo_url=body.get("photo_url"),
        slots=body.get("slots", []),
        recipe_id=body.get("recipe_id"),
        recipe_name=body.get("recipe_name"),
        level=body.get("level"),
        outcome_notes=body.get("outcome_notes"),
        seasoning_score=snapshot.seasoning_score,
        richness_score=snapshot.richness_score,
        brightness_score=snapshot.brightness_score,
        depth_score=snapshot.depth_score,
        aroma_score=snapshot.aroma_score,
        structure_score=snapshot.structure_score,
        texture_profile=snapshot.texture_profile,
        dietary_tags=list(snapshot.dietary_tags),
        allergen_flags=list(snapshot.allergen_flags),
        flavor_molecules=list(snapshot.flavor_molecules),
        fat_pct=snapshot.fat_pct,
        protein_pct=snapshot.protein_pct,
        moisture_pct=snapshot.moisture_pct,
    )

    try:
        inserted = await asyncio.to_thread(store.insert_post, post)
    except sqlite3.IntegrityError as exc:
        raise HTTPException(
            status_code=409,
            detail="A post with this title already exists today. Try a different title.",
        ) from exc
    return _post_to_dict(inserted)


@router.delete("/posts/{slug}", status_code=204)
async def delete_post(slug: str, session: CloudUser = Depends(get_session)):
    store = _get_community_store()
    if store is None:
        raise HTTPException(status_code=503, detail="Community DB not available.")

    def _get_pseudonym():
        s = Store(session.db)
        try:
            return s.get_current_pseudonym(session.user_id)
        finally:
            s.close()
    pseudonym = await asyncio.to_thread(_get_pseudonym)
    if not pseudonym:
        raise HTTPException(status_code=400, detail="No pseudonym set. Cannot delete posts.")

    deleted = await asyncio.to_thread(store.delete_post, slug=slug, pseudonym=pseudonym)
    if not deleted:
        raise HTTPException(status_code=404, detail="Post not found or you are not the author.")


@router.post("/posts/{slug}/fork", status_code=201)
async def fork_post(slug: str, session: CloudUser = Depends(get_session)):
    store = _get_community_store()
    if store is None:
        raise HTTPException(status_code=503, detail="Community DB not available.")

    post = await asyncio.to_thread(store.get_post_by_slug, slug)
    if post is None:
        raise HTTPException(status_code=404, detail="Post not found.")
    if post.post_type != "plan":
        raise HTTPException(status_code=400, detail="Only plan posts can be forked as a meal plan.")

    required_slot_keys = {"day", "meal_type", "recipe_id"}
    if any(not required_slot_keys.issubset(slot) for slot in post.slots):
        raise HTTPException(status_code=400, detail="Post contains malformed slots and cannot be forked.")

    from datetime import date
    week_start = date.today().strftime("%Y-%m-%d")

    def _create_plan():
        s = Store(session.db)
        try:
            meal_types = list({slot["meal_type"] for slot in post.slots})
            plan = s.create_meal_plan(week_start=week_start, meal_types=meal_types or ["dinner"])
            for slot in post.slots:
                s.assign_recipe_to_slot(
                    plan_id=plan["id"],
                    day_of_week=slot["day"],
                    meal_type=slot["meal_type"],
                    recipe_id=slot["recipe_id"],
                )
            return plan
        finally:
            s.close()

    plan = await asyncio.to_thread(_create_plan)
    return {"plan_id": plan["id"], "week_start": plan["week_start"], "forked_from": slug}


@router.post("/posts/{slug}/fork-adapt", status_code=201)
async def fork_adapt_post(slug: str, session: CloudUser = Depends(get_session)):
    from app.tiers import can_use
    if not can_use("community_fork_adapt", session.tier, session.has_byok):
        raise HTTPException(status_code=402, detail="Fork with adaptation requires Paid tier or BYOK.")
    # Stub: full LLM adaptation deferred
    raise HTTPException(status_code=501, detail="Fork-adapt not yet implemented.")


def _post_to_dict(post) -> dict:
    return {
        "slug": post.slug,
        "pseudonym": post.pseudonym,
        "post_type": post.post_type,
        "published": post.published.isoformat() if hasattr(post.published, "isoformat") else str(post.published),
        "title": post.title,
        "description": post.description,
        "photo_url": post.photo_url,
        "slots": list(post.slots),
        "recipe_id": post.recipe_id,
        "recipe_name": post.recipe_name,
        "level": post.level,
        "outcome_notes": post.outcome_notes,
        "element_profiles": {
            "seasoning_score": post.seasoning_score,
            "richness_score": post.richness_score,
            "brightness_score": post.brightness_score,
            "depth_score": post.depth_score,
            "aroma_score": post.aroma_score,
            "structure_score": post.structure_score,
            "texture_profile": post.texture_profile,
        },
        "dietary_tags": list(post.dietary_tags),
        "allergen_flags": list(post.allergen_flags),
        "flavor_molecules": list(post.flavor_molecules),
        "fat_pct": post.fat_pct,
        "protein_pct": post.protein_pct,
        "moisture_pct": post.moisture_pct,
    }


def _post_type_prefix(post_type: str) -> str:
    return {"plan": "plan", "recipe_success": "success", "recipe_blooper": "blooper"}.get(post_type, "post")
@@ -1,11 +1,9 @@
"""Export endpoints — CSV and JSON export of user data."""
"""Export endpoints — CSV/Excel of receipt and inventory data."""
from __future__ import annotations

import asyncio
import csv
import io
import json
from datetime import datetime, timezone

from fastapi import APIRouter, Depends
from fastapi.responses import StreamingResponse

@@ -47,33 +45,3 @@ async def export_inventory_csv(store: Store = Depends(get_store)):
        media_type="text/csv",
        headers={"Content-Disposition": "attachment; filename=inventory.csv"},
    )


@router.get("/json")
async def export_full_json(store: Store = Depends(get_store)):
    """Export full pantry inventory + saved recipes as a single JSON file.

    Intended for data portability — users can import this into another
    Kiwi instance or keep it as an offline backup.
    """
    inventory, saved = await asyncio.gather(
        asyncio.to_thread(store.list_inventory),
        asyncio.to_thread(store.get_saved_recipes),
    )

    export_doc = {
        "kiwi_export": {
            "version": "1.0",
            "exported_at": datetime.now(timezone.utc).isoformat(),
            "inventory": [dict(row) for row in inventory],
            "saved_recipes": [dict(row) for row in saved],
        }
    }

    body = json.dumps(export_doc, default=str, indent=2)
    filename = f"kiwi-export-{datetime.now(timezone.utc).strftime('%Y%m%d')}.json"
    return StreamingResponse(
        iter([body]),
        media_type="application/json",
        headers={"Content-Disposition": f"attachment; filename={filename}"},
    )
@@ -1,9 +0,0 @@
"""Feedback router — provided by circuitforge-core."""
from circuitforge_core.api import make_feedback_router
from app.core.config import settings

router = make_feedback_router(
    repo="Circuit-Forge/kiwi",
    product="kiwi",
    demo_mode_fn=lambda: settings.DEMO_MODE,
)
@@ -1,103 +0,0 @@
"""Screenshot attachment endpoint for in-app feedback.

After the cf-core feedback router creates a Forgejo issue, the frontend
can call POST /feedback/attach to upload a screenshot and pin it as a
comment on that issue.

The endpoint is separate from the cf-core router so Kiwi owns it
without modifying shared infrastructure.
"""
from __future__ import annotations

import base64
import os

import requests
from fastapi import APIRouter, HTTPException
from pydantic import BaseModel, Field

router = APIRouter()

_FORGEJO_BASE = os.environ.get(
    "FORGEJO_API_URL", "https://git.opensourcesolarpunk.com/api/v1"
)
_REPO = "Circuit-Forge/kiwi"
_MAX_BYTES = 5 * 1024 * 1024  # 5 MB


class AttachRequest(BaseModel):
    issue_number: int
    filename: str = Field(default="screenshot.png", max_length=80)
    image_b64: str  # data URI or raw base64


class AttachResponse(BaseModel):
    comment_url: str


def _forgejo_headers() -> dict[str, str]:
    token = os.environ.get("FORGEJO_API_TOKEN", "")
    return {"Authorization": f"token {token}"}


def _decode_image(image_b64: str) -> tuple[bytes, str]:
    """Return (raw_bytes, mime_type) from a base64 string or data URI."""
    if image_b64.startswith("data:"):
        header, _, data = image_b64.partition(",")
        mime = header.split(";")[0].split(":")[1] if ":" in header else "image/png"
    else:
        data = image_b64
        mime = "image/png"
    return base64.b64decode(data), mime


@router.post("/attach", response_model=AttachResponse)
def attach_screenshot(payload: AttachRequest) -> AttachResponse:
    """Upload a screenshot to a Forgejo issue as a comment with embedded image.

    The image is uploaded as an issue asset, then referenced in a comment
    so it is visible inline when the issue is viewed.
    """
    token = os.environ.get("FORGEJO_API_TOKEN", "")
    if not token:
        raise HTTPException(status_code=503, detail="Feedback not configured.")

    raw_bytes, mime = _decode_image(payload.image_b64)

    if len(raw_bytes) > _MAX_BYTES:
        raise HTTPException(
            status_code=413,
            detail=f"Screenshot exceeds 5 MB limit ({len(raw_bytes) // 1024} KB received).",
        )

    # Upload image as issue asset
    asset_resp = requests.post(
        f"{_FORGEJO_BASE}/repos/{_REPO}/issues/{payload.issue_number}/assets",
        headers=_forgejo_headers(),
        files={"attachment": (payload.filename, raw_bytes, mime)},
        timeout=20,
    )
    if not asset_resp.ok:
        raise HTTPException(
            status_code=502,
            detail=f"Forgejo asset upload failed: {asset_resp.text[:200]}",
        )

    asset_url = asset_resp.json().get("browser_download_url", "")

    # Pin as a comment so the image is visible inline (embed the uploaded asset URL)
    comment_body = f"**Screenshot attached by reporter:**\n\n![{payload.filename}]({asset_url})"
    comment_resp = requests.post(
        f"{_FORGEJO_BASE}/repos/{_REPO}/issues/{payload.issue_number}/comments",
        headers={**_forgejo_headers(), "Content-Type": "application/json"},
        json={"body": comment_body},
        timeout=15,
    )
    if not comment_resp.ok:
        raise HTTPException(
            status_code=502,
            detail=f"Forgejo comment failed: {comment_resp.text[:200]}",
        )

    comment_url = comment_resp.json().get("html_url", "")
    return AttachResponse(comment_url=comment_url)
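A call against this endpoint might look roughly like the following; the base URL and issue number are placeholders, the screenshot path is hypothetical, and the `/feedback/attach` path is taken from the module docstring above:

```python
# Illustrative client usage of POST /feedback/attach (all values are placeholders).
import base64

import requests

with open("bug-screenshot.png", "rb") as fh:
    image_b64 = base64.b64encode(fh.read()).decode()

resp = requests.post(
    "http://localhost:8000/feedback/attach",  # assumed local Kiwi API base URL
    json={"issue_number": 42, "filename": "bug-screenshot.png", "image_b64": image_b64},
    timeout=30,
)
resp.raise_for_status()
print(resp.json()["comment_url"])  # link to the Forgejo comment that embeds the image
```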
@@ -1,217 +0,0 @@
"""Household management endpoints — shared pantry for Premium users."""
from __future__ import annotations

import logging
import os
import secrets
from datetime import datetime, timedelta, timezone

import sqlite3

import requests
from fastapi import APIRouter, Depends, HTTPException

from app.cloud_session import CloudUser, CLOUD_DATA_ROOT, HEIMDALL_URL, HEIMDALL_ADMIN_TOKEN, get_session
from app.db.store import Store
from app.models.schemas.household import (
    HouseholdAcceptRequest,
    HouseholdAcceptResponse,
    HouseholdCreateResponse,
    HouseholdInviteResponse,
    HouseholdMember,
    HouseholdRemoveMemberRequest,
    HouseholdStatusResponse,
    MessageResponse,
)

log = logging.getLogger(__name__)
router = APIRouter()

_INVITE_TTL_DAYS = 7
_KIWI_BASE_URL = os.environ.get("KIWI_BASE_URL", "https://menagerie.circuitforge.tech/kiwi")


def _require_premium(session: CloudUser = Depends(get_session)) -> CloudUser:
    if session.tier not in ("premium", "ultra", "local"):
        raise HTTPException(status_code=403, detail="Household features require Premium tier.")
    return session


def _require_household_owner(session: CloudUser = Depends(_require_premium)) -> CloudUser:
    if not session.is_household_owner or not session.household_id:
        raise HTTPException(status_code=403, detail="Only the household owner can perform this action.")
    return session


def _household_store(household_id: str) -> Store:
    """Open the household DB directly (used during invite acceptance).

    Sets row_factory so dict-style column access works on raw conn queries.
    """
    db_path = CLOUD_DATA_ROOT / f"household_{household_id}" / "kiwi.db"
    db_path.parent.mkdir(parents=True, exist_ok=True)
    store = Store(db_path)
    store.conn.row_factory = sqlite3.Row
    return store


def _heimdall_post(path: str, body: dict) -> dict:
    """Call Heimdall admin API. Returns response dict or raises HTTPException."""
    if not HEIMDALL_ADMIN_TOKEN:
        log.warning("HEIMDALL_ADMIN_TOKEN not set — household Heimdall call skipped")
        return {}
    try:
        resp = requests.post(
            f"{HEIMDALL_URL}{path}",
            json=body,
            headers={"Authorization": f"Bearer {HEIMDALL_ADMIN_TOKEN}"},
            timeout=10,
        )
        if not resp.ok:
            raise HTTPException(status_code=502, detail=f"Heimdall error: {resp.text}")
        return resp.json()
    except requests.RequestException as exc:
        raise HTTPException(status_code=502, detail=f"Heimdall unreachable: {exc}")


@router.post("/create", response_model=HouseholdCreateResponse)
async def create_household(session: CloudUser = Depends(_require_premium)):
    """Create a new household. The calling user becomes owner."""
    if session.household_id:
        raise HTTPException(status_code=409, detail="You are already in a household.")
    data = _heimdall_post("/admin/household/create", {"owner_user_id": session.user_id})
    household_id = data.get("household_id")
    if not household_id:
        # Heimdall returned OK but without a household_id — treat as server error.
        # Fall back to a local stub only when HEIMDALL_ADMIN_TOKEN is unset (dev mode).
        if HEIMDALL_ADMIN_TOKEN:
            raise HTTPException(status_code=500, detail="Heimdall did not return a household_id.")
        household_id = "local-household"
    return HouseholdCreateResponse(
        household_id=household_id,
        message="Household created. Share an invite link to add members.",
    )


@router.get("/status", response_model=HouseholdStatusResponse)
async def household_status(session: CloudUser = Depends(_require_premium)):
    """Return current user's household membership status."""
    if not session.household_id:
        return HouseholdStatusResponse(in_household=False)

    members: list[HouseholdMember] = []
    if HEIMDALL_ADMIN_TOKEN:
        try:
            resp = requests.get(
                f"{HEIMDALL_URL}/admin/household/{session.household_id}",
                headers={"Authorization": f"Bearer {HEIMDALL_ADMIN_TOKEN}"},
                timeout=5,
            )
            if resp.ok:
                raw = resp.json()
                for m in raw.get("members", []):
                    members.append(HouseholdMember(
                        user_id=m["user_id"],
                        joined_at=m.get("joined_at", ""),
                        is_owner=m["user_id"] == raw.get("owner_user_id"),
                    ))
        except Exception as exc:
            log.warning("Could not fetch household members: %s", exc)

    return HouseholdStatusResponse(
        in_household=True,
        household_id=session.household_id,
        is_owner=session.is_household_owner,
        members=members,
    )


@router.post("/invite", response_model=HouseholdInviteResponse)
async def create_invite(session: CloudUser = Depends(_require_household_owner)):
    """Generate a one-time invite token valid for 7 days."""
    token = secrets.token_hex(32)
    expires_at = (datetime.now(timezone.utc) + timedelta(days=_INVITE_TTL_DAYS)).isoformat()
    store = Store(session.db)
    try:
        store.conn.execute(
            """INSERT INTO household_invites (token, household_id, created_by, expires_at)
            VALUES (?, ?, ?, ?)""",
            (token, session.household_id, session.user_id, expires_at),
        )
        store.conn.commit()
    finally:
        store.close()
    invite_url = f"{_KIWI_BASE_URL}/#/join?household_id={session.household_id}&token={token}"
    return HouseholdInviteResponse(token=token, invite_url=invite_url, expires_at=expires_at)


@router.post("/accept", response_model=HouseholdAcceptResponse)
async def accept_invite(
    body: HouseholdAcceptRequest,
    session: CloudUser = Depends(get_session),
):
    """Accept a household invite. Opens the household DB directly to validate the token."""
    if session.household_id:
        raise HTTPException(status_code=409, detail="You are already in a household.")

    hh_store = _household_store(body.household_id)
    now = datetime.now(timezone.utc).isoformat()
    try:
        row = hh_store.conn.execute(
            """SELECT token, expires_at, used_at FROM household_invites
            WHERE token = ? AND household_id = ?""",
            (body.token, body.household_id),
        ).fetchone()

        if not row:
            raise HTTPException(status_code=404, detail="Invite not found.")
        if row["used_at"] is not None:
            raise HTTPException(status_code=410, detail="Invite already used.")
        if row["expires_at"] < now:
            raise HTTPException(status_code=410, detail="Invite has expired.")

        hh_store.conn.execute(
            "UPDATE household_invites SET used_at = ?, used_by = ? WHERE token = ?",
            (now, session.user_id, body.token),
        )
        hh_store.conn.commit()
    finally:
        hh_store.close()

    _heimdall_post("/admin/household/add-member", {
        "household_id": body.household_id,
        "user_id": session.user_id,
    })

    return HouseholdAcceptResponse(
        message="You have joined the household. Reload the app to switch to the shared pantry.",
        household_id=body.household_id,
    )


@router.post("/leave", response_model=MessageResponse)
async def leave_household(session: CloudUser = Depends(_require_premium)) -> MessageResponse:
    """Leave the current household (non-owners only)."""
    if not session.household_id:
        raise HTTPException(status_code=400, detail="You are not in a household.")
    if session.is_household_owner:
        raise HTTPException(status_code=400, detail="The household owner cannot leave. Delete the household instead.")
    _heimdall_post("/admin/household/remove-member", {
        "household_id": session.household_id,
        "user_id": session.user_id,
    })
    return MessageResponse(message="You have left the household. Reload the app to return to your personal pantry.")


@router.post("/remove-member", response_model=MessageResponse)
async def remove_member(
    body: HouseholdRemoveMemberRequest,
    session: CloudUser = Depends(_require_household_owner),
) -> MessageResponse:
    """Remove a member from the household (owner only)."""
    if body.user_id == session.user_id:
        raise HTTPException(status_code=400, detail="Use /leave to remove yourself.")
    _heimdall_post("/admin/household/remove-member", {
        "household_id": session.household_id,
        "user_id": body.user_id,
    })
    return MessageResponse(message=f"Member {body.user_id} removed from household.")
@ -1,185 +0,0 @@
|
|||
"""Kiwi — /api/v1/imitate/samples endpoint for Avocet Imitate tab.
|
||||
|
||||
Returns the actual assembled prompt Kiwi sends to its LLM for recipe generation,
|
||||
including the full pantry context (expiry-first ordering), dietary constraints
|
||||
(from user_settings if present), and the Level 3 format instructions.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from fastapi import APIRouter, Depends
|
||||
|
||||
from app.cloud_session import get_session, CloudUser
|
||||
from app.db.store import Store
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
_LEVEL3_FORMAT = [
|
||||
"",
|
||||
"Reply using EXACTLY this plain-text format — no markdown, no bold, no extra commentary:",
|
||||
"Title: <name of the dish>",
|
||||
"Ingredients: <comma-separated list>",
|
||||
"Directions:",
|
||||
"1. <first step>",
|
||||
"2. <second step>",
|
||||
"3. <continue for each step>",
|
||||
"Notes: <optional tips>",
|
||||
]
|
||||
|
||||
_LEVEL4_FORMAT = [
|
||||
"",
|
||||
"Reply using EXACTLY this plain-text format — no markdown, no bold:",
|
||||
"Title: <name of the dish>",
|
||||
"Ingredients: <comma-separated list>",
|
||||
"Directions:",
|
||||
"1. <first step>",
|
||||
"2. <second step>",
|
||||
"Notes: <optional tips>",
|
||||
]
|
||||
|
||||
|
||||
def _read_user_settings(store: Store) -> dict:
|
||||
"""Read all key/value pairs from user_settings table."""
|
||||
try:
|
||||
rows = store.conn.execute("SELECT key, value FROM user_settings").fetchall()
|
||||
return {r["key"]: r["value"] for r in rows}
|
||||
except Exception:
|
||||
return {}
|
||||
|
||||
|
||||
def _build_recipe_prompt(
|
||||
pantry_names: list[str],
|
||||
expiring_names: list[str],
|
||||
constraints: list[str],
|
||||
allergies: list[str],
|
||||
level: int = 3,
|
||||
) -> str:
|
||||
"""Assemble the recipe generation prompt matching Kiwi's Level 3/4 format."""
|
||||
# Expiring items first, then remaining pantry items (deduped)
|
||||
expiring_set = set(expiring_names)
|
||||
ordered = list(expiring_names) + [n for n in pantry_names if n not in expiring_set]
|
||||
|
||||
if not ordered:
|
||||
ordered = pantry_names
|
||||
|
||||
if level == 4:
|
||||
lines = [
|
||||
"Surprise me with a creative, unexpected recipe.",
|
||||
"Only use ingredients that make culinary sense together. "
|
||||
"Do not force flavoured/sweetened items (vanilla yoghurt, flavoured syrups, jam) into savoury dishes.",
|
||||
f"Ingredients available: {', '.join(ordered)}",
|
||||
]
|
||||
if constraints:
|
||||
lines.append(f"Constraints: {', '.join(constraints)}")
|
||||
if allergies:
|
||||
lines.append(f"Must NOT contain: {', '.join(allergies)}")
|
||||
lines.append("Treat any mystery ingredient as a wildcard — use your imagination.")
|
||||
lines += _LEVEL4_FORMAT
|
||||
else:
|
||||
lines = [
|
||||
"You are a creative chef. Generate a recipe using the ingredients below.",
|
||||
"IMPORTANT: When you use a pantry item, list it in Ingredients using its exact name "
|
||||
"from the pantry list. Do not add adjectives, quantities, or cooking states "
|
||||
"(e.g. use 'butter', not 'unsalted butter' or '2 tbsp butter').",
|
||||
"IMPORTANT: Only use pantry items that make culinary sense for the dish. "
|
||||
"Do NOT force flavoured/sweetened items (vanilla yoghurt, fruit yoghurt, jam, "
|
||||
"dessert sauces, flavoured syrups) into savoury dishes.",
|
||||
"IMPORTANT: Do not default to the same ingredient repeatedly across dishes. "
|
||||
"If a pantry item does not genuinely improve this specific dish, leave it out.",
|
||||
"",
|
||||
f"Pantry items: {', '.join(ordered)}",
|
||||
]
|
||||
if expiring_names:
|
||||
lines.append(
|
||||
f"Priority — use these soon (expiring): {', '.join(expiring_names)}"
|
||||
)
|
||||
if constraints:
|
||||
lines.append(f"Dietary constraints: {', '.join(constraints)}")
|
||||
if allergies:
|
||||
lines.append(f"IMPORTANT — must NOT contain: {', '.join(allergies)}")
|
||||
lines += _LEVEL3_FORMAT
|
||||
|
||||
return "\n".join(lines)
|
||||
|
||||
|
||||
@router.get("/samples")
|
||||
async def imitate_samples(
|
||||
limit: int = 5,
|
||||
level: int = 3,
|
||||
session: CloudUser = Depends(get_session),
|
||||
):
|
||||
"""Return assembled recipe generation prompts for Avocet's Imitate tab.
|
||||
|
||||
Each sample includes:
|
||||
system_prompt empty (Kiwi uses no system context)
|
||||
input_text full Level 3/4 prompt with pantry items, expiring items,
|
||||
dietary constraints, and format instructions
|
||||
output_text empty (no prior LLM output stored per-request)
|
||||
|
||||
level: 3 (structured with element biasing context) or 4 (wildcard creative)
|
||||
limit: max number of distinct prompt variants to return (varies by pantry state)
|
||||
"""
|
||||
limit = max(1, min(limit, 10))
|
||||
store = Store(session.db)
|
||||
|
||||
# Full pantry for context
|
||||
all_items = store.list_inventory()
|
||||
pantry_names = [r["product_name"] for r in all_items if r.get("product_name")]
|
||||
|
||||
# Expiring items as priority ingredients
|
||||
expiring = store.expiring_soon(days=14)
|
||||
expiring_names = [r["product_name"] for r in expiring if r.get("product_name")]
|
||||
|
||||
# Dietary constraints from user_settings (keys: constraints, allergies)
|
||||
settings = _read_user_settings(store)
|
||||
import json as _json
|
||||
try:
|
||||
constraints = _json.loads(settings.get("dietary_constraints", "[]")) or []
|
||||
except Exception:
|
||||
constraints = []
|
||||
try:
|
||||
allergies = _json.loads(settings.get("dietary_allergies", "[]")) or []
|
||||
except Exception:
|
||||
allergies = []
|
||||
|
||||
if not pantry_names:
|
||||
return {"samples": [], "total": 0, "type": f"recipe_level{level}"}
|
||||
|
||||
# Build prompt variants: one per expiring item as the "anchor" ingredient,
|
||||
# plus one general pantry prompt. Cap at limit.
|
||||
samples = []
|
||||
seen_anchors: set[str] = set()
|
||||
|
||||
for item in (expiring[:limit - 1] if expiring else []):
|
||||
anchor = item.get("product_name", "")
|
||||
if not anchor or anchor in seen_anchors:
|
||||
continue
|
||||
seen_anchors.add(anchor)
|
||||
|
||||
# Put this item first in the list for the prompt
|
||||
ordered_expiring = [anchor] + [n for n in expiring_names if n != anchor]
|
||||
prompt = _build_recipe_prompt(pantry_names, ordered_expiring, constraints, allergies, level)
|
||||
|
||||
samples.append({
|
||||
"id": item.get("id", 0),
|
||||
"anchor_item": anchor,
|
||||
"expiring_count": len(expiring_names),
|
||||
"pantry_count": len(pantry_names),
|
||||
"system_prompt": "",
|
||||
"input_text": prompt,
|
||||
"output_text": "",
|
||||
})
|
||||
|
||||
# One general prompt using all expiring as priority
|
||||
if len(samples) < limit:
|
||||
prompt = _build_recipe_prompt(pantry_names, expiring_names, constraints, allergies, level)
|
||||
samples.append({
|
||||
"id": 0,
|
||||
"anchor_item": "full pantry",
|
||||
"expiring_count": len(expiring_names),
|
||||
"pantry_count": len(pantry_names),
|
||||
"system_prompt": "",
|
||||
"input_text": prompt,
|
||||
"output_text": "",
|
||||
})
|
||||
|
||||
return {"samples": samples, "total": len(samples), "type": f"recipe_level{level}"}
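# Illustrative response shape for GET /samples (not part of this diff). Field names
# match the dicts assembled above; the concrete values are hypothetical.
example_samples_response = {
    "samples": [
        {
            "id": 12,
            "anchor_item": "spinach",
            "expiring_count": 3,
            "pantry_count": 27,
            "system_prompt": "",
            "input_text": "You are a creative chef. ...",
            "output_text": "",
        }
    ],
    "total": 1,
    "type": "recipe_level3",
}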
|
||||
|
|
@@ -3,7 +3,6 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
import uuid
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
|
@@ -12,25 +11,15 @@ import aiofiles
|
|||
from fastapi import APIRouter, Depends, File, Form, HTTPException, UploadFile, status
|
||||
from pydantic import BaseModel
|
||||
|
||||
from app.cloud_session import CloudUser, _auth_label, get_session
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
from app.cloud_session import CloudUser, get_session
|
||||
from app.db.session import get_store
|
||||
from app.services.expiration_predictor import ExpirationPredictor
|
||||
|
||||
_predictor = ExpirationPredictor()
|
||||
from app.db.store import Store
|
||||
from app.models.schemas.inventory import (
|
||||
BarcodeScanResponse,
|
||||
BulkAddByNameRequest,
|
||||
BulkAddByNameResponse,
|
||||
BulkAddItemResult,
|
||||
DiscardRequest,
|
||||
InventoryItemCreate,
|
||||
InventoryItemResponse,
|
||||
InventoryItemUpdate,
|
||||
InventoryStats,
|
||||
PartialConsumeRequest,
|
||||
ProductCreate,
|
||||
ProductResponse,
|
||||
ProductUpdate,
|
||||
|
|
@@ -41,34 +30,6 @@ from app.models.schemas.inventory import (
|
|||
router = APIRouter()
|
||||
|
||||
|
||||
# ── Helpers ───────────────────────────────────────────────────────────────────
|
||||
|
||||
def _enrich_item(item: dict) -> dict:
|
||||
"""Attach computed fields: opened_expiry_date, secondary_state/uses/warning."""
|
||||
from datetime import date, timedelta
|
||||
opened = item.get("opened_date")
|
||||
if opened:
|
||||
days = _predictor.days_after_opening(item.get("category"))
|
||||
if days is not None:
|
||||
try:
|
||||
opened_expiry = date.fromisoformat(opened) + timedelta(days=days)
|
||||
item = {**item, "opened_expiry_date": str(opened_expiry)}
|
||||
except ValueError:
|
||||
pass
|
||||
if "opened_expiry_date" not in item:
|
||||
item = {**item, "opened_expiry_date": None}
|
||||
|
||||
# Secondary use window — check sell-by date (not opened expiry)
|
||||
sec = _predictor.secondary_state(item.get("category"), item.get("expiration_date"))
|
||||
item = {
|
||||
**item,
|
||||
"secondary_state": sec["label"] if sec else None,
|
||||
"secondary_uses": sec["uses"] if sec else None,
|
||||
"secondary_warning": sec["warning"] if sec else None,
|
||||
}
|
||||
return item
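# Illustrative sketch (not part of this diff): what _enrich_item adds for an opened
# item, assuming the predictor returns a 7-day after-opening window for "dairy".
# All concrete values here are hypothetical.
raw = {"opened_date": "2025-03-01", "category": "dairy", "expiration_date": "2025-03-20"}
enriched = _enrich_item(raw)
# enriched["opened_expiry_date"] == "2025-03-08"   (opened_date + days_after_opening)
# enriched["secondary_state"] / ["secondary_uses"] / ["secondary_warning"] come from
# _predictor.secondary_state(...) and stay None when no secondary window applies.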
|
||||
|
||||
|
||||
# ── Products ──────────────────────────────────────────────────────────────────
|
||||
|
||||
@router.post("/products", response_model=ProductResponse, status_code=status.HTTP_201_CREATED)
|
||||
|
|
@@ -153,12 +114,7 @@ async def delete_product(product_id: int, store: Store = Depends(get_store)):
|
|||
# ── Inventory items ───────────────────────────────────────────────────────────
|
||||
|
||||
@router.post("/items", response_model=InventoryItemResponse, status_code=status.HTTP_201_CREATED)
|
||||
async def create_inventory_item(
|
||||
body: InventoryItemCreate,
|
||||
store: Store = Depends(get_store),
|
||||
session: CloudUser = Depends(get_session),
|
||||
):
|
||||
log.info("add_item auth=%s tier=%s product_id=%s", _auth_label(session.user_id), session.tier, body.product_id)
|
||||
async def create_inventory_item(body: InventoryItemCreate, store: Store = Depends(get_store)):
|
||||
item = await asyncio.to_thread(
|
||||
store.add_inventory_item,
|
||||
body.product_id,
|
||||
|
|
@@ -171,38 +127,7 @@ async def create_inventory_item(
|
|||
notes=body.notes,
|
||||
source=body.source,
|
||||
)
|
||||
# RETURNING * omits joined columns (product_name, barcode, category).
|
||||
# Re-fetch with the products JOIN so the response is fully populated (#99).
|
||||
full_item = await asyncio.to_thread(store.get_inventory_item, item["id"])
|
||||
return InventoryItemResponse.model_validate(full_item)
|
||||
|
||||
|
||||
@router.post("/items/bulk-add-by-name", response_model=BulkAddByNameResponse)
|
||||
async def bulk_add_items_by_name(body: BulkAddByNameRequest, store: Store = Depends(get_store)):
|
||||
"""Create pantry items from a list of ingredient names (no barcode required).
|
||||
|
||||
Uses get_or_create_product so re-adding an existing product is idempotent.
|
||||
"""
|
||||
results: list[BulkAddItemResult] = []
|
||||
for entry in body.items:
|
||||
try:
|
||||
product, _ = await asyncio.to_thread(
|
||||
store.get_or_create_product, entry.name, None, source="manual"
|
||||
)
|
||||
item = await asyncio.to_thread(
|
||||
store.add_inventory_item,
|
||||
product["id"],
|
||||
entry.location,
|
||||
quantity=entry.quantity,
|
||||
unit=entry.unit,
|
||||
source="manual",
|
||||
)
|
||||
results.append(BulkAddItemResult(name=entry.name, ok=True, item_id=item["id"]))
|
||||
except Exception as exc:
|
||||
results.append(BulkAddItemResult(name=entry.name, ok=False, error=str(exc)))
|
||||
|
||||
added = sum(1 for r in results if r.ok)
|
||||
return BulkAddByNameResponse(added=added, failed=len(results) - added, results=results)
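# Illustrative client call (not part of this diff) for the bulk-add-by-name route.
# The mount prefix and base URL are assumptions; field names follow the request/response
# schemas used above.
import httpx

resp = httpx.post(
    "http://localhost:8000/api/inventory/items/bulk-add-by-name",  # assumed prefix
    json={"items": [
        {"name": "oat milk", "location": "fridge", "quantity": 1, "unit": "count"},
        {"name": "rolled oats", "location": "pantry", "quantity": 1, "unit": "count"},
    ]},
)
# Response mirrors BulkAddByNameResponse, roughly:
# {"added": 2, "failed": 0, "results": [{"name": "oat milk", "ok": true, "item_id": 41}, ...]}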
|
||||
return InventoryItemResponse.model_validate(item)
|
||||
|
||||
|
||||
@router.get("/items", response_model=List[InventoryItemResponse])
|
||||
|
|
@@ -212,13 +137,13 @@ async def list_inventory_items(
|
|||
store: Store = Depends(get_store),
|
||||
):
|
||||
items = await asyncio.to_thread(store.list_inventory, location, item_status)
|
||||
return [InventoryItemResponse.model_validate(_enrich_item(i)) for i in items]
|
||||
return [InventoryItemResponse.model_validate(i) for i in items]
|
||||
|
||||
|
||||
@router.get("/items/expiring", response_model=List[InventoryItemResponse])
|
||||
async def get_expiring_items(days: int = 7, store: Store = Depends(get_store)):
|
||||
items = await asyncio.to_thread(store.expiring_soon, days)
|
||||
return [InventoryItemResponse.model_validate(_enrich_item(i)) for i in items]
|
||||
return [InventoryItemResponse.model_validate(i) for i in items]
|
||||
|
||||
|
||||
@router.get("/items/{item_id}", response_model=InventoryItemResponse)
|
||||
|
|
@@ -226,7 +151,7 @@ async def get_inventory_item(item_id: int, store: Store = Depends(get_store)):
|
|||
item = await asyncio.to_thread(store.get_inventory_item, item_id)
|
||||
if not item:
|
||||
raise HTTPException(status_code=404, detail="Inventory item not found")
|
||||
return InventoryItemResponse.model_validate(_enrich_item(item))
|
||||
return InventoryItemResponse.model_validate(item)
|
||||
|
||||
|
||||
@router.patch("/items/{item_id}", response_model=InventoryItemResponse)
|
||||
|
|
@@ -238,79 +163,24 @@ async def update_inventory_item(
|
|||
updates["purchase_date"] = str(updates["purchase_date"])
|
||||
if "expiration_date" in updates and updates["expiration_date"]:
|
||||
updates["expiration_date"] = str(updates["expiration_date"])
|
||||
if "opened_date" in updates and updates["opened_date"]:
|
||||
updates["opened_date"] = str(updates["opened_date"])
|
||||
item = await asyncio.to_thread(store.update_inventory_item, item_id, **updates)
|
||||
if not item:
|
||||
raise HTTPException(status_code=404, detail="Inventory item not found")
|
||||
return InventoryItemResponse.model_validate(_enrich_item(item))
|
||||
|
||||
|
||||
@router.post("/items/{item_id}/open", response_model=InventoryItemResponse)
|
||||
async def mark_item_opened(item_id: int, store: Store = Depends(get_store)):
|
||||
"""Record that this item was opened today, triggering secondary shelf-life tracking."""
|
||||
from datetime import date
|
||||
item = await asyncio.to_thread(
|
||||
store.update_inventory_item,
|
||||
item_id,
|
||||
opened_date=str(date.today()),
|
||||
)
|
||||
if not item:
|
||||
raise HTTPException(status_code=404, detail="Inventory item not found")
|
||||
return InventoryItemResponse.model_validate(_enrich_item(item))
|
||||
return InventoryItemResponse.model_validate(item)
|
||||
|
||||
|
||||
@router.post("/items/{item_id}/consume", response_model=InventoryItemResponse)
|
||||
async def consume_item(
|
||||
item_id: int,
|
||||
body: Optional[PartialConsumeRequest] = None,
|
||||
store: Store = Depends(get_store),
|
||||
):
|
||||
"""Consume an inventory item fully or partially.
|
||||
|
||||
When body.quantity is provided, decrements by that amount and only marks
|
||||
status=consumed when quantity reaches zero. Omit body to consume all.
|
||||
"""
|
||||
async def consume_item(item_id: int, store: Store = Depends(get_store)):
|
||||
from datetime import datetime, timezone
|
||||
now = datetime.now(timezone.utc).isoformat()
|
||||
if body is not None:
|
||||
item = await asyncio.to_thread(
|
||||
store.partial_consume_item, item_id, body.quantity, now
|
||||
)
|
||||
else:
|
||||
item = await asyncio.to_thread(
|
||||
store.update_inventory_item,
|
||||
item_id,
|
||||
status="consumed",
|
||||
consumed_at=now,
|
||||
)
|
||||
if not item:
|
||||
raise HTTPException(status_code=404, detail="Inventory item not found")
|
||||
return InventoryItemResponse.model_validate(_enrich_item(item))
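# Illustrative calls (not part of this diff): full versus partial consume. The URL
# prefix is an assumption; the partial body matches PartialConsumeRequest as used above.
import httpx

httpx.post("http://localhost:8000/api/inventory/items/7/consume")                          # consume all
httpx.post("http://localhost:8000/api/inventory/items/7/consume", json={"quantity": 0.5})  # decrement
# The partial form only flips status to "consumed" once the remaining quantity reaches zero.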
|
||||
|
||||
|
||||
@router.post("/items/{item_id}/discard", response_model=InventoryItemResponse)
|
||||
async def discard_item(
|
||||
item_id: int,
|
||||
body: DiscardRequest = DiscardRequest(),
|
||||
store: Store = Depends(get_store),
|
||||
):
|
||||
"""Mark an item as discarded (not used, spoiled, etc).
|
||||
|
||||
Optional reason field accepts free text or a preset label
|
||||
('not used', 'spoiled', 'excess', 'other').
|
||||
"""
|
||||
from datetime import datetime, timezone
|
||||
item = await asyncio.to_thread(
|
||||
store.update_inventory_item,
|
||||
item_id,
|
||||
status="discarded",
|
||||
consumed_at=datetime.now(timezone.utc).isoformat(),
|
||||
disposal_reason=body.reason,
|
||||
)
|
||||
if not item:
|
||||
raise HTTPException(status_code=404, detail="Inventory item not found")
|
||||
return InventoryItemResponse.model_validate(_enrich_item(item))
|
||||
return InventoryItemResponse.model_validate(item)
|
||||
|
||||
|
||||
@router.delete("/items/{item_id}", status_code=status.HTTP_204_NO_CONTENT)
|
||||
|
|
@@ -340,7 +210,6 @@ async def scan_barcode_text(
|
|||
session: CloudUser = Depends(get_session),
|
||||
):
|
||||
"""Scan a barcode from a text string (e.g. from a hardware scanner or manual entry)."""
|
||||
log.info("scan auth=%s tier=%s barcode=%r", _auth_label(session.user_id), session.tier, body.barcode)
|
||||
from app.services.openfoodfacts import OpenFoodFactsService
|
||||
from app.services.expiration_predictor import ExpirationPredictor
|
||||
|
||||
|
|
@@ -367,14 +236,10 @@ async def scan_barcode_text(
|
|||
tier=session.tier,
|
||||
has_byok=session.has_byok,
|
||||
)
|
||||
# Use OFF's pack size when detected; caller-supplied quantity is a fallback
|
||||
resolved_qty = product_info.get("pack_quantity") or body.quantity
|
||||
resolved_unit = product_info.get("pack_unit") or "count"
|
||||
inventory_item = await asyncio.to_thread(
|
||||
store.add_inventory_item,
|
||||
product["id"], body.location,
|
||||
quantity=resolved_qty,
|
||||
unit=resolved_unit,
|
||||
quantity=body.quantity,
|
||||
expiration_date=str(exp) if exp else None,
|
||||
source="barcode_scan",
|
||||
)
|
||||
|
|
@@ -382,7 +247,6 @@ async def scan_barcode_text(
|
|||
else:
|
||||
result_product = None
|
||||
|
||||
product_found = product_info is not None
|
||||
return BarcodeScanResponse(
|
||||
success=True,
|
||||
barcodes_found=1,
|
||||
|
|
@@ -392,8 +256,7 @@ async def scan_barcode_text(
|
|||
"product": result_product,
|
||||
"inventory_item": InventoryItemResponse.model_validate(inventory_item) if inventory_item else None,
|
||||
"added_to_inventory": inventory_item is not None,
|
||||
"needs_manual_entry": not product_found,
|
||||
"message": "Added to inventory" if inventory_item else "Not found in any product database — add manually",
|
||||
"message": "Added to inventory" if inventory_item else "Product not found in database",
|
||||
}],
|
||||
message="Barcode processed",
|
||||
)
|
||||
|
|
@@ -409,7 +272,6 @@ async def scan_barcode_image(
|
|||
session: CloudUser = Depends(get_session),
|
||||
):
|
||||
"""Scan a barcode from an uploaded image. Requires Phase 2 scanner integration."""
|
||||
log.info("scan_image auth=%s tier=%s", _auth_label(session.user_id), session.tier)
|
||||
temp_dir = Path("/tmp/kiwi_barcode_scans")
|
||||
temp_dir.mkdir(parents=True, exist_ok=True)
|
||||
temp_file = temp_dir / f"{uuid.uuid4()}_{file.filename}"
|
||||
|
|
@@ -452,13 +314,10 @@ async def scan_barcode_image(
|
|||
tier=session.tier,
|
||||
has_byok=session.has_byok,
|
||||
)
|
||||
resolved_qty = product_info.get("pack_quantity") or quantity
|
||||
resolved_unit = product_info.get("pack_unit") or "count"
|
||||
inventory_item = await asyncio.to_thread(
|
||||
store.add_inventory_item,
|
||||
product["id"], location,
|
||||
quantity=resolved_qty,
|
||||
unit=resolved_unit,
|
||||
quantity=quantity,
|
||||
expiration_date=str(exp) if exp else None,
|
||||
source="barcode_scan",
|
||||
)
@@ -1,325 +0,0 @@
|
|||
# app/api/endpoints/meal_plans.py
|
||||
"""Meal plan CRUD, shopping list, and prep session endpoints."""
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import json
|
||||
from datetime import date
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException
|
||||
|
||||
from app.cloud_session import CloudUser, get_session
|
||||
from app.db.session import get_store
|
||||
from app.db.store import Store
|
||||
from app.models.schemas.meal_plan import (
|
||||
CreatePlanRequest,
|
||||
GapItem,
|
||||
PlanSummary,
|
||||
PrepSessionSummary,
|
||||
PrepTaskSummary,
|
||||
ShoppingListResponse,
|
||||
SlotSummary,
|
||||
UpdatePlanRequest,
|
||||
UpdatePrepTaskRequest,
|
||||
UpsertSlotRequest,
|
||||
VALID_MEAL_TYPES,
|
||||
)
|
||||
from app.services.meal_plan.affiliates import get_retailer_links
|
||||
from app.services.meal_plan.prep_scheduler import build_prep_tasks
|
||||
from app.services.meal_plan.shopping_list import compute_shopping_list
|
||||
from app.tiers import can_use
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
# ── helpers ───────────────────────────────────────────────────────────────────
|
||||
|
||||
def _slot_summary(row: dict) -> SlotSummary:
|
||||
return SlotSummary(
|
||||
id=row["id"],
|
||||
plan_id=row["plan_id"],
|
||||
day_of_week=row["day_of_week"],
|
||||
meal_type=row["meal_type"],
|
||||
recipe_id=row.get("recipe_id"),
|
||||
recipe_title=row.get("recipe_title"),
|
||||
servings=row["servings"],
|
||||
custom_label=row.get("custom_label"),
|
||||
)
|
||||
|
||||
|
||||
def _plan_summary(plan: dict, slots: list[dict]) -> PlanSummary:
|
||||
meal_types = plan.get("meal_types") or ["dinner"]
|
||||
if isinstance(meal_types, str):
|
||||
meal_types = json.loads(meal_types)
|
||||
return PlanSummary(
|
||||
id=plan["id"],
|
||||
week_start=plan["week_start"],
|
||||
meal_types=meal_types,
|
||||
slots=[_slot_summary(s) for s in slots],
|
||||
created_at=plan["created_at"],
|
||||
)
|
||||
|
||||
|
||||
def _prep_task_summary(row: dict) -> PrepTaskSummary:
|
||||
return PrepTaskSummary(
|
||||
id=row["id"],
|
||||
recipe_id=row.get("recipe_id"),
|
||||
task_label=row["task_label"],
|
||||
duration_minutes=row.get("duration_minutes"),
|
||||
sequence_order=row["sequence_order"],
|
||||
equipment=row.get("equipment"),
|
||||
is_parallel=bool(row.get("is_parallel", False)),
|
||||
notes=row.get("notes"),
|
||||
user_edited=bool(row.get("user_edited", False)),
|
||||
)
|
||||
|
||||
|
||||
# ── plan CRUD ─────────────────────────────────────────────────────────────────
|
||||
|
||||
@router.post("/", response_model=PlanSummary)
|
||||
async def create_plan(
|
||||
req: CreatePlanRequest,
|
||||
session: CloudUser = Depends(get_session),
|
||||
store: Store = Depends(get_store),
|
||||
) -> PlanSummary:
|
||||
import sqlite3
|
||||
|
||||
# Free tier is locked to dinner-only; paid+ may configure meal types
|
||||
if can_use("meal_plan_config", session.tier):
|
||||
meal_types = [t for t in req.meal_types if t in VALID_MEAL_TYPES] or ["dinner"]
|
||||
else:
|
||||
meal_types = ["dinner"]
|
||||
|
||||
try:
|
||||
plan = await asyncio.to_thread(store.create_meal_plan, str(req.week_start), meal_types)
|
||||
except sqlite3.IntegrityError:
|
||||
raise HTTPException(
|
||||
status_code=409,
|
||||
detail=f"A meal plan for the week of {req.week_start} already exists.",
|
||||
)
|
||||
slots = await asyncio.to_thread(store.get_plan_slots, plan["id"])
|
||||
return _plan_summary(plan, slots)
|
||||
|
||||
|
||||
@router.get("/", response_model=list[PlanSummary])
|
||||
async def list_plans(
|
||||
session: CloudUser = Depends(get_session),
|
||||
store: Store = Depends(get_store),
|
||||
) -> list[PlanSummary]:
|
||||
plans = await asyncio.to_thread(store.list_meal_plans)
|
||||
result = []
|
||||
for p in plans:
|
||||
slots = await asyncio.to_thread(store.get_plan_slots, p["id"])
|
||||
result.append(_plan_summary(p, slots))
|
||||
return result
|
||||
|
||||
|
||||
@router.patch("/{plan_id}", response_model=PlanSummary)
|
||||
async def update_plan(
|
||||
plan_id: int,
|
||||
req: UpdatePlanRequest,
|
||||
session: CloudUser = Depends(get_session),
|
||||
store: Store = Depends(get_store),
|
||||
) -> PlanSummary:
|
||||
plan = await asyncio.to_thread(store.get_meal_plan, plan_id)
|
||||
if plan is None:
|
||||
raise HTTPException(status_code=404, detail="Plan not found.")
|
||||
# Free tier stays dinner-only; paid+ may add meal types
|
||||
if can_use("meal_plan_config", session.tier):
|
||||
meal_types = [t for t in req.meal_types if t in VALID_MEAL_TYPES] or ["dinner"]
|
||||
else:
|
||||
meal_types = ["dinner"]
|
||||
updated = await asyncio.to_thread(store.update_meal_plan_types, plan_id, meal_types)
|
||||
if updated is None:
|
||||
raise HTTPException(status_code=404, detail="Plan not found.")
|
||||
slots = await asyncio.to_thread(store.get_plan_slots, plan_id)
|
||||
return _plan_summary(updated, slots)
|
||||
|
||||
|
||||
@router.get("/{plan_id}", response_model=PlanSummary)
|
||||
async def get_plan(
|
||||
plan_id: int,
|
||||
session: CloudUser = Depends(get_session),
|
||||
store: Store = Depends(get_store),
|
||||
) -> PlanSummary:
|
||||
plan = await asyncio.to_thread(store.get_meal_plan, plan_id)
|
||||
if plan is None:
|
||||
raise HTTPException(status_code=404, detail="Plan not found.")
|
||||
slots = await asyncio.to_thread(store.get_plan_slots, plan_id)
|
||||
return _plan_summary(plan, slots)
|
||||
|
||||
|
||||
# ── slots ─────────────────────────────────────────────────────────────────────
|
||||
|
||||
@router.put("/{plan_id}/slots/{day_of_week}/{meal_type}", response_model=SlotSummary)
|
||||
async def upsert_slot(
|
||||
plan_id: int,
|
||||
day_of_week: int,
|
||||
meal_type: str,
|
||||
req: UpsertSlotRequest,
|
||||
session: CloudUser = Depends(get_session),
|
||||
store: Store = Depends(get_store),
|
||||
) -> SlotSummary:
|
||||
if day_of_week < 0 or day_of_week > 6:
|
||||
raise HTTPException(status_code=422, detail="day_of_week must be 0-6.")
|
||||
if meal_type not in VALID_MEAL_TYPES:
|
||||
raise HTTPException(status_code=422, detail=f"Invalid meal_type '{meal_type}'.")
|
||||
plan = await asyncio.to_thread(store.get_meal_plan, plan_id)
|
||||
if plan is None:
|
||||
raise HTTPException(status_code=404, detail="Plan not found.")
|
||||
row = await asyncio.to_thread(
|
||||
store.upsert_slot,
|
||||
plan_id, day_of_week, meal_type,
|
||||
req.recipe_id, req.servings, req.custom_label,
|
||||
)
|
||||
return _slot_summary(row)
|
||||
|
||||
|
||||
@router.delete("/{plan_id}/slots/{slot_id}", status_code=204)
|
||||
async def delete_slot(
|
||||
plan_id: int,
|
||||
slot_id: int,
|
||||
session: CloudUser = Depends(get_session),
|
||||
store: Store = Depends(get_store),
|
||||
) -> None:
|
||||
plan = await asyncio.to_thread(store.get_meal_plan, plan_id)
|
||||
if plan is None:
|
||||
raise HTTPException(status_code=404, detail="Plan not found.")
|
||||
await asyncio.to_thread(store.delete_slot, slot_id)
|
||||
|
||||
|
||||
# ── shopping list ─────────────────────────────────────────────────────────────
|
||||
|
||||
@router.get("/{plan_id}/shopping-list", response_model=ShoppingListResponse)
|
||||
async def get_shopping_list(
|
||||
plan_id: int,
|
||||
session: CloudUser = Depends(get_session),
|
||||
store: Store = Depends(get_store),
|
||||
) -> ShoppingListResponse:
|
||||
plan = await asyncio.to_thread(store.get_meal_plan, plan_id)
|
||||
if plan is None:
|
||||
raise HTTPException(status_code=404, detail="Plan not found.")
|
||||
|
||||
recipes = await asyncio.to_thread(store.get_plan_recipes, plan_id)
|
||||
inventory = await asyncio.to_thread(store.list_inventory)
|
||||
|
||||
gaps, covered = compute_shopping_list(recipes, inventory)
|
||||
|
||||
# Enrich gap items with retailer links
|
||||
def _to_schema(item, enrich: bool) -> GapItem:
|
||||
links = get_retailer_links(item.ingredient_name) if enrich else []
|
||||
return GapItem(
|
||||
ingredient_name=item.ingredient_name,
|
||||
needed_raw=item.needed_raw,
|
||||
have_quantity=item.have_quantity,
|
||||
have_unit=item.have_unit,
|
||||
covered=item.covered,
|
||||
retailer_links=links,
|
||||
)
|
||||
|
||||
gap_items = [_to_schema(g, enrich=True) for g in gaps]
|
||||
covered_items = [_to_schema(c, enrich=False) for c in covered]
|
||||
|
||||
disclosure = (
|
||||
"Some links may be affiliate links. Purchases through them support Kiwi development."
|
||||
if gap_items else None
|
||||
)
|
||||
|
||||
return ShoppingListResponse(
|
||||
plan_id=plan_id,
|
||||
gap_items=gap_items,
|
||||
covered_items=covered_items,
|
||||
disclosure=disclosure,
|
||||
)
|
||||
|
||||
|
||||
# ── prep session ──────────────────────────────────────────────────────────────
|
||||
|
||||
@router.get("/{plan_id}/prep-session", response_model=PrepSessionSummary)
|
||||
async def get_prep_session(
|
||||
plan_id: int,
|
||||
session: CloudUser = Depends(get_session),
|
||||
store: Store = Depends(get_store),
|
||||
) -> PrepSessionSummary:
|
||||
plan = await asyncio.to_thread(store.get_meal_plan, plan_id)
|
||||
if plan is None:
|
||||
raise HTTPException(status_code=404, detail="Plan not found.")
|
||||
prep_session = await asyncio.to_thread(store.get_prep_session_for_plan, plan_id)
|
||||
if prep_session is None:
|
||||
raise HTTPException(status_code=404, detail="No prep session for this plan.")
|
||||
raw_tasks = await asyncio.to_thread(store.get_prep_tasks, prep_session["id"])
|
||||
return PrepSessionSummary(
|
||||
id=prep_session["id"],
|
||||
plan_id=plan_id,
|
||||
scheduled_date=prep_session["scheduled_date"],
|
||||
status=prep_session["status"],
|
||||
tasks=[_prep_task_summary(t) for t in raw_tasks],
|
||||
)
|
||||
|
||||
|
||||
@router.post("/{plan_id}/prep-session", response_model=PrepSessionSummary)
|
||||
async def create_prep_session(
|
||||
plan_id: int,
|
||||
session: CloudUser = Depends(get_session),
|
||||
store: Store = Depends(get_store),
|
||||
) -> PrepSessionSummary:
|
||||
plan = await asyncio.to_thread(store.get_meal_plan, plan_id)
|
||||
if plan is None:
|
||||
raise HTTPException(status_code=404, detail="Plan not found.")
|
||||
|
||||
slots = await asyncio.to_thread(store.get_plan_slots, plan_id)
|
||||
recipes = await asyncio.to_thread(store.get_plan_recipes, plan_id)
|
||||
prep_tasks = build_prep_tasks(slots=slots, recipes=recipes)
|
||||
|
||||
scheduled_date = date.today().isoformat()
|
||||
prep_session = await asyncio.to_thread(
|
||||
store.create_prep_session, plan_id, scheduled_date
|
||||
)
|
||||
session_id = prep_session["id"]
|
||||
|
||||
task_dicts = [
|
||||
{
|
||||
"recipe_id": t.recipe_id,
|
||||
"slot_id": t.slot_id,
|
||||
"task_label": t.task_label,
|
||||
"duration_minutes": t.duration_minutes,
|
||||
"sequence_order": t.sequence_order,
|
||||
"equipment": t.equipment,
|
||||
"is_parallel": t.is_parallel,
|
||||
"notes": t.notes,
|
||||
}
|
||||
for t in prep_tasks
|
||||
]
|
||||
inserted = await asyncio.to_thread(store.bulk_insert_prep_tasks, session_id, task_dicts)
|
||||
|
||||
return PrepSessionSummary(
|
||||
id=prep_session["id"],
|
||||
plan_id=prep_session["plan_id"],
|
||||
scheduled_date=prep_session["scheduled_date"],
|
||||
status=prep_session["status"],
|
||||
tasks=[_prep_task_summary(r) for r in inserted],
|
||||
)
|
||||
|
||||
|
||||
@router.patch(
|
||||
"/{plan_id}/prep-session/tasks/{task_id}",
|
||||
response_model=PrepTaskSummary,
|
||||
)
|
||||
async def update_prep_task(
|
||||
plan_id: int,
|
||||
task_id: int,
|
||||
req: UpdatePrepTaskRequest,
|
||||
session: CloudUser = Depends(get_session),
|
||||
store: Store = Depends(get_store),
|
||||
) -> PrepTaskSummary:
|
||||
updated = await asyncio.to_thread(
|
||||
store.update_prep_task,
|
||||
task_id,
|
||||
duration_minutes=req.duration_minutes,
|
||||
sequence_order=req.sequence_order,
|
||||
notes=req.notes,
|
||||
equipment=req.equipment,
|
||||
)
|
||||
if updated is None:
|
||||
raise HTTPException(status_code=404, detail="Task not found.")
|
||||
return _prep_task_summary(updated)
|
||||
|
|
@@ -219,7 +219,7 @@ def _commit_items(
|
|||
receipt_id=receipt_id,
|
||||
purchase_date=str(purchase_date) if purchase_date else None,
|
||||
expiration_date=str(exp) if exp else None,
|
||||
source="receipt",
|
||||
source="receipt_ocr",
|
||||
)
|
||||
|
||||
created.append(ApprovedInventoryItem(
@@ -1,27 +0,0 @@
|
|||
"""Proxy endpoint: exposes cf-orch call budget to the Kiwi frontend.
|
||||
|
||||
Only lifetime/founders users have a license_key — subscription and free
|
||||
users receive null (no budget UI shown).
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from fastapi import APIRouter, Depends
|
||||
|
||||
from app.cloud_session import CloudUser, get_session
|
||||
from app.services.heimdall_orch import get_orch_usage
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
@router.get("")
|
||||
async def orch_usage_endpoint(
|
||||
session: CloudUser = Depends(get_session),
|
||||
) -> dict | None:
|
||||
"""Return the current period's orch usage for the authenticated user.
|
||||
|
||||
Returns null if the user has no lifetime/founders license key (i.e. they
|
||||
are on a subscription or free plan — no budget cap applies to them).
|
||||
"""
|
||||
if session.license_key is None:
|
||||
return None
|
||||
return get_orch_usage(session.license_key, "kiwi")
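# Illustrative client-side handling (not part of this diff). The mounted path is an
# assumption; the contract that matters is the null case for users without a
# lifetime/founders license key.
import httpx

usage = httpx.get("http://localhost:8000/api/orch-usage").json()  # assumed mount path
if usage is None:
    pass  # subscription/free user: hide the budget UI
else:
    pass  # lifetime/founders user: render whatever get_orch_usage returned for this period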
|
||||
|
|
@@ -42,11 +42,9 @@ async def upload_receipt(
|
|||
)
|
||||
# Only queue OCR if the feature is enabled server-side AND the user's tier allows it.
|
||||
# Check tier here, not inside the background task — once dispatched it can't be cancelled.
|
||||
# Pass session.db (a Path) rather than store — the store dependency closes before
|
||||
# background tasks run, so the task opens its own store from the DB path.
|
||||
ocr_allowed = settings.ENABLE_OCR and can_use("receipt_ocr", session.tier, session.has_byok)
|
||||
if ocr_allowed:
|
||||
background_tasks.add_task(_process_receipt_ocr, receipt["id"], saved, session.db)
|
||||
background_tasks.add_task(_process_receipt_ocr, receipt["id"], saved, store)
|
||||
return ReceiptResponse.model_validate(receipt)
|
||||
|
||||
|
||||
|
|
@@ -66,7 +64,7 @@ async def upload_receipts_batch(
|
|||
store.create_receipt, file.filename, str(saved)
|
||||
)
|
||||
if ocr_allowed:
|
||||
background_tasks.add_task(_process_receipt_ocr, receipt["id"], saved, session.db)
|
||||
background_tasks.add_task(_process_receipt_ocr, receipt["id"], saved, store)
|
||||
results.append(ReceiptResponse.model_validate(receipt))
|
||||
return results
|
||||
|
||||
|
|
@@ -99,13 +97,8 @@ async def get_receipt_quality(receipt_id: int, store: Store = Depends(get_store)
|
|||
return QualityAssessment.model_validate(qa)
|
||||
|
||||
|
||||
async def _process_receipt_ocr(receipt_id: int, image_path: Path, db_path: Path) -> None:
|
||||
"""Background task: run OCR pipeline on an uploaded receipt.
|
||||
|
||||
Accepts db_path (not a Store instance) because FastAPI closes the request-scoped
|
||||
store before background tasks execute. This task owns its store lifecycle.
|
||||
"""
|
||||
store = Store(db_path)
|
||||
async def _process_receipt_ocr(receipt_id: int, image_path: Path, store: Store) -> None:
|
||||
"""Background task: run OCR pipeline on an uploaded receipt."""
|
||||
try:
|
||||
await asyncio.to_thread(store.update_receipt_status, receipt_id, "processing")
|
||||
from app.services.receipt_service import ReceiptService
|
||||
|
|
@@ -115,5 +108,3 @@ async def _process_receipt_ocr(receipt_id: int, image_path: Path, db_path: Path)
|
|||
await asyncio.to_thread(
|
||||
store.update_receipt_status, receipt_id, "error", str(exc)
|
||||
)
|
||||
finally:
|
||||
store.close()
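# Minimal sketch (not part of this diff) of the pattern used above: hand a plain DB
# path to a FastAPI background task and let the task open/close its own Store, since
# the request-scoped store dependency is closed before background tasks run.
# The route and helper names here are illustrative only.
from pathlib import Path
from fastapi import BackgroundTasks
from app.db.store import Store

async def upload_example(background_tasks: BackgroundTasks, db_path: Path) -> None:
    background_tasks.add_task(_work_example, db_path)   # pass the path, not a live Store

async def _work_example(db_path: Path) -> None:
    store = Store(db_path)   # task-owned connection
    try:
        ...                  # do the slow work here
    finally:
        store.close()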
@@ -1,43 +1,15 @@
|
|||
"""Recipe suggestion and browser endpoints."""
|
||||
"""Recipe suggestion endpoints."""
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
from pathlib import Path
|
||||
from typing import Annotated
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query
|
||||
from fastapi import APIRouter, Depends, HTTPException
|
||||
|
||||
from app.cloud_session import CloudUser, _auth_label, get_session
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
from app.db.session import get_store
|
||||
from app.cloud_session import CloudUser, get_session
|
||||
from app.db.store import Store
|
||||
from app.models.schemas.recipe import (
|
||||
AssemblyTemplateOut,
|
||||
BuildRequest,
|
||||
RecipeJobStatus,
|
||||
RecipeRequest,
|
||||
RecipeResult,
|
||||
RecipeSuggestion,
|
||||
RoleCandidatesResponse,
|
||||
)
|
||||
from app.services.recipe.assembly_recipes import (
|
||||
build_from_selection,
|
||||
get_role_candidates,
|
||||
get_templates_for_api,
|
||||
)
|
||||
from app.services.recipe.browser_domains import (
|
||||
DOMAINS,
|
||||
category_has_subcategories,
|
||||
get_category_names,
|
||||
get_domain_labels,
|
||||
get_keywords_for_category,
|
||||
get_keywords_for_subcategory,
|
||||
get_subcategory_names,
|
||||
)
|
||||
from app.models.schemas.recipe import RecipeRequest, RecipeResult
|
||||
from app.services.recipe.recipe_engine import RecipeEngine
|
||||
from app.services.heimdall_orch import check_orch_budget
|
||||
from app.tiers import can_use
|
||||
|
||||
router = APIRouter()
|
||||
|
|
@@ -58,55 +30,13 @@ def _suggest_in_thread(db_path: Path, req: RecipeRequest) -> RecipeResult:
|
|||
store.close()
|
||||
|
||||
|
||||
async def _enqueue_recipe_job(session: CloudUser, req: RecipeRequest):
|
||||
"""Queue an async recipe_llm job and return 202 with job_id.
|
||||
|
||||
Falls back to synchronous generation in CLOUD_MODE (scheduler polls only
|
||||
the shared settings DB, not per-user DBs — see snipe#45 / kiwi backlog).
|
||||
"""
|
||||
import json
|
||||
import uuid
|
||||
from fastapi.responses import JSONResponse
|
||||
from app.cloud_session import CLOUD_MODE
|
||||
from app.tasks.runner import insert_task
|
||||
|
||||
if CLOUD_MODE:
|
||||
log.warning("recipe_llm async jobs not supported in CLOUD_MODE — falling back to sync")
|
||||
result = await asyncio.to_thread(_suggest_in_thread, session.db, req)
|
||||
return result
|
||||
|
||||
job_id = f"rec_{uuid.uuid4().hex}"
|
||||
|
||||
def _create(db_path: Path) -> int:
|
||||
store = Store(db_path)
|
||||
try:
|
||||
row = store.create_recipe_job(job_id, session.user_id, req.model_dump_json())
|
||||
return row["id"]
|
||||
finally:
|
||||
store.close()
|
||||
|
||||
int_id = await asyncio.to_thread(_create, session.db)
|
||||
params_json = json.dumps({"job_id": job_id})
|
||||
task_id, is_new = insert_task(session.db, "recipe_llm", int_id, params=params_json)
|
||||
if is_new:
|
||||
from app.tasks.scheduler import get_scheduler
|
||||
get_scheduler(session.db).enqueue(task_id, "recipe_llm", int_id, params_json)
|
||||
|
||||
return JSONResponse(content={"job_id": job_id, "status": "queued"}, status_code=202)
|
||||
|
||||
|
||||
@router.post("/suggest")
|
||||
@router.post("/suggest", response_model=RecipeResult)
|
||||
async def suggest_recipes(
|
||||
req: RecipeRequest,
|
||||
async_mode: bool = Query(default=False, alias="async"),
|
||||
session: CloudUser = Depends(get_session),
|
||||
store: Store = Depends(get_store),
|
||||
):
|
||||
log.info("recipes auth=%s tier=%s level=%s", _auth_label(session.user_id), session.tier, req.level)
|
||||
) -> RecipeResult:
|
||||
# Inject session-authoritative tier/byok immediately — client-supplied values are ignored.
|
||||
# Also read stored unit_system preference; default to metric if not set.
|
||||
unit_system = store.get_setting("unit_system") or "metric"
|
||||
req = req.model_copy(update={"tier": session.tier, "has_byok": session.has_byok, "unit_system": unit_system})
|
||||
req = req.model_copy(update={"tier": session.tier, "has_byok": session.has_byok})
|
||||
if req.level == 4 and not req.wildcard_confirmed:
|
||||
raise HTTPException(
|
||||
status_code=400,
|
||||
|
|
@@ -119,279 +49,7 @@ async def suggest_recipes(
|
|||
)
|
||||
if req.style_id and not can_use("style_picker", req.tier):
|
||||
raise HTTPException(status_code=403, detail="Style picker requires Paid tier.")
|
||||
|
||||
# Orch budget check for lifetime/founders keys — downgrade to L2 (local) if exhausted.
|
||||
# Subscription and local/BYOK users skip this check entirely.
|
||||
orch_fallback = False
|
||||
if (
|
||||
req.level in (3, 4)
|
||||
and session.license_key is not None
|
||||
and not session.has_byok
|
||||
and session.tier != "local"
|
||||
):
|
||||
budget = check_orch_budget(session.license_key, "kiwi")
|
||||
if not budget.get("allowed", True):
|
||||
req = req.model_copy(update={"level": 2})
|
||||
orch_fallback = True
|
||||
|
||||
if req.level in (3, 4) and async_mode:
|
||||
return await _enqueue_recipe_job(session, req)
|
||||
|
||||
result = await asyncio.to_thread(_suggest_in_thread, session.db, req)
|
||||
if orch_fallback:
|
||||
result = result.model_copy(update={"orch_fallback": True})
|
||||
return result
|
||||
|
||||
|
||||
@router.get("/jobs/{job_id}", response_model=RecipeJobStatus)
|
||||
async def get_recipe_job_status(
|
||||
job_id: str,
|
||||
session: CloudUser = Depends(get_session),
|
||||
) -> RecipeJobStatus:
|
||||
"""Poll the status of an async recipe generation job.
|
||||
|
||||
Returns 404 when job_id is unknown or belongs to a different user.
|
||||
On status='done' with suggestions=[], the LLM returned empty — client
|
||||
should show a 'no recipe generated, try again' message.
|
||||
"""
|
||||
def _get(db_path: Path) -> dict | None:
|
||||
store = Store(db_path)
|
||||
try:
|
||||
return store.get_recipe_job(job_id, session.user_id)
|
||||
finally:
|
||||
store.close()
|
||||
|
||||
row = await asyncio.to_thread(_get, session.db)
|
||||
if row is None:
|
||||
raise HTTPException(status_code=404, detail="Job not found.")
|
||||
|
||||
result = None
|
||||
if row["status"] == "done" and row["result"]:
|
||||
result = RecipeResult.model_validate_json(row["result"])
|
||||
|
||||
return RecipeJobStatus(
|
||||
job_id=row["job_id"],
|
||||
status=row["status"],
|
||||
result=result,
|
||||
error=row["error"],
|
||||
)
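# Illustrative client flow (not part of this diff): queue an async Level 3/4 job and
# poll it. The URL prefix and the minimal request body are assumptions — RecipeRequest
# has more fields than shown here, and in CLOUD_MODE the POST falls back to a
# synchronous RecipeResult instead of a 202.
import time
import httpx

queued = httpx.post(
    "http://localhost:8000/api/recipes/suggest",
    params={"async": "true"},
    json={"level": 3},
).json()  # 202 -> {"job_id": "rec_...", "status": "queued"}

while True:
    job = httpx.get(f"http://localhost:8000/api/recipes/jobs/{queued['job_id']}").json()
    if job["status"] == "done" or job.get("error"):
        break
    time.sleep(2)
# job["result"] holds the RecipeResult on success; empty suggestions mean "try again".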
|
||||
|
||||
|
||||
@router.get("/browse/domains")
|
||||
async def list_browse_domains(
|
||||
session: CloudUser = Depends(get_session),
|
||||
) -> list[dict]:
|
||||
"""Return available domain schemas for the recipe browser."""
|
||||
return get_domain_labels()
|
||||
|
||||
|
||||
@router.get("/browse/{domain}")
|
||||
async def list_browse_categories(
|
||||
domain: str,
|
||||
session: CloudUser = Depends(get_session),
|
||||
) -> list[dict]:
|
||||
"""Return categories with recipe counts for a given domain."""
|
||||
if domain not in DOMAINS:
|
||||
raise HTTPException(status_code=404, detail=f"Unknown domain '{domain}'.")
|
||||
|
||||
cat_names = get_category_names(domain)
|
||||
keywords_by_category = {cat: get_keywords_for_category(domain, cat) for cat in cat_names}
|
||||
has_subs = {cat: category_has_subcategories(domain, cat) for cat in cat_names}
|
||||
|
||||
def _get(db_path: Path) -> list[dict]:
|
||||
store = Store(db_path)
|
||||
try:
|
||||
return store.get_browser_categories(domain, keywords_by_category, has_subs)
|
||||
finally:
|
||||
store.close()
|
||||
|
||||
return await asyncio.to_thread(_get, session.db)
|
||||
|
||||
|
||||
@router.get("/browse/{domain}/{category}/subcategories")
|
||||
async def list_browse_subcategories(
|
||||
domain: str,
|
||||
category: str,
|
||||
session: CloudUser = Depends(get_session),
|
||||
) -> list[dict]:
|
||||
"""Return [{subcategory, recipe_count}] for a category that supports subcategories."""
|
||||
if domain not in DOMAINS:
|
||||
raise HTTPException(status_code=404, detail=f"Unknown domain '{domain}'.")
|
||||
if not category_has_subcategories(domain, category):
|
||||
return []
|
||||
|
||||
subcat_names = get_subcategory_names(domain, category)
|
||||
keywords_by_subcat = {
|
||||
sub: get_keywords_for_subcategory(domain, category, sub)
|
||||
for sub in subcat_names
|
||||
}
|
||||
|
||||
def _get(db_path: Path) -> list[dict]:
|
||||
store = Store(db_path)
|
||||
try:
|
||||
return store.get_browser_subcategories(domain, keywords_by_subcat)
|
||||
finally:
|
||||
store.close()
|
||||
|
||||
return await asyncio.to_thread(_get, session.db)
|
||||
|
||||
|
||||
@router.get("/browse/{domain}/{category}")
|
||||
async def browse_recipes(
|
||||
domain: str,
|
||||
category: str,
|
||||
page: Annotated[int, Query(ge=1)] = 1,
|
||||
page_size: Annotated[int, Query(ge=1, le=100)] = 20,
|
||||
pantry_items: Annotated[str | None, Query()] = None,
|
||||
subcategory: Annotated[str | None, Query()] = None,
|
||||
q: Annotated[str | None, Query(max_length=200)] = None,
|
||||
sort: Annotated[str, Query(pattern="^(default|alpha|alpha_desc)$")] = "default",
|
||||
session: CloudUser = Depends(get_session),
|
||||
) -> dict:
|
||||
"""Return a paginated list of recipes for a domain/category.
|
||||
|
||||
Pass pantry_items as a comma-separated string to receive match_pct badges.
|
||||
Pass subcategory to narrow within a category that has subcategories.
|
||||
Pass q to filter by title substring. Pass sort for ordering (default/alpha/alpha_desc).
|
||||
"""
|
||||
if domain not in DOMAINS:
|
||||
raise HTTPException(status_code=404, detail=f"Unknown domain '{domain}'.")
|
||||
|
||||
if category == "_all":
|
||||
keywords = None # unfiltered browse
|
||||
elif subcategory:
|
||||
keywords = get_keywords_for_subcategory(domain, category, subcategory)
|
||||
if not keywords:
|
||||
raise HTTPException(
|
||||
status_code=404,
|
||||
detail=f"Unknown subcategory '{subcategory}' in '{category}'.",
|
||||
)
|
||||
else:
|
||||
keywords = get_keywords_for_category(domain, category)
|
||||
if not keywords:
|
||||
raise HTTPException(
|
||||
status_code=404,
|
||||
detail=f"Unknown category '{category}' in domain '{domain}'.",
|
||||
)
|
||||
|
||||
pantry_list = (
|
||||
[p.strip() for p in pantry_items.split(",") if p.strip()]
|
||||
if pantry_items
|
||||
else None
|
||||
)
|
||||
|
||||
def _browse(db_path: Path) -> dict:
|
||||
store = Store(db_path)
|
||||
try:
|
||||
result = store.browse_recipes(
|
||||
keywords=keywords,
|
||||
page=page,
|
||||
page_size=page_size,
|
||||
pantry_items=pantry_list,
|
||||
q=q or None,
|
||||
sort=sort,
|
||||
)
|
||||
store.log_browser_telemetry(
|
||||
domain=domain,
|
||||
category=category,
|
||||
page=page,
|
||||
result_count=result["total"],
|
||||
)
|
||||
return result
|
||||
finally:
|
||||
store.close()
|
||||
|
||||
return await asyncio.to_thread(_browse, session.db)
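# Illustrative request (not part of this diff) showing the query parameters described
# in the docstring above. The domain/category values and URL prefix are hypothetical.
import httpx

page = httpx.get(
    "http://localhost:8000/api/recipes/browse/dinner/pasta",
    params={
        "page": 1,
        "page_size": 20,
        "pantry_items": "tomato,basil,garlic",   # enables match_pct badges
        "q": "lemon",                            # title substring filter
        "sort": "alpha",
    },
).json()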
|
||||
|
||||
|
||||
@router.get("/templates", response_model=list[AssemblyTemplateOut])
|
||||
async def list_assembly_templates() -> list[dict]:
|
||||
"""Return all 13 assembly templates with ordered role sequences.
|
||||
|
||||
Cache-friendly: static data, no per-user state.
|
||||
"""
|
||||
return get_templates_for_api()
|
||||
|
||||
|
||||
@router.get("/template-candidates", response_model=RoleCandidatesResponse)
|
||||
async def get_template_role_candidates(
|
||||
template_id: str = Query(..., description="Template slug, e.g. 'burrito_taco'"),
|
||||
role: str = Query(..., description="Role display name, e.g. 'protein'"),
|
||||
prior_picks: str = Query(default="", description="Comma-separated prior selections"),
|
||||
session: CloudUser = Depends(get_session),
|
||||
) -> dict:
|
||||
"""Return pantry-matched candidates for one wizard step."""
|
||||
def _get(db_path: Path) -> dict:
|
||||
store = Store(db_path)
|
||||
try:
|
||||
items = store.list_inventory(status="available")
|
||||
pantry_set = {
|
||||
item["product_name"]
|
||||
for item in items
|
||||
if item.get("product_name")
|
||||
}
|
||||
pantry_list = list(pantry_set)
|
||||
prior = [p.strip() for p in prior_picks.split(",") if p.strip()]
|
||||
profile_index = store.get_element_profiles(pantry_list + prior)
|
||||
return get_role_candidates(
|
||||
template_slug=template_id,
|
||||
role_display=role,
|
||||
pantry_set=pantry_set,
|
||||
prior_picks=prior,
|
||||
profile_index=profile_index,
|
||||
)
|
||||
finally:
|
||||
store.close()
|
||||
|
||||
return await asyncio.to_thread(_get, session.db)
|
||||
|
||||
|
||||
@router.post("/build", response_model=RecipeSuggestion)
|
||||
async def build_recipe(
|
||||
req: BuildRequest,
|
||||
session: CloudUser = Depends(get_session),
|
||||
) -> RecipeSuggestion:
|
||||
"""Build a recipe from explicit role selections."""
|
||||
def _build(db_path: Path) -> RecipeSuggestion | None:
|
||||
store = Store(db_path)
|
||||
try:
|
||||
items = store.list_inventory(status="available")
|
||||
pantry_set = {
|
||||
item["product_name"]
|
||||
for item in items
|
||||
if item.get("product_name")
|
||||
}
|
||||
suggestion = build_from_selection(
|
||||
template_slug=req.template_id,
|
||||
role_overrides=req.role_overrides,
|
||||
pantry_set=pantry_set,
|
||||
)
|
||||
if suggestion is None:
|
||||
return None
|
||||
# Persist to recipes table so the result can be saved/bookmarked.
|
||||
# external_id encodes template + selections for stable dedup.
|
||||
import hashlib as _hl, json as _js
|
||||
sel_hash = _hl.md5(
|
||||
_js.dumps(req.role_overrides, sort_keys=True).encode()
|
||||
).hexdigest()[:8]
|
||||
external_id = f"assembly:{req.template_id}:{sel_hash}"
|
||||
real_id = store.upsert_built_recipe(
|
||||
external_id=external_id,
|
||||
title=suggestion.title,
|
||||
ingredients=suggestion.matched_ingredients,
|
||||
directions=suggestion.directions,
|
||||
)
|
||||
return suggestion.model_copy(update={"id": real_id})
|
||||
finally:
|
||||
store.close()
|
||||
|
||||
result = await asyncio.to_thread(_build, session.db)
|
||||
if result is None:
|
||||
raise HTTPException(
|
||||
status_code=404,
|
||||
detail="Template not found or required ingredient missing.",
|
||||
)
|
||||
return result
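# Illustrative sketch (not part of this diff) of the dedup key built above: identical
# template + selections always hash to the same external_id, so rebuilding the same
# combination upserts rather than inserting a duplicate recipe row. Values are hypothetical.
import hashlib
import json

overrides = {"protein": "black beans", "base": "corn tortilla"}
sel_hash = hashlib.md5(json.dumps(overrides, sort_keys=True).encode()).hexdigest()[:8]
external_id = f"assembly:burrito_taco:{sel_hash}"   # stable across repeat builds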
|
||||
return await asyncio.to_thread(_suggest_in_thread, session.db, req)
|
||||
|
||||
|
||||
@router.get("/{recipe_id}")
@@ -1,188 +0,0 @@
|
|||
"""Saved recipe bookmark endpoints."""
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from pathlib import Path
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException
|
||||
|
||||
from app.cloud_session import CloudUser, get_session
|
||||
from app.db.store import Store
|
||||
from app.models.schemas.saved_recipe import (
|
||||
CollectionMemberRequest,
|
||||
CollectionRequest,
|
||||
CollectionSummary,
|
||||
SavedRecipeSummary,
|
||||
SaveRecipeRequest,
|
||||
UpdateSavedRecipeRequest,
|
||||
)
|
||||
from app.tiers import can_use
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
def _in_thread(db_path: Path, fn):
|
||||
"""Run a Store operation in a worker thread with its own connection."""
|
||||
store = Store(db_path)
|
||||
try:
|
||||
return fn(store)
|
||||
finally:
|
||||
store.close()
|
||||
|
||||
|
||||
def _to_summary(row: dict, store: Store) -> SavedRecipeSummary:
|
||||
collection_ids = store.get_saved_recipe_collection_ids(row["id"])
|
||||
return SavedRecipeSummary(
|
||||
id=row["id"],
|
||||
recipe_id=row["recipe_id"],
|
||||
title=row.get("title", ""),
|
||||
saved_at=row["saved_at"],
|
||||
notes=row.get("notes"),
|
||||
rating=row.get("rating"),
|
||||
style_tags=row.get("style_tags") or [],
|
||||
collection_ids=collection_ids,
|
||||
)
|
||||
|
||||
|
||||
# ── save / unsave ─────────────────────────────────────────────────────────────
|
||||
|
||||
@router.post("", response_model=SavedRecipeSummary)
|
||||
async def save_recipe(
|
||||
req: SaveRecipeRequest,
|
||||
session: CloudUser = Depends(get_session),
|
||||
) -> SavedRecipeSummary:
|
||||
def _run(store: Store) -> SavedRecipeSummary:
|
||||
row = store.save_recipe(req.recipe_id, req.notes, req.rating)
|
||||
return _to_summary(row, store)
|
||||
|
||||
return await asyncio.to_thread(_in_thread, session.db, _run)
|
||||
|
||||
|
||||
@router.delete("/{recipe_id}", status_code=204)
|
||||
async def unsave_recipe(
|
||||
recipe_id: int,
|
||||
session: CloudUser = Depends(get_session),
|
||||
) -> None:
|
||||
await asyncio.to_thread(
|
||||
_in_thread, session.db, lambda s: s.unsave_recipe(recipe_id)
|
||||
)
|
||||
|
||||
|
||||
@router.patch("/{recipe_id}", response_model=SavedRecipeSummary)
|
||||
async def update_saved_recipe(
|
||||
recipe_id: int,
|
||||
req: UpdateSavedRecipeRequest,
|
||||
session: CloudUser = Depends(get_session),
|
||||
) -> SavedRecipeSummary:
|
||||
def _run(store: Store) -> SavedRecipeSummary:
|
||||
if not store.is_recipe_saved(recipe_id):
|
||||
raise HTTPException(status_code=404, detail="Recipe not saved.")
|
||||
row = store.update_saved_recipe(
|
||||
recipe_id, req.notes, req.rating, req.style_tags
|
||||
)
|
||||
return _to_summary(row, store)
|
||||
|
||||
return await asyncio.to_thread(_in_thread, session.db, _run)
|
||||
|
||||
|
||||
@router.get("", response_model=list[SavedRecipeSummary])
|
||||
async def list_saved_recipes(
|
||||
sort_by: str = "saved_at",
|
||||
collection_id: int | None = None,
|
||||
session: CloudUser = Depends(get_session),
|
||||
) -> list[SavedRecipeSummary]:
|
||||
def _run(store: Store) -> list[SavedRecipeSummary]:
|
||||
rows = store.get_saved_recipes(sort_by=sort_by, collection_id=collection_id)
|
||||
return [_to_summary(r, store) for r in rows]
|
||||
|
||||
return await asyncio.to_thread(_in_thread, session.db, _run)
|
||||
|
||||
|
||||
# ── collections (Paid) ────────────────────────────────────────────────────────
|
||||
|
||||
@router.get("/collections", response_model=list[CollectionSummary])
|
||||
async def list_collections(
|
||||
session: CloudUser = Depends(get_session),
|
||||
) -> list[CollectionSummary]:
|
||||
if not can_use("recipe_collections", session.tier):
|
||||
raise HTTPException(status_code=403, detail="Collections require Paid tier.")
|
||||
rows = await asyncio.to_thread(
|
||||
_in_thread, session.db, lambda s: s.get_collections()
|
||||
)
|
||||
return [CollectionSummary(**r) for r in rows]
|
||||
|
||||
|
||||
@router.post("/collections", response_model=CollectionSummary)
|
||||
async def create_collection(
|
||||
req: CollectionRequest,
|
||||
session: CloudUser = Depends(get_session),
|
||||
) -> CollectionSummary:
|
||||
if not can_use("recipe_collections", session.tier):
|
||||
raise HTTPException(
|
||||
status_code=403,
|
||||
detail="Collections require Paid tier.",
|
||||
)
|
||||
row = await asyncio.to_thread(
|
||||
_in_thread, session.db,
|
||||
lambda s: s.create_collection(req.name, req.description),
|
||||
)
|
||||
return CollectionSummary(**row)
|
||||
|
||||
|
||||
@router.delete("/collections/{collection_id}", status_code=204)
|
||||
async def delete_collection(
|
||||
collection_id: int,
|
||||
session: CloudUser = Depends(get_session),
|
||||
) -> None:
|
||||
if not can_use("recipe_collections", session.tier):
|
||||
raise HTTPException(status_code=403, detail="Collections require Paid tier.")
|
||||
await asyncio.to_thread(
|
||||
_in_thread, session.db, lambda s: s.delete_collection(collection_id)
|
||||
)
|
||||
|
||||
|
||||
@router.patch("/collections/{collection_id}", response_model=CollectionSummary)
|
||||
async def rename_collection(
|
||||
collection_id: int,
|
||||
req: CollectionRequest,
|
||||
session: CloudUser = Depends(get_session),
|
||||
) -> CollectionSummary:
|
||||
if not can_use("recipe_collections", session.tier):
|
||||
raise HTTPException(status_code=403, detail="Collections require Paid tier.")
|
||||
row = await asyncio.to_thread(
|
||||
_in_thread, session.db,
|
||||
lambda s: s.rename_collection(collection_id, req.name, req.description),
|
||||
)
|
||||
if not row:
|
||||
raise HTTPException(status_code=404, detail="Collection not found.")
|
||||
return CollectionSummary(**row)
|
||||
|
||||
|
||||
@router.post("/collections/{collection_id}/members", status_code=204)
|
||||
async def add_to_collection(
|
||||
collection_id: int,
|
||||
req: CollectionMemberRequest,
|
||||
session: CloudUser = Depends(get_session),
|
||||
) -> None:
|
||||
if not can_use("recipe_collections", session.tier):
|
||||
raise HTTPException(status_code=403, detail="Collections require Paid tier.")
|
||||
await asyncio.to_thread(
|
||||
_in_thread, session.db,
|
||||
lambda s: s.add_to_collection(collection_id, req.saved_recipe_id),
|
||||
)
|
||||
|
||||
|
||||
@router.delete(
|
||||
"/collections/{collection_id}/members/{saved_recipe_id}", status_code=204
|
||||
)
|
||||
async def remove_from_collection(
|
||||
collection_id: int,
|
||||
saved_recipe_id: int,
|
||||
session: CloudUser = Depends(get_session),
|
||||
) -> None:
|
||||
if not can_use("recipe_collections", session.tier):
|
||||
raise HTTPException(status_code=403, detail="Collections require Paid tier.")
|
||||
await asyncio.to_thread(
|
||||
_in_thread, session.db,
|
||||
lambda s: s.remove_from_collection(collection_id, saved_recipe_id),
|
||||
)
|
||||
|
|
@@ -1,37 +0,0 @@
|
|||
"""Session bootstrap endpoint — called once per app load by the frontend.
|
||||
|
||||
Logs auth= + tier= for log-based analytics without client-side tracking.
|
||||
See Circuit-Forge/kiwi#86.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
|
||||
from fastapi import APIRouter, Depends
|
||||
|
||||
from app.cloud_session import CloudUser, _auth_label, get_session
|
||||
from app.core.config import settings
|
||||
|
||||
router = APIRouter()
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@router.get("/bootstrap")
|
||||
def session_bootstrap(session: CloudUser = Depends(get_session)) -> dict:
|
||||
"""Record auth type and tier for log-based analytics.
|
||||
|
||||
Expected log output:
|
||||
INFO:app.api.endpoints.session: session auth=authed tier=paid
|
||||
INFO:app.api.endpoints.session: session auth=anon tier=free
|
||||
|
||||
E2E test sessions (E2E_TEST_USER_ID) are logged at DEBUG so they don't
|
||||
pollute analytics counts while still being visible when DEBUG=true.
|
||||
"""
|
||||
is_test = bool(settings.E2E_TEST_USER_ID and session.user_id == settings.E2E_TEST_USER_ID)
|
||||
logger = log.debug if is_test else log.info
|
||||
logger("session auth=%s tier=%s%s", _auth_label(session.user_id), session.tier, " e2e=true" if is_test else "")
|
||||
return {
|
||||
"auth": _auth_label(session.user_id),
|
||||
"tier": session.tier,
|
||||
"has_byok": session.has_byok,
|
||||
}
|
||||
|
|
@@ -10,7 +10,7 @@ from app.db.store import Store
|
|||
|
||||
router = APIRouter()
|
||||
|
||||
_ALLOWED_KEYS = frozenset({"cooking_equipment", "unit_system"})
|
||||
_ALLOWED_KEYS = frozenset({"cooking_equipment"})
|
||||
|
||||
|
||||
class SettingBody(BaseModel):
@@ -1,224 +0,0 @@
|
|||
"""Shopping list endpoints.
|
||||
|
||||
Free tier for all users (anonymous guests included — shopping list is the
|
||||
primary affiliate revenue surface). Confirm-purchase action is also Free:
|
||||
it moves a checked item into pantry inventory without a tier gate so the
|
||||
flow works for anyone who signs up or browses without an account.
|
||||
|
||||
Routes:
|
||||
GET /shopping — list items (with affiliate links)
|
||||
POST /shopping — add item manually
|
||||
PATCH /shopping/{id} — update (check/uncheck, rename, qty)
|
||||
DELETE /shopping/{id} — remove single item
|
||||
DELETE /shopping/checked — clear all checked items
|
||||
DELETE /shopping/all — clear entire list
|
||||
POST /shopping/from-recipe — bulk add gaps from a recipe
|
||||
POST /shopping/{id}/confirm — confirm purchase → add to pantry inventory
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, status
|
||||
|
||||
from app.cloud_session import CloudUser, get_session
|
||||
from app.db.session import get_store
|
||||
from app.db.store import Store
|
||||
from app.models.schemas.shopping import (
|
||||
BulkAddFromRecipeRequest,
|
||||
ConfirmPurchaseRequest,
|
||||
ShoppingItemCreate,
|
||||
ShoppingItemResponse,
|
||||
ShoppingItemUpdate,
|
||||
)
|
||||
from app.services.recipe.grocery_links import GroceryLinkBuilder
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
def _enrich(item: dict, builder: GroceryLinkBuilder) -> ShoppingItemResponse:
|
||||
"""Attach live affiliate links to a raw store row."""
|
||||
links = builder.build_links(item["name"])
|
||||
return ShoppingItemResponse(
|
||||
**{**item, "checked": bool(item.get("checked", 0))},
|
||||
grocery_links=[{"ingredient": l.ingredient, "retailer": l.retailer, "url": l.url} for l in links],
|
||||
)
|
||||
|
||||
|
||||
def _in_thread(db_path, fn):
|
||||
store = Store(db_path)
|
||||
try:
|
||||
return fn(store)
|
||||
finally:
|
||||
store.close()
|
||||
|
||||
|
||||
# ── List ──────────────────────────────────────────────────────────────────────
|
||||
|
||||
@router.get("", response_model=list[ShoppingItemResponse])
|
||||
async def list_shopping_items(
|
||||
include_checked: bool = True,
|
||||
session: CloudUser = Depends(get_session),
|
||||
):
|
||||
builder = GroceryLinkBuilder(tier=session.tier, has_byok=session.has_byok)
|
||||
items = await asyncio.to_thread(
|
||||
_in_thread, session.db, lambda s: s.list_shopping_items(include_checked)
|
||||
)
|
||||
return [_enrich(i, builder) for i in items]
|
||||
|
||||
|
||||
# ── Add manually ──────────────────────────────────────────────────────────────
|
||||
|
||||
@router.post("", response_model=ShoppingItemResponse, status_code=status.HTTP_201_CREATED)
|
||||
async def add_shopping_item(
|
||||
body: ShoppingItemCreate,
|
||||
session: CloudUser = Depends(get_session),
|
||||
):
|
||||
builder = GroceryLinkBuilder(tier=session.tier, has_byok=session.has_byok)
|
||||
item = await asyncio.to_thread(
|
||||
_in_thread,
|
||||
session.db,
|
||||
lambda s: s.add_shopping_item(
|
||||
name=body.name,
|
||||
quantity=body.quantity,
|
||||
unit=body.unit,
|
||||
category=body.category,
|
||||
notes=body.notes,
|
||||
source=body.source,
|
||||
recipe_id=body.recipe_id,
|
||||
sort_order=body.sort_order,
|
||||
),
|
||||
)
|
||||
return _enrich(item, builder)
|
||||
|
||||
|
||||
# ── Bulk add from recipe ───────────────────────────────────────────────────────
|
||||
|
||||
@router.post("/from-recipe", response_model=list[ShoppingItemResponse], status_code=status.HTTP_201_CREATED)
|
||||
async def add_from_recipe(
|
||||
body: BulkAddFromRecipeRequest,
|
||||
session: CloudUser = Depends(get_session),
|
||||
):
|
||||
"""Add missing ingredients from a recipe to the shopping list.
|
||||
|
||||
Runs pantry gap analysis and adds only the items the user doesn't have
|
||||
(unless include_covered=True). Skips duplicates already on the list.
|
||||
"""
|
||||
from app.services.meal_plan.shopping_list import compute_shopping_list
|
||||
|
||||
def _run(store: Store):
|
||||
recipe = store.get_recipe(body.recipe_id)
|
||||
if not recipe:
|
||||
raise HTTPException(status_code=404, detail="Recipe not found")
|
||||
inventory = store.list_inventory()
|
||||
gaps, covered = compute_shopping_list([recipe], inventory)
|
||||
targets = (gaps + covered) if body.include_covered else gaps
|
||||
|
||||
# Avoid duplicates already on the list
|
||||
existing = {i["name"].lower() for i in store.list_shopping_items()}
|
||||
added = []
|
||||
for gap in targets:
|
||||
if gap.ingredient_name.lower() in existing:
|
||||
continue
|
||||
item = store.add_shopping_item(
|
||||
name=gap.ingredient_name,
|
||||
quantity=None,
|
||||
unit=gap.have_unit,
|
||||
source="recipe",
|
||||
recipe_id=body.recipe_id,
|
||||
)
|
||||
added.append(item)
|
||||
return added
|
||||
|
||||
builder = GroceryLinkBuilder(tier=session.tier, has_byok=session.has_byok)
|
||||
items = await asyncio.to_thread(_in_thread, session.db, _run)
|
||||
return [_enrich(i, builder) for i in items]
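
A simplified stand-in for the gap/covered split this endpoint relies on (the real compute_shopping_list lives in app.services.meal_plan.shopping_list and works on richer objects; the case-insensitive set logic below only illustrates the idea):

def split_gaps(recipe_ingredients: list[str], pantry_names: list[str]) -> tuple[list[str], list[str]]:
    # Mirror the duplicate check above: compare lower-cased names.
    pantry = {p.lower() for p in pantry_names}
    gaps = [i for i in recipe_ingredients if i.lower() not in pantry]
    covered = [i for i in recipe_ingredients if i.lower() in pantry]
    return gaps, covered

gaps, covered = split_gaps(["salsa", "rice", "black beans"], ["Rice"])
assert gaps == ["salsa", "black beans"] and covered == ["rice"]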
|
||||
|
||||
|
||||
# ── Update ────────────────────────────────────────────────────────────────────
|
||||
|
||||
@router.patch("/{item_id}", response_model=ShoppingItemResponse)
|
||||
async def update_shopping_item(
|
||||
item_id: int,
|
||||
body: ShoppingItemUpdate,
|
||||
session: CloudUser = Depends(get_session),
|
||||
):
|
||||
builder = GroceryLinkBuilder(tier=session.tier, has_byok=session.has_byok)
|
||||
item = await asyncio.to_thread(
|
||||
_in_thread,
|
||||
session.db,
|
||||
lambda s: s.update_shopping_item(item_id, **body.model_dump(exclude_none=True)),
|
||||
)
|
||||
if not item:
|
||||
raise HTTPException(status_code=404, detail="Shopping item not found")
|
||||
return _enrich(item, builder)
|
||||
|
||||
|
||||
# ── Confirm purchase → pantry ─────────────────────────────────────────────────
|
||||
|
||||
@router.post("/{item_id}/confirm", status_code=status.HTTP_201_CREATED)
|
||||
async def confirm_purchase(
|
||||
item_id: int,
|
||||
body: ConfirmPurchaseRequest,
|
||||
session: CloudUser = Depends(get_session),
|
||||
):
|
||||
"""Confirm a checked item was purchased and add it to pantry inventory.
|
||||
|
||||
Human approval step: the user explicitly confirms what they actually bought
|
||||
before it lands in their pantry. Returns the new inventory item.
|
||||
"""
|
||||
def _run(store: Store):
|
||||
shopping_item = store.get_shopping_item(item_id)
|
||||
if not shopping_item:
|
||||
raise HTTPException(status_code=404, detail="Shopping item not found")
|
||||
|
||||
qty = body.quantity if body.quantity is not None else (shopping_item.get("quantity") or 1.0)
|
||||
unit = body.unit or shopping_item.get("unit") or "count"
|
||||
category = shopping_item.get("category")
|
||||
|
||||
product = store.get_or_create_product(
|
||||
name=shopping_item["name"],
|
||||
category=category,
|
||||
)
|
||||
inv_item = store.add_inventory_item(
|
||||
product_id=product["id"],
|
||||
location=body.location,
|
||||
quantity=qty,
|
||||
unit=unit,
|
||||
source="manual",
|
||||
)
|
||||
# Mark the shopping item checked and leave it for the user to clear
|
||||
store.update_shopping_item(item_id, checked=True)
|
||||
return inv_item
|
||||
|
||||
return await asyncio.to_thread(_in_thread, session.db, _run)
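
A hedged client-side sketch of the confirm flow (base URL and API prefix are assumptions; item id invented; the JSON field names follow ConfirmPurchaseRequest as used above):

import httpx

BASE = "http://localhost:8000/api/v1"  # assumed mount prefix for api_router

# Confirm shopping item 42 was bought: 2 cans go into the pantry at location "pantry".
resp = httpx.post(
    f"{BASE}/shopping/42/confirm",
    json={"quantity": 2, "unit": "can", "location": "pantry"},
)
resp.raise_for_status()
print(resp.json())  # the newly created inventory item; the shopping item is now checked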
|
||||
|
||||
|
||||
# ── Delete ────────────────────────────────────────────────────────────────────
|
||||
|
||||
@router.delete("/{item_id}", status_code=status.HTTP_204_NO_CONTENT)
|
||||
async def delete_shopping_item(
|
||||
item_id: int,
|
||||
session: CloudUser = Depends(get_session),
|
||||
):
|
||||
deleted = await asyncio.to_thread(
|
||||
_in_thread, session.db, lambda s: s.delete_shopping_item(item_id)
|
||||
)
|
||||
if not deleted:
|
||||
raise HTTPException(status_code=404, detail="Shopping item not found")
|
||||
|
||||
|
||||
@router.delete("/checked", status_code=status.HTTP_204_NO_CONTENT)
|
||||
async def clear_checked(session: CloudUser = Depends(get_session)):
|
||||
await asyncio.to_thread(
|
||||
_in_thread, session.db, lambda s: s.clear_checked_shopping_items()
|
||||
)
|
||||
|
||||
|
||||
@router.delete("/all", status_code=status.HTTP_204_NO_CONTENT)
|
||||
async def clear_all(session: CloudUser = Depends(get_session)):
|
||||
await asyncio.to_thread(
|
||||
_in_thread, session.db, lambda s: s.clear_all_shopping_items()
|
||||
)
|
||||
|
|
@ -1,24 +1,13 @@
|
|||
from fastapi import APIRouter
|
||||
from app.api.endpoints import health, receipts, export, inventory, ocr, recipes, settings, staples, feedback, feedback_attach, household, saved_recipes, imitate, meal_plans, orch_usage, session, shopping
|
||||
from app.api.endpoints.community import router as community_router
|
||||
from app.api.endpoints import health, receipts, export, inventory, ocr, recipes, settings, staples
|
||||
|
||||
api_router = APIRouter()
|
||||
|
||||
api_router.include_router(session.router, prefix="/session", tags=["session"])
|
||||
api_router.include_router(health.router, prefix="/health", tags=["health"])
|
||||
api_router.include_router(receipts.router, prefix="/receipts", tags=["receipts"])
|
||||
api_router.include_router(ocr.router, prefix="/receipts", tags=["ocr"])
|
||||
api_router.include_router(export.router, tags=["export"])
|
||||
api_router.include_router(inventory.router, prefix="/inventory", tags=["inventory"])
|
||||
api_router.include_router(saved_recipes.router, prefix="/recipes/saved", tags=["saved-recipes"])
|
||||
api_router.include_router(recipes.router, prefix="/recipes", tags=["recipes"])
|
||||
api_router.include_router(settings.router, prefix="/settings", tags=["settings"])
|
||||
api_router.include_router(staples.router, prefix="/staples", tags=["staples"])
|
||||
api_router.include_router(feedback.router, prefix="/feedback", tags=["feedback"])
|
||||
api_router.include_router(feedback_attach.router, prefix="/feedback", tags=["feedback"])
|
||||
api_router.include_router(household.router, prefix="/household", tags=["household"])
|
||||
api_router.include_router(imitate.router, prefix="/imitate", tags=["imitate"])
|
||||
api_router.include_router(meal_plans.router, prefix="/meal-plans", tags=["meal-plans"])
|
||||
api_router.include_router(orch_usage.router, prefix="/orch-usage", tags=["orch-usage"])
|
||||
api_router.include_router(shopping.router, prefix="/shopping", tags=["shopping"])
|
||||
api_router.include_router(community_router)
|
||||
|
|
|
|||
|
|
@ -22,12 +22,10 @@ import time
|
|||
from dataclasses import dataclass
|
||||
from pathlib import Path
|
||||
|
||||
import uuid
|
||||
|
||||
import jwt as pyjwt
|
||||
import requests
|
||||
import yaml
|
||||
from fastapi import Depends, HTTPException, Request, Response
|
||||
from fastapi import Depends, HTTPException, Request
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
|
@ -78,21 +76,12 @@ def _is_bypass_ip(ip: str) -> bool:
|
|||
|
||||
_LOCAL_KIWI_DB: Path = Path(os.environ.get("KIWI_DB", "data/kiwi.db"))
|
||||
|
||||
_TIER_CACHE: dict[str, tuple[dict, float]] = {}
|
||||
_TIER_CACHE: dict[str, tuple[str, float]] = {}
|
||||
_TIER_CACHE_TTL = 300 # 5 minutes
|
||||
|
||||
TIERS = ["free", "paid", "premium", "ultra"]
|
||||
|
||||
|
||||
def _auth_label(user_id: str) -> str:
|
||||
"""Classify a user_id into a short tag for structured log lines. No PII emitted."""
|
||||
if user_id in ("local", "local-dev"):
|
||||
return "local"
|
||||
if user_id.startswith("anon-"):
|
||||
return "anon"
|
||||
return "authed"
|
||||
|
||||
|
||||
# ── Domain ────────────────────────────────────────────────────────────────────
|
||||
|
||||
@dataclass(frozen=True)
|
||||
|
|
@ -101,9 +90,6 @@ class CloudUser:
|
|||
tier: str # free | paid | premium | ultra | local
|
||||
db: Path # per-user SQLite DB path
|
||||
has_byok: bool # True if a configured LLM backend is present in llm.yaml
|
||||
household_id: str | None = None
|
||||
is_household_owner: bool = False
|
||||
license_key: str | None = None # key_display for lifetime/founders keys; None for subscription/free
|
||||
|
||||
|
||||
# ── JWT validation ─────────────────────────────────────────────────────────────
|
||||
|
|
@ -144,16 +130,14 @@ def _ensure_provisioned(user_id: str) -> None:
|
|||
log.warning("Heimdall provision failed for user %s: %s", user_id, exc)
|
||||
|
||||
|
||||
def _fetch_cloud_tier(user_id: str) -> tuple[str, str | None, bool, str | None]:
|
||||
"""Returns (tier, household_id | None, is_household_owner, license_key | None)."""
|
||||
def _fetch_cloud_tier(user_id: str) -> str:
|
||||
now = time.monotonic()
|
||||
cached = _TIER_CACHE.get(user_id)
|
||||
if cached and (now - cached[1]) < _TIER_CACHE_TTL:
|
||||
entry = cached[0]
|
||||
return entry["tier"], entry.get("household_id"), entry.get("is_household_owner", False), entry.get("license_key")
|
||||
return cached[0]
|
||||
|
||||
if not HEIMDALL_ADMIN_TOKEN:
|
||||
return "free", None, False, None
|
||||
return "free"
|
||||
try:
|
||||
resp = requests.post(
|
||||
f"{HEIMDALL_URL}/admin/cloud/resolve",
|
||||
|
|
@ -161,39 +145,21 @@ def _fetch_cloud_tier(user_id: str) -> tuple[str, str | None, bool, str | None]:
|
|||
headers={"Authorization": f"Bearer {HEIMDALL_ADMIN_TOKEN}"},
|
||||
timeout=5,
|
||||
)
|
||||
data = resp.json() if resp.ok else {}
|
||||
tier = data.get("tier", "free")
|
||||
household_id = data.get("household_id")
|
||||
is_owner = data.get("is_household_owner", False)
|
||||
license_key = data.get("key_display")
|
||||
tier = resp.json().get("tier", "free") if resp.ok else "free"
|
||||
except Exception as exc:
|
||||
log.warning("Heimdall tier resolve failed for user %s: %s", user_id, exc)
|
||||
tier, household_id, is_owner, license_key = "free", None, False, None
|
||||
tier = "free"
|
||||
|
||||
_TIER_CACHE[user_id] = ({"tier": tier, "household_id": household_id, "is_household_owner": is_owner, "license_key": license_key}, now)
|
||||
return tier, household_id, is_owner, license_key
|
||||
_TIER_CACHE[user_id] = (tier, now)
|
||||
return tier
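
The tier lookup above is guarded by a monotonic-clock TTL cache. The pattern in isolation (names illustrative, not the module's):

import time

_CACHE: dict[str, tuple[str, float]] = {}
_TTL = 300.0  # seconds

def cached_lookup(key: str, fetch) -> str:
    now = time.monotonic()
    hit = _CACHE.get(key)
    if hit and (now - hit[1]) < _TTL:
        return hit[0]            # still fresh — skip the network round-trip
    value = fetch(key)           # slow path, e.g. the Heimdall resolve call
    _CACHE[key] = (value, now)   # store the value alongside its fetch time
    return value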
|
||||
|
||||
|
||||
def _user_db_path(user_id: str, household_id: str | None = None) -> Path:
|
||||
if household_id:
|
||||
path = CLOUD_DATA_ROOT / f"household_{household_id}" / "kiwi.db"
|
||||
else:
|
||||
def _user_db_path(user_id: str) -> Path:
|
||||
path = CLOUD_DATA_ROOT / user_id / "kiwi.db"
|
||||
path.parent.mkdir(parents=True, exist_ok=True)
|
||||
return path
|
||||
|
||||
|
||||
def _anon_guest_db_path(guest_id: str) -> Path:
|
||||
"""Per-session DB for unauthenticated guest visitors.
|
||||
|
||||
Each anonymous visitor gets an isolated SQLite DB keyed by their guest UUID
|
||||
cookie, so shopping lists and affiliate interactions never bleed across sessions.
|
||||
"""
|
||||
path = CLOUD_DATA_ROOT / f"anon-{guest_id}" / "kiwi.db"
|
||||
path.parent.mkdir(parents=True, exist_ok=True)
|
||||
return path
|
||||
|
||||
|
||||
# ── BYOK detection ────────────────────────────────────────────────────────────
|
||||
|
||||
_LLM_CONFIG_PATH = Path.home() / ".config" / "circuitforge" / "llm.yaml"
|
||||
|
|
@ -219,52 +185,22 @@ def _detect_byok(config_path: Path = _LLM_CONFIG_PATH) -> bool:
|
|||
|
||||
# ── FastAPI dependency ────────────────────────────────────────────────────────
|
||||
|
||||
_GUEST_COOKIE = "kiwi_guest_id"
|
||||
_GUEST_COOKIE_MAX_AGE = 60 * 60 * 24 * 90 # 90 days
|
||||
|
||||
|
||||
def _resolve_guest_session(request: Request, response: Response, has_byok: bool) -> CloudUser:
|
||||
"""Return a per-session anonymous CloudUser, creating a guest UUID cookie if needed."""
|
||||
guest_id = request.cookies.get(_GUEST_COOKIE, "").strip()
|
||||
is_new = not guest_id
|
||||
if is_new:
|
||||
guest_id = str(uuid.uuid4())
|
||||
log.debug("New guest session assigned: anon-%s", guest_id[:8])
|
||||
# Secure flag only when the request actually arrived over HTTPS
|
||||
# (Caddy sets X-Forwarded-Proto=https in cloud; absent on direct port access).
|
||||
# Avoids losing the session cookie on HTTP direct-port testing of the cloud stack.
|
||||
is_https = request.headers.get("x-forwarded-proto", "http").lower() == "https"
|
||||
response.set_cookie(
|
||||
key=_GUEST_COOKIE,
|
||||
value=guest_id,
|
||||
max_age=_GUEST_COOKIE_MAX_AGE,
|
||||
httponly=True,
|
||||
samesite="lax",
|
||||
secure=is_https,
|
||||
)
|
||||
return CloudUser(
|
||||
user_id=f"anon-{guest_id}",
|
||||
tier="free",
|
||||
db=_anon_guest_db_path(guest_id),
|
||||
has_byok=has_byok,
|
||||
)
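
The secure-flag decision from the comment above, reduced to a standalone helper (header name as in the code; everything else generic):

def cookie_should_be_secure(headers: dict[str, str]) -> bool:
    # Caddy sets X-Forwarded-Proto=https in cloud; direct HTTP port access leaves it unset.
    return headers.get("x-forwarded-proto", "http").lower() == "https"

assert cookie_should_be_secure({"x-forwarded-proto": "https"}) is True
assert cookie_should_be_secure({}) is False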
|
||||
|
||||
|
||||
def get_session(request: Request, response: Response) -> CloudUser:
|
||||
def get_session(request: Request) -> CloudUser:
|
||||
"""FastAPI dependency — resolves the current user from the request.
|
||||
|
||||
Local mode: fully-privileged "local" user pointing at local DB.
|
||||
Cloud mode: validates X-CF-Session JWT, provisions license, resolves tier.
|
||||
Dev bypass: if CLOUD_AUTH_BYPASS_IPS is set and the client IP matches,
|
||||
returns a "local" session without JWT validation (dev/LAN use only).
|
||||
Anonymous: per-session UUID cookie isolates each guest visitor's data.
|
||||
"""
|
||||
has_byok = _detect_byok()
|
||||
|
||||
if not CLOUD_MODE:
|
||||
return CloudUser(user_id="local", tier="local", db=_LOCAL_KIWI_DB, has_byok=has_byok)
|
||||
|
||||
# Prefer X-Real-IP (set by Caddy from the actual client address) over the
|
||||
# Prefer X-Real-IP (set by nginx from the actual client address) over the
|
||||
# TCP peer address (which is nginx's container IP when behind the proxy).
|
||||
client_ip = (
|
||||
request.headers.get("x-real-ip", "")
|
||||
|
|
@ -276,32 +212,21 @@ def get_session(request: Request, response: Response) -> CloudUser:
|
|||
dev_db = _user_db_path("local-dev")
|
||||
return CloudUser(user_id="local-dev", tier="local", db=dev_db, has_byok=has_byok)
|
||||
|
||||
# Resolve cf_session JWT: prefer the explicit header injected by Caddy, then
|
||||
# fall back to the cf_session cookie value. Other cookies (e.g. kiwi_guest_id)
|
||||
# must never be treated as auth tokens.
|
||||
raw_session = request.headers.get("x-cf-session", "").strip()
|
||||
if not raw_session:
|
||||
raw_session = request.cookies.get("cf_session", "").strip()
|
||||
raw_header = (
|
||||
request.headers.get("x-cf-session", "")
|
||||
or request.headers.get("cookie", "")
|
||||
)
|
||||
if not raw_header:
|
||||
raise HTTPException(status_code=401, detail="Not authenticated")
|
||||
|
||||
if not raw_session:
|
||||
return _resolve_guest_session(request, response, has_byok)
|
||||
|
||||
token = _extract_session_token(raw_session) # gitleaks:allow — function name, not a secret
|
||||
token = _extract_session_token(raw_header)
|
||||
if not token:
|
||||
return _resolve_guest_session(request, response, has_byok)
|
||||
raise HTTPException(status_code=401, detail="Not authenticated")
|
||||
|
||||
user_id = validate_session_jwt(token)
|
||||
_ensure_provisioned(user_id)
|
||||
tier, household_id, is_household_owner, license_key = _fetch_cloud_tier(user_id)
|
||||
return CloudUser(
|
||||
user_id=user_id,
|
||||
tier=tier,
|
||||
db=_user_db_path(user_id, household_id=household_id),
|
||||
has_byok=has_byok,
|
||||
household_id=household_id,
|
||||
is_household_owner=is_household_owner,
|
||||
license_key=license_key,
|
||||
)
|
||||
tier = _fetch_cloud_tier(user_id)
|
||||
return CloudUser(user_id=user_id, tier=tier, db=_user_db_path(user_id), has_byok=has_byok)
|
||||
|
||||
|
||||
def require_tier(min_tier: str):
|
||||
|
|
|
|||
|
|
@ -35,16 +35,6 @@ class Settings:
|
|||
# Database
|
||||
DB_PATH: Path = Path(os.environ.get("DB_PATH", str(DATA_DIR / "kiwi.db")))
|
||||
|
||||
# Community feature settings
|
||||
COMMUNITY_DB_URL: str | None = os.environ.get("COMMUNITY_DB_URL") or None
|
||||
COMMUNITY_PSEUDONYM_SALT: str = os.environ.get(
|
||||
"COMMUNITY_PSEUDONYM_SALT", "kiwi-default-salt-change-in-prod"
|
||||
)
|
||||
COMMUNITY_CLOUD_FEED_URL: str = os.environ.get(
|
||||
"COMMUNITY_CLOUD_FEED_URL",
|
||||
"https://menagerie.circuitforge.tech/kiwi/api/v1/community/posts",
|
||||
)
|
||||
|
||||
# Processing
|
||||
MAX_CONCURRENT_JOBS: int = int(os.environ.get("MAX_CONCURRENT_JOBS", "4"))
|
||||
USE_GPU: bool = os.environ.get("USE_GPU", "true").lower() in ("1", "true", "yes")
|
||||
|
|
@ -56,23 +46,8 @@ class Settings:
|
|||
# CF-core resource coordinator (VRAM lease management)
|
||||
COORDINATOR_URL: str = os.environ.get("COORDINATOR_URL", "http://localhost:7700")
|
||||
|
||||
# Hosted cf-orch coordinator — bearer token for managed cloud GPU inference (Paid+)
|
||||
# CFOrchClient reads CF_LICENSE_KEY automatically; exposed here for startup validation.
|
||||
CF_LICENSE_KEY: str | None = os.environ.get("CF_LICENSE_KEY")
|
||||
|
||||
# E2E test account — analytics logging is suppressed for this user_id so test
|
||||
# runs don't pollute session counts. Set to the Directus UUID of the test user.
|
||||
E2E_TEST_USER_ID: str | None = os.environ.get("E2E_TEST_USER_ID") or None
|
||||
|
||||
# Feature flags
|
||||
ENABLE_OCR: bool = os.environ.get("ENABLE_OCR", "false").lower() in ("1", "true", "yes")
|
||||
# Use OrchestratedScheduler (coordinator-aware, multi-GPU fan-out) instead of
|
||||
# LocalScheduler. Defaults to true in CLOUD_MODE; can be set independently
|
||||
# for multi-GPU local rigs that don't need full cloud auth.
|
||||
USE_ORCH_SCHEDULER: bool | None = (
|
||||
None if os.environ.get("USE_ORCH_SCHEDULER") is None
|
||||
else os.environ.get("USE_ORCH_SCHEDULER", "").lower() in ("1", "true", "yes")
|
||||
)
|
||||
|
||||
# Runtime
|
||||
DEBUG: bool = os.environ.get("DEBUG", "false").lower() in ("1", "true", "yes")
|
||||
|
|
|
|||
|
|
@ -14,25 +14,3 @@ CREATE VIRTUAL TABLE IF NOT EXISTS recipes_fts USING fts5(
|
|||
);
|
||||
|
||||
INSERT INTO recipes_fts(recipes_fts) VALUES('rebuild');
|
||||
|
||||
-- Triggers to keep the FTS index in sync with the recipes table.
|
||||
-- Without these, rows inserted after the initial rebuild are invisible to FTS queries.
|
||||
CREATE TRIGGER IF NOT EXISTS recipes_fts_ai
|
||||
AFTER INSERT ON recipes BEGIN
|
||||
INSERT INTO recipes_fts(rowid, ingredient_names)
|
||||
VALUES (new.id, new.ingredient_names);
|
||||
END;
|
||||
|
||||
CREATE TRIGGER IF NOT EXISTS recipes_fts_ad
|
||||
AFTER DELETE ON recipes BEGIN
|
||||
INSERT INTO recipes_fts(recipes_fts, rowid, ingredient_names)
|
||||
VALUES ('delete', old.id, old.ingredient_names);
|
||||
END;
|
||||
|
||||
CREATE TRIGGER IF NOT EXISTS recipes_fts_au
|
||||
AFTER UPDATE ON recipes BEGIN
|
||||
INSERT INTO recipes_fts(recipes_fts, rowid, ingredient_names)
|
||||
VALUES ('delete', old.id, old.ingredient_names);
|
||||
INSERT INTO recipes_fts(rowid, ingredient_names)
|
||||
VALUES (new.id, new.ingredient_names);
|
||||
END;
|
||||
|
|
|
|||
|
|
@ -1,27 +0,0 @@
|
|||
-- Migration 016: Add FTS5 sync triggers for the recipes_fts content table.
|
||||
--
|
||||
-- Migration 015 created recipes_fts and did a one-time rebuild, but omitted
|
||||
-- triggers. Without them, INSERT/UPDATE/DELETE on recipes does not update the
|
||||
-- FTS index, so new rows are invisible to MATCH queries.
|
||||
--
|
||||
-- CREATE TRIGGER IF NOT EXISTS is idempotent — safe to re-run.
|
||||
|
||||
CREATE TRIGGER IF NOT EXISTS recipes_fts_ai
|
||||
AFTER INSERT ON recipes BEGIN
|
||||
INSERT INTO recipes_fts(rowid, ingredient_names)
|
||||
VALUES (new.id, new.ingredient_names);
|
||||
END;
|
||||
|
||||
CREATE TRIGGER IF NOT EXISTS recipes_fts_ad
|
||||
AFTER DELETE ON recipes BEGIN
|
||||
INSERT INTO recipes_fts(recipes_fts, rowid, ingredient_names)
|
||||
VALUES ('delete', old.id, old.ingredient_names);
|
||||
END;
|
||||
|
||||
CREATE TRIGGER IF NOT EXISTS recipes_fts_au
|
||||
AFTER UPDATE ON recipes BEGIN
|
||||
INSERT INTO recipes_fts(recipes_fts, rowid, ingredient_names)
|
||||
VALUES ('delete', old.id, old.ingredient_names);
|
||||
INSERT INTO recipes_fts(rowid, ingredient_names)
|
||||
VALUES (new.id, new.ingredient_names);
|
||||
END;
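
A throwaway sqlite3 check of why the insert trigger matters for an external-content FTS5 table (schema reduced to one column; values invented):

import sqlite3

conn = sqlite3.connect(":memory:")
conn.executescript("""
CREATE TABLE recipes (id INTEGER PRIMARY KEY, ingredient_names TEXT);
CREATE VIRTUAL TABLE recipes_fts USING fts5(ingredient_names, content=recipes, content_rowid=id);
CREATE TRIGGER recipes_fts_ai AFTER INSERT ON recipes BEGIN
    INSERT INTO recipes_fts(rowid, ingredient_names) VALUES (new.id, new.ingredient_names);
END;
""")
conn.execute("INSERT INTO recipes (ingredient_names) VALUES (?)", ('["chicken", "garlic"]',))
print(conn.execute("SELECT rowid FROM recipes_fts WHERE recipes_fts MATCH ?", ("chicken",)).fetchall())
# [(1,)] — the new row is visible to MATCH without a manual rebuild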
|
||||
|
|
@ -1,10 +0,0 @@
|
|||
-- 017_household_invites.sql
|
||||
CREATE TABLE IF NOT EXISTS household_invites (
|
||||
token TEXT PRIMARY KEY,
|
||||
household_id TEXT NOT NULL,
|
||||
created_by TEXT NOT NULL,
|
||||
created_at TEXT NOT NULL DEFAULT (datetime('now')),
|
||||
expires_at TEXT NOT NULL,
|
||||
used_at TEXT,
|
||||
used_by TEXT
|
||||
);
|
||||
|
|
@ -1,14 +0,0 @@
|
|||
-- Migration 018: saved recipes bookmarks.
|
||||
|
||||
CREATE TABLE saved_recipes (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
recipe_id INTEGER NOT NULL REFERENCES recipes(id) ON DELETE CASCADE,
|
||||
saved_at TEXT NOT NULL DEFAULT (datetime('now')),
|
||||
notes TEXT,
|
||||
rating INTEGER CHECK (rating IS NULL OR (rating >= 0 AND rating <= 5)),
|
||||
style_tags TEXT NOT NULL DEFAULT '[]',
|
||||
UNIQUE (recipe_id)
|
||||
);
|
||||
|
||||
CREATE INDEX idx_saved_recipes_saved_at ON saved_recipes (saved_at DESC);
|
||||
CREATE INDEX idx_saved_recipes_rating ON saved_recipes (rating);
|
||||
|
|
@ -1,16 +0,0 @@
|
|||
-- Migration 019: recipe collections (Paid tier organisation).
|
||||
|
||||
CREATE TABLE recipe_collections (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
name TEXT NOT NULL,
|
||||
description TEXT,
|
||||
created_at TEXT NOT NULL DEFAULT (datetime('now')),
|
||||
updated_at TEXT NOT NULL DEFAULT (datetime('now'))
|
||||
);
|
||||
|
||||
CREATE TABLE recipe_collection_members (
|
||||
collection_id INTEGER NOT NULL REFERENCES recipe_collections(id) ON DELETE CASCADE,
|
||||
saved_recipe_id INTEGER NOT NULL REFERENCES saved_recipes(id) ON DELETE CASCADE,
|
||||
added_at TEXT NOT NULL DEFAULT (datetime('now')),
|
||||
PRIMARY KEY (collection_id, saved_recipe_id)
|
||||
);
|
||||
|
|
@ -1,13 +0,0 @@
|
|||
-- Migration 020: recipe browser navigation telemetry.
|
||||
-- Used to determine whether category nesting depth needs increasing.
|
||||
-- Review: if any category has page > 5 and result_count > 100 consistently,
|
||||
-- consider adding a third nesting level for that category.
|
||||
|
||||
CREATE TABLE browser_telemetry (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
domain TEXT NOT NULL,
|
||||
category TEXT NOT NULL,
|
||||
page INTEGER NOT NULL,
|
||||
result_count INTEGER NOT NULL,
|
||||
recorded_at TEXT NOT NULL DEFAULT (datetime('now'))
|
||||
);
|
||||
|
|
@ -1,43 +0,0 @@
|
|||
-- Migration 021: FTS5 inverted index for the recipe browser (category + keywords).
|
||||
--
|
||||
-- The browser domain queries were using LIKE '%keyword%' against category and
|
||||
-- keywords columns — a leading wildcard prevents any B-tree index use, so every
|
||||
-- query was a full sequential scan of 3.1M rows. This FTS5 index replaces those
|
||||
-- scans with O(log N) token lookups.
|
||||
--
|
||||
-- Content-table backed: stores only the inverted index, no text duplication.
|
||||
-- The keywords column is a JSON array; the unicode61 tokeniser treats the JSON
|
||||
-- brackets, quotes and commas as separators, so each keyword still matches as its own token.
|
||||
--
|
||||
-- One-time rebuild cost on 3.1M rows: ~20-40 seconds at first startup.
|
||||
-- Subsequent startups skip this migration (IF NOT EXISTS guard).
|
||||
|
||||
CREATE VIRTUAL TABLE IF NOT EXISTS recipe_browser_fts USING fts5(
|
||||
category,
|
||||
keywords,
|
||||
content=recipes,
|
||||
content_rowid=id,
|
||||
tokenize="unicode61"
|
||||
);
|
||||
|
||||
INSERT INTO recipe_browser_fts(recipe_browser_fts) VALUES('rebuild');
|
||||
|
||||
CREATE TRIGGER IF NOT EXISTS recipe_browser_fts_ai
|
||||
AFTER INSERT ON recipes BEGIN
|
||||
INSERT INTO recipe_browser_fts(rowid, category, keywords)
|
||||
VALUES (new.id, new.category, new.keywords);
|
||||
END;
|
||||
|
||||
CREATE TRIGGER IF NOT EXISTS recipe_browser_fts_ad
|
||||
AFTER DELETE ON recipes BEGIN
|
||||
INSERT INTO recipe_browser_fts(recipe_browser_fts, rowid, category, keywords)
|
||||
VALUES ('delete', old.id, old.category, old.keywords);
|
||||
END;
|
||||
|
||||
CREATE TRIGGER IF NOT EXISTS recipe_browser_fts_au
|
||||
AFTER UPDATE ON recipes BEGIN
|
||||
INSERT INTO recipe_browser_fts(recipe_browser_fts, rowid, category, keywords)
|
||||
VALUES ('delete', old.id, old.category, old.keywords);
|
||||
INSERT INTO recipe_browser_fts(rowid, category, keywords)
|
||||
VALUES (new.id, new.category, new.keywords);
|
||||
END;
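
The query-shape change this migration enables, as a small sqlite3 check (table reduced to two columns; rows invented):

import sqlite3

conn = sqlite3.connect(":memory:")
conn.executescript("""
CREATE TABLE recipes (id INTEGER PRIMARY KEY, category TEXT, keywords TEXT);
CREATE VIRTUAL TABLE recipe_browser_fts USING fts5(
    category, keywords, content=recipes, content_rowid=id, tokenize="unicode61"
);
INSERT INTO recipes (category, keywords) VALUES ('dinner', '["chicken", "weeknight"]');
INSERT INTO recipe_browser_fts(recipe_browser_fts) VALUES('rebuild');
""")

# Leading-wildcard LIKE: the plan reports a full table scan.
print(conn.execute("EXPLAIN QUERY PLAN SELECT id FROM recipes WHERE keywords LIKE '%chicken%'").fetchall())
# MATCH goes through the inverted index instead and returns the rowid directly.
print(conn.execute('SELECT rowid FROM recipe_browser_fts WHERE recipe_browser_fts MATCH ?', ('"chicken"',)).fetchall())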
|
||||
|
|
@ -1,8 +0,0 @@
|
|||
-- 022_meal_plans.sql
|
||||
CREATE TABLE meal_plans (
|
||||
id INTEGER PRIMARY KEY,
|
||||
week_start TEXT NOT NULL,
|
||||
meal_types TEXT NOT NULL DEFAULT '["dinner"]',
|
||||
created_at TEXT NOT NULL DEFAULT (datetime('now')),
|
||||
updated_at TEXT NOT NULL DEFAULT (datetime('now'))
|
||||
);
|
||||
|
|
@ -1,5 +0,0 @@
|
|||
-- Migration 022: Add is_generic flag to recipes
|
||||
-- Generic recipes are catch-all/dump recipes with loose ingredient lists
|
||||
-- that should not appear in Level 1 (deterministic "use what I have") results.
|
||||
-- Admins can mark recipes via the recipe editor or a bulk backfill script.
|
||||
ALTER TABLE recipes ADD COLUMN is_generic INTEGER NOT NULL DEFAULT 0;
|
||||
|
|
@ -1,11 +0,0 @@
|
|||
-- 023_meal_plan_slots.sql
|
||||
CREATE TABLE meal_plan_slots (
|
||||
id INTEGER PRIMARY KEY,
|
||||
plan_id INTEGER NOT NULL REFERENCES meal_plans(id) ON DELETE CASCADE,
|
||||
day_of_week INTEGER NOT NULL CHECK(day_of_week BETWEEN 0 AND 6),
|
||||
meal_type TEXT NOT NULL,
|
||||
recipe_id INTEGER REFERENCES recipes(id),
|
||||
servings REAL NOT NULL DEFAULT 2.0,
|
||||
custom_label TEXT,
|
||||
UNIQUE(plan_id, day_of_week, meal_type)
|
||||
);
|
||||
|
|
@ -1,10 +0,0 @@
|
|||
-- 024_prep_sessions.sql
|
||||
CREATE TABLE prep_sessions (
|
||||
id INTEGER PRIMARY KEY,
|
||||
plan_id INTEGER NOT NULL REFERENCES meal_plans(id) ON DELETE CASCADE,
|
||||
scheduled_date TEXT NOT NULL,
|
||||
status TEXT NOT NULL DEFAULT 'draft'
|
||||
CHECK(status IN ('draft','reviewed','done')),
|
||||
created_at TEXT NOT NULL DEFAULT (datetime('now')),
|
||||
updated_at TEXT NOT NULL DEFAULT (datetime('now'))
|
||||
);
|
||||
|
|
@ -1,15 +0,0 @@
|
|||
-- 025_prep_tasks.sql
|
||||
CREATE TABLE prep_tasks (
|
||||
id INTEGER PRIMARY KEY,
|
||||
session_id INTEGER NOT NULL REFERENCES prep_sessions(id) ON DELETE CASCADE,
|
||||
recipe_id INTEGER REFERENCES recipes(id),
|
||||
slot_id INTEGER REFERENCES meal_plan_slots(id),
|
||||
task_label TEXT NOT NULL,
|
||||
duration_minutes INTEGER,
|
||||
sequence_order INTEGER NOT NULL,
|
||||
equipment TEXT,
|
||||
is_parallel INTEGER NOT NULL DEFAULT 0,
|
||||
notes TEXT,
|
||||
user_edited INTEGER NOT NULL DEFAULT 0,
|
||||
created_at TEXT NOT NULL DEFAULT (datetime('now'))
|
||||
);
|
||||
|
|
@ -1,21 +0,0 @@
|
|||
-- 028_community_pseudonyms.sql
|
||||
-- Per-user pseudonym store: maps the user's chosen community display name
|
||||
-- to their Directus user ID. This table lives in per-user kiwi.db only.
|
||||
-- It is NEVER replicated to the community PostgreSQL — pseudonym isolation is by design.
|
||||
--
|
||||
-- A user may have one active pseudonym. Old pseudonyms are retained for reference
|
||||
-- (posts published under them keep their pseudonym attribution) but only one is
|
||||
-- flagged as current (is_current = 1).
|
||||
|
||||
CREATE TABLE IF NOT EXISTS community_pseudonyms (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
pseudonym TEXT NOT NULL,
|
||||
directus_user_id TEXT NOT NULL,
|
||||
is_current INTEGER NOT NULL DEFAULT 1 CHECK (is_current IN (0, 1)),
|
||||
created_at TEXT NOT NULL DEFAULT (datetime('now'))
|
||||
);
|
||||
|
||||
-- Only one pseudonym can be current at a time per user
|
||||
CREATE UNIQUE INDEX IF NOT EXISTS idx_community_pseudonyms_current
|
||||
ON community_pseudonyms (directus_user_id)
|
||||
WHERE is_current = 1;
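
How the partial unique index behaves, as a quick sqlite3 sketch (pseudonyms and user id invented):

import sqlite3

conn = sqlite3.connect(":memory:")
conn.executescript("""
CREATE TABLE community_pseudonyms (
    id INTEGER PRIMARY KEY,
    pseudonym TEXT NOT NULL,
    directus_user_id TEXT NOT NULL,
    is_current INTEGER NOT NULL DEFAULT 1
);
CREATE UNIQUE INDEX idx_current ON community_pseudonyms (directus_user_id) WHERE is_current = 1;
""")
conn.execute("INSERT INTO community_pseudonyms (pseudonym, directus_user_id) VALUES ('SaucyBadger', 'u1')")
try:
    # A second *current* pseudonym for the same user violates the partial unique index.
    conn.execute("INSERT INTO community_pseudonyms (pseudonym, directus_user_id) VALUES ('QuietRadish', 'u1')")
except sqlite3.IntegrityError as exc:
    print("rejected:", exc)
# Retiring the old pseudonym first makes room for a new current one.
conn.execute("UPDATE community_pseudonyms SET is_current = 0 WHERE directus_user_id = 'u1'")
conn.execute("INSERT INTO community_pseudonyms (pseudonym, directus_user_id) VALUES ('QuietRadish', 'u1')")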
|
||||
|
|
@ -1,49 +0,0 @@
|
|||
-- Migration 029: Add inferred_tags column and update FTS index to include it.
|
||||
--
|
||||
-- inferred_tags holds a JSON array of normalized tag strings derived by
|
||||
-- scripts/pipeline/infer_recipe_tags.py (e.g. ["cuisine:Italian",
|
||||
-- "dietary:Low-Carb", "flavor:Umami", "can_be:Gluten-Free"]).
|
||||
--
|
||||
-- The FTS5 browser table is rebuilt to index inferred_tags alongside
|
||||
-- category and keywords so browse domain queries match against all signals.
|
||||
|
||||
-- 1. Add inferred_tags column (empty array default; populated by pipeline run)
|
||||
ALTER TABLE recipes ADD COLUMN inferred_tags TEXT NOT NULL DEFAULT '[]';
|
||||
|
||||
-- 2. Drop old FTS table and triggers that only covered category + keywords
|
||||
DROP TRIGGER IF EXISTS recipes_ai;
|
||||
DROP TRIGGER IF EXISTS recipes_ad;
|
||||
DROP TRIGGER IF EXISTS recipes_au;
|
||||
DROP TABLE IF EXISTS recipe_browser_fts;
|
||||
|
||||
-- 3. Recreate FTS5 table: now indexes category, keywords, AND inferred_tags
|
||||
CREATE VIRTUAL TABLE recipe_browser_fts USING fts5(
|
||||
category,
|
||||
keywords,
|
||||
inferred_tags,
|
||||
content=recipes,
|
||||
content_rowid=id
|
||||
);
|
||||
|
||||
-- 4. Triggers to keep FTS in sync with recipes table changes
|
||||
CREATE TRIGGER recipes_ai AFTER INSERT ON recipes BEGIN
|
||||
INSERT INTO recipe_browser_fts(rowid, category, keywords, inferred_tags)
|
||||
VALUES (new.id, new.category, new.keywords, new.inferred_tags);
|
||||
END;
|
||||
|
||||
CREATE TRIGGER recipes_ad AFTER DELETE ON recipes BEGIN
|
||||
INSERT INTO recipe_browser_fts(recipe_browser_fts, rowid, category, keywords, inferred_tags)
|
||||
VALUES ('delete', old.id, old.category, old.keywords, old.inferred_tags);
|
||||
END;
|
||||
|
||||
CREATE TRIGGER recipes_au AFTER UPDATE ON recipes BEGIN
|
||||
INSERT INTO recipe_browser_fts(recipe_browser_fts, rowid, category, keywords, inferred_tags)
|
||||
VALUES ('delete', old.id, old.category, old.keywords, old.inferred_tags);
|
||||
INSERT INTO recipe_browser_fts(rowid, category, keywords, inferred_tags)
|
||||
VALUES (new.id, new.category, new.keywords, new.inferred_tags);
|
||||
END;
|
||||
|
||||
-- 5. Populate FTS from current table state
|
||||
-- (inferred_tags is '[]' for all rows at this point; run infer_recipe_tags.py
|
||||
-- to populate, then the FTS will be rebuilt as part of that script.)
|
||||
INSERT INTO recipe_browser_fts(recipe_browser_fts) VALUES('rebuild');
|
||||
|
|
@ -1,5 +0,0 @@
|
|||
-- Migration 030: open-package tracking
|
||||
-- Adds opened_date to track when a multi-use item was first opened,
|
||||
-- enabling secondary shelf-life windows (e.g. salsa: 1 year sealed → 2 weeks opened).
|
||||
|
||||
ALTER TABLE inventory_items ADD COLUMN opened_date TEXT;
|
||||
|
|
@ -1,4 +0,0 @@
|
|||
-- Migration 031: add disposal_reason for waste logging (#60)
|
||||
-- status='discarded' already exists in the CHECK constraint from migration 002.
|
||||
-- This column stores free-text reason (optional) and calm-framing presets.
|
||||
ALTER TABLE inventory_items ADD COLUMN disposal_reason TEXT;
|
||||
|
|
@ -1,4 +0,0 @@
|
|||
-- 032_meal_plan_unique_week.sql
|
||||
-- Prevent duplicate plans for the same week.
|
||||
-- Existing duplicates must be resolved before applying (keep MIN(id) per week_start).
|
||||
CREATE UNIQUE INDEX IF NOT EXISTS idx_meal_plans_week_start ON meal_plans (week_start);
|
||||
|
|
@ -1,21 +0,0 @@
|
|||
-- Migration 033: standalone shopping list
|
||||
-- Items can be added manually, from recipe gap analysis, or from the recipe browser.
|
||||
-- Affiliate links are computed at query time by the API layer (never stored).
|
||||
|
||||
CREATE TABLE IF NOT EXISTS shopping_list_items (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
name TEXT NOT NULL,
|
||||
quantity REAL,
|
||||
unit TEXT,
|
||||
category TEXT,
|
||||
checked INTEGER NOT NULL DEFAULT 0, -- 0=want, 1=in-cart/checked off
|
||||
notes TEXT,
|
||||
source TEXT NOT NULL DEFAULT 'manual', -- manual | recipe | meal_plan
|
||||
recipe_id INTEGER REFERENCES recipes(id) ON DELETE SET NULL,
|
||||
sort_order INTEGER NOT NULL DEFAULT 0,
|
||||
created_at TEXT NOT NULL DEFAULT (datetime('now')),
|
||||
updated_at TEXT NOT NULL DEFAULT (datetime('now'))
|
||||
);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS idx_shopping_list_checked
|
||||
ON shopping_list_items (checked, sort_order);
|
||||
|
|
@ -1,14 +0,0 @@
|
|||
-- Migration 034: async recipe generation job queue
|
||||
CREATE TABLE IF NOT EXISTS recipe_jobs (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
job_id TEXT NOT NULL UNIQUE,
|
||||
user_id TEXT NOT NULL,
|
||||
status TEXT NOT NULL DEFAULT 'queued',
|
||||
request TEXT NOT NULL,
|
||||
result TEXT,
|
||||
error TEXT,
|
||||
created_at TEXT NOT NULL DEFAULT (datetime('now')),
|
||||
updated_at TEXT NOT NULL DEFAULT (datetime('now'))
|
||||
);
|
||||
CREATE INDEX IF NOT EXISTS idx_recipe_jobs_job_id ON recipe_jobs (job_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_recipe_jobs_user_id ON recipe_jobs (user_id, created_at DESC);
|
||||
750
app/db/store.py
|
|
@ -14,34 +14,14 @@ from circuitforge_core.db.migrations import run_migrations
|
|||
|
||||
MIGRATIONS_DIR = Path(__file__).parent / "migrations"
|
||||
|
||||
# Module-level cache for recipe counts by keyword set.
|
||||
# The recipe corpus is static at runtime — counts are computed once per
|
||||
# (db_path, keyword_set) and reused for all subsequent requests.
|
||||
# Key: (db_path_str, sorted_keywords_tuple) → int
|
||||
_COUNT_CACHE: dict[tuple[str, ...], int] = {}
|
||||
|
||||
|
||||
class Store:
|
||||
def __init__(self, db_path: Path, key: str = "") -> None:
|
||||
import os
|
||||
self._db_path = str(db_path)
|
||||
self.conn: sqlite3.Connection = get_connection(db_path, key)
|
||||
self.conn.execute("PRAGMA journal_mode=WAL")
|
||||
self.conn.execute("PRAGMA foreign_keys=ON")
|
||||
run_migrations(self.conn, MIGRATIONS_DIR)
|
||||
|
||||
# When RECIPE_DB_PATH is set (cloud mode), attach the shared read-only
|
||||
# corpus DB as the "corpus" schema so per-user DBs can access recipe data.
|
||||
# _cp (corpus prefix) is "corpus." in cloud mode, "" in local mode.
|
||||
corpus_path = os.environ.get("RECIPE_DB_PATH", "")
|
||||
if corpus_path:
|
||||
self.conn.execute("ATTACH DATABASE ? AS corpus", (corpus_path,))
|
||||
self._cp = "corpus."
|
||||
self._corpus_path = corpus_path
|
||||
else:
|
||||
self._cp = ""
|
||||
self._corpus_path = self._db_path
|
||||
|
||||
def close(self) -> None:
|
||||
self.conn.close()
|
||||
|
||||
|
|
@ -55,11 +35,7 @@ class Store:
|
|||
"warnings",
|
||||
# recipe columns
|
||||
"ingredients", "ingredient_names", "directions",
|
||||
"keywords", "element_coverage",
|
||||
# saved recipe columns
|
||||
"style_tags",
|
||||
# meal plan columns
|
||||
"meal_types"):
|
||||
"keywords", "element_coverage"):
|
||||
if key in d and isinstance(d[key], str):
|
||||
try:
|
||||
d[key] = json.loads(d[key])
|
||||
|
|
@ -231,8 +207,7 @@ class Store:
|
|||
|
||||
def update_inventory_item(self, item_id: int, **kwargs) -> dict[str, Any] | None:
|
||||
allowed = {"quantity", "unit", "location", "sublocation",
|
||||
"purchase_date", "expiration_date", "opened_date",
|
||||
"status", "notes", "consumed_at", "disposal_reason"}
|
||||
"expiration_date", "status", "notes", "consumed_at"}
|
||||
updates = {k: v for k, v in kwargs.items() if k in allowed}
|
||||
if not updates:
|
||||
return self.get_inventory_item(item_id)
|
||||
|
|
@ -245,32 +220,6 @@ class Store:
|
|||
self.conn.commit()
|
||||
return self.get_inventory_item(item_id)
|
||||
|
||||
def partial_consume_item(
|
||||
self,
|
||||
item_id: int,
|
||||
consume_qty: float,
|
||||
consumed_at: str,
|
||||
) -> dict[str, Any] | None:
|
||||
"""Decrement quantity by consume_qty. Mark consumed when quantity reaches 0."""
|
||||
row = self.get_inventory_item(item_id)
|
||||
if row is None:
|
||||
return None
|
||||
remaining = max(0.0, round(row["quantity"] - consume_qty, 6))
|
||||
if remaining <= 0:
|
||||
self.conn.execute(
|
||||
"UPDATE inventory_items SET quantity = 0, status = 'consumed',"
|
||||
" consumed_at = ?, updated_at = datetime('now') WHERE id = ?",
|
||||
(consumed_at, item_id),
|
||||
)
|
||||
else:
|
||||
self.conn.execute(
|
||||
"UPDATE inventory_items SET quantity = ?, updated_at = datetime('now')"
|
||||
" WHERE id = ?",
|
||||
(remaining, item_id),
|
||||
)
|
||||
self.conn.commit()
|
||||
return self.get_inventory_item(item_id)
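
The decrement rule in isolation, as a hedged worked example (quantities invented):

def remaining_after(quantity: float, consume_qty: float) -> float:
    # Mirrors the method above: clamp at zero and round to dodge float drift.
    return max(0.0, round(quantity - consume_qty, 6))

assert remaining_after(3.0, 1.5) == 1.5   # partial consume: quantity updated, status untouched
assert remaining_after(1.5, 2.0) == 0.0   # over-consume: clamps to zero → row marked 'consumed'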
|
||||
|
||||
def expiring_soon(self, days: int = 7) -> list[dict[str, Any]]:
|
||||
return self._fetch_all(
|
||||
"""SELECT i.*, p.name as product_name, p.category
|
||||
|
|
@ -385,9 +334,8 @@ class Store:
|
|||
|
||||
def _fts_ready(self) -> bool:
|
||||
"""Return True if the recipes_fts virtual table exists."""
|
||||
schema = "corpus" if self._cp else "main"
|
||||
row = self._fetch_one(
|
||||
f"SELECT 1 FROM {schema}.sqlite_master WHERE type='table' AND name='recipes_fts'"
|
||||
"SELECT 1 FROM sqlite_master WHERE type='table' AND name='recipes_fts'"
|
||||
)
|
||||
return row is not None
|
||||
|
||||
|
|
@ -614,7 +562,6 @@ class Store:
|
|||
max_carbs_g: float | None = None,
|
||||
max_sodium_mg: float | None = None,
|
||||
excluded_ids: list[int] | None = None,
|
||||
exclude_generic: bool = False,
|
||||
) -> list[dict]:
|
||||
"""Find recipes containing any of the given ingredient names.
|
||||
Scores by match count and returns highest-scoring first.
|
||||
|
|
@ -624,9 +571,6 @@ class Store:
|
|||
|
||||
Nutrition filters use NULL-passthrough: rows without nutrition data
|
||||
always pass (they may be estimated or absent entirely).
|
||||
|
||||
exclude_generic: when True, skips recipes marked is_generic=1.
|
||||
Pass True for Level 1 ("Use What I Have") to suppress catch-all recipes.
|
||||
"""
|
||||
if not ingredient_names:
|
||||
return []
|
||||
|
|
@ -652,8 +596,6 @@ class Store:
|
|||
placeholders = ",".join("?" * len(excluded_ids))
|
||||
extra_clauses.append(f"r.id NOT IN ({placeholders})")
|
||||
extra_params.extend(excluded_ids)
|
||||
if exclude_generic:
|
||||
extra_clauses.append("r.is_generic = 0")
|
||||
where_extra = (" AND " + " AND ".join(extra_clauses)) if extra_clauses else ""
|
||||
|
||||
if self._fts_ready():
|
||||
|
|
@ -678,12 +620,10 @@ class Store:
|
|||
return []
|
||||
|
||||
# Pull up to 10× limit candidates so ranking has enough headroom.
|
||||
# FTS5 pseudo-column in WHERE uses bare table name, not schema-qualified.
|
||||
c = self._cp
|
||||
sql = f"""
|
||||
SELECT r.*
|
||||
FROM {c}recipes_fts
|
||||
JOIN {c}recipes r ON r.id = {c}recipes_fts.rowid
|
||||
FROM recipes_fts
|
||||
JOIN recipes r ON r.id = recipes_fts.rowid
|
||||
WHERE recipes_fts MATCH ?
|
||||
{where_extra}
|
||||
LIMIT ?
|
||||
|
|
@ -717,10 +657,9 @@ class Store:
|
|||
"CASE WHEN r.ingredient_names LIKE ? THEN 1 ELSE 0 END"
|
||||
for _ in ingredient_names
|
||||
)
|
||||
c = self._cp
|
||||
sql = f"""
|
||||
SELECT r.*, ({match_score}) AS match_count
|
||||
FROM {c}recipes r
|
||||
FROM recipes r
|
||||
WHERE ({like_clauses})
|
||||
{where_extra}
|
||||
ORDER BY match_count DESC, r.id ASC
|
||||
|
|
@ -730,107 +669,7 @@ class Store:
|
|||
return self._fetch_all(sql, tuple(all_params))
|
||||
|
||||
def get_recipe(self, recipe_id: int) -> dict | None:
|
||||
row = self._fetch_one(f"SELECT * FROM {self._cp}recipes WHERE id = ?", (recipe_id,))
|
||||
if row is None and self._cp:
|
||||
# Fall back to user's own assembled recipes in main schema
|
||||
row = self._fetch_one("SELECT * FROM recipes WHERE id = ?", (recipe_id,))
|
||||
return row
|
||||
|
||||
# --- Async recipe jobs ---
|
||||
|
||||
def create_recipe_job(self, job_id: str, user_id: str, request_json: str) -> sqlite3.Row:
|
||||
return self._insert_returning(
|
||||
"INSERT INTO recipe_jobs (job_id, user_id, status, request) VALUES (?,?,?,?) RETURNING *",
|
||||
(job_id, user_id, "queued", request_json),
|
||||
)
|
||||
|
||||
def get_recipe_job(self, job_id: str, user_id: str) -> sqlite3.Row | None:
|
||||
return self._fetch_one(
|
||||
"SELECT * FROM recipe_jobs WHERE job_id=? AND user_id=?",
|
||||
(job_id, user_id),
|
||||
)
|
||||
|
||||
def update_recipe_job_running(self, job_id: str) -> None:
|
||||
self.conn.execute(
|
||||
"UPDATE recipe_jobs SET status='running', updated_at=datetime('now') WHERE job_id=?",
|
||||
(job_id,),
|
||||
)
|
||||
self.conn.commit()
|
||||
|
||||
def complete_recipe_job(self, job_id: str, result_json: str) -> None:
|
||||
self.conn.execute(
|
||||
"UPDATE recipe_jobs SET status='done', result=?, updated_at=datetime('now') WHERE job_id=?",
|
||||
(result_json, job_id),
|
||||
)
|
||||
self.conn.commit()
|
||||
|
||||
def fail_recipe_job(self, job_id: str, error: str) -> None:
|
||||
self.conn.execute(
|
||||
"UPDATE recipe_jobs SET status='failed', error=?, updated_at=datetime('now') WHERE job_id=?",
|
||||
(error, job_id),
|
||||
)
|
||||
self.conn.commit()
|
||||
|
||||
def upsert_built_recipe(
|
||||
self,
|
||||
external_id: str,
|
||||
title: str,
|
||||
ingredients: list[str],
|
||||
directions: list[str],
|
||||
) -> int:
|
||||
"""Persist an assembly-built recipe and return its DB id.
|
||||
|
||||
Uses external_id as a stable dedup key so the same build slug doesn't
|
||||
accumulate duplicate rows across multiple user sessions.
|
||||
"""
|
||||
import json as _json
|
||||
self.conn.execute(
|
||||
"""
|
||||
INSERT OR IGNORE INTO recipes
|
||||
(external_id, title, ingredients, ingredient_names, directions, source)
|
||||
VALUES (?, ?, ?, ?, ?, 'assembly')
|
||||
""",
|
||||
(
|
||||
external_id,
|
||||
title,
|
||||
_json.dumps(ingredients),
|
||||
_json.dumps(ingredients),
|
||||
_json.dumps(directions),
|
||||
),
|
||||
)
|
||||
# Update title in case the build was re-run with tweaked selections
|
||||
self.conn.execute(
|
||||
"UPDATE recipes SET title = ? WHERE external_id = ?",
|
||||
(title, external_id),
|
||||
)
|
||||
self.conn.commit()
|
||||
row = self._fetch_one(
|
||||
"SELECT id FROM recipes WHERE external_id = ?", (external_id,)
|
||||
)
|
||||
return row["id"] # type: ignore[index]
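
The external_id dedup idiom in isolation — insert-or-ignore keyed on a stable id, then refresh the mutable fields (a reduced sketch; the real table presumably carries a UNIQUE constraint on external_id for OR IGNORE to bite):

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE recipes (id INTEGER PRIMARY KEY, external_id TEXT UNIQUE, title TEXT)")

def upsert(external_id: str, title: str) -> int:
    conn.execute("INSERT OR IGNORE INTO recipes (external_id, title) VALUES (?, ?)", (external_id, title))
    conn.execute("UPDATE recipes SET title = ? WHERE external_id = ?", (title, external_id))
    conn.commit()
    return conn.execute("SELECT id FROM recipes WHERE external_id = ?", (external_id,)).fetchone()[0]

first = upsert("build-abc", "Weeknight chili")
second = upsert("build-abc", "Weeknight chili (v2)")  # same row, title refreshed
assert first == second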
|
||||
|
||||
def get_element_profiles(self, names: list[str]) -> dict[str, list[str]]:
|
||||
"""Return {ingredient_name: [element_tag, ...]} for the given names.
|
||||
|
||||
Only names present in ingredient_profiles are returned -- missing names
|
||||
are silently omitted so callers can distinguish "no profile" from "empty
|
||||
elements list".
|
||||
"""
|
||||
if not names:
|
||||
return {}
|
||||
placeholders = ",".join("?" * len(names))
|
||||
rows = self._fetch_all(
|
||||
f"SELECT name, elements FROM {self._cp}ingredient_profiles WHERE name IN ({placeholders})",
|
||||
tuple(names),
|
||||
)
|
||||
result: dict[str, list[str]] = {}
|
||||
for row in rows:
|
||||
try:
|
||||
elements = json.loads(row["elements"]) if row["elements"] else []
|
||||
except (json.JSONDecodeError, TypeError):
|
||||
elements = []
|
||||
result[row["name"]] = elements
|
||||
return result
|
||||
return self._fetch_one("SELECT * FROM recipes WHERE id = ?", (recipe_id,))
|
||||
|
||||
# ── rate limits ───────────────────────────────────────────────────────
|
||||
|
||||
|
|
@ -896,578 +735,3 @@ class Store:
|
|||
int(approved), int(opted_in),
|
||||
))
|
||||
self.conn.commit()
|
||||
|
||||
# ── saved recipes ─────────────────────────────────────────────────────
|
||||
|
||||
def save_recipe(
|
||||
self,
|
||||
recipe_id: int,
|
||||
notes: str | None,
|
||||
rating: int | None,
|
||||
) -> dict:
|
||||
return self._insert_returning(
|
||||
"""
|
||||
INSERT INTO saved_recipes (recipe_id, notes, rating)
|
||||
VALUES (?, ?, ?)
|
||||
ON CONFLICT(recipe_id) DO UPDATE SET
|
||||
notes = excluded.notes,
|
||||
rating = excluded.rating
|
||||
RETURNING *
|
||||
""",
|
||||
(recipe_id, notes, rating),
|
||||
)
|
||||
|
||||
def unsave_recipe(self, recipe_id: int) -> None:
|
||||
self.conn.execute(
|
||||
"DELETE FROM saved_recipes WHERE recipe_id = ?", (recipe_id,)
|
||||
)
|
||||
self.conn.commit()
|
||||
|
||||
def is_recipe_saved(self, recipe_id: int) -> bool:
|
||||
row = self._fetch_one(
|
||||
"SELECT id FROM saved_recipes WHERE recipe_id = ?", (recipe_id,)
|
||||
)
|
||||
return row is not None
|
||||
|
||||
def update_saved_recipe(
|
||||
self,
|
||||
recipe_id: int,
|
||||
notes: str | None,
|
||||
rating: int | None,
|
||||
style_tags: list[str],
|
||||
) -> dict:
|
||||
self.conn.execute(
|
||||
"""
|
||||
UPDATE saved_recipes
|
||||
SET notes = ?, rating = ?, style_tags = ?
|
||||
WHERE recipe_id = ?
|
||||
""",
|
||||
(notes, rating, self._dump(style_tags), recipe_id),
|
||||
)
|
||||
self.conn.commit()
|
||||
row = self._fetch_one(
|
||||
"SELECT * FROM saved_recipes WHERE recipe_id = ?", (recipe_id,)
|
||||
)
|
||||
return row # type: ignore[return-value]
|
||||
|
||||
def get_saved_recipes(
|
||||
self,
|
||||
sort_by: str = "saved_at",
|
||||
collection_id: int | None = None,
|
||||
) -> list[dict]:
|
||||
order = {
|
||||
"saved_at": "sr.saved_at DESC",
|
||||
"rating": "sr.rating DESC",
|
||||
"title": "r.title ASC",
|
||||
}.get(sort_by, "sr.saved_at DESC")
|
||||
|
||||
c = self._cp
|
||||
# In corpus-attached (cloud) mode: try corpus recipes first, fall back
|
||||
# to user's own assembled recipes. In local mode: single join suffices.
|
||||
if c:
|
||||
recipe_join = (
|
||||
f"LEFT JOIN {c}recipes rc ON rc.id = sr.recipe_id "
|
||||
"LEFT JOIN recipes rm ON rm.id = sr.recipe_id"
|
||||
)
|
||||
title_col = "COALESCE(rc.title, rm.title) AS title"
|
||||
else:
|
||||
recipe_join = "JOIN recipes rc ON rc.id = sr.recipe_id"
|
||||
title_col = "rc.title"
|
||||
|
||||
if collection_id is not None:
|
||||
return self._fetch_all(
|
||||
f"""
|
||||
SELECT sr.*, {title_col}
|
||||
FROM saved_recipes sr
|
||||
{recipe_join}
|
||||
JOIN recipe_collection_members rcm ON rcm.saved_recipe_id = sr.id
|
||||
WHERE rcm.collection_id = ?
|
||||
ORDER BY {order}
|
||||
""",
|
||||
(collection_id,),
|
||||
)
|
||||
return self._fetch_all(
|
||||
f"""
|
||||
SELECT sr.*, {title_col}
|
||||
FROM saved_recipes sr
|
||||
{recipe_join}
|
||||
ORDER BY {order}
|
||||
""",
|
||||
)
|
||||
|
||||
def get_saved_recipe_collection_ids(self, saved_recipe_id: int) -> list[int]:
|
||||
rows = self._fetch_all(
|
||||
"SELECT collection_id FROM recipe_collection_members WHERE saved_recipe_id = ?",
|
||||
(saved_recipe_id,),
|
||||
)
|
||||
return [r["collection_id"] for r in rows]
|
||||
|
||||
# ── recipe collections ────────────────────────────────────────────────
|
||||
|
||||
def create_collection(self, name: str, description: str | None) -> dict:
|
||||
# INSERT RETURNING * omits aggregate columns (e.g. member_count); re-query
|
||||
# with the same SELECT used by get_collections() so the response shape is consistent.
|
||||
cur = self.conn.execute(
|
||||
"INSERT INTO recipe_collections (name, description) VALUES (?, ?)",
|
||||
(name, description),
|
||||
)
|
||||
self.conn.commit()
|
||||
new_id = cur.lastrowid
|
||||
row = self._fetch_one(
|
||||
"""
|
||||
SELECT rc.*,
|
||||
COUNT(rcm.saved_recipe_id) AS member_count
|
||||
FROM recipe_collections rc
|
||||
LEFT JOIN recipe_collection_members rcm ON rcm.collection_id = rc.id
|
||||
WHERE rc.id = ?
|
||||
GROUP BY rc.id
|
||||
""",
|
||||
(new_id,),
|
||||
)
|
||||
return row # type: ignore[return-value]
|
||||
|
||||
def delete_collection(self, collection_id: int) -> None:
|
||||
self.conn.execute(
|
||||
"DELETE FROM recipe_collections WHERE id = ?", (collection_id,)
|
||||
)
|
||||
self.conn.commit()
|
||||
|
||||
def rename_collection(
|
||||
self, collection_id: int, name: str, description: str | None
|
||||
) -> dict:
|
||||
self.conn.execute(
|
||||
"""
|
||||
UPDATE recipe_collections
|
||||
SET name = ?, description = ?, updated_at = datetime('now')
|
||||
WHERE id = ?
|
||||
""",
|
||||
(name, description, collection_id),
|
||||
)
|
||||
self.conn.commit()
|
||||
row = self._fetch_one(
|
||||
"SELECT * FROM recipe_collections WHERE id = ?", (collection_id,)
|
||||
)
|
||||
return row # type: ignore[return-value]
|
||||
|
||||
def get_collections(self) -> list[dict]:
|
||||
return self._fetch_all(
|
||||
"""
|
||||
SELECT rc.*,
|
||||
COUNT(rcm.saved_recipe_id) AS member_count
|
||||
FROM recipe_collections rc
|
||||
LEFT JOIN recipe_collection_members rcm ON rcm.collection_id = rc.id
|
||||
GROUP BY rc.id
|
||||
ORDER BY rc.created_at ASC
|
||||
"""
|
||||
)
|
||||
|
||||
def add_to_collection(self, collection_id: int, saved_recipe_id: int) -> None:
|
||||
self.conn.execute(
|
||||
"""
|
||||
INSERT OR IGNORE INTO recipe_collection_members (collection_id, saved_recipe_id)
|
||||
VALUES (?, ?)
|
||||
""",
|
||||
(collection_id, saved_recipe_id),
|
||||
)
|
||||
self.conn.commit()
|
||||
|
||||
def remove_from_collection(
|
||||
self, collection_id: int, saved_recipe_id: int
|
||||
) -> None:
|
||||
self.conn.execute(
|
||||
"""
|
||||
DELETE FROM recipe_collection_members
|
||||
WHERE collection_id = ? AND saved_recipe_id = ?
|
||||
""",
|
||||
(collection_id, saved_recipe_id),
|
||||
)
|
||||
self.conn.commit()
|
||||
|
||||
# ── recipe browser ────────────────────────────────────────────────────
|
||||
|
||||
def get_browser_categories(
|
||||
self,
|
||||
domain: str,
|
||||
keywords_by_category: dict[str, list[str]],
|
||||
has_subcategories_by_category: dict[str, bool] | None = None,
|
||||
) -> list[dict]:
|
||||
"""Return [{category, recipe_count, has_subcategories}] for each category.
|
||||
|
||||
keywords_by_category maps category name → keyword list for counting.
|
||||
has_subcategories_by_category maps category name → bool (optional;
|
||||
defaults to False for all categories when omitted).
|
||||
"""
|
||||
results = []
|
||||
for category, keywords in keywords_by_category.items():
|
||||
count = self._count_recipes_for_keywords(keywords)
|
||||
results.append({
|
||||
"category": category,
|
||||
"recipe_count": count,
|
||||
"has_subcategories": (has_subcategories_by_category or {}).get(category, False),
|
||||
})
|
||||
return results
|
||||
|
||||
def get_browser_subcategories(
|
||||
self, domain: str, keywords_by_subcategory: dict[str, list[str]]
|
||||
) -> list[dict]:
|
||||
"""Return [{subcategory, recipe_count}] for each subcategory.
|
||||
|
||||
Mirrors get_browser_categories but for the second level.
|
||||
"""
|
||||
results = []
|
||||
for subcat, keywords in keywords_by_subcategory.items():
|
||||
count = self._count_recipes_for_keywords(keywords)
|
||||
results.append({"subcategory": subcat, "recipe_count": count})
|
||||
return results
|
||||
|
||||
@staticmethod
|
||||
def _browser_fts_query(keywords: list[str]) -> str:
|
||||
"""Build an FTS5 MATCH expression that ORs all keywords as exact phrases."""
|
||||
phrases = ['"' + kw.replace('"', '""') + '"' for kw in keywords]
|
||||
return " OR ".join(phrases)
|
||||
|
||||
def _count_recipes_for_keywords(self, keywords: list[str]) -> int:
|
||||
if not keywords:
|
||||
return 0
|
||||
# Use corpus path as cache key so all cloud users share the same counts.
|
||||
cache_key = (self._corpus_path, *sorted(keywords))
|
||||
if cache_key in _COUNT_CACHE:
|
||||
return _COUNT_CACHE[cache_key]
|
||||
match_expr = self._browser_fts_query(keywords)
|
||||
c = self._cp
|
||||
# FTS5 pseudo-column in WHERE is always the bare (unqualified) table name,
|
||||
# even when the table is accessed through an ATTACHed schema.
|
||||
row = self.conn.execute(
|
||||
f"SELECT count(*) FROM {c}recipe_browser_fts WHERE recipe_browser_fts MATCH ?",
|
||||
(match_expr,),
|
||||
).fetchone()
|
||||
count = row[0] if row else 0
|
||||
_COUNT_CACHE[cache_key] = count
|
||||
return count
|
||||
|
||||
def browse_recipes(
|
||||
self,
|
||||
keywords: list[str] | None,
|
||||
page: int,
|
||||
page_size: int,
|
||||
pantry_items: list[str] | None = None,
|
||||
q: str | None = None,
|
||||
sort: str = "default",
|
||||
) -> dict:
|
||||
"""Return a page of recipes matching the keyword set.
|
||||
|
||||
Pass keywords=None to browse all recipes without category filtering.
|
||||
Each recipe row includes match_pct (float | None) when pantry_items
|
||||
is provided. match_pct is the fraction of ingredient_names covered by
|
||||
the pantry set — computed deterministically, no LLM needed.
|
||||
|
||||
q: optional title substring filter (case-insensitive LIKE).
|
||||
sort: "default" (corpus order) | "alpha" (A→Z) | "alpha_desc" (Z→A).
|
||||
"""
|
||||
if keywords is not None and not keywords:
|
||||
return {"recipes": [], "total": 0, "page": page}
|
||||
|
||||
offset = (page - 1) * page_size
|
||||
c = self._cp
|
||||
|
||||
order_clause = {
|
||||
"alpha": "ORDER BY title ASC",
|
||||
"alpha_desc": "ORDER BY title DESC",
|
||||
}.get(sort, "ORDER BY id ASC")
|
||||
|
||||
q_param = f"%{q.strip()}%" if q and q.strip() else None
|
||||
cols = (
|
||||
f"SELECT id, title, category, keywords, ingredient_names,"
|
||||
f" calories, fat_g, protein_g, sodium_mg FROM {c}recipes"
|
||||
)
|
||||
|
||||
if keywords is None:
|
||||
if q_param:
|
||||
total = self.conn.execute(
|
||||
f"SELECT COUNT(*) FROM {c}recipes WHERE LOWER(title) LIKE LOWER(?)",
|
||||
(q_param,),
|
||||
).fetchone()[0]
|
||||
rows = self._fetch_all(
|
||||
f"{cols} WHERE LOWER(title) LIKE LOWER(?) {order_clause} LIMIT ? OFFSET ?",
|
||||
(q_param, page_size, offset),
|
||||
)
|
||||
else:
|
||||
total = self.conn.execute(f"SELECT COUNT(*) FROM {c}recipes").fetchone()[0]
|
||||
rows = self._fetch_all(
|
||||
f"{cols} {order_clause} LIMIT ? OFFSET ?",
|
||||
(page_size, offset),
|
||||
)
|
||||
else:
|
||||
match_expr = self._browser_fts_query(keywords)
|
||||
fts_sub = f"id IN (SELECT rowid FROM {c}recipe_browser_fts WHERE recipe_browser_fts MATCH ?)"
|
||||
if q_param:
|
||||
total = self.conn.execute(
|
||||
f"SELECT COUNT(*) FROM {c}recipes WHERE {fts_sub} AND LOWER(title) LIKE LOWER(?)",
|
||||
(match_expr, q_param),
|
||||
).fetchone()[0]
|
||||
rows = self._fetch_all(
|
||||
f"{cols} WHERE {fts_sub} AND LOWER(title) LIKE LOWER(?) {order_clause} LIMIT ? OFFSET ?",
|
||||
(match_expr, q_param, page_size, offset),
|
||||
)
|
||||
else:
|
||||
# Reuse cached count — avoids a second index scan on every page turn.
|
||||
total = self._count_recipes_for_keywords(keywords)
|
||||
rows = self._fetch_all(
|
||||
f"{cols} WHERE {fts_sub} {order_clause} LIMIT ? OFFSET ?",
|
||||
(match_expr, page_size, offset),
|
||||
)
|
||||
|
||||
pantry_set = {p.lower() for p in pantry_items} if pantry_items else None
|
||||
recipes = []
|
||||
for r in rows:
|
||||
entry = {
|
||||
"id": r["id"],
|
||||
"title": r["title"],
|
||||
"category": r["category"],
|
||||
"match_pct": None,
|
||||
}
|
||||
if pantry_set:
|
||||
names = r.get("ingredient_names") or []
|
||||
if names:
|
||||
matched = sum(
|
||||
1 for n in names if n.lower() in pantry_set
|
||||
)
|
||||
entry["match_pct"] = round(matched / len(names), 3)
|
||||
recipes.append(entry)
|
||||
|
||||
return {"recipes": recipes, "total": total, "page": page}
|
||||
|
||||
def log_browser_telemetry(
|
||||
self,
|
||||
domain: str,
|
||||
category: str,
|
||||
page: int,
|
||||
result_count: int,
|
||||
) -> None:
|
||||
self.conn.execute(
|
||||
"""
|
||||
INSERT INTO browser_telemetry (domain, category, page, result_count)
|
||||
VALUES (?, ?, ?, ?)
|
||||
""",
|
||||
(domain, category, page, result_count),
|
||||
)
|
||||
self.conn.commit()
|
||||
|
||||
# ── meal plans ────────────────────────────────────────────────────────
|
||||
|
||||
def create_meal_plan(self, week_start: str, meal_types: list[str]) -> dict:
|
||||
return self._insert_returning(
|
||||
"INSERT INTO meal_plans (week_start, meal_types) VALUES (?, ?) RETURNING *",
|
||||
(week_start, json.dumps(meal_types)),
|
||||
)
|
||||
|
||||
def get_meal_plan(self, plan_id: int) -> dict | None:
|
||||
return self._fetch_one("SELECT * FROM meal_plans WHERE id = ?", (plan_id,))
|
||||
|
||||
def update_meal_plan_types(self, plan_id: int, meal_types: list[str]) -> dict | None:
|
||||
return self._fetch_one(
|
||||
"UPDATE meal_plans SET meal_types = ? WHERE id = ? RETURNING *",
|
||||
(json.dumps(meal_types), plan_id),
|
||||
)
|
||||
|
||||
def list_meal_plans(self) -> list[dict]:
|
||||
return self._fetch_all("SELECT * FROM meal_plans ORDER BY week_start DESC")
|
||||
|
||||
def upsert_slot(
|
||||
self,
|
||||
plan_id: int,
|
||||
day_of_week: int,
|
||||
meal_type: str,
|
||||
recipe_id: int | None,
|
||||
servings: float,
|
||||
custom_label: str | None,
|
||||
) -> dict:
|
||||
return self._insert_returning(
|
||||
"""INSERT INTO meal_plan_slots
|
||||
(plan_id, day_of_week, meal_type, recipe_id, servings, custom_label)
|
||||
VALUES (?, ?, ?, ?, ?, ?)
|
||||
ON CONFLICT(plan_id, day_of_week, meal_type) DO UPDATE SET
|
||||
recipe_id = excluded.recipe_id,
|
||||
servings = excluded.servings,
|
||||
custom_label = excluded.custom_label
|
||||
RETURNING *""",
|
||||
(plan_id, day_of_week, meal_type, recipe_id, servings, custom_label),
|
||||
)
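# Usage sketch (illustrative; assumes `store` is a connected Store instance):
# the ON CONFLICT clause keys on (plan_id, day_of_week, meal_type), so calling
# upsert_slot twice for the same slot replaces the assignment rather than
# inserting a second row.
#
#   store.upsert_slot(plan_id=1, day_of_week=0, meal_type="dinner",
#                     recipe_id=42, servings=2.0, custom_label=None)
#   slot = store.upsert_slot(plan_id=1, day_of_week=0, meal_type="dinner",
#                            recipe_id=99, servings=4.0, custom_label=None)
#   # slot["recipe_id"] == 99 and Monday dinner is still a single row.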
|
||||
|
||||
def delete_slot(self, slot_id: int) -> None:
|
||||
self.conn.execute("DELETE FROM meal_plan_slots WHERE id = ?", (slot_id,))
|
||||
self.conn.commit()
|
||||
|
||||
def get_plan_slots(self, plan_id: int) -> list[dict]:
|
||||
c = self._cp
|
||||
return self._fetch_all(
|
||||
f"""SELECT s.*, r.title AS recipe_title
|
||||
FROM meal_plan_slots s
|
||||
LEFT JOIN {c}recipes r ON r.id = s.recipe_id
|
||||
WHERE s.plan_id = ?
|
||||
ORDER BY s.day_of_week, s.meal_type""",
|
||||
(plan_id,),
|
||||
)
|
||||
|
||||
def get_plan_recipes(self, plan_id: int) -> list[dict]:
|
||||
"""Return full recipe rows for all recipes assigned to a plan."""
|
||||
c = self._cp
|
||||
return self._fetch_all(
|
||||
f"""SELECT DISTINCT r.*
|
||||
FROM meal_plan_slots s
|
||||
JOIN {c}recipes r ON r.id = s.recipe_id
|
||||
WHERE s.plan_id = ? AND s.recipe_id IS NOT NULL""",
|
||||
(plan_id,),
|
||||
)
|
||||
|
||||
# ── prep sessions ─────────────────────────────────────────────────────
|
||||
|
||||
def create_prep_session(self, plan_id: int, scheduled_date: str) -> dict:
|
||||
return self._insert_returning(
|
||||
"INSERT INTO prep_sessions (plan_id, scheduled_date) VALUES (?, ?) RETURNING *",
|
||||
(plan_id, scheduled_date),
|
||||
)
|
||||
|
||||
def get_prep_session_for_plan(self, plan_id: int) -> dict | None:
|
||||
return self._fetch_one(
|
||||
"SELECT * FROM prep_sessions WHERE plan_id = ? ORDER BY id DESC LIMIT 1",
|
||||
(plan_id,),
|
||||
)
|
||||
|
||||
def bulk_insert_prep_tasks(self, session_id: int, tasks: list[dict]) -> list[dict]:
|
||||
"""Insert multiple prep tasks and return them all."""
|
||||
inserted = []
|
||||
for t in tasks:
|
||||
row = self._insert_returning(
|
||||
"""INSERT INTO prep_tasks
|
||||
(session_id, recipe_id, slot_id, task_label, duration_minutes,
|
||||
sequence_order, equipment, is_parallel, notes)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?) RETURNING *""",
|
||||
(
|
||||
session_id, t.get("recipe_id"), t.get("slot_id"),
|
||||
t["task_label"], t.get("duration_minutes"),
|
||||
t["sequence_order"], t.get("equipment"),
|
||||
int(t.get("is_parallel", False)), t.get("notes"),
|
||||
),
|
||||
)
|
||||
inserted.append(row)
|
||||
return inserted
|
||||
|
||||
def get_prep_tasks(self, session_id: int) -> list[dict]:
|
||||
return self._fetch_all(
|
||||
"SELECT * FROM prep_tasks WHERE session_id = ? ORDER BY sequence_order",
|
||||
(session_id,),
|
||||
)
|
||||
|
||||
def update_prep_task(self, task_id: int, **kwargs: object) -> dict | None:
|
||||
allowed = {"duration_minutes", "sequence_order", "notes", "equipment"}
|
||||
invalid = set(kwargs) - allowed # check raw kwargs BEFORE filtering
|
||||
if invalid:
|
||||
raise ValueError(f"Unexpected column(s) in update_prep_task: {invalid}")
|
||||
updates = {k: v for k, v in kwargs.items() if v is not None}
|
||||
if not updates:
|
||||
return self._fetch_one("SELECT * FROM prep_tasks WHERE id = ?", (task_id,))
|
||||
set_clause = ", ".join(f"{k} = ?" for k in updates)
|
||||
values = list(updates.values()) + [1, task_id]
|
||||
self.conn.execute(
|
||||
f"UPDATE prep_tasks SET {set_clause}, user_edited = ? WHERE id = ?",
|
||||
values,
|
||||
)
|
||||
self.conn.commit()
|
||||
return self._fetch_one("SELECT * FROM prep_tasks WHERE id = ?", (task_id,))
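# Example, assuming `store` is a connected Store instance: only the whitelisted
# columns are accepted, anything else raises before any SQL runs, and a successful
# update also sets user_edited = 1 (presumably so automated re-sequencing can skip
# rows the user has touched).
#
#   store.update_prep_task(7, duration_minutes=25, notes="start the rice first")
#   store.update_prep_task(7, task_label="nope")  # ValueError: unexpected column(s)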
|
||||
|
||||
# ── community ─────────────────────────────────────────────────────────
|
||||
|
||||
def get_current_pseudonym(self, directus_user_id: str) -> str | None:
|
||||
"""Return the current community pseudonym for this user, or None if not set."""
|
||||
cur = self.conn.execute(
|
||||
"SELECT pseudonym FROM community_pseudonyms "
|
||||
"WHERE directus_user_id = ? AND is_current = 1 LIMIT 1",
|
||||
(directus_user_id,),
|
||||
)
|
||||
row = cur.fetchone()
|
||||
return row["pseudonym"] if row else None
|
||||
|
||||
def set_pseudonym(self, directus_user_id: str, pseudonym: str) -> None:
|
||||
"""Set the current community pseudonym for this user.
|
||||
|
||||
Marks any previous pseudonym as non-current (retains history for attribution).
|
||||
"""
|
||||
self.conn.execute(
|
||||
"UPDATE community_pseudonyms SET is_current = 0 WHERE directus_user_id = ?",
|
||||
(directus_user_id,),
|
||||
)
|
||||
self.conn.execute(
|
||||
"INSERT INTO community_pseudonyms (pseudonym, directus_user_id, is_current) "
|
||||
"VALUES (?, ?, 1)",
|
||||
(pseudonym, directus_user_id),
|
||||
)
|
||||
self.conn.commit()
|
||||
|
||||
# ── Shopping list ─────────────────────────────────────────────────────────
|
||||
|
||||
def add_shopping_item(
|
||||
self,
|
||||
name: str,
|
||||
quantity: float | None = None,
|
||||
unit: str | None = None,
|
||||
category: str | None = None,
|
||||
notes: str | None = None,
|
||||
source: str = "manual",
|
||||
recipe_id: int | None = None,
|
||||
sort_order: int = 0,
|
||||
) -> dict:
|
||||
return self._insert_returning(
|
||||
"""INSERT INTO shopping_list_items
|
||||
(name, quantity, unit, category, notes, source, recipe_id, sort_order)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?) RETURNING *""",
|
||||
(name, quantity, unit, category, notes, source, recipe_id, sort_order),
|
||||
)
|
||||
|
||||
def list_shopping_items(self, include_checked: bool = True) -> list[dict]:
|
||||
where = "" if include_checked else "WHERE checked = 0"
|
||||
self.conn.row_factory = sqlite3.Row
|
||||
rows = self.conn.execute(
|
||||
f"SELECT * FROM shopping_list_items {where} ORDER BY checked, sort_order, id",
|
||||
).fetchall()
|
||||
return [self._row_to_dict(r) for r in rows]
|
||||
|
||||
def get_shopping_item(self, item_id: int) -> dict | None:
|
||||
self.conn.row_factory = sqlite3.Row
|
||||
row = self.conn.execute(
|
||||
"SELECT * FROM shopping_list_items WHERE id = ?", (item_id,)
|
||||
).fetchone()
|
||||
return self._row_to_dict(row) if row else None
|
||||
|
||||
def update_shopping_item(self, item_id: int, **kwargs) -> dict | None:
|
||||
allowed = {"name", "quantity", "unit", "category", "checked", "notes", "sort_order"}
|
||||
fields = {k: v for k, v in kwargs.items() if k in allowed and v is not None}
|
||||
if not fields:
|
||||
return self.get_shopping_item(item_id)
|
||||
if "checked" in fields:
|
||||
fields["checked"] = 1 if fields["checked"] else 0
|
||||
set_clause = ", ".join(f"{k} = ?" for k in fields)
|
||||
values = list(fields.values()) + [item_id]
|
||||
self.conn.execute(
|
||||
f"UPDATE shopping_list_items SET {set_clause}, updated_at = datetime('now') WHERE id = ?",
|
||||
values,
|
||||
)
|
||||
self.conn.commit()
|
||||
return self.get_shopping_item(item_id)
|
||||
|
||||
def delete_shopping_item(self, item_id: int) -> bool:
|
||||
cur = self.conn.execute(
|
||||
"DELETE FROM shopping_list_items WHERE id = ?", (item_id,)
|
||||
)
|
||||
self.conn.commit()
|
||||
return cur.rowcount > 0
|
||||
|
||||
def clear_checked_shopping_items(self) -> int:
|
||||
cur = self.conn.execute("DELETE FROM shopping_list_items WHERE checked = 1")
|
||||
self.conn.commit()
|
||||
return cur.rowcount
|
||||
|
||||
def clear_all_shopping_items(self) -> int:
|
||||
cur = self.conn.execute("DELETE FROM shopping_list_items")
|
||||
self.conn.commit()
|
||||
return cur.rowcount
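# End-to-end sketch of the shopping-list methods above; `store` and the values are
# illustrative only.
#
#   item = store.add_shopping_item("olive oil", quantity=1, unit="bottle", category="pantry")
#   store.update_shopping_item(item["id"], checked=True)   # mark as purchased
#   store.clear_checked_shopping_items()                    # sweep everything checked
#   remaining = store.list_shopping_items(include_checked=False)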
11  app/main.py
@@ -9,11 +9,7 @@ from fastapi.middleware.cors import CORSMiddleware
from app.api.routes import api_router
|
||||
from app.core.config import settings
|
||||
from app.services.meal_plan.affiliates import register_kiwi_programs
|
||||
|
||||
# Structured key=value log lines — grep/awk-friendly for log-based analytics.
|
||||
# Without basicConfig, app-level INFO logs are silently dropped.
|
||||
logging.basicConfig(level=logging.INFO, format="%(levelname)s:%(name)s: %(message)s")
|
||||
logger = logging.getLogger(__name__)
@@ -21,17 +17,12 @@ logger = logging.getLogger(__name__)
async def lifespan(app: FastAPI):
|
||||
logger.info("Starting Kiwi API...")
|
||||
settings.ensure_dirs()
|
||||
register_kiwi_programs()
|
||||
|
||||
# Start LLM background task scheduler
|
||||
from app.tasks.scheduler import get_scheduler
|
||||
get_scheduler(settings.DB_PATH)
|
||||
logger.info("Task scheduler started.")
|
||||
|
||||
# Initialize community store (no-op if COMMUNITY_DB_URL is not set)
|
||||
from app.api.endpoints.community import init_community_store
|
||||
init_community_store(settings.COMMUNITY_DB_URL)
|
||||
|
||||
yield
|
||||
|
||||
# Graceful scheduler shutdown
|
||||
@@ -44,7 +35,7 @@ async def lifespan(app: FastAPI):
app = FastAPI(
|
||||
title=settings.PROJECT_NAME,
|
||||
description="Pantry tracking + leftover recipe suggestions",
|
||||
version="0.2.0",
|
||||
version="0.1.0",
|
||||
lifespan=lifespan,
|
||||
)
@@ -1,47 +0,0 @@
"""Pydantic schemas for household management endpoints."""
|
||||
from __future__ import annotations
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
|
||||
class HouseholdCreateResponse(BaseModel):
|
||||
household_id: str
|
||||
message: str
|
||||
|
||||
|
||||
class HouseholdMember(BaseModel):
|
||||
user_id: str
|
||||
joined_at: str
|
||||
is_owner: bool
|
||||
|
||||
|
||||
class HouseholdStatusResponse(BaseModel):
|
||||
in_household: bool
|
||||
household_id: str | None = None
|
||||
is_owner: bool = False
|
||||
members: list[HouseholdMember] = Field(default_factory=list)
|
||||
max_seats: int = 4
|
||||
|
||||
|
||||
class HouseholdInviteResponse(BaseModel):
|
||||
invite_url: str
|
||||
token: str
|
||||
expires_at: str
|
||||
|
||||
|
||||
class HouseholdAcceptRequest(BaseModel):
|
||||
household_id: str
|
||||
token: str
|
||||
|
||||
|
||||
class HouseholdAcceptResponse(BaseModel):
|
||||
message: str
|
||||
household_id: str
|
||||
|
||||
|
||||
class HouseholdRemoveMemberRequest(BaseModel):
|
||||
user_id: str
|
||||
|
||||
|
||||
class MessageResponse(BaseModel):
|
||||
message: str
|
||||
@@ -89,20 +89,9 @@ class InventoryItemUpdate(BaseModel):
unit: Optional[str] = None
|
||||
location: Optional[str] = None
|
||||
sublocation: Optional[str] = None
|
||||
purchase_date: Optional[date] = None
|
||||
expiration_date: Optional[date] = None
|
||||
opened_date: Optional[date] = None
|
||||
status: Optional[str] = None
|
||||
notes: Optional[str] = None
|
||||
disposal_reason: Optional[str] = None
|
||||
|
||||
|
||||
class PartialConsumeRequest(BaseModel):
|
||||
quantity: float = Field(..., gt=0, description="Amount to consume from this item")
|
||||
|
||||
|
||||
class DiscardRequest(BaseModel):
|
||||
reason: Optional[str] = Field(None, max_length=200)
|
||||
|
||||
|
||||
class InventoryItemResponse(BaseModel):
|
||||
@@ -117,14 +106,8 @@ class InventoryItemResponse(BaseModel):
sublocation: Optional[str]
|
||||
purchase_date: Optional[str]
|
||||
expiration_date: Optional[str]
|
||||
opened_date: Optional[str] = None
|
||||
opened_expiry_date: Optional[str] = None
|
||||
secondary_state: Optional[str] = None
|
||||
secondary_uses: Optional[List[str]] = None
|
||||
secondary_warning: Optional[str] = None
|
||||
status: str
|
||||
notes: Optional[str]
|
||||
disposal_reason: Optional[str] = None
|
||||
source: str
|
||||
created_at: str
|
||||
updated_at: str
|
||||
@@ -140,7 +123,6 @@ class BarcodeScanResult(BaseModel):
product: Optional[ProductResponse]
|
||||
inventory_item: Optional[InventoryItemResponse]
|
||||
added_to_inventory: bool
|
||||
needs_manual_entry: bool = False
|
||||
message: str
|
||||
|
||||
|
||||
@@ -151,32 +133,6 @@ class BarcodeScanResponse(BaseModel):
message: str
|
||||
|
||||
|
||||
# ── Bulk add by name ─────────────────────────────────────────────────────────
|
||||
|
||||
class BulkAddItem(BaseModel):
|
||||
name: str = Field(..., min_length=1, max_length=200)
|
||||
quantity: float = Field(default=1.0, gt=0)
|
||||
unit: str = "count"
|
||||
location: str = "pantry"
|
||||
|
||||
|
||||
class BulkAddByNameRequest(BaseModel):
|
||||
items: List[BulkAddItem] = Field(..., min_length=1)
|
||||
|
||||
|
||||
class BulkAddItemResult(BaseModel):
|
||||
name: str
|
||||
ok: bool
|
||||
item_id: Optional[int] = None
|
||||
error: Optional[str] = None
|
||||
|
||||
|
||||
class BulkAddByNameResponse(BaseModel):
|
||||
added: int
|
||||
failed: int
|
||||
results: List[BulkAddItemResult]
|
||||
|
||||
|
||||
# ── Stats ─────────────────────────────────────────────────────────────────────
|
||||
|
||||
class InventoryStats(BaseModel):
@@ -1,100 +0,0 @@
# app/models/schemas/meal_plan.py
|
||||
"""Pydantic schemas for meal planning endpoints."""
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import date as _date
|
||||
|
||||
from pydantic import BaseModel, Field, field_validator
|
||||
|
||||
|
||||
VALID_MEAL_TYPES = {"breakfast", "lunch", "dinner", "snack"}
|
||||
|
||||
|
||||
class CreatePlanRequest(BaseModel):
|
||||
week_start: _date
|
||||
meal_types: list[str] = Field(default_factory=lambda: ["dinner"])
|
||||
|
||||
@field_validator("week_start")
|
||||
@classmethod
|
||||
def must_be_monday(cls, v: _date) -> _date:
|
||||
if v.weekday() != 0:
|
||||
raise ValueError("week_start must be a Monday (weekday 0)")
|
||||
return v
|
||||
|
||||
|
||||
class UpdatePlanRequest(BaseModel):
|
||||
meal_types: list[str]
|
||||
|
||||
|
||||
class UpsertSlotRequest(BaseModel):
|
||||
recipe_id: int | None = None
|
||||
servings: float = Field(2.0, gt=0)
|
||||
custom_label: str | None = None
|
||||
|
||||
|
||||
class SlotSummary(BaseModel):
|
||||
id: int
|
||||
plan_id: int
|
||||
day_of_week: int
|
||||
meal_type: str
|
||||
recipe_id: int | None
|
||||
recipe_title: str | None
|
||||
servings: float
|
||||
custom_label: str | None
|
||||
|
||||
|
||||
class PlanSummary(BaseModel):
|
||||
id: int
|
||||
week_start: str
|
||||
meal_types: list[str]
|
||||
slots: list[SlotSummary]
|
||||
created_at: str
|
||||
|
||||
|
||||
class RetailerLink(BaseModel):
|
||||
retailer: str
|
||||
label: str
|
||||
url: str
|
||||
|
||||
|
||||
class GapItem(BaseModel):
|
||||
ingredient_name: str
|
||||
needed_raw: str | None # e.g. "2 cups" from recipe text
|
||||
have_quantity: float | None # from pantry
|
||||
have_unit: str | None
|
||||
covered: bool # True = pantry has it
|
||||
retailer_links: list[RetailerLink] = Field(default_factory=list)
|
||||
|
||||
|
||||
class ShoppingListResponse(BaseModel):
|
||||
plan_id: int
|
||||
gap_items: list[GapItem]
|
||||
covered_items: list[GapItem]
|
||||
disclosure: str | None = None # affiliate disclosure text when links present
|
||||
|
||||
|
||||
class PrepTaskSummary(BaseModel):
|
||||
id: int
|
||||
recipe_id: int | None
|
||||
task_label: str
|
||||
duration_minutes: int | None
|
||||
sequence_order: int
|
||||
equipment: str | None
|
||||
is_parallel: bool
|
||||
notes: str | None
|
||||
user_edited: bool
|
||||
|
||||
|
||||
class PrepSessionSummary(BaseModel):
|
||||
id: int
|
||||
plan_id: int
|
||||
scheduled_date: str
|
||||
status: str
|
||||
tasks: list[PrepTaskSummary]
|
||||
|
||||
|
||||
class UpdatePrepTaskRequest(BaseModel):
|
||||
duration_minutes: int | None = None
|
||||
sequence_order: int | None = None
|
||||
notes: str | None = None
|
||||
equipment: str | None = None
|
||||
@@ -32,7 +32,6 @@ class RecipeSuggestion(BaseModel):
match_count: int
|
||||
element_coverage: dict[str, float] = Field(default_factory=dict)
|
||||
swap_candidates: list[SwapCandidate] = Field(default_factory=list)
|
||||
matched_ingredients: list[str] = Field(default_factory=list)
|
||||
missing_ingredients: list[str] = Field(default_factory=list)
|
||||
directions: list[str] = Field(default_factory=list)
|
||||
prep_notes: list[str] = Field(default_factory=list)
|
||||
@@ -40,9 +39,6 @@ class RecipeSuggestion(BaseModel):
level: int = 1
|
||||
is_wildcard: bool = False
|
||||
nutrition: NutritionPanel | None = None
|
||||
source_url: str | None = None
|
||||
complexity: str | None = None # 'easy' | 'moderate' | 'involved'
|
||||
estimated_time_min: int | None = None # derived from step count + method signals
|
||||
|
||||
|
||||
class GroceryLink(BaseModel):
|
||||
@@ -58,19 +54,6 @@ class RecipeResult(BaseModel):
grocery_links: list[GroceryLink] = Field(default_factory=list)
|
||||
rate_limited: bool = False
|
||||
rate_limit_count: int = 0
|
||||
orch_fallback: bool = False # True when orch budget exhausted; fell back to local LLM
|
||||
|
||||
|
||||
class RecipeJobQueued(BaseModel):
|
||||
job_id: str
|
||||
status: str = "queued"
|
||||
|
||||
|
||||
class RecipeJobStatus(BaseModel):
|
||||
job_id: str
|
||||
status: str
|
||||
result: RecipeResult | None = None
|
||||
error: str | None = None
|
||||
|
||||
|
||||
class NutritionFilters(BaseModel):
|
||||
@@ -83,10 +66,6 @@ class NutritionFilters(BaseModel):
|
||||
class RecipeRequest(BaseModel):
|
||||
pantry_items: list[str]
|
||||
# Maps product name → secondary state label for items past nominal expiry
|
||||
# but still within their secondary use window (e.g. {"Bread": "stale"}).
|
||||
# Used by the recipe engine to boost recipes suited to those specific states.
|
||||
secondary_pantry_items: dict[str, str] = Field(default_factory=dict)
|
||||
level: int = Field(default=1, ge=1, le=4)
|
||||
constraints: list[str] = Field(default_factory=list)
|
||||
expiry_first: bool = False
|
||||
@@ -100,53 +79,3 @@
allergies: list[str] = Field(default_factory=list)
|
||||
nutrition_filters: NutritionFilters = Field(default_factory=NutritionFilters)
|
||||
excluded_ids: list[int] = Field(default_factory=list)
|
||||
shopping_mode: bool = False
|
||||
pantry_match_only: bool = False # when True, only return recipes with zero missing ingredients
|
||||
complexity_filter: str | None = None # 'easy' | 'moderate' | 'involved' — None = any
|
||||
max_time_min: int | None = None # filter by estimated cooking time ceiling
|
||||
unit_system: str = "metric" # "metric" | "imperial"
|
||||
|
||||
|
||||
# ── Build Your Own schemas ──────────────────────────────────────────────────
|
||||
|
||||
|
||||
class AssemblyRoleOut(BaseModel):
|
||||
"""One role slot in a template, as returned by GET /api/recipes/templates."""
|
||||
|
||||
display: str
|
||||
required: bool
|
||||
keywords: list[str]
|
||||
hint: str = ""
|
||||
|
||||
|
||||
class AssemblyTemplateOut(BaseModel):
|
||||
"""One assembly template, as returned by GET /api/recipes/templates."""
|
||||
|
||||
id: str # slug, e.g. "burrito_taco"
|
||||
title: str
|
||||
icon: str
|
||||
descriptor: str
|
||||
role_sequence: list[AssemblyRoleOut]
|
||||
|
||||
|
||||
class RoleCandidateItem(BaseModel):
|
||||
"""One candidate ingredient for a wizard picker step."""
|
||||
|
||||
name: str
|
||||
in_pantry: bool
|
||||
tags: list[str] = Field(default_factory=list)
|
||||
|
||||
|
||||
class RoleCandidatesResponse(BaseModel):
|
||||
"""Response from GET /api/recipes/template-candidates."""
|
||||
|
||||
compatible: list[RoleCandidateItem] = Field(default_factory=list)
|
||||
other: list[RoleCandidateItem] = Field(default_factory=list)
|
||||
available_tags: list[str] = Field(default_factory=list)
|
||||
|
||||
|
||||
class BuildRequest(BaseModel):
|
||||
"""Request body for POST /api/recipes/build."""
|
||||
|
||||
template_id: str
|
||||
role_overrides: dict[str, str] = Field(default_factory=dict)
|
||||
@@ -1,44 +0,0 @@
"""Pydantic schemas for saved recipes and collections."""
|
||||
from __future__ import annotations
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
|
||||
class SaveRecipeRequest(BaseModel):
|
||||
recipe_id: int
|
||||
notes: str | None = None
|
||||
rating: int | None = Field(None, ge=0, le=5)
|
||||
|
||||
|
||||
class UpdateSavedRecipeRequest(BaseModel):
|
||||
notes: str | None = None
|
||||
rating: int | None = Field(None, ge=0, le=5)
|
||||
style_tags: list[str] = Field(default_factory=list)
|
||||
|
||||
|
||||
class SavedRecipeSummary(BaseModel):
|
||||
id: int
|
||||
recipe_id: int
|
||||
title: str
|
||||
saved_at: str
|
||||
notes: str | None
|
||||
rating: int | None
|
||||
style_tags: list[str]
|
||||
collection_ids: list[int] = Field(default_factory=list)
|
||||
|
||||
|
||||
class CollectionSummary(BaseModel):
|
||||
id: int
|
||||
name: str
|
||||
description: str | None
|
||||
member_count: int
|
||||
created_at: str
|
||||
|
||||
|
||||
class CollectionRequest(BaseModel):
|
||||
name: str
|
||||
description: str | None = None
|
||||
|
||||
|
||||
class CollectionMemberRequest(BaseModel):
|
||||
saved_recipe_id: int
|
||||
@@ -1,60 +0,0 @@
"""Pydantic schemas for the shopping list endpoints."""
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Optional
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
|
||||
class ShoppingItemCreate(BaseModel):
|
||||
name: str = Field(..., min_length=1, max_length=200)
|
||||
quantity: Optional[float] = None
|
||||
unit: Optional[str] = None
|
||||
category: Optional[str] = None
|
||||
notes: Optional[str] = None
|
||||
source: str = "manual"
|
||||
recipe_id: Optional[int] = None
|
||||
sort_order: int = 0
|
||||
|
||||
|
||||
class ShoppingItemUpdate(BaseModel):
|
||||
name: Optional[str] = Field(None, min_length=1, max_length=200)
|
||||
quantity: Optional[float] = None
|
||||
unit: Optional[str] = None
|
||||
category: Optional[str] = None
|
||||
checked: Optional[bool] = None
|
||||
notes: Optional[str] = None
|
||||
sort_order: Optional[int] = None
|
||||
|
||||
|
||||
class GroceryLinkOut(BaseModel):
|
||||
ingredient: str
|
||||
retailer: str
|
||||
url: str
|
||||
|
||||
|
||||
class ShoppingItemResponse(BaseModel):
|
||||
id: int
|
||||
name: str
|
||||
quantity: Optional[float]
|
||||
unit: Optional[str]
|
||||
category: Optional[str]
|
||||
checked: bool
|
||||
notes: Optional[str]
|
||||
source: str
|
||||
recipe_id: Optional[int]
|
||||
sort_order: int
|
||||
created_at: str
|
||||
updated_at: str
|
||||
grocery_links: list[GroceryLinkOut] = []
|
||||
|
||||
|
||||
class BulkAddFromRecipeRequest(BaseModel):
|
||||
recipe_id: int
|
||||
include_covered: bool = False # if True, add pantry-covered items too
|
||||
|
||||
|
||||
class ConfirmPurchaseRequest(BaseModel):
|
||||
"""Move a checked item into pantry inventory."""
|
||||
location: str = "pantry"
|
||||
quantity: Optional[float] = None # override the list quantity
|
||||
unit: Optional[str] = None
|
||||
|
@@ -1,44 +0,0 @@
# app/services/community/ap_compat.py
|
||||
# MIT License — AP scaffold only (no actor, inbox, outbox)
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime, timezone
|
||||
|
||||
|
||||
def post_to_ap_json_ld(post: dict, base_url: str) -> dict:
|
||||
"""Serialize a community post dict to an ActivityPub-compatible JSON-LD Note.
|
||||
|
||||
This is a read-only scaffold. No AP actor, inbox, or outbox.
|
||||
The slug URI is stable so a future full AP implementation can reuse posts
|
||||
without a DB migration.
|
||||
"""
|
||||
slug = post["slug"]
|
||||
published = post.get("published")
|
||||
if isinstance(published, datetime):
|
||||
published_str = published.astimezone(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
|
||||
else:
|
||||
published_str = str(published)
|
||||
|
||||
dietary_tags: list[str] = post.get("dietary_tags") or []
|
||||
tags = [{"type": "Hashtag", "name": "#kiwi"}]
|
||||
for tag in dietary_tags:
|
||||
tags.append({"type": "Hashtag", "name": f"#{tag.replace('-', '').replace(' ', '')}"})
|
||||
|
||||
return {
|
||||
"@context": "https://www.w3.org/ns/activitystreams",
|
||||
"type": "Note",
|
||||
"id": f"{base_url}/api/v1/community/posts/{slug}",
|
||||
"attributedTo": post.get("pseudonym", "anonymous"),
|
||||
"content": _build_content(post),
|
||||
"published": published_str,
|
||||
"tag": tags,
|
||||
}
|
||||
|
||||
|
||||
def _build_content(post: dict) -> str:
|
||||
title = post.get("title") or "Untitled"
|
||||
desc = post.get("description")
|
||||
if desc:
|
||||
return f"{title} — {desc}"
|
||||
return title
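# Rough shape of the Note produced above for a hypothetical post dict (all values
# are made up for illustration):
#
#   post = {"slug": "sunday-batch-cook", "title": "Sunday batch cook",
#           "description": "Five dinners from one pantry run", "pseudonym": "quietchef",
#           "published": datetime(2025, 1, 5, tzinfo=timezone.utc),
#           "dietary_tags": ["vegetarian"]}
#   post_to_ap_json_ld(post, "https://kiwi.example.org")
#   # {"@context": "https://www.w3.org/ns/activitystreams", "type": "Note",
#   #  "id": "https://kiwi.example.org/api/v1/community/posts/sunday-batch-cook",
#   #  "attributedTo": "quietchef",
#   #  "content": "Sunday batch cook — Five dinners from one pantry run",
#   #  "published": "2025-01-05T00:00:00Z",
#   #  "tag": [{"type": "Hashtag", "name": "#kiwi"},
#   #          {"type": "Hashtag", "name": "#vegetarian"}]}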
|
||||
@@ -1,90 +0,0 @@
# app/services/community/community_store.py
|
||||
# MIT License
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
|
||||
from circuitforge_core.community import CommunityPost, SharedStore
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class KiwiCommunityStore(SharedStore):
|
||||
"""Kiwi-specific community store: adds kiwi-domain query methods on top of SharedStore."""
|
||||
|
||||
def list_meal_plans(
|
||||
self,
|
||||
limit: int = 20,
|
||||
offset: int = 0,
|
||||
dietary_tags: list[str] | None = None,
|
||||
allergen_exclude: list[str] | None = None,
|
||||
) -> list[CommunityPost]:
|
||||
return self.list_posts(
|
||||
limit=limit,
|
||||
offset=offset,
|
||||
post_type="plan",
|
||||
dietary_tags=dietary_tags,
|
||||
allergen_exclude=allergen_exclude,
|
||||
source_product="kiwi",
|
||||
)
|
||||
|
||||
def list_outcomes(
|
||||
self,
|
||||
limit: int = 20,
|
||||
offset: int = 0,
|
||||
post_type: str | None = None,
|
||||
) -> list[CommunityPost]:
|
||||
if post_type in ("recipe_success", "recipe_blooper"):
|
||||
return self.list_posts(
|
||||
limit=limit,
|
||||
offset=offset,
|
||||
post_type=post_type,
|
||||
source_product="kiwi",
|
||||
)
|
||||
success = self.list_posts(
|
||||
limit=limit,
|
||||
offset=0,
|
||||
post_type="recipe_success",
|
||||
source_product="kiwi",
|
||||
)
|
||||
bloopers = self.list_posts(
|
||||
limit=limit,
|
||||
offset=0,
|
||||
post_type="recipe_blooper",
|
||||
source_product="kiwi",
|
||||
)
|
||||
merged = sorted(success + bloopers, key=lambda p: p.published, reverse=True)
|
||||
return merged[:limit]
|
||||
|
||||
|
||||
def get_or_create_pseudonym(
|
||||
store,
|
||||
directus_user_id: str,
|
||||
requested_name: str | None,
|
||||
) -> str:
|
||||
"""Return the user's current pseudonym, creating it if it doesn't exist.
|
||||
|
||||
If the user has an existing pseudonym, return it (ignore requested_name).
|
||||
If not, create using requested_name (must be provided for first-time setup).
|
||||
|
||||
Raises ValueError if no existing pseudonym and requested_name is None or blank.
|
||||
"""
|
||||
existing = store.get_current_pseudonym(directus_user_id)
|
||||
if existing:
|
||||
return existing
|
||||
|
||||
if not requested_name or not requested_name.strip():
|
||||
raise ValueError(
|
||||
"A pseudonym is required for first publish. "
|
||||
"Pass requested_name with the user's chosen display name."
|
||||
)
|
||||
|
||||
name = requested_name.strip()
|
||||
if "@" in name:
|
||||
raise ValueError(
|
||||
"Pseudonym must not contain '@' — use a display name, not an email address."
|
||||
)
|
||||
|
||||
store.set_pseudonym(directus_user_id, name)
|
||||
return name
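# Call pattern, assuming `store` is a Store exposing the pseudonym methods shown
# earlier in this diff:
#
#   get_or_create_pseudonym(store, "user-abc", "quietchef")        # creates "quietchef"
#   get_or_create_pseudonym(store, "user-abc", None)               # returns "quietchef"
#   get_or_create_pseudonym(store, "user-new", "me@example.com")   # ValueError: '@' rejected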
|
||||
@@ -1,138 +0,0 @@
# app/services/community/element_snapshot.py
|
||||
# MIT License
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
|
||||
# Ingredient name substrings → allergen flag
|
||||
_ALLERGEN_MAP: dict[str, str] = {
|
||||
"milk": "dairy", "cream": "dairy", "cheese": "dairy", "butter": "dairy",
|
||||
"yogurt": "dairy", "whey": "dairy",
|
||||
"egg": "eggs",
|
||||
"wheat": "gluten", "pasta": "gluten", "flour": "gluten", "bread": "gluten",
|
||||
"barley": "gluten", "rye": "gluten",
|
||||
"peanut": "nuts", "almond": "nuts", "cashew": "nuts", "walnut": "nuts",
|
||||
"pecan": "nuts", "hazelnut": "nuts", "pistachio": "nuts", "macadamia": "nuts",
|
||||
"soy": "soy", "tofu": "soy", "edamame": "soy", "miso": "soy", "tempeh": "soy",
|
||||
"shrimp": "shellfish", "crab": "shellfish", "lobster": "shellfish",
|
||||
"clam": "shellfish", "mussel": "shellfish", "scallop": "shellfish",
|
||||
"fish": "fish", "salmon": "fish", "tuna": "fish", "cod": "fish",
|
||||
"tilapia": "fish", "halibut": "fish",
|
||||
"sesame": "sesame",
|
||||
}
|
||||
|
||||
_MEAT_KEYWORDS = frozenset([
|
||||
"chicken", "beef", "pork", "lamb", "turkey", "bacon", "ham", "sausage",
|
||||
"salami", "prosciutto", "guanciale", "pancetta", "steak", "ground meat",
|
||||
"mince", "veal", "duck", "venison", "bison", "lard",
|
||||
])
|
||||
_SEAFOOD_KEYWORDS = frozenset([
|
||||
"fish", "shrimp", "crab", "lobster", "tuna", "salmon", "clam", "mussel",
|
||||
"scallop", "anchovy", "sardine", "cod", "tilapia",
|
||||
])
|
||||
_ANIMAL_PRODUCT_KEYWORDS = frozenset([
|
||||
"milk", "cream", "cheese", "butter", "egg", "honey", "yogurt", "whey",
|
||||
])
|
||||
|
||||
|
||||
def _detect_allergens(ingredient_names: list[str]) -> list[str]:
|
||||
found: set[str] = set()
|
||||
lowered = [n.lower() for n in ingredient_names]
|
||||
for ingredient in lowered:
|
||||
for keyword, flag in _ALLERGEN_MAP.items():
|
||||
if keyword in ingredient:
|
||||
found.add(flag)
|
||||
return sorted(found)
|
||||
|
||||
|
||||
def _detect_dietary_tags(ingredient_names: list[str]) -> list[str]:
|
||||
lowered = [n.lower() for n in ingredient_names]
|
||||
all_text = " ".join(lowered)
|
||||
|
||||
has_meat = any(k in all_text for k in _MEAT_KEYWORDS)
|
||||
has_seafood = any(k in all_text for k in _SEAFOOD_KEYWORDS)
|
||||
has_animal_products = any(k in all_text for k in _ANIMAL_PRODUCT_KEYWORDS)
|
||||
|
||||
tags: list[str] = []
|
||||
if not has_meat and not has_seafood:
|
||||
tags.append("vegetarian")
|
||||
if not has_meat and not has_seafood and not has_animal_products:
|
||||
tags.append("vegan")
|
||||
return tags
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
class ElementSnapshot:
|
||||
seasoning_score: float
|
||||
richness_score: float
|
||||
brightness_score: float
|
||||
depth_score: float
|
||||
aroma_score: float
|
||||
structure_score: float
|
||||
texture_profile: str
|
||||
dietary_tags: tuple
|
||||
allergen_flags: tuple
|
||||
flavor_molecules: tuple
|
||||
fat_pct: float | None
|
||||
protein_pct: float | None
|
||||
moisture_pct: float | None
|
||||
|
||||
|
||||
def compute_snapshot(recipe_ids: list[int], store) -> ElementSnapshot:
|
||||
"""Compute an element snapshot from a list of recipe IDs.
|
||||
|
||||
Pulls SFAH scores, ingredient lists, and USDA FDC macros from the corpus.
|
||||
Averages numeric scores across all recipes. Unions allergen flags and dietary tags.
|
||||
Call at publish time only — snapshot is stored denormalized in community_posts.
|
||||
"""
|
||||
if not recipe_ids:
|
||||
return ElementSnapshot(
|
||||
seasoning_score=0.0, richness_score=0.0, brightness_score=0.0,
|
||||
depth_score=0.0, aroma_score=0.0, structure_score=0.0,
|
||||
texture_profile="", dietary_tags=(), allergen_flags=(),
|
||||
flavor_molecules=(), fat_pct=None, protein_pct=None, moisture_pct=None,
|
||||
)
|
||||
|
||||
rows = store.get_recipes_by_ids(recipe_ids)
|
||||
if not rows:
|
||||
return ElementSnapshot(
|
||||
seasoning_score=0.0, richness_score=0.0, brightness_score=0.0,
|
||||
depth_score=0.0, aroma_score=0.0, structure_score=0.0,
|
||||
texture_profile="", dietary_tags=(), allergen_flags=(),
|
||||
flavor_molecules=(), fat_pct=None, protein_pct=None, moisture_pct=None,
|
||||
)
|
||||
|
||||
def _avg(field: str) -> float:
|
||||
vals = [r.get(field) or 0.0 for r in rows]
|
||||
return sum(vals) / len(vals)
|
||||
|
||||
all_ingredients: list[str] = []
|
||||
for r in rows:
|
||||
names = r.get("ingredient_names") or []
|
||||
all_ingredients.extend(names if isinstance(names, list) else [])
|
||||
|
||||
allergens = _detect_allergens(all_ingredients)
|
||||
dietary = _detect_dietary_tags(all_ingredients)
|
||||
|
||||
texture = rows[0].get("texture_profile") or ""
|
||||
|
||||
fat_vals = [r.get("fat") for r in rows if r.get("fat") is not None]
|
||||
prot_vals = [r.get("protein") for r in rows if r.get("protein") is not None]
|
||||
moist_vals = [r.get("moisture") for r in rows if r.get("moisture") is not None]
|
||||
|
||||
return ElementSnapshot(
|
||||
seasoning_score=_avg("seasoning_score"),
|
||||
richness_score=_avg("richness_score"),
|
||||
brightness_score=_avg("brightness_score"),
|
||||
depth_score=_avg("depth_score"),
|
||||
aroma_score=_avg("aroma_score"),
|
||||
structure_score=_avg("structure_score"),
|
||||
texture_profile=texture,
|
||||
dietary_tags=tuple(dietary),
|
||||
allergen_flags=tuple(allergens),
|
||||
flavor_molecules=(),
|
||||
fat_pct=(sum(fat_vals) / len(fat_vals)) if fat_vals else None,
|
||||
protein_pct=(sum(prot_vals) / len(prot_vals)) if prot_vals else None,
|
||||
moisture_pct=(sum(moist_vals) / len(moist_vals)) if moist_vals else None,
|
||||
)
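# Sketch of a call with a stubbed store (illustrative; the real store returns corpus
# rows carrying SFAH scores, ingredient_names, and FDC macros):
#
#   class _StubStore:
#       def get_recipes_by_ids(self, ids):
#           return [{"seasoning_score": 0.6, "richness_score": 0.4, "brightness_score": 0.7,
#                    "depth_score": 0.5, "aroma_score": 0.3, "structure_score": 0.2,
#                    "texture_profile": "crisp",
#                    "ingredient_names": ["chickpeas", "lemon", "yogurt"],
#                    "fat": 12.0, "protein": 9.0, "moisture": 60.0}]
#
#   snap = compute_snapshot([101], _StubStore())
#   # snap.allergen_flags == ("dairy",) because of the yogurt;
#   # snap.dietary_tags == ("vegetarian",) since yogurt blocks the vegan tag.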
|
||||
@@ -1,43 +0,0 @@
# app/services/community/feed.py
|
||||
# MIT License
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime, timezone
|
||||
from email.utils import format_datetime
|
||||
from xml.etree.ElementTree import Element, SubElement, tostring
|
||||
|
||||
|
||||
def posts_to_rss(posts: list[dict], base_url: str) -> str:
|
||||
"""Generate an RSS 2.0 feed from a list of community post dicts.
|
||||
|
||||
base_url: the root URL of this Kiwi instance (no trailing slash).
|
||||
Returns UTF-8 XML string.
|
||||
"""
|
||||
rss = Element("rss", version="2.0")
|
||||
channel = SubElement(rss, "channel")
|
||||
|
||||
_sub(channel, "title", "Kiwi Community Feed")
|
||||
_sub(channel, "link", f"{base_url}/community")
|
||||
_sub(channel, "description", "Meal plans and recipe outcomes from the Kiwi community")
|
||||
_sub(channel, "language", "en")
|
||||
_sub(channel, "lastBuildDate", format_datetime(datetime.now(timezone.utc)))
|
||||
|
||||
for post in posts:
|
||||
item = SubElement(channel, "item")
|
||||
_sub(item, "title", post.get("title") or "Untitled")
|
||||
_sub(item, "link", f"{base_url}/api/v1/community/posts/{post['slug']}")
|
||||
_sub(item, "guid", f"{base_url}/api/v1/community/posts/{post['slug']}")
|
||||
if post.get("description"):
|
||||
_sub(item, "description", post["description"])
|
||||
published = post.get("published")
|
||||
if isinstance(published, datetime):
|
||||
_sub(item, "pubDate", format_datetime(published))
|
||||
|
||||
return '<?xml version="1.0" encoding="UTF-8"?>\n' + tostring(rss, encoding="unicode")
|
||||
|
||||
|
||||
def _sub(parent: Element, tag: str, text: str) -> Element:
|
||||
el = SubElement(parent, tag)
|
||||
el.text = text
|
||||
return el
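# Example, assuming `posts` is a list of post dicts shaped like the ones ap_compat
# consumes:
#
#   xml = posts_to_rss(posts, base_url="https://kiwi.example.org")
#   # xml starts with '<?xml version="1.0" encoding="UTF-8"?>' and contains one <item>
#   # per post, each linking to /api/v1/community/posts/<slug>.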
|
||||
@@ -1,72 +0,0 @@
# app/services/community/mdns.py
|
||||
# MIT License
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import socket
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Import deferred to avoid hard failure when zeroconf is not installed
|
||||
try:
|
||||
from zeroconf import ServiceInfo, Zeroconf
|
||||
_ZEROCONF_AVAILABLE = True
|
||||
except ImportError:
|
||||
_ZEROCONF_AVAILABLE = False
|
||||
|
||||
|
||||
class KiwiMDNS:
|
||||
"""Advertise this Kiwi instance on the LAN via mDNS (_kiwi._tcp.local).
|
||||
|
||||
Defaults to disabled (enabled=False). User must explicitly opt in via the
|
||||
Settings page. This matches the CF a11y requirement: no surprise broadcasting.
|
||||
|
||||
Usage:
|
||||
mdns = KiwiMDNS(enabled=settings.MDNS_ENABLED, port=settings.PORT,
|
||||
feed_url=f"http://{hostname}:{settings.PORT}/api/v1/community/local-feed")
|
||||
mdns.start() # in lifespan startup
|
||||
mdns.stop() # in lifespan shutdown
|
||||
"""
|
||||
|
||||
SERVICE_TYPE = "_kiwi._tcp.local."
|
||||
|
||||
def __init__(self, enabled: bool, port: int, feed_url: str) -> None:
|
||||
self._enabled = enabled
|
||||
self._port = port
|
||||
self._feed_url = feed_url
|
||||
self._zc: "Zeroconf | None" = None
|
||||
self._info: "ServiceInfo | None" = None
|
||||
|
||||
def start(self) -> None:
|
||||
if not self._enabled:
|
||||
logger.debug("mDNS advertisement disabled (user has not opted in)")
|
||||
return
|
||||
if not _ZEROCONF_AVAILABLE:
|
||||
logger.warning("zeroconf package not installed — mDNS advertisement unavailable")
|
||||
return
|
||||
|
||||
hostname = socket.gethostname()
|
||||
service_name = f"kiwi-{hostname}.{self.SERVICE_TYPE}"
|
||||
self._info = ServiceInfo(
|
||||
type_=self.SERVICE_TYPE,
|
||||
name=service_name,
|
||||
port=self._port,
|
||||
properties={
|
||||
b"feed_url": self._feed_url.encode(),
|
||||
b"version": b"1",
|
||||
},
|
||||
addresses=[socket.inet_aton("127.0.0.1")],
|
||||
)
|
||||
self._zc = Zeroconf()
|
||||
self._zc.register_service(self._info)
|
||||
logger.info("mDNS: advertising %s on port %d", service_name, self._port)
|
||||
|
||||
def stop(self) -> None:
|
||||
if self._zc is None or self._info is None:
|
||||
return
|
||||
self._zc.unregister_service(self._info)
|
||||
self._zc.close()
|
||||
self._zc = None
|
||||
self._info = None
|
||||
logger.info("mDNS: advertisement stopped")
|
||||
@@ -116,140 +116,6 @@ class ExpirationPredictor:
'prepared_foods': {'fridge': 4, 'freezer': 90},
|
||||
}
|
||||
|
||||
# Secondary shelf life in days after a package is opened.
|
||||
# Sources: USDA FoodKeeper app, FDA consumer guides.
|
||||
# Only categories where opening significantly shortens shelf life are listed.
|
||||
# Items not listed default to None (no secondary window tracked).
|
||||
SHELF_LIFE_AFTER_OPENING: dict[str, int] = {
|
||||
# Dairy — once opened, clock ticks fast
|
||||
'dairy': 5,
|
||||
'milk': 5,
|
||||
'cream': 3,
|
||||
'yogurt': 7,
|
||||
'cheese': 14,
|
||||
'butter': 30,
|
||||
# Condiments — refrigerated after opening
|
||||
'condiments': 30,
|
||||
'ketchup': 30,
|
||||
'mustard': 30,
|
||||
'mayo': 14,
|
||||
'salad_dressing': 30,
|
||||
'soy_sauce': 90,
|
||||
# Canned goods — once opened, very short
|
||||
'canned_goods': 4,
|
||||
# Beverages
|
||||
'juice': 7,
|
||||
'soda': 4,
|
||||
# Bread / Bakery
|
||||
'bread': 5,
|
||||
'bakery': 3,
|
||||
# Produce
|
||||
'leafy_greens': 3,
|
||||
'berries': 3,
|
||||
# Pantry staples (open bag)
|
||||
'chips': 14,
|
||||
'cookies': 14,
|
||||
'cereal': 30,
|
||||
'flour': 90,
|
||||
}
|
||||
|
||||
# Post-expiry secondary use window.
|
||||
# These are NOT spoilage extensions — they describe a qualitative state
|
||||
# change where the ingredient is specifically suited for certain preparations.
|
||||
# Sources: USDA FoodKeeper, food science, culinary tradition.
|
||||
SECONDARY_WINDOW: dict[str, dict] = {
|
||||
'bread': {
|
||||
'window_days': 5,
|
||||
'label': 'stale',
|
||||
'uses': ['croutons', 'stuffing', 'bread pudding', 'French toast', 'panzanella'],
|
||||
'warning': 'Check for mold before use — discard if any is visible.',
|
||||
},
|
||||
'bakery': {
|
||||
'window_days': 3,
|
||||
'label': 'day-old',
|
||||
'uses': ['French toast', 'bread pudding', 'crumbles'],
|
||||
'warning': 'Check for mold before use — discard if any is visible.',
|
||||
},
|
||||
'bananas': {
|
||||
'window_days': 5,
|
||||
'label': 'overripe',
|
||||
'uses': ['banana bread', 'smoothies', 'pancakes', 'muffins'],
|
||||
'warning': None,
|
||||
},
|
||||
'milk': {
|
||||
'window_days': 3,
|
||||
'label': 'sour',
|
||||
'uses': ['pancakes', 'quick breads', 'baking', 'sauces'],
|
||||
'warning': 'Use only in cooked recipes — do not drink.',
|
||||
},
|
||||
'dairy': {
|
||||
'window_days': 2,
|
||||
'label': 'sour',
|
||||
'uses': ['pancakes', 'quick breads', 'baking'],
|
||||
'warning': 'Use only in cooked recipes — do not drink.',
|
||||
},
|
||||
'cheese': {
|
||||
'window_days': 14,
|
||||
'label': 'well-aged',
|
||||
'uses': ['broth', 'soups', 'risotto', 'gratins'],
|
||||
'warning': None,
|
||||
},
|
||||
'rice': {
|
||||
'window_days': 2,
|
||||
'label': 'day-old',
|
||||
'uses': ['fried rice', 'rice bowls', 'rice porridge'],
|
||||
'warning': 'Refrigerate immediately after cooking — do not leave at room temp.',
|
||||
},
|
||||
'tortillas': {
|
||||
'window_days': 5,
|
||||
'label': 'stale',
|
||||
'uses': ['chilaquiles', 'migas', 'tortilla soup', 'casserole'],
|
||||
'warning': None,
|
||||
},
|
||||
}
|
||||
|
||||
def days_after_opening(self, category: str | None) -> int | None:
|
||||
"""Return days of shelf life remaining once a package is opened.
|
||||
|
||||
Returns None if the category is unknown or not tracked after opening
|
||||
(e.g. frozen items or raw meat, where no separate after-opening window is tracked).
|
||||
"""
|
||||
if not category:
|
||||
return None
|
||||
return self.SHELF_LIFE_AFTER_OPENING.get(category.lower())
|
||||
|
||||
def secondary_state(
|
||||
self, category: str | None, expiry_date: str | None
|
||||
) -> dict | None:
|
||||
"""Return secondary use info if the item is in its post-expiry secondary window.
|
||||
|
||||
Returns a dict with label, uses, warning, days_past, and window_days when the
|
||||
item is past its nominal expiry date but still within the secondary use window.
|
||||
Returns None in all other cases (unknown category, no window defined, not yet
|
||||
expired, or past the secondary window).
|
||||
"""
|
||||
if not category or not expiry_date:
|
||||
return None
|
||||
entry = self.SECONDARY_WINDOW.get(category.lower())
|
||||
if not entry:
|
||||
return None
|
||||
try:
|
||||
from datetime import date
|
||||
today = date.today()
|
||||
exp = date.fromisoformat(expiry_date)
|
||||
days_past = (today - exp).days
|
||||
if 0 <= days_past <= entry['window_days']:
|
||||
return {
|
||||
'label': entry['label'],
|
||||
'uses': list(entry['uses']),
|
||||
'warning': entry['warning'],
|
||||
'days_past': days_past,
|
||||
'window_days': entry['window_days'],
|
||||
}
|
||||
except ValueError:
|
||||
pass
|
||||
return None
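# Worked example of the two windows above (dates and the constructor call are
# illustrative; the actual ExpirationPredictor signature is defined earlier in this file):
#
#   predictor = ExpirationPredictor()
#   predictor.days_after_opening("milk")              # -> 5
#   # Run on 2025-01-05 for an item categorised 'bread' that expired on 2025-01-03:
#   predictor.secondary_state("bread", "2025-01-03")
#   # -> {'label': 'stale', 'uses': ['croutons', 'stuffing', ...],
#   #     'warning': 'Check for mold before use ...', 'days_past': 2, 'window_days': 5}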
|
||||
|
||||
# Keyword lists are checked in declaration order — most specific first.
|
||||
# Rules:
|
||||
# - canned/processed goods BEFORE raw-meat terms (canned chicken != raw chicken)
|
||||
@@ -1,80 +0,0 @@
"""Heimdall cf-orch budget client.
|
||||
|
||||
Calls Heimdall's /orch/* endpoints to gate and record cf-orch usage for
|
||||
lifetime/founders license holders. Always fails open on network errors —
|
||||
a Heimdall outage should never block the user.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import os
|
||||
|
||||
import requests
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
HEIMDALL_URL: str = os.environ.get("HEIMDALL_URL", "https://license.circuitforge.tech")
|
||||
HEIMDALL_ADMIN_TOKEN: str = os.environ.get("HEIMDALL_ADMIN_TOKEN", "")
|
||||
|
||||
|
||||
def _headers() -> dict[str, str]:
|
||||
if HEIMDALL_ADMIN_TOKEN:
|
||||
return {"Authorization": f"Bearer {HEIMDALL_ADMIN_TOKEN}"}
|
||||
return {}
|
||||
|
||||
|
||||
def check_orch_budget(key_display: str, product: str) -> dict:
|
||||
"""Call POST /orch/check and return the response dict.
|
||||
|
||||
On any error (network, auth, etc.) returns a permissive dict so the
|
||||
caller can proceed without blocking the user.
|
||||
"""
|
||||
try:
|
||||
resp = requests.post(
|
||||
f"{HEIMDALL_URL}/orch/check",
|
||||
json={"key_display": key_display, "product": product},
|
||||
headers=_headers(),
|
||||
timeout=5,
|
||||
)
|
||||
if resp.ok:
|
||||
return resp.json()
|
||||
log.warning("Heimdall orch/check returned %s for key %s", resp.status_code, key_display[:12])
|
||||
except Exception as exc:
|
||||
log.warning("Heimdall orch/check failed (fail-open): %s", exc)
|
||||
|
||||
# Fail open — Heimdall outage must never block the user
|
||||
return {
|
||||
"allowed": True,
|
||||
"calls_used": 0,
|
||||
"calls_total": 0,
|
||||
"topup_calls": 0,
|
||||
"period_start": "",
|
||||
"resets_on": "",
|
||||
}
|
||||
|
||||
|
||||
def get_orch_usage(key_display: str, product: str) -> dict:
|
||||
"""Call GET /orch/usage and return the response dict.
|
||||
|
||||
Returns zeros on error (non-blocking).
|
||||
"""
|
||||
try:
|
||||
resp = requests.get(
|
||||
f"{HEIMDALL_URL}/orch/usage",
|
||||
params={"key_display": key_display, "product": product},
|
||||
headers=_headers(),
|
||||
timeout=5,
|
||||
)
|
||||
if resp.ok:
|
||||
return resp.json()
|
||||
log.warning("Heimdall orch/usage returned %s", resp.status_code)
|
||||
except Exception as exc:
|
||||
log.warning("Heimdall orch/usage failed: %s", exc)
|
||||
|
||||
return {
|
||||
"calls_used": 0,
|
||||
"topup_calls": 0,
|
||||
"calls_total": 0,
|
||||
"period_start": "",
|
||||
"resets_on": "",
|
||||
}
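# Sketch of how the gate is meant to be consulted (the real call sites live elsewhere;
# key_display here is just a variable holding the user's masked license key):
#
#   budget = check_orch_budget(key_display, product="kiwi")
#   if budget["allowed"]:
#       ...  # route the request through cf-orch
#   else:
#       ...  # fall back to the local LLM path
#   # A Heimdall outage yields allowed=True with zeroed counters, so users are never blocked.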
|
||||
@@ -1 +0,0 @@
"""Meal planning service layer — no FastAPI imports (extraction-ready for cf-core)."""
@@ -1,108 +0,0 @@
# app/services/meal_plan/affiliates.py
|
||||
"""Register Kiwi-specific affiliate programs and provide search URL builders.
|
||||
|
||||
Called once at API startup. Programs not yet in core.affiliates are registered
|
||||
here. The actual affiliate IDs are read from environment variables at call
|
||||
time, so the process can start before accounts are approved (plain URLs
|
||||
returned when env vars are absent).
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from urllib.parse import quote_plus
|
||||
|
||||
from circuitforge_core.affiliates import AffiliateProgram, register_program, wrap_url
|
||||
|
||||
|
||||
# ── URL builders ──────────────────────────────────────────────────────────────
|
||||
|
||||
def _walmart_search(url: str, affiliate_id: str) -> str:
|
||||
sep = "&" if "?" in url else "?"
|
||||
return f"{url}{sep}affil=apa&affiliateId={affiliate_id}"
|
||||
|
||||
|
||||
def _target_search(url: str, affiliate_id: str) -> str:
|
||||
sep = "&" if "?" in url else "?"
|
||||
return f"{url}{sep}afid={affiliate_id}"
|
||||
|
||||
|
||||
def _thrive_search(url: str, affiliate_id: str) -> str:
|
||||
sep = "&" if "?" in url else "?"
|
||||
return f"{url}{sep}raf={affiliate_id}"
|
||||
|
||||
|
||||
def _misfits_search(url: str, affiliate_id: str) -> str:
|
||||
sep = "&" if "?" in url else "?"
|
||||
return f"{url}{sep}ref={affiliate_id}"
|
||||
|
||||
|
||||
# ── Registration ──────────────────────────────────────────────────────────────
|
||||
|
||||
def register_kiwi_programs() -> None:
|
||||
"""Register Kiwi retailer programs. Safe to call multiple times (idempotent)."""
|
||||
register_program(AffiliateProgram(
|
||||
name="Walmart",
|
||||
retailer_key="walmart",
|
||||
env_var="WALMART_AFFILIATE_ID",
|
||||
build_url=_walmart_search,
|
||||
))
|
||||
register_program(AffiliateProgram(
|
||||
name="Target",
|
||||
retailer_key="target",
|
||||
env_var="TARGET_AFFILIATE_ID",
|
||||
build_url=_target_search,
|
||||
))
|
||||
register_program(AffiliateProgram(
|
||||
name="Thrive Market",
|
||||
retailer_key="thrive",
|
||||
env_var="THRIVE_AFFILIATE_ID",
|
||||
build_url=_thrive_search,
|
||||
))
|
||||
register_program(AffiliateProgram(
|
||||
name="Misfits Market",
|
||||
retailer_key="misfits",
|
||||
env_var="MISFITS_AFFILIATE_ID",
|
||||
build_url=_misfits_search,
|
||||
))
|
||||
|
||||
|
||||
# ── Search URL helpers ─────────────────────────────────────────────────────────
|
||||
|
||||
_SEARCH_TEMPLATES: dict[str, str] = {
|
||||
"amazon": "https://www.amazon.com/s?k={q}",
|
||||
"instacart": "https://www.instacart.com/store/search_v3/term?term={q}",
|
||||
"walmart": "https://www.walmart.com/search?q={q}",
|
||||
"target": "https://www.target.com/s?searchTerm={q}",
|
||||
"thrive": "https://thrivemarket.com/search?q={q}",
|
||||
"misfits": "https://www.misfitsmarket.com/shop?search={q}",
|
||||
}
|
||||
|
||||
KIWI_RETAILERS = list(_SEARCH_TEMPLATES.keys())
|
||||
|
||||
|
||||
def get_retailer_links(ingredient_name: str) -> list[dict]:
|
||||
"""Return affiliate-wrapped search links for *ingredient_name*.
|
||||
|
||||
Returns a list of dicts: {"retailer": str, "label": str, "url": str}.
|
||||
Falls back to plain search URL when no affiliate ID is configured.
|
||||
"""
|
||||
q = quote_plus(ingredient_name)
|
||||
links = []
|
||||
for key, template in _SEARCH_TEMPLATES.items():
|
||||
plain_url = template.format(q=q)
|
||||
try:
|
||||
affiliate_url = wrap_url(plain_url, retailer=key)
|
||||
except Exception:
|
||||
affiliate_url = plain_url
|
||||
links.append({"retailer": key, "label": _label(key), "url": affiliate_url})
|
||||
return links
|
||||
|
||||
|
||||
def _label(key: str) -> str:
|
||||
return {
|
||||
"amazon": "Amazon",
|
||||
"instacart": "Instacart",
|
||||
"walmart": "Walmart",
|
||||
"target": "Target",
|
||||
"thrive": "Thrive Market",
|
||||
"misfits": "Misfits Market",
|
||||
}.get(key, key.title())
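# Example result shape when no affiliate env vars are set (plain search URLs are
# returned; values illustrative):
#
#   get_retailer_links("smoked paprika")
#   # -> [{"retailer": "amazon", "label": "Amazon",
#   #      "url": "https://www.amazon.com/s?k=smoked+paprika"},
#   #     {"retailer": "instacart", "label": "Instacart", ...},
#   #     ... one entry per key in _SEARCH_TEMPLATES, in declaration order]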
|
||||
@@ -1,91 +0,0 @@
# app/services/meal_plan/llm_planner.py
|
||||
# BSL 1.1 — LLM feature
|
||||
"""LLM-assisted full-week meal plan generation.
|
||||
|
||||
Returns suggestions for human review — never writes to the DB directly.
|
||||
The API endpoint presents the suggestions and waits for user approval
|
||||
before calling store.upsert_slot().
|
||||
|
||||
Routing: pass a router from get_meal_plan_router() in llm_router.py.
|
||||
Cloud: cf-text via cf-orch (3B-7B GGUF, ~2GB VRAM).
|
||||
Local: LLMRouter (ollama / vllm / openai-compat per llm.yaml).
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import logging
|
||||
from dataclasses import dataclass
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
_PLAN_SYSTEM = """\
|
||||
You are a practical meal planning assistant. Given a pantry inventory and
|
||||
dietary preferences, suggest a week of dinners (or other configured meals).
|
||||
|
||||
Prioritise ingredients that are expiring soon. Prefer variety across the week.
|
||||
Respect all dietary restrictions.
|
||||
|
||||
Respond with a JSON array only — no prose, no markdown fences.
|
||||
Each item: {"day": 0-6, "meal_type": "dinner", "recipe_id": <int or null>, "suggestion": "<recipe name>"}
|
||||
|
||||
day 0 = Monday, day 6 = Sunday.
|
||||
If you cannot match a known recipe_id, set recipe_id to null and provide a suggestion name.
|
||||
"""
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
class PlanSuggestion:
|
||||
day: int # 0 = Monday
|
||||
meal_type: str
|
||||
recipe_id: int | None
|
||||
suggestion: str # human-readable name
|
||||
|
||||
|
||||
def generate_plan(
|
||||
pantry_items: list[str],
|
||||
meal_types: list[str],
|
||||
dietary_notes: str,
|
||||
router,
|
||||
) -> list[PlanSuggestion]:
|
||||
"""Return a list of PlanSuggestion for user review.
|
||||
|
||||
Never writes to DB — caller must upsert slots after user approves.
|
||||
Returns an empty list if router is None or response is unparseable.
|
||||
"""
|
||||
if router is None:
|
||||
return []
|
||||
|
||||
pantry_text = "\n".join(f"- {item}" for item in pantry_items[:50])
|
||||
meal_text = ", ".join(meal_types)
|
||||
user_msg = (
|
||||
f"Meal types: {meal_text}\n"
|
||||
f"Dietary notes: {dietary_notes or 'none'}\n\n"
|
||||
f"Pantry (partial):\n{pantry_text}"
|
||||
)
|
||||
|
||||
try:
|
||||
response = router.complete(
|
||||
system=_PLAN_SYSTEM,
|
||||
user=user_msg,
|
||||
max_tokens=512,
|
||||
temperature=0.7,
|
||||
)
|
||||
items = json.loads(response.strip())
|
||||
suggestions = []
|
||||
for item in items:
|
||||
if not isinstance(item, dict):
|
||||
continue
|
||||
day = item.get("day")
|
||||
meal_type = item.get("meal_type", "dinner")
|
||||
if not isinstance(day, int) or day < 0 or day > 6:
|
||||
continue
|
||||
suggestions.append(PlanSuggestion(
|
||||
day=day,
|
||||
meal_type=meal_type,
|
||||
recipe_id=item.get("recipe_id"),
|
||||
suggestion=str(item.get("suggestion", "")),
|
||||
))
|
||||
return suggestions
|
||||
except Exception as exc:
|
||||
logger.debug("LLM plan generation failed: %s", exc)
|
||||
return []
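# Minimal sketch of the round trip, assuming `router` exposes .complete() and the model
# answers with the JSON array the system prompt asks for:
#
#   raw = ('[{"day": 0, "meal_type": "dinner", "recipe_id": 42, "suggestion": "Chickpea curry"},'
#          ' {"day": 1, "meal_type": "dinner", "recipe_id": null, "suggestion": "Fried rice"}]')
#   # generate_plan() parses that into:
#   #   [PlanSuggestion(day=0, meal_type="dinner", recipe_id=42, suggestion="Chickpea curry"),
#   #    PlanSuggestion(day=1, meal_type="dinner", recipe_id=None, suggestion="Fried rice")]
#   # and the endpoint calls store.upsert_slot() only after the user approves.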
|
||||
@@ -1,96 +0,0 @@
# app/services/meal_plan/llm_router.py
|
||||
# BSL 1.1 — LLM feature
|
||||
"""Provide a router-compatible LLM client for meal plan generation tasks.
|
||||
|
||||
Cloud (CF_ORCH_URL set):
|
||||
Allocates a cf-text service via cf-orch (3B-7B GGUF, ~2GB VRAM).
|
||||
Returns an _OrchTextRouter that wraps the cf-text HTTP endpoint
|
||||
with a .complete(system, user, **kwargs) interface.
|
||||
|
||||
Local / self-hosted (no CF_ORCH_URL):
|
||||
Returns an LLMRouter instance which tries ollama, vllm, or any
|
||||
backend configured in ~/.config/circuitforge/llm.yaml.
|
||||
|
||||
Both paths expose the same interface so llm_timing.py and llm_planner.py
|
||||
need no knowledge of the backend.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import os
|
||||
from contextlib import nullcontext
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# cf-orch service name and VRAM budget for meal plan LLM tasks.
|
||||
# These are lighter than recipe_llm (4.0 GB) — cf-text handles them.
|
||||
_SERVICE_TYPE = "cf-text"
|
||||
_TTL_S = 120.0
|
||||
_CALLER = "kiwi-meal-plan"
|
||||
|
||||
|
||||
class _OrchTextRouter:
|
||||
"""Thin adapter that makes a cf-text HTTP endpoint look like LLMRouter."""
|
||||
|
||||
def __init__(self, base_url: str) -> None:
|
||||
self._base_url = base_url.rstrip("/")
|
||||
|
||||
def complete(
|
||||
self,
|
||||
system: str = "",
|
||||
user: str = "",
|
||||
max_tokens: int = 512,
|
||||
temperature: float = 0.7,
|
||||
**_kwargs,
|
||||
) -> str:
|
||||
from openai import OpenAI
|
||||
client = OpenAI(base_url=self._base_url + "/v1", api_key="any")
|
||||
messages = []
|
||||
if system:
|
||||
messages.append({"role": "system", "content": system})
|
||||
messages.append({"role": "user", "content": user})
|
||||
try:
|
||||
model = client.models.list().data[0].id
|
||||
except Exception:
|
||||
model = "local"
|
||||
resp = client.chat.completions.create(
|
||||
model=model,
|
||||
messages=messages,
|
||||
max_tokens=max_tokens,
|
||||
temperature=temperature,
|
||||
)
|
||||
return resp.choices[0].message.content or ""
|
||||
|
||||
|
||||
def get_meal_plan_router():
|
||||
"""Return an LLM client for meal plan tasks.
|
||||
|
||||
Tries cf-orch cf-text allocation first (cloud); falls back to LLMRouter
|
||||
(local ollama/vllm). Returns None if no backend is available.
|
||||
"""
|
||||
cf_orch_url = os.environ.get("CF_ORCH_URL")
|
||||
if cf_orch_url:
|
||||
try:
|
||||
from circuitforge_orch.client import CFOrchClient
|
||||
client = CFOrchClient(cf_orch_url)
|
||||
ctx = client.allocate(
|
||||
service=_SERVICE_TYPE,
|
||||
ttl_s=_TTL_S,
|
||||
caller=_CALLER,
|
||||
)
|
||||
alloc = ctx.__enter__()
|
||||
if alloc is not None:
|
||||
return _OrchTextRouter(alloc.url), ctx
|
||||
except Exception as exc:
|
||||
logger.debug("cf-orch cf-text allocation failed, falling back to LLMRouter: %s", exc)
|
||||
|
||||
# Local fallback: LLMRouter (ollama / vllm / openai-compat)
|
||||
try:
|
||||
from circuitforge_core.llm.router import LLMRouter
|
||||
return LLMRouter(), nullcontext(None)
|
||||
except FileNotFoundError:
|
||||
logger.debug("LLMRouter: no llm.yaml and no LLM env vars — meal plan LLM disabled")
|
||||
return None, nullcontext(None)
|
||||
except Exception as exc:
|
||||
logger.debug("LLMRouter init failed: %s", exc)
|
||||
return None, nullcontext(None)
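# Caller-side sketch (names illustrative): both branches return a (router, ctx) pair so
# the caller can hold the cf-orch allocation open for the request and release it afterwards.
#
#   router, ctx = get_meal_plan_router()
#   try:
#       if router is not None:
#           suggestions = generate_plan(pantry_items, ["dinner"], dietary_notes, router)
#   finally:
#       ctx.__exit__(None, None, None)   # nullcontext no-op on the local / disabled paths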
|
||||
@@ -1,65 +0,0 @@
# app/services/meal_plan/llm_timing.py
|
||||
# BSL 1.1 — LLM feature
|
||||
"""Estimate cook times for recipes missing corpus prep/cook time fields.
|
||||
|
||||
Used only when tier allows `meal_plan_llm_timing`. Falls back gracefully
|
||||
when no LLM backend is available.
|
||||
|
||||
Routing: pass a router from get_meal_plan_router() in llm_router.py.
|
||||
Cloud: cf-text via cf-orch (3B GGUF, ~2GB VRAM).
|
||||
Local: LLMRouter (ollama / vllm / openai-compat per llm.yaml).
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
_TIMING_PROMPT = """\
|
||||
You are a practical cook. Given a recipe name and its ingredients, estimate:
|
||||
1. prep_time: minutes of active prep work (chopping, mixing, etc.)
|
||||
2. cook_time: minutes of cooking (oven, stovetop, etc.)
|
||||
|
||||
Respond with ONLY two integers on separate lines:
|
||||
prep_time
|
||||
cook_time
|
||||
|
||||
If you cannot estimate, respond with:
|
||||
0
|
||||
0
|
||||
"""
|
||||
|
||||
|
||||
def estimate_timing(recipe_name: str, ingredients: list[str], router) -> tuple[int | None, int | None]:
|
||||
"""Return (prep_minutes, cook_minutes) for a recipe using LLMRouter.
|
||||
|
||||
Returns (None, None) if the router is unavailable or the response is
|
||||
unparseable. Never raises.
|
||||
|
||||
Args:
|
||||
recipe_name: Name of the recipe.
|
||||
ingredients: List of raw ingredient strings from the corpus.
|
||||
router: An LLMRouter instance (from circuitforge_core.llm).
|
||||
"""
|
||||
if router is None:
|
||||
return None, None
|
||||
|
||||
ingredient_list = "\n".join(f"- {i}" for i in (ingredients or [])[:15])
|
||||
prompt = f"Recipe: {recipe_name}\n\nIngredients:\n{ingredient_list}"
|
||||
|
||||
try:
|
||||
response = router.complete(
|
||||
system=_TIMING_PROMPT,
|
||||
user=prompt,
|
||||
max_tokens=16,
|
||||
temperature=0.0,
|
||||
)
|
||||
lines = response.strip().splitlines()
|
||||
prep = int(lines[0].strip()) if lines else 0
|
||||
cook = int(lines[1].strip()) if len(lines) > 1 else 0
|
||||
if prep == 0 and cook == 0:
|
||||
return None, None
|
||||
return prep or None, cook or None
|
||||
except Exception as exc:
|
||||
logger.debug("LLM timing estimation failed for %r: %s", recipe_name, exc)
|
||||
return None, None
|
||||
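A short wiring sketch for `estimate_timing` using the router pair from `llm_router.py`; module paths are taken from the file-path comments in this diff, and either returned value may be None:

```python
# Illustrative only; not part of the diff.
from app.services.meal_plan.llm_router import get_meal_plan_router  # assumed module path
from app.services.meal_plan.llm_timing import estimate_timing

router, ctx = get_meal_plan_router()
try:
    prep, cook = estimate_timing(
        "Weeknight chana masala",
        ["1 can chickpeas", "1 onion", "400g chopped tomatoes"],
        router,
    )
    # prep/cook are None when no backend is available or the reply is unparseable.
finally:
    ctx.__exit__(None, None, None)
```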
|
|
@ -1,26 +0,0 @@
# app/services/meal_plan/planner.py
|
||||
"""Plan and slot orchestration — thin layer over Store.
|
||||
|
||||
No FastAPI imports. Provides helpers used by the API endpoint.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from app.db.store import Store
|
||||
from app.models.schemas.meal_plan import VALID_MEAL_TYPES
|
||||
|
||||
|
||||
def create_plan(store: Store, week_start: str, meal_types: list[str]) -> dict:
|
||||
"""Create a plan, filtering meal_types to valid values only."""
|
||||
valid = [t for t in meal_types if t in VALID_MEAL_TYPES]
|
||||
if not valid:
|
||||
valid = ["dinner"]
|
||||
return store.create_meal_plan(week_start, valid)
|
||||
|
||||
|
||||
def get_plan_with_slots(store: Store, plan_id: int) -> dict | None:
|
||||
"""Return a plan row with its slots list attached, or None."""
|
||||
plan = store.get_meal_plan(plan_id)
|
||||
if plan is None:
|
||||
return None
|
||||
slots = store.get_plan_slots(plan_id)
|
||||
return {**plan, "slots": slots}
|
||||
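A hedged usage sketch for the two helpers above; it assumes `Store.create_meal_plan()` returns a row dict carrying an `"id"` key, which is not shown in this hunk:

```python
# Sketch only; assumes the plan row dict has an "id" key.
from app.db.store import Store
from app.services.meal_plan.planner import create_plan, get_plan_with_slots


def plan_week(store: Store, week_start: str) -> dict | None:
    plan = create_plan(store, week_start, ["dinner", "lunch"])
    return get_plan_with_slots(store, plan["id"])
```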
|
|
@ -1,91 +0,0 @@
# app/services/meal_plan/prep_scheduler.py
|
||||
"""Sequence prep tasks for a batch cooking session.
|
||||
|
||||
Pure function — no DB or network calls. Sorts tasks by equipment priority
|
||||
(oven first to maximise oven utilisation) then assigns sequence_order.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
|
||||
_EQUIPMENT_PRIORITY = {"oven": 0, "stovetop": 1, "cold": 2, "no-heat": 3}
|
||||
_DEFAULT_PRIORITY = 4
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
class PrepTask:
|
||||
recipe_id: int | None
|
||||
slot_id: int | None
|
||||
task_label: str
|
||||
duration_minutes: int | None
|
||||
sequence_order: int
|
||||
equipment: str | None
|
||||
is_parallel: bool = False
|
||||
notes: str | None = None
|
||||
user_edited: bool = False
|
||||
|
||||
|
||||
def _total_minutes(recipe: dict) -> int | None:
|
||||
prep = recipe.get("prep_time")
|
||||
cook = recipe.get("cook_time")
|
||||
if prep is None and cook is None:
|
||||
return None
|
||||
return (prep or 0) + (cook or 0)
|
||||
|
||||
|
||||
def _equipment(recipe: dict) -> str | None:
|
||||
# Corpus recipes don't have an explicit equipment field; use test helper
|
||||
# field if present, otherwise infer from cook_time (long = oven heuristic).
|
||||
if "_equipment" in recipe:
|
||||
return recipe["_equipment"]
|
||||
minutes = _total_minutes(recipe)
|
||||
if minutes and minutes >= 45:
|
||||
return "oven"
|
||||
return "stovetop"
|
||||
|
||||
|
||||
def build_prep_tasks(slots: list[dict], recipes: list[dict]) -> list[PrepTask]:
|
||||
"""Return a sequenced list of PrepTask objects from plan slots + recipe rows.
|
||||
|
||||
Algorithm:
|
||||
1. Build a recipe_id → recipe dict lookup.
|
||||
2. Create one task per slot that has a recipe assigned.
|
||||
3. Sort by equipment priority (oven first).
|
||||
4. Assign contiguous sequence_order starting at 1.
|
||||
"""
|
||||
if not slots or not recipes:
|
||||
return []
|
||||
|
||||
recipe_map: dict[int, dict] = {r["id"]: r for r in recipes}
|
||||
raw_tasks: list[tuple[int, dict]] = [] # (priority, kwargs)
|
||||
|
||||
for slot in slots:
|
||||
recipe_id = slot.get("recipe_id")
|
||||
if not recipe_id:
|
||||
continue
|
||||
recipe = recipe_map.get(recipe_id)
|
||||
if not recipe:
|
||||
continue
|
||||
|
||||
eq = _equipment(recipe)
|
||||
priority = _EQUIPMENT_PRIORITY.get(eq or "", _DEFAULT_PRIORITY)
|
||||
raw_tasks.append((priority, {
|
||||
"recipe_id": recipe_id,
|
||||
"slot_id": slot.get("id"),
|
||||
"task_label": recipe.get("name", f"Recipe {recipe_id}"),
|
||||
"duration_minutes": _total_minutes(recipe),
|
||||
"equipment": eq,
|
||||
}))
|
||||
|
||||
raw_tasks.sort(key=lambda t: t[0])
|
||||
return [
|
||||
PrepTask(
|
||||
recipe_id=kw["recipe_id"],
|
||||
slot_id=kw["slot_id"],
|
||||
task_label=kw["task_label"],
|
||||
duration_minutes=kw["duration_minutes"],
|
||||
sequence_order=i,
|
||||
equipment=kw["equipment"],
|
||||
)
|
||||
for i, (_, kw) in enumerate(raw_tasks, 1)
|
||||
]
|
||||
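A worked example of the sequencing rule described above (oven-bound recipes first, then contiguous `sequence_order`), using made-up slot and recipe rows:

```python
# Illustrative data only; not part of the diff.
from app.services.meal_plan.prep_scheduler import build_prep_tasks

slots = [
    {"id": 1, "recipe_id": 10},
    {"id": 2, "recipe_id": 20},
]
recipes = [
    {"id": 10, "name": "Sheet-pan chicken", "prep_time": 15, "cook_time": 45},  # 60 min total -> "oven"
    {"id": 20, "name": "Quick dal", "prep_time": 10, "cook_time": 25},          # 35 min total -> "stovetop"
]

tasks = build_prep_tasks(slots, recipes)
assert [t.task_label for t in tasks] == ["Sheet-pan chicken", "Quick dal"]  # oven first
assert [t.sequence_order for t in tasks] == [1, 2]                          # 1-based ordering
```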
|
|
@ -1,88 +0,0 @@
# app/services/meal_plan/shopping_list.py
|
||||
"""Compute a shopping list from a meal plan and current pantry inventory.
|
||||
|
||||
Pure function — no DB or network calls. Takes plain dicts from the Store
|
||||
and returns GapItem dataclasses.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import re
|
||||
from dataclasses import dataclass, field
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
class GapItem:
|
||||
ingredient_name: str
|
||||
needed_raw: str | None # first quantity token from recipe text, e.g. "300g"
|
||||
have_quantity: float | None # pantry quantity when partial match
|
||||
have_unit: str | None
|
||||
covered: bool
|
||||
retailer_links: list = field(default_factory=list) # filled by API layer
|
||||
|
||||
|
||||
_QUANTITY_RE = re.compile(r"^(\d+[\d./]*\s*(?:g|kg|ml|l|oz|lb|cup|cups|tsp|tbsp|tbsps|tsps)?)\b", re.I)
|
||||
|
||||
|
||||
def _extract_quantity(ingredient_text: str) -> str | None:
|
||||
"""Pull the leading quantity string from a raw ingredient line."""
|
||||
m = _QUANTITY_RE.match(ingredient_text.strip())
|
||||
return m.group(1).strip() if m else None
|
||||
|
||||
|
||||
def _normalise(name: str) -> str:
|
||||
"""Lowercase, strip possessives and plural -s for fuzzy matching."""
|
||||
return name.lower().strip().rstrip("s")
|
||||
|
||||
|
||||
def compute_shopping_list(
|
||||
recipes: list[dict],
|
||||
inventory: list[dict],
|
||||
) -> tuple[list[GapItem], list[GapItem]]:
|
||||
"""Return (gap_items, covered_items) for a list of recipe dicts + inventory dicts.
|
||||
|
||||
Deduplicates by normalised ingredient name — the first recipe's quantity
|
||||
string wins when the same ingredient appears in multiple recipes.
|
||||
"""
|
||||
if not recipes:
|
||||
return [], []
|
||||
|
||||
# Build pantry lookup: normalised_name → inventory row
|
||||
pantry: dict[str, dict] = {}
|
||||
for item in inventory:
|
||||
pantry[_normalise(item["name"])] = item
|
||||
|
||||
# Collect unique ingredients with their first quantity token
|
||||
seen: dict[str, str | None] = {} # normalised_name → needed_raw
|
||||
for recipe in recipes:
|
||||
names: list[str] = recipe.get("ingredient_names") or []
|
||||
raw_lines: list[str] = recipe.get("ingredients") or []
|
||||
for i, name in enumerate(names):
|
||||
key = _normalise(name)
|
||||
if key in seen:
|
||||
continue
|
||||
raw = raw_lines[i] if i < len(raw_lines) else ""
|
||||
seen[key] = _extract_quantity(raw)
|
||||
|
||||
gaps: list[GapItem] = []
|
||||
covered: list[GapItem] = []
|
||||
|
||||
for norm_name, needed_raw in seen.items():
|
||||
pantry_row = pantry.get(norm_name)
|
||||
if pantry_row:
|
||||
covered.append(GapItem(
|
||||
ingredient_name=norm_name,
|
||||
needed_raw=needed_raw,
|
||||
have_quantity=pantry_row.get("quantity"),
|
||||
have_unit=pantry_row.get("unit"),
|
||||
covered=True,
|
||||
))
|
||||
else:
|
||||
gaps.append(GapItem(
|
||||
ingredient_name=norm_name,
|
||||
needed_raw=needed_raw,
|
||||
have_quantity=None,
|
||||
have_unit=None,
|
||||
covered=False,
|
||||
))
|
||||
|
||||
return gaps, covered
|
||||
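An illustrative run of the gap/covered split; input shapes follow the docstrings above (ingredient_names parallel to the raw ingredient lines, inventory rows with name/quantity/unit):

```python
# Illustrative data only; not part of the diff.
from app.services.meal_plan.shopping_list import compute_shopping_list

recipes = [{
    "ingredient_names": ["chickpeas", "onions", "garam masala"],
    "ingredients": ["400g chickpeas", "1 onion", "2 tsp garam masala"],
}]
inventory = [{"name": "Onion", "quantity": 3.0, "unit": "unit"}]

gaps, covered = compute_shopping_list(recipes, inventory)
# "onions" normalises to "onion" and matches the pantry row; the rest are gaps
# carrying their leading quantity tokens ("400g", "2 tsp").
assert {g.ingredient_name for g in gaps} == {"chickpea", "garam masala"}
assert covered[0].have_quantity == 3.0
```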
|
|
@ -33,7 +33,7 @@ def _try_docuvision(image_path: str | Path) -> str | None:
|
|||
if not cf_orch_url:
|
||||
return None
|
||||
try:
|
||||
from circuitforge_orch.client import CFOrchClient
|
||||
from circuitforge_core.resources import CFOrchClient
|
||||
from app.services.ocr.docuvision_client import DocuvisionClient
|
||||
|
||||
client = CFOrchClient(cf_orch_url)
|
||||
|
|
|
|||
|
|
@ -15,72 +15,63 @@ logger = logging.getLogger(__name__)
|
|||
|
||||
class OpenFoodFactsService:
|
||||
"""
|
||||
Service for interacting with the Open*Facts family of databases.
|
||||
Service for interacting with the OpenFoodFacts API.
|
||||
|
||||
Primary: OpenFoodFacts (food products).
|
||||
Fallback chain: Open Beauty Facts (personal care) → Open Products Facts (household).
|
||||
All three databases share the same API path and JSON format.
|
||||
OpenFoodFacts is a free, open database of food products with
|
||||
ingredients, allergens, and nutrition facts.
|
||||
"""
|
||||
|
||||
BASE_URL = "https://world.openfoodfacts.org/api/v2"
|
||||
USER_AGENT = "Kiwi/0.1.0 (https://circuitforge.tech)"
|
||||
|
||||
# Fallback databases tried in order when OFFs returns no match.
|
||||
# Same API format as OFFs — only the host differs.
|
||||
_FALLBACK_DATABASES = [
|
||||
"https://world.openbeautyfacts.org/api/v2",
|
||||
"https://world.openproductsfacts.org/api/v2",
|
||||
]
|
||||
|
||||
async def _lookup_in_database(
|
||||
self, barcode: str, base_url: str, client: httpx.AsyncClient
|
||||
) -> Optional[Dict[str, Any]]:
|
||||
"""Try one Open*Facts database using an existing client. Returns parsed product dict or None."""
|
||||
try:
|
||||
response = await client.get(
|
||||
f"{base_url}/product/{barcode}.json",
|
||||
headers={"User-Agent": self.USER_AGENT},
|
||||
timeout=10.0,
|
||||
)
|
||||
if response.status_code == 404:
|
||||
return None
|
||||
response.raise_for_status()
|
||||
data = response.json()
|
||||
if data.get("status") != 1:
|
||||
return None
|
||||
return self._parse_product_data(data, barcode)
|
||||
except httpx.HTTPError as e:
|
||||
logger.debug("HTTP error for %s at %s: %s", barcode, base_url, e)
|
||||
return None
|
||||
except Exception as e:
|
||||
logger.debug("Lookup failed for %s at %s: %s", barcode, base_url, e)
|
||||
return None
|
||||
|
||||
async def lookup_product(self, barcode: str) -> Optional[Dict[str, Any]]:
|
||||
"""
|
||||
Look up a product by barcode, trying OFFs then fallback databases.
|
||||
|
||||
A single httpx.AsyncClient is created for the whole lookup chain so that
|
||||
connection pooling and TLS session reuse apply across all database attempts.
|
||||
Look up a product by barcode in the OpenFoodFacts database.
|
||||
|
||||
Args:
|
||||
barcode: UPC/EAN barcode (8-13 digits)
|
||||
|
||||
Returns:
|
||||
Dictionary with product information, or None if not found in any database.
|
||||
Dictionary with product information, or None if not found
|
||||
|
||||
Example response:
|
||||
{
|
||||
"name": "Organic Milk",
|
||||
"brand": "Horizon",
|
||||
"categories": ["Dairy", "Milk"],
|
||||
"image_url": "https://...",
|
||||
"nutrition_data": {...},
|
||||
"raw_data": {...} # Full API response
|
||||
}
|
||||
"""
|
||||
try:
|
||||
async with httpx.AsyncClient() as client:
|
||||
result = await self._lookup_in_database(barcode, self.BASE_URL, client)
|
||||
if result:
|
||||
return result
|
||||
url = f"{self.BASE_URL}/product/{barcode}.json"
|
||||
|
||||
for db_url in self._FALLBACK_DATABASES:
|
||||
result = await self._lookup_in_database(barcode, db_url, client)
|
||||
if result:
|
||||
logger.info("Barcode %s found in fallback database: %s", barcode, db_url)
|
||||
return result
|
||||
response = await client.get(
|
||||
url,
|
||||
headers={"User-Agent": self.USER_AGENT},
|
||||
timeout=10.0,
|
||||
)
|
||||
|
||||
logger.info("Barcode %s not found in any Open*Facts database", barcode)
|
||||
if response.status_code == 404:
|
||||
logger.info(f"Product not found in OpenFoodFacts: {barcode}")
|
||||
return None
|
||||
|
||||
response.raise_for_status()
|
||||
data = response.json()
|
||||
|
||||
if data.get("status") != 1:
|
||||
logger.info(f"Product not found in OpenFoodFacts: {barcode}")
|
||||
return None
|
||||
|
||||
return self._parse_product_data(data, barcode)
|
||||
|
||||
except httpx.HTTPError as e:
|
||||
logger.error(f"HTTP error looking up barcode {barcode}: {e}")
|
||||
return None
|
||||
except Exception as e:
|
||||
logger.error(f"Error looking up barcode {barcode}: {e}")
|
||||
return None
|
||||
|
||||
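A hedged async usage sketch for the lookup above; the module path and the no-argument constructor are assumptions (neither is shown in this hunk), and the barcode is only an example value:

```python
# Illustrative only; not part of the diff.
import asyncio

from app.services.openfoodfacts import OpenFoodFactsService  # hypothetical module path


async def main() -> None:
    svc = OpenFoodFactsService()  # assumes a no-argument constructor
    product = await svc.lookup_product("0123456789012")  # example barcode
    if product is None:
        print("not found")
    else:
        print(product["name"], product.get("brand"))


asyncio.run(main())
```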
def _parse_product_data(self, data: Dict[str, Any], barcode: str) -> Dict[str, Any]:
|
||||
|
|
@ -123,9 +114,6 @@ class OpenFoodFactsService:
|
|||
allergens = product.get("allergens_tags", [])
|
||||
labels = product.get("labels_tags", [])
|
||||
|
||||
# Pack size detection: prefer explicit unit_count, fall back to parsing the quantity string
|
||||
pack_quantity, pack_unit = self._extract_pack_size(product)
|
||||
|
||||
return {
|
||||
"name": name,
|
||||
"brand": brand,
|
||||
|
|
@ -136,47 +124,9 @@ class OpenFoodFactsService:
|
|||
"nutrition_data": nutrition_data,
|
||||
"allergens": allergens,
|
||||
"labels": labels,
|
||||
"pack_quantity": pack_quantity,
|
||||
"pack_unit": pack_unit,
|
||||
"raw_data": product, # Store full response for debugging
|
||||
}
|
||||
|
||||
def _extract_pack_size(self, product: Dict[str, Any]) -> tuple[float | None, str | None]:
|
||||
"""Return (quantity, unit) for multi-pack products, or (None, None).
|
||||
|
||||
OFFs fields tried in order:
|
||||
1. `number_of_units` (explicit count, highest confidence)
2. Parse the `quantity` string for an "N x ..." pattern, e.g. "4 x 113 g"
|
||||
|
||||
Returns None, None when data is absent, ambiguous, or single-unit.
|
||||
"""
|
||||
import re
|
||||
|
||||
# Field 1: explicit unit count
|
||||
unit_count = product.get("number_of_units")
|
||||
if unit_count:
|
||||
try:
|
||||
n = float(unit_count)
|
||||
if n > 1:
|
||||
return n, product.get("serving_size_unit") or "unit"
|
||||
except (ValueError, TypeError):
|
||||
pass
|
||||
|
||||
# Field 2: parse quantity string for "N x ..." pattern
|
||||
qty_str = product.get("quantity", "")
|
||||
if qty_str:
|
||||
m = re.match(r"^(\d+(?:\.\d+)?)\s*[xX×]\s*", qty_str.strip())
|
||||
if m:
|
||||
n = float(m.group(1))
|
||||
if n > 1:
|
||||
# Try to get a sensible sub-unit label from the rest
|
||||
rest = qty_str[m.end():].strip()
|
||||
unit_label = re.sub(r"[\d.,\s]+", "", rest).strip()[:20] or "unit"
|
||||
return n, unit_label
|
||||
|
||||
return None, None
|
||||
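A quick illustration of the two branches described in the docstring above; the no-argument constructor is assumed, since no `__init__` appears in this hunk:

```python
# Illustrative only; not part of the diff.
svc = OpenFoodFactsService()  # assumed no-argument constructor
assert svc._extract_pack_size({"quantity": "4 x 113 g"}) == (4.0, "g")    # "N x ..." branch
assert svc._extract_pack_size({"quantity": "500 g"}) == (None, None)      # single-unit product
assert svc._extract_pack_size({"number_of_units": "6"}) == (6.0, "unit")  # explicit count branch
```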
|
||||
def _extract_nutrition_data(self, product: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""
|
||||
Extract nutrition facts from product data.
|
||||
|
|
|
|||
|
|
@ -42,21 +42,11 @@ class AssemblyRole:
|
|||
class AssemblyTemplate:
|
||||
"""A template assembly dish."""
|
||||
id: int
|
||||
slug: str # URL-safe identifier, e.g. "burrito_taco"
|
||||
icon: str # emoji
|
||||
descriptor: str # one-line description shown in template grid
|
||||
title: str
|
||||
required: list[AssemblyRole]
|
||||
optional: list[AssemblyRole]
|
||||
directions: list[str]
|
||||
notes: str = ""
|
||||
# Per-role hints shown in the wizard picker header
|
||||
# keys match role.display values; missing keys fall back to ""
|
||||
role_hints: dict[str, str] = None # type: ignore[assignment]
|
||||
|
||||
def __post_init__(self) -> None:
|
||||
if self.role_hints is None:
|
||||
self.role_hints = {}
|
||||
|
||||
|
||||
def _matches_role(role: AssemblyRole, pantry_set: set[str]) -> list[str]:
|
||||
|
|
@ -148,9 +138,6 @@ def _personalized_title(tmpl: AssemblyTemplate, pantry_set: set[str], seed: int)
|
|||
ASSEMBLY_TEMPLATES: list[AssemblyTemplate] = [
|
||||
AssemblyTemplate(
|
||||
id=-1,
|
||||
slug="burrito_taco",
|
||||
icon="🌯",
|
||||
descriptor="Protein, veg, and sauce in a tortilla or over rice",
|
||||
title="Burrito / Taco",
|
||||
required=[
|
||||
AssemblyRole("tortilla or wrap", [
|
||||
|
|
@ -183,21 +170,9 @@ ASSEMBLY_TEMPLATES: list[AssemblyTemplate] = [
|
|||
"Fold in the sides and roll tightly. Optionally toast seam-side down 1-2 minutes.",
|
||||
],
|
||||
notes="Works as a burrito (rolled), taco (folded), or quesadilla (cheese only, pressed flat).",
|
||||
role_hints={
|
||||
"tortilla or wrap": "The foundation -- what holds everything",
|
||||
"protein": "The main filling",
|
||||
"rice or starch": "Optional base layer",
|
||||
"cheese": "Optional -- melts into the filling",
|
||||
"salsa or sauce": "Optional -- adds moisture and heat",
|
||||
"sour cream or yogurt": "Optional -- cool contrast to heat",
|
||||
"vegetables": "Optional -- adds texture and colour",
|
||||
},
|
||||
),
|
||||
AssemblyTemplate(
|
||||
id=-2,
|
||||
slug="fried_rice",
|
||||
icon="🍳",
|
||||
descriptor="Rice + egg + whatever's in the fridge",
|
||||
title="Fried Rice",
|
||||
required=[
|
||||
AssemblyRole("cooked rice", [
|
||||
|
|
@ -230,21 +205,9 @@ ASSEMBLY_TEMPLATES: list[AssemblyTemplate] = [
|
|||
"Season with soy sauce and any other sauces. Toss to combine.",
|
||||
],
|
||||
notes="Add a fried egg on top. A drizzle of sesame oil at the end adds a lot.",
|
||||
role_hints={
|
||||
"cooked rice": "Day-old cold rice works best",
|
||||
"protein": "Pre-cooked or raw -- cook before adding rice",
|
||||
"soy sauce or seasoning": "The primary flavour driver",
|
||||
"oil": "High smoke-point oil for high heat",
|
||||
"egg": "Scrambled in the same pan",
|
||||
"vegetables": "Add crunch and colour",
|
||||
"garlic or ginger": "Aromatic base -- add first",
|
||||
},
|
||||
),
|
||||
AssemblyTemplate(
|
||||
id=-3,
|
||||
slug="omelette_scramble",
|
||||
icon="🥚",
|
||||
descriptor="Eggs with fillings, pan-cooked",
|
||||
title="Omelette / Scramble",
|
||||
required=[
|
||||
AssemblyRole("eggs", ["egg"]),
|
||||
|
|
@ -275,19 +238,9 @@ ASSEMBLY_TEMPLATES: list[AssemblyTemplate] = [
|
|||
"Season and serve immediately.",
|
||||
],
|
||||
notes="Works for breakfast, lunch, or a quick dinner. Any leftover vegetables work well.",
|
||||
role_hints={
|
||||
"eggs": "The base -- beat with a splash of water",
|
||||
"cheese": "Fold in just before serving",
|
||||
"vegetables": "Saute first, then add eggs",
|
||||
"protein": "Cook through before adding eggs",
|
||||
"herbs or seasoning": "Season at the end",
|
||||
},
|
||||
),
|
||||
AssemblyTemplate(
|
||||
id=-4,
|
||||
slug="stir_fry",
|
||||
icon="🥢",
|
||||
descriptor="High-heat protein + veg in sauce",
|
||||
title="Stir Fry",
|
||||
required=[
|
||||
AssemblyRole("vegetables", [
|
||||
|
|
@ -295,8 +248,6 @@ ASSEMBLY_TEMPLATES: list[AssemblyTemplate] = [
|
|||
"zucchini", "mushroom", "corn", "onion", "bean sprout",
|
||||
"cabbage", "spinach", "asparagus",
|
||||
]),
|
||||
# Starch base required — prevents this from firing on any pantry with vegetables
|
||||
AssemblyRole("starch base", ["rice", "noodle", "pasta", "ramen", "cauliflower rice"]),
|
||||
],
|
||||
optional=[
|
||||
AssemblyRole("protein", [
|
||||
|
|
@ -306,6 +257,7 @@ ASSEMBLY_TEMPLATES: list[AssemblyTemplate] = [
|
|||
"soy sauce", "teriyaki", "oyster sauce", "hoisin",
|
||||
"stir fry sauce", "sesame",
|
||||
]),
|
||||
AssemblyRole("starch base", ["rice", "noodle", "pasta", "ramen"]),
|
||||
AssemblyRole("garlic or ginger", ["garlic", "ginger"]),
|
||||
AssemblyRole("oil", ["oil", "sesame"]),
|
||||
],
|
||||
|
|
@ -318,20 +270,9 @@ ASSEMBLY_TEMPLATES: list[AssemblyTemplate] = [
|
|||
"Serve over rice or noodles.",
|
||||
],
|
||||
notes="High heat is the key. Do not crowd the pan -- cook in batches if needed.",
|
||||
role_hints={
|
||||
"vegetables": "Cut to similar size for even cooking",
|
||||
"starch base": "Serve under or toss with the stir fry",
|
||||
"protein": "Cook first, remove, add back at end",
|
||||
"sauce": "Add last -- toss for 1-2 minutes only",
|
||||
"garlic or ginger": "Add early for aromatic base",
|
||||
"oil": "High smoke-point oil only",
|
||||
},
|
||||
),
|
||||
AssemblyTemplate(
|
||||
id=-5,
|
||||
slug="pasta",
|
||||
icon="🍝",
|
||||
descriptor="Pantry pasta with flexible sauce",
|
||||
title="Pasta with Whatever You Have",
|
||||
required=[
|
||||
AssemblyRole("pasta", [
|
||||
|
|
@ -365,20 +306,9 @@ ASSEMBLY_TEMPLATES: list[AssemblyTemplate] = [
|
|||
"Toss cooked pasta with sauce. Finish with cheese if using.",
|
||||
],
|
||||
notes="Pasta water is the secret -- the starch thickens and binds any sauce.",
|
||||
role_hints={
|
||||
"pasta": "The base -- cook al dente, reserve pasta water",
|
||||
"sauce base": "Simmer 5 min; pasta water loosens it",
|
||||
"protein": "Cook through before adding sauce",
|
||||
"cheese": "Finish off heat to avoid graininess",
|
||||
"vegetables": "Saute until tender before adding sauce",
|
||||
"garlic": "Saute in oil first -- the flavour foundation",
|
||||
},
|
||||
),
|
||||
AssemblyTemplate(
|
||||
id=-6,
|
||||
slug="sandwich_wrap",
|
||||
icon="🥪",
|
||||
descriptor="Protein + veg between bread or in a wrap",
|
||||
title="Sandwich / Wrap",
|
||||
required=[
|
||||
AssemblyRole("bread or wrap", [
|
||||
|
|
@ -410,19 +340,9 @@ ASSEMBLY_TEMPLATES: list[AssemblyTemplate] = [
|
|||
"Press together and cut diagonally.",
|
||||
],
|
||||
notes="Leftovers, deli meat, canned fish -- nearly anything works between bread.",
|
||||
role_hints={
|
||||
"bread or wrap": "Toast for better texture",
|
||||
"protein": "Layer on first after condiments",
|
||||
"cheese": "Goes on top of protein",
|
||||
"condiment": "Spread on both inner surfaces",
|
||||
"vegetables": "Top layer -- keeps bread from getting soggy",
|
||||
},
|
||||
),
|
||||
AssemblyTemplate(
|
||||
id=-7,
|
||||
slug="grain_bowl",
|
||||
icon="🥗",
|
||||
descriptor="Grain base + protein + toppings + dressing",
|
||||
title="Grain Bowl",
|
||||
required=[
|
||||
AssemblyRole("grain base", [
|
||||
|
|
@ -456,25 +376,14 @@ ASSEMBLY_TEMPLATES: list[AssemblyTemplate] = [
|
|||
"Drizzle with dressing and add toppings.",
|
||||
],
|
||||
notes="Great for meal prep -- cook grains and proteins in bulk, assemble bowls all week.",
|
||||
role_hints={
|
||||
"grain base": "Season while cooking -- bland grains sink the bowl",
|
||||
"protein": "Slice or shred; arrange on top",
|
||||
"vegetables": "Roast or saute for best flavour",
|
||||
"dressing or sauce": "Drizzle last -- ties everything together",
|
||||
"toppings": "Add crunch and contrast",
|
||||
},
|
||||
),
|
||||
AssemblyTemplate(
|
||||
id=-8,
|
||||
slug="soup_stew",
|
||||
icon="🥣",
|
||||
descriptor="Liquid-based, flexible ingredients",
|
||||
title="Soup / Stew",
|
||||
required=[
|
||||
# Narrow to dedicated soup bases — tomato sauce and coconut milk are
|
||||
# pantry staples used in too many non-soup dishes to serve as anchors.
|
||||
AssemblyRole("broth or stock", [
|
||||
"broth", "stock", "bouillon", "cream of",
|
||||
AssemblyRole("broth or liquid base", [
|
||||
"broth", "stock", "bouillon",
|
||||
"tomato sauce", "coconut milk", "cream of",
|
||||
]),
|
||||
],
|
||||
optional=[
|
||||
|
|
@ -504,19 +413,9 @@ ASSEMBLY_TEMPLATES: list[AssemblyTemplate] = [
|
|||
"Season to taste and simmer at least 20 minutes for flavors to develop.",
|
||||
],
|
||||
notes="Soups and stews improve overnight in the fridge. Almost any combination works.",
|
||||
role_hints={
|
||||
"broth or stock": "The liquid base -- determines overall flavour",
|
||||
"protein": "Brown first for deeper flavour",
|
||||
"vegetables": "Dense veg first; quick-cooking veg last",
|
||||
"starch thickener": "Adds body and turns soup into stew",
|
||||
"seasoning": "Taste and adjust after 20 min simmer",
|
||||
},
|
||||
),
|
||||
AssemblyTemplate(
|
||||
id=-9,
|
||||
slug="casserole_bake",
|
||||
icon="🫙",
|
||||
descriptor="Oven bake with protein, veg, starch",
|
||||
title="Casserole / Bake",
|
||||
required=[
|
||||
AssemblyRole("starch or base", [
|
||||
|
|
@ -556,20 +455,9 @@ ASSEMBLY_TEMPLATES: list[AssemblyTemplate] = [
|
|||
"Bake covered 25 minutes, then uncovered 15 minutes until golden and bubbly.",
|
||||
],
|
||||
notes="Classic pantry dump dinner. Cream of anything soup is the universal binder.",
|
||||
role_hints={
|
||||
"starch or base": "Cook slightly underdone -- finishes in oven",
|
||||
"binder or sauce": "Coats everything and holds the bake together",
|
||||
"protein": "Pre-cook before mixing in",
|
||||
"vegetables": "Chop small for even distribution",
|
||||
"cheese topping": "Goes on last -- browns in the final 15 min",
|
||||
"seasoning": "Casseroles need more salt than you think",
|
||||
},
|
||||
),
|
||||
AssemblyTemplate(
|
||||
id=-10,
|
||||
slug="pancakes_quickbread",
|
||||
icon="🥞",
|
||||
descriptor="Batter-based; sweet or savory",
|
||||
title="Pancakes / Waffles / Quick Bread",
|
||||
required=[
|
||||
AssemblyRole("flour or baking mix", [
|
||||
|
|
@ -605,20 +493,9 @@ ASSEMBLY_TEMPLATES: list[AssemblyTemplate] = [
|
|||
"For muffins or quick bread: pour into greased pan, bake at 375 F until a toothpick comes out clean.",
|
||||
],
|
||||
notes="Overmixing develops gluten and makes pancakes tough. Stop when just combined.",
|
||||
role_hints={
|
||||
"flour or baking mix": "Whisk dry ingredients together first",
|
||||
"leavening or egg": "Activates rise -- don't skip",
|
||||
"liquid": "Add to dry ingredients; lumps are fine",
|
||||
"fat": "Adds richness and prevents sticking",
|
||||
"sweetener": "Mix into wet ingredients",
|
||||
"mix-ins": "Fold in last -- gently",
|
||||
},
|
||||
),
|
||||
AssemblyTemplate(
|
||||
id=-11,
|
||||
slug="porridge_oatmeal",
|
||||
icon="🌾",
|
||||
descriptor="Oat or grain base with toppings",
|
||||
title="Porridge / Oatmeal",
|
||||
required=[
|
||||
AssemblyRole("oats or grain porridge", [
|
||||
|
|
@ -641,20 +518,9 @@ ASSEMBLY_TEMPLATES: list[AssemblyTemplate] = [
|
|||
"Top with fruit, nuts, or seeds and serve immediately.",
|
||||
],
|
||||
notes="Overnight oats: skip cooking — soak oats in cold milk overnight in the fridge.",
|
||||
role_hints={
|
||||
"oats or grain porridge": "1 part oats to 2 parts liquid",
|
||||
"liquid": "Use milk for creamier result",
|
||||
"sweetener": "Stir in after cooking",
|
||||
"fruit": "Add fresh on top or simmer dried fruit in",
|
||||
"toppings": "Add last for crunch",
|
||||
"spice": "Stir in with sweetener",
|
||||
},
|
||||
),
|
||||
AssemblyTemplate(
|
||||
id=-12,
|
||||
slug="pie_pot_pie",
|
||||
icon="🥧",
|
||||
descriptor="Pastry or biscuit crust with filling",
|
||||
title="Pie / Pot Pie",
|
||||
required=[
|
||||
AssemblyRole("pastry or crust", [
|
||||
|
|
@ -693,20 +559,9 @@ ASSEMBLY_TEMPLATES: list[AssemblyTemplate] = [
|
|||
"For sweet pie: fill unbaked crust with fruit filling, top with second crust or crumble, bake similarly.",
|
||||
],
|
||||
notes="Puff pastry from the freezer is the shortcut to impressive pot pies. Thaw in the fridge overnight.",
|
||||
role_hints={
|
||||
"pastry or crust": "Thaw puff pastry overnight in fridge",
|
||||
"protein filling": "Cook through before adding to filling",
|
||||
"vegetables": "Chop small; cook until just tender",
|
||||
"sauce or binder": "Holds the filling together in the crust",
|
||||
"seasoning": "Fillings need generous seasoning",
|
||||
"sweet filling": "For dessert pies -- fruit + sugar",
|
||||
},
|
||||
),
|
||||
AssemblyTemplate(
|
||||
id=-13,
|
||||
slug="pudding_custard",
|
||||
icon="🍮",
|
||||
descriptor="Dairy-based set dessert",
|
||||
title="Pudding / Custard",
|
||||
required=[
|
||||
AssemblyRole("dairy or dairy-free milk", [
|
||||
|
|
@ -717,12 +572,6 @@ ASSEMBLY_TEMPLATES: list[AssemblyTemplate] = [
|
|||
"egg", "cornstarch", "custard powder", "gelatin",
|
||||
"agar", "tapioca", "arrowroot",
|
||||
]),
|
||||
# Require a clear dessert-intent signal — milk + eggs alone is too generic
|
||||
# (also covers white sauce, quiche, etc.)
|
||||
AssemblyRole("sweetener or flavouring", [
|
||||
"sugar", "honey", "maple syrup", "condensed milk",
|
||||
"vanilla", "chocolate", "cocoa", "caramel", "custard powder",
|
||||
]),
|
||||
],
|
||||
optional=[
|
||||
AssemblyRole("sweetener", ["sugar", "honey", "maple syrup", "condensed milk"]),
|
||||
|
|
@ -744,58 +593,10 @@ ASSEMBLY_TEMPLATES: list[AssemblyTemplate] = [
|
|||
"Pour into dishes and refrigerate at least 2 hours to set.",
|
||||
],
|
||||
notes="UK-style pudding is broad — bread pudding, rice pudding, spotted dick, treacle sponge all count.",
|
||||
role_hints={
|
||||
"dairy or dairy-free milk": "Heat until steaming before adding to eggs",
|
||||
"thickener or set": "Cornstarch for stovetop; eggs for baked custard",
|
||||
"sweetener or flavouring": "Signals dessert intent -- required",
|
||||
"sweetener": "Adjust to taste",
|
||||
"flavouring": "Add off-heat to preserve aroma",
|
||||
"starchy base": "For bread pudding or rice pudding",
|
||||
"fruit": "Layer in or fold through before setting",
|
||||
},
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
# Slug to template lookup (built once at import time)
|
||||
_TEMPLATE_BY_SLUG: dict[str, AssemblyTemplate] = {
|
||||
t.slug: t for t in ASSEMBLY_TEMPLATES
|
||||
}
|
||||
|
||||
|
||||
def get_templates_for_api() -> list[dict]:
|
||||
"""Serialise all 13 templates for GET /api/recipes/templates.
|
||||
|
||||
Combines required and optional roles into a single ordered role_sequence
|
||||
with required roles first.
|
||||
"""
|
||||
out = []
|
||||
for tmpl in ASSEMBLY_TEMPLATES:
|
||||
roles = []
|
||||
for role in tmpl.required:
|
||||
roles.append({
|
||||
"display": role.display,
|
||||
"required": True,
|
||||
"keywords": role.keywords,
|
||||
"hint": tmpl.role_hints.get(role.display, ""),
|
||||
})
|
||||
for role in tmpl.optional:
|
||||
roles.append({
|
||||
"display": role.display,
|
||||
"required": False,
|
||||
"keywords": role.keywords,
|
||||
"hint": tmpl.role_hints.get(role.display, ""),
|
||||
})
|
||||
out.append({
|
||||
"id": tmpl.slug,
|
||||
"title": tmpl.title,
|
||||
"icon": tmpl.icon,
|
||||
"descriptor": tmpl.descriptor,
|
||||
"role_sequence": roles,
|
||||
})
|
||||
return out
|
||||
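A quick shape check for the serialiser above; the import path is hypothetical since the module name is not visible in this hunk:

```python
# Illustrative only; not part of the diff.
from app.services.recipes.assembly_templates import get_templates_for_api  # hypothetical path

templates = get_templates_for_api()
first = templates[0]
assert first["id"] == "burrito_taco"                  # serialised by slug, not the negative numeric id
assert first["role_sequence"][0]["required"] is True  # required roles are listed first
assert set(first) == {"id", "title", "icon", "descriptor", "role_sequence"}
```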
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Public API
|
||||
# ---------------------------------------------------------------------------
|
||||
|
|
@ -804,20 +605,14 @@ def match_assembly_templates(
|
|||
pantry_items: list[str],
|
||||
pantry_set: set[str],
|
||||
excluded_ids: list[int],
|
||||
expiring_set: set[str] | None = None,
|
||||
) -> list[RecipeSuggestion]:
|
||||
"""Return assembly-dish suggestions whose required roles are all satisfied.
|
||||
|
||||
Titles are personalized with specific pantry items (deterministically chosen
|
||||
from the pantry contents so the same pantry always produces the same title).
|
||||
Skips templates whose id is in excluded_ids (dismiss/load-more support).
|
||||
|
||||
expiring_set: expanded pantry set of items close to expiry. Templates that
|
||||
use an expiring item in a required role get +2 added to match_count so they
|
||||
rank higher when the caller sorts the combined result list.
|
||||
"""
|
||||
excluded = set(excluded_ids)
|
||||
expiring = expiring_set or set()
|
||||
seed = _pantry_hash(pantry_set)
|
||||
results: list[RecipeSuggestion] = []
|
||||
|
||||
|
|
@ -825,40 +620,20 @@ def match_assembly_templates(
|
|||
if tmpl.id in excluded:
|
||||
continue
|
||||
|
||||
# All required roles must be satisfied; collect matched items for required roles
|
||||
required_matches: list[str] = []
|
||||
skip = False
|
||||
for role in tmpl.required:
|
||||
hits = _matches_role(role, pantry_set)
|
||||
if not hits:
|
||||
skip = True
|
||||
break
|
||||
required_matches.append(_pick_one(hits, seed + tmpl.id))
|
||||
if skip:
|
||||
# All required roles must be satisfied
|
||||
if any(not _matches_role(role, pantry_set) for role in tmpl.required):
|
||||
continue
|
||||
|
||||
# Collect matched items for optional roles (one representative per matched role)
|
||||
optional_matches: list[str] = []
|
||||
for role in tmpl.optional:
|
||||
hits = _matches_role(role, pantry_set)
|
||||
if hits:
|
||||
optional_matches.append(_pick_one(hits, seed + tmpl.id))
|
||||
|
||||
matched = required_matches + optional_matches
|
||||
|
||||
# Expiry boost: +2 if any required ingredient is in the expiring set,
|
||||
# so time-sensitive templates surface first in the merged ranking.
|
||||
expiry_bonus = 2 if expiring and any(
|
||||
item.lower() in expiring for item in required_matches
|
||||
) else 0
|
||||
optional_hit_count = sum(
|
||||
1 for role in tmpl.optional if _matches_role(role, pantry_set)
|
||||
)
|
||||
|
||||
results.append(RecipeSuggestion(
|
||||
id=tmpl.id,
|
||||
title=_personalized_title(tmpl, pantry_set, seed + tmpl.id),
|
||||
match_count=len(matched) + expiry_bonus,
|
||||
match_count=len(tmpl.required) + optional_hit_count,
|
||||
element_coverage={},
|
||||
swap_candidates=[],
|
||||
matched_ingredients=matched,
|
||||
missing_ingredients=[],
|
||||
directions=tmpl.directions,
|
||||
notes=tmpl.notes,
|
||||
|
|
@ -870,148 +645,3 @@ def match_assembly_templates(
|
|||
# Sort by optional coverage descending — best-matched templates first
|
||||
results.sort(key=lambda s: s.match_count, reverse=True)
|
||||
return results
|
||||
|
||||
|
||||
def get_role_candidates(
|
||||
template_slug: str,
|
||||
role_display: str,
|
||||
pantry_set: set[str],
|
||||
prior_picks: list[str],
|
||||
profile_index: dict[str, list[str]],
|
||||
) -> dict:
|
||||
"""Return ingredient candidates for one wizard step.
|
||||
|
||||
Splits candidates into 'compatible' (element overlap with prior picks)
|
||||
and 'other' (valid for role but no overlap).
|
||||
|
||||
profile_index: {ingredient_name: [element_tag, ...]} -- pre-loaded from
|
||||
Store.get_element_profiles() by the caller so this function stays DB-free.
|
||||
|
||||
Returns {"compatible": [...], "other": [...], "available_tags": [...]}
|
||||
where each item is {"name": str, "in_pantry": bool, "tags": [str]}.
|
||||
"""
|
||||
tmpl = _TEMPLATE_BY_SLUG.get(template_slug)
|
||||
if tmpl is None:
|
||||
return {"compatible": [], "other": [], "available_tags": []}
|
||||
|
||||
# Find the AssemblyRole for this display name
|
||||
target_role: AssemblyRole | None = None
|
||||
for role in tmpl.required + tmpl.optional:
|
||||
if role.display == role_display:
|
||||
target_role = role
|
||||
break
|
||||
if target_role is None:
|
||||
return {"compatible": [], "other": [], "available_tags": []}
|
||||
|
||||
# Build prior-pick element set for compatibility scoring
|
||||
prior_elements: set[str] = set()
|
||||
for pick in prior_picks:
|
||||
prior_elements.update(profile_index.get(pick, []))
|
||||
|
||||
# Find pantry items that match this role
|
||||
pantry_matches = _matches_role(target_role, pantry_set)
|
||||
|
||||
# Build keyword-based "other" candidates from role keywords not in pantry
|
||||
pantry_lower = {p.lower() for p in pantry_set}
|
||||
other_names: list[str] = []
|
||||
for kw in target_role.keywords:
|
||||
if not any(kw in item.lower() for item in pantry_lower):
|
||||
if len(kw) >= 4:
|
||||
other_names.append(kw.title())
|
||||
|
||||
def _make_item(name: str, in_pantry: bool) -> dict:
|
||||
tags = profile_index.get(name, profile_index.get(name.lower(), []))
|
||||
return {"name": name, "in_pantry": in_pantry, "tags": tags}
|
||||
|
||||
# Score: compatible if shares any element with prior picks (or no prior picks yet)
|
||||
compatible: list[dict] = []
|
||||
other: list[dict] = []
|
||||
for name in pantry_matches:
|
||||
item_elements = set(profile_index.get(name, []))
|
||||
item = _make_item(name, in_pantry=True)
|
||||
if not prior_elements or item_elements & prior_elements:
|
||||
compatible.append(item)
|
||||
else:
|
||||
other.append(item)
|
||||
|
||||
for name in other_names:
|
||||
other.append(_make_item(name, in_pantry=False))
|
||||
|
||||
# available_tags: union of all tags in the full candidate set
|
||||
all_tags: set[str] = set()
|
||||
for item in compatible + other:
|
||||
all_tags.update(item["tags"])
|
||||
|
||||
return {
|
||||
"compatible": compatible,
|
||||
"other": other,
|
||||
"available_tags": sorted(all_tags),
|
||||
}
|
||||
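An illustrative wizard-step call for the helper above; the profile_index tags are invented, and the exact compatible/other split depends on the role's keyword list, which is not shown here. The unknown-slug behaviour is fully determined by the code above:

```python
# Illustrative only; not part of the diff.
from app.services.recipes.assembly_templates import get_role_candidates  # hypothetical path

pantry = {"cheddar", "mozzarella"}
profile_index = {"cheddar": ["dairy", "sharp"], "mozzarella": ["dairy", "mild"]}  # made-up tags

step = get_role_candidates("burrito_taco", "cheese", pantry, prior_picks=[], profile_index=profile_index)
# With no prior picks, every role-matching pantry item lands in "compatible".

# Unknown slugs (and unknown role names) return empty lists rather than raising:
assert get_role_candidates("no_such_template", "cheese", pantry, [], {}) == {
    "compatible": [], "other": [], "available_tags": [],
}
```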
|
||||
|
||||
def build_from_selection(
|
||||
template_slug: str,
|
||||
role_overrides: dict[str, str],
|
||||
pantry_set: set[str],
|
||||
) -> "RecipeSuggestion | None":
|
||||
"""Build a RecipeSuggestion from explicit role selections.
|
||||
|
||||
role_overrides: {role.display -> chosen pantry item name}
|
||||
|
||||
Returns None if template not found or any required role is uncovered.
|
||||
"""
|
||||
tmpl = _TEMPLATE_BY_SLUG.get(template_slug)
|
||||
if tmpl is None:
|
||||
return None
|
||||
|
||||
seed = _pantry_hash(pantry_set)
|
||||
|
||||
# Validate required roles: covered by override OR pantry match
|
||||
matched_required: list[str] = []
|
||||
for role in tmpl.required:
|
||||
chosen = role_overrides.get(role.display)
|
||||
if chosen:
|
||||
matched_required.append(chosen)
|
||||
else:
|
||||
hits = _matches_role(role, pantry_set)
|
||||
if not hits:
|
||||
return None
|
||||
matched_required.append(_pick_one(hits, seed + tmpl.id))
|
||||
|
||||
# Collect optional matches (override preferred, then pantry match)
|
||||
matched_optional: list[str] = []
|
||||
for role in tmpl.optional:
|
||||
chosen = role_overrides.get(role.display)
|
||||
if chosen:
|
||||
matched_optional.append(chosen)
|
||||
else:
|
||||
hits = _matches_role(role, pantry_set)
|
||||
if hits:
|
||||
matched_optional.append(_pick_one(hits, seed + tmpl.id))
|
||||
|
||||
all_matched = matched_required + matched_optional
|
||||
|
||||
# Build title: prefer override items for personalisation
|
||||
effective_pantry = pantry_set | set(role_overrides.values())
|
||||
title = _personalized_title(tmpl, effective_pantry, seed + tmpl.id)
|
||||
|
||||
# Items in role_overrides that aren't in the user's pantry = shopping list
|
||||
missing = [
|
||||
item for item in role_overrides.values()
|
||||
if item and item not in pantry_set
|
||||
]
|
||||
|
||||
return RecipeSuggestion(
|
||||
id=tmpl.id,
|
||||
title=title,
|
||||
match_count=len(all_matched),
|
||||
element_coverage={},
|
||||
swap_candidates=[],
|
||||
matched_ingredients=all_matched,
|
||||
missing_ingredients=missing,
|
||||
directions=tmpl.directions,
|
||||
notes=tmpl.notes,
|
||||
level=1,
|
||||
is_wildcard=False,
|
||||
nutrition=None,
|
||||
)
|
||||
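A sketch of the wizard's final "build" step: explicit picks override pantry matching, and an overridden item not already in the pantry becomes a shopping-list entry. The import path is hypothetical:

```python
# Illustrative only; not part of the diff.
from app.services.recipes.assembly_templates import build_from_selection  # hypothetical path

suggestion = build_from_selection(
    template_slug="burrito_taco",
    role_overrides={"tortilla or wrap": "flour tortilla", "protein": "rotisserie chicken"},
    pantry_set={"flour tortilla", "salsa"},
)
if suggestion is not None:  # None if some other required role is left uncovered
    assert "rotisserie chicken" in suggestion.missing_ingredients  # not in the pantry, so shop for it
```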
|
|
|
|||
|
|
@ -1,633 +0,0 @@
|
|||
"""
|
||||
Recipe browser domain schemas.
|
||||
|
||||
Each domain provides a two-level category hierarchy for browsing the recipe corpus.
|
||||
Keyword matching is case-insensitive against the recipes.category column and the
|
||||
recipes.keywords JSON array. A recipe may appear in multiple categories; this is intentional.
|
||||
|
||||
Category values are either:
|
||||
- list[str] — flat keyword list (no subcategories)
|
||||
- dict — {"keywords": list[str], "subcategories": {name: list[str]}}
|
||||
keywords covers the whole category (used for "All X" browse);
|
||||
subcategories each have their own narrower keyword list.
|
||||
|
||||
These are starter mappings based on the food.com dataset structure. Run:
|
||||
|
||||
SELECT category, count(*) FROM recipes
|
||||
GROUP BY category ORDER BY count(*) DESC LIMIT 50;
|
||||
|
||||
against the corpus to verify coverage and refine keyword lists before the first
|
||||
production deploy.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
DOMAINS: dict[str, dict] = {
|
||||
"cuisine": {
|
||||
"label": "Cuisine",
|
||||
"categories": {
|
||||
"Italian": {
|
||||
"keywords": ["italian", "pasta", "pizza", "risotto", "lasagna", "carbonara"],
|
||||
"subcategories": {
|
||||
"Sicilian": ["sicilian", "sicily", "arancini", "caponata",
|
||||
"involtini", "cannoli"],
|
||||
"Neapolitan": ["neapolitan", "naples", "pizza napoletana",
|
||||
"sfogliatelle", "ragù"],
|
||||
"Tuscan": ["tuscan", "tuscany", "ribollita", "bistecca",
|
||||
"pappardelle", "crostini"],
|
||||
"Roman": ["roman", "rome", "cacio e pepe", "carbonara",
|
||||
"amatriciana", "gricia", "supplì"],
|
||||
"Venetian": ["venetian", "venice", "risotto", "bigoli",
|
||||
"baccalà", "sarde in saor"],
|
||||
"Ligurian": ["ligurian", "liguria", "pesto", "focaccia",
|
||||
"trofie", "farinata"],
|
||||
},
|
||||
},
|
||||
"Mexican": {
|
||||
"keywords": ["mexican", "taco", "enchilada", "burrito", "salsa",
|
||||
"guacamole", "mole", "tamale"],
|
||||
"subcategories": {
|
||||
"Oaxacan": ["oaxacan", "oaxaca", "mole negro", "tlayuda",
|
||||
"chapulines", "mezcal", "tasajo", "memelas"],
|
||||
"Yucatecan": ["yucatecan", "yucatan", "cochinita pibil", "poc chuc",
|
||||
"sopa de lima", "panuchos", "papadzules"],
|
||||
"Veracruz": ["veracruz", "veracruzana", "huachinango",
|
||||
"picadas", "enfrijoladas", "caldo de mariscos"],
|
||||
"Street Food": ["taco", "elote", "tlacoyos", "torta", "tamale",
|
||||
"quesadilla", "tostada", "sope", "gordita"],
|
||||
"Mole": ["mole", "mole negro", "mole rojo", "mole verde",
|
||||
"mole poblano", "mole amarillo", "pipián"],
|
||||
"Baja / Cal-Mex": ["baja", "baja california", "cal-mex", "baja fish taco",
|
||||
"fish taco", "carne asada fries", "california burrito",
|
||||
"birria", "birria tacos", "quesabirria",
|
||||
"lobster puerto nuevo", "tijuana", "ensenada",
|
||||
"agua fresca", "caesar salad tijuana"],
|
||||
"Mexico City": ["mexico city", "chilaquiles", "tlayuda cdmx",
|
||||
"tacos de canasta", "torta ahogada", "pozole",
|
||||
"chiles en nogada"],
|
||||
},
|
||||
},
|
||||
"Asian": {
|
||||
"keywords": ["asian", "chinese", "japanese", "thai", "korean", "vietnamese",
|
||||
"stir fry", "stir-fry", "ramen", "sushi", "malaysian",
|
||||
"taiwanese", "singaporean", "burmese", "cambodian",
|
||||
"laotian", "mongolian", "hong kong"],
|
||||
"subcategories": {
|
||||
"Korean": ["korean", "kimchi", "bibimbap", "bulgogi", "japchae",
|
||||
"doenjang", "gochujang", "tteokbokki", "sundubu",
|
||||
"galbi", "jjigae", "kbbq", "korean fried chicken"],
|
||||
"Japanese": ["japanese", "sushi", "ramen", "tempura", "miso",
|
||||
"teriyaki", "udon", "soba", "bento", "yakitori",
|
||||
"tonkatsu", "onigiri", "okonomiyaki", "takoyaki",
|
||||
"kaiseki", "izakaya"],
|
||||
"Chinese": ["chinese", "dim sum", "fried rice", "dumplings", "wonton",
|
||||
"spring roll", "szechuan", "sichuan", "cantonese",
|
||||
"chow mein", "mapo tofu", "lo mein", "hot pot",
|
||||
"peking duck", "char siu", "congee"],
|
||||
"Thai": ["thai", "pad thai", "green curry", "red curry",
|
||||
"coconut milk", "lemongrass", "satay", "tom yum",
|
||||
"larb", "khao man gai", "massaman", "pad see ew"],
|
||||
"Vietnamese": ["vietnamese", "pho", "banh mi", "spring rolls",
|
||||
"vermicelli", "nuoc cham", "bun bo hue",
|
||||
"banh xeo", "com tam", "bun cha"],
|
||||
"Filipino": ["filipino", "adobo", "sinigang", "pancit", "lumpia",
|
||||
"kare-kare", "lechon", "sisig", "halo-halo",
|
||||
"dinuguan", "tinola", "bistek"],
|
||||
"Indonesian": ["indonesian", "rendang", "nasi goreng", "gado-gado",
|
||||
"tempeh", "sambal", "soto", "opor ayam",
|
||||
"bakso", "mie goreng", "nasi uduk"],
|
||||
"Malaysian": ["malaysian", "laksa", "nasi lemak", "char kway teow",
|
||||
"satay malaysia", "roti canai", "bak kut teh",
|
||||
"cendol", "mee goreng mamak", "curry laksa"],
|
||||
"Taiwanese": ["taiwanese", "beef noodle soup", "lu rou fan",
|
||||
"oyster vermicelli", "scallion pancake taiwan",
|
||||
"pork chop rice", "three cup chicken",
|
||||
"bubble tea", "stinky tofu", "ba wan"],
|
||||
"Singaporean": ["singaporean", "chicken rice", "chili crab",
|
||||
"singaporean laksa", "bak chor mee", "rojak",
|
||||
"kaya toast", "nasi padang", "satay singapore"],
|
||||
"Burmese": ["burmese", "myanmar", "mohinga", "laphet thoke",
|
||||
"tea leaf salad", "ohn no khao swe",
|
||||
"mont di", "nangyi thoke"],
|
||||
"Hong Kong": ["hong kong", "hk style", "pineapple bun",
|
||||
"wonton noodle soup", "hk milk tea", "egg tart",
|
||||
"typhoon shelter crab", "char siu bao", "jook",
|
||||
"congee hk", "silk stocking tea", "dan tat",
|
||||
"siu mai hk", "cheung fun"],
|
||||
"Cambodian": ["cambodian", "khmer", "amok", "lok lak",
|
||||
"kuy teav", "bai sach chrouk", "nom banh chok",
|
||||
"samlor korko", "beef loc lac"],
|
||||
"Laotian": ["laotian", "lao", "larb", "tam mak hoong",
|
||||
"or lam", "khao niaw", "ping kai",
|
||||
"naem khao", "khao piak sen", "mok pa"],
|
||||
"Mongolian": ["mongolian", "buuz", "khuushuur", "tsuivan",
|
||||
"boodog", "airag", "khorkhog", "bansh",
|
||||
"guriltai shol", "suutei tsai"],
|
||||
"South Asian Fusion": ["south asian fusion", "indo-chinese",
|
||||
"hakka chinese", "chilli chicken",
|
||||
"manchurian", "schezwan"],
|
||||
},
|
||||
},
|
||||
"Indian": {
|
||||
"keywords": ["indian", "curry", "lentil", "dal", "tikka", "masala",
|
||||
"biryani", "naan", "chutney", "pakistani", "sri lankan",
|
||||
"bangladeshi", "nepali"],
|
||||
"subcategories": {
|
||||
"North Indian": ["north indian", "punjabi", "mughal", "tikka masala",
|
||||
"naan", "tandoori", "butter chicken", "palak paneer",
|
||||
"chole", "rajma", "aloo gobi"],
|
||||
"South Indian": ["south indian", "tamil", "kerala", "dosa", "idli",
|
||||
"sambar", "rasam", "coconut chutney", "appam",
|
||||
"fish curry kerala", "puttu", "payasam"],
|
||||
"Bengali": ["bengali", "mustard fish", "hilsa", "shorshe ilish",
|
||||
"mishti doi", "rasgulla", "kosha mangsho"],
|
||||
"Gujarati": ["gujarati", "dhokla", "thepla", "undhiyu",
|
||||
"khandvi", "fafda", "gujarati dal"],
|
||||
"Pakistani": ["pakistani", "nihari", "haleem", "seekh kebab",
|
||||
"karahi", "biryani karachi", "chapli kebab",
|
||||
"halwa puri", "paya"],
|
||||
"Sri Lankan": ["sri lankan", "kottu roti", "hoppers", "pol sambol",
|
||||
"sri lankan curry", "lamprais", "string hoppers",
|
||||
"wambatu moju"],
|
||||
"Bangladeshi": ["bangladeshi", "bangladesh", "dhaka biryani",
|
||||
"shutki", "pitha", "hilsa curry", "kacchi biryani",
|
||||
"bhuna khichuri", "doi maach", "rezala"],
|
||||
"Nepali": ["nepali", "dal bhat", "momos", "sekuwa",
|
||||
"sel roti", "gundruk", "thukpa"],
|
||||
},
|
||||
},
|
||||
"Mediterranean": {
|
||||
"keywords": ["mediterranean", "greek", "middle eastern", "turkish",
|
||||
"lebanese", "jewish", "palestinian", "yemeni", "egyptian",
|
||||
"syrian", "iraqi", "jordanian"],
|
||||
"subcategories": {
|
||||
"Greek": ["greek", "feta", "tzatziki", "moussaka", "spanakopita",
|
||||
"souvlaki", "dolmades", "spanakopita", "tiropita",
|
||||
"galaktoboureko"],
|
||||
"Turkish": ["turkish", "kebab", "borek", "meze", "baklava",
|
||||
"lahmacun", "menemen", "pide", "iskender",
|
||||
"kisir", "simit"],
|
||||
"Syrian": ["syrian", "fattet hummus", "kibbeh syria",
|
||||
"muhammara", "maklouba syria", "sfeeha",
|
||||
"halawet el jibn"],
|
||||
"Lebanese": ["lebanese", "middle eastern", "hummus", "falafel",
|
||||
"tabbouleh", "kibbeh", "fattoush", "manakish",
|
||||
"kafta", "sfiha"],
|
||||
"Jewish": ["jewish", "israeli", "ashkenazi", "sephardic",
|
||||
"shakshuka", "sabich", "za'atar", "tahini",
|
||||
"zhug", "zhoug", "s'khug", "z'houg",
|
||||
"hawaiij", "hawaij", "hawayej",
|
||||
"matzo", "latke", "rugelach", "babka", "challah",
|
||||
"cholent", "gefilte fish", "brisket", "kugel",
|
||||
"new york jewish", "new york deli", "pastrami",
|
||||
"knish", "lox", "bagel and lox", "jewish deli"],
|
||||
"Palestinian": ["palestinian", "musakhan", "maqluba", "knafeh",
|
||||
"maftoul", "freekeh", "sumac chicken"],
|
||||
"Yemeni": ["yemeni", "saltah", "lahoh", "bint al-sahn",
|
||||
"zhug", "zhoug", "hulba", "fahsa",
|
||||
"hawaiij", "hawaij", "hawayej"],
|
||||
"Egyptian": ["egyptian", "koshari", "molokhia", "mahshi",
|
||||
"ful medames", "ta'ameya", "feteer meshaltet"],
|
||||
},
|
||||
},
|
||||
"American": {
|
||||
"keywords": ["american", "southern", "comfort food", "cajun", "creole",
|
||||
"hawaiian", "tex-mex", "soul food"],
|
||||
"subcategories": {
|
||||
"Southern": ["southern", "soul food", "fried chicken",
|
||||
"collard greens", "cornbread", "biscuits and gravy",
|
||||
"mac and cheese", "sweet potato pie", "okra"],
|
||||
"Cajun/Creole": ["cajun", "creole", "new orleans", "gumbo",
|
||||
"jambalaya", "etouffee", "dirty rice", "po'boy",
|
||||
"muffuletta", "red beans and rice"],
|
||||
"Tex-Mex": ["tex-mex", "southwestern", "chili", "fajita",
|
||||
"queso", "breakfast taco", "chile con carne"],
|
||||
"New England": ["new england", "chowder", "lobster", "clam",
|
||||
"maple", "yankee", "boston baked beans",
|
||||
"johnnycake", "fish and chips"],
|
||||
"Pacific Northwest": ["pacific northwest", "pnw", "dungeness crab",
|
||||
"salmon", "cedar plank", "razor clam",
|
||||
"geoduck", "chanterelle", "marionberry"],
|
||||
"Hawaiian": ["hawaiian", "hawaii", "plate lunch", "loco moco",
|
||||
"poke", "spam musubi", "kalua pig", "lau lau",
|
||||
"haupia", "poi", "manapua", "garlic shrimp",
|
||||
"saimin", "huli huli", "malasada"],
|
||||
},
|
||||
},
|
||||
"BBQ & Smoke": {
|
||||
"keywords": ["bbq", "barbecue", "smoked", "pit", "smoke ring",
|
||||
"low and slow", "brisket", "pulled pork", "ribs"],
|
||||
"subcategories": {
|
||||
"Texas BBQ": ["texas bbq", "central texas bbq", "brisket",
|
||||
"beef ribs", "post oak", "salt and pepper rub",
|
||||
"east texas bbq", "lockhart", "franklin style"],
|
||||
"Carolina BBQ": ["carolina bbq", "north carolina bbq", "whole hog",
|
||||
"vinegar sauce", "lexington style", "eastern nc",
|
||||
"south carolina bbq", "mustard sauce"],
|
||||
"Kansas City BBQ": ["kansas city bbq", "kc bbq", "burnt ends",
|
||||
"sweet bbq sauce", "tomato molasses sauce",
|
||||
"baby back ribs kc"],
|
||||
"Memphis BBQ": ["memphis bbq", "dry rub ribs", "wet ribs",
|
||||
"memphis style", "dry rub pork"],
|
||||
"Alabama BBQ": ["alabama bbq", "white sauce", "alabama white sauce",
|
||||
"smoked chicken alabama"],
|
||||
"Kentucky BBQ": ["kentucky bbq", "mutton bbq", "owensboro bbq",
|
||||
"black dip", "western kentucky barbecue"],
|
||||
"St. Louis BBQ": ["st louis bbq", "st. louis ribs", "st louis cut ribs",
|
||||
"st louis style spare ribs"],
|
||||
"Backyard Grill": ["backyard bbq", "cookout", "grilled burgers",
|
||||
"charcoal grill", "kettle grill", "tailgate"],
|
||||
},
|
||||
},
|
||||
"European": {
|
||||
"keywords": ["french", "german", "spanish", "british", "irish", "scottish",
|
||||
"welsh", "scandinavian", "nordic", "eastern european"],
|
||||
"subcategories": {
|
||||
"French": ["french", "provencal", "beurre", "crepe",
|
||||
"ratatouille", "cassoulet", "bouillabaisse"],
|
||||
"Spanish": ["spanish", "paella", "tapas", "gazpacho",
|
||||
"tortilla espanola", "chorizo"],
|
||||
"German": ["german", "bratwurst", "sauerkraut", "schnitzel",
|
||||
"pretzel", "strudel"],
|
||||
"British": ["british", "english", "pub food", "cornish",
|
||||
"shepherd's pie", "bangers", "toad in the hole",
|
||||
"coronation chicken", "london", "londoner",
|
||||
"cornish pasty", "ploughman's"],
|
||||
"Irish": ["irish", "ireland", "colcannon", "coddle",
|
||||
"irish stew", "soda bread", "boxty", "champ"],
|
||||
"Scottish": ["scottish", "scotland", "haggis", "cullen skink",
|
||||
"cranachan", "scotch broth", "glaswegian",
|
||||
"neeps and tatties", "tablet"],
|
||||
"Scandinavian": ["scandinavian", "nordic", "swedish", "norwegian",
|
||||
"danish", "finnish", "gravlax", "swedish meatballs",
|
||||
"lefse", "smörgåsbord", "fika", "crispbread",
|
||||
"cardamom bun", "herring", "æbleskiver",
|
||||
"lingonberry", "lutefisk", "janssons frestelse",
|
||||
"knäckebröd", "kladdkaka"],
|
||||
"Eastern European": ["eastern european", "polish", "russian", "ukrainian",
|
||||
"czech", "hungarian", "pierogi", "borscht",
|
||||
"goulash", "kielbasa", "varenyky", "pelmeni"],
|
||||
},
|
||||
},
|
||||
"Latin American": {
|
||||
"keywords": ["latin american", "peruvian", "argentinian", "colombian",
|
||||
"cuban", "caribbean", "brazilian", "venezuelan", "chilean"],
|
||||
"subcategories": {
|
||||
"Peruvian": ["peruvian", "ceviche", "lomo saltado", "anticucho",
|
||||
"aji amarillo", "causa", "leche de tigre",
|
||||
"arroz con leche peru", "pollo a la brasa"],
|
||||
"Brazilian": ["brazilian", "churrasco", "feijoada", "pao de queijo",
|
||||
"brigadeiro", "coxinha", "moqueca", "vatapa",
|
||||
"caipirinha", "acai bowl"],
|
||||
"Colombian": ["colombian", "bandeja paisa", "arepas", "empanadas",
|
||||
"sancocho", "ajiaco", "buñuelos", "changua"],
|
||||
"Argentinian": ["argentinian", "asado", "chimichurri", "empanadas argentina",
|
||||
"milanesa", "locro", "dulce de leche", "medialunas"],
|
||||
"Venezuelan": ["venezuelan", "pabellón criollo", "arepas venezuela",
|
||||
"hallacas", "cachapas", "tequeños", "caraotas"],
|
||||
"Chilean": ["chilean", "cazuela", "pastel de choclo", "curanto",
|
||||
"sopaipillas", "charquicán", "completo"],
|
||||
"Cuban": ["cuban", "ropa vieja", "moros y cristianos",
|
||||
"picadillo", "lechon cubano", "vaca frita",
|
||||
"tostones", "platanos maduros"],
|
||||
"Jamaican": ["jamaican", "jerk chicken", "jerk pork", "ackee saltfish",
|
||||
"curry goat", "rice and peas", "escovitch",
|
||||
"jamaican patty", "callaloo jamaica", "festival"],
|
||||
"Puerto Rican": ["puerto rican", "mofongo", "pernil", "arroz con gandules",
|
||||
"sofrito", "pasteles", "tostones pr", "tembleque",
|
||||
"coquito", "asopao"],
|
||||
"Dominican": ["dominican", "mangu", "sancocho dominicano",
|
||||
"pollo guisado", "habichuelas guisadas",
|
||||
"tostones dominicanos", "morir soñando"],
|
||||
"Haitian": ["haitian", "griot", "pikliz", "riz et pois",
|
||||
"joumou", "akra", "pain patate", "labouyi"],
|
||||
"Trinidad": ["trinidadian", "doubles", "roti trinidad", "pelau",
|
||||
"callaloo trinidad", "bake and shark",
|
||||
"curry duck", "oil down"],
|
||||
},
|
||||
},
|
||||
"Central American": {
|
||||
"keywords": ["central american", "salvadoran", "guatemalan",
|
||||
"honduran", "nicaraguan", "costa rican", "panamanian"],
|
||||
"subcategories": {
|
||||
"Salvadoran": ["salvadoran", "el salvador", "pupusas", "curtido",
|
||||
"sopa de pata", "nuégados", "atol shuco"],
|
||||
"Guatemalan": ["guatemalan", "pepián", "jocon", "kak'ik",
|
||||
"hilachas", "rellenitos", "fiambre"],
|
||||
"Costa Rican": ["costa rican", "gallo pinto", "casado",
|
||||
"olla de carne", "arroz con leche cr",
|
||||
"tres leches cr"],
|
||||
"Honduran": ["honduran", "baleadas", "sopa de caracol",
|
||||
"tapado", "machuca", "catrachitas"],
|
||||
"Nicaraguan": ["nicaraguan", "nacatamal", "vigorón", "indio viejo",
|
||||
"gallo pinto nicaragua", "güirilas"],
|
||||
},
|
||||
},
|
||||
"African": {
|
||||
"keywords": ["african", "west african", "east african", "ethiopian",
|
||||
"nigerian", "ghanaian", "kenyan", "south african",
|
||||
"senegalese", "tunisian"],
|
||||
"subcategories": {
|
||||
"West African": ["west african", "nigerian", "ghanaian",
|
||||
"jollof rice", "egusi soup", "fufu", "suya",
|
||||
"groundnut stew", "kelewele", "kontomire",
|
||||
"waakye", "ofam", "bitterleaf soup"],
|
||||
"Senegalese": ["senegalese", "senegal", "thieboudienne",
|
||||
"yassa", "mafe", "thiou", "ceebu jen",
|
||||
"domoda"],
|
||||
"Ethiopian & Eritrean": ["ethiopian", "eritrean", "injera", "doro wat",
|
||||
"kitfo", "tibs", "shiro", "misir wat",
|
||||
"gomen", "ful ethiopian", "tegamino"],
|
||||
"East African": ["east african", "kenyan", "tanzanian", "ugandan",
|
||||
"nyama choma", "ugali", "sukuma wiki",
|
||||
"pilau kenya", "mandazi", "matoke",
|
||||
"githeri", "irio"],
|
||||
"North African": ["north african", "tunisian", "algerian", "libyan",
|
||||
"brik", "lablabi", "merguez", "shakshuka tunisian",
|
||||
"harissa tunisian", "couscous algerian"],
|
||||
"South African": ["south african", "braai", "bobotie", "boerewors",
|
||||
"bunny chow", "pap", "chakalaka", "biltong",
|
||||
"malva pudding", "koeksister", "potjiekos"],
|
||||
"Moroccan": ["moroccan", "tagine", "couscous morocco",
|
||||
"harissa", "chermoula", "preserved lemon",
|
||||
"pastilla", "mechoui", "bastilla"],
|
||||
},
|
||||
},
|
||||
"Pacific & Oceania": {
|
||||
"keywords": ["pacific", "oceania", "polynesian", "melanesian",
|
||||
"micronesian", "maori", "fijian", "samoan", "tongan",
|
||||
"hawaiian", "australian", "new zealand"],
|
||||
"subcategories": {
|
||||
"Māori / New Zealand": ["maori", "new zealand", "hangi", "rewena bread",
|
||||
"boil-up", "paua", "kumara", "pavlova nz",
|
||||
"whitebait fritter", "kina", "hokey pokey"],
|
||||
"Australian": ["australian", "meat pie", "lamington",
|
||||
"anzac biscuits", "damper", "barramundi",
|
||||
"vegemite", "pavlova australia", "tim tam",
|
||||
"sausage sizzle", "chiko roll", "fairy bread"],
|
||||
"Fijian": ["fijian", "fiji", "kokoda", "lovo",
|
||||
"rourou", "palusami fiji", "duruka",
|
||||
"vakalolo"],
|
||||
"Samoan": ["samoan", "samoa", "palusami", "oka",
|
||||
"fa'ausi", "chop suey samoa", "sapasui",
|
||||
"koko alaisa", "supo esi"],
|
||||
"Tongan": ["tongan", "tonga", "lu pulu", "'ota 'ika",
|
||||
"fekkai", "faikakai topai", "kapisi pulu"],
|
||||
"Papua New Guinean": ["papua new guinea", "png", "mumu",
|
||||
"sago", "aibika", "kaukau",
|
||||
"taro png", "coconut crab"],
|
||||
"Hawaiian": ["hawaiian", "hawaii", "poke", "loco moco",
|
||||
"plate lunch", "kalua pig", "haupia",
|
||||
"spam musubi", "poi", "malasada"],
|
||||
},
|
||||
},
|
||||
"Central Asian & Caucasus": {
|
||||
"keywords": ["central asian", "caucasus", "georgian", "armenian", "uzbek",
|
||||
"afghan", "persian", "iranian", "azerbaijani", "kazakh"],
|
||||
"subcategories": {
|
||||
"Persian / Iranian": ["persian", "iranian", "ghormeh sabzi", "fesenjan",
|
||||
"tahdig", "joojeh kabab", "ash reshteh",
|
||||
"zereshk polo", "khoresh", "mast o khiar",
|
||||
"kashk-e-bademjan", "mirza ghasemi",
|
||||
"baghali polo"],
|
||||
"Georgian": ["georgian", "georgia", "khachapuri", "khinkali",
|
||||
"churchkhela", "ajapsandali", "satsivi",
|
||||
"pkhali", "lobiani", "badrijani nigvzit"],
|
||||
"Armenian": ["armenian", "dolma armenia", "lahmajoun",
|
||||
"manti armenia", "ghapama", "basturma",
|
||||
"harissa armenia", "nazook", "tolma"],
|
||||
"Azerbaijani": ["azerbaijani", "azerbaijan", "plov azerbaijan",
|
||||
"dolma azeri", "dushbara", "levengi",
|
||||
"shah plov", "gutab"],
|
||||
"Uzbek": ["uzbek", "uzbekistan", "plov", "samsa",
|
||||
"lagman", "shashlik", "manti uzbek",
|
||||
"non bread", "dimlama", "sumalak"],
|
||||
"Afghan": ["afghan", "afghanistan", "kabuli pulao", "mantu",
|
||||
"bolani", "qorma", "ashak", "shorwa",
|
||||
"aushak", "borani banjan"],
|
||||
"Kazakh": ["kazakh", "beshbarmak", "kuyrdak", "baursak",
|
||||
"kurt", "shubat", "kazy"],
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
"meal_type": {
|
||||
"label": "Meal Type",
|
||||
"categories": {
|
||||
"Breakfast": {
|
||||
"keywords": ["breakfast", "brunch", "eggs", "pancakes", "waffles",
|
||||
"oatmeal", "muffin"],
|
||||
"subcategories": {
|
||||
"Eggs": ["egg", "omelette", "frittata", "quiche",
|
||||
"scrambled", "benedict", "shakshuka"],
|
||||
"Pancakes & Waffles": ["pancake", "waffle", "crepe", "french toast"],
|
||||
"Baked Goods": ["muffin", "scone", "biscuit", "quick bread",
|
||||
"coffee cake", "danish"],
|
||||
"Oats & Grains": ["oatmeal", "granola", "porridge", "muesli",
|
||||
"overnight oats"],
|
||||
},
|
||||
},
|
||||
"Lunch": {
|
||||
"keywords": ["lunch", "sandwich", "wrap", "salad", "soup", "light meal"],
|
||||
"subcategories": {
|
||||
"Sandwiches": ["sandwich", "sub", "hoagie", "panini", "club",
|
||||
"grilled cheese", "blt"],
|
||||
"Salads": ["salad", "grain bowl", "chopped", "caesar",
|
||||
"niçoise", "cobb"],
|
||||
"Soups": ["soup", "bisque", "chowder", "gazpacho",
|
||||
"minestrone", "lentil soup"],
|
||||
"Wraps": ["wrap", "burrito bowl", "pita", "lettuce wrap",
|
||||
"quesadilla"],
|
||||
},
|
||||
},
|
||||
"Dinner": {
|
||||
"keywords": ["dinner", "main dish", "entree", "main course", "supper"],
|
||||
"subcategories": {
|
||||
"Casseroles": ["casserole", "bake", "gratin", "lasagna",
|
||||
"sheperd's pie", "pot pie"],
|
||||
"Stews": ["stew", "braise", "slow cooker", "pot roast",
|
||||
"daube", "ragù"],
|
||||
"Grilled": ["grilled", "grill", "barbecue", "charred",
|
||||
"kebab", "skewer"],
|
||||
"Stir-Fries": ["stir fry", "stir-fry", "wok", "sauté",
|
||||
"sauteed"],
|
||||
"Roasts": ["roast", "roasted", "oven", "baked chicken",
|
||||
"pot roast"],
|
||||
},
|
||||
},
|
||||
"Snack": {
|
||||
"keywords": ["snack", "appetizer", "finger food", "dip", "bite",
|
||||
"starter"],
|
||||
"subcategories": {
|
||||
"Dips & Spreads": ["dip", "spread", "hummus", "guacamole",
|
||||
"salsa", "pate"],
|
||||
"Finger Foods": ["finger food", "bite", "skewer", "slider",
|
||||
"wing", "nugget"],
|
||||
"Chips & Crackers": ["chip", "cracker", "crisp", "popcorn",
|
||||
"pretzel"],
|
||||
},
|
||||
},
|
||||
"Dessert": {
|
||||
"keywords": ["dessert", "cake", "cookie", "pie", "sweet", "pudding",
|
||||
"ice cream", "brownie"],
|
||||
"subcategories": {
|
||||
"Cakes": ["cake", "cupcake", "layer cake", "bundt",
|
||||
"cheesecake", "torte"],
|
||||
"Cookies & Bars": ["cookie", "brownie", "blondie", "bar",
|
||||
"biscotti", "shortbread"],
|
||||
"Pies & Tarts": ["pie", "tart", "galette", "cobbler", "crisp",
|
||||
"crumble"],
|
||||
"Frozen": ["ice cream", "gelato", "sorbet", "frozen dessert",
|
||||
"popsicle", "granita"],
|
||||
"Puddings": ["pudding", "custard", "mousse", "panna cotta",
|
||||
"flan", "creme brulee"],
|
||||
"Candy": ["candy", "fudge", "truffle", "brittle",
|
||||
"caramel", "toffee"],
|
||||
},
|
||||
},
|
||||
"Beverage": ["drink", "smoothie", "cocktail", "beverage", "juice", "shake"],
|
||||
"Side Dish": ["side dish", "side", "accompaniment", "garnish"],
|
||||
},
|
||||
},
|
||||
"dietary": {
|
||||
"label": "Dietary",
|
||||
"categories": {
|
||||
"Vegetarian": ["vegetarian"],
|
||||
"Vegan": ["vegan", "plant-based", "plant based"],
|
||||
"Gluten-Free": ["gluten-free", "gluten free", "celiac"],
|
||||
"Low-Carb": ["low-carb", "low carb", "keto", "ketogenic"],
|
||||
"High-Protein": ["high protein", "high-protein"],
|
||||
"Low-Fat": ["low-fat", "low fat", "light"],
|
||||
"Dairy-Free": ["dairy-free", "dairy free", "lactose"],
|
||||
},
|
||||
},
|
||||
"main_ingredient": {
|
||||
"label": "Main Ingredient",
|
||||
"categories": {
|
||||
# keywords use exact inferred_tag strings (main:X) — indexed into recipe_browser_fts.
|
||||
"Chicken": {
|
||||
"keywords": ["main:Chicken"],
|
||||
"subcategories": {
|
||||
"Baked": ["baked chicken", "roast chicken", "chicken casserole",
|
||||
"chicken bake"],
|
||||
"Grilled": ["grilled chicken", "chicken kebab", "bbq chicken",
|
||||
"chicken skewer"],
|
||||
"Fried": ["fried chicken", "chicken cutlet", "chicken schnitzel",
|
||||
"crispy chicken"],
|
||||
"Stewed": ["chicken stew", "chicken soup", "coq au vin",
|
||||
"chicken curry", "chicken braise"],
|
||||
},
|
||||
},
|
||||
"Beef": {
|
||||
"keywords": ["main:Beef"],
|
||||
"subcategories": {
|
||||
"Ground Beef": ["ground beef", "hamburger", "meatball", "meatloaf",
|
||||
"bolognese", "burger"],
|
||||
"Steak": ["steak", "sirloin", "ribeye", "flank steak",
|
||||
"filet mignon", "t-bone"],
|
||||
"Roasts": ["beef roast", "pot roast", "brisket", "prime rib",
|
||||
"chuck roast"],
|
||||
"Stews": ["beef stew", "beef braise", "beef bourguignon",
|
||||
"short ribs"],
|
||||
},
|
||||
},
|
||||
"Pork": {
|
||||
"keywords": ["main:Pork"],
|
||||
"subcategories": {
|
||||
"Chops": ["pork chop", "pork loin", "pork cutlet"],
|
||||
"Pulled/Slow": ["pulled pork", "pork shoulder", "pork butt",
|
||||
"carnitas", "slow cooker pork"],
|
||||
"Sausage": ["sausage", "bratwurst", "chorizo", "andouille",
|
||||
"Italian sausage"],
|
||||
"Ribs": ["pork ribs", "baby back ribs", "spare ribs",
|
||||
"pork belly"],
|
||||
},
|
||||
},
|
||||
"Fish": {
|
||||
"keywords": ["main:Fish"],
|
||||
"subcategories": {
|
||||
"Salmon": ["salmon", "smoked salmon", "gravlax"],
|
||||
"Tuna": ["tuna", "albacore", "ahi"],
|
||||
"White Fish": ["cod", "tilapia", "halibut", "sole", "snapper",
|
||||
"flounder", "bass"],
|
||||
"Shellfish": ["shrimp", "prawn", "crab", "lobster", "scallop",
|
||||
"mussel", "clam", "oyster"],
|
||||
},
|
||||
},
|
||||
"Pasta": ["main:Pasta"],
|
||||
"Vegetables": {
|
||||
"keywords": ["main:Vegetables"],
|
||||
"subcategories": {
|
||||
"Root Veg": ["potato", "sweet potato", "carrot", "beet",
|
||||
"parsnip", "turnip"],
|
||||
"Leafy": ["spinach", "kale", "chard", "arugula",
|
||||
"collard greens", "lettuce"],
|
||||
"Brassicas": ["broccoli", "cauliflower", "brussels sprouts",
|
||||
"cabbage", "bok choy"],
|
||||
"Nightshades": ["tomato", "eggplant", "bell pepper", "zucchini",
|
||||
"squash"],
|
||||
"Mushrooms": ["mushroom", "portobello", "shiitake", "oyster mushroom",
|
||||
"chanterelle"],
|
||||
},
|
||||
},
|
||||
"Eggs": ["main:Eggs"],
|
||||
"Legumes": ["main:Legumes"],
|
||||
"Grains": ["main:Grains"],
|
||||
"Cheese": ["main:Cheese"],
|
||||
},
|
||||
},
|
||||
}


def _get_category_def(domain: str, category: str) -> list[str] | dict | None:
    """Return the raw category definition, or None if not found."""
    return DOMAINS.get(domain, {}).get("categories", {}).get(category)


def get_domain_labels() -> list[dict]:
    """Return [{id, label}] for all available domains."""
    return [{"id": k, "label": v["label"]} for k, v in DOMAINS.items()]


def get_keywords_for_category(domain: str, category: str) -> list[str]:
    """Return the keyword list for the category (top-level, covers all subcategories).

    For flat categories returns the list directly.
    For nested categories returns the 'keywords' key.
    Returns [] if category or domain not found.
    """
    cat_def = _get_category_def(domain, category)
    if cat_def is None:
        return []
    if isinstance(cat_def, list):
        return cat_def
    return cat_def.get("keywords", [])


def category_has_subcategories(domain: str, category: str) -> bool:
    """Return True when a category has a subcategory level."""
    cat_def = _get_category_def(domain, category)
    if not isinstance(cat_def, dict):
        return False
    return bool(cat_def.get("subcategories"))


def get_subcategory_names(domain: str, category: str) -> list[str]:
    """Return subcategory names for a category, or [] if none exist."""
    cat_def = _get_category_def(domain, category)
    if not isinstance(cat_def, dict):
        return []
    return list(cat_def.get("subcategories", {}).keys())


def get_keywords_for_subcategory(domain: str, category: str, subcategory: str) -> list[str]:
    """Return keyword list for a specific subcategory, or [] if not found."""
    cat_def = _get_category_def(domain, category)
    if not isinstance(cat_def, dict):
        return []
    return cat_def.get("subcategories", {}).get(subcategory, [])


def get_category_names(domain: str) -> list[str]:
    """Return category names for a domain, or [] if domain unknown."""
    domain_data = DOMAINS.get(domain, {})
    return list(domain_data.get("categories", {}).keys())
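

# Illustrative sketch (not part of the diff): how a browse endpoint might walk
# DOMAINS with the helpers above. build_browse_menu is a hypothetical name;
# only the functions defined in this module are assumed.
def build_browse_menu() -> dict[str, dict[str, list[str]]]:
    menu: dict[str, dict[str, list[str]]] = {}
    for domain in get_domain_labels():
        domain_id = domain["id"]
        menu[domain_id] = {}
        for category in get_category_names(domain_id):
            if category_has_subcategories(domain_id, category):
                # Nested categories drill down one more level in the UI.
                menu[domain_id][category] = get_subcategory_names(domain_id, category)
            else:
                # Flat categories map straight to their keyword list.
                menu[domain_id][category] = get_keywords_for_category(domain_id, category)
    return menu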
|
||||
|
|
@ -84,9 +84,8 @@ class ElementClassifier:
|
|||
name = ingredient_name.lower().strip()
|
||||
if not name:
|
||||
return IngredientProfile(name="", elements=[], source="heuristic")
|
||||
c = self._store._cp
|
||||
row = self._store._fetch_one(
|
||||
f"SELECT * FROM {c}ingredient_profiles WHERE name = ?", (name,)
|
||||
"SELECT * FROM ingredient_profiles WHERE name = ?", (name,)
|
||||
)
|
||||
if row:
|
||||
return self._row_to_profile(row)
|
||||
|
|
|
|||
|
|
@ -1,76 +1,69 @@
|
|||
"""
|
||||
GroceryLinkBuilder — affiliate deeplinks for missing ingredient grocery lists.
|
||||
|
||||
Delegates URL wrapping to circuitforge_core.affiliates.wrap_url, which handles
|
||||
the full resolution chain: opt-out → BYOK id → CF env var → plain URL.
|
||||
Free tier: URL construction only (Amazon Fresh, Walmart, Instacart).
|
||||
Paid+: live product search API (stubbed — future task).
|
||||
|
||||
Registered programs (via cf-core):
|
||||
amazon — Amazon Associates (env: AMAZON_ASSOCIATES_TAG)
|
||||
instacart — Instacart (env: INSTACART_AFFILIATE_ID)
|
||||
|
||||
Walmart is kept inline until cf-core adds Impact network support:
|
||||
env: WALMART_AFFILIATE_ID
|
||||
|
||||
Links are always generated (plain URLs are useful even without affiliate IDs).
|
||||
Walmart links only appear when WALMART_AFFILIATE_ID is set.
|
||||
Config (env vars, all optional — missing = retailer disabled):
|
||||
AMAZON_AFFILIATE_TAG — e.g. "circuitforge-20"
|
||||
INSTACART_AFFILIATE_ID — e.g. "circuitforge"
|
||||
WALMART_AFFILIATE_ID — e.g. "circuitforge" (Impact affiliate network)
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import os
|
||||
from urllib.parse import quote_plus
|
||||
|
||||
from circuitforge_core.affiliates import wrap_url
|
||||
|
||||
from app.models.schemas.recipe import GroceryLink
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _amazon_fresh_link(ingredient: str) -> GroceryLink:
|
||||
def _amazon_link(ingredient: str, tag: str) -> GroceryLink:
|
||||
q = quote_plus(ingredient)
|
||||
base = f"https://www.amazon.com/s?k={q}&i=amazonfresh"
|
||||
return GroceryLink(ingredient=ingredient, retailer="Amazon Fresh", url=wrap_url(base, "amazon"))
|
||||
|
||||
|
||||
def _instacart_link(ingredient: str) -> GroceryLink:
|
||||
q = quote_plus(ingredient)
|
||||
base = f"https://www.instacart.com/store/s?k={q}"
|
||||
return GroceryLink(ingredient=ingredient, retailer="Instacart", url=wrap_url(base, "instacart"))
|
||||
url = f"https://www.amazon.com/s?k={q}&i=amazonfresh&tag={tag}"
|
||||
return GroceryLink(ingredient=ingredient, retailer="Amazon Fresh", url=url)
|
||||
|
||||
|
||||
def _walmart_link(ingredient: str, affiliate_id: str) -> GroceryLink:
|
||||
q = quote_plus(ingredient)
|
||||
# Walmart uses Impact network — affiliate ID is in the redirect path, not a param
|
||||
url = (
|
||||
f"https://goto.walmart.com/c/{affiliate_id}/walmart"
|
||||
f"?u=https://www.walmart.com/search?q={q}"
|
||||
)
|
||||
# Walmart Impact affiliate deeplink pattern
|
||||
url = f"https://goto.walmart.com/c/{affiliate_id}/walmart?u=https://www.walmart.com/search?q={q}"
|
||||
return GroceryLink(ingredient=ingredient, retailer="Walmart Grocery", url=url)
|
||||
|
||||
|
||||
def _instacart_link(ingredient: str, affiliate_id: str) -> GroceryLink:
|
||||
q = quote_plus(ingredient)
|
||||
url = f"https://www.instacart.com/store/s?k={q}&aff={affiliate_id}"
|
||||
return GroceryLink(ingredient=ingredient, retailer="Instacart", url=url)
|
||||
|
||||
|
||||
class GroceryLinkBuilder:
|
||||
def __init__(self, tier: str = "free", has_byok: bool = False) -> None:
|
||||
self._tier = tier
|
||||
self._walmart_id = os.environ.get("WALMART_AFFILIATE_ID", "").strip()
|
||||
self._has_byok = has_byok
|
||||
self._amazon_tag = os.environ.get("AMAZON_AFFILIATE_TAG", "")
|
||||
self._instacart_id = os.environ.get("INSTACART_AFFILIATE_ID", "")
|
||||
self._walmart_id = os.environ.get("WALMART_AFFILIATE_ID", "")
|
||||
|
||||
def build_links(self, ingredient: str) -> list[GroceryLink]:
|
||||
"""Build grocery deeplinks for a single ingredient.
|
||||
"""Build affiliate deeplinks for a single ingredient.
|
||||
|
||||
Amazon Fresh and Instacart links are always included; wrap_url handles
|
||||
affiliate ID injection (or returns a plain URL if none is configured).
|
||||
Walmart requires WALMART_AFFILIATE_ID to be set (Impact network uses a
|
||||
path-based redirect that doesn't degrade cleanly to a plain URL).
|
||||
Free tier: URL construction only.
|
||||
Paid+: would call live product search APIs (stubbed).
|
||||
"""
|
||||
if not ingredient.strip():
|
||||
return []
|
||||
links: list[GroceryLink] = []
|
||||
|
||||
links: list[GroceryLink] = [
|
||||
_amazon_fresh_link(ingredient),
|
||||
_instacart_link(ingredient),
|
||||
]
|
||||
if self._amazon_tag:
|
||||
links.append(_amazon_link(ingredient, self._amazon_tag))
|
||||
if self._walmart_id:
|
||||
links.append(_walmart_link(ingredient, self._walmart_id))
|
||||
if self._instacart_id:
|
||||
links.append(_instacart_link(ingredient, self._instacart_id))
|
||||
|
||||
# Paid+: live API stub (future task)
|
||||
# if self._tier in ("paid", "premium") and not self._has_byok:
|
||||
# links.extend(self._search_kroger_api(ingredient))
|
||||
|
||||
return links
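

# Hedged usage sketch (not part of the diff): collecting links for the missing
# ingredients of one suggestion. The ingredient list here is illustrative.
builder = GroceryLinkBuilder(tier="free")
links_by_ingredient = {
    ingredient: builder.build_links(ingredient)
    for ingredient in ["buttermilk", "day-old bread"]
}
# Each GroceryLink carries ingredient, retailer, and url. Retailers without a
# configured affiliate ID either degrade to a plain search URL (Amazon Fresh,
# Instacart via wrap_url, per the module docstring) or are omitted entirely
# (Walmart).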
|
||||
|
||||
|
@ -84,13 +84,7 @@ class LLMRecipeGenerator:
|
|||
if template.aromatics:
|
||||
lines.append(f"Preferred aromatics: {', '.join(template.aromatics[:4])}")
|
||||
|
||||
unit_line = (
|
||||
"Use metric units (grams, ml, Celsius) for all quantities and temperatures."
|
||||
if req.unit_system == "metric"
|
||||
else "Use imperial units (oz, cups, Fahrenheit) for all quantities and temperatures."
|
||||
)
|
||||
lines += [
|
||||
unit_line,
|
||||
"",
|
||||
"Reply using EXACTLY this plain-text format — no markdown, no bold, no extra commentary:",
|
||||
"Title: <name of the dish>",
|
||||
|
|
@ -124,14 +118,8 @@ class LLMRecipeGenerator:
|
|||
if allergy_list:
|
||||
lines.append(f"Must NOT contain: {', '.join(allergy_list)}")
|
||||
|
||||
unit_line = (
|
||||
"Use metric units (grams, ml, Celsius) for all quantities and temperatures."
|
||||
if req.unit_system == "metric"
|
||||
else "Use imperial units (oz, cups, Fahrenheit) for all quantities and temperatures."
|
||||
)
|
||||
lines += [
|
||||
"Treat any mystery ingredient as a wildcard — use your imagination.",
|
||||
unit_line,
|
||||
"Reply using EXACTLY this plain-text format — no markdown, no bold:",
|
||||
"Title: <name of the dish>",
|
||||
"Ingredients: <comma-separated list>",
|
||||
|
|
@ -143,26 +131,25 @@ class LLMRecipeGenerator:
|
|||
|
||||
return "\n".join(lines)
|
||||
|
||||
_SERVICE_TYPE = "cf-text"
|
||||
_TTL_S = 300.0
|
||||
_CALLER = "kiwi-recipe"
|
||||
_MODEL_CANDIDATES: list[str] = ["Ouro-2.6B-Thinking", "Ouro-1.4B"]
|
||||
|
||||
def _get_llm_context(self):
|
||||
"""Return a sync context manager that yields an Allocation or None.
|
||||
|
||||
When CF_ORCH_URL is set, uses CFOrchClient to acquire a cf-text allocation
|
||||
When CF_ORCH_URL is set, uses CFOrchClient to acquire a vLLM allocation
|
||||
(which handles service lifecycle and VRAM). Falls back to nullcontext(None)
|
||||
when the env var is absent or CFOrchClient raises on construction.
|
||||
"""
|
||||
cf_orch_url = os.environ.get("CF_ORCH_URL")
|
||||
if cf_orch_url:
|
||||
try:
|
||||
from circuitforge_orch.client import CFOrchClient
|
||||
from circuitforge_core.resources import CFOrchClient
|
||||
client = CFOrchClient(cf_orch_url)
|
||||
return client.allocate(
|
||||
service=self._SERVICE_TYPE,
|
||||
ttl_s=self._TTL_S,
|
||||
caller=self._CALLER,
|
||||
service="vllm",
|
||||
model_candidates=self._MODEL_CANDIDATES,
|
||||
ttl_s=300.0,
|
||||
caller="kiwi-recipe",
|
||||
)
|
||||
except Exception as exc:
|
||||
logger.debug("CFOrchClient init failed, falling back to direct URL: %s", exc)
|
||||
|
|
@ -173,31 +160,10 @@ class LLMRecipeGenerator:
|
|||
|
||||
With CF_ORCH_URL set: acquires a vLLM allocation via CFOrchClient and
|
||||
calls the OpenAI-compatible API directly against the allocated service URL.
|
||||
Allocation failure falls through to LLMRouter rather than silently returning "".
|
||||
Without CF_ORCH_URL: uses LLMRouter directly.
|
||||
Without CF_ORCH_URL: falls back to LLMRouter using its configured backends.
|
||||
"""
|
||||
ctx = self._get_llm_context()
|
||||
alloc = None
|
||||
try:
|
||||
alloc = ctx.__enter__()
|
||||
except Exception as exc:
|
||||
msg = str(exc)
|
||||
# 429 = coordinator at capacity (all nodes at max_concurrent limit).
|
||||
# Don't fall back to LLMRouter — it's also overloaded and the slow
|
||||
# fallback causes nginx 504s. Return "" fast so the caller degrades
|
||||
# gracefully (empty recipe result) rather than timing out.
|
||||
if "429" in msg or "max_concurrent" in msg.lower():
|
||||
logger.info("cf-orch at capacity — returning empty result (graceful degradation)")
|
||||
if ctx is not None:
|
||||
try:
|
||||
ctx.__exit__(None, None, None)
|
||||
except Exception:
|
||||
pass
|
||||
return ""
|
||||
logger.debug("cf-orch allocation failed, falling back to LLMRouter: %s", exc)
|
||||
ctx = None # __enter__ raised — do not call __exit__
|
||||
|
||||
try:
|
||||
with self._get_llm_context() as alloc:
|
||||
if alloc is not None:
|
||||
base_url = alloc.url.rstrip("/") + "/v1"
|
||||
client = OpenAI(base_url=base_url, api_key="any")
|
||||
|
|
@ -211,16 +177,11 @@ class LLMRecipeGenerator:
|
|||
return resp.choices[0].message.content or ""
|
||||
else:
|
||||
from circuitforge_core.llm.router import LLMRouter
|
||||
return LLMRouter().complete(prompt)
|
||||
router = LLMRouter()
|
||||
return router.complete(prompt)
|
||||
except Exception as exc:
|
||||
logger.error("LLM call failed: %s", exc)
|
||||
return ""
|
||||
finally:
|
||||
if ctx is not None:
|
||||
try:
|
||||
ctx.__exit__(None, None, None)
|
||||
except Exception:
|
||||
pass
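# Hedged caller-side sketch (names are illustrative, not from the diff): the
# empty string returned above is the degradation signal. API callers surface
# "no recipe generated" instead of retrying, which is what keeps an at-capacity
# coordinator from cascading into nginx 504s upstream. For example:
#
#   text = generator._call_llm(prompt)   # hypothetical entry point
#   if not text:
#       return []                        # degrade to zero suggestions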
|
||||
|
||||
# Strips markdown bold/italic markers so "**Directions:**" parses like "Directions:"
|
||||
_MD_BOLD = re.compile(r"\*{1,2}([^*]+)\*{1,2}")
@ -1,160 +0,0 @@
|
|||
"""
|
||||
Shopping locale configuration.
|
||||
|
||||
Maps a locale key to Amazon domain, currency metadata, and retailer availability.
|
||||
Instacart and Walmart are US/CA-only; all other locales get Amazon only.
|
||||
Amazon Fresh (&i=amazonfresh) is US-only — international domains use the general
|
||||
grocery department (&rh=n:16310101) where available, plain search elsewhere.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import TypedDict
|
||||
|
||||
|
||||
class LocaleConfig(TypedDict):
|
||||
amazon_domain: str
|
||||
amazon_grocery_dept: str # URL fragment for grocery department on this locale's site
|
||||
currency_code: str
|
||||
currency_symbol: str
|
||||
instacart: bool
|
||||
walmart: bool
|
||||
|
||||
|
||||
LOCALES: dict[str, LocaleConfig] = {
|
||||
"us": {
|
||||
"amazon_domain": "amazon.com",
|
||||
"amazon_grocery_dept": "i=amazonfresh",
|
||||
"currency_code": "USD",
|
||||
"currency_symbol": "$",
|
||||
"instacart": True,
|
||||
"walmart": True,
|
||||
},
|
||||
"ca": {
|
||||
"amazon_domain": "amazon.ca",
|
||||
"amazon_grocery_dept": "rh=n:6967215011", # Grocery dept on .ca # gitleaks:allow
|
||||
"currency_code": "CAD",
|
||||
"currency_symbol": "CA$",
|
||||
"instacart": True,
|
||||
"walmart": False,
|
||||
},
|
||||
"gb": {
|
||||
"amazon_domain": "amazon.co.uk",
|
||||
"amazon_grocery_dept": "rh=n:340831031", # Grocery dept on .co.uk
|
||||
"currency_code": "GBP",
|
||||
"currency_symbol": "£",
|
||||
"instacart": False,
|
||||
"walmart": False,
|
||||
},
|
||||
"au": {
|
||||
"amazon_domain": "amazon.com.au",
|
||||
"amazon_grocery_dept": "rh=n:5765081051", # Pantry/grocery on .com.au # gitleaks:allow
|
||||
"currency_code": "AUD",
|
||||
"currency_symbol": "A$",
|
||||
"instacart": False,
|
||||
"walmart": False,
|
||||
},
|
||||
"nz": {
|
||||
# NZ has no Amazon storefront — route to .com.au as nearest option
|
||||
"amazon_domain": "amazon.com.au",
|
||||
"amazon_grocery_dept": "rh=n:5765081051", # gitleaks:allow
|
||||
"currency_code": "NZD",
|
||||
"currency_symbol": "NZ$",
|
||||
"instacart": False,
|
||||
"walmart": False,
|
||||
},
|
||||
"de": {
|
||||
"amazon_domain": "amazon.de",
|
||||
"amazon_grocery_dept": "rh=n:340843031", # Lebensmittel & Getränke
|
||||
"currency_code": "EUR",
|
||||
"currency_symbol": "€",
|
||||
"instacart": False,
|
||||
"walmart": False,
|
||||
},
|
||||
"fr": {
|
||||
"amazon_domain": "amazon.fr",
|
||||
"amazon_grocery_dept": "rh=n:197858031",
|
||||
"currency_code": "EUR",
|
||||
"currency_symbol": "€",
|
||||
"instacart": False,
|
||||
"walmart": False,
|
||||
},
|
||||
"it": {
|
||||
"amazon_domain": "amazon.it",
|
||||
"amazon_grocery_dept": "rh=n:525616031",
|
||||
"currency_code": "EUR",
|
||||
"currency_symbol": "€",
|
||||
"instacart": False,
|
||||
"walmart": False,
|
||||
},
|
||||
"es": {
|
||||
"amazon_domain": "amazon.es",
|
||||
"amazon_grocery_dept": "rh=n:599364031",
|
||||
"currency_code": "EUR",
|
||||
"currency_symbol": "€",
|
||||
"instacart": False,
|
||||
"walmart": False,
|
||||
},
|
||||
"nl": {
|
||||
"amazon_domain": "amazon.nl",
|
||||
"amazon_grocery_dept": "rh=n:16584827031",
|
||||
"currency_code": "EUR",
|
||||
"currency_symbol": "€",
|
||||
"instacart": False,
|
||||
"walmart": False,
|
||||
},
|
||||
"se": {
|
||||
"amazon_domain": "amazon.se",
|
||||
"amazon_grocery_dept": "rh=n:20741393031",
|
||||
"currency_code": "SEK",
|
||||
"currency_symbol": "kr",
|
||||
"instacart": False,
|
||||
"walmart": False,
|
||||
},
|
||||
"jp": {
|
||||
"amazon_domain": "amazon.co.jp",
|
||||
"amazon_grocery_dept": "rh=n:2246283051", # gitleaks:allow
|
||||
"currency_code": "JPY",
|
||||
"currency_symbol": "¥",
|
||||
"instacart": False,
|
||||
"walmart": False,
|
||||
},
|
||||
"in": {
|
||||
"amazon_domain": "amazon.in",
|
||||
"amazon_grocery_dept": "rh=n:2454178031", # gitleaks:allow
|
||||
"currency_code": "INR",
|
||||
"currency_symbol": "₹",
|
||||
"instacart": False,
|
||||
"walmart": False,
|
||||
},
|
||||
"mx": {
|
||||
"amazon_domain": "amazon.com.mx",
|
||||
"amazon_grocery_dept": "rh=n:10737659011",
|
||||
"currency_code": "MXN",
|
||||
"currency_symbol": "MX$",
|
||||
"instacart": False,
|
||||
"walmart": False,
|
||||
},
|
||||
"br": {
|
||||
"amazon_domain": "amazon.com.br",
|
||||
"amazon_grocery_dept": "rh=n:17878420011",
|
||||
"currency_code": "BRL",
|
||||
"currency_symbol": "R$",
|
||||
"instacart": False,
|
||||
"walmart": False,
|
||||
},
|
||||
"sg": {
|
||||
"amazon_domain": "amazon.sg",
|
||||
"amazon_grocery_dept": "rh=n:6981647051", # gitleaks:allow
|
||||
"currency_code": "SGD",
|
||||
"currency_symbol": "S$",
|
||||
"instacart": False,
|
||||
"walmart": False,
|
||||
},
|
||||
}
|
||||
|
||||
DEFAULT_LOCALE = "us"
|
||||
|
||||
|
||||
def get_locale(key: str) -> LocaleConfig:
|
||||
"""Return locale config for *key*, falling back to US if unknown."""
|
||||
return LOCALES.get(key, LOCALES[DEFAULT_LOCALE])
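

# Hedged sketch (not in the module): combining a LocaleConfig with a search term
# to build a locale-aware Amazon grocery URL. Uses only fields defined above.
def _amazon_grocery_url(ingredient: str, locale_key: str = DEFAULT_LOCALE) -> str:
    from urllib.parse import quote_plus

    cfg = get_locale(locale_key)
    # amazon_grocery_dept is already a query fragment ("i=amazonfresh" for the
    # US, "rh=n:..." department filters elsewhere), so it is appended verbatim.
    return f"https://www.{cfg['amazon_domain']}/s?k={quote_plus(ingredient)}&{cfg['amazon_grocery_dept']}"

# _amazon_grocery_url("oat milk", "gb")
# -> "https://www.amazon.co.uk/s?k=oat+milk&rh=n:340831031"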
|
||||
|
|
@ -21,6 +21,7 @@ if TYPE_CHECKING:
|
|||
from app.db.store import Store
|
||||
|
||||
from app.models.schemas.recipe import GroceryLink, NutritionPanel, RecipeRequest, RecipeResult, RecipeSuggestion, SwapCandidate
|
||||
from app.services.recipe.assembly_recipes import match_assembly_templates
|
||||
from app.services.recipe.element_classifier import ElementClassifier
|
||||
from app.services.recipe.grocery_links import GroceryLinkBuilder
|
||||
from app.services.recipe.substitution_engine import SubstitutionEngine
|
||||
|
|
@ -155,24 +156,6 @@ _PANTRY_LABEL_SYNONYMS: dict[str, str] = {
|
|||
}
|
||||
|
||||
|
||||
# When a pantry item is in a secondary state (e.g. bread → "stale"), expand
|
||||
# the pantry set with terms that recipe ingredients commonly use to describe
|
||||
# that state. This lets "stale bread" in a recipe ingredient match a pantry
|
||||
# entry that is simply called "Bread" but is past its nominal use-by date.
|
||||
# Each key is (category_in_SECONDARY_WINDOW, label_returned_by_secondary_state).
|
||||
# Values are additional strings added to the pantry set for FTS coverage.
|
||||
_SECONDARY_STATE_SYNONYMS: dict[tuple[str, str], list[str]] = {
|
||||
("bread", "stale"): ["stale bread", "day-old bread", "old bread", "dried bread"],
|
||||
("bakery", "day-old"): ["day-old bread", "stale bread", "stale pastry"],
|
||||
("bananas", "overripe"): ["overripe bananas", "very ripe banana", "ripe bananas", "mashed banana"],
|
||||
("milk", "sour"): ["sour milk", "slightly sour milk", "buttermilk"],
|
||||
("dairy", "sour"): ["sour milk", "slightly sour milk"],
|
||||
("cheese", "well-aged"): ["parmesan rind", "cheese rind", "aged cheese"],
|
||||
("rice", "day-old"): ["day-old rice", "leftover rice", "cold rice", "cooked rice"],
|
||||
("tortillas", "stale"): ["stale tortillas", "dried tortillas", "day-old tortillas"],
|
||||
}
|
||||
|
||||
|
||||
# Matches leading quantity/unit prefixes in recipe ingredient strings,
|
||||
# e.g. "2 cups flour" → "flour", "1/2 c. ketchup" → "ketchup",
|
||||
# "3 oz. butter" → "butter"
|
||||
|
|
@ -302,24 +285,14 @@ def _prep_note_for(ingredient: str) -> str | None:
|
|||
return template.format(ingredient=ingredient_name)
|
||||
|
||||
|
||||
def _expand_pantry_set(
|
||||
pantry_items: list[str],
|
||||
secondary_pantry_items: dict[str, str] | None = None,
|
||||
) -> set[str]:
|
||||
def _expand_pantry_set(pantry_items: list[str]) -> set[str]:
|
||||
"""Return pantry_set expanded with canonical recipe-corpus synonyms.
|
||||
|
||||
For each pantry item, checks _PANTRY_LABEL_SYNONYMS for substring matches
|
||||
and adds the canonical form. This lets single-word recipe ingredients
|
||||
("hamburger", "chicken") match product-label pantry entries
|
||||
("burger patties", "rotisserie chicken").
|
||||
|
||||
If secondary_pantry_items is provided (product_name → state label), items
|
||||
in a secondary state also receive state-specific synonym expansion so that
|
||||
recipe ingredients like "stale bread" or "day-old rice" are matched.
|
||||
"""
|
||||
from app.services.expiration_predictor import ExpirationPredictor
|
||||
_predictor = ExpirationPredictor()
|
||||
|
||||
expanded: set[str] = set()
|
||||
for item in pantry_items:
|
||||
lower = item.lower().strip()
|
||||
|
|
@ -327,15 +300,6 @@ def _expand_pantry_set(
|
|||
for pattern, canonical in _PANTRY_LABEL_SYNONYMS.items():
|
||||
if pattern in lower:
|
||||
expanded.add(canonical)
|
||||
|
||||
# Secondary state expansion — adds terms like "stale bread", "day-old rice"
|
||||
if secondary_pantry_items and item in secondary_pantry_items:
|
||||
state_label = secondary_pantry_items[item]
|
||||
category = _predictor.get_category_from_product(item)
|
||||
if category:
|
||||
synonyms = _SECONDARY_STATE_SYNONYMS.get((category, state_label), [])
|
||||
expanded.update(synonyms)
|
||||
|
||||
return expanded
|
||||
|
||||
|
||||
|
|
@ -403,156 +367,6 @@ def _pantry_creative_swap(required: str, pantry_items: set[str]) -> str | None:
|
|||
return best
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Functional-category swap table (Level 2 only)
|
||||
# ---------------------------------------------------------------------------
|
||||
# Maps cleaned ingredient names → functional category label. Used as a
|
||||
# fallback when _pantry_creative_swap returns None (which always happens for
|
||||
# single-token ingredients, because that function requires ≥2 shared tokens).
|
||||
# A pantry item that belongs to the same category is offered as a substitute.
|
||||
_FUNCTIONAL_SWAP_CATEGORIES: dict[str, str] = {
|
||||
# Solid fats
|
||||
"butter": "solid_fat",
|
||||
"margarine": "solid_fat",
|
||||
"shortening": "solid_fat",
|
||||
"lard": "solid_fat",
|
||||
"ghee": "solid_fat",
|
||||
# Liquid/neutral cooking oils
|
||||
"oil": "liquid_fat",
|
||||
"vegetable oil": "liquid_fat",
|
||||
"olive oil": "liquid_fat",
|
||||
"canola oil": "liquid_fat",
|
||||
"sunflower oil": "liquid_fat",
|
||||
"avocado oil": "liquid_fat",
|
||||
# Sweeteners
|
||||
"sugar": "sweetener",
|
||||
"brown sugar": "sweetener",
|
||||
"honey": "sweetener",
|
||||
"maple syrup": "sweetener",
|
||||
"agave": "sweetener",
|
||||
"molasses": "sweetener",
|
||||
"stevia": "sweetener",
|
||||
"powdered sugar": "sweetener",
|
||||
# All-purpose flours and baking bases
|
||||
"flour": "flour",
|
||||
"all-purpose flour": "flour",
|
||||
"whole wheat flour": "flour",
|
||||
"bread flour": "flour",
|
||||
"self-rising flour": "flour",
|
||||
"cake flour": "flour",
|
||||
# Dairy and non-dairy milk
|
||||
"milk": "dairy_milk",
|
||||
"whole milk": "dairy_milk",
|
||||
"skim milk": "dairy_milk",
|
||||
"2% milk": "dairy_milk",
|
||||
"oat milk": "dairy_milk",
|
||||
"almond milk": "dairy_milk",
|
||||
"soy milk": "dairy_milk",
|
||||
"rice milk": "dairy_milk",
|
||||
# Heavy/whipping creams
|
||||
"cream": "heavy_cream",
|
||||
"heavy cream": "heavy_cream",
|
||||
"whipping cream": "heavy_cream",
|
||||
"double cream": "heavy_cream",
|
||||
"coconut cream": "heavy_cream",
|
||||
# Cultured dairy (acid + thick)
|
||||
"sour cream": "cultured_dairy",
|
||||
"greek yogurt": "cultured_dairy",
|
||||
"yogurt": "cultured_dairy",
|
||||
"buttermilk": "cultured_dairy",
|
||||
# Starch thickeners
|
||||
"cornstarch": "thickener",
|
||||
"arrowroot": "thickener",
|
||||
"tapioca starch": "thickener",
|
||||
"potato starch": "thickener",
|
||||
"rice flour": "thickener",
|
||||
# Egg binders
|
||||
"egg": "egg_binder",
|
||||
"eggs": "egg_binder",
|
||||
# Acids
|
||||
"vinegar": "acid",
|
||||
"apple cider vinegar": "acid",
|
||||
"white vinegar": "acid",
|
||||
"red wine vinegar": "acid",
|
||||
"lemon juice": "acid",
|
||||
"lime juice": "acid",
|
||||
# Stocks and broths
|
||||
"broth": "stock",
|
||||
"stock": "stock",
|
||||
"chicken broth": "stock",
|
||||
"beef broth": "stock",
|
||||
"vegetable broth": "stock",
|
||||
"chicken stock": "stock",
|
||||
"beef stock": "stock",
|
||||
"bouillon": "stock",
|
||||
# Hard cheeses (grating / melting interchangeable)
|
||||
"parmesan": "hard_cheese",
|
||||
"romano": "hard_cheese",
|
||||
"pecorino": "hard_cheese",
|
||||
"asiago": "hard_cheese",
|
||||
# Melting cheeses
|
||||
"cheddar": "melting_cheese",
|
||||
"mozzarella": "melting_cheese",
|
||||
"swiss": "melting_cheese",
|
||||
"gouda": "melting_cheese",
|
||||
"monterey jack": "melting_cheese",
|
||||
"colby": "melting_cheese",
|
||||
"provolone": "melting_cheese",
|
||||
# Canned tomato products
|
||||
"tomato sauce": "canned_tomato",
|
||||
"tomato paste": "canned_tomato",
|
||||
"crushed tomatoes": "canned_tomato",
|
||||
"diced tomatoes": "canned_tomato",
|
||||
"marinara": "canned_tomato",
|
||||
}


def _category_swap(ingredient: str, pantry_items: set[str]) -> str | None:
    """Level-2 fallback: find a same-category pantry substitute for a single-token ingredient.

    _pantry_creative_swap requires ≥2 shared content tokens, so it always returns
    None for single-word ingredients like 'butter' or 'flour'. This function looks
    up the ingredient's functional category and returns any pantry item in that
    same category, enabling swaps like butter → ghee, milk → oat milk.
    """
    clean = _strip_quantity(ingredient).lower()
    category = _FUNCTIONAL_SWAP_CATEGORIES.get(clean)
    if not category:
        return None
    for item in pantry_items:
        if item.lower() == clean:
            continue
        item_lower = item.lower()
        # Direct match: pantry item name is a known member of the same category
        if _FUNCTIONAL_SWAP_CATEGORIES.get(item_lower) == category:
            return item
        # Substring match: handles "organic oat milk" containing "oat milk"
        for known_ing, cat in _FUNCTIONAL_SWAP_CATEGORIES.items():
            if cat == category and known_ing in item_lower and item_lower != clean:
                return item
    return None
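

def _demo_category_swap() -> None:
    # Hedged sketch, not in the diff: exercises _category_swap against the table
    # above. Assumes _strip_quantity passes names with no quantity prefix through
    # unchanged.
    assert _category_swap("butter", {"ghee", "oat milk"}) == "ghee"  # same solid_fat category
    assert _category_swap("milk", {"organic oat milk"}) == "organic oat milk"  # substring match
    assert _category_swap("saffron", {"ghee"}) is None  # no functional category, no swap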


# Assembly template caps by tier — prevents flooding results with templates
# when a well-stocked pantry satisfies every required role.
_SOURCE_URL_BUILDERS: dict[str, str] = {
    "foodcom": "https://www.food.com/recipe/{id}",
}


def _build_source_url(row: dict) -> str | None:
    """Construct a canonical source URL from DB row fields, or None for generated recipes."""
    source = row.get("source") or ""
    external_id = row.get("external_id")
    template = _SOURCE_URL_BUILDERS.get(source)
    if not template or not external_id:
        return None
    try:
        return template.format(id=int(float(external_id)))
    except (ValueError, TypeError):
        return None
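

# Example (illustrative values): {"source": "foodcom", "external_id": "123456.0"}
# formats to "https://www.food.com/recipe/123456"; any other source returns None.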
|
||||
|
||||
|
||||
# Method complexity classification patterns
|
||||
_EASY_METHODS = re.compile(
|
||||
r"\b(microwave|mix|stir|blend|toast|assemble|heat)\b", re.IGNORECASE
|
||||
|
|
@ -561,56 +375,6 @@ _INVOLVED_METHODS = re.compile(
|
|||
r"\b(braise|roast|knead|deep.?fry|fry|sauté|saute|bake|boil)\b", re.IGNORECASE
|
||||
)

# Hard day mode sort tier patterns
_PREMADE_TITLE_RE = re.compile(
    r"\b(frozen|instant|microwave|ready.?made|pre.?made|packaged|heat.?and.?eat)\b",
    re.IGNORECASE,
)
_HEAT_ONLY_RE = re.compile(r"\b(microwave|heat|warm|thaw)\b", re.IGNORECASE)


def _hard_day_sort_tier(
    title: str,
    ingredient_names: list[str],
    directions: list[str],
) -> int:
    """Return a sort priority tier for hard day mode.

    0 — premade / heat-only (frozen dinner, quesadilla, microwave meal)
    1 — super simple (≤3 ingredients, easy method)
    2 — easy/moderate (everything else that passed the 'involved' filter)

    Lower tier surfaces first.
    """
    dir_text = " ".join(directions)
    n_ingredients = len(ingredient_names)
    n_steps = len(directions)

    # Tier 0: title signals premade, OR very few ingredients with heat-only steps
    if _PREMADE_TITLE_RE.search(title):
        return 0
    if n_ingredients <= 2 and n_steps <= 3 and _HEAT_ONLY_RE.search(dir_text):
        return 0

    # Tier 1: ≤3 ingredients with any easy method (quesadilla, cheese toast, etc.)
    if n_ingredients <= 3 and _EASY_METHODS.search(dir_text):
        return 1

    return 2
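

def _demo_hard_day_tiers() -> None:
    # Hedged sketch, not in the diff: illustrates the three tiers documented above.
    assert _hard_day_sort_tier(
        "Frozen Burrito Night", ["frozen burrito"], ["Microwave 2 minutes."]
    ) == 0  # premade keyword in title
    assert _hard_day_sort_tier(
        "Cheese Toast", ["bread", "cheddar"], ["Toast the bread.", "Add cheese."]
    ) == 1  # ≤3 ingredients with an easy method ("toast")
    assert _hard_day_sort_tier(
        "Weeknight Chili",
        ["beef", "beans", "tomato", "onion", "garlic", "cumin", "stock"],
        ["Brown the beef.", "Add aromatics.", "Simmer 30 minutes."],
    ) == 2  # everything else that survived the 'involved' filter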


def _estimate_time_min(directions: list[str], complexity: str) -> int:
    """Rough cooking time estimate from step count and method complexity.

    Not precise — intended for filtering and display hints only.
    """
    steps = len(directions)
    if complexity == "easy":
        return max(5, 10 + steps * 3)
    if complexity == "involved":
        return max(20, 30 + steps * 6)
    return max(10, 20 + steps * 4)  # moderate
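

# Worked example of the heuristic above, for a 5-step recipe:
#   easy     -> max(5, 10 + 5*3)  = 25 min
#   moderate -> max(10, 20 + 5*4) = 40 min
#   involved -> max(20, 30 + 5*6) = 60 min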
|
||||
|
||||
|
||||
def _classify_method_complexity(
|
||||
directions: list[str],
|
||||
|
|
@ -671,7 +435,7 @@ class RecipeEngine:
|
|||
|
||||
profiles = self._classifier.classify_batch(req.pantry_items)
|
||||
gaps = self._classifier.identify_gaps(profiles)
|
||||
pantry_set = _expand_pantry_set(req.pantry_items, req.secondary_pantry_items or None)
|
||||
pantry_set = _expand_pantry_set(req.pantry_items)
|
||||
|
||||
if req.level >= 3:
|
||||
from app.services.recipe.llm_recipe import LLMRecipeGenerator
|
||||
|
|
@ -679,11 +443,6 @@ class RecipeEngine:
|
|||
return gen.generate(req, profiles, gaps)
|
||||
|
||||
# Level 1 & 2: deterministic path
|
||||
# L1 ("Use What I Have") applies strict quality gates:
|
||||
# - exclude_generic: filter catch-all recipes at the DB level
|
||||
# - effective_max_missing: default to 2 when user hasn't set a cap
|
||||
# - match ratio: require ≥60% ingredient coverage to avoid low-signal results
|
||||
_l1 = req.level == 1 and not req.shopping_mode
|
||||
nf = req.nutrition_filters
|
||||
rows = self._store.search_recipes_by_ingredients(
|
||||
req.pantry_items,
|
||||
|
|
@ -694,18 +453,8 @@ class RecipeEngine:
|
|||
max_carbs_g=nf.max_carbs_g,
|
||||
max_sodium_mg=nf.max_sodium_mg,
|
||||
excluded_ids=req.excluded_ids or [],
|
||||
exclude_generic=_l1,
|
||||
)
|
||||
|
||||
# L1 strict defaults: cap missing ingredients and require a minimum ratio.
|
||||
_L1_MAX_MISSING_DEFAULT = 2
|
||||
_L1_MIN_MATCH_RATIO = 0.6
|
||||
effective_max_missing = req.max_missing
|
||||
if _l1 and effective_max_missing is None:
|
||||
effective_max_missing = _L1_MAX_MISSING_DEFAULT
|
||||
|
||||
suggestions = []
|
||||
hard_day_tier_map: dict[int, int] = {} # recipe_id → tier when hard_day_mode
|
||||
|
||||
for row in rows:
|
||||
ingredient_names: list[str] = row.get("ingredient_names") or []
|
||||
|
|
@ -719,21 +468,15 @@ class RecipeEngine:
|
|||
# When covered, collect any prep-state annotations (e.g. "melted butter"
|
||||
# → note "Melt the butter before starting.") to surface separately.
|
||||
swap_candidates: list[SwapCandidate] = []
|
||||
matched: list[str] = []
|
||||
missing: list[str] = []
|
||||
prep_note_set: set[str] = set()
|
||||
for n in ingredient_names:
|
||||
if _ingredient_in_pantry(n, pantry_set):
|
||||
matched.append(_strip_quantity(n))
|
||||
note = _prep_note_for(n)
|
||||
if note:
|
||||
prep_note_set.add(note)
|
||||
continue
|
||||
swap_item = _pantry_creative_swap(n, pantry_set)
|
||||
# L2: also try functional-category swap for single-token ingredients
|
||||
# that _pantry_creative_swap can't match (requires ≥2 shared tokens).
|
||||
if swap_item is None and req.level == 2:
|
||||
swap_item = _category_swap(n, pantry_set)
|
||||
if swap_item:
|
||||
swap_candidates.append(SwapCandidate(
|
||||
original_name=n,
|
||||
|
|
@ -745,51 +488,20 @@ class RecipeEngine:
|
|||
else:
|
||||
missing.append(n)
|
||||
|
||||
# Filter by max_missing — skipped in shopping mode (user is willing to buy)
|
||||
if not req.shopping_mode and effective_max_missing is not None and len(missing) > effective_max_missing:
|
||||
# Filter by max_missing (pantry swaps don't count as missing)
|
||||
if req.max_missing is not None and len(missing) > req.max_missing:
|
||||
continue
|
||||
|
||||
# "Can make now" toggle: drop any recipe that still has missing ingredients
|
||||
# after swaps are applied. Swapped items count as covered.
|
||||
if req.pantry_match_only and missing:
|
||||
continue
|
||||
|
||||
# L1 match ratio gate: drop results where less than 60% of the recipe's
|
||||
# ingredients are in the pantry. Prevents low-signal results like a
|
||||
# 10-ingredient recipe matching on only one common item.
|
||||
if _l1 and ingredient_names:
|
||||
match_ratio = len(matched) / len(ingredient_names)
|
||||
if match_ratio < _L1_MIN_MATCH_RATIO:
|
||||
continue
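# e.g. a 10-ingredient recipe with only 5 pantry matches has ratio 0.5 < 0.6 and is dropped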
|
||||
|
||||
# Parse directions — needed for complexity, hard_day_mode, and time estimate.
|
||||
# Filter by hard_day_mode
|
||||
if req.hard_day_mode:
|
||||
directions: list[str] = row.get("directions") or []
|
||||
if isinstance(directions, str):
|
||||
try:
|
||||
directions = json.loads(directions)
|
||||
except Exception:
|
||||
directions = [directions]
|
||||
|
||||
# Compute complexity for every suggestion (used for badge + filter).
|
||||
row_complexity = _classify_method_complexity(directions, available_equipment)
|
||||
row_time_min = _estimate_time_min(directions, row_complexity)
|
||||
|
||||
# Filter and tier-rank by hard_day_mode
|
||||
if req.hard_day_mode:
|
||||
if row_complexity == "involved":
|
||||
continue
|
||||
hard_day_tier_map[row["id"]] = _hard_day_sort_tier(
|
||||
title=row.get("title", ""),
|
||||
ingredient_names=ingredient_names,
|
||||
directions=directions,
|
||||
)
|
||||
|
||||
# Complexity filter (#58)
|
||||
if req.complexity_filter and row_complexity != req.complexity_filter:
|
||||
continue
|
||||
|
||||
# Max time filter (#58)
|
||||
if req.max_time_min is not None and row_time_min > req.max_time_min:
|
||||
complexity = _classify_method_complexity(directions, available_equipment)
|
||||
if complexity == "involved":
|
||||
continue
|
||||
|
||||
# Level 2: also add dietary constraint swaps from substitution_pairs
|
||||
|
|
@ -835,27 +547,20 @@ class RecipeEngine:
|
|||
match_count=int(row.get("match_count") or 0),
|
||||
element_coverage=coverage_raw,
|
||||
swap_candidates=swap_candidates,
|
||||
matched_ingredients=matched,
|
||||
missing_ingredients=missing,
|
||||
prep_notes=sorted(prep_note_set),
|
||||
level=req.level,
|
||||
nutrition=nutrition if has_nutrition else None,
|
||||
source_url=_build_source_url(row),
|
||||
complexity=row_complexity,
|
||||
estimated_time_min=row_time_min,
|
||||
))
|
||||
|
||||
# Sort corpus results — assembly templates are now served from a dedicated tab.
|
||||
# Hard day mode: primary sort by tier (0=premade, 1=simple, 2=moderate),
|
||||
# then by match_count within each tier.
|
||||
# Normal mode: sort by match_count descending.
|
||||
if req.hard_day_mode and hard_day_tier_map:
|
||||
suggestions = sorted(
|
||||
suggestions,
|
||||
key=lambda s: (hard_day_tier_map.get(s.id, 1), -s.match_count),
|
||||
# Prepend assembly-dish templates (burrito, stir fry, omelette, etc.)
|
||||
# These fire regardless of corpus coverage — any pantry can make a burrito.
|
||||
assembly = match_assembly_templates(
|
||||
pantry_items=req.pantry_items,
|
||||
pantry_set=pantry_set,
|
||||
excluded_ids=req.excluded_ids or [],
|
||||
)
|
||||
else:
|
||||
suggestions = sorted(suggestions, key=lambda s: -s.match_count)
|
||||
suggestions = assembly + suggestions
|
||||
|
||||
# Build grocery list — deduplicated union of all missing ingredients
|
||||
seen: set[str] = set()
|
||||
|
|
|
|||
|
|
@ -55,12 +55,11 @@ class SubstitutionEngine:
|
|||
ingredient_name: str,
|
||||
constraint: str,
|
||||
) -> list[SubstitutionSwap]:
|
||||
c = self._store._cp
|
||||
rows = self._store._fetch_all(f"""
|
||||
rows = self._store._fetch_all("""
|
||||
SELECT substitute_name, constraint_label,
|
||||
fat_delta, moisture_delta, glutamate_delta, protein_delta,
|
||||
occurrence_count, compensation_hints
|
||||
FROM {c}substitution_pairs
|
||||
FROM substitution_pairs
|
||||
WHERE original_name = ? AND constraint_label = ?
|
||||
ORDER BY occurrence_count DESC
|
||||
""", (ingredient_name.lower(), constraint))
|
||||
|
|
|
|||
|
|
@ -1,316 +0,0 @@
|
|||
"""
|
||||
Recipe tag inference engine.
|
||||
|
||||
Derives normalized tags from a recipe's title, ingredient names, existing corpus
|
||||
tags (category + keywords), enriched ingredient profile data, and optional
|
||||
nutrition data.
|
||||
|
||||
Tags are organized into five namespaces:
|
||||
cuisine:* -- cuisine/region classification
|
||||
dietary:* -- dietary restriction / nutrition profile
|
||||
flavor:* -- flavor profile (spicy, smoky, sweet, etc.)
|
||||
time:* -- effort / time signals
|
||||
meal:* -- meal type
|
||||
can_be:* -- achievable with substitutions (e.g. can_be:Gluten-Free)
|
||||
|
||||
Output is a flat sorted list of strings, e.g.:
|
||||
["can_be:Gluten-Free", "cuisine:Italian", "dietary:Low-Carb",
|
||||
"flavor:Savory", "flavor:Umami", "time:Quick"]
|
||||
|
||||
These populate recipes.inferred_tags and are FTS5-indexed so browse domain
|
||||
queries find recipes the food.com corpus tags alone would miss.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Text-signal tables
|
||||
# (tag, [case-insensitive substrings to search in combined title+ingredient text])
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
_CUISINE_SIGNALS: list[tuple[str, list[str]]] = [
|
||||
("cuisine:Japanese", ["miso", "dashi", "ramen", "sushi", "teriyaki", "sake", "mirin",
|
||||
"wasabi", "panko", "edamame", "tonkatsu", "yakitori", "ponzu"]),
|
||||
("cuisine:Korean", ["gochujang", "kimchi", "doenjang", "gochugaru",
|
||||
"bulgogi", "bibimbap", "japchae"]),
|
||||
("cuisine:Thai", ["fish sauce", "lemongrass", "galangal", "pad thai", "thai basil",
|
||||
"kaffir lime", "tom yum", "green curry", "red curry", "nam pla"]),
|
||||
("cuisine:Chinese", ["hoisin", "oyster sauce", "five spice", "bok choy", "chow mein",
|
||||
"dumpling", "wonton", "mapo", "char siu", "sichuan"]),
|
||||
("cuisine:Vietnamese", ["pho", "banh mi", "nuoc cham", "rice paper", "vietnamese"]),
|
||||
("cuisine:Indian", ["garam masala", "turmeric", "cardamom", "fenugreek", "paneer",
|
||||
"tikka", "masala", "biryani", "dal", "naan", "tandoori",
|
||||
"curry leaf", "tamarind", "chutney"]),
|
||||
("cuisine:Middle Eastern", ["tahini", "harissa", "za'atar", "sumac", "baharat", "rose water",
|
||||
"pomegranate molasses", "freekeh", "fattoush", "shakshuka"]),
|
||||
("cuisine:Greek", ["feta", "tzatziki", "moussaka", "spanakopita", "orzo",
|
||||
"kalamata", "gyro", "souvlaki", "dolma"]),
|
||||
("cuisine:Mediterranean", ["hummus", "pita", "couscous", "preserved lemon"]),
|
||||
("cuisine:Italian", ["pasta", "pizza", "risotto", "lasagna", "carbonara", "gnocchi",
|
||||
"parmesan", "mozzarella", "ricotta", "prosciutto", "pancetta",
|
||||
"arancini", "osso buco", "tiramisu", "pesto", "bolognese",
|
||||
"cannoli", "polenta", "bruschetta", "focaccia"]),
|
||||
("cuisine:French", ["croissant", "quiche", "crepe", "coq au vin",
|
||||
"ratatouille", "bearnaise", "hollandaise", "bouillabaisse",
|
||||
"herbes de provence", "dijon", "gruyere", "brie", "cassoulet"]),
|
||||
("cuisine:Spanish", ["paella", "chorizo", "gazpacho", "tapas", "patatas bravas",
|
||||
"sofrito", "manchego", "albondigas"]),
|
||||
("cuisine:German", ["sauerkraut", "bratwurst", "schnitzel", "pretzel", "strudel",
|
||||
"spaetzle", "sauerbraten"]),
|
||||
("cuisine:Mexican", ["taco", "burrito", "enchilada", "salsa", "guacamole", "chipotle",
|
||||
"queso", "tamale", "mole", "jalapeno", "tortilla", "carnitas",
|
||||
"chile verde", "posole", "tostada", "quesadilla"]),
|
||||
("cuisine:Latin American", ["plantain", "yuca", "chimichurri", "ceviche", "adobo", "empanada"]),
|
||||
("cuisine:American", ["bbq sauce", "buffalo sauce", "ranch dressing", "coleslaw",
|
||||
"cornbread", "mac and cheese", "brisket", "cheeseburger"]),
|
||||
("cuisine:Southern", ["collard greens", "black-eyed peas", "okra", "grits", "catfish",
|
||||
"hush puppies", "pecan pie"]),
|
||||
("cuisine:Cajun", ["cajun", "creole", "gumbo", "jambalaya", "andouille", "etouffee"]),
|
||||
("cuisine:African", ["injera", "berbere", "jollof", "suya", "egusi", "fufu", "tagine"]),
|
||||
("cuisine:Caribbean", ["jerk", "scotch bonnet", "callaloo", "ackee"]),
|
||||
]
|
||||
|
||||
_DIETARY_SIGNALS: list[tuple[str, list[str]]] = [
|
||||
("dietary:Vegan", ["vegan", "plant-based", "plant based"]),
|
||||
("dietary:Vegetarian", ["vegetarian", "meatless"]),
|
||||
("dietary:Gluten-Free", ["gluten-free", "gluten free", "celiac"]),
|
||||
("dietary:Dairy-Free", ["dairy-free", "dairy free", "lactose free", "non-dairy"]),
|
||||
("dietary:Low-Carb", ["low-carb", "low carb", "keto", "ketogenic", "very low carbs"]),
|
||||
("dietary:High-Protein", ["high protein", "high-protein"]),
|
||||
("dietary:Low-Fat", ["low-fat", "low fat", "fat-free", "reduced fat"]),
|
||||
("dietary:Paleo", ["paleo", "whole30"]),
|
||||
("dietary:Nut-Free", ["nut-free", "nut free", "peanut free"]),
|
||||
("dietary:Egg-Free", ["egg-free", "egg free"]),
|
||||
("dietary:Low-Sodium", ["low sodium", "no salt"]),
|
||||
("dietary:Healthy", ["healthy", "low cholesterol", "heart healthy", "wholesome"]),
|
||||
]
|
||||
|
||||
_FLAVOR_SIGNALS: list[tuple[str, list[str]]] = [
|
||||
("flavor:Spicy", ["jalapeno", "habanero", "ghost pepper", "sriracha",
|
||||
"chili flake", "red pepper flake", "cayenne", "hot sauce",
|
||||
"gochujang", "harissa", "scotch bonnet", "szechuan pepper", "spicy"]),
|
||||
("flavor:Smoky", ["smoked", "liquid smoke", "smoked paprika",
|
||||
"bbq sauce", "barbecue", "hickory", "mesquite"]),
|
||||
("flavor:Sweet", ["honey", "maple syrup", "brown sugar", "caramel", "chocolate",
|
||||
"vanilla", "condensed milk", "molasses", "agave"]),
|
||||
("flavor:Savory", ["soy sauce", "fish sauce", "miso", "worcestershire", "anchovy",
|
||||
"parmesan", "blue cheese", "bone broth"]),
|
||||
("flavor:Tangy", ["lemon juice", "lime juice", "vinegar", "balsamic", "buttermilk",
|
||||
"sour cream", "fermented", "pickled", "tamarind", "sumac"]),
|
||||
("flavor:Herby", ["fresh basil", "fresh cilantro", "fresh dill", "fresh mint",
|
||||
"fresh tarragon", "fresh thyme", "herbes de provence"]),
|
||||
("flavor:Rich", ["heavy cream", "creme fraiche", "mascarpone", "double cream",
|
||||
"ghee", "coconut cream", "cream cheese"]),
|
||||
("flavor:Umami", ["mushroom", "nutritional yeast", "tomato paste",
|
||||
"parmesan rind", "bonito", "kombu"]),
|
||||
]
|
||||
|
||||
_TIME_SIGNALS: list[tuple[str, list[str]]] = [
|
||||
("time:Quick", ["< 15 mins", "< 30 mins", "weeknight", "easy"]),
|
||||
("time:Under 1 Hour", ["< 60 mins"]),
|
||||
("time:Make-Ahead", ["freezer", "overnight", "refrigerator", "make-ahead", "make ahead"]),
|
||||
("time:Slow Cook", ["slow cooker", "crockpot", "< 4 hours", "braise"]),
|
||||
]
|
||||
|
||||
_MAIN_INGREDIENT_SIGNALS: list[tuple[str, list[str]]] = [
|
||||
("main:Chicken", ["chicken", "poultry", "turkey"]),
|
||||
("main:Beef", ["beef", "ground beef", "steak", "brisket", "pot roast"]),
|
||||
("main:Pork", ["pork", "bacon", "ham", "sausage", "prosciutto"]),
|
||||
("main:Fish", ["salmon", "tuna", "tilapia", "cod", "halibut", "shrimp", "seafood", "fish"]),
|
||||
("main:Pasta", ["pasta", "noodle", "spaghetti", "penne", "fettuccine", "linguine"]),
|
||||
("main:Vegetables", ["broccoli", "cauliflower", "zucchini", "eggplant", "carrot",
|
||||
"vegetable", "veggie"]),
|
||||
("main:Eggs", ["egg", "frittata", "omelette", "omelet", "quiche"]),
|
||||
("main:Legumes", ["bean", "lentil", "chickpea", "tofu", "tempeh", "edamame"]),
|
||||
("main:Grains", ["rice", "quinoa", "barley", "farro", "oat", "grain"]),
|
||||
("main:Cheese", ["cheddar", "mozzarella", "parmesan", "ricotta", "brie",
|
||||
"cheese"]),
|
||||
]
|
||||
|
||||
# food.com corpus tag -> normalized tags
|
||||
_CORPUS_TAG_MAP: dict[str, list[str]] = {
|
||||
"european": ["cuisine:Italian", "cuisine:French", "cuisine:German",
|
||||
"cuisine:Spanish"],
|
||||
"asian": ["cuisine:Chinese", "cuisine:Japanese", "cuisine:Thai",
|
||||
"cuisine:Korean", "cuisine:Vietnamese"],
|
||||
"chinese": ["cuisine:Chinese"],
|
||||
"japanese": ["cuisine:Japanese"],
|
||||
"thai": ["cuisine:Thai"],
|
||||
"vietnamese": ["cuisine:Vietnamese"],
|
||||
"indian": ["cuisine:Indian"],
|
||||
"greek": ["cuisine:Greek"],
|
||||
"mexican": ["cuisine:Mexican"],
|
||||
"african": ["cuisine:African"],
|
||||
"caribbean": ["cuisine:Caribbean"],
|
||||
"vegan": ["dietary:Vegan", "dietary:Vegetarian"],
|
||||
"vegetarian": ["dietary:Vegetarian"],
|
||||
"healthy": ["dietary:Healthy"],
|
||||
"low cholesterol": ["dietary:Healthy"],
|
||||
"very low carbs": ["dietary:Low-Carb"],
|
||||
"high in...": ["dietary:High-Protein"],
|
||||
"lactose free": ["dietary:Dairy-Free"],
|
||||
"egg free": ["dietary:Egg-Free"],
|
||||
"< 15 mins": ["time:Quick"],
|
||||
"< 30 mins": ["time:Quick"],
|
||||
"< 60 mins": ["time:Under 1 Hour"],
|
||||
"< 4 hours": ["time:Slow Cook"],
|
||||
"weeknight": ["time:Quick"],
|
||||
"freezer": ["time:Make-Ahead"],
|
||||
"dessert": ["meal:Dessert"],
|
||||
"breakfast": ["meal:Breakfast"],
|
||||
"lunch/snacks": ["meal:Lunch", "meal:Snack"],
|
||||
"beverages": ["meal:Beverage"],
|
||||
"cookie & brownie": ["meal:Dessert"],
|
||||
"breads": ["meal:Bread"],
|
||||
}
|
||||
|
||||
# ingredient_profiles.elements value -> flavor tag
|
||||
_ELEMENT_TO_FLAVOR: dict[str, str] = {
|
||||
"Aroma": "flavor:Herby",
|
||||
"Richness": "flavor:Rich",
|
||||
"Structure": "", # no flavor tag
|
||||
"Binding": "",
|
||||
"Crust": "flavor:Smoky",
|
||||
"Lift": "",
|
||||
"Emulsion": "flavor:Rich",
|
||||
"Acid": "flavor:Tangy",
|
||||
}
|
||||
|
||||
|
||||
def _build_text(title: str, ingredient_names: list[str]) -> str:
|
||||
parts = [title.lower()]
|
||||
parts.extend(i.lower() for i in ingredient_names)
|
||||
return " ".join(parts)
|
||||
|
||||
|
||||
def _match_signals(text: str, table: list[tuple[str, list[str]]]) -> list[str]:
|
||||
return [tag for tag, pats in table if any(p in text for p in pats)]
|
||||
|
||||
|
||||
def infer_tags(
|
||||
title: str,
|
||||
ingredient_names: list[str],
|
||||
corpus_keywords: list[str],
|
||||
corpus_category: str = "",
|
||||
# Enriched ingredient profile signals (from ingredient_profiles cross-ref)
|
||||
element_coverage: dict[str, float] | None = None,
|
||||
fermented_count: int = 0,
|
||||
glutamate_total: float = 0.0,
|
||||
ph_min: float | None = None,
|
||||
available_sub_constraints: list[str] | None = None,
|
||||
# Nutrition data for macro-based tags
|
||||
calories: float | None = None,
|
||||
protein_g: float | None = None,
|
||||
fat_g: float | None = None,
|
||||
carbs_g: float | None = None,
|
||||
servings: float | None = None,
|
||||
) -> list[str]:
|
||||
"""
|
||||
Derive normalized tags for a recipe.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
title, ingredient_names, corpus_keywords, corpus_category
|
||||
: Primary recipe data.
|
||||
element_coverage
|
||||
: Dict from recipes.element_coverage -- element name to coverage ratio
|
||||
(e.g. {"Aroma": 0.6, "Richness": 0.4}). Derived from ingredient_profiles.
|
||||
fermented_count
|
||||
: Number of fermented ingredients (from ingredient_profiles.is_fermented).
|
||||
glutamate_total
|
||||
: Sum of glutamate_mg across all profiled ingredients. High values signal umami.
|
||||
ph_min
|
||||
: Minimum ph_estimate across profiled ingredients. Low values signal acidity.
|
||||
available_sub_constraints
|
||||
: Substitution constraint labels achievable for this recipe
|
||||
(e.g. ["gluten_free", "low_carb"]). From substitution_pairs cross-ref.
|
||||
These become can_be:* tags.
|
||||
calories, protein_g, fat_g, carbs_g, servings
|
||||
: Nutrition data for macro-based dietary tags.
|
||||
|
||||
Returns
|
||||
-------
|
||||
Sorted list of unique normalized tag strings.
|
||||
"""
|
||||
tags: set[str] = set()
|
||||
|
||||
# 1. Map corpus tags to normalized vocabulary
|
||||
for kw in corpus_keywords:
|
||||
for t in _CORPUS_TAG_MAP.get(kw.lower(), []):
|
||||
tags.add(t)
|
||||
if corpus_category:
|
||||
for t in _CORPUS_TAG_MAP.get(corpus_category.lower(), []):
|
||||
tags.add(t)
|
||||
|
||||
# 2. Text-signal matching
|
||||
text = _build_text(title, ingredient_names)
|
||||
tags.update(_match_signals(text, _CUISINE_SIGNALS))
|
||||
tags.update(_match_signals(text, _DIETARY_SIGNALS))
|
||||
tags.update(_match_signals(text, _FLAVOR_SIGNALS))
|
||||
tags.update(_match_signals(text, _MAIN_INGREDIENT_SIGNALS))
|
||||
|
||||
# 3. Time signals from corpus keywords + text
|
||||
corpus_text = " ".join(kw.lower() for kw in corpus_keywords)
|
||||
tags.update(_match_signals(corpus_text, _TIME_SIGNALS))
|
||||
tags.update(_match_signals(text, _TIME_SIGNALS))
|
||||
|
||||
# 4. Enriched profile signals
|
||||
if element_coverage:
|
||||
for element, coverage in element_coverage.items():
|
||||
if coverage > 0.2: # >20% of ingredients carry this element
|
||||
flavor_tag = _ELEMENT_TO_FLAVOR.get(element, "")
|
||||
if flavor_tag:
|
||||
tags.add(flavor_tag)
|
||||
|
||||
if glutamate_total > 50:
|
||||
tags.add("flavor:Umami")
|
||||
|
||||
if fermented_count > 0:
|
||||
tags.add("flavor:Tangy")
|
||||
|
||||
if ph_min is not None and ph_min < 4.5:
|
||||
tags.add("flavor:Tangy")
|
||||
|
||||
# 5. Achievable-via-substitution tags
|
||||
if available_sub_constraints:
|
||||
label_to_tag = {
|
||||
"gluten_free": "can_be:Gluten-Free",
|
||||
"low_calorie": "can_be:Low-Calorie",
|
||||
"low_carb": "can_be:Low-Carb",
|
||||
"vegan": "can_be:Vegan",
|
||||
"dairy_free": "can_be:Dairy-Free",
|
||||
"low_sodium": "can_be:Low-Sodium",
|
||||
}
|
||||
for label in available_sub_constraints:
|
||||
tag = label_to_tag.get(label)
|
||||
if tag:
|
||||
tags.add(tag)
|
||||
|
||||
# 6. Macro-based dietary tags
|
||||
if servings and servings > 0 and any(
|
||||
v is not None for v in (protein_g, fat_g, carbs_g, calories)
|
||||
):
|
||||
def _per(v: float | None) -> float | None:
|
||||
return v / servings if v is not None else None
|
||||
|
||||
prot_s = _per(protein_g)
|
||||
fat_s = _per(fat_g)
|
||||
carb_s = _per(carbs_g)
|
||||
cal_s = _per(calories)
|
||||
|
||||
if prot_s is not None and prot_s >= 20:
|
||||
tags.add("dietary:High-Protein")
|
||||
if fat_s is not None and fat_s <= 5:
|
||||
tags.add("dietary:Low-Fat")
|
||||
if carb_s is not None and carb_s <= 10:
|
||||
tags.add("dietary:Low-Carb")
|
||||
if cal_s is not None and cal_s <= 250:
|
||||
tags.add("dietary:Light")
|
||||
elif protein_g is not None and protein_g >= 20:
|
||||
tags.add("dietary:High-Protein")
|
||||
|
||||
# 7. Vegan implies vegetarian
|
||||
if "dietary:Vegan" in tags:
|
||||
tags.add("dietary:Vegetarian")
|
||||
|
||||
return sorted(tags)
|
||||
|
|
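For context, here is a minimal usage sketch of `infer_tags`. The import path and all input values are illustrative only — they are not taken from this diff, and the text-signal tables may add further tags beyond those listed.

```python
# Hypothetical call — module path and values are illustrative only.
from app.services.recipe.tag_inference import infer_tags  # assumed module path

tags = infer_tags(
    title="Weeknight Chickpea Curry",
    ingredient_names=["chickpeas", "coconut milk", "curry powder", "garlic"],
    corpus_keywords=["indian", "vegan", "< 30 mins"],
    glutamate_total=120.0,  # > 50 triggers flavor:Umami
    protein_g=24.0,
    servings=1.0,           # 24 g protein per serving triggers dietary:High-Protein
)
# Expected to include at least: cuisine:Indian, dietary:High-Protein,
# dietary:Vegan, dietary:Vegetarian, flavor:Umami, time:Quick
print(tags)
```
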
@ -22,7 +22,7 @@ from app.services.expiration_predictor import ExpirationPredictor

log = logging.getLogger(__name__)

LLM_TASK_TYPES: frozenset[str] = frozenset({"expiry_llm_fallback", "recipe_llm"})
LLM_TASK_TYPES: frozenset[str] = frozenset({"expiry_llm_fallback"})

VRAM_BUDGETS: dict[str, float] = {
    # ExpirationPredictor uses a small LLM (16 tokens out, single pass).

@ -88,8 +88,6 @@ def run_task(
    try:
        if task_type == "expiry_llm_fallback":
            _run_expiry_llm_fallback(db_path, job_id, params)
        elif task_type == "recipe_llm":
            _run_recipe_llm(db_path, job_id, params)
        else:
            raise ValueError(f"Unknown kiwi task type: {task_type!r}")
        _update_task_status(db_path, task_id, "completed")

@ -145,41 +143,3 @@ def _run_expiry_llm_fallback(
        expiry,
        days,
    )


def _run_recipe_llm(db_path: Path, _job_id_int: int, params: str | None) -> None:
    """Run LLM recipe generation for an async recipe job.

    params JSON keys:
        job_id (required) — recipe_jobs.job_id string (e.g. "rec_a1b2c3...")

    Creates its own Store — follows same pattern as _suggest_in_thread.
    MUST call store.fail_recipe_job() before re-raising so recipe_jobs.status
    doesn't stay 'running' while background_tasks shows 'failed'.
    """
    from app.db.store import Store
    from app.models.schemas.recipe import RecipeRequest
    from app.services.recipe.recipe_engine import RecipeEngine

    p = json.loads(params or "{}")
    recipe_job_id: str = p.get("job_id", "")
    if not recipe_job_id:
        raise ValueError("recipe_llm: 'job_id' is required in params")

    store = Store(db_path)
    try:
        store.update_recipe_job_running(recipe_job_id)
        row = store._fetch_one(
            "SELECT request FROM recipe_jobs WHERE job_id=?", (recipe_job_id,)
        )
        if row is None:
            raise ValueError(f"recipe_llm: recipe_jobs row not found: {recipe_job_id!r}")
        req = RecipeRequest.model_validate_json(row["request"])
        result = RecipeEngine(store).suggest(req)
        store.complete_recipe_job(recipe_job_id, result.model_dump_json())
        log.info("recipe_llm: job %s completed (%d suggestion(s))", recipe_job_id, len(result.suggestions))
    except Exception as exc:
        store.fail_recipe_job(recipe_job_id, str(exc))
        raise
    finally:
        store.close()

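Per the docstring above, the `recipe_llm` task takes only a `job_id` in its params JSON. A payload would look like this (the id value is made up — real ids come from the recipe_jobs table):

```python
# Illustrative params payload for the recipe_llm background task.
import json

params = json.dumps({"job_id": "rec_a1b2c3d4"})  # job_id value is hypothetical
```
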
@ -1,10 +1,5 @@
# app/tasks/scheduler.py
"""Kiwi LLM task scheduler — thin shim over circuitforge_core.tasks.scheduler.

Local mode (CLOUD_MODE unset): LocalScheduler — simple FIFO, no coordinator.
Cloud mode (CLOUD_MODE=true): OrchestratedScheduler — coordinator-aware, fans
out concurrent jobs across all registered cf-orch GPU nodes.
"""
"""Kiwi LLM task scheduler — thin shim over circuitforge_core.tasks.scheduler."""
from __future__ import annotations

from pathlib import Path

@ -12,68 +7,15 @@ from pathlib import Path
from circuitforge_core.tasks.scheduler import (
    TaskScheduler,
    get_scheduler as _base_get_scheduler,
    reset_scheduler as _reset_local,  # re-export for tests
    reset_scheduler,  # re-export for tests
)

from app.cloud_session import CLOUD_MODE
from app.core.config import settings
from app.tasks.runner import LLM_TASK_TYPES, VRAM_BUDGETS, run_task


def _orch_available() -> bool:
    """Return True if circuitforge_orch is installed in this environment."""
    try:
        import circuitforge_orch  # noqa: F401
        return True
    except ImportError:
        return False


def _use_orch() -> bool:
    """Return True if the OrchestratedScheduler should be used.

    Priority order:
    1. USE_ORCH_SCHEDULER env var — explicit override always wins.
    2. CLOUD_MODE=true — use orch in managed cloud deployments.
    3. circuitforge_orch installed — paid+ local users who have cf-orch
       set up get coordinator-aware scheduling (local GPU first) automatically.
    """
    override = settings.USE_ORCH_SCHEDULER
    if override is not None:
        return override
    return CLOUD_MODE or _orch_available()


def get_scheduler(db_path: Path) -> TaskScheduler:
    """Return the process-level TaskScheduler singleton for Kiwi.

    OrchestratedScheduler: coordinator-aware, fans out concurrent jobs across
    all registered cf-orch GPU nodes. Active when USE_ORCH_SCHEDULER=true,
    CLOUD_MODE=true, or circuitforge_orch is installed locally (paid+ users
    running their own cf-orch stack get this automatically; local GPU is
    preferred by the coordinator's allocation queue).

    LocalScheduler: serial FIFO, no coordinator dependency. Free-tier local
    installs without circuitforge_orch installed use this automatically.
    """
    if _use_orch():
        try:
            from circuitforge_orch.scheduler import get_orch_scheduler
        except ImportError:
            import logging
            logging.getLogger(__name__).warning(
                "circuitforge_orch not installed — falling back to LocalScheduler"
            )
        else:
            return get_orch_scheduler(
                db_path=db_path,
                run_task_fn=run_task,
                task_types=LLM_TASK_TYPES,
                vram_budgets=VRAM_BUDGETS,
                coordinator_url=settings.COORDINATOR_URL,
                service_name="kiwi",
            )

    """Return the process-level TaskScheduler singleton for Kiwi."""
    return _base_get_scheduler(
        db_path=db_path,
        run_task_fn=run_task,

@ -82,15 +24,3 @@ def get_scheduler(db_path: Path) -> TaskScheduler:
        coordinator_url=settings.COORDINATOR_URL,
        service_name="kiwi",
    )


def reset_scheduler() -> None:
    """Shut down and clear the active scheduler singleton. TEST TEARDOWN ONLY."""
    if _use_orch():
        try:
            from circuitforge_orch.scheduler import reset_orch_scheduler
            reset_orch_scheduler()
            return
        except ImportError:
            pass
    _reset_local()

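For reference, callers obtain the singleton through this shim's `get_scheduler` (a sketch — the database path below is illustrative, matching Kiwi's default `./data` directory):

```python
# Sketch of a call site for the scheduler shim above; the path is illustrative.
from pathlib import Path

from app.tasks.scheduler import get_scheduler

scheduler = get_scheduler(Path("./data/kiwi.db"))
```
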
23
app/tiers.py

@ -15,22 +15,8 @@ KIWI_BYOK_UNLOCKABLE: frozenset[str] = frozenset({
    "recipe_suggestions",
    "expiry_llm_matching",
    "receipt_ocr",
    "style_classifier",
    "meal_plan_llm",
    "meal_plan_llm_timing",
    "community_fork_adapt",
})

# Sources subject to monthly cf-orch call caps. Subscription-based sources are uncapped.
LIFETIME_SOURCES: frozenset[str] = frozenset({"lifetime", "founders"})

# (source, tier) → monthly cf-orch call allowance
LIFETIME_ORCH_CAPS: dict[tuple[str, str], int] = {
    ("lifetime", "paid"): 60,
    ("lifetime", "premium"): 180,
    ("founders", "premium"): 300,
}

# Feature → minimum tier required
KIWI_FEATURES: dict[str, str] = {
    # Free tier

@ -46,16 +32,9 @@ KIWI_FEATURES: dict[str, str] = {
    "receipt_ocr": "paid",  # BYOK-unlockable
    "recipe_suggestions": "paid",  # BYOK-unlockable
    "expiry_llm_matching": "paid",  # BYOK-unlockable
    "meal_planning": "free",
    "meal_plan_config": "paid",  # configurable meal types (breakfast/lunch/snack)
    "meal_plan_llm": "paid",  # LLM-assisted full-week plan generation; BYOK-unlockable
    "meal_plan_llm_timing": "paid",  # LLM time fill-in for recipes missing corpus times; BYOK-unlockable
    "meal_planning": "paid",
    "dietary_profiles": "paid",
    "style_picker": "paid",
    "recipe_collections": "paid",
    "style_classifier": "paid",  # LLM auto-tag for saved recipe style tags; BYOK-unlockable
    "community_publish": "paid",  # Publish plans/outcomes to community feed
    "community_fork_adapt": "paid",  # Fork with LLM pantry adaptation (BYOK-unlockable)

    # Premium tier
    "multi_household": "premium",

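The cap table above is keyed by `(source, tier)` tuples, so checking an allowance is a plain dict lookup (illustrative snippet, not part of the diff):

```python
# Illustrative lookups against the tables defined above.
cap = LIFETIME_ORCH_CAPS.get(("lifetime", "premium"))  # 180 calls/month
capped = "founders" in LIFETIME_SOURCES                # True — founders purchases are capped
```
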
@ -13,23 +13,12 @@ services:
    environment:
      CLOUD_MODE: "true"
      CLOUD_DATA_ROOT: /devl/kiwi-cloud-data
      RECIPE_DB_PATH: /devl/kiwi-corpus/recipes.db
      KIWI_BASE_URL: https://menagerie.circuitforge.tech/kiwi
      # DIRECTUS_JWT_SECRET, HEIMDALL_URL, HEIMDALL_ADMIN_TOKEN — set in .env
      # DEV ONLY: comma-separated IPs that bypass JWT auth (LAN testing without Caddy).
      # Production deployments must NOT set this. Leave blank or omit entirely.
      CLOUD_AUTH_BYPASS_IPS: ${CLOUD_AUTH_BYPASS_IPS:-}
      # cf-orch: route LLM calls through the coordinator for managed GPU inference
      CF_ORCH_URL: http://host.docker.internal:7700
      # Community PostgreSQL — shared across CF products; unset = community features unavailable (fail soft)
      COMMUNITY_DB_URL: ${COMMUNITY_DB_URL:-}
      COMMUNITY_PSEUDONYM_SALT: ${COMMUNITY_PSEUDONYM_SALT:-}
    extra_hosts:
      - "host.docker.internal:host-gateway"
    volumes:
      - /devl/kiwi-cloud-data:/devl/kiwi-cloud-data
      # Recipe corpus — shared read-only NFS-backed SQLite (3.1M recipes, 2.9GB)
      - /Library/Assets/kiwi/kiwi.db:/devl/kiwi-corpus/recipes.db:ro
      # LLM config — shared with other CF products; read-only in container
      - ${HOME}/.config/circuitforge:/root/.config/circuitforge:ro
    networks:

@ -2,15 +2,9 @@
# Not used in cloud or demo stacks (those use compose.cloud.yml / compose.demo.yml directly).

services:
  api:
    volumes:
      # Symlink /data/kiwi.db → /Library/Assets/kiwi/kiwi.db; mount the NAS path so
      # Docker can follow the symlink inside the container.
      - /Library/Assets/kiwi:/Library/Assets/kiwi:rw

  # cf-orch agent sidecar: registers this machine as GPU node "sif" with the coordinator.
  # cf-orch agent sidecar: registers kiwi as a GPU node with the coordinator.
  # The API scheduler uses COORDINATOR_URL to lease VRAM cooperatively; this
  # agent makes the local VRAM usage visible on the orchestrator dashboard.
  # agent makes kiwi's VRAM usage visible on the orchestrator dashboard.
  cf-orch-agent:
    image: kiwi-api  # reuse local api image — cf-core already installed there
    network_mode: host

@ -21,7 +15,7 @@ services:
    command: >
      conda run -n kiwi cf-orch agent
      --coordinator ${COORDINATOR_URL:-http://10.1.10.71:7700}
      --node-id sif
      --node-id kiwi
      --host 0.0.0.0
      --port 7702
      --advertise-host ${CF_ORCH_ADVERTISE_HOST:-10.1.10.71}

@ -1,74 +0,0 @@
# Kiwi — LLM backend configuration
#
# Copy to ~/.config/circuitforge/llm.yaml (shared across all CF products)
# or to config/llm.yaml (Kiwi-local, takes precedence).
#
# Kiwi uses LLMs for:
#   - Expiry prediction fallback (unknown products not in the lookup table)
#   - Meal planning suggestions
#
# Local inference (Ollama / vLLM) is the default path — no API key required.
# BYOK (bring your own key): set api_key_env to point at your API key env var.
# cf-orch trunk: set CF_ORCH_URL env var to allocate cf-text on-demand via
# the coordinator instead of hitting a static URL.

backends:
  ollama:
    type: openai_compat
    enabled: true
    base_url: http://localhost:11434/v1
    model: llama3.2:3b
    api_key: ollama
    supports_images: false

  vllm:
    type: openai_compat
    enabled: false
    base_url: http://localhost:8000/v1
    model: __auto__  # resolved from /v1/models at runtime
    api_key: ''
    supports_images: false

  # ── cf-orch trunk services ──────────────────────────────────────────────────
  # These allocate via cf-orch rather than connecting to a static URL.
  # cf-orch starts the service on-demand and returns its live URL.
  # Set CF_ORCH_URL env var or fill in url below; leave enabled: false if
  # cf-orch is not deployed in your environment.

  cf_text:
    type: openai_compat
    enabled: false
    base_url: http://localhost:8008/v1  # fallback when cf-orch is not available
    model: __auto__
    api_key: any
    supports_images: false
    cf_orch:
      service: cf-text
      # model_candidates: leave empty to use the service's default_model,
      # or specify a catalog alias (e.g. "qwen2.5-3b").
      model_candidates: []
      ttl_s: 3600

  # ── Cloud / BYOK ───────────────────────────────────────────────────────────

  anthropic:
    type: anthropic
    enabled: false
    model: claude-haiku-4-5-20251001
    api_key_env: ANTHROPIC_API_KEY
    supports_images: false

  openai:
    type: openai_compat
    enabled: false
    base_url: https://api.openai.com/v1
    model: gpt-4o-mini
    api_key_env: OPENAI_API_KEY
    supports_images: false

fallback_order:
  - cf_text
  - ollama
  - vllm
  - anthropic
  - openai

@ -8,10 +8,8 @@ server {
    # Proxy API requests to the FastAPI container via Docker bridge network.
    location /api/ {
        proxy_pass http://api:8512;
        proxy_set_header Host $http_host;
        # Prefer X-Real-IP set by Caddy (real client address); fall back to $remote_addr
        # when accessed directly on LAN without Caddy in the path.
        proxy_set_header X-Real-IP $http_x_real_ip;
        proxy_set_header Host $host;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header X-Forwarded-Proto $http_x_forwarded_proto;
        # Forward the session header injected by Caddy from cf_session cookie.

@ -20,22 +18,6 @@ server {
        client_max_body_size 20m;
    }

    # Direct-port LAN access (localhost:8515): when VITE_API_BASE='/kiwi', the frontend
    # builds API calls as /kiwi/api/v1/... — proxy these to the API container.
    # Through Caddy the /kiwi prefix is stripped before reaching nginx, so this block
    # is only active for direct-port access without Caddy in the path.
    # Longer prefix (/kiwi/api/ = 10 chars) beats ^~ /kiwi/ (6 chars) per nginx rules.
    location /kiwi/api/ {
        rewrite ^/kiwi(/api/.*)$ $1 break;
        proxy_pass http://api:8512;
        proxy_set_header Host $http_host;
        proxy_set_header X-Real-IP $http_x_real_ip;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header X-Forwarded-Proto $http_x_forwarded_proto;
        proxy_set_header X-CF-Session $http_x_cf_session;
        client_max_body_size 20m;
    }

    # When accessed directly (localhost:8515) instead of via Caddy (/kiwi path-strip),
    # Vite's /kiwi base URL means assets are requested at /kiwi/assets/... but stored
    # at /assets/... in nginx's root. Alias /kiwi/ → root so direct port access works.

@ -1,69 +0,0 @@
# Installation

Kiwi runs as a Docker Compose stack: a FastAPI backend and a Vue 3 frontend served by nginx. No external services are required for the core feature set.

## Prerequisites

- Docker and Docker Compose
- 500 MB disk for images + space for your pantry database

## Quick setup

```bash
git clone https://git.opensourcesolarpunk.com/Circuit-Forge/kiwi
cd kiwi
cp .env.example .env
./manage.sh build
./manage.sh start
```

The web UI opens at `http://localhost:8511`. The FastAPI backend is at `http://localhost:8512`.

## manage.sh commands

| Command | Description |
|---------|-------------|
| `./manage.sh start` | Start all services |
| `./manage.sh stop` | Stop all services |
| `./manage.sh restart` | Restart all services |
| `./manage.sh status` | Show running containers |
| `./manage.sh logs` | Tail logs (all services) |
| `./manage.sh build` | Rebuild images |
| `./manage.sh open` | Open browser to the web UI |

## Environment variables

Copy `.env.example` to `.env` and configure:

```bash
# Required — generate a random secret
SECRET_KEY=your-random-secret-here

# Optional — LLM backend for AI features (receipt OCR, recipe suggestions)
# See LLM Setup guide for details
LLM_BACKEND=ollama   # ollama | openai-compatible | vllm
LLM_BASE_URL=http://localhost:11434
LLM_MODEL=llama3.1
```

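One way to generate a `SECRET_KEY` value — a suggestion, since the docs above don't prescribe a method — is Python's `secrets` module:

```python
# Prints a random hex string suitable for SECRET_KEY (the method is a
# suggestion, not mandated by the Kiwi docs).
import secrets

print(secrets.token_hex(32))
```
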
## Data location

By default, Kiwi stores its SQLite database in `./data/kiwi.db` inside the repo directory. The `data/` folder is bind-mounted into the container so your pantry survives image rebuilds.

## Updating

```bash
git pull
./manage.sh build
./manage.sh restart
```

Database migrations run automatically on startup.

## Uninstalling

```bash
./manage.sh stop
docker compose down -v   # removes containers and volumes
rm -rf data/             # removes local database
```

@ -1,74 +0,0 @@
# LLM Backend Setup (Optional)

An LLM backend unlocks **receipt OCR**, **recipe suggestions (L3–L4)**, and **style auto-classification**. Everything else works without one.

You can use any OpenAI-compatible inference server: Ollama, vLLM, LM Studio, a local llama.cpp server, or a commercial API.

## BYOK — Bring Your Own Key

BYOK means you provide your own LLM backend. Paid AI features are unlocked at **any tier** when a valid backend is configured. You pay for your own inference; Kiwi just uses it.

## Choosing a backend

| Backend | Best for | Notes |
|---------|----------|-------|
| **Ollama** | Local, easy setup | Recommended for getting started |
| **vLLM** | Local, high throughput | Better for faster hardware |
| **OpenAI API** | No local GPU | Requires paid API key |
| **Anthropic API** | No local GPU | Requires paid API key |

## Ollama setup (recommended)

```bash
# Install Ollama
curl -fsSL https://ollama.ai/install.sh | sh

# Pull a model — llama3.1 8B works well for recipe tasks
ollama pull llama3.1

# Verify it's running
ollama list
```

In your Kiwi `.env`:

```bash
LLM_BACKEND=ollama
LLM_BASE_URL=http://host.docker.internal:11434
LLM_MODEL=llama3.1
```

!!! note "Docker networking"
    Use `host.docker.internal` instead of `localhost` when Ollama is running on your host and Kiwi is in Docker.

## OpenAI-compatible API

```bash
LLM_BACKEND=openai
LLM_BASE_URL=https://api.openai.com/v1
LLM_API_KEY=sk-your-key-here
LLM_MODEL=gpt-4o-mini
```

## Verify the connection

In the Kiwi **Settings** page, the LLM status indicator shows whether the backend is reachable. A green checkmark means OCR and L3–L4 recipe suggestions are active.

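Outside the UI, you can also probe the backend directly. The sketch below assumes an OpenAI-compatible server (Ollama, vLLM) exposing its model-listing endpoint on the default Ollama port:

```python
# Quick reachability check against an OpenAI-compatible backend.
# Assumes the server exposes GET /v1/models (Ollama and vLLM both do).
import json
import urllib.request

base_url = "http://localhost:11434"  # match your LLM_BASE_URL
with urllib.request.urlopen(f"{base_url}/v1/models", timeout=5) as resp:
    models = json.load(resp)

print([m["id"] for m in models.get("data", [])])
```
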
## What LLM is used for

| Feature | LLM required |
|---------|-------------|
| Receipt OCR (line-item extraction) | Yes |
| Recipe suggestions L1 (pantry match) | No |
| Recipe suggestions L2 (substitution) | No |
| Recipe suggestions L3 (style templates) | Yes |
| Recipe suggestions L4 (full generation) | Yes |
| Style auto-classifier | Yes |

L1 and L2 suggestions use deterministic matching — they work without any LLM configured. See [Recipe Engine](../reference/recipe-engine.md) for the full algorithm breakdown.

## Model recommendations

- **Receipt OCR**: any model with vision capability (LLaVA, GPT-4o, etc.)
- **Recipe suggestions**: 7B–13B instruction-tuned models work well; larger models produce more creative L4 output
- **Style classification**: small models handle this fine (3B+)

@ -1,52 +0,0 @@
# Quick Start

This guide walks you through adding your first pantry item and getting a recipe suggestion. No LLM backend needed for these steps.

## 1. Add an item by barcode

Open the **Inventory** tab. Tap the barcode icon or click **Scan barcode**, then point your camera at a product barcode. Kiwi looks up the product in the open barcode database and adds it to your pantry.

If the barcode isn't recognized, you'll be prompted to enter the product name and details manually.

## 2. Add an item manually

Click **Add item** and fill in:

- **Name** — what is it? (e.g., "Canned chickpeas")
- **Quantity** — how many or how much
- **Expiry date** — when does it expire? (optional but recommended)
- **Category** — used for dietary filtering and pantry stats

## 3. Upload a receipt

Click **Receipts** in the sidebar, then **Upload receipt**. Take a photo of a grocery receipt or upload an image from your device.

- **Free tier**: the receipt is stored for you to review; line items are entered manually
- **Paid / BYOK**: OCR runs automatically and extracts items for you to approve

## 4. Browse recipes

Click **Recipes** in the sidebar. The recipe browser shows your **pantry match percentage** for each recipe — how much of the ingredient list you already have.

Use the filters to narrow by:

- **Cuisine** — Italian, Mexican, Japanese, etc.
- **Meal type** — breakfast, lunch, dinner, snack
- **Dietary** — vegetarian, vegan, gluten-free, dairy-free, etc.
- **Main ingredient** — chicken, pasta, lentils, etc.

## 5. Get a suggestion based on what's expiring

Click **Leftover mode** (the clock icon or toggle). Kiwi re-ranks suggestions to surface recipes that use your nearly-expired items first.

Free accounts get 5 leftover-mode requests per day. Paid accounts get unlimited.

## 6. Save a recipe

Click the bookmark icon on any recipe card to save it. You can add:

- **Notes** — cooking tips, modifications, family preferences
- **Star rating** — 0 to 5 stars
- **Style tags** — quick, comforting, weeknight, etc.

Saved recipes appear in the **Saved** tab. Paid accounts can organize them into named collections.

@ -1,35 +0,0 @@
# Kiwi — Pantry Tracker

**Stop throwing food away. Cook what you already have.**

Kiwi tracks your pantry, watches for expiry dates, and suggests recipes based on what's about to go bad. Scan barcodes, photograph receipts, and let Kiwi tell you what to make for dinner — without needing an AI backend to do it.



---

## What Kiwi does

- **Inventory tracking** — add items by barcode scan, receipt photo, or manual entry
- **Expiry alerts** — know what's about to go bad before it does
- **Recipe browser** — browse by cuisine, meal type, dietary preference, or main ingredient; see pantry match percentage inline
- **Leftover mode** — prioritize nearly-expired items when getting recipe suggestions
- **Receipt OCR** — extract line items from receipt photos automatically (Paid / BYOK)
- **Recipe suggestions** — four levels from pantry-match corpus to full LLM generation (Paid / BYOK)
- **Saved recipes** — bookmark any recipe with notes, 0–5 star rating, and style tags
- **CSV export** — export your full pantry inventory anytime

## Quick links

- [Installation](getting-started/installation.md) — local self-hosted setup
- [Quick Start](getting-started/quick-start.md) — add your first item and get a recipe
- [LLM Setup](getting-started/llm-setup.md) — unlock AI features with your own backend
- [Tier System](reference/tier-system.md) — what's free vs. paid

## No AI required

Inventory tracking, barcode scanning, expiry alerts, the recipe browser, saved recipes, and CSV export all work without any LLM configured. AI features (receipt OCR, recipe suggestions, style auto-classification) are optional and BYOK-unlockable at any tier.

## Free and open core

Discovery and pipeline code is MIT-licensed. AI features are BSL 1.1 — free for personal non-commercial self-hosting, commercial SaaS requires a license. See the [tier table](reference/tier-system.md) for the full breakdown.

@ -1 +0,0 @@
(function(){var s=document.createElement("script");s.defer=true;s.dataset.domain="docs.circuitforge.tech,circuitforge.tech";s.dataset.api="https://analytics.circuitforge.tech/api/event";s.src="https://analytics.circuitforge.tech/js/script.js";document.head.appendChild(s);})();

@ -1,80 +0,0 @@
# Architecture

Kiwi is a self-contained Docker Compose stack with a Vue 3 (SPA) frontend and a FastAPI backend backed by SQLite.

## Stack

| Layer | Technology |
|-------|-----------|
| Frontend | Vue 3 + TypeScript + Vite |
| Backend | FastAPI (Python 3.11+) |
| Database | SQLite (via circuitforge-core) |
| Auth (cloud) | CF session cookie → Directus JWT |
| Licensing | Heimdall (RS256 JWT, offline-capable) |
| LLM inference | Pluggable — Ollama, vLLM, OpenAI-compatible |
| Barcode lookup | Open Food Facts / UPC Database API |
| OCR | LLM vision model (configurable) |

## Data flow

```mermaid
graph LR
    User -->|browser| Vue3[Vue 3 SPA]
    Vue3 -->|/api/*| FastAPI
    FastAPI -->|SQL| SQLite[(SQLite DB)]
    FastAPI -->|HTTP| LLM[LLM Backend]
    FastAPI -->|HTTP| Barcode[Barcode DB API]
    FastAPI -->|JWT| Heimdall[Heimdall License]
```

## Docker Compose services

```yaml
services:
  api:
    # FastAPI backend — network_mode: host in dev
    # Exposed at port 8512
  web:
    # Vue 3 SPA served by nginx
    # Exposed at port 8511
```

In development, the API uses host networking so nginx can reach it at `172.17.0.1:8512` (Docker bridge gateway).

## Database

SQLite at `./data/kiwi.db`. The schema is managed by numbered migration files in `app/db/migrations/`. Migrations run automatically on startup — the startup script applies any new `*.sql` files in order.

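As a rough illustration of that startup behaviour (a sketch only — not Kiwi's actual migration code; the `schema_migrations` bookkeeping table is an assumption):

```python
# Sketch: apply any not-yet-applied numbered .sql files in sorted order.
# The schema_migrations bookkeeping table is assumed for illustration.
import sqlite3
from pathlib import Path


def apply_migrations(db_path: str, migrations_dir: str = "app/db/migrations") -> None:
    conn = sqlite3.connect(db_path)
    conn.execute("CREATE TABLE IF NOT EXISTS schema_migrations (name TEXT PRIMARY KEY)")
    applied = {row[0] for row in conn.execute("SELECT name FROM schema_migrations")}
    for sql_file in sorted(Path(migrations_dir).glob("*.sql")):
        if sql_file.name not in applied:
            conn.executescript(sql_file.read_text())
            conn.execute("INSERT INTO schema_migrations (name) VALUES (?)", (sql_file.name,))
    conn.commit()
    conn.close()
```
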
Key tables:

| Table | Purpose |
|-------|---------|
| `products` | Product catalog (shared, barcode-keyed) |
| `pantry_items` | User's pantry (quantity, expiry, notes) |
| `recipes` | Recipe corpus |
| `saved_recipes` | User-bookmarked recipes |
| `collections` | Named recipe collections (Paid) |
| `receipts` | Receipt uploads and OCR results |
| `user_preferences` | User settings (dietary, LLM config) |

## Cloud mode

In cloud mode (managed instance at `menagerie.circuitforge.tech/kiwi`), each user gets their own SQLite database isolated under `/devl/kiwi-cloud-data/<user_id>/kiwi.db`. The cloud compose stack adds:

- `CLOUD_MODE=true` environment variable
- Directus JWT validation for session resolution
- Heimdall tier check on AI feature endpoints

The same codebase runs in both local and cloud modes — the cloud session middleware is a thin wrapper around the local auth logic.

## LLM integration

Kiwi uses `circuitforge-core`'s LLM router, which abstracts over Ollama, vLLM, and OpenAI-compatible APIs. The router is configured via environment variables at startup. All LLM calls are asynchronous and non-blocking — if the backend is unavailable, Kiwi falls back to the highest deterministic level (L2) and returns results without waiting.

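The fallback behaviour can be pictured roughly like this (purely illustrative — the real router interface in `circuitforge-core` is not shown in this diff, and the method names below are hypothetical):

```python
# Illustrative only; the methods on llm_backend and engine are hypothetical.
async def suggest_with_fallback(request, llm_backend, engine):
    if llm_backend is not None:
        try:
            return await llm_backend.suggest(request)  # L3/L4 generation path
        except ConnectionError:
            pass  # backend unreachable — fall through to deterministic matching
    return engine.suggest_l2(request)  # highest deterministic level (L2)
```
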
## Privacy

- No PII is logged in production
- Pantry data stays on your machine in self-hosted mode
- Cloud mode: data stored per-user on the Heimdall server, not shared with third parties, not used for training
- LLM calls include pantry context in the prompt — if using a cloud API, that context leaves your machine
- Using a local LLM backend (Ollama, vLLM) keeps all data on-device