feat: cf-orch integration — CFOrchClient for recipe gen + Docuvision OCR fast-path #11
203 changed files with 582 additions and 26857 deletions
44
.cliff.toml
44
.cliff.toml
|
|
@ -1,44 +0,0 @@
|
|||
# git-cliff changelog configuration for Kiwi
|
||||
# See: https://git-cliff.org/docs/configuration
|
||||
|
||||
[changelog]
|
||||
header = """
|
||||
# Changelog\n
|
||||
"""
|
||||
body = """
|
||||
{% if version %}\
|
||||
## [{{ version | trim_start_matches(pat="v") }}] - {{ timestamp | date(format="%Y-%m-%d") }}
|
||||
{% else %}\
|
||||
## [Unreleased]
|
||||
{% endif %}\
|
||||
{% for group, commits in commits | group_by(attribute="group") %}
|
||||
### {{ group | upper_first }}
|
||||
{% for commit in commits %}
|
||||
- {% if commit.scope %}**{{ commit.scope }}:** {% endif %}{{ commit.message | upper_first }}\
|
||||
{% endfor %}
|
||||
{% endfor %}\n
|
||||
"""
|
||||
trim = true
|
||||
|
||||
[git]
|
||||
conventional_commits = true
|
||||
filter_unconventional = true
|
||||
split_commits = false
|
||||
commit_preprocessors = []
|
||||
commit_parsers = [
|
||||
{ message = "^feat", group = "Features" },
|
||||
{ message = "^fix", group = "Bug Fixes" },
|
||||
{ message = "^perf", group = "Performance" },
|
||||
{ message = "^refactor", group = "Refactoring" },
|
||||
{ message = "^docs", group = "Documentation" },
|
||||
{ message = "^test", group = "Testing" },
|
||||
{ message = "^chore", group = "Chores" },
|
||||
{ message = "^ci", group = "CI/CD" },
|
||||
{ message = "^revert", group = "Reverts" },
|
||||
]
|
||||
filter_commits = false
|
||||
tag_pattern = "v[0-9].*"
|
||||
skip_tags = ""
|
||||
ignore_tags = ""
|
||||
topo_order = false
|
||||
sort_commits = "oldest"
|
||||
50
.env.example
50
.env.example
|
|
@ -21,23 +21,6 @@ DATA_DIR=./data
|
|||
# IP this machine advertises to the coordinator (must be reachable from coordinator host)
|
||||
# CF_ORCH_ADVERTISE_HOST=10.1.10.71
|
||||
|
||||
# CF-core hosted coordinator (managed cloud GPU inference — Paid+ tier)
|
||||
# Set CF_ORCH_URL to use a hosted cf-orch coordinator instead of self-hosting.
|
||||
# CF_LICENSE_KEY is read automatically by CFOrchClient for bearer auth.
|
||||
# CF_ORCH_URL=https://orch.circuitforge.tech
|
||||
# CF_LICENSE_KEY=CFG-KIWI-xxxx-xxxx-xxxx
|
||||
|
||||
# LLM backend — env-var auto-config (no llm.yaml needed for bare-metal users)
|
||||
# LLMRouter checks these in priority order:
|
||||
# 1. Anthropic cloud — set ANTHROPIC_API_KEY
|
||||
# 2. OpenAI cloud — set OPENAI_API_KEY
|
||||
# 3. Local Ollama — set OLLAMA_HOST (+ optionally OLLAMA_MODEL)
|
||||
# All three are optional; leave unset to rely on a local llm.yaml instead.
|
||||
# ANTHROPIC_API_KEY=sk-ant-...
|
||||
# OPENAI_API_KEY=sk-...
|
||||
# OLLAMA_HOST=http://localhost:11434
|
||||
# OLLAMA_MODEL=llama3.2
|
||||
|
||||
# Processing
|
||||
USE_GPU=true
|
||||
GPU_MEMORY_LIMIT=6144
|
||||
|
|
@ -51,12 +34,6 @@ ENABLE_OCR=false
|
|||
DEBUG=false
|
||||
CLOUD_MODE=false
|
||||
DEMO_MODE=false
|
||||
# Product identifier reported in cf-orch coordinator analytics for per-app breakdown
|
||||
CF_APP_NAME=kiwi
|
||||
# USE_ORCH_SCHEDULER: use coordinator-aware multi-GPU scheduler instead of local FIFO.
|
||||
# Unset = auto-detect: true if CLOUD_MODE or circuitforge_orch is installed (paid+ local).
|
||||
# Set false to force LocalScheduler even when cf-orch is present.
|
||||
# USE_ORCH_SCHEDULER=false
|
||||
|
||||
# Cloud mode (set in compose.cloud.yml; also set here for reference)
|
||||
# CLOUD_DATA_ROOT=/devl/kiwi-cloud-data
|
||||
|
|
@ -74,30 +51,5 @@ CF_APP_NAME=kiwi
|
|||
# HEIMDALL_URL=https://license.circuitforge.tech
|
||||
# HEIMDALL_ADMIN_TOKEN=
|
||||
|
||||
# Directus JWT (must match cf-directus SECRET env var exactly, including base64 == padding)
|
||||
# Directus JWT (must match cf-directus SECRET env var)
|
||||
# DIRECTUS_JWT_SECRET=
|
||||
|
||||
# E2E test account (Directus — free tier, used by automated tests)
|
||||
# E2E_TEST_EMAIL=e2e@circuitforge.tech
|
||||
# E2E_TEST_PASSWORD=
|
||||
# E2E_TEST_USER_ID=
|
||||
|
||||
# In-app feedback → Forgejo issue creation
|
||||
# FORGEJO_API_TOKEN=
|
||||
# FORGEJO_REPO=Circuit-Forge/kiwi
|
||||
# FORGEJO_API_URL=https://git.opensourcesolarpunk.com/api/v1
|
||||
|
||||
# Affiliate links (optional — plain URLs are shown if unset)
|
||||
# Amazon Associates tag (circuitforge_core.affiliates, retailer="amazon")
|
||||
# AMAZON_ASSOCIATES_TAG=circuitforge-20
|
||||
# Instacart affiliate ID (circuitforge_core.affiliates, retailer="instacart")
|
||||
# INSTACART_AFFILIATE_ID=circuitforge
|
||||
# Walmart Impact network affiliate ID (inline, path-based redirect)
|
||||
# WALMART_AFFILIATE_ID=
|
||||
|
||||
|
||||
# Community PostgreSQL — shared across CF products (cloud only; leave unset for local dev)
|
||||
# Points at cf-orch's cf-community-postgres container (port 5434 on the orch host).
|
||||
# When unset, community write paths fail soft with a plain-language message.
|
||||
# COMMUNITY_DB_URL=postgresql://cf_community:changeme@cf-orch-host:5434/cf_community
|
||||
# COMMUNITY_PSEUDONYM_SALT=change-this-to-a-random-32-char-string
|
||||
|
|
|
|||
|
|
@ -1,62 +0,0 @@
|
|||
# Kiwi CI — lint, type-check, test on PR/push
|
||||
# Full-stack: FastAPI (Python) + Vue 3 SPA (Node)
|
||||
# Adapted from Circuit-Forge/cf-agents workflows/ci.yml (cf-agents#4 tracks the
|
||||
# upstream ci-fullstack.yml variant; update this file when that lands).
|
||||
#
|
||||
# Note: frontend has no test suite yet — CI runs typecheck only.
|
||||
# Add `npm run test` when vitest is wired (kiwi#XX).
|
||||
#
|
||||
# circuitforge-core is not on PyPI — installed from Forgejo git (public repo).
|
||||
|
||||
name: CI
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [main, 'feature/**', 'fix/**']
|
||||
pull_request:
|
||||
branches: [main]
|
||||
|
||||
jobs:
|
||||
backend:
|
||||
name: Backend (Python)
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: '3.11'
|
||||
cache: pip
|
||||
|
||||
- name: Install circuitforge-core
|
||||
run: pip install git+https://git.opensourcesolarpunk.com/Circuit-Forge/circuitforge-core.git@main
|
||||
|
||||
- name: Install dependencies
|
||||
run: pip install -e ".[dev]" || pip install -e . pytest pytest-asyncio httpx ruff
|
||||
|
||||
- name: Lint
|
||||
run: ruff check .
|
||||
|
||||
- name: Test
|
||||
run: pytest tests/ -v --tb=short
|
||||
|
||||
frontend:
|
||||
name: Frontend (Vue)
|
||||
runs-on: ubuntu-latest
|
||||
defaults:
|
||||
run:
|
||||
working-directory: frontend
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: '20'
|
||||
cache: npm
|
||||
cache-dependency-path: frontend/package-lock.json
|
||||
|
||||
- name: Install dependencies
|
||||
run: npm ci
|
||||
|
||||
- name: Type check
|
||||
run: npx vue-tsc --noEmit
|
||||
|
|
@ -1,34 +0,0 @@
|
|||
# Mirror push to GitHub and Codeberg on every push to main or tag.
|
||||
# Copied from Circuit-Forge/cf-agents workflows/mirror.yml
|
||||
# Required secrets: GITHUB_MIRROR_TOKEN, CODEBERG_MIRROR_TOKEN
|
||||
|
||||
name: Mirror
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [main]
|
||||
tags: ['v*']
|
||||
|
||||
jobs:
|
||||
mirror:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Mirror to GitHub
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_MIRROR_TOKEN }}
|
||||
REPO: ${{ github.event.repository.name }}
|
||||
run: |
|
||||
git remote add github "https://x-access-token:${GITHUB_TOKEN}@github.com/CircuitForgeLLC/${REPO}.git"
|
||||
git push github --mirror
|
||||
|
||||
- name: Mirror to Codeberg
|
||||
env:
|
||||
CODEBERG_TOKEN: ${{ secrets.CODEBERG_MIRROR_TOKEN }}
|
||||
REPO: ${{ github.event.repository.name }}
|
||||
run: |
|
||||
git remote add codeberg "https://CircuitForge:${CODEBERG_TOKEN}@codeberg.org/CircuitForge/${REPO}.git"
|
||||
git push codeberg --mirror
|
||||
|
|
@ -1,71 +0,0 @@
|
|||
# Tag-triggered release workflow.
|
||||
# Generates changelog and creates Forgejo release on v* tags.
|
||||
# Copied from Circuit-Forge/cf-agents workflows/release.yml
|
||||
#
|
||||
# Docker push is intentionally disabled — BSL 1.1 registry policy not yet resolved.
|
||||
# Tracked in Circuit-Forge/cf-agents#3. Re-enable the Docker steps when that lands.
|
||||
#
|
||||
# Required secrets: FORGEJO_RELEASE_TOKEN
|
||||
# (GHCR_TOKEN not needed until Docker push is enabled)
|
||||
|
||||
name: Release
|
||||
|
||||
on:
|
||||
push:
|
||||
tags: ['v*']
|
||||
|
||||
jobs:
|
||||
release:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
# ── Changelog ────────────────────────────────────────────────────────────
|
||||
- name: Generate changelog
|
||||
uses: orhun/git-cliff-action@v3
|
||||
id: cliff
|
||||
with:
|
||||
config: .cliff.toml
|
||||
args: --latest --strip header
|
||||
env:
|
||||
OUTPUT: CHANGES.md
|
||||
|
||||
# ── Docker (disabled — BSL registry policy pending cf-agents#3) ──────────
|
||||
# - name: Set up QEMU
|
||||
# uses: docker/setup-qemu-action@v3
|
||||
# - name: Set up Buildx
|
||||
# uses: docker/setup-buildx-action@v3
|
||||
# - name: Log in to GHCR
|
||||
# uses: docker/login-action@v3
|
||||
# with:
|
||||
# registry: ghcr.io
|
||||
# username: ${{ github.actor }}
|
||||
# password: ${{ secrets.GHCR_TOKEN }}
|
||||
# - name: Build and push Docker image
|
||||
# uses: docker/build-push-action@v6
|
||||
# with:
|
||||
# context: .
|
||||
# push: true
|
||||
# platforms: linux/amd64,linux/arm64
|
||||
# tags: |
|
||||
# ghcr.io/circuitforgellc/kiwi:${{ github.ref_name }}
|
||||
# ghcr.io/circuitforgellc/kiwi:latest
|
||||
# cache-from: type=gha
|
||||
# cache-to: type=gha,mode=max
|
||||
|
||||
# ── Forgejo Release ───────────────────────────────────────────────────────
|
||||
- name: Create Forgejo release
|
||||
env:
|
||||
FORGEJO_TOKEN: ${{ secrets.FORGEJO_RELEASE_TOKEN }}
|
||||
REPO: ${{ github.event.repository.name }}
|
||||
TAG: ${{ github.ref_name }}
|
||||
NOTES: ${{ steps.cliff.outputs.content }}
|
||||
run: |
|
||||
curl -sS -X POST \
|
||||
"https://git.opensourcesolarpunk.com/api/v1/repos/Circuit-Forge/${REPO}/releases" \
|
||||
-H "Authorization: token ${FORGEJO_TOKEN}" \
|
||||
-H "Content-Type: application/json" \
|
||||
-d "$(jq -n --arg tag "$TAG" --arg body "$NOTES" \
|
||||
'{tag_name: $tag, name: $tag, body: $body}')"
|
||||
59
.github/workflows/ci.yml
vendored
59
.github/workflows/ci.yml
vendored
|
|
@ -1,59 +0,0 @@
|
|||
# Kiwi CI — runs on GitHub mirror for public credibility badge.
|
||||
# Forgejo (.forgejo/workflows/ci.yml) is the canonical CI — keep these in sync.
|
||||
# No Forgejo-specific secrets used here; circuitforge-core is public on Forgejo.
|
||||
#
|
||||
# Note: frontend has no test suite yet — CI runs typecheck only.
|
||||
# Add 'npm run test' when vitest is wired.
|
||||
|
||||
name: CI
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [main]
|
||||
pull_request:
|
||||
branches: [main]
|
||||
|
||||
jobs:
|
||||
backend:
|
||||
name: Backend (Python)
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: '3.11'
|
||||
cache: pip
|
||||
|
||||
- name: Install circuitforge-core
|
||||
run: pip install git+https://git.opensourcesolarpunk.com/Circuit-Forge/circuitforge-core.git@main
|
||||
|
||||
- name: Install dependencies
|
||||
run: pip install -e . pytest pytest-asyncio httpx ruff
|
||||
|
||||
- name: Lint
|
||||
run: ruff check .
|
||||
|
||||
- name: Test
|
||||
run: pytest tests/ -v --tb=short
|
||||
|
||||
frontend:
|
||||
name: Frontend (Vue)
|
||||
runs-on: ubuntu-latest
|
||||
defaults:
|
||||
run:
|
||||
working-directory: frontend
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: '20'
|
||||
cache: npm
|
||||
cache-dependency-path: frontend/package-lock.json
|
||||
|
||||
- name: Install dependencies
|
||||
run: npm ci
|
||||
|
||||
- name: Type check
|
||||
run: npx vue-tsc --noEmit
|
||||
6
.gitignore
vendored
6
.gitignore
vendored
|
|
@ -1,7 +1,4 @@
|
|||
|
||||
# CLAUDE.md — gitignored per BSL 1.1 commercial policy
|
||||
CLAUDE.md
|
||||
|
||||
# Superpowers brainstorming artifacts
|
||||
.superpowers/
|
||||
|
||||
|
|
@ -25,6 +22,3 @@ data/
|
|||
|
||||
# Test artifacts (MagicMock sqlite files from pytest)
|
||||
<MagicMock*
|
||||
|
||||
# Playwright / debug screenshots
|
||||
debug-screenshots/
|
||||
|
|
|
|||
|
|
@ -1,34 +0,0 @@
|
|||
# Kiwi gitleaks config — extends base CircuitForge config with local rules
|
||||
|
||||
[extend]
|
||||
path = "/Library/Development/CircuitForge/circuitforge-hooks/gitleaks.toml"
|
||||
|
||||
# ── Global allowlist ──────────────────────────────────────────────────────────
|
||||
# Amazon grocery department IDs (rh=n:<10-digit>) false-positive as phone
|
||||
# numbers. locale_config.py is a static lookup table with no secrets.
|
||||
|
||||
[allowlist]
|
||||
# Amazon grocery dept IDs (rh=n:<digits>) false-positive as phone numbers.
|
||||
regexes = [
|
||||
'''rh=n:\d{8,12}''',
|
||||
]
|
||||
|
||||
# ── Test fixture allowlists ───────────────────────────────────────────────────
|
||||
|
||||
[[rules]]
|
||||
id = "cf-generic-env-token"
|
||||
description = "Generic KEY=<token> in env-style assignment — catches FORGEJO_API_TOKEN=hex etc."
|
||||
regex = '''(?i)(token|secret|key|password|passwd|pwd|api_key)\s*[=:]\s*['"]?[A-Za-z0-9\-_]{20,}['"]?'''
|
||||
[rules.allowlist]
|
||||
paths = [
|
||||
'.*test.*',
|
||||
]
|
||||
regexes = [
|
||||
'api_key:\s*ollama',
|
||||
'api_key:\s*any',
|
||||
'your-[a-z\-]+-here',
|
||||
'replace-with-',
|
||||
'xxxx',
|
||||
'test-fixture-',
|
||||
'CFG-KIWI-TEST-',
|
||||
]
|
||||
|
|
@ -1,7 +0,0 @@
|
|||
# Findings suppressed here are historical false positives or already-rotated secrets.
|
||||
# .env was accidentally included in the initial commit; it is now gitignored.
|
||||
# Rotate DIRECTUS_JWT_SECRET if it has not been changed since 2026-03-30.
|
||||
|
||||
# c166e5216 (chore: initial commit) — .env included by mistake
|
||||
c166e5216af532a08112ef87e8542cd51c184115:.env:generic-api-key:25
|
||||
c166e5216af532a08112ef87e8542cd51c184115:.env:cf-generic-env-token:25
|
||||
12
Dockerfile
12
Dockerfile
|
|
@ -11,23 +11,13 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
|
|||
COPY circuitforge-core/ ./circuitforge-core/
|
||||
RUN conda run -n base pip install --no-cache-dir -e ./circuitforge-core
|
||||
|
||||
# Install circuitforge-orch — needed for the cf-orch-agent sidecar (compose.override.yml)
|
||||
COPY circuitforge-orch/ ./circuitforge-orch/
|
||||
|
||||
# Create kiwi conda env and install app
|
||||
COPY kiwi/environment.yml .
|
||||
RUN conda env create -f environment.yml
|
||||
|
||||
COPY kiwi/ ./kiwi/
|
||||
|
||||
# Remove gitignored config files that may exist locally — defense-in-depth.
|
||||
# The parent .dockerignore should exclude these, but an explicit rm guarantees
|
||||
# they never end up in the cloud image regardless of .dockerignore placement.
|
||||
RUN rm -f /app/kiwi/.env
|
||||
|
||||
# Install cf-core and cf-orch into the kiwi env BEFORE installing kiwi
|
||||
# Install cf-core into the kiwi env BEFORE installing kiwi (kiwi lists it as a dep)
|
||||
RUN conda run -n kiwi pip install --no-cache-dir -e /app/circuitforge-core
|
||||
RUN conda run -n kiwi pip install --no-cache-dir -e /app/circuitforge-orch
|
||||
WORKDIR /app/kiwi
|
||||
RUN conda run -n kiwi pip install --no-cache-dir -e .
|
||||
|
||||
|
|
|
|||
28
LICENSE-BSL
28
LICENSE-BSL
|
|
@ -1,28 +0,0 @@
|
|||
Business Source License 1.1
|
||||
|
||||
Licensor: Circuit Forge LLC
|
||||
Licensed Work: Kiwi — Pantry tracking and leftover recipe suggestions
|
||||
Copyright (c) 2026 Circuit Forge LLC
|
||||
Additional Use Grant: You may use the Licensed Work for personal,
|
||||
non-commercial pantry tracking and recipe suggestion
|
||||
purposes only.
|
||||
Change Date: 2030-01-01
|
||||
Change License: MIT License
|
||||
|
||||
For the full Business Source License 1.1 text, see:
|
||||
https://mariadb.com/bsl11/
|
||||
|
||||
---
|
||||
|
||||
This license applies to the following components of Kiwi:
|
||||
|
||||
- app/services/recipe/recipe_engine.py
|
||||
- app/services/recipe/assembly_recipes.py
|
||||
- app/services/recipe/llm_recipe.py
|
||||
- app/services/expiration_predictor.py
|
||||
- app/tasks/scheduler.py
|
||||
- app/tasks/runner.py
|
||||
- app/tiers.py
|
||||
- app/cloud_session.py
|
||||
- frontend/src/components/RecipesView.vue
|
||||
- frontend/src/stores/recipes.ts
|
||||
34
LICENSE-MIT
34
LICENSE-MIT
|
|
@ -1,34 +0,0 @@
|
|||
MIT License
|
||||
|
||||
Copyright (c) 2026 Circuit Forge LLC
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
|
||||
---
|
||||
|
||||
This license applies to the following components of Kiwi:
|
||||
|
||||
- app/api/endpoints/inventory.py
|
||||
- app/api/endpoints/ocr.py
|
||||
- app/db/store.py
|
||||
- app/db/migrations/
|
||||
- app/core/config.py
|
||||
- scripts/pipeline/
|
||||
- scripts/download_datasets.py
|
||||
- scripts/backfill_texture_profiles.py
|
||||
26
README.md
26
README.md
|
|
@ -6,11 +6,7 @@
|
|||
|
||||
Scan barcodes, photograph receipts, and get recipe ideas based on what you already have — before it expires.
|
||||
|
||||
**LLM support is optional.** Inventory tracking, barcode scanning, expiry alerts, CSV export, and receipt upload all work without any LLM configured. AI features (receipt OCR, recipe suggestions, meal planning) activate when a backend is available and are BYOK-unlockable at any tier.
|
||||
|
||||
**Status:** Beta · CircuitForge LLC
|
||||
|
||||
**[Documentation](https://docs.circuitforge.tech/kiwi/)** · [circuitforge.tech](https://circuitforge.tech)
|
||||
**Status:** Pre-alpha · CircuitForge LLC
|
||||
|
||||
---
|
||||
|
||||
|
|
@ -18,14 +14,9 @@ Scan barcodes, photograph receipts, and get recipe ideas based on what you alrea
|
|||
|
||||
- **Inventory tracking** — add items by barcode scan, receipt upload, or manually
|
||||
- **Expiry alerts** — know what's about to go bad
|
||||
- **Recipe browser** — browse the full recipe corpus by cuisine, meal type, dietary preference, or main ingredient; pantry match percentage shown inline (Free)
|
||||
- **Saved recipes** — bookmark any recipe with notes, a 0–5 star rating, and free-text style tags (Free); organize into named collections (Paid)
|
||||
- **Receipt OCR** — extract line items from receipt photos automatically (Paid tier, BYOK-unlockable)
|
||||
- **Recipe suggestions** — four levels from pantry-match to full LLM generation (Paid tier, BYOK-unlockable)
|
||||
- **Style auto-classifier** — LLM suggests style tags (comforting, hands-off, quick, etc.) for saved recipes (Paid tier, BYOK-unlockable)
|
||||
- **Leftover mode** — prioritize nearly-expired items in recipe ranking (Free, 5/day; unlimited at Paid+)
|
||||
- **LLM backend config** — configure inference via `circuitforge-core` env-var system; BYOK unlocks Paid AI features at any tier
|
||||
- **Feedback FAB** — in-app feedback button; status probed on load, hidden if CF feedback endpoint unreachable
|
||||
- **Receipt OCR** — extract line items from receipt photos automatically (Paid tier)
|
||||
- **Recipe suggestions** — LLM-powered ideas based on what's expiring (Paid tier, BYOK-unlockable)
|
||||
- **Leftover mode** — prioritize nearly-expired items in recipe ranking (Premium tier)
|
||||
|
||||
## Stack
|
||||
|
||||
|
|
@ -61,16 +52,11 @@ cp .env.example .env
|
|||
| Receipt upload | ✓ | ✓ | ✓ |
|
||||
| Expiry alerts | ✓ | ✓ | ✓ |
|
||||
| CSV export | ✓ | ✓ | ✓ |
|
||||
| Recipe browser (domain/category) | ✓ | ✓ | ✓ |
|
||||
| Save recipes + notes + star rating | ✓ | ✓ | ✓ |
|
||||
| Style tags (manual, free-text) | ✓ | ✓ | ✓ |
|
||||
| Receipt OCR | BYOK | ✓ | ✓ |
|
||||
| Recipe suggestions (L1–L4) | BYOK | ✓ | ✓ |
|
||||
| Named recipe collections | — | ✓ | ✓ |
|
||||
| LLM style auto-classifier | — | BYOK | ✓ |
|
||||
| Recipe suggestions | BYOK | ✓ | ✓ |
|
||||
| Meal planning | — | ✓ | ✓ |
|
||||
| Multi-household | — | — | ✓ |
|
||||
| Leftover mode (5/day) | ✓ | ✓ | ✓ |
|
||||
| Leftover mode | — | — | ✓ |
|
||||
|
||||
BYOK = bring your own LLM backend (configure `~/.config/circuitforge/llm.yaml`)
|
||||
|
||||
|
|
|
|||
|
|
@ -3,5 +3,5 @@
|
|||
Kiwi: Pantry tracking and leftover recipe suggestions.
|
||||
"""
|
||||
|
||||
__version__ = "0.2.0"
|
||||
__version__ = "0.1.0"
|
||||
__author__ = "Alan 'pyr0ball' Weinstock"
|
||||
|
|
@ -1,358 +0,0 @@
|
|||
# app/api/endpoints/community.py
|
||||
# MIT License
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
import re
|
||||
import sqlite3
|
||||
from datetime import datetime, timezone
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Request, Response
|
||||
|
||||
from app.cloud_session import CloudUser, get_session
|
||||
from app.core.config import settings
|
||||
from app.db.store import Store
|
||||
from app.services.community.feed import posts_to_rss
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
router = APIRouter(prefix="/community", tags=["community"])
|
||||
|
||||
_community_store = None
|
||||
|
||||
|
||||
def _get_community_store():
|
||||
return _community_store
|
||||
|
||||
|
||||
def init_community_store(community_db_url: str | None) -> None:
    """Initialize the module-level community store from a database DSN.

    A falsy ``community_db_url`` leaves the store as None (write features
    disabled) and logs an informational message; browsing still works via
    the cloud feed.  Otherwise migrations are run before the store is
    published to the module global.
    """
    global _community_store

    if not community_db_url:
        logger.info(
            "COMMUNITY_DB_URL not set — community write features disabled. "
            "Browse still works via cloud feed."
        )
        return

    # Imported lazily: instances without community support never pay the
    # import cost for these optional dependencies.
    from circuitforge_core.community import CommunityDB
    from app.services.community.community_store import KiwiCommunityStore

    database = CommunityDB(dsn=community_db_url)
    database.run_migrations()  # bring schema up to date before first use
    _community_store = KiwiCommunityStore(database)
    logger.info("Community store initialized.")
|
||||
|
||||
|
||||
def _visible(post, session=None) -> bool:
|
||||
"""Return False for premium-tier posts when the session is not paid/premium."""
|
||||
tier = getattr(post, "tier", None)
|
||||
if tier == "premium":
|
||||
if session is None or getattr(session, "tier", None) not in ("paid", "premium"):
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
@router.get("/posts")
async def list_posts(
    post_type: str | None = None,
    dietary_tags: str | None = None,
    allergen_exclude: str | None = None,
    page: int = 1,
    page_size: int = 20,
):
    """List community posts with pagination and optional filters.

    ``dietary_tags`` and ``allergen_exclude`` are comma-separated strings,
    split into lists before hitting the store.  Premium-gated posts are
    filtered out (no session here, so only non-premium posts pass).  When
    no community store is configured an empty page plus a note is returned
    instead of an error.
    """
    store = _get_community_store()
    if store is None:
        return {
            "posts": [],
            "total": 0,
            "page": page,
            "page_size": page_size,
            "note": "Community DB not available on this instance.",
        }

    def _split_csv(raw: str | None) -> list[str] | None:
        # None/empty stays None so the store skips the filter entirely.
        return [part.strip() for part in raw.split(",")] if raw else None

    effective_size = min(page_size, 100)  # hard cap to protect the DB
    rows = await asyncio.to_thread(
        store.list_posts,
        limit=effective_size,
        offset=(page - 1) * effective_size,
        post_type=post_type,
        dietary_tags=_split_csv(dietary_tags),
        allergen_exclude=_split_csv(allergen_exclude),
    )
    visible = [_post_to_dict(row) for row in rows if _visible(row)]
    return {"posts": visible, "total": len(visible), "page": page, "page_size": page_size}
|
||||
|
||||
|
||||
@router.get("/posts/{slug}")
async def get_post(slug: str, request: Request):
    """Fetch a single post by slug, content-negotiated.

    Clients sending an ActivityPub Accept header (``activity+json`` or
    ``ld+json``) get a JSON-LD rendering; everyone else gets the plain
    dict.  Raises 503 when the community DB is absent, 404 for an
    unknown slug.
    """
    store = _get_community_store()
    if store is None:
        raise HTTPException(status_code=503, detail="Community DB not available on this instance.")

    post = await asyncio.to_thread(store.get_post_by_slug, slug)
    if post is None:
        raise HTTPException(status_code=404, detail="Post not found.")

    accept_header = request.headers.get("accept", "")
    wants_activitypub = any(
        media in accept_header
        for media in ("application/activity+json", "application/ld+json")
    )
    if wants_activitypub:
        # Lazy import: only federation clients ever need the AP renderer.
        from app.services.community.ap_compat import post_to_ap_json_ld
        return post_to_ap_json_ld(
            _post_to_dict(post),
            base_url=str(request.base_url).rstrip("/"),
        )

    return _post_to_dict(post)
|
||||
|
||||
|
||||
@router.get("/feed.rss")
async def get_rss_feed(request: Request):
    """Render the 50 newest community posts as an RSS feed.

    Degrades gracefully: when no community store is configured the feed
    is simply empty rather than an error response.
    """
    store = _get_community_store()
    items: list[dict] = []
    if store is not None:
        fetched = await asyncio.to_thread(store.list_posts, limit=50)
        items = [_post_to_dict(post) for post in fetched]

    rss = posts_to_rss(items, base_url=str(request.base_url).rstrip("/"))
    return Response(content=rss, media_type="application/rss+xml; charset=utf-8")
|
||||
|
||||
|
||||
@router.get("/local-feed")
async def local_feed():
    """Return up to 50 recent posts as plain dicts; empty list when no store."""
    store = _get_community_store()
    if store is None:
        return []
    recent = await asyncio.to_thread(store.list_posts, limit=50)
    return [_post_to_dict(post) for post in recent]
|
||||
|
||||
|
||||
@router.get("/hall-of-chaos")
async def hall_of_chaos():
    """Hidden easter egg endpoint -- returns the 10 most chaotic bloopers."""
    store = _get_community_store()
    if store is None:
        return {"posts": [], "chaos_level": 0}

    bloopers = await asyncio.to_thread(
        store.list_posts, limit=10, post_type="recipe_blooper"
    )
    # chaos_level is just the count of bloopers actually returned.
    return {
        "posts": [_post_to_dict(blooper) for blooper in bloopers],
        "chaos_level": len(bloopers),
    }
|
||||
|
||||
|
||||
# Validation limits shared by _validate_publish_body() and publish_post().
_VALID_POST_TYPES = {"plan", "recipe_success", "recipe_blooper"}
_MAX_TITLE_LEN = 200  # characters; titles are also truncated to this on insert
_MAX_TEXT_LEN = 2000  # characters; applies to description/outcome_notes/recipe_name
|
||||
|
||||
|
||||
def _validate_publish_body(body: dict) -> None:
    """Raise HTTPException(422) for any invalid fields in a publish request."""

    def _reject(detail: str) -> None:
        raise HTTPException(status_code=422, detail=detail)

    if body.get("post_type", "plan") not in _VALID_POST_TYPES:
        _reject(f"post_type must be one of: {', '.join(sorted(_VALID_POST_TYPES))}")

    if len(body.get("title") or "") > _MAX_TITLE_LEN:
        _reject(f"title exceeds {_MAX_TITLE_LEN} character limit.")

    # All free-text fields share a single length cap.
    for field in ("description", "outcome_notes", "recipe_name"):
        value = body.get(field)
        if value and len(str(value)) > _MAX_TEXT_LEN:
            _reject(f"{field} exceeds {_MAX_TEXT_LEN} character limit.")

    # Only https photo URLs are accepted.
    photo_url = body.get("photo_url")
    if photo_url and not str(photo_url).startswith("https://"):
        _reject("photo_url must be an https:// URL.")
|
||||
|
||||
|
||||
@router.post("/posts", status_code=201)
async def publish_post(body: dict, session: CloudUser = Depends(get_session)):
    """Publish a community post on behalf of the authenticated user.

    Pipeline (order matters — each step gates the next):
      1. Tier gate: 402 unless the user's tier/BYOK allows publishing.
      2. Field validation: 422 on bad post_type/lengths/photo_url.
      3. Store availability: 503 when no community DB is configured.
      4. Pseudonym: fetched or created from the user's local Store;
         a ValueError (e.g. name rejected) surfaces as 422.
      5. Element snapshot: flavor/nutrition scores computed from the
         recipe ids referenced by the post's slots.
      6. Slug: deterministic "kiwi-<type>-<pseudonym>-<date>-<title>"
         string, truncated to 120 chars.
      7. Insert: duplicate slug (same title, same day) surfaces as 409.

    Returns the inserted post as a plain dict.
    """
    # 1. Tier gate — imported lazily to avoid a module-load cycle.
    from app.tiers import can_use
    if not can_use("community_publish", session.tier, session.has_byok):
        raise HTTPException(status_code=402, detail="Community publishing requires Paid tier.")

    # 2. Field validation (raises 422 on failure).
    _validate_publish_body(body)

    # 3. Store availability.
    store = _get_community_store()
    if store is None:
        raise HTTPException(
            status_code=503,
            detail="This Kiwi instance is not connected to a community database. "
            "Publishing is only available on cloud instances.",
        )

    # 4. Pseudonym lookup/creation runs against the user's local Store on a
    #    worker thread; the Store is always closed, even on error.
    from app.services.community.community_store import get_or_create_pseudonym
    def _get_pseudonym():
        s = Store(session.db)
        try:
            return get_or_create_pseudonym(
                store=s,
                directus_user_id=session.user_id,
                requested_name=body.get("pseudonym_name"),
            )
        finally:
            s.close()
    try:
        pseudonym = await asyncio.to_thread(_get_pseudonym)
    except ValueError as exc:
        # e.g. requested pseudonym rejected — surface as a validation error.
        raise HTTPException(status_code=422, detail=str(exc)) from exc

    # 5. Compute the element snapshot from the recipes referenced by slots.
    #    Slots without a recipe_id are skipped.
    recipe_ids = [slot["recipe_id"] for slot in body.get("slots", []) if slot.get("recipe_id")]
    from app.services.community.element_snapshot import compute_snapshot
    def _snapshot():
        s = Store(session.db)
        try:
            return compute_snapshot(recipe_ids=recipe_ids, store=s)
        finally:
            s.close()
    snapshot = await asyncio.to_thread(_snapshot)

    # 6. Build a slug that is unique per (pseudonym, day, title); kept under
    #    120 chars.  Non-alphanumerics in the title collapse to hyphens.
    post_type = body.get("post_type", "plan")
    slug_title = re.sub(r"[^a-z0-9]+", "-", (body.get("title") or "plan").lower()).strip("-")
    today = datetime.now(timezone.utc).strftime("%Y-%m-%d")
    slug = f"kiwi-{_post_type_prefix(post_type)}-{pseudonym.lower().replace(' ', '')}-{today}-{slug_title}"[:120]

    # Assemble the post record; snapshot fields are copied over verbatim.
    from circuitforge_core.community.models import CommunityPost
    post = CommunityPost(
        slug=slug,
        pseudonym=pseudonym,
        post_type=post_type,
        published=datetime.now(timezone.utc),
        title=(body.get("title") or "Untitled")[:_MAX_TITLE_LEN],
        description=body.get("description"),
        photo_url=body.get("photo_url"),
        slots=body.get("slots", []),
        recipe_id=body.get("recipe_id"),
        recipe_name=body.get("recipe_name"),
        level=body.get("level"),
        outcome_notes=body.get("outcome_notes"),
        seasoning_score=snapshot.seasoning_score,
        richness_score=snapshot.richness_score,
        brightness_score=snapshot.brightness_score,
        depth_score=snapshot.depth_score,
        aroma_score=snapshot.aroma_score,
        structure_score=snapshot.structure_score,
        texture_profile=snapshot.texture_profile,
        dietary_tags=list(snapshot.dietary_tags),
        allergen_flags=list(snapshot.allergen_flags),
        flavor_molecules=list(snapshot.flavor_molecules),
        fat_pct=snapshot.fat_pct,
        protein_pct=snapshot.protein_pct,
        moisture_pct=snapshot.moisture_pct,
    )

    # 7. Insert; a duplicate slug means same title/pseudonym/day → 409.
    #    NOTE(review): only sqlite3.IntegrityError is caught here, but the
    #    community DB DSN looks PostgreSQL-backed — confirm the store
    #    translates its backend's integrity errors to sqlite3.IntegrityError.
    try:
        inserted = await asyncio.to_thread(store.insert_post, post)
    except sqlite3.IntegrityError as exc:
        raise HTTPException(
            status_code=409,
            detail="A post with this title already exists today. Try a different title.",
        ) from exc
    return _post_to_dict(inserted)
|
||||
|
||||
|
||||
@router.delete("/posts/{slug}", status_code=204)
async def delete_post(slug: str, session: CloudUser = Depends(get_session)):
    """Delete one of the caller's own posts, matched by pseudonym.

    Raises 503 without a community store, 400 when the user has no
    pseudonym, and 404 when the slug is unknown or belongs to a
    different author.
    """
    store = _get_community_store()
    if store is None:
        raise HTTPException(status_code=503, detail="Community DB not available.")

    def _lookup_pseudonym():
        # Per-call Store; closed even if the lookup raises.
        local_store = Store(session.db)
        try:
            return local_store.get_current_pseudonym(session.user_id)
        finally:
            local_store.close()

    pseudonym = await asyncio.to_thread(_lookup_pseudonym)
    if not pseudonym:
        raise HTTPException(status_code=400, detail="No pseudonym set. Cannot delete posts.")

    # Ownership is enforced by the store: deletion only happens when the
    # pseudonym matches the post's author.
    removed = await asyncio.to_thread(store.delete_post, slug=slug, pseudonym=pseudonym)
    if not removed:
        raise HTTPException(status_code=404, detail="Post not found or you are not the author.")
|
||||
|
||||
|
||||
@router.post("/posts/{slug}/fork", status_code=201)
async def fork_post(slug: str, session: CloudUser = Depends(get_session)):
    """Fork a shared plan post into a fresh meal plan for the caller's current week."""
    community = _get_community_store()
    if community is None:
        raise HTTPException(status_code=503, detail="Community DB not available.")

    post = await asyncio.to_thread(community.get_post_by_slug, slug)
    if post is None:
        raise HTTPException(status_code=404, detail="Post not found.")
    if post.post_type != "plan":
        raise HTTPException(status_code=400, detail="Only plan posts can be forked as a meal plan.")

    # Every slot must carry the keys needed to reassemble the plan.
    needed = {"day", "meal_type", "recipe_id"}
    if not all(needed.issubset(slot) for slot in post.slots):
        raise HTTPException(status_code=400, detail="Post contains malformed slots and cannot be forked.")

    from datetime import date
    week_start = date.today().strftime("%Y-%m-%d")

    def _materialize_plan():
        # Build the plan and fill its slots in one store session.
        user_store = Store(session.db)
        try:
            distinct_meal_types = list({slot["meal_type"] for slot in post.slots})
            plan = user_store.create_meal_plan(
                week_start=week_start,
                meal_types=distinct_meal_types or ["dinner"],
            )
            for slot in post.slots:
                user_store.assign_recipe_to_slot(
                    plan_id=plan["id"],
                    day_of_week=slot["day"],
                    meal_type=slot["meal_type"],
                    recipe_id=slot["recipe_id"],
                )
            return plan
        finally:
            user_store.close()

    plan = await asyncio.to_thread(_materialize_plan)
    return {"plan_id": plan["id"], "week_start": plan["week_start"], "forked_from": slug}
|
||||
|
||||
|
||||
@router.post("/posts/{slug}/fork-adapt", status_code=201)
async def fork_adapt_post(slug: str, session: CloudUser = Depends(get_session)):
    """Fork a post with LLM-driven adaptation (tier-gated; not yet implemented)."""
    from app.tiers import can_use

    allowed = can_use("community_fork_adapt", session.tier, session.has_byok)
    if not allowed:
        raise HTTPException(status_code=402, detail="Fork with adaptation requires Paid tier or BYOK.")
    # Tier gate passed, but the full LLM adaptation pipeline is still a stub.
    raise HTTPException(status_code=501, detail="Fork-adapt not yet implemented.")
|
||||
|
||||
|
||||
def _post_to_dict(post) -> dict:
|
||||
return {
|
||||
"slug": post.slug,
|
||||
"pseudonym": post.pseudonym,
|
||||
"post_type": post.post_type,
|
||||
"published": post.published.isoformat() if hasattr(post.published, "isoformat") else str(post.published),
|
||||
"title": post.title,
|
||||
"description": post.description,
|
||||
"photo_url": post.photo_url,
|
||||
"slots": list(post.slots),
|
||||
"recipe_id": post.recipe_id,
|
||||
"recipe_name": post.recipe_name,
|
||||
"level": post.level,
|
||||
"outcome_notes": post.outcome_notes,
|
||||
"element_profiles": {
|
||||
"seasoning_score": post.seasoning_score,
|
||||
"richness_score": post.richness_score,
|
||||
"brightness_score": post.brightness_score,
|
||||
"depth_score": post.depth_score,
|
||||
"aroma_score": post.aroma_score,
|
||||
"structure_score": post.structure_score,
|
||||
"texture_profile": post.texture_profile,
|
||||
},
|
||||
"dietary_tags": list(post.dietary_tags),
|
||||
"allergen_flags": list(post.allergen_flags),
|
||||
"flavor_molecules": list(post.flavor_molecules),
|
||||
"fat_pct": post.fat_pct,
|
||||
"protein_pct": post.protein_pct,
|
||||
"moisture_pct": post.moisture_pct,
|
||||
}
|
||||
|
||||
|
||||
def _post_type_prefix(post_type: str) -> str:
|
||||
return {"plan": "plan", "recipe_success": "success", "recipe_blooper": "blooper"}.get(post_type, "post")
|
||||
|
|
@ -1,11 +1,9 @@
|
|||
"""Export endpoints — CSV and JSON export of user data."""
|
||||
"""Export endpoints — CSV/Excel of receipt and inventory data."""
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import csv
|
||||
import io
|
||||
import json
|
||||
from datetime import datetime, timezone
|
||||
|
||||
from fastapi import APIRouter, Depends
|
||||
from fastapi.responses import StreamingResponse
|
||||
|
|
@ -47,33 +45,3 @@ async def export_inventory_csv(store: Store = Depends(get_store)):
|
|||
media_type="text/csv",
|
||||
headers={"Content-Disposition": "attachment; filename=inventory.csv"},
|
||||
)
|
||||
|
||||
|
||||
@router.get("/json")
async def export_full_json(store: Store = Depends(get_store)):
    """Export full pantry inventory + saved recipes as a single JSON file.

    Intended for data portability — users can import this into another
    Kiwi instance or keep it as an offline backup.

    Returns a streamed ``application/json`` attachment named
    ``kiwi-export-YYYYMMDD.json``.
    """
    # Both reads are independent SQLite queries; run them concurrently off-loop.
    inventory, saved = await asyncio.gather(
        asyncio.to_thread(store.list_inventory),
        asyncio.to_thread(store.get_saved_recipes),
    )

    export_doc = {
        "kiwi_export": {
            "version": "1.0",
            "exported_at": datetime.now(timezone.utc).isoformat(),
            "inventory": [dict(row) for row in inventory],
            "saved_recipes": [dict(row) for row in saved],
        }
    }

    # default=str stringifies dates/decimals that json can't serialize natively.
    body = json.dumps(export_doc, default=str, indent=2)
    filename = f"kiwi-export-{datetime.now(timezone.utc).strftime('%Y%m%d')}.json"
    return StreamingResponse(
        iter([body]),
        media_type="application/json",
        # Bug fix: the computed `filename` was previously unused and a literal
        # placeholder string was sent in the Content-Disposition header.
        headers={"Content-Disposition": f"attachment; filename={filename}"},
    )
|
||||
|
|
|
|||
|
|
@ -1,9 +0,0 @@
|
|||
"""Feedback router — provided by circuitforge-core."""
from circuitforge_core.api import make_feedback_router

from app.core.config import settings

# Shared cf-core feedback router, parameterized for this product/repo.
# demo_mode_fn is passed as a callable, presumably so DEMO_MODE is read
# lazily rather than captured at import time — confirm against cf-core.
router = make_feedback_router(
    repo="Circuit-Forge/kiwi",
    product="kiwi",
    demo_mode_fn=lambda: settings.DEMO_MODE,
)
|
||||
|
|
@ -1,103 +0,0 @@
|
|||
"""Screenshot attachment endpoint for in-app feedback.
|
||||
|
||||
After the cf-core feedback router creates a Forgejo issue, the frontend
|
||||
can call POST /feedback/attach to upload a screenshot and pin it as a
|
||||
comment on that issue.
|
||||
|
||||
The endpoint is separate from the cf-core router so Kiwi owns it
|
||||
without modifying shared infrastructure.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import base64
|
||||
import os
|
||||
|
||||
import requests
|
||||
from fastapi import APIRouter, HTTPException
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
router = APIRouter()

# Forgejo API root; overridable via env for self-hosted/dev instances.
_FORGEJO_BASE = os.environ.get(
    "FORGEJO_API_URL", "https://git.opensourcesolarpunk.com/api/v1"
)
# Repository whose issues receive feedback screenshots.
_REPO = "Circuit-Forge/kiwi"
# Upper bound on the *decoded* screenshot size.
_MAX_BYTES = 5 * 1024 * 1024  # 5 MB
|
||||
|
||||
|
||||
class AttachRequest(BaseModel):
    """Request body for attaching a screenshot to an existing feedback issue."""
    # Forgejo issue number the screenshot belongs to.
    issue_number: int
    # Display filename for the uploaded asset; length-capped.
    filename: str = Field(default="screenshot.png", max_length=80)
    image_b64: str  # data URI or raw base64
|
||||
|
||||
|
||||
class AttachResponse(BaseModel):
    """Response carrying the URL of the comment that embeds the screenshot."""
    comment_url: str
|
||||
|
||||
|
||||
def _forgejo_headers() -> dict[str, str]:
|
||||
token = os.environ.get("FORGEJO_API_TOKEN", "")
|
||||
return {"Authorization": f"token {token}"}
|
||||
|
||||
|
||||
def _decode_image(image_b64: str) -> tuple[bytes, str]:
|
||||
"""Return (raw_bytes, mime_type) from a base64 string or data URI."""
|
||||
if image_b64.startswith("data:"):
|
||||
header, _, data = image_b64.partition(",")
|
||||
mime = header.split(";")[0].split(":")[1] if ":" in header else "image/png"
|
||||
else:
|
||||
data = image_b64
|
||||
mime = "image/png"
|
||||
return base64.b64decode(data), mime
|
||||
|
||||
|
||||
@router.post("/attach", response_model=AttachResponse)
def attach_screenshot(payload: AttachRequest) -> AttachResponse:
    """Upload a screenshot to a Forgejo issue as a comment with embedded image.

    The image is uploaded as an issue asset, then referenced in a comment
    so it is visible inline when the issue is viewed.

    Raises:
        HTTPException(503): feedback token not configured.
        HTTPException(413): decoded image exceeds the 5 MB limit.
        HTTPException(502): Forgejo rejected the asset upload or the comment.
    """
    token = os.environ.get("FORGEJO_API_TOKEN", "")
    if not token:
        raise HTTPException(status_code=503, detail="Feedback not configured.")

    raw_bytes, mime = _decode_image(payload.image_b64)

    if len(raw_bytes) > _MAX_BYTES:
        raise HTTPException(
            status_code=413,
            detail=f"Screenshot exceeds 5 MB limit ({len(raw_bytes) // 1024} KB received).",
        )

    # Upload image as issue asset
    asset_resp = requests.post(
        f"{_FORGEJO_BASE}/repos/{_REPO}/issues/{payload.issue_number}/assets",
        headers=_forgejo_headers(),
        files={"attachment": (payload.filename, raw_bytes, mime)},
        timeout=20,
    )
    if not asset_resp.ok:
        raise HTTPException(
            status_code=502,
            detail=f"Forgejo asset upload failed: {asset_resp.text[:200]}",
        )

    asset_url = asset_resp.json().get("browser_download_url", "")

    # Pin as a comment so the image is visible inline.
    # Bug fix: asset_url was fetched but never embedded — the comment body was
    # an f-string with no placeholder, so the issue comment showed no image.
    comment_body = (
        f"**Screenshot attached by reporter:**\n\n"
        f"![{payload.filename}]({asset_url})"
    )
    comment_resp = requests.post(
        f"{_FORGEJO_BASE}/repos/{_REPO}/issues/{payload.issue_number}/comments",
        headers={**_forgejo_headers(), "Content-Type": "application/json"},
        json={"body": comment_body},
        timeout=15,
    )
    if not comment_resp.ok:
        raise HTTPException(
            status_code=502,
            detail=f"Forgejo comment failed: {comment_resp.text[:200]}",
        )

    comment_url = comment_resp.json().get("html_url", "")
    return AttachResponse(comment_url=comment_url)
|
||||
|
|
@ -1,217 +0,0 @@
|
|||
"""Household management endpoints — shared pantry for Premium users."""
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import os
|
||||
import secrets
|
||||
from datetime import datetime, timedelta, timezone
|
||||
|
||||
import sqlite3
|
||||
|
||||
import requests
|
||||
from fastapi import APIRouter, Depends, HTTPException
|
||||
|
||||
from app.cloud_session import CloudUser, CLOUD_DATA_ROOT, HEIMDALL_URL, HEIMDALL_ADMIN_TOKEN, get_session
|
||||
from app.db.store import Store
|
||||
from app.models.schemas.household import (
|
||||
HouseholdAcceptRequest,
|
||||
HouseholdAcceptResponse,
|
||||
HouseholdCreateResponse,
|
||||
HouseholdInviteResponse,
|
||||
HouseholdMember,
|
||||
HouseholdRemoveMemberRequest,
|
||||
HouseholdStatusResponse,
|
||||
MessageResponse,
|
||||
)
|
||||
|
||||
log = logging.getLogger(__name__)
router = APIRouter()

# Invite tokens are single-use and expire after this many days.
_INVITE_TTL_DAYS = 7
# Public base URL used to build shareable invite links.
_KIWI_BASE_URL = os.environ.get("KIWI_BASE_URL", "https://menagerie.circuitforge.tech/kiwi")
|
||||
|
||||
|
||||
def _require_premium(session: CloudUser = Depends(get_session)) -> CloudUser:
    """Dependency: allow only Premium-capable tiers through."""
    allowed_tiers = ("premium", "ultra", "local")
    if session.tier in allowed_tiers:
        return session
    raise HTTPException(status_code=403, detail="Household features require Premium tier.")
|
||||
|
||||
|
||||
def _require_household_owner(session: CloudUser = Depends(_require_premium)) -> CloudUser:
    """Dependency: require that the caller owns a household."""
    is_owner = bool(session.is_household_owner) and bool(session.household_id)
    if not is_owner:
        raise HTTPException(status_code=403, detail="Only the household owner can perform this action.")
    return session
|
||||
|
||||
|
||||
def _household_store(household_id: str) -> Store:
    """Open the household DB directly (used during invite acceptance).

    Creates the household directory on first use, and sets row_factory so
    dict-style column access works on raw conn queries.
    """
    household_dir = CLOUD_DATA_ROOT / f"household_{household_id}"
    household_dir.mkdir(parents=True, exist_ok=True)
    store = Store(household_dir / "kiwi.db")
    store.conn.row_factory = sqlite3.Row
    return store
|
||||
|
||||
|
||||
def _heimdall_post(path: str, body: dict) -> dict:
    """Call Heimdall admin API. Returns response dict or raises HTTPException.

    When HEIMDALL_ADMIN_TOKEN is unset (dev mode) the call is skipped and an
    empty dict is returned so callers can proceed with local fallbacks.

    Raises:
        HTTPException(502): Heimdall is unreachable or returned a non-2xx status.
    """
    if not HEIMDALL_ADMIN_TOKEN:
        log.warning("HEIMDALL_ADMIN_TOKEN not set — household Heimdall call skipped")
        return {}
    # Keep the try narrow: only the network call can raise RequestException.
    try:
        resp = requests.post(
            f"{HEIMDALL_URL}{path}",
            json=body,
            headers={"Authorization": f"Bearer {HEIMDALL_ADMIN_TOKEN}"},
            timeout=10,
        )
    except requests.RequestException as exc:
        # Bug fix: chain the cause so tracebacks show the underlying
        # network error instead of "During handling ... another occurred".
        raise HTTPException(status_code=502, detail=f"Heimdall unreachable: {exc}") from exc
    if not resp.ok:
        raise HTTPException(status_code=502, detail=f"Heimdall error: {resp.text}")
    return resp.json()
|
||||
|
||||
|
||||
@router.post("/create", response_model=HouseholdCreateResponse)
async def create_household(session: CloudUser = Depends(_require_premium)):
    """Create a new household. The calling user becomes owner."""
    # A user can belong to at most one household at a time.
    if session.household_id:
        raise HTTPException(status_code=409, detail="You are already in a household.")
    data = _heimdall_post("/admin/household/create", {"owner_user_id": session.user_id})
    household_id = data.get("household_id")
    if not household_id:
        # Heimdall returned OK but without a household_id — treat as server error.
        # Fall back to a local stub only when HEIMDALL_ADMIN_TOKEN is unset (dev mode).
        if HEIMDALL_ADMIN_TOKEN:
            raise HTTPException(status_code=500, detail="Heimdall did not return a household_id.")
        household_id = "local-household"
    return HouseholdCreateResponse(
        household_id=household_id,
        message="Household created. Share an invite link to add members.",
    )
|
||||
|
||||
|
||||
@router.get("/status", response_model=HouseholdStatusResponse)
async def household_status(session: CloudUser = Depends(_require_premium)):
    """Return current user's household membership status."""
    if not session.household_id:
        return HouseholdStatusResponse(in_household=False)

    # Member list comes from Heimdall and is best-effort: an unreachable or
    # failing Heimdall degrades to an empty member list, not a request error.
    members: list[HouseholdMember] = []
    if HEIMDALL_ADMIN_TOKEN:
        try:
            resp = requests.get(
                f"{HEIMDALL_URL}/admin/household/{session.household_id}",
                headers={"Authorization": f"Bearer {HEIMDALL_ADMIN_TOKEN}"},
                timeout=5,
            )
            if resp.ok:
                raw = resp.json()
                for m in raw.get("members", []):
                    members.append(HouseholdMember(
                        user_id=m["user_id"],
                        joined_at=m.get("joined_at", ""),
                        # Ownership is derived by matching the household's owner_user_id.
                        is_owner=m["user_id"] == raw.get("owner_user_id"),
                    ))
        except Exception as exc:
            # Broad on purpose: membership display must not break /status.
            log.warning("Could not fetch household members: %s", exc)

    return HouseholdStatusResponse(
        in_household=True,
        household_id=session.household_id,
        is_owner=session.is_household_owner,
        members=members,
    )
|
||||
|
||||
|
||||
@router.post("/invite", response_model=HouseholdInviteResponse)
async def create_invite(session: CloudUser = Depends(_require_household_owner)):
    """Generate a one-time invite token valid for 7 days."""
    invite_token = secrets.token_hex(32)
    expiry = datetime.now(timezone.utc) + timedelta(days=_INVITE_TTL_DAYS)
    expires_at = expiry.isoformat()

    user_store = Store(session.db)
    try:
        # Persist the token in the owner's DB so acceptance can validate it.
        user_store.conn.execute(
            """INSERT INTO household_invites (token, household_id, created_by, expires_at)
            VALUES (?, ?, ?, ?)""",
            (invite_token, session.household_id, session.user_id, expires_at),
        )
        user_store.conn.commit()
    finally:
        user_store.close()

    invite_url = f"{_KIWI_BASE_URL}/#/join?household_id={session.household_id}&token={invite_token}"
    return HouseholdInviteResponse(token=invite_token, invite_url=invite_url, expires_at=expires_at)
|
||||
|
||||
|
||||
@router.post("/accept", response_model=HouseholdAcceptResponse)
async def accept_invite(
    body: HouseholdAcceptRequest,
    session: CloudUser = Depends(get_session),
):
    """Accept a household invite. Opens the household DB directly to validate the token."""
    if session.household_id:
        raise HTTPException(status_code=409, detail="You are already in a household.")

    hh_store = _household_store(body.household_id)
    now = datetime.now(timezone.utc).isoformat()
    try:
        # The token must exist for this exact household.
        row = hh_store.conn.execute(
            """SELECT token, expires_at, used_at FROM household_invites
            WHERE token = ? AND household_id = ?""",
            (body.token, body.household_id),
        ).fetchone()

        if not row:
            raise HTTPException(status_code=404, detail="Invite not found.")
        if row["used_at"] is not None:
            raise HTTPException(status_code=410, detail="Invite already used.")
        # Both timestamps are UTC ISO-8601 strings, so a lexicographic
        # comparison orders them correctly.
        if row["expires_at"] < now:
            raise HTTPException(status_code=410, detail="Invite has expired.")

        # Burn the token before notifying Heimdall so it stays single-use
        # even if the membership call below fails.
        hh_store.conn.execute(
            "UPDATE household_invites SET used_at = ?, used_by = ? WHERE token = ?",
            (now, session.user_id, body.token),
        )
        hh_store.conn.commit()
    finally:
        hh_store.close()

    _heimdall_post("/admin/household/add-member", {
        "household_id": body.household_id,
        "user_id": session.user_id,
    })

    return HouseholdAcceptResponse(
        message="You have joined the household. Reload the app to switch to the shared pantry.",
        household_id=body.household_id,
    )
|
||||
|
||||
|
||||
@router.post("/leave", response_model=MessageResponse)
async def leave_household(session: CloudUser = Depends(_require_premium)) -> MessageResponse:
    """Leave the current household (non-owners only)."""
    if not session.household_id:
        raise HTTPException(status_code=400, detail="You are not in a household.")
    if session.is_household_owner:
        raise HTTPException(status_code=400, detail="The household owner cannot leave. Delete the household instead.")

    removal = {
        "household_id": session.household_id,
        "user_id": session.user_id,
    }
    _heimdall_post("/admin/household/remove-member", removal)
    return MessageResponse(message="You have left the household. Reload the app to return to your personal pantry.")
|
||||
|
||||
|
||||
@router.post("/remove-member", response_model=MessageResponse)
async def remove_member(
    body: HouseholdRemoveMemberRequest,
    session: CloudUser = Depends(_require_household_owner),
) -> MessageResponse:
    """Remove a member from the household (owner only)."""
    # Owners must use the dedicated /leave flow for themselves.
    if body.user_id == session.user_id:
        raise HTTPException(status_code=400, detail="Use /leave to remove yourself.")

    removal = {
        "household_id": session.household_id,
        "user_id": body.user_id,
    }
    _heimdall_post("/admin/household/remove-member", removal)
    return MessageResponse(message=f"Member {body.user_id} removed from household.")
|
||||
|
|
@ -1,185 +0,0 @@
|
|||
"""Kiwi — /api/v1/imitate/samples endpoint for Avocet Imitate tab.
|
||||
|
||||
Returns the actual assembled prompt Kiwi sends to its LLM for recipe generation,
|
||||
including the full pantry context (expiry-first ordering), dietary constraints
|
||||
(from user_settings if present), and the Level 3 format instructions.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from fastapi import APIRouter, Depends
|
||||
|
||||
from app.cloud_session import get_session, CloudUser
|
||||
from app.db.store import Store
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
_LEVEL3_FORMAT = [
|
||||
"",
|
||||
"Reply using EXACTLY this plain-text format — no markdown, no bold, no extra commentary:",
|
||||
"Title: <name of the dish>",
|
||||
"Ingredients: <comma-separated list>",
|
||||
"Directions:",
|
||||
"1. <first step>",
|
||||
"2. <second step>",
|
||||
"3. <continue for each step>",
|
||||
"Notes: <optional tips>",
|
||||
]
|
||||
|
||||
_LEVEL4_FORMAT = [
|
||||
"",
|
||||
"Reply using EXACTLY this plain-text format — no markdown, no bold:",
|
||||
"Title: <name of the dish>",
|
||||
"Ingredients: <comma-separated list>",
|
||||
"Directions:",
|
||||
"1. <first step>",
|
||||
"2. <second step>",
|
||||
"Notes: <optional tips>",
|
||||
]
|
||||
|
||||
|
||||
def _read_user_settings(store: Store) -> dict:
|
||||
"""Read all key/value pairs from user_settings table."""
|
||||
try:
|
||||
rows = store.conn.execute("SELECT key, value FROM user_settings").fetchall()
|
||||
return {r["key"]: r["value"] for r in rows}
|
||||
except Exception:
|
||||
return {}
|
||||
|
||||
|
||||
def _build_recipe_prompt(
|
||||
pantry_names: list[str],
|
||||
expiring_names: list[str],
|
||||
constraints: list[str],
|
||||
allergies: list[str],
|
||||
level: int = 3,
|
||||
) -> str:
|
||||
"""Assemble the recipe generation prompt matching Kiwi's Level 3/4 format."""
|
||||
# Expiring items first, then remaining pantry items (deduped)
|
||||
expiring_set = set(expiring_names)
|
||||
ordered = list(expiring_names) + [n for n in pantry_names if n not in expiring_set]
|
||||
|
||||
if not ordered:
|
||||
ordered = pantry_names
|
||||
|
||||
if level == 4:
|
||||
lines = [
|
||||
"Surprise me with a creative, unexpected recipe.",
|
||||
"Only use ingredients that make culinary sense together. "
|
||||
"Do not force flavoured/sweetened items (vanilla yoghurt, flavoured syrups, jam) into savoury dishes.",
|
||||
f"Ingredients available: {', '.join(ordered)}",
|
||||
]
|
||||
if constraints:
|
||||
lines.append(f"Constraints: {', '.join(constraints)}")
|
||||
if allergies:
|
||||
lines.append(f"Must NOT contain: {', '.join(allergies)}")
|
||||
lines.append("Treat any mystery ingredient as a wildcard — use your imagination.")
|
||||
lines += _LEVEL4_FORMAT
|
||||
else:
|
||||
lines = [
|
||||
"You are a creative chef. Generate a recipe using the ingredients below.",
|
||||
"IMPORTANT: When you use a pantry item, list it in Ingredients using its exact name "
|
||||
"from the pantry list. Do not add adjectives, quantities, or cooking states "
|
||||
"(e.g. use 'butter', not 'unsalted butter' or '2 tbsp butter').",
|
||||
"IMPORTANT: Only use pantry items that make culinary sense for the dish. "
|
||||
"Do NOT force flavoured/sweetened items (vanilla yoghurt, fruit yoghurt, jam, "
|
||||
"dessert sauces, flavoured syrups) into savoury dishes.",
|
||||
"IMPORTANT: Do not default to the same ingredient repeatedly across dishes. "
|
||||
"If a pantry item does not genuinely improve this specific dish, leave it out.",
|
||||
"",
|
||||
f"Pantry items: {', '.join(ordered)}",
|
||||
]
|
||||
if expiring_names:
|
||||
lines.append(
|
||||
f"Priority — use these soon (expiring): {', '.join(expiring_names)}"
|
||||
)
|
||||
if constraints:
|
||||
lines.append(f"Dietary constraints: {', '.join(constraints)}")
|
||||
if allergies:
|
||||
lines.append(f"IMPORTANT — must NOT contain: {', '.join(allergies)}")
|
||||
lines += _LEVEL3_FORMAT
|
||||
|
||||
return "\n".join(lines)
|
||||
|
||||
|
||||
@router.get("/samples")
async def imitate_samples(
    limit: int = 5,
    level: int = 3,
    session: CloudUser = Depends(get_session),
):
    """Return assembled recipe generation prompts for Avocet's Imitate tab.

    Each sample includes:
      system_prompt   empty (Kiwi uses no system context)
      input_text      full Level 3/4 prompt with pantry items, expiring items,
                      dietary constraints, and format instructions
      output_text     empty (no prior LLM output stored per-request)

    level: 3 (structured with element biasing context) or 4 (wildcard creative)
    limit: max number of distinct prompt variants to return (varies by pantry state)
    """
    import json as _json

    limit = max(1, min(limit, 10))

    store = Store(session.db)
    try:
        # Full pantry for context
        all_items = store.list_inventory()
        pantry_names = [r["product_name"] for r in all_items if r.get("product_name")]

        # Expiring items as priority ingredients
        expiring = store.expiring_soon(days=14)
        expiring_names = [r["product_name"] for r in expiring if r.get("product_name")]

        # Dietary constraints from user_settings (keys: constraints, allergies)
        settings = _read_user_settings(store)
    finally:
        # Bug fix: the store was previously never closed, leaking a DB
        # handle on every request (every other endpoint closes in finally).
        store.close()

    try:
        constraints = _json.loads(settings.get("dietary_constraints", "[]")) or []
    except Exception:
        constraints = []
    try:
        allergies = _json.loads(settings.get("dietary_allergies", "[]")) or []
    except Exception:
        allergies = []

    if not pantry_names:
        return {"samples": [], "total": 0, "type": f"recipe_level{level}"}

    # Build prompt variants: one per expiring item as the "anchor" ingredient,
    # plus one general pantry prompt. Cap at limit.
    samples = []
    seen_anchors: set[str] = set()

    for item in (expiring[:limit - 1] if expiring else []):
        anchor = item.get("product_name", "")
        if not anchor or anchor in seen_anchors:
            continue
        seen_anchors.add(anchor)

        # Put this item first in the list for the prompt
        ordered_expiring = [anchor] + [n for n in expiring_names if n != anchor]
        prompt = _build_recipe_prompt(pantry_names, ordered_expiring, constraints, allergies, level)

        samples.append({
            "id": item.get("id", 0),
            "anchor_item": anchor,
            "expiring_count": len(expiring_names),
            "pantry_count": len(pantry_names),
            "system_prompt": "",
            "input_text": prompt,
            "output_text": "",
        })

    # One general prompt using all expiring as priority
    if len(samples) < limit:
        prompt = _build_recipe_prompt(pantry_names, expiring_names, constraints, allergies, level)
        samples.append({
            "id": 0,
            "anchor_item": "full pantry",
            "expiring_count": len(expiring_names),
            "pantry_count": len(pantry_names),
            "system_prompt": "",
            "input_text": prompt,
            "output_text": "",
        })

    return {"samples": samples, "total": len(samples), "type": f"recipe_level{level}"}
|
||||
|
|
@ -3,7 +3,6 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
import uuid
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
|
@ -12,73 +11,25 @@ import aiofiles
|
|||
from fastapi import APIRouter, Depends, File, Form, HTTPException, UploadFile, status
|
||||
from pydantic import BaseModel
|
||||
|
||||
from app.cloud_session import CloudUser, _auth_label, get_session
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
from app.cloud_session import CloudUser, get_session
|
||||
from app.db.session import get_store
|
||||
from app.services.expiration_predictor import ExpirationPredictor
|
||||
|
||||
_predictor = ExpirationPredictor()
|
||||
from app.db.store import Store
|
||||
from app.models.schemas.inventory import (
|
||||
BarcodeScanResponse,
|
||||
BulkAddByNameRequest,
|
||||
BulkAddByNameResponse,
|
||||
BulkAddItemResult,
|
||||
DiscardRequest,
|
||||
InventoryItemCreate,
|
||||
InventoryItemResponse,
|
||||
InventoryItemUpdate,
|
||||
InventoryStats,
|
||||
PartialConsumeRequest,
|
||||
ProductCreate,
|
||||
ProductResponse,
|
||||
ProductUpdate,
|
||||
TagCreate,
|
||||
TagResponse,
|
||||
)
|
||||
from app.models.schemas.label_capture import LabelConfirmRequest
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
# ── Helpers ───────────────────────────────────────────────────────────────────
|
||||
|
||||
def _user_constraints(store) -> list[str]:
|
||||
"""Load active dietary constraints from user settings (comma-separated string)."""
|
||||
raw = store.get_setting("dietary_constraints") or ""
|
||||
return [c.strip() for c in raw.split(",") if c.strip()]
|
||||
|
||||
|
||||
def _enrich_item(item: dict, user_constraints: list[str] | None = None) -> dict:
    """Attach computed fields: opened_expiry_date, secondary_state/uses/warning/discard_signs."""
    from datetime import date, timedelta

    # Work on a copy; the caller's dict is left untouched.
    enriched = dict(item)

    # Derive the post-opening expiry when the item is opened and the
    # predictor knows a post-opening shelf life for its category.
    opened = enriched.get("opened_date")
    if opened:
        shelf_days = _predictor.days_after_opening(enriched.get("category"))
        if shelf_days is not None:
            try:
                expiry = date.fromisoformat(opened) + timedelta(days=shelf_days)
                enriched["opened_expiry_date"] = str(expiry)
            except ValueError:
                # Malformed opened_date — leave the field unset below.
                pass
    enriched.setdefault("opened_expiry_date", None)

    # Secondary use window — check sell-by date (not opened expiry).
    # Apply dietary constraint filter (e.g. wine suppressed for halal/alcohol-free).
    sec = _predictor.secondary_state(enriched.get("category"), enriched.get("expiration_date"))
    sec = _predictor.filter_secondary_by_constraints(sec, user_constraints or [])
    enriched["secondary_state"] = sec["label"] if sec else None
    enriched["secondary_uses"] = sec["uses"] if sec else None
    enriched["secondary_warning"] = sec["warning"] if sec else None
    enriched["secondary_discard_signs"] = sec["discard_signs"] if sec else None
    return enriched
|
||||
|
||||
|
||||
# ── Products ──────────────────────────────────────────────────────────────────
|
||||
|
||||
@router.post("/products", response_model=ProductResponse, status_code=status.HTTP_201_CREATED)
|
||||
|
|
@ -163,12 +114,7 @@ async def delete_product(product_id: int, store: Store = Depends(get_store)):
|
|||
# ── Inventory items ───────────────────────────────────────────────────────────
|
||||
|
||||
@router.post("/items", response_model=InventoryItemResponse, status_code=status.HTTP_201_CREATED)
|
||||
async def create_inventory_item(
|
||||
body: InventoryItemCreate,
|
||||
store: Store = Depends(get_store),
|
||||
session: CloudUser = Depends(get_session),
|
||||
):
|
||||
log.info("add_item auth=%s tier=%s product_id=%s", _auth_label(session.user_id), session.tier, body.product_id)
|
||||
async def create_inventory_item(body: InventoryItemCreate, store: Store = Depends(get_store)):
|
||||
item = await asyncio.to_thread(
|
||||
store.add_inventory_item,
|
||||
body.product_id,
|
||||
|
|
@ -181,38 +127,7 @@ async def create_inventory_item(
|
|||
notes=body.notes,
|
||||
source=body.source,
|
||||
)
|
||||
# RETURNING * omits joined columns (product_name, barcode, category).
|
||||
# Re-fetch with the products JOIN so the response is fully populated (#99).
|
||||
full_item = await asyncio.to_thread(store.get_inventory_item, item["id"])
|
||||
return InventoryItemResponse.model_validate(full_item)
|
||||
|
||||
|
||||
@router.post("/items/bulk-add-by-name", response_model=BulkAddByNameResponse)
async def bulk_add_items_by_name(body: BulkAddByNameRequest, store: Store = Depends(get_store)):
    """Create pantry items from a list of ingredient names (no barcode required).

    Uses get_or_create_product so re-adding an existing product is idempotent.
    """
    outcomes: list[BulkAddItemResult] = []
    for entry in body.items:
        try:
            product, _ = await asyncio.to_thread(
                store.get_or_create_product, entry.name, None, source="manual"
            )
            new_item = await asyncio.to_thread(
                store.add_inventory_item,
                product["id"],
                entry.location,
                quantity=entry.quantity,
                unit=entry.unit,
                source="manual",
            )
        except Exception as exc:
            # Per-entry failures are reported, not raised, so one bad name
            # does not abort the whole batch.
            outcomes.append(BulkAddItemResult(name=entry.name, ok=False, error=str(exc)))
        else:
            outcomes.append(BulkAddItemResult(name=entry.name, ok=True, item_id=new_item["id"]))

    succeeded = sum(1 for r in outcomes if r.ok)
    return BulkAddByNameResponse(added=succeeded, failed=len(outcomes) - succeeded, results=outcomes)
|
||||
return InventoryItemResponse.model_validate(item)
|
||||
|
||||
|
||||
@router.get("/items", response_model=List[InventoryItemResponse])
|
||||
|
|
@ -222,15 +137,13 @@ async def list_inventory_items(
|
|||
store: Store = Depends(get_store),
|
||||
):
|
||||
items = await asyncio.to_thread(store.list_inventory, location, item_status)
|
||||
constraints = await asyncio.to_thread(_user_constraints, store)
|
||||
return [InventoryItemResponse.model_validate(_enrich_item(i, constraints)) for i in items]
|
||||
return [InventoryItemResponse.model_validate(i) for i in items]
|
||||
|
||||
|
||||
@router.get("/items/expiring", response_model=List[InventoryItemResponse])
|
||||
async def get_expiring_items(days: int = 7, store: Store = Depends(get_store)):
|
||||
items = await asyncio.to_thread(store.expiring_soon, days)
|
||||
constraints = await asyncio.to_thread(_user_constraints, store)
|
||||
return [InventoryItemResponse.model_validate(_enrich_item(i, constraints)) for i in items]
|
||||
return [InventoryItemResponse.model_validate(i) for i in items]
|
||||
|
||||
|
||||
@router.get("/items/{item_id}", response_model=InventoryItemResponse)
|
||||
|
|
@ -238,8 +151,7 @@ async def get_inventory_item(item_id: int, store: Store = Depends(get_store)):
|
|||
item = await asyncio.to_thread(store.get_inventory_item, item_id)
|
||||
if not item:
|
||||
raise HTTPException(status_code=404, detail="Inventory item not found")
|
||||
constraints = await asyncio.to_thread(_user_constraints, store)
|
||||
return InventoryItemResponse.model_validate(_enrich_item(item, constraints))
|
||||
return InventoryItemResponse.model_validate(item)
|
||||
|
||||
|
||||
@router.patch("/items/{item_id}", response_model=InventoryItemResponse)
|
||||
|
|
@ -251,83 +163,24 @@ async def update_inventory_item(
|
|||
updates["purchase_date"] = str(updates["purchase_date"])
|
||||
if "expiration_date" in updates and updates["expiration_date"]:
|
||||
updates["expiration_date"] = str(updates["expiration_date"])
|
||||
if "opened_date" in updates and updates["opened_date"]:
|
||||
updates["opened_date"] = str(updates["opened_date"])
|
||||
item = await asyncio.to_thread(store.update_inventory_item, item_id, **updates)
|
||||
if not item:
|
||||
raise HTTPException(status_code=404, detail="Inventory item not found")
|
||||
constraints = await asyncio.to_thread(_user_constraints, store)
|
||||
return InventoryItemResponse.model_validate(_enrich_item(item, constraints))
|
||||
|
||||
|
||||
@router.post("/items/{item_id}/open", response_model=InventoryItemResponse)
|
||||
async def mark_item_opened(item_id: int, store: Store = Depends(get_store)):
|
||||
"""Record that this item was opened today, triggering secondary shelf-life tracking."""
|
||||
from datetime import date
|
||||
item = await asyncio.to_thread(
|
||||
store.update_inventory_item,
|
||||
item_id,
|
||||
opened_date=str(date.today()),
|
||||
)
|
||||
if not item:
|
||||
raise HTTPException(status_code=404, detail="Inventory item not found")
|
||||
constraints = await asyncio.to_thread(_user_constraints, store)
|
||||
return InventoryItemResponse.model_validate(_enrich_item(item, constraints))
|
||||
return InventoryItemResponse.model_validate(item)
|
||||
|
||||
|
||||
@router.post("/items/{item_id}/consume", response_model=InventoryItemResponse)
|
||||
async def consume_item(
|
||||
item_id: int,
|
||||
body: Optional[PartialConsumeRequest] = None,
|
||||
store: Store = Depends(get_store),
|
||||
):
|
||||
"""Consume an inventory item fully or partially.
|
||||
|
||||
When body.quantity is provided, decrements by that amount and only marks
|
||||
status=consumed when quantity reaches zero. Omit body to consume all.
|
||||
"""
|
||||
async def consume_item(item_id: int, store: Store = Depends(get_store)):
|
||||
from datetime import datetime, timezone
|
||||
now = datetime.now(timezone.utc).isoformat()
|
||||
if body is not None:
|
||||
item = await asyncio.to_thread(
|
||||
store.partial_consume_item, item_id, body.quantity, now
|
||||
)
|
||||
else:
|
||||
item = await asyncio.to_thread(
|
||||
store.update_inventory_item,
|
||||
item_id,
|
||||
status="consumed",
|
||||
consumed_at=now,
|
||||
)
|
||||
if not item:
|
||||
raise HTTPException(status_code=404, detail="Inventory item not found")
|
||||
constraints = await asyncio.to_thread(_user_constraints, store)
|
||||
return InventoryItemResponse.model_validate(_enrich_item(item, constraints))
|
||||
|
||||
|
||||
@router.post("/items/{item_id}/discard", response_model=InventoryItemResponse)
|
||||
async def discard_item(
|
||||
item_id: int,
|
||||
body: DiscardRequest = DiscardRequest(),
|
||||
store: Store = Depends(get_store),
|
||||
):
|
||||
"""Mark an item as discarded (not used, spoiled, etc).
|
||||
|
||||
Optional reason field accepts free text or a preset label
|
||||
('not used', 'spoiled', 'excess', 'other').
|
||||
"""
|
||||
from datetime import datetime, timezone
|
||||
item = await asyncio.to_thread(
|
||||
store.update_inventory_item,
|
||||
item_id,
|
||||
status="discarded",
|
||||
consumed_at=datetime.now(timezone.utc).isoformat(),
|
||||
disposal_reason=body.reason,
|
||||
)
|
||||
if not item:
|
||||
raise HTTPException(status_code=404, detail="Inventory item not found")
|
||||
constraints = await asyncio.to_thread(_user_constraints, store)
|
||||
return InventoryItemResponse.model_validate(_enrich_item(item, constraints))
|
||||
return InventoryItemResponse.model_validate(item)
|
||||
|
||||
|
||||
@router.delete("/items/{item_id}", status_code=status.HTTP_204_NO_CONTENT)
|
||||
|
|
@ -350,31 +203,6 @@ class BarcodeScanTextRequest(BaseModel):
|
|||
auto_add_to_inventory: bool = True
|
||||
|
||||
|
||||
def _captured_to_product_info(row: dict) -> dict:
|
||||
"""Convert a captured_products row to the product_info dict shape used by
|
||||
the barcode scan flow (mirrors what OpenFoodFactsService returns)."""
|
||||
macros: dict = {}
|
||||
for field in ("calories", "fat_g", "saturated_fat_g", "carbs_g", "sugar_g",
|
||||
"fiber_g", "protein_g", "sodium_mg", "serving_size_g"):
|
||||
if row.get(field) is not None:
|
||||
macros[field] = row[field]
|
||||
return {
|
||||
"name": row.get("product_name") or row.get("barcode", "Unknown Product"),
|
||||
"brand": row.get("brand"),
|
||||
"category": None,
|
||||
"nutrition_data": macros,
|
||||
"ingredient_names": row.get("ingredient_names") or [],
|
||||
"allergens": row.get("allergens") or [],
|
||||
"source": "visual_capture",
|
||||
}
|
||||
|
||||
|
||||
def _gap_message(tier: str, has_visual_capture: bool) -> str:
|
||||
if has_visual_capture:
|
||||
return "We couldn't find this product. Photograph the nutrition label to add it."
|
||||
return "Not found in any product database — add manually"
|
||||
|
||||
|
||||
@router.post("/scan/text", response_model=BarcodeScanResponse)
|
||||
async def scan_barcode_text(
|
||||
body: BarcodeScanTextRequest,
|
||||
|
|
@ -382,24 +210,12 @@ async def scan_barcode_text(
|
|||
session: CloudUser = Depends(get_session),
|
||||
):
|
||||
"""Scan a barcode from a text string (e.g. from a hardware scanner or manual entry)."""
|
||||
log.info("scan auth=%s tier=%s barcode=%r", _auth_label(session.user_id), session.tier, body.barcode)
|
||||
from app.services.openfoodfacts import OpenFoodFactsService
|
||||
from app.services.expiration_predictor import ExpirationPredictor
|
||||
from app.tiers import can_use
|
||||
|
||||
predictor = ExpirationPredictor()
|
||||
has_visual_capture = can_use("visual_label_capture", session.tier, session.has_byok)
|
||||
|
||||
# 1. Check local captured-products cache before hitting FDC/OFF
|
||||
cached = await asyncio.to_thread(store.get_captured_product, body.barcode)
|
||||
if cached and cached.get("confirmed_by_user"):
|
||||
product_info: dict | None = _captured_to_product_info(cached)
|
||||
product_source = "visual_capture"
|
||||
else:
|
||||
off = OpenFoodFactsService()
|
||||
predictor = ExpirationPredictor()
|
||||
product_info = await off.lookup_product(body.barcode)
|
||||
product_source = "openfoodfacts"
|
||||
|
||||
inventory_item = None
|
||||
|
||||
if product_info and body.auto_add_to_inventory:
|
||||
|
|
@ -410,7 +226,7 @@ async def scan_barcode_text(
|
|||
brand=product_info.get("brand"),
|
||||
category=product_info.get("category"),
|
||||
nutrition_data=product_info.get("nutrition_data", {}),
|
||||
source=product_source,
|
||||
source="openfoodfacts",
|
||||
source_data=product_info,
|
||||
)
|
||||
exp = predictor.predict_expiration(
|
||||
|
|
@ -420,14 +236,10 @@ async def scan_barcode_text(
|
|||
tier=session.tier,
|
||||
has_byok=session.has_byok,
|
||||
)
|
||||
# Use OFFs pack size when detected; caller-supplied quantity is a fallback
|
||||
resolved_qty = product_info.get("pack_quantity") or body.quantity
|
||||
resolved_unit = product_info.get("pack_unit") or "count"
|
||||
inventory_item = await asyncio.to_thread(
|
||||
store.add_inventory_item,
|
||||
product["id"], body.location,
|
||||
quantity=resolved_qty,
|
||||
unit=resolved_unit,
|
||||
quantity=body.quantity,
|
||||
expiration_date=str(exp) if exp else None,
|
||||
source="barcode_scan",
|
||||
)
|
||||
|
|
@ -435,8 +247,6 @@ async def scan_barcode_text(
|
|||
else:
|
||||
result_product = None
|
||||
|
||||
product_found = product_info is not None
|
||||
needs_capture = not product_found and has_visual_capture
|
||||
return BarcodeScanResponse(
|
||||
success=True,
|
||||
barcodes_found=1,
|
||||
|
|
@ -446,9 +256,7 @@ async def scan_barcode_text(
|
|||
"product": result_product,
|
||||
"inventory_item": InventoryItemResponse.model_validate(inventory_item) if inventory_item else None,
|
||||
"added_to_inventory": inventory_item is not None,
|
||||
"needs_manual_entry": not product_found and not needs_capture,
|
||||
"needs_visual_capture": needs_capture,
|
||||
"message": "Added to inventory" if inventory_item else _gap_message(session.tier, needs_capture),
|
||||
"message": "Added to inventory" if inventory_item else "Product not found in database",
|
||||
}],
|
||||
message="Barcode processed",
|
||||
)
|
||||
|
|
@ -464,10 +272,6 @@ async def scan_barcode_image(
|
|||
session: CloudUser = Depends(get_session),
|
||||
):
|
||||
"""Scan a barcode from an uploaded image. Requires Phase 2 scanner integration."""
|
||||
log.info("scan_image auth=%s tier=%s", _auth_label(session.user_id), session.tier)
|
||||
from app.tiers import can_use
|
||||
has_visual_capture = can_use("visual_label_capture", session.tier, session.has_byok)
|
||||
|
||||
temp_dir = Path("/tmp/kiwi_barcode_scans")
|
||||
temp_dir.mkdir(parents=True, exist_ok=True)
|
||||
temp_file = temp_dir / f"{uuid.uuid4()}_{file.filename}"
|
||||
|
|
@ -490,16 +294,7 @@ async def scan_barcode_image(
|
|||
results = []
|
||||
for bc in barcodes:
|
||||
code = bc["data"]
|
||||
|
||||
# Check local visual-capture cache before hitting FDC/OFF
|
||||
cached = await asyncio.to_thread(store.get_captured_product, code)
|
||||
if cached and cached.get("confirmed_by_user"):
|
||||
product_info: dict | None = _captured_to_product_info(cached)
|
||||
product_source = "visual_capture"
|
||||
else:
|
||||
product_info = await off.lookup_product(code)
|
||||
product_source = "openfoodfacts"
|
||||
|
||||
inventory_item = None
|
||||
if product_info and auto_add_to_inventory:
|
||||
product, _ = await asyncio.to_thread(
|
||||
|
|
@ -509,7 +304,7 @@ async def scan_barcode_image(
|
|||
brand=product_info.get("brand"),
|
||||
category=product_info.get("category"),
|
||||
nutrition_data=product_info.get("nutrition_data", {}),
|
||||
source=product_source,
|
||||
source="openfoodfacts",
|
||||
source_data=product_info,
|
||||
)
|
||||
exp = predictor.predict_expiration(
|
||||
|
|
@ -519,27 +314,20 @@ async def scan_barcode_image(
|
|||
tier=session.tier,
|
||||
has_byok=session.has_byok,
|
||||
)
|
||||
resolved_qty = product_info.get("pack_quantity") or quantity
|
||||
resolved_unit = product_info.get("pack_unit") or "count"
|
||||
inventory_item = await asyncio.to_thread(
|
||||
store.add_inventory_item,
|
||||
product["id"], location,
|
||||
quantity=resolved_qty,
|
||||
unit=resolved_unit,
|
||||
quantity=quantity,
|
||||
expiration_date=str(exp) if exp else None,
|
||||
source="barcode_scan",
|
||||
)
|
||||
product_found = product_info is not None
|
||||
needs_capture = not product_found and has_visual_capture
|
||||
results.append({
|
||||
"barcode": code,
|
||||
"barcode_type": bc.get("type", "unknown"),
|
||||
"product": ProductResponse.model_validate(product_info) if product_info else None,
|
||||
"product": ProductResponse.model_validate(product) if product_info else None,
|
||||
"inventory_item": InventoryItemResponse.model_validate(inventory_item) if inventory_item else None,
|
||||
"added_to_inventory": inventory_item is not None,
|
||||
"needs_manual_entry": not product_found and not needs_capture,
|
||||
"needs_visual_capture": needs_capture,
|
||||
"message": "Added to inventory" if inventory_item else _gap_message(session.tier, needs_capture),
|
||||
"message": "Added to inventory" if inventory_item else "Barcode scanned",
|
||||
})
|
||||
return BarcodeScanResponse(
|
||||
success=True, barcodes_found=len(barcodes), results=results,
|
||||
|
|
@ -550,143 +338,6 @@ async def scan_barcode_image(
|
|||
temp_file.unlink()
|
||||
|
||||
|
||||
# ── Visual label capture (kiwi#79) ────────────────────────────────────────────
|
||||
|
||||
@router.post("/scan/label-capture")
|
||||
async def capture_nutrition_label(
|
||||
file: UploadFile = File(...),
|
||||
barcode: str = Form(...),
|
||||
store: Store = Depends(get_store),
|
||||
session: CloudUser = Depends(get_session),
|
||||
):
|
||||
"""Photograph a nutrition label for an unenriched product (paid tier).
|
||||
|
||||
Sends the image to the vision model and returns structured nutrition data
|
||||
for user review. Fields extracted with confidence < 0.7 should be
|
||||
highlighted in amber in the UI.
|
||||
"""
|
||||
from app.tiers import can_use
|
||||
from app.models.schemas.label_capture import LabelCaptureResponse
|
||||
from app.services.label_capture import extract_label, needs_review as _needs_review
|
||||
|
||||
if not can_use("visual_label_capture", session.tier, session.has_byok):
|
||||
raise HTTPException(status_code=403, detail="Visual label capture requires a Paid tier or higher.")
|
||||
log.info("label_capture tier=%s barcode=%r", session.tier, barcode)
|
||||
|
||||
image_bytes = await file.read()
|
||||
extraction = await asyncio.to_thread(extract_label, image_bytes)
|
||||
|
||||
return LabelCaptureResponse(
|
||||
barcode=barcode,
|
||||
product_name=extraction.get("product_name"),
|
||||
brand=extraction.get("brand"),
|
||||
serving_size_g=extraction.get("serving_size_g"),
|
||||
calories=extraction.get("calories"),
|
||||
fat_g=extraction.get("fat_g"),
|
||||
saturated_fat_g=extraction.get("saturated_fat_g"),
|
||||
carbs_g=extraction.get("carbs_g"),
|
||||
sugar_g=extraction.get("sugar_g"),
|
||||
fiber_g=extraction.get("fiber_g"),
|
||||
protein_g=extraction.get("protein_g"),
|
||||
sodium_mg=extraction.get("sodium_mg"),
|
||||
ingredient_names=extraction.get("ingredient_names") or [],
|
||||
allergens=extraction.get("allergens") or [],
|
||||
confidence=extraction.get("confidence", 0.0),
|
||||
needs_review=_needs_review(extraction),
|
||||
)
|
||||
|
||||
|
||||
@router.post("/scan/label-confirm")
|
||||
async def confirm_nutrition_label(
|
||||
body: LabelConfirmRequest,
|
||||
store: Store = Depends(get_store),
|
||||
session: CloudUser = Depends(get_session),
|
||||
):
|
||||
"""Confirm and save a user-reviewed label extraction.
|
||||
|
||||
Saves the product to the local cache so future scans of the same barcode
|
||||
resolve instantly without another capture. Optionally adds the item to
|
||||
the user's inventory.
|
||||
"""
|
||||
from app.tiers import can_use
|
||||
from app.models.schemas.label_capture import LabelConfirmResponse
|
||||
from app.services.expiration_predictor import ExpirationPredictor
|
||||
|
||||
if not can_use("visual_label_capture", session.tier, session.has_byok):
|
||||
raise HTTPException(status_code=403, detail="Visual label capture requires a Paid tier or higher.")
|
||||
log.info("label_confirm tier=%s barcode=%r", session.tier, body.barcode)
|
||||
|
||||
# Persist to local visual-capture cache
|
||||
await asyncio.to_thread(
|
||||
store.save_captured_product,
|
||||
body.barcode,
|
||||
product_name=body.product_name,
|
||||
brand=body.brand,
|
||||
serving_size_g=body.serving_size_g,
|
||||
calories=body.calories,
|
||||
fat_g=body.fat_g,
|
||||
saturated_fat_g=body.saturated_fat_g,
|
||||
carbs_g=body.carbs_g,
|
||||
sugar_g=body.sugar_g,
|
||||
fiber_g=body.fiber_g,
|
||||
protein_g=body.protein_g,
|
||||
sodium_mg=body.sodium_mg,
|
||||
ingredient_names=body.ingredient_names,
|
||||
allergens=body.allergens,
|
||||
confidence=body.confidence,
|
||||
confirmed_by_user=True,
|
||||
)
|
||||
|
||||
product_id: int | None = None
|
||||
inventory_item_id: int | None = None
|
||||
|
||||
if body.auto_add:
|
||||
predictor = ExpirationPredictor()
|
||||
nutrition = {}
|
||||
for field in ("calories", "fat_g", "saturated_fat_g", "carbs_g", "sugar_g",
|
||||
"fiber_g", "protein_g", "sodium_mg", "serving_size_g"):
|
||||
val = getattr(body, field, None)
|
||||
if val is not None:
|
||||
nutrition[field] = val
|
||||
|
||||
product, _ = await asyncio.to_thread(
|
||||
store.get_or_create_product,
|
||||
body.product_name or body.barcode,
|
||||
body.barcode,
|
||||
brand=body.brand,
|
||||
category=None,
|
||||
nutrition_data=nutrition,
|
||||
source="visual_capture",
|
||||
source_data={},
|
||||
)
|
||||
product_id = product["id"]
|
||||
|
||||
exp = predictor.predict_expiration(
|
||||
"",
|
||||
body.location,
|
||||
product_name=body.product_name or body.barcode,
|
||||
tier=session.tier,
|
||||
has_byok=session.has_byok,
|
||||
)
|
||||
inv_item = await asyncio.to_thread(
|
||||
store.add_inventory_item,
|
||||
product_id, body.location,
|
||||
quantity=body.quantity,
|
||||
unit="count",
|
||||
expiration_date=str(exp) if exp else None,
|
||||
source="visual_capture",
|
||||
)
|
||||
inventory_item_id = inv_item["id"]
|
||||
|
||||
return LabelConfirmResponse(
|
||||
ok=True,
|
||||
barcode=body.barcode,
|
||||
product_id=product_id,
|
||||
inventory_item_id=inventory_item_id,
|
||||
message="Product saved" + (" and added to inventory" if body.auto_add else ""),
|
||||
)
|
||||
|
||||
|
||||
# ── Tags ──────────────────────────────────────────────────────────────────────
|
||||
|
||||
@router.post("/tags", response_model=TagResponse, status_code=status.HTTP_201_CREATED)
|
||||
|
|
|
|||
|
|
@ -1,325 +0,0 @@
|
|||
# app/api/endpoints/meal_plans.py
|
||||
"""Meal plan CRUD, shopping list, and prep session endpoints."""
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import json
|
||||
from datetime import date
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException
|
||||
|
||||
from app.cloud_session import CloudUser, get_session
|
||||
from app.db.session import get_store
|
||||
from app.db.store import Store
|
||||
from app.models.schemas.meal_plan import (
|
||||
CreatePlanRequest,
|
||||
GapItem,
|
||||
PlanSummary,
|
||||
PrepSessionSummary,
|
||||
PrepTaskSummary,
|
||||
ShoppingListResponse,
|
||||
SlotSummary,
|
||||
UpdatePlanRequest,
|
||||
UpdatePrepTaskRequest,
|
||||
UpsertSlotRequest,
|
||||
VALID_MEAL_TYPES,
|
||||
)
|
||||
from app.services.meal_plan.affiliates import get_retailer_links
|
||||
from app.services.meal_plan.prep_scheduler import build_prep_tasks
|
||||
from app.services.meal_plan.shopping_list import compute_shopping_list
|
||||
from app.tiers import can_use
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
# ── helpers ───────────────────────────────────────────────────────────────────
|
||||
|
||||
def _slot_summary(row: dict) -> SlotSummary:
|
||||
return SlotSummary(
|
||||
id=row["id"],
|
||||
plan_id=row["plan_id"],
|
||||
day_of_week=row["day_of_week"],
|
||||
meal_type=row["meal_type"],
|
||||
recipe_id=row.get("recipe_id"),
|
||||
recipe_title=row.get("recipe_title"),
|
||||
servings=row["servings"],
|
||||
custom_label=row.get("custom_label"),
|
||||
)
|
||||
|
||||
|
||||
def _plan_summary(plan: dict, slots: list[dict]) -> PlanSummary:
|
||||
meal_types = plan.get("meal_types") or ["dinner"]
|
||||
if isinstance(meal_types, str):
|
||||
meal_types = json.loads(meal_types)
|
||||
return PlanSummary(
|
||||
id=plan["id"],
|
||||
week_start=plan["week_start"],
|
||||
meal_types=meal_types,
|
||||
slots=[_slot_summary(s) for s in slots],
|
||||
created_at=plan["created_at"],
|
||||
)
|
||||
|
||||
|
||||
def _prep_task_summary(row: dict) -> PrepTaskSummary:
|
||||
return PrepTaskSummary(
|
||||
id=row["id"],
|
||||
recipe_id=row.get("recipe_id"),
|
||||
task_label=row["task_label"],
|
||||
duration_minutes=row.get("duration_minutes"),
|
||||
sequence_order=row["sequence_order"],
|
||||
equipment=row.get("equipment"),
|
||||
is_parallel=bool(row.get("is_parallel", False)),
|
||||
notes=row.get("notes"),
|
||||
user_edited=bool(row.get("user_edited", False)),
|
||||
)
|
||||
|
||||
|
||||
# ── plan CRUD ─────────────────────────────────────────────────────────────────
|
||||
|
||||
@router.post("/", response_model=PlanSummary)
|
||||
async def create_plan(
|
||||
req: CreatePlanRequest,
|
||||
session: CloudUser = Depends(get_session),
|
||||
store: Store = Depends(get_store),
|
||||
) -> PlanSummary:
|
||||
import sqlite3
|
||||
|
||||
# Free tier is locked to dinner-only; paid+ may configure meal types
|
||||
if can_use("meal_plan_config", session.tier):
|
||||
meal_types = [t for t in req.meal_types if t in VALID_MEAL_TYPES] or ["dinner"]
|
||||
else:
|
||||
meal_types = ["dinner"]
|
||||
|
||||
try:
|
||||
plan = await asyncio.to_thread(store.create_meal_plan, str(req.week_start), meal_types)
|
||||
except sqlite3.IntegrityError:
|
||||
raise HTTPException(
|
||||
status_code=409,
|
||||
detail=f"A meal plan for the week of {req.week_start} already exists.",
|
||||
)
|
||||
slots = await asyncio.to_thread(store.get_plan_slots, plan["id"])
|
||||
return _plan_summary(plan, slots)
|
||||
|
||||
|
||||
@router.get("/", response_model=list[PlanSummary])
|
||||
async def list_plans(
|
||||
session: CloudUser = Depends(get_session),
|
||||
store: Store = Depends(get_store),
|
||||
) -> list[PlanSummary]:
|
||||
plans = await asyncio.to_thread(store.list_meal_plans)
|
||||
result = []
|
||||
for p in plans:
|
||||
slots = await asyncio.to_thread(store.get_plan_slots, p["id"])
|
||||
result.append(_plan_summary(p, slots))
|
||||
return result
|
||||
|
||||
|
||||
@router.patch("/{plan_id}", response_model=PlanSummary)
|
||||
async def update_plan(
|
||||
plan_id: int,
|
||||
req: UpdatePlanRequest,
|
||||
session: CloudUser = Depends(get_session),
|
||||
store: Store = Depends(get_store),
|
||||
) -> PlanSummary:
|
||||
plan = await asyncio.to_thread(store.get_meal_plan, plan_id)
|
||||
if plan is None:
|
||||
raise HTTPException(status_code=404, detail="Plan not found.")
|
||||
# Free tier stays dinner-only; paid+ may add meal types
|
||||
if can_use("meal_plan_config", session.tier):
|
||||
meal_types = [t for t in req.meal_types if t in VALID_MEAL_TYPES] or ["dinner"]
|
||||
else:
|
||||
meal_types = ["dinner"]
|
||||
updated = await asyncio.to_thread(store.update_meal_plan_types, plan_id, meal_types)
|
||||
if updated is None:
|
||||
raise HTTPException(status_code=404, detail="Plan not found.")
|
||||
slots = await asyncio.to_thread(store.get_plan_slots, plan_id)
|
||||
return _plan_summary(updated, slots)
|
||||
|
||||
|
||||
@router.get("/{plan_id}", response_model=PlanSummary)
|
||||
async def get_plan(
|
||||
plan_id: int,
|
||||
session: CloudUser = Depends(get_session),
|
||||
store: Store = Depends(get_store),
|
||||
) -> PlanSummary:
|
||||
plan = await asyncio.to_thread(store.get_meal_plan, plan_id)
|
||||
if plan is None:
|
||||
raise HTTPException(status_code=404, detail="Plan not found.")
|
||||
slots = await asyncio.to_thread(store.get_plan_slots, plan_id)
|
||||
return _plan_summary(plan, slots)
|
||||
|
||||
|
||||
# ── slots ─────────────────────────────────────────────────────────────────────
|
||||
|
||||
@router.put("/{plan_id}/slots/{day_of_week}/{meal_type}", response_model=SlotSummary)
|
||||
async def upsert_slot(
|
||||
plan_id: int,
|
||||
day_of_week: int,
|
||||
meal_type: str,
|
||||
req: UpsertSlotRequest,
|
||||
session: CloudUser = Depends(get_session),
|
||||
store: Store = Depends(get_store),
|
||||
) -> SlotSummary:
|
||||
if day_of_week < 0 or day_of_week > 6:
|
||||
raise HTTPException(status_code=422, detail="day_of_week must be 0-6.")
|
||||
if meal_type not in VALID_MEAL_TYPES:
|
||||
raise HTTPException(status_code=422, detail=f"Invalid meal_type '{meal_type}'.")
|
||||
plan = await asyncio.to_thread(store.get_meal_plan, plan_id)
|
||||
if plan is None:
|
||||
raise HTTPException(status_code=404, detail="Plan not found.")
|
||||
row = await asyncio.to_thread(
|
||||
store.upsert_slot,
|
||||
plan_id, day_of_week, meal_type,
|
||||
req.recipe_id, req.servings, req.custom_label,
|
||||
)
|
||||
return _slot_summary(row)
|
||||
|
||||
|
||||
@router.delete("/{plan_id}/slots/{slot_id}", status_code=204)
|
||||
async def delete_slot(
|
||||
plan_id: int,
|
||||
slot_id: int,
|
||||
session: CloudUser = Depends(get_session),
|
||||
store: Store = Depends(get_store),
|
||||
) -> None:
|
||||
plan = await asyncio.to_thread(store.get_meal_plan, plan_id)
|
||||
if plan is None:
|
||||
raise HTTPException(status_code=404, detail="Plan not found.")
|
||||
await asyncio.to_thread(store.delete_slot, slot_id)
|
||||
|
||||
|
||||
# ── shopping list ─────────────────────────────────────────────────────────────
|
||||
|
||||
@router.get("/{plan_id}/shopping-list", response_model=ShoppingListResponse)
|
||||
async def get_shopping_list(
|
||||
plan_id: int,
|
||||
session: CloudUser = Depends(get_session),
|
||||
store: Store = Depends(get_store),
|
||||
) -> ShoppingListResponse:
|
||||
plan = await asyncio.to_thread(store.get_meal_plan, plan_id)
|
||||
if plan is None:
|
||||
raise HTTPException(status_code=404, detail="Plan not found.")
|
||||
|
||||
recipes = await asyncio.to_thread(store.get_plan_recipes, plan_id)
|
||||
inventory = await asyncio.to_thread(store.list_inventory)
|
||||
|
||||
gaps, covered = compute_shopping_list(recipes, inventory)
|
||||
|
||||
# Enrich gap items with retailer links
|
||||
def _to_schema(item, enrich: bool) -> GapItem:
|
||||
links = get_retailer_links(item.ingredient_name) if enrich else []
|
||||
return GapItem(
|
||||
ingredient_name=item.ingredient_name,
|
||||
needed_raw=item.needed_raw,
|
||||
have_quantity=item.have_quantity,
|
||||
have_unit=item.have_unit,
|
||||
covered=item.covered,
|
||||
retailer_links=links,
|
||||
)
|
||||
|
||||
gap_items = [_to_schema(g, enrich=True) for g in gaps]
|
||||
covered_items = [_to_schema(c, enrich=False) for c in covered]
|
||||
|
||||
disclosure = (
|
||||
"Some links may be affiliate links. Purchases through them support Kiwi development."
|
||||
if gap_items else None
|
||||
)
|
||||
|
||||
return ShoppingListResponse(
|
||||
plan_id=plan_id,
|
||||
gap_items=gap_items,
|
||||
covered_items=covered_items,
|
||||
disclosure=disclosure,
|
||||
)
|
||||
|
||||
|
||||
# ── prep session ──────────────────────────────────────────────────────────────
|
||||
|
||||
@router.get("/{plan_id}/prep-session", response_model=PrepSessionSummary)
|
||||
async def get_prep_session(
|
||||
plan_id: int,
|
||||
session: CloudUser = Depends(get_session),
|
||||
store: Store = Depends(get_store),
|
||||
) -> PrepSessionSummary:
|
||||
plan = await asyncio.to_thread(store.get_meal_plan, plan_id)
|
||||
if plan is None:
|
||||
raise HTTPException(status_code=404, detail="Plan not found.")
|
||||
prep_session = await asyncio.to_thread(store.get_prep_session_for_plan, plan_id)
|
||||
if prep_session is None:
|
||||
raise HTTPException(status_code=404, detail="No prep session for this plan.")
|
||||
raw_tasks = await asyncio.to_thread(store.get_prep_tasks, prep_session["id"])
|
||||
return PrepSessionSummary(
|
||||
id=prep_session["id"],
|
||||
plan_id=plan_id,
|
||||
scheduled_date=prep_session["scheduled_date"],
|
||||
status=prep_session["status"],
|
||||
tasks=[_prep_task_summary(t) for t in raw_tasks],
|
||||
)
|
||||
|
||||
|
||||
@router.post("/{plan_id}/prep-session", response_model=PrepSessionSummary)
|
||||
async def create_prep_session(
|
||||
plan_id: int,
|
||||
session: CloudUser = Depends(get_session),
|
||||
store: Store = Depends(get_store),
|
||||
) -> PrepSessionSummary:
|
||||
plan = await asyncio.to_thread(store.get_meal_plan, plan_id)
|
||||
if plan is None:
|
||||
raise HTTPException(status_code=404, detail="Plan not found.")
|
||||
|
||||
slots = await asyncio.to_thread(store.get_plan_slots, plan_id)
|
||||
recipes = await asyncio.to_thread(store.get_plan_recipes, plan_id)
|
||||
prep_tasks = build_prep_tasks(slots=slots, recipes=recipes)
|
||||
|
||||
scheduled_date = date.today().isoformat()
|
||||
prep_session = await asyncio.to_thread(
|
||||
store.create_prep_session, plan_id, scheduled_date
|
||||
)
|
||||
session_id = prep_session["id"]
|
||||
|
||||
task_dicts = [
|
||||
{
|
||||
"recipe_id": t.recipe_id,
|
||||
"slot_id": t.slot_id,
|
||||
"task_label": t.task_label,
|
||||
"duration_minutes": t.duration_minutes,
|
||||
"sequence_order": t.sequence_order,
|
||||
"equipment": t.equipment,
|
||||
"is_parallel": t.is_parallel,
|
||||
"notes": t.notes,
|
||||
}
|
||||
for t in prep_tasks
|
||||
]
|
||||
inserted = await asyncio.to_thread(store.bulk_insert_prep_tasks, session_id, task_dicts)
|
||||
|
||||
return PrepSessionSummary(
|
||||
id=prep_session["id"],
|
||||
plan_id=prep_session["plan_id"],
|
||||
scheduled_date=prep_session["scheduled_date"],
|
||||
status=prep_session["status"],
|
||||
tasks=[_prep_task_summary(r) for r in inserted],
|
||||
)
|
||||
|
||||
|
||||
@router.patch(
|
||||
"/{plan_id}/prep-session/tasks/{task_id}",
|
||||
response_model=PrepTaskSummary,
|
||||
)
|
||||
async def update_prep_task(
|
||||
plan_id: int,
|
||||
task_id: int,
|
||||
req: UpdatePrepTaskRequest,
|
||||
session: CloudUser = Depends(get_session),
|
||||
store: Store = Depends(get_store),
|
||||
) -> PrepTaskSummary:
|
||||
updated = await asyncio.to_thread(
|
||||
store.update_prep_task,
|
||||
task_id,
|
||||
duration_minutes=req.duration_minutes,
|
||||
sequence_order=req.sequence_order,
|
||||
notes=req.notes,
|
||||
equipment=req.equipment,
|
||||
)
|
||||
if updated is None:
|
||||
raise HTTPException(status_code=404, detail="Task not found.")
|
||||
return _prep_task_summary(updated)
|
||||
|
|
@ -219,7 +219,7 @@ def _commit_items(
|
|||
receipt_id=receipt_id,
|
||||
purchase_date=str(purchase_date) if purchase_date else None,
|
||||
expiration_date=str(exp) if exp else None,
|
||||
source="receipt",
|
||||
source="receipt_ocr",
|
||||
)
|
||||
|
||||
created.append(ApprovedInventoryItem(
|
||||
|
|
|
|||
|
|
@ -1,27 +0,0 @@
|
|||
"""Proxy endpoint: exposes cf-orch call budget to the Kiwi frontend.
|
||||
|
||||
Only lifetime/founders users have a license_key — subscription and free
|
||||
users receive null (no budget UI shown).
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from fastapi import APIRouter, Depends
|
||||
|
||||
from app.cloud_session import CloudUser, get_session
|
||||
from app.services.heimdall_orch import get_orch_usage
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
@router.get("")
|
||||
async def orch_usage_endpoint(
|
||||
session: CloudUser = Depends(get_session),
|
||||
) -> dict | None:
|
||||
"""Return the current period's orch usage for the authenticated user.
|
||||
|
||||
Returns null if the user has no lifetime/founders license key (i.e. they
|
||||
are on a subscription or free plan — no budget cap applies to them).
|
||||
"""
|
||||
if session.license_key is None:
|
||||
return None
|
||||
return get_orch_usage(session.license_key, "kiwi")
|
||||
|
|
@ -42,11 +42,9 @@ async def upload_receipt(
|
|||
)
|
||||
# Only queue OCR if the feature is enabled server-side AND the user's tier allows it.
|
||||
# Check tier here, not inside the background task — once dispatched it can't be cancelled.
|
||||
# Pass session.db (a Path) rather than store — the store dependency closes before
|
||||
# background tasks run, so the task opens its own store from the DB path.
|
||||
ocr_allowed = settings.ENABLE_OCR and can_use("receipt_ocr", session.tier, session.has_byok)
|
||||
if ocr_allowed:
|
||||
background_tasks.add_task(_process_receipt_ocr, receipt["id"], saved, session.db)
|
||||
background_tasks.add_task(_process_receipt_ocr, receipt["id"], saved, store)
|
||||
return ReceiptResponse.model_validate(receipt)
|
||||
|
||||
|
||||
|
|
@ -66,7 +64,7 @@ async def upload_receipts_batch(
|
|||
store.create_receipt, file.filename, str(saved)
|
||||
)
|
||||
if ocr_allowed:
|
||||
background_tasks.add_task(_process_receipt_ocr, receipt["id"], saved, session.db)
|
||||
background_tasks.add_task(_process_receipt_ocr, receipt["id"], saved, store)
|
||||
results.append(ReceiptResponse.model_validate(receipt))
|
||||
return results
|
||||
|
||||
|
|
@ -99,13 +97,8 @@ async def get_receipt_quality(receipt_id: int, store: Store = Depends(get_store)
|
|||
return QualityAssessment.model_validate(qa)
|
||||
|
||||
|
||||
async def _process_receipt_ocr(receipt_id: int, image_path: Path, db_path: Path) -> None:
|
||||
"""Background task: run OCR pipeline on an uploaded receipt.
|
||||
|
||||
Accepts db_path (not a Store instance) because FastAPI closes the request-scoped
|
||||
store before background tasks execute. This task owns its store lifecycle.
|
||||
"""
|
||||
store = Store(db_path)
|
||||
async def _process_receipt_ocr(receipt_id: int, image_path: Path, store: Store) -> None:
|
||||
"""Background task: run OCR pipeline on an uploaded receipt."""
|
||||
try:
|
||||
await asyncio.to_thread(store.update_receipt_status, receipt_id, "processing")
|
||||
from app.services.receipt_service import ReceiptService
|
||||
|
|
@ -115,5 +108,3 @@ async def _process_receipt_ocr(receipt_id: int, image_path: Path, db_path: Path)
|
|||
await asyncio.to_thread(
|
||||
store.update_receipt_status, receipt_id, "error", str(exc)
|
||||
)
|
||||
finally:
|
||||
store.close()
|
||||
|
|
|
|||
|
|
@ -1,166 +0,0 @@
|
|||
# app/api/endpoints/recipe_tags.py
|
||||
"""Community subcategory tagging for corpus recipes.
|
||||
|
||||
Users can tag a recipe they're viewing with a domain/category/subcategory
|
||||
from the browse taxonomy. Tags require a community pseudonym and reach
|
||||
public visibility once two independent users have tagged the same recipe
|
||||
to the same location (upvotes >= 2).
|
||||
|
||||
All tiers may submit and upvote tags — community contribution is free.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException
|
||||
from pydantic import BaseModel
|
||||
|
||||
from app.api.endpoints.community import _get_community_store
|
||||
from app.api.endpoints.session import get_session
|
||||
from app.cloud_session import CloudUser
|
||||
from app.services.recipe.browser_domains import DOMAINS
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
router = APIRouter()
|
||||
|
||||
ACCEPT_THRESHOLD = 2
|
||||
|
||||
|
||||
# ── Request / response models ──────────────────────────────────────────────────
|
||||
|
||||
class TagSubmitBody(BaseModel):
|
||||
recipe_id: int
|
||||
domain: str
|
||||
category: str
|
||||
subcategory: str | None = None
|
||||
pseudonym: str
|
||||
|
||||
|
||||
class TagResponse(BaseModel):
|
||||
id: int
|
||||
recipe_id: int
|
||||
domain: str
|
||||
category: str
|
||||
subcategory: str | None
|
||||
pseudonym: str
|
||||
upvotes: int
|
||||
accepted: bool
|
||||
|
||||
|
||||
def _to_response(row: dict) -> TagResponse:
|
||||
return TagResponse(
|
||||
id=row["id"],
|
||||
recipe_id=int(row["recipe_ref"]),
|
||||
domain=row["domain"],
|
||||
category=row["category"],
|
||||
subcategory=row.get("subcategory"),
|
||||
pseudonym=row["pseudonym"],
|
||||
upvotes=row["upvotes"],
|
||||
accepted=row["upvotes"] >= ACCEPT_THRESHOLD,
|
||||
)
|
||||
|
||||
|
||||
def _validate_location(domain: str, category: str, subcategory: str | None) -> None:
|
||||
"""Raise 422 if (domain, category, subcategory) isn't in the known taxonomy."""
|
||||
if domain not in DOMAINS:
|
||||
raise HTTPException(status_code=422, detail=f"Unknown domain '{domain}'.")
|
||||
cats = DOMAINS[domain].get("categories", {})
|
||||
if category not in cats:
|
||||
raise HTTPException(
|
||||
status_code=422,
|
||||
detail=f"Unknown category '{category}' in domain '{domain}'.",
|
||||
)
|
||||
if subcategory is not None:
|
||||
subcats = cats[category].get("subcategories", {})
|
||||
if subcategory not in subcats:
|
||||
raise HTTPException(
|
||||
status_code=422,
|
||||
detail=f"Unknown subcategory '{subcategory}' in '{domain}/{category}'.",
|
||||
)
|
||||
|
||||
|
||||
# ── Endpoints ──────────────────────────────────────────────────────────────────
|
||||
|
||||
@router.get("/recipes/community-tags/{recipe_id}", response_model=list[TagResponse])
|
||||
async def list_recipe_tags(
|
||||
recipe_id: int,
|
||||
session: CloudUser = Depends(get_session),
|
||||
) -> list[TagResponse]:
|
||||
"""Return all community tags for a corpus recipe, accepted ones first."""
|
||||
store = _get_community_store()
|
||||
if store is None:
|
||||
return []
|
||||
tags = store.list_tags_for_recipe(recipe_id)
|
||||
return [_to_response(r) for r in tags]
|
||||
|
||||
|
||||
@router.post("/recipes/community-tags", response_model=TagResponse, status_code=201)
|
||||
async def submit_recipe_tag(
|
||||
body: TagSubmitBody,
|
||||
session: CloudUser = Depends(get_session),
|
||||
) -> TagResponse:
|
||||
"""Tag a corpus recipe with a browse taxonomy location.
|
||||
|
||||
Requires the user to have a community pseudonym set. Returns 409 if this
|
||||
user has already tagged this recipe to this exact location.
|
||||
"""
|
||||
store = _get_community_store()
|
||||
if store is None:
|
||||
raise HTTPException(
|
||||
status_code=503,
|
||||
detail="Community features are not available on this instance.",
|
||||
)
|
||||
|
||||
_validate_location(body.domain, body.category, body.subcategory)
|
||||
|
||||
try:
|
||||
import psycopg2.errors # type: ignore[import]
|
||||
row = store.submit_recipe_tag(
|
||||
recipe_id=body.recipe_id,
|
||||
domain=body.domain,
|
||||
category=body.category,
|
||||
subcategory=body.subcategory,
|
||||
pseudonym=body.pseudonym,
|
||||
)
|
||||
return _to_response(row)
|
||||
except Exception as exc:
|
||||
if "unique" in str(exc).lower() or "UniqueViolation" in type(exc).__name__:
|
||||
raise HTTPException(
|
||||
status_code=409,
|
||||
detail="You have already tagged this recipe to this location.",
|
||||
)
|
||||
logger.error("submit_recipe_tag failed: %s", exc)
|
||||
raise HTTPException(status_code=500, detail="Failed to submit tag.")
|
||||
|
||||
|
||||
@router.post("/recipes/community-tags/{tag_id}/upvote", response_model=TagResponse)
|
||||
async def upvote_recipe_tag(
|
||||
tag_id: int,
|
||||
pseudonym: str,
|
||||
session: CloudUser = Depends(get_session),
|
||||
) -> TagResponse:
|
||||
"""Upvote an existing community tag.
|
||||
|
||||
Returns 409 if this pseudonym has already voted on this tag.
|
||||
Returns 404 if the tag doesn't exist.
|
||||
"""
|
||||
store = _get_community_store()
|
||||
if store is None:
|
||||
raise HTTPException(status_code=503, detail="Community features unavailable.")
|
||||
|
||||
tag_row = store.get_recipe_tag_by_id(tag_id)
|
||||
if tag_row is None:
|
||||
raise HTTPException(status_code=404, detail=f"Tag {tag_id} not found.")
|
||||
|
||||
try:
|
||||
new_upvotes = store.upvote_recipe_tag(tag_id, pseudonym)
|
||||
except ValueError:
|
||||
raise HTTPException(status_code=404, detail=f"Tag {tag_id} not found.")
|
||||
except Exception as exc:
|
||||
if "unique" in str(exc).lower() or "UniqueViolation" in type(exc).__name__:
|
||||
raise HTTPException(status_code=409, detail="You have already voted on this tag.")
|
||||
logger.error("upvote_recipe_tag failed: %s", exc)
|
||||
raise HTTPException(status_code=500, detail="Failed to upvote tag.")
|
||||
|
||||
tag_row["upvotes"] = new_upvotes
|
||||
return _to_response(tag_row)
|
||||
|
|
@ -1,49 +1,15 @@
|
|||
"""Recipe suggestion and browser endpoints."""
|
||||
"""Recipe suggestion endpoints."""
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
from pathlib import Path
|
||||
from typing import Annotated
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query
|
||||
from fastapi import APIRouter, Depends, HTTPException
|
||||
|
||||
from app.cloud_session import CloudUser, _auth_label, get_session
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
from app.db.session import get_store
|
||||
from app.cloud_session import CloudUser, get_session
|
||||
from app.db.store import Store
|
||||
from app.models.schemas.recipe import (
|
||||
AssemblyTemplateOut,
|
||||
BuildRequest,
|
||||
RecipeJobStatus,
|
||||
RecipeRequest,
|
||||
RecipeResult,
|
||||
RecipeSuggestion,
|
||||
RoleCandidatesResponse,
|
||||
StreamTokenRequest,
|
||||
StreamTokenResponse,
|
||||
)
|
||||
from app.services.coordinator_proxy import CoordinatorError, coordinator_authorize
|
||||
from app.api.endpoints.imitate import _build_recipe_prompt
|
||||
from app.services.recipe.assembly_recipes import (
|
||||
build_from_selection,
|
||||
get_role_candidates,
|
||||
get_templates_for_api,
|
||||
)
|
||||
from app.services.recipe.browser_domains import (
|
||||
DOMAINS,
|
||||
category_has_subcategories,
|
||||
get_category_names,
|
||||
get_domain_labels,
|
||||
get_keywords_for_category,
|
||||
get_keywords_for_subcategory,
|
||||
get_subcategory_names,
|
||||
)
|
||||
from app.models.schemas.recipe import RecipeRequest, RecipeResult
|
||||
from app.services.recipe.recipe_engine import RecipeEngine
|
||||
from app.services.recipe.time_effort import parse_time_effort
|
||||
from app.services.recipe.sensory import build_sensory_exclude
|
||||
from app.services.heimdall_orch import check_orch_budget
|
||||
from app.tiers import can_use
|
||||
|
||||
router = APIRouter()
|
||||
|
|
@ -64,93 +30,13 @@ def _suggest_in_thread(db_path: Path, req: RecipeRequest) -> RecipeResult:
|
|||
store.close()
|
||||
|
||||
|
||||
def _build_stream_prompt(db_path: Path, level: int) -> str:
|
||||
"""Fetch pantry + user settings from DB and build the recipe prompt.
|
||||
|
||||
Runs in a thread (called via asyncio.to_thread) so it can use sync Store.
|
||||
"""
|
||||
import datetime
|
||||
|
||||
store = Store(db_path)
|
||||
try:
|
||||
items = store.list_inventory(status="available")
|
||||
pantry_names = [i["product_name"] for i in items if i.get("product_name")]
|
||||
|
||||
today = datetime.date.today()
|
||||
expiring_names = [
|
||||
i["product_name"]
|
||||
for i in items
|
||||
if i.get("product_name")
|
||||
and i.get("expiry_date")
|
||||
and (datetime.date.fromisoformat(i["expiry_date"]) - today).days <= 3
|
||||
]
|
||||
|
||||
settings: dict = {}
|
||||
try:
|
||||
rows = store.conn.execute("SELECT key, value FROM user_settings").fetchall()
|
||||
settings = {r["key"]: r["value"] for r in rows}
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
constraints_raw = settings.get("dietary_constraints", "")
|
||||
constraints = [c.strip() for c in constraints_raw.split(",") if c.strip()] if constraints_raw else []
|
||||
allergies_raw = settings.get("allergies", "")
|
||||
allergies = [a.strip() for a in allergies_raw.split(",") if a.strip()] if allergies_raw else []
|
||||
|
||||
return _build_recipe_prompt(pantry_names, expiring_names, constraints, allergies, level)
|
||||
finally:
|
||||
store.close()
|
||||
|
||||
|
||||
async def _enqueue_recipe_job(session: CloudUser, req: RecipeRequest):
|
||||
"""Queue an async recipe_llm job and return 202 with job_id.
|
||||
|
||||
Falls back to synchronous generation in CLOUD_MODE (scheduler polls only
|
||||
the shared settings DB, not per-user DBs — see snipe#45 / kiwi backlog).
|
||||
"""
|
||||
import json
|
||||
import uuid
|
||||
from fastapi.responses import JSONResponse
|
||||
from app.cloud_session import CLOUD_MODE
|
||||
from app.tasks.runner import insert_task
|
||||
|
||||
if CLOUD_MODE:
|
||||
log.warning("recipe_llm async jobs not supported in CLOUD_MODE — falling back to sync")
|
||||
result = await asyncio.to_thread(_suggest_in_thread, session.db, req)
|
||||
return result
|
||||
|
||||
job_id = f"rec_{uuid.uuid4().hex}"
|
||||
|
||||
def _create(db_path: Path) -> int:
|
||||
store = Store(db_path)
|
||||
try:
|
||||
row = store.create_recipe_job(job_id, session.user_id, req.model_dump_json())
|
||||
return row["id"]
|
||||
finally:
|
||||
store.close()
|
||||
|
||||
int_id = await asyncio.to_thread(_create, session.db)
|
||||
params_json = json.dumps({"job_id": job_id})
|
||||
task_id, is_new = insert_task(session.db, "recipe_llm", int_id, params=params_json)
|
||||
if is_new:
|
||||
from app.tasks.scheduler import get_scheduler
|
||||
get_scheduler(session.db).enqueue(task_id, "recipe_llm", int_id, params_json)
|
||||
|
||||
return JSONResponse(content={"job_id": job_id, "status": "queued"}, status_code=202)
|
||||
|
||||
|
||||
@router.post("/suggest")
|
||||
@router.post("/suggest", response_model=RecipeResult)
|
||||
async def suggest_recipes(
|
||||
req: RecipeRequest,
|
||||
async_mode: bool = Query(default=False, alias="async"),
|
||||
session: CloudUser = Depends(get_session),
|
||||
store: Store = Depends(get_store),
|
||||
):
|
||||
log.info("recipes auth=%s tier=%s level=%s", _auth_label(session.user_id), session.tier, req.level)
|
||||
) -> RecipeResult:
|
||||
# Inject session-authoritative tier/byok immediately — client-supplied values are ignored.
|
||||
# Also read stored unit_system preference; default to metric if not set.
|
||||
unit_system = store.get_setting("unit_system") or "metric"
|
||||
req = req.model_copy(update={"tier": session.tier, "has_byok": session.has_byok, "unit_system": unit_system})
|
||||
req = req.model_copy(update={"tier": session.tier, "has_byok": session.has_byok})
|
||||
if req.level == 4 and not req.wildcard_confirmed:
|
||||
raise HTTPException(
|
||||
status_code=400,
|
||||
|
|
@ -163,383 +49,7 @@ async def suggest_recipes(
|
|||
)
|
||||
if req.style_id and not can_use("style_picker", req.tier):
|
||||
raise HTTPException(status_code=403, detail="Style picker requires Paid tier.")
|
||||
|
||||
# Orch budget check for lifetime/founders keys — downgrade to L2 (local) if exhausted.
|
||||
# Subscription and local/BYOK users skip this check entirely.
|
||||
orch_fallback = False
|
||||
if (
|
||||
req.level in (3, 4)
|
||||
and session.license_key is not None
|
||||
and not session.has_byok
|
||||
and session.tier != "local"
|
||||
):
|
||||
budget = check_orch_budget(session.license_key, "kiwi")
|
||||
if not budget.get("allowed", True):
|
||||
req = req.model_copy(update={"level": 2})
|
||||
orch_fallback = True
|
||||
|
||||
if req.level in (3, 4) and async_mode:
|
||||
return await _enqueue_recipe_job(session, req)
|
||||
|
||||
result = await asyncio.to_thread(_suggest_in_thread, session.db, req)
|
||||
if orch_fallback:
|
||||
result = result.model_copy(update={"orch_fallback": True})
|
||||
return result
|
||||
|
||||
|
||||
@router.post("/stream-token", response_model=StreamTokenResponse)
|
||||
async def get_stream_token(
|
||||
req: StreamTokenRequest,
|
||||
session: CloudUser = Depends(get_session),
|
||||
) -> StreamTokenResponse:
|
||||
"""Issue a one-time stream token for LLM recipe generation.
|
||||
|
||||
Tier-gated (Paid or BYOK). Builds the prompt from pantry + user settings,
|
||||
then calls the cf-orch coordinator to obtain a stream URL. Returns
|
||||
immediately — the frontend opens EventSource to the stream URL directly.
|
||||
"""
|
||||
if not can_use("recipe_suggestions", session.tier, session.has_byok):
|
||||
raise HTTPException(
|
||||
status_code=403,
|
||||
detail="Streaming recipe generation requires Paid tier or a configured LLM backend.",
|
||||
)
|
||||
if req.level == 4 and not req.wildcard_confirmed:
|
||||
raise HTTPException(
|
||||
status_code=400,
|
||||
detail="Level 4 (Wildcard) streaming requires wildcard_confirmed=true.",
|
||||
)
|
||||
|
||||
prompt = await asyncio.to_thread(_build_stream_prompt, session.db, req.level)
|
||||
|
||||
try:
|
||||
result = await coordinator_authorize(prompt=prompt, caller="kiwi-recipe", ttl_s=300)
|
||||
except CoordinatorError as exc:
|
||||
raise HTTPException(status_code=exc.status_code, detail=str(exc))
|
||||
|
||||
return StreamTokenResponse(
|
||||
stream_url=result.stream_url,
|
||||
token=result.token,
|
||||
expires_in_s=result.expires_in_s,
|
||||
)
|
||||
|
||||
|
||||
@router.get("/jobs/{job_id}", response_model=RecipeJobStatus)
|
||||
async def get_recipe_job_status(
|
||||
job_id: str,
|
||||
session: CloudUser = Depends(get_session),
|
||||
) -> RecipeJobStatus:
|
||||
"""Poll the status of an async recipe generation job.
|
||||
|
||||
Returns 404 when job_id is unknown or belongs to a different user.
|
||||
On status='done' with suggestions=[], the LLM returned empty — client
|
||||
should show a 'no recipe generated, try again' message.
|
||||
"""
|
||||
def _get(db_path: Path) -> dict | None:
|
||||
store = Store(db_path)
|
||||
try:
|
||||
return store.get_recipe_job(job_id, session.user_id)
|
||||
finally:
|
||||
store.close()
|
||||
|
||||
row = await asyncio.to_thread(_get, session.db)
|
||||
if row is None:
|
||||
raise HTTPException(status_code=404, detail="Job not found.")
|
||||
|
||||
result = None
|
||||
if row["status"] == "done" and row["result"]:
|
||||
result = RecipeResult.model_validate_json(row["result"])
|
||||
|
||||
return RecipeJobStatus(
|
||||
job_id=row["job_id"],
|
||||
status=row["status"],
|
||||
result=result,
|
||||
error=row["error"],
|
||||
)
|
||||
|
||||
|
||||
@router.get("/browse/domains")
|
||||
async def list_browse_domains(
|
||||
session: CloudUser = Depends(get_session),
|
||||
) -> list[dict]:
|
||||
"""Return available domain schemas for the recipe browser."""
|
||||
return get_domain_labels()
|
||||
|
||||
|
||||
@router.get("/browse/{domain}")
|
||||
async def list_browse_categories(
|
||||
domain: str,
|
||||
session: CloudUser = Depends(get_session),
|
||||
) -> list[dict]:
|
||||
"""Return categories with recipe counts for a given domain."""
|
||||
if domain not in DOMAINS:
|
||||
raise HTTPException(status_code=404, detail=f"Unknown domain '{domain}'.")
|
||||
|
||||
cat_names = get_category_names(domain)
|
||||
keywords_by_category = {cat: get_keywords_for_category(domain, cat) for cat in cat_names}
|
||||
has_subs = {cat: category_has_subcategories(domain, cat) for cat in cat_names}
|
||||
|
||||
def _get(db_path: Path) -> list[dict]:
|
||||
store = Store(db_path)
|
||||
try:
|
||||
return store.get_browser_categories(domain, keywords_by_category, has_subs)
|
||||
finally:
|
||||
store.close()
|
||||
|
||||
return await asyncio.to_thread(_get, session.db)
|
||||
|
||||
|
||||
@router.get("/browse/{domain}/{category}/subcategories")
|
||||
async def list_browse_subcategories(
|
||||
domain: str,
|
||||
category: str,
|
||||
session: CloudUser = Depends(get_session),
|
||||
) -> list[dict]:
|
||||
"""Return [{subcategory, recipe_count}] for a category that supports subcategories."""
|
||||
if domain not in DOMAINS:
|
||||
raise HTTPException(status_code=404, detail=f"Unknown domain '{domain}'.")
|
||||
if not category_has_subcategories(domain, category):
|
||||
return []
|
||||
|
||||
subcat_names = get_subcategory_names(domain, category)
|
||||
keywords_by_subcat = {
|
||||
sub: get_keywords_for_subcategory(domain, category, sub)
|
||||
for sub in subcat_names
|
||||
}
|
||||
|
||||
def _get(db_path: Path) -> list[dict]:
|
||||
store = Store(db_path)
|
||||
try:
|
||||
return store.get_browser_subcategories(domain, keywords_by_subcat)
|
||||
finally:
|
||||
store.close()
|
||||
|
||||
return await asyncio.to_thread(_get, session.db)
|
||||
|
||||
|
||||
@router.get("/browse/{domain}/{category}")
|
||||
async def browse_recipes(
|
||||
domain: str,
|
||||
category: str,
|
||||
page: Annotated[int, Query(ge=1)] = 1,
|
||||
page_size: Annotated[int, Query(ge=1, le=100)] = 20,
|
||||
pantry_items: Annotated[str | None, Query()] = None,
|
||||
subcategory: Annotated[str | None, Query()] = None,
|
||||
q: Annotated[str | None, Query(max_length=200)] = None,
|
||||
sort: Annotated[str, Query(pattern="^(default|alpha|alpha_desc|match)$")] = "default",
|
||||
session: CloudUser = Depends(get_session),
|
||||
) -> dict:
|
||||
"""Return a paginated list of recipes for a domain/category.
|
||||
|
||||
Pass pantry_items as a comma-separated string to receive match_pct badges.
|
||||
Pass subcategory to narrow within a category that has subcategories.
|
||||
Pass q to filter by title substring. Pass sort for ordering (default/alpha/alpha_desc/match).
|
||||
sort=match orders by pantry coverage DESC; falls back to default when no pantry_items.
|
||||
"""
|
||||
if domain not in DOMAINS:
|
||||
raise HTTPException(status_code=404, detail=f"Unknown domain '{domain}'.")
|
||||
|
||||
if category == "_all":
|
||||
keywords = None # unfiltered browse
|
||||
elif subcategory:
|
||||
keywords = get_keywords_for_subcategory(domain, category, subcategory)
|
||||
if not keywords:
|
||||
raise HTTPException(
|
||||
status_code=404,
|
||||
detail=f"Unknown subcategory '{subcategory}' in '{category}'.",
|
||||
)
|
||||
else:
|
||||
keywords = get_keywords_for_category(domain, category)
|
||||
if not keywords:
|
||||
raise HTTPException(
|
||||
status_code=404,
|
||||
detail=f"Unknown category '{category}' in domain '{domain}'.",
|
||||
)
|
||||
|
||||
pantry_list = (
|
||||
[p.strip() for p in pantry_items.split(",") if p.strip()]
|
||||
if pantry_items
|
||||
else None
|
||||
)
|
||||
|
||||
def _browse(db_path: Path) -> dict:
|
||||
store = Store(db_path)
|
||||
try:
|
||||
# Load sensory preferences
|
||||
sensory_prefs_json = store.get_setting("sensory_preferences")
|
||||
sensory_exclude = build_sensory_exclude(sensory_prefs_json)
|
||||
|
||||
result = store.browse_recipes(
|
||||
keywords=keywords,
|
||||
page=page,
|
||||
page_size=page_size,
|
||||
pantry_items=pantry_list,
|
||||
q=q or None,
|
||||
sort=sort,
|
||||
sensory_exclude=sensory_exclude,
|
||||
)
|
||||
|
||||
# ── Attach time/effort signals to each browse result ────────────────
|
||||
import json as _json
|
||||
for recipe_row in result.get("recipes", []):
|
||||
directions_raw = recipe_row.get("directions") or []
|
||||
if isinstance(directions_raw, str):
|
||||
try:
|
||||
directions_raw = _json.loads(directions_raw)
|
||||
except Exception:
|
||||
directions_raw = []
|
||||
if directions_raw:
|
||||
_profile = parse_time_effort(directions_raw)
|
||||
recipe_row["active_min"] = _profile.active_min
|
||||
recipe_row["passive_min"] = _profile.passive_min
|
||||
else:
|
||||
recipe_row["active_min"] = None
|
||||
recipe_row["passive_min"] = None
|
||||
# Remove directions from browse payload — not needed by the card UI
|
||||
recipe_row.pop("directions", None)
|
||||
|
||||
# Community tag fallback: if FTS returned nothing for a subcategory,
|
||||
# check whether accepted community tags exist for this location and
|
||||
# fetch those corpus recipes directly by ID.
|
||||
if result["total"] == 0 and subcategory and keywords:
|
||||
try:
|
||||
from app.api.endpoints.community import _get_community_store
|
||||
cs = _get_community_store()
|
||||
if cs is not None:
|
||||
community_ids = cs.get_accepted_recipe_ids_for_subcategory(
|
||||
domain=domain,
|
||||
category=category,
|
||||
subcategory=subcategory,
|
||||
)
|
||||
if community_ids:
|
||||
offset = (page - 1) * page_size
|
||||
paged_ids = community_ids[offset: offset + page_size]
|
||||
recipes = store.fetch_recipes_by_ids(paged_ids, pantry_list)
|
||||
import json as _json_c
|
||||
for recipe_row in recipes:
|
||||
directions_raw = recipe_row.get("directions") or []
|
||||
if isinstance(directions_raw, str):
|
||||
try:
|
||||
directions_raw = _json_c.loads(directions_raw)
|
||||
except Exception:
|
||||
directions_raw = []
|
||||
if directions_raw:
|
||||
_profile = parse_time_effort(directions_raw)
|
||||
recipe_row["active_min"] = _profile.active_min
|
||||
recipe_row["passive_min"] = _profile.passive_min
|
||||
else:
|
||||
recipe_row["active_min"] = None
|
||||
recipe_row["passive_min"] = None
|
||||
recipe_row.pop("directions", None)
|
||||
result = {
|
||||
"recipes": recipes,
|
||||
"total": len(community_ids),
|
||||
"page": page,
|
||||
"community_tagged": True,
|
||||
}
|
||||
except Exception as exc:
|
||||
logger.warning("community tag fallback failed: %s", exc)
|
||||
|
||||
store.log_browser_telemetry(
|
||||
domain=domain,
|
||||
category=category,
|
||||
page=page,
|
||||
result_count=result["total"],
|
||||
)
|
||||
return result
|
||||
finally:
|
||||
store.close()
|
||||
|
||||
return await asyncio.to_thread(_browse, session.db)
|
||||
|
||||
|
||||
@router.get("/templates", response_model=list[AssemblyTemplateOut])
|
||||
async def list_assembly_templates() -> list[dict]:
|
||||
"""Return all 13 assembly templates with ordered role sequences.
|
||||
|
||||
Cache-friendly: static data, no per-user state.
|
||||
"""
|
||||
return get_templates_for_api()
|
||||
|
||||
|
||||
@router.get("/template-candidates", response_model=RoleCandidatesResponse)
|
||||
async def get_template_role_candidates(
|
||||
template_id: str = Query(..., description="Template slug, e.g. 'burrito_taco'"),
|
||||
role: str = Query(..., description="Role display name, e.g. 'protein'"),
|
||||
prior_picks: str = Query(default="", description="Comma-separated prior selections"),
|
||||
session: CloudUser = Depends(get_session),
|
||||
) -> dict:
|
||||
"""Return pantry-matched candidates for one wizard step."""
|
||||
def _get(db_path: Path) -> dict:
|
||||
store = Store(db_path)
|
||||
try:
|
||||
items = store.list_inventory(status="available")
|
||||
pantry_set = {
|
||||
item["product_name"]
|
||||
for item in items
|
||||
if item.get("product_name")
|
||||
}
|
||||
pantry_list = list(pantry_set)
|
||||
prior = [p.strip() for p in prior_picks.split(",") if p.strip()]
|
||||
profile_index = store.get_element_profiles(pantry_list + prior)
|
||||
return get_role_candidates(
|
||||
template_slug=template_id,
|
||||
role_display=role,
|
||||
pantry_set=pantry_set,
|
||||
prior_picks=prior,
|
||||
profile_index=profile_index,
|
||||
)
|
||||
finally:
|
||||
store.close()
|
||||
|
||||
return await asyncio.to_thread(_get, session.db)
|
||||
|
||||
|
||||
@router.post("/build", response_model=RecipeSuggestion)
|
||||
async def build_recipe(
|
||||
req: BuildRequest,
|
||||
session: CloudUser = Depends(get_session),
|
||||
) -> RecipeSuggestion:
|
||||
"""Build a recipe from explicit role selections."""
|
||||
def _build(db_path: Path) -> RecipeSuggestion | None:
|
||||
store = Store(db_path)
|
||||
try:
|
||||
items = store.list_inventory(status="available")
|
||||
pantry_set = {
|
||||
item["product_name"]
|
||||
for item in items
|
||||
if item.get("product_name")
|
||||
}
|
||||
suggestion = build_from_selection(
|
||||
template_slug=req.template_id,
|
||||
role_overrides=req.role_overrides,
|
||||
pantry_set=pantry_set,
|
||||
)
|
||||
if suggestion is None:
|
||||
return None
|
||||
# Persist to recipes table so the result can be saved/bookmarked.
|
||||
# external_id encodes template + selections for stable dedup.
|
||||
import hashlib as _hl, json as _js
|
||||
sel_hash = _hl.md5(
|
||||
_js.dumps(req.role_overrides, sort_keys=True).encode()
|
||||
).hexdigest()[:8]
|
||||
external_id = f"assembly:{req.template_id}:{sel_hash}"
|
||||
real_id = store.upsert_built_recipe(
|
||||
external_id=external_id,
|
||||
title=suggestion.title,
|
||||
ingredients=suggestion.matched_ingredients,
|
||||
directions=suggestion.directions,
|
||||
)
|
||||
return suggestion.model_copy(update={"id": real_id})
|
||||
finally:
|
||||
store.close()
|
||||
|
||||
result = await asyncio.to_thread(_build, session.db)
|
||||
if result is None:
|
||||
raise HTTPException(
|
||||
status_code=404,
|
||||
detail="Template not found or required ingredient missing.",
|
||||
)
|
||||
return result
|
||||
return await asyncio.to_thread(_suggest_in_thread, session.db, req)
|
||||
|
||||
|
||||
@router.get("/{recipe_id}")
|
||||
|
|
@ -554,57 +64,4 @@ async def get_recipe(recipe_id: int, session: CloudUser = Depends(get_session))
|
|||
recipe = await asyncio.to_thread(_get, session.db, recipe_id)
|
||||
if not recipe:
|
||||
raise HTTPException(status_code=404, detail="Recipe not found.")
|
||||
|
||||
# Normalize corpus record into RecipeSuggestion shape so RecipeDetailPanel
|
||||
# can render it without knowing it came from a direct DB lookup.
|
||||
ingredient_names = recipe.get("ingredient_names") or []
|
||||
if isinstance(ingredient_names, str):
|
||||
import json as _json
|
||||
try:
|
||||
ingredient_names = _json.loads(ingredient_names)
|
||||
except Exception:
|
||||
ingredient_names = []
|
||||
|
||||
_directions_for_te = recipe.get("directions") or []
|
||||
if isinstance(_directions_for_te, str):
|
||||
import json as _json2
|
||||
try:
|
||||
_directions_for_te = _json2.loads(_directions_for_te)
|
||||
except Exception:
|
||||
_directions_for_te = []
|
||||
|
||||
if _directions_for_te:
|
||||
_te = parse_time_effort(_directions_for_te)
|
||||
_time_effort_out: dict | None = {
|
||||
"active_min": _te.active_min,
|
||||
"passive_min": _te.passive_min,
|
||||
"total_min": _te.total_min,
|
||||
"effort_label": _te.effort_label,
|
||||
"equipment": _te.equipment,
|
||||
"step_analyses": [
|
||||
{"is_passive": sa.is_passive, "detected_minutes": sa.detected_minutes}
|
||||
for sa in _te.step_analyses
|
||||
],
|
||||
}
|
||||
else:
|
||||
_time_effort_out = None
|
||||
|
||||
return {
|
||||
"id": recipe.get("id"),
|
||||
"title": recipe.get("title", ""),
|
||||
"match_count": 0,
|
||||
"matched_ingredients": ingredient_names,
|
||||
"missing_ingredients": [],
|
||||
"directions": recipe.get("directions") or [],
|
||||
"prep_notes": [],
|
||||
"swap_candidates": [],
|
||||
"element_coverage": {},
|
||||
"notes": recipe.get("notes") or "",
|
||||
"level": 1,
|
||||
"is_wildcard": False,
|
||||
"nutrition": None,
|
||||
"source_url": recipe.get("source_url") or None,
|
||||
"complexity": None,
|
||||
"estimated_time_min": None,
|
||||
"time_effort": _time_effort_out,
|
||||
}
|
||||
return recipe
|
||||
|
|
|
|||
|
|
@ -1,188 +0,0 @@
|
|||
"""Saved recipe bookmark endpoints."""
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from pathlib import Path
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException
|
||||
|
||||
from app.cloud_session import CloudUser, get_session
|
||||
from app.db.store import Store
|
||||
from app.models.schemas.saved_recipe import (
|
||||
CollectionMemberRequest,
|
||||
CollectionRequest,
|
||||
CollectionSummary,
|
||||
SavedRecipeSummary,
|
||||
SaveRecipeRequest,
|
||||
UpdateSavedRecipeRequest,
|
||||
)
|
||||
from app.tiers import can_use
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
def _in_thread(db_path: Path, fn):
|
||||
"""Run a Store operation in a worker thread with its own connection."""
|
||||
store = Store(db_path)
|
||||
try:
|
||||
return fn(store)
|
||||
finally:
|
||||
store.close()
|
||||
|
||||
|
||||
def _to_summary(row: dict, store: Store) -> SavedRecipeSummary:
    """Build the API summary model for one saved-recipe row."""
    return SavedRecipeSummary(
        id=row["id"],
        recipe_id=row["recipe_id"],
        title=row.get("title", ""),
        saved_at=row["saved_at"],
        notes=row.get("notes"),
        rating=row.get("rating"),
        style_tags=row.get("style_tags") or [],
        collection_ids=store.get_saved_recipe_collection_ids(row["id"]),
    )
|
||||
|
||||
|
||||
# ── save / unsave ─────────────────────────────────────────────────────────────
|
||||
|
||||
@router.post("", response_model=SavedRecipeSummary)
async def save_recipe(
    req: SaveRecipeRequest,
    session: CloudUser = Depends(get_session),
) -> SavedRecipeSummary:
    """Bookmark a recipe for the current user and return its summary."""

    def _op(store: Store) -> SavedRecipeSummary:
        row = store.save_recipe(req.recipe_id, req.notes, req.rating)
        return _to_summary(row, store)

    # All Store work happens off the event loop, on a per-call connection.
    return await asyncio.to_thread(_in_thread, session.db, _op)
|
||||
|
||||
|
||||
@router.delete("/{recipe_id}", status_code=204)
async def unsave_recipe(
    recipe_id: int,
    session: CloudUser = Depends(get_session),
) -> None:
    """Remove the bookmark for *recipe_id* (204 No Content)."""

    def _op(store: Store) -> None:
        store.unsave_recipe(recipe_id)

    await asyncio.to_thread(_in_thread, session.db, _op)
|
||||
|
||||
|
||||
@router.patch("/{recipe_id}", response_model=SavedRecipeSummary)
async def update_saved_recipe(
    recipe_id: int,
    req: UpdateSavedRecipeRequest,
    session: CloudUser = Depends(get_session),
) -> SavedRecipeSummary:
    """Edit notes, rating, or style tags on an existing bookmark.

    Raises 404 when the recipe is not currently saved.
    """

    def _op(store: Store) -> SavedRecipeSummary:
        if not store.is_recipe_saved(recipe_id):
            raise HTTPException(status_code=404, detail="Recipe not saved.")
        updated = store.update_saved_recipe(
            recipe_id, req.notes, req.rating, req.style_tags
        )
        return _to_summary(updated, store)

    return await asyncio.to_thread(_in_thread, session.db, _op)
|
||||
|
||||
|
||||
@router.get("", response_model=list[SavedRecipeSummary])
async def list_saved_recipes(
    sort_by: str = "saved_at",
    collection_id: int | None = None,
    session: CloudUser = Depends(get_session),
) -> list[SavedRecipeSummary]:
    """List the user's bookmarks, optionally filtered to one collection."""

    def _op(store: Store) -> list[SavedRecipeSummary]:
        return [
            _to_summary(row, store)
            for row in store.get_saved_recipes(
                sort_by=sort_by, collection_id=collection_id
            )
        ]

    return await asyncio.to_thread(_in_thread, session.db, _op)
|
||||
|
||||
|
||||
# ── collections (Paid) ────────────────────────────────────────────────────────
|
||||
|
||||
@router.get("/collections", response_model=list[CollectionSummary])
async def list_collections(
    session: CloudUser = Depends(get_session),
) -> list[CollectionSummary]:
    """List the user's recipe collections (Paid-tier feature)."""
    if not can_use("recipe_collections", session.tier):
        raise HTTPException(status_code=403, detail="Collections require Paid tier.")

    def _op(store: Store):
        return store.get_collections()

    rows = await asyncio.to_thread(_in_thread, session.db, _op)
    return [CollectionSummary(**row) for row in rows]
|
||||
|
||||
|
||||
@router.post("/collections", response_model=CollectionSummary)
async def create_collection(
    req: CollectionRequest,
    session: CloudUser = Depends(get_session),
) -> CollectionSummary:
    """Create a named collection (Paid-tier feature)."""
    if not can_use("recipe_collections", session.tier):
        raise HTTPException(status_code=403, detail="Collections require Paid tier.")

    def _op(store: Store):
        return store.create_collection(req.name, req.description)

    created = await asyncio.to_thread(_in_thread, session.db, _op)
    return CollectionSummary(**created)
|
||||
|
||||
|
||||
@router.delete("/collections/{collection_id}", status_code=204)
async def delete_collection(
    collection_id: int,
    session: CloudUser = Depends(get_session),
) -> None:
    """Delete a collection (Paid-tier feature)."""
    if not can_use("recipe_collections", session.tier):
        raise HTTPException(status_code=403, detail="Collections require Paid tier.")

    def _op(store: Store) -> None:
        store.delete_collection(collection_id)

    await asyncio.to_thread(_in_thread, session.db, _op)
|
||||
|
||||
|
||||
@router.patch("/collections/{collection_id}", response_model=CollectionSummary)
async def rename_collection(
    collection_id: int,
    req: CollectionRequest,
    session: CloudUser = Depends(get_session),
) -> CollectionSummary:
    """Rename or re-describe a collection (Paid); 404 when it doesn't exist."""
    if not can_use("recipe_collections", session.tier):
        raise HTTPException(status_code=403, detail="Collections require Paid tier.")

    def _op(store: Store):
        return store.rename_collection(collection_id, req.name, req.description)

    updated = await asyncio.to_thread(_in_thread, session.db, _op)
    if not updated:
        raise HTTPException(status_code=404, detail="Collection not found.")
    return CollectionSummary(**updated)
|
||||
|
||||
|
||||
@router.post("/collections/{collection_id}/members", status_code=204)
async def add_to_collection(
    collection_id: int,
    req: CollectionMemberRequest,
    session: CloudUser = Depends(get_session),
) -> None:
    """Add a saved recipe to a collection (Paid-tier feature)."""
    if not can_use("recipe_collections", session.tier):
        raise HTTPException(status_code=403, detail="Collections require Paid tier.")

    def _op(store: Store) -> None:
        store.add_to_collection(collection_id, req.saved_recipe_id)

    await asyncio.to_thread(_in_thread, session.db, _op)
|
||||
|
||||
|
||||
@router.delete(
    "/collections/{collection_id}/members/{saved_recipe_id}", status_code=204
)
async def remove_from_collection(
    collection_id: int,
    saved_recipe_id: int,
    session: CloudUser = Depends(get_session),
) -> None:
    """Remove a saved recipe from a collection (Paid-tier feature)."""
    if not can_use("recipe_collections", session.tier):
        raise HTTPException(status_code=403, detail="Collections require Paid tier.")

    def _op(store: Store) -> None:
        store.remove_from_collection(collection_id, saved_recipe_id)

    await asyncio.to_thread(_in_thread, session.db, _op)
|
||||
|
|
@ -1,37 +0,0 @@
|
|||
"""Session bootstrap endpoint — called once per app load by the frontend.
|
||||
|
||||
Logs auth= + tier= for log-based analytics without client-side tracking.
|
||||
See Circuit-Forge/kiwi#86.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
|
||||
from fastapi import APIRouter, Depends
|
||||
|
||||
from app.cloud_session import CloudUser, _auth_label, get_session
|
||||
from app.core.config import settings
|
||||
|
||||
router = APIRouter()
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@router.get("/bootstrap")
def session_bootstrap(session: CloudUser = Depends(get_session)) -> dict:
    """Record auth type and tier for log-based analytics.

    Expected log output:
        INFO:app.api.endpoints.session: session auth=authed tier=paid
        INFO:app.api.endpoints.session: session auth=anon tier=free

    E2E test sessions (E2E_TEST_USER_ID) are logged at DEBUG so they don't
    pollute analytics counts while still being visible when DEBUG=true.
    """
    is_test = bool(
        settings.E2E_TEST_USER_ID
        and session.user_id == settings.E2E_TEST_USER_ID
    )
    auth = _auth_label(session.user_id)
    suffix = " e2e=true" if is_test else ""
    # E2E traffic goes to DEBUG; real sessions are counted at INFO.
    emit = log.debug if is_test else log.info
    emit("session auth=%s tier=%s%s", auth, session.tier, suffix)
    return {
        "auth": auth,
        "tier": session.tier,
        "has_byok": session.has_byok,
    }
|
||||
|
|
@ -10,7 +10,7 @@ from app.db.store import Store
|
|||
|
||||
router = APIRouter()
|
||||
|
||||
_ALLOWED_KEYS = frozenset({"cooking_equipment", "unit_system", "shopping_locale", "sensory_preferences", "time_first_layout"})
|
||||
_ALLOWED_KEYS = frozenset({"cooking_equipment"})
|
||||
|
||||
|
||||
class SettingBody(BaseModel):
|
||||
|
|
|
|||
|
|
@ -1,233 +0,0 @@
|
|||
"""Shopping list endpoints.
|
||||
|
||||
Free tier for all users (anonymous guests included — shopping list is the
|
||||
primary affiliate revenue surface). Confirm-purchase action is also Free:
|
||||
it moves a checked item into pantry inventory without a tier gate so the
|
||||
flow works for anyone who signs up or browses without an account.
|
||||
|
||||
Routes:
|
||||
GET /shopping — list items (with affiliate links)
|
||||
POST /shopping — add item manually
|
||||
PATCH /shopping/{id} — update (check/uncheck, rename, qty)
|
||||
DELETE /shopping/{id} — remove single item
|
||||
DELETE /shopping/checked — clear all checked items
|
||||
DELETE /shopping/all — clear entire list
|
||||
POST /shopping/from-recipe — bulk add gaps from a recipe
|
||||
POST /shopping/{id}/confirm — confirm purchase → add to pantry inventory
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, status
|
||||
|
||||
from app.cloud_session import CloudUser, get_session
|
||||
from app.db.session import get_store
|
||||
from app.db.store import Store
|
||||
from app.models.schemas.shopping import (
|
||||
BulkAddFromRecipeRequest,
|
||||
ConfirmPurchaseRequest,
|
||||
ShoppingItemCreate,
|
||||
ShoppingItemResponse,
|
||||
ShoppingItemUpdate,
|
||||
)
|
||||
from app.services.recipe.grocery_links import GroceryLinkBuilder
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
def _enrich(item: dict, builder: GroceryLinkBuilder) -> ShoppingItemResponse:
    """Attach live affiliate links to a raw store row."""
    link_rows = [
        {"ingredient": link.ingredient, "retailer": link.retailer, "url": link.url}
        for link in builder.build_links(item["name"])
    ]
    payload = dict(item)
    # SQLite stores booleans as 0/1; normalise for the response model.
    payload["checked"] = bool(item.get("checked", 0))
    return ShoppingItemResponse(**payload, grocery_links=link_rows)
|
||||
|
||||
|
||||
def _in_thread(db_path, fn):
    """Run *fn* against a fresh per-call Store; the connection is always closed."""
    store = Store(db_path)
    try:
        result = fn(store)
    finally:
        store.close()
    return result
|
||||
|
||||
|
||||
# ── List ──────────────────────────────────────────────────────────────────────
|
||||
|
||||
def _locale_from_store(store: Store) -> str:
|
||||
return store.get_setting("shopping_locale") or "us"
|
||||
|
||||
|
||||
@router.get("", response_model=list[ShoppingItemResponse])
async def list_shopping_items(
    include_checked: bool = True,
    session: CloudUser = Depends(get_session),
    store: Store = Depends(get_store),
):
    """List shopping items, each enriched with affiliate grocery links."""
    locale = await asyncio.to_thread(_in_thread, session.db, _locale_from_store)
    builder = GroceryLinkBuilder(
        tier=session.tier, has_byok=session.has_byok, locale=locale
    )

    def _op(s: Store):
        return s.list_shopping_items(include_checked)

    rows = await asyncio.to_thread(_in_thread, session.db, _op)
    return [_enrich(row, builder) for row in rows]
|
||||
|
||||
|
||||
# ── Add manually ──────────────────────────────────────────────────────────────
|
||||
|
||||
@router.post("", response_model=ShoppingItemResponse, status_code=status.HTTP_201_CREATED)
async def add_shopping_item(
    body: ShoppingItemCreate,
    session: CloudUser = Depends(get_session),
    store: Store = Depends(get_store),
):
    """Add a single shopping item manually.

    Returns the created row enriched with affiliate grocery links (201).
    """
    # Fix: read the locale on a worker thread, as list_shopping_items does.
    # The previous inline _locale_from_store(store) call ran a blocking
    # SQLite read directly on the event loop inside this async handler.
    locale = await asyncio.to_thread(_in_thread, session.db, _locale_from_store)
    builder = GroceryLinkBuilder(
        tier=session.tier, has_byok=session.has_byok, locale=locale
    )
    item = await asyncio.to_thread(
        _in_thread,
        session.db,
        lambda s: s.add_shopping_item(
            name=body.name,
            quantity=body.quantity,
            unit=body.unit,
            category=body.category,
            notes=body.notes,
            source=body.source,
            recipe_id=body.recipe_id,
            sort_order=body.sort_order,
        ),
    )
    return _enrich(item, builder)
|
||||
|
||||
|
||||
# ── Bulk add from recipe ───────────────────────────────────────────────────────
|
||||
|
||||
@router.post("/from-recipe", response_model=list[ShoppingItemResponse], status_code=status.HTTP_201_CREATED)
async def add_from_recipe(
    body: BulkAddFromRecipeRequest,
    session: CloudUser = Depends(get_session),
    store: Store = Depends(get_store),
):
    """Add missing ingredients from a recipe to the shopping list.

    Runs pantry gap analysis and adds only the items the user doesn't have
    (unless include_covered=True). Skips duplicates already on the list.
    Returns the newly created items enriched with affiliate links; raises
    404 when the recipe does not exist.
    """
    # Function-scoped import kept from the original — presumably to avoid an
    # import cycle with the meal-plan service; confirm before hoisting.
    from app.services.meal_plan.shopping_list import compute_shopping_list

    def _run(store: Store):
        recipe = store.get_recipe(body.recipe_id)
        if not recipe:
            raise HTTPException(status_code=404, detail="Recipe not found")
        inventory = store.list_inventory()
        gaps, covered = compute_shopping_list([recipe], inventory)
        targets = (gaps + covered) if body.include_covered else gaps

        # Skip names already on the list. Fix: also record each name as it is
        # added, so two gaps with the same ingredient name within one request
        # don't both land on the list.
        existing = {i["name"].lower() for i in store.list_shopping_items()}
        added = []
        for gap in targets:
            key = gap.ingredient_name.lower()
            if key in existing:
                continue
            existing.add(key)
            item = store.add_shopping_item(
                name=gap.ingredient_name,
                quantity=None,
                unit=gap.have_unit,
                source="recipe",
                recipe_id=body.recipe_id,
            )
            added.append(item)
        return added

    # Fix: read the locale on a worker thread (consistent with
    # list_shopping_items) instead of a blocking inline SQLite read.
    locale = await asyncio.to_thread(_in_thread, session.db, _locale_from_store)
    builder = GroceryLinkBuilder(
        tier=session.tier, has_byok=session.has_byok, locale=locale
    )
    items = await asyncio.to_thread(_in_thread, session.db, _run)
    return [_enrich(i, builder) for i in items]
|
||||
|
||||
|
||||
# ── Update ────────────────────────────────────────────────────────────────────
|
||||
|
||||
@router.patch("/{item_id}", response_model=ShoppingItemResponse)
async def update_shopping_item(
    item_id: int,
    body: ShoppingItemUpdate,
    session: CloudUser = Depends(get_session),
    store: Store = Depends(get_store),
):
    """Update a shopping item (check/uncheck, rename, qty); 404 when absent.

    NOTE(review): exclude_none=True means a field can never be PATCHed back
    to null — confirm that is intended for notes/quantity.
    """
    # Fix: read the locale on a worker thread, as list_shopping_items does.
    # The previous inline _locale_from_store(store) call ran a blocking
    # SQLite read directly on the event loop inside this async handler.
    locale = await asyncio.to_thread(_in_thread, session.db, _locale_from_store)
    builder = GroceryLinkBuilder(
        tier=session.tier, has_byok=session.has_byok, locale=locale
    )
    item = await asyncio.to_thread(
        _in_thread,
        session.db,
        lambda s: s.update_shopping_item(item_id, **body.model_dump(exclude_none=True)),
    )
    if not item:
        raise HTTPException(status_code=404, detail="Shopping item not found")
    return _enrich(item, builder)
|
||||
|
||||
|
||||
# ── Confirm purchase → pantry ─────────────────────────────────────────────────
|
||||
|
||||
@router.post("/{item_id}/confirm", status_code=status.HTTP_201_CREATED)
async def confirm_purchase(
    item_id: int,
    body: ConfirmPurchaseRequest,
    session: CloudUser = Depends(get_session),
):
    """Confirm a checked item was purchased and add it to pantry inventory.

    Human approval step: the user explicitly confirms what they actually bought
    before it lands in their pantry. Returns the new inventory item.
    """

    def _op(store: Store):
        row = store.get_shopping_item(item_id)
        if not row:
            raise HTTPException(status_code=404, detail="Shopping item not found")

        # Request values win; otherwise fall back to the list entry, then defaults.
        quantity = body.quantity if body.quantity is not None else (row.get("quantity") or 1.0)
        unit = body.unit or row.get("unit") or "count"

        product = store.get_or_create_product(
            name=row["name"],
            category=row.get("category"),
        )
        inventory_item = store.add_inventory_item(
            product_id=product["id"],
            location=body.location,
            quantity=quantity,
            unit=unit,
            source="manual",
        )
        # Mark the shopping item checked and leave it for the user to clear.
        store.update_shopping_item(item_id, checked=True)
        return inventory_item

    return await asyncio.to_thread(_in_thread, session.db, _op)
|
||||
|
||||
|
||||
# ── Delete ────────────────────────────────────────────────────────────────────
|
||||
|
||||
@router.delete("/{item_id}", status_code=status.HTTP_204_NO_CONTENT)
async def delete_shopping_item(
    item_id: int,
    session: CloudUser = Depends(get_session),
):
    """Delete a single shopping item; 404 when no matching row exists.

    NOTE(review): this parameterized route is registered before the literal
    routes "/checked" and "/all" on the same router. FastAPI matches routes
    in registration order, so DELETE /shopping/checked likely resolves here
    and fails int coercion on item_id (422) instead of reaching
    clear_checked — confirm and consider registering the literal routes
    first.
    """
    deleted = await asyncio.to_thread(
        _in_thread, session.db, lambda s: s.delete_shopping_item(item_id)
    )
    if not deleted:
        raise HTTPException(status_code=404, detail="Shopping item not found")
|
||||
|
||||
|
||||
@router.delete("/checked", status_code=status.HTTP_204_NO_CONTENT)
async def clear_checked(session: CloudUser = Depends(get_session)):
    """Remove every checked item from the shopping list (204)."""

    def _op(s: Store) -> None:
        s.clear_checked_shopping_items()

    await asyncio.to_thread(_in_thread, session.db, _op)
|
||||
|
||||
|
||||
@router.delete("/all", status_code=status.HTTP_204_NO_CONTENT)
async def clear_all(session: CloudUser = Depends(get_session)):
    """Empty the entire shopping list (204)."""

    def _op(s: Store) -> None:
        s.clear_all_shopping_items()

    await asyncio.to_thread(_in_thread, session.db, _op)
|
||||
|
|
@ -1,26 +1,13 @@
|
|||
from fastapi import APIRouter
|
||||
from app.api.endpoints import health, receipts, export, inventory, ocr, recipes, settings, staples, feedback, feedback_attach, household, saved_recipes, imitate, meal_plans, orch_usage, session, shopping
|
||||
from app.api.endpoints.community import router as community_router
|
||||
from app.api.endpoints.recipe_tags import router as recipe_tags_router
|
||||
from app.api.endpoints import health, receipts, export, inventory, ocr, recipes, settings, staples
|
||||
|
||||
api_router = APIRouter()
|
||||
|
||||
api_router.include_router(session.router, prefix="/session", tags=["session"])
|
||||
api_router.include_router(health.router, prefix="/health", tags=["health"])
|
||||
api_router.include_router(receipts.router, prefix="/receipts", tags=["receipts"])
|
||||
api_router.include_router(ocr.router, prefix="/receipts", tags=["ocr"])
|
||||
api_router.include_router(export.router, tags=["export"])
|
||||
api_router.include_router(inventory.router, prefix="/inventory", tags=["inventory"])
|
||||
api_router.include_router(saved_recipes.router, prefix="/recipes/saved", tags=["saved-recipes"])
|
||||
api_router.include_router(recipes.router, prefix="/recipes", tags=["recipes"])
|
||||
api_router.include_router(settings.router, prefix="/settings", tags=["settings"])
|
||||
api_router.include_router(staples.router, prefix="/staples", tags=["staples"])
|
||||
api_router.include_router(feedback.router, prefix="/feedback", tags=["feedback"])
|
||||
api_router.include_router(feedback_attach.router, prefix="/feedback", tags=["feedback"])
|
||||
api_router.include_router(household.router, prefix="/household", tags=["household"])
|
||||
api_router.include_router(imitate.router, prefix="/imitate", tags=["imitate"])
|
||||
api_router.include_router(meal_plans.router, prefix="/meal-plans", tags=["meal-plans"])
|
||||
api_router.include_router(orch_usage.router, prefix="/orch-usage", tags=["orch-usage"])
|
||||
api_router.include_router(shopping.router, prefix="/shopping", tags=["shopping"])
|
||||
api_router.include_router(community_router)
|
||||
api_router.include_router(recipe_tags_router)
|
||||
|
|
|
|||
|
|
@ -22,12 +22,10 @@ import time
|
|||
from dataclasses import dataclass
|
||||
from pathlib import Path
|
||||
|
||||
import uuid
|
||||
|
||||
import jwt as pyjwt
|
||||
import requests
|
||||
import yaml
|
||||
from fastapi import Depends, HTTPException, Request, Response
|
||||
from fastapi import Depends, HTTPException, Request
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
|
@ -78,21 +76,12 @@ def _is_bypass_ip(ip: str) -> bool:
|
|||
|
||||
_LOCAL_KIWI_DB: Path = Path(os.environ.get("KIWI_DB", "data/kiwi.db"))
|
||||
|
||||
_TIER_CACHE: dict[str, tuple[dict, float]] = {}
|
||||
_TIER_CACHE: dict[str, tuple[str, float]] = {}
|
||||
_TIER_CACHE_TTL = 300 # 5 minutes
|
||||
|
||||
TIERS = ["free", "paid", "premium", "ultra"]
|
||||
|
||||
|
||||
def _auth_label(user_id: str) -> str:
|
||||
"""Classify a user_id into a short tag for structured log lines. No PII emitted."""
|
||||
if user_id in ("local", "local-dev"):
|
||||
return "local"
|
||||
if user_id.startswith("anon-"):
|
||||
return "anon"
|
||||
return "authed"
|
||||
|
||||
|
||||
# ── Domain ────────────────────────────────────────────────────────────────────
|
||||
|
||||
@dataclass(frozen=True)
|
||||
|
|
@ -101,9 +90,6 @@ class CloudUser:
|
|||
tier: str # free | paid | premium | ultra | local
|
||||
db: Path # per-user SQLite DB path
|
||||
has_byok: bool # True if a configured LLM backend is present in llm.yaml
|
||||
household_id: str | None = None
|
||||
is_household_owner: bool = False
|
||||
license_key: str | None = None # key_display for lifetime/founders keys; None for subscription/free
|
||||
|
||||
|
||||
# ── JWT validation ─────────────────────────────────────────────────────────────
|
||||
|
|
@ -144,16 +130,14 @@ def _ensure_provisioned(user_id: str) -> None:
|
|||
log.warning("Heimdall provision failed for user %s: %s", user_id, exc)
|
||||
|
||||
|
||||
def _fetch_cloud_tier(user_id: str) -> tuple[str, str | None, bool, str | None]:
|
||||
"""Returns (tier, household_id | None, is_household_owner, license_key | None)."""
|
||||
def _fetch_cloud_tier(user_id: str) -> str:
|
||||
now = time.monotonic()
|
||||
cached = _TIER_CACHE.get(user_id)
|
||||
if cached and (now - cached[1]) < _TIER_CACHE_TTL:
|
||||
entry = cached[0]
|
||||
return entry["tier"], entry.get("household_id"), entry.get("is_household_owner", False), entry.get("license_key")
|
||||
return cached[0]
|
||||
|
||||
if not HEIMDALL_ADMIN_TOKEN:
|
||||
return "free", None, False, None
|
||||
return "free"
|
||||
try:
|
||||
resp = requests.post(
|
||||
f"{HEIMDALL_URL}/admin/cloud/resolve",
|
||||
|
|
@ -161,39 +145,21 @@ def _fetch_cloud_tier(user_id: str) -> tuple[str, str | None, bool, str | None]:
|
|||
headers={"Authorization": f"Bearer {HEIMDALL_ADMIN_TOKEN}"},
|
||||
timeout=5,
|
||||
)
|
||||
data = resp.json() if resp.ok else {}
|
||||
tier = data.get("tier", "free")
|
||||
household_id = data.get("household_id")
|
||||
is_owner = data.get("is_household_owner", False)
|
||||
license_key = data.get("key_display")
|
||||
tier = resp.json().get("tier", "free") if resp.ok else "free"
|
||||
except Exception as exc:
|
||||
log.warning("Heimdall tier resolve failed for user %s: %s", user_id, exc)
|
||||
tier, household_id, is_owner, license_key = "free", None, False, None
|
||||
tier = "free"
|
||||
|
||||
_TIER_CACHE[user_id] = ({"tier": tier, "household_id": household_id, "is_household_owner": is_owner, "license_key": license_key}, now)
|
||||
return tier, household_id, is_owner, license_key
|
||||
_TIER_CACHE[user_id] = (tier, now)
|
||||
return tier
|
||||
|
||||
|
||||
def _user_db_path(user_id: str, household_id: str | None = None) -> Path:
|
||||
if household_id:
|
||||
path = CLOUD_DATA_ROOT / f"household_{household_id}" / "kiwi.db"
|
||||
else:
|
||||
def _user_db_path(user_id: str) -> Path:
|
||||
path = CLOUD_DATA_ROOT / user_id / "kiwi.db"
|
||||
path.parent.mkdir(parents=True, exist_ok=True)
|
||||
return path
|
||||
|
||||
|
||||
def _anon_guest_db_path(guest_id: str) -> Path:
|
||||
"""Per-session DB for unauthenticated guest visitors.
|
||||
|
||||
Each anonymous visitor gets an isolated SQLite DB keyed by their guest UUID
|
||||
cookie, so shopping lists and affiliate interactions never bleed across sessions.
|
||||
"""
|
||||
path = CLOUD_DATA_ROOT / f"anon-{guest_id}" / "kiwi.db"
|
||||
path.parent.mkdir(parents=True, exist_ok=True)
|
||||
return path
|
||||
|
||||
|
||||
# ── BYOK detection ────────────────────────────────────────────────────────────
|
||||
|
||||
_LLM_CONFIG_PATH = Path.home() / ".config" / "circuitforge" / "llm.yaml"
|
||||
|
|
@ -219,52 +185,22 @@ def _detect_byok(config_path: Path = _LLM_CONFIG_PATH) -> bool:
|
|||
|
||||
# ── FastAPI dependency ────────────────────────────────────────────────────────
|
||||
|
||||
_GUEST_COOKIE = "kiwi_guest_id"
|
||||
_GUEST_COOKIE_MAX_AGE = 60 * 60 * 24 * 90 # 90 days
|
||||
|
||||
|
||||
def _resolve_guest_session(request: Request, response: Response, has_byok: bool) -> CloudUser:
|
||||
"""Return a per-session anonymous CloudUser, creating a guest UUID cookie if needed."""
|
||||
guest_id = request.cookies.get(_GUEST_COOKIE, "").strip()
|
||||
is_new = not guest_id
|
||||
if is_new:
|
||||
guest_id = str(uuid.uuid4())
|
||||
log.debug("New guest session assigned: anon-%s", guest_id[:8])
|
||||
# Secure flag only when the request actually arrived over HTTPS
|
||||
# (Caddy sets X-Forwarded-Proto=https in cloud; absent on direct port access).
|
||||
# Avoids losing the session cookie on HTTP direct-port testing of the cloud stack.
|
||||
is_https = request.headers.get("x-forwarded-proto", "http").lower() == "https"
|
||||
response.set_cookie(
|
||||
key=_GUEST_COOKIE,
|
||||
value=guest_id,
|
||||
max_age=_GUEST_COOKIE_MAX_AGE,
|
||||
httponly=True,
|
||||
samesite="lax",
|
||||
secure=is_https,
|
||||
)
|
||||
return CloudUser(
|
||||
user_id=f"anon-{guest_id}",
|
||||
tier="free",
|
||||
db=_anon_guest_db_path(guest_id),
|
||||
has_byok=has_byok,
|
||||
)
|
||||
|
||||
|
||||
def get_session(request: Request, response: Response) -> CloudUser:
|
||||
def get_session(request: Request) -> CloudUser:
|
||||
"""FastAPI dependency — resolves the current user from the request.
|
||||
|
||||
Local mode: fully-privileged "local" user pointing at local DB.
|
||||
Cloud mode: validates X-CF-Session JWT, provisions license, resolves tier.
|
||||
Dev bypass: if CLOUD_AUTH_BYPASS_IPS is set and the client IP matches,
|
||||
returns a "local" session without JWT validation (dev/LAN use only).
|
||||
Anonymous: per-session UUID cookie isolates each guest visitor's data.
|
||||
"""
|
||||
has_byok = _detect_byok()
|
||||
|
||||
if not CLOUD_MODE:
|
||||
return CloudUser(user_id="local", tier="local", db=_LOCAL_KIWI_DB, has_byok=has_byok)
|
||||
|
||||
# Prefer X-Real-IP (set by Caddy from the actual client address) over the
|
||||
# Prefer X-Real-IP (set by nginx from the actual client address) over the
|
||||
# TCP peer address (which is nginx's container IP when behind the proxy).
|
||||
# Prefer X-Real-IP (set by nginx from the actual client address) over the
|
||||
# TCP peer address (which is nginx's container IP when behind the proxy).
|
||||
client_ip = (
|
||||
request.headers.get("x-real-ip", "")
|
||||
|
|
@ -276,32 +212,21 @@ def get_session(request: Request, response: Response) -> CloudUser:
|
|||
dev_db = _user_db_path("local-dev")
|
||||
return CloudUser(user_id="local-dev", tier="local", db=dev_db, has_byok=has_byok)
|
||||
|
||||
# Resolve cf_session JWT: prefer the explicit header injected by Caddy, then
|
||||
# fall back to the cf_session cookie value. Other cookies (e.g. kiwi_guest_id)
|
||||
# must never be treated as auth tokens.
|
||||
raw_session = request.headers.get("x-cf-session", "").strip()
|
||||
if not raw_session:
|
||||
raw_session = request.cookies.get("cf_session", "").strip()
|
||||
raw_header = (
|
||||
request.headers.get("x-cf-session", "")
|
||||
or request.headers.get("cookie", "")
|
||||
)
|
||||
if not raw_header:
|
||||
raise HTTPException(status_code=401, detail="Not authenticated")
|
||||
|
||||
if not raw_session:
|
||||
return _resolve_guest_session(request, response, has_byok)
|
||||
|
||||
token = _extract_session_token(raw_session) # gitleaks:allow — function name, not a secret
|
||||
token = _extract_session_token(raw_header)
|
||||
if not token:
|
||||
return _resolve_guest_session(request, response, has_byok)
|
||||
raise HTTPException(status_code=401, detail="Not authenticated")
|
||||
|
||||
user_id = validate_session_jwt(token)
|
||||
_ensure_provisioned(user_id)
|
||||
tier, household_id, is_household_owner, license_key = _fetch_cloud_tier(user_id)
|
||||
return CloudUser(
|
||||
user_id=user_id,
|
||||
tier=tier,
|
||||
db=_user_db_path(user_id, household_id=household_id),
|
||||
has_byok=has_byok,
|
||||
household_id=household_id,
|
||||
is_household_owner=is_household_owner,
|
||||
license_key=license_key,
|
||||
)
|
||||
tier = _fetch_cloud_tier(user_id)
|
||||
return CloudUser(user_id=user_id, tier=tier, db=_user_db_path(user_id), has_byok=has_byok)
|
||||
|
||||
|
||||
def require_tier(min_tier: str):
|
||||
|
|
|
|||
|
|
@ -35,24 +35,6 @@ class Settings:
|
|||
# Database
|
||||
DB_PATH: Path = Path(os.environ.get("DB_PATH", str(DATA_DIR / "kiwi.db")))
|
||||
|
||||
# Pre-computed browse counts cache (small SQLite, separate from corpus).
|
||||
# Written by the nightly refresh task and by infer_recipe_tags.py.
|
||||
# Set BROWSE_COUNTS_PATH to a bind-mounted path if you want the host
|
||||
# pipeline to share counts with the container without re-running FTS.
|
||||
BROWSE_COUNTS_PATH: Path = Path(
|
||||
os.environ.get("BROWSE_COUNTS_PATH", str(DATA_DIR / "browse_counts.db"))
|
||||
)
|
||||
|
||||
# Community feature settings
|
||||
COMMUNITY_DB_URL: str | None = os.environ.get("COMMUNITY_DB_URL") or None
|
||||
COMMUNITY_PSEUDONYM_SALT: str = os.environ.get(
|
||||
"COMMUNITY_PSEUDONYM_SALT", "kiwi-default-salt-change-in-prod"
|
||||
)
|
||||
COMMUNITY_CLOUD_FEED_URL: str = os.environ.get(
|
||||
"COMMUNITY_CLOUD_FEED_URL",
|
||||
"https://menagerie.circuitforge.tech/kiwi/api/v1/community/posts",
|
||||
)
|
||||
|
||||
# Processing
|
||||
MAX_CONCURRENT_JOBS: int = int(os.environ.get("MAX_CONCURRENT_JOBS", "4"))
|
||||
USE_GPU: bool = os.environ.get("USE_GPU", "true").lower() in ("1", "true", "yes")
|
||||
|
|
@ -64,23 +46,8 @@ class Settings:
|
|||
# CF-core resource coordinator (VRAM lease management)
|
||||
COORDINATOR_URL: str = os.environ.get("COORDINATOR_URL", "http://localhost:7700")
|
||||
|
||||
# Hosted cf-orch coordinator — bearer token for managed cloud GPU inference (Paid+)
|
||||
# CFOrchClient reads CF_LICENSE_KEY automatically; exposed here for startup validation.
|
||||
CF_LICENSE_KEY: str | None = os.environ.get("CF_LICENSE_KEY")
|
||||
|
||||
# E2E test account — analytics logging is suppressed for this user_id so test
|
||||
# runs don't pollute session counts. Set to the Directus UUID of the test user.
|
||||
E2E_TEST_USER_ID: str | None = os.environ.get("E2E_TEST_USER_ID") or None
|
||||
|
||||
# Feature flags
|
||||
ENABLE_OCR: bool = os.environ.get("ENABLE_OCR", "false").lower() in ("1", "true", "yes")
|
||||
# Use OrchestratedScheduler (coordinator-aware, multi-GPU fan-out) instead of
|
||||
# LocalScheduler. Defaults to true in CLOUD_MODE; can be set independently
|
||||
# for multi-GPU local rigs that don't need full cloud auth.
|
||||
USE_ORCH_SCHEDULER: bool | None = (
|
||||
None if os.environ.get("USE_ORCH_SCHEDULER") is None
|
||||
else os.environ.get("USE_ORCH_SCHEDULER", "").lower() in ("1", "true", "yes")
|
||||
)
|
||||
|
||||
# Runtime
|
||||
DEBUG: bool = os.environ.get("DEBUG", "false").lower() in ("1", "true", "yes")
|
||||
|
|
|
|||
|
|
@ -14,25 +14,3 @@ CREATE VIRTUAL TABLE IF NOT EXISTS recipes_fts USING fts5(
|
|||
);
|
||||
|
||||
INSERT INTO recipes_fts(recipes_fts) VALUES('rebuild');
|
||||
|
||||
-- Triggers to keep the FTS index in sync with the recipes table.
|
||||
-- Without these, rows inserted after the initial rebuild are invisible to FTS queries.
|
||||
CREATE TRIGGER IF NOT EXISTS recipes_fts_ai
|
||||
AFTER INSERT ON recipes BEGIN
|
||||
INSERT INTO recipes_fts(rowid, ingredient_names)
|
||||
VALUES (new.id, new.ingredient_names);
|
||||
END;
|
||||
|
||||
CREATE TRIGGER IF NOT EXISTS recipes_fts_ad
|
||||
AFTER DELETE ON recipes BEGIN
|
||||
INSERT INTO recipes_fts(recipes_fts, rowid, ingredient_names)
|
||||
VALUES ('delete', old.id, old.ingredient_names);
|
||||
END;
|
||||
|
||||
CREATE TRIGGER IF NOT EXISTS recipes_fts_au
|
||||
AFTER UPDATE ON recipes BEGIN
|
||||
INSERT INTO recipes_fts(recipes_fts, rowid, ingredient_names)
|
||||
VALUES ('delete', old.id, old.ingredient_names);
|
||||
INSERT INTO recipes_fts(rowid, ingredient_names)
|
||||
VALUES (new.id, new.ingredient_names);
|
||||
END;
|
||||
|
|
|
|||
|
|
@ -1,27 +0,0 @@
|
|||
-- Migration 016: Add FTS5 sync triggers for the recipes_fts content table.
|
||||
--
|
||||
-- Migration 015 created recipes_fts and did a one-time rebuild, but omitted
|
||||
-- triggers. Without them, INSERT/UPDATE/DELETE on recipes does not update the
|
||||
-- FTS index, so new rows are invisible to MATCH queries.
|
||||
--
|
||||
-- CREATE TRIGGER IF NOT EXISTS is idempotent — safe to re-run.
|
||||
|
||||
CREATE TRIGGER IF NOT EXISTS recipes_fts_ai
|
||||
AFTER INSERT ON recipes BEGIN
|
||||
INSERT INTO recipes_fts(rowid, ingredient_names)
|
||||
VALUES (new.id, new.ingredient_names);
|
||||
END;
|
||||
|
||||
CREATE TRIGGER IF NOT EXISTS recipes_fts_ad
|
||||
AFTER DELETE ON recipes BEGIN
|
||||
INSERT INTO recipes_fts(recipes_fts, rowid, ingredient_names)
|
||||
VALUES ('delete', old.id, old.ingredient_names);
|
||||
END;
|
||||
|
||||
CREATE TRIGGER IF NOT EXISTS recipes_fts_au
|
||||
AFTER UPDATE ON recipes BEGIN
|
||||
INSERT INTO recipes_fts(recipes_fts, rowid, ingredient_names)
|
||||
VALUES ('delete', old.id, old.ingredient_names);
|
||||
INSERT INTO recipes_fts(rowid, ingredient_names)
|
||||
VALUES (new.id, new.ingredient_names);
|
||||
END;
|
||||
|
|
@ -1,10 +0,0 @@
|
|||
-- 017_household_invites.sql
|
||||
CREATE TABLE IF NOT EXISTS household_invites (
|
||||
token TEXT PRIMARY KEY,
|
||||
household_id TEXT NOT NULL,
|
||||
created_by TEXT NOT NULL,
|
||||
created_at TEXT NOT NULL DEFAULT (datetime('now')),
|
||||
expires_at TEXT NOT NULL,
|
||||
used_at TEXT,
|
||||
used_by TEXT
|
||||
);
|
||||
|
|
@ -1,14 +0,0 @@
|
|||
-- Migration 018: saved recipes bookmarks.
|
||||
|
||||
CREATE TABLE saved_recipes (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
recipe_id INTEGER NOT NULL REFERENCES recipes(id) ON DELETE CASCADE,
|
||||
saved_at TEXT NOT NULL DEFAULT (datetime('now')),
|
||||
notes TEXT,
|
||||
rating INTEGER CHECK (rating IS NULL OR (rating >= 0 AND rating <= 5)),
|
||||
style_tags TEXT NOT NULL DEFAULT '[]',
|
||||
UNIQUE (recipe_id)
|
||||
);
|
||||
|
||||
CREATE INDEX idx_saved_recipes_saved_at ON saved_recipes (saved_at DESC);
|
||||
CREATE INDEX idx_saved_recipes_rating ON saved_recipes (rating);
|
||||
|
|
@ -1,16 +0,0 @@
|
|||
-- Migration 019: recipe collections (Paid tier organisation).
|
||||
|
||||
CREATE TABLE recipe_collections (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
name TEXT NOT NULL,
|
||||
description TEXT,
|
||||
created_at TEXT NOT NULL DEFAULT (datetime('now')),
|
||||
updated_at TEXT NOT NULL DEFAULT (datetime('now'))
|
||||
);
|
||||
|
||||
CREATE TABLE recipe_collection_members (
|
||||
collection_id INTEGER NOT NULL REFERENCES recipe_collections(id) ON DELETE CASCADE,
|
||||
saved_recipe_id INTEGER NOT NULL REFERENCES saved_recipes(id) ON DELETE CASCADE,
|
||||
added_at TEXT NOT NULL DEFAULT (datetime('now')),
|
||||
PRIMARY KEY (collection_id, saved_recipe_id)
|
||||
);
|
||||
|
|
@ -1,13 +0,0 @@
|
|||
-- Migration 020: recipe browser navigation telemetry.
|
||||
-- Used to determine whether category nesting depth needs increasing.
|
||||
-- Review: if any category has page > 5 and result_count > 100 consistently,
|
||||
-- consider adding a third nesting level for that category.
|
||||
|
||||
CREATE TABLE browser_telemetry (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
domain TEXT NOT NULL,
|
||||
category TEXT NOT NULL,
|
||||
page INTEGER NOT NULL,
|
||||
result_count INTEGER NOT NULL,
|
||||
recorded_at TEXT NOT NULL DEFAULT (datetime('now'))
|
||||
);
|
||||
|
|
@ -1,43 +0,0 @@
|
|||
-- Migration 021: FTS5 inverted index for the recipe browser (category + keywords).
|
||||
--
|
||||
-- The browser domain queries were using LIKE '%keyword%' against category and
|
||||
-- keywords columns — a leading wildcard prevents any B-tree index use, so every
|
||||
-- query was a full sequential scan of 3.1M rows. This FTS5 index replaces those
|
||||
-- scans with O(log N) token lookups.
|
||||
--
|
||||
-- Content-table backed: stores only the inverted index, no text duplication.
|
||||
-- The keywords column is a JSON array; FTS5 tokenises it as plain text, stripping
|
||||
-- the punctuation, which gives correct per-word matching.
|
||||
--
|
||||
-- One-time rebuild cost on 3.1M rows: ~20-40 seconds at first startup.
|
||||
-- Subsequent startups skip this migration (IF NOT EXISTS guard).
|
||||
|
||||
CREATE VIRTUAL TABLE IF NOT EXISTS recipe_browser_fts USING fts5(
|
||||
category,
|
||||
keywords,
|
||||
content=recipes,
|
||||
content_rowid=id,
|
||||
tokenize="unicode61"
|
||||
);
|
||||
|
||||
INSERT INTO recipe_browser_fts(recipe_browser_fts) VALUES('rebuild');
|
||||
|
||||
CREATE TRIGGER IF NOT EXISTS recipe_browser_fts_ai
|
||||
AFTER INSERT ON recipes BEGIN
|
||||
INSERT INTO recipe_browser_fts(rowid, category, keywords)
|
||||
VALUES (new.id, new.category, new.keywords);
|
||||
END;
|
||||
|
||||
CREATE TRIGGER IF NOT EXISTS recipe_browser_fts_ad
|
||||
AFTER DELETE ON recipes BEGIN
|
||||
INSERT INTO recipe_browser_fts(recipe_browser_fts, rowid, category, keywords)
|
||||
VALUES ('delete', old.id, old.category, old.keywords);
|
||||
END;
|
||||
|
||||
CREATE TRIGGER IF NOT EXISTS recipe_browser_fts_au
|
||||
AFTER UPDATE ON recipes BEGIN
|
||||
INSERT INTO recipe_browser_fts(recipe_browser_fts, rowid, category, keywords)
|
||||
VALUES ('delete', old.id, old.category, old.keywords);
|
||||
INSERT INTO recipe_browser_fts(rowid, category, keywords)
|
||||
VALUES (new.id, new.category, new.keywords);
|
||||
END;
|
||||
|
|
@ -1,8 +0,0 @@
|
|||
-- 022_meal_plans.sql
|
||||
CREATE TABLE meal_plans (
|
||||
id INTEGER PRIMARY KEY,
|
||||
week_start TEXT NOT NULL,
|
||||
meal_types TEXT NOT NULL DEFAULT '["dinner"]',
|
||||
created_at TEXT NOT NULL DEFAULT (datetime('now')),
|
||||
updated_at TEXT NOT NULL DEFAULT (datetime('now'))
|
||||
);
|
||||
|
|
@ -1,5 +0,0 @@
|
|||
-- Migration 022: Add is_generic flag to recipes
|
||||
-- Generic recipes are catch-all/dump recipes with loose ingredient lists
|
||||
-- that should not appear in Level 1 (deterministic "use what I have") results.
|
||||
-- Admins can mark recipes via the recipe editor or a bulk backfill script.
|
||||
ALTER TABLE recipes ADD COLUMN is_generic INTEGER NOT NULL DEFAULT 0;
|
||||
|
|
@ -1,11 +0,0 @@
|
|||
-- 023_meal_plan_slots.sql
|
||||
CREATE TABLE meal_plan_slots (
|
||||
id INTEGER PRIMARY KEY,
|
||||
plan_id INTEGER NOT NULL REFERENCES meal_plans(id) ON DELETE CASCADE,
|
||||
day_of_week INTEGER NOT NULL CHECK(day_of_week BETWEEN 0 AND 6),
|
||||
meal_type TEXT NOT NULL,
|
||||
recipe_id INTEGER REFERENCES recipes(id),
|
||||
servings REAL NOT NULL DEFAULT 2.0,
|
||||
custom_label TEXT,
|
||||
UNIQUE(plan_id, day_of_week, meal_type)
|
||||
);
|
||||
|
|
@ -1,10 +0,0 @@
|
|||
-- 024_prep_sessions.sql
|
||||
CREATE TABLE prep_sessions (
|
||||
id INTEGER PRIMARY KEY,
|
||||
plan_id INTEGER NOT NULL REFERENCES meal_plans(id) ON DELETE CASCADE,
|
||||
scheduled_date TEXT NOT NULL,
|
||||
status TEXT NOT NULL DEFAULT 'draft'
|
||||
CHECK(status IN ('draft','reviewed','done')),
|
||||
created_at TEXT NOT NULL DEFAULT (datetime('now')),
|
||||
updated_at TEXT NOT NULL DEFAULT (datetime('now'))
|
||||
);
|
||||
|
|
@ -1,15 +0,0 @@
|
|||
-- 025_prep_tasks.sql
|
||||
CREATE TABLE prep_tasks (
|
||||
id INTEGER PRIMARY KEY,
|
||||
session_id INTEGER NOT NULL REFERENCES prep_sessions(id) ON DELETE CASCADE,
|
||||
recipe_id INTEGER REFERENCES recipes(id),
|
||||
slot_id INTEGER REFERENCES meal_plan_slots(id),
|
||||
task_label TEXT NOT NULL,
|
||||
duration_minutes INTEGER,
|
||||
sequence_order INTEGER NOT NULL,
|
||||
equipment TEXT,
|
||||
is_parallel INTEGER NOT NULL DEFAULT 0,
|
||||
notes TEXT,
|
||||
user_edited INTEGER NOT NULL DEFAULT 0,
|
||||
created_at TEXT NOT NULL DEFAULT (datetime('now'))
|
||||
);
|
||||
|
|
@ -1,21 +0,0 @@
|
|||
-- 028_community_pseudonyms.sql
|
||||
-- Per-user pseudonym store: maps the user's chosen community display name
|
||||
-- to their Directus user ID. This table lives in per-user kiwi.db only.
|
||||
-- It is NEVER replicated to the community PostgreSQL — pseudonym isolation is by design.
|
||||
--
|
||||
-- A user may have one active pseudonym. Old pseudonyms are retained for reference
|
||||
-- (posts published under them keep their pseudonym attribution) but only one is
|
||||
-- flagged as current (is_current = 1).
|
||||
|
||||
CREATE TABLE IF NOT EXISTS community_pseudonyms (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
pseudonym TEXT NOT NULL,
|
||||
directus_user_id TEXT NOT NULL,
|
||||
is_current INTEGER NOT NULL DEFAULT 1 CHECK (is_current IN (0, 1)),
|
||||
created_at TEXT NOT NULL DEFAULT (datetime('now'))
|
||||
);
|
||||
|
||||
-- Only one pseudonym can be current at a time per user
|
||||
CREATE UNIQUE INDEX IF NOT EXISTS idx_community_pseudonyms_current
|
||||
ON community_pseudonyms (directus_user_id)
|
||||
WHERE is_current = 1;
|
||||
|
|
@ -1,49 +0,0 @@
|
|||
-- Migration 029: Add inferred_tags column and update FTS index to include it.
|
||||
--
|
||||
-- inferred_tags holds a JSON array of normalized tag strings derived by
|
||||
-- scripts/pipeline/infer_recipe_tags.py (e.g. ["cuisine:Italian",
|
||||
-- "dietary:Low-Carb", "flavor:Umami", "can_be:Gluten-Free"]).
|
||||
--
|
||||
-- The FTS5 browser table is rebuilt to index inferred_tags alongside
|
||||
-- category and keywords so browse domain queries match against all signals.
|
||||
|
||||
-- 1. Add inferred_tags column (empty array default; populated by pipeline run)
|
||||
ALTER TABLE recipes ADD COLUMN inferred_tags TEXT NOT NULL DEFAULT '[]';
|
||||
|
||||
-- 2. Drop old FTS table and triggers that only covered category + keywords
|
||||
DROP TRIGGER IF EXISTS recipes_ai;
|
||||
DROP TRIGGER IF EXISTS recipes_ad;
|
||||
DROP TRIGGER IF EXISTS recipes_au;
|
||||
DROP TABLE IF EXISTS recipe_browser_fts;
|
||||
|
||||
-- 3. Recreate FTS5 table: now indexes category, keywords, AND inferred_tags
|
||||
CREATE VIRTUAL TABLE recipe_browser_fts USING fts5(
|
||||
category,
|
||||
keywords,
|
||||
inferred_tags,
|
||||
content=recipes,
|
||||
content_rowid=id
|
||||
);
|
||||
|
||||
-- 4. Triggers to keep FTS in sync with recipes table changes
|
||||
CREATE TRIGGER recipes_ai AFTER INSERT ON recipes BEGIN
|
||||
INSERT INTO recipe_browser_fts(rowid, category, keywords, inferred_tags)
|
||||
VALUES (new.id, new.category, new.keywords, new.inferred_tags);
|
||||
END;
|
||||
|
||||
CREATE TRIGGER recipes_ad AFTER DELETE ON recipes BEGIN
|
||||
INSERT INTO recipe_browser_fts(recipe_browser_fts, rowid, category, keywords, inferred_tags)
|
||||
VALUES ('delete', old.id, old.category, old.keywords, old.inferred_tags);
|
||||
END;
|
||||
|
||||
CREATE TRIGGER recipes_au AFTER UPDATE ON recipes BEGIN
|
||||
INSERT INTO recipe_browser_fts(recipe_browser_fts, rowid, category, keywords, inferred_tags)
|
||||
VALUES ('delete', old.id, old.category, old.keywords, old.inferred_tags);
|
||||
INSERT INTO recipe_browser_fts(rowid, category, keywords, inferred_tags)
|
||||
VALUES (new.id, new.category, new.keywords, new.inferred_tags);
|
||||
END;
|
||||
|
||||
-- 5. Populate FTS from current table state
|
||||
-- (inferred_tags is '[]' for all rows at this point; run infer_recipe_tags.py
|
||||
-- to populate, then the FTS will be rebuilt as part of that script.)
|
||||
INSERT INTO recipe_browser_fts(recipe_browser_fts) VALUES('rebuild');
|
||||
|
|
@ -1,5 +0,0 @@
|
|||
-- Migration 030: open-package tracking
|
||||
-- Adds opened_date to track when a multi-use item was first opened,
|
||||
-- enabling secondary shelf-life windows (e.g. salsa: 1 year sealed → 2 weeks opened).
|
||||
|
||||
ALTER TABLE inventory_items ADD COLUMN opened_date TEXT;
|
||||
|
|
@ -1,4 +0,0 @@
|
|||
-- Migration 031: add disposal_reason for waste logging (#60)
|
||||
-- status='discarded' already exists in the CHECK constraint from migration 002.
|
||||
-- This column stores free-text reason (optional) and calm-framing presets.
|
||||
ALTER TABLE inventory_items ADD COLUMN disposal_reason TEXT;
|
||||
|
|
@ -1,4 +0,0 @@
|
|||
-- 032_meal_plan_unique_week.sql
|
||||
-- Prevent duplicate plans for the same week.
|
||||
-- Existing duplicates must be resolved before applying (keep MIN(id) per week_start).
|
||||
CREATE UNIQUE INDEX IF NOT EXISTS idx_meal_plans_week_start ON meal_plans (week_start);
|
||||
|
|
@ -1,21 +0,0 @@
|
|||
-- Migration 033: standalone shopping list
|
||||
-- Items can be added manually, from recipe gap analysis, or from the recipe browser.
|
||||
-- Affiliate links are computed at query time by the API layer (never stored).
|
||||
|
||||
CREATE TABLE IF NOT EXISTS shopping_list_items (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
name TEXT NOT NULL,
|
||||
quantity REAL,
|
||||
unit TEXT,
|
||||
category TEXT,
|
||||
checked INTEGER NOT NULL DEFAULT 0, -- 0=want, 1=in-cart/checked off
|
||||
notes TEXT,
|
||||
source TEXT NOT NULL DEFAULT 'manual', -- manual | recipe | meal_plan
|
||||
recipe_id INTEGER REFERENCES recipes(id) ON DELETE SET NULL,
|
||||
sort_order INTEGER NOT NULL DEFAULT 0,
|
||||
created_at TEXT NOT NULL DEFAULT (datetime('now')),
|
||||
updated_at TEXT NOT NULL DEFAULT (datetime('now'))
|
||||
);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS idx_shopping_list_checked
|
||||
ON shopping_list_items (checked, sort_order);
|
||||
|
|
@ -1,14 +0,0 @@
|
|||
-- Migration 034: async recipe generation job queue
|
||||
CREATE TABLE IF NOT EXISTS recipe_jobs (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
job_id TEXT NOT NULL UNIQUE,
|
||||
user_id TEXT NOT NULL,
|
||||
status TEXT NOT NULL DEFAULT 'queued',
|
||||
request TEXT NOT NULL,
|
||||
result TEXT,
|
||||
error TEXT,
|
||||
created_at TEXT NOT NULL DEFAULT (datetime('now')),
|
||||
updated_at TEXT NOT NULL DEFAULT (datetime('now'))
|
||||
);
|
||||
CREATE INDEX IF NOT EXISTS idx_recipe_jobs_job_id ON recipe_jobs (job_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_recipe_jobs_user_id ON recipe_jobs (user_id, created_at DESC);
|
||||
|
|
@ -1,12 +0,0 @@
|
|||
-- Migration 035: add sensory_tags column for sensory profile filtering
|
||||
--
|
||||
-- sensory_tags holds a JSON object with texture, smell, and noise signals:
|
||||
-- {"textures": ["mushy", "creamy"], "smell": "pungent", "noise": "moderate"}
|
||||
--
|
||||
-- Empty object '{}' means untagged — these recipes pass ALL sensory filters
|
||||
-- (graceful degradation when tag_sensory_profiles.py has not yet been run).
|
||||
--
|
||||
-- Populated offline by: python scripts/tag_sensory_profiles.py [path/to/kiwi.db]
|
||||
-- No FTS rebuild needed — sensory_tags is filtered in Python after candidate fetch.
|
||||
|
||||
ALTER TABLE recipes ADD COLUMN sensory_tags TEXT NOT NULL DEFAULT '{}';
|
||||
|
|
@ -1,26 +0,0 @@
|
|||
-- Migration 036: captured_products local cache
|
||||
-- Products captured via visual label scanning (kiwi#79).
|
||||
-- Keyed by barcode; checked before FDC/OFF on future scans so each product
|
||||
-- is only captured once per device.
|
||||
|
||||
CREATE TABLE IF NOT EXISTS captured_products (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
barcode TEXT UNIQUE NOT NULL,
|
||||
product_name TEXT,
|
||||
brand TEXT,
|
||||
serving_size_g REAL,
|
||||
calories REAL,
|
||||
fat_g REAL,
|
||||
saturated_fat_g REAL,
|
||||
carbs_g REAL,
|
||||
sugar_g REAL,
|
||||
fiber_g REAL,
|
||||
protein_g REAL,
|
||||
sodium_mg REAL,
|
||||
ingredient_names TEXT NOT NULL DEFAULT '[]', -- JSON array
|
||||
allergens TEXT NOT NULL DEFAULT '[]', -- JSON array
|
||||
confidence REAL,
|
||||
source TEXT NOT NULL DEFAULT 'visual_capture',
|
||||
captured_at TEXT NOT NULL DEFAULT (datetime('now')),
|
||||
confirmed_by_user INTEGER NOT NULL DEFAULT 0
|
||||
);
|
||||
|
|
@ -1,34 +0,0 @@
|
|||
-- Migration 037: add 'visual_capture' to products.source CHECK constraint
|
||||
-- SQLite cannot ALTER a CHECK constraint, so we rebuild the table.
|
||||
|
||||
PRAGMA foreign_keys = OFF;
|
||||
|
||||
BEGIN;
|
||||
|
||||
CREATE TABLE products_new (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
barcode TEXT UNIQUE,
|
||||
name TEXT NOT NULL,
|
||||
brand TEXT,
|
||||
category TEXT,
|
||||
description TEXT,
|
||||
image_url TEXT,
|
||||
nutrition_data TEXT NOT NULL DEFAULT '{}',
|
||||
source TEXT NOT NULL DEFAULT 'openfoodfacts'
|
||||
CHECK (source IN ('openfoodfacts', 'manual', 'receipt_ocr', 'visual_capture')),
|
||||
source_data TEXT,
|
||||
created_at TEXT NOT NULL DEFAULT (datetime('now')),
|
||||
updated_at TEXT NOT NULL DEFAULT (datetime('now'))
|
||||
);
|
||||
|
||||
INSERT INTO products_new
|
||||
SELECT id, barcode, name, brand, category, description, image_url,
|
||||
nutrition_data, source, source_data, created_at, updated_at
|
||||
FROM products;
|
||||
|
||||
DROP TABLE products;
|
||||
ALTER TABLE products_new RENAME TO products;
|
||||
|
||||
COMMIT;
|
||||
|
||||
PRAGMA foreign_keys = ON;
|
||||
|
|
@ -1,43 +0,0 @@
|
|||
-- Migration 038: add 'visual_capture' to inventory_items.source CHECK constraint
|
||||
-- SQLite cannot ALTER a CHECK constraint, so we rebuild the table.
|
||||
|
||||
PRAGMA foreign_keys = OFF;
|
||||
|
||||
BEGIN;
|
||||
|
||||
CREATE TABLE inventory_items_new (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
product_id INTEGER NOT NULL
|
||||
REFERENCES products (id) ON DELETE RESTRICT,
|
||||
receipt_id INTEGER
|
||||
REFERENCES receipts (id) ON DELETE SET NULL,
|
||||
quantity REAL NOT NULL DEFAULT 1 CHECK (quantity > 0),
|
||||
unit TEXT NOT NULL DEFAULT 'count',
|
||||
location TEXT NOT NULL,
|
||||
sublocation TEXT,
|
||||
purchase_date TEXT,
|
||||
expiration_date TEXT,
|
||||
status TEXT NOT NULL DEFAULT 'available'
|
||||
CHECK (status IN ('available', 'consumed', 'expired', 'discarded')),
|
||||
consumed_at TEXT,
|
||||
notes TEXT,
|
||||
source TEXT NOT NULL DEFAULT 'manual'
|
||||
CHECK (source IN ('barcode_scan', 'manual', 'receipt', 'visual_capture')),
|
||||
created_at TEXT NOT NULL DEFAULT (datetime('now')),
|
||||
updated_at TEXT NOT NULL DEFAULT (datetime('now')),
|
||||
opened_date TEXT,
|
||||
disposal_reason TEXT
|
||||
);
|
||||
|
||||
INSERT INTO inventory_items_new
|
||||
SELECT id, product_id, receipt_id, quantity, unit, location, sublocation,
|
||||
purchase_date, expiration_date, status, consumed_at, notes, source,
|
||||
created_at, updated_at, opened_date, disposal_reason
|
||||
FROM inventory_items;
|
||||
|
||||
DROP TABLE inventory_items;
|
||||
ALTER TABLE inventory_items_new RENAME TO inventory_items;
|
||||
|
||||
COMMIT;
|
||||
|
||||
PRAGMA foreign_keys = ON;
|
||||
990
app/db/store.py
990
app/db/store.py
File diff suppressed because it is too large
Load diff
54
app/main.py
54
app/main.py
|
|
@ -1,9 +1,7 @@
|
|||
#!/usr/bin/env python
|
||||
# app/main.py
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
import os
|
||||
from contextlib import asynccontextmanager
|
||||
|
||||
from fastapi import FastAPI
|
||||
|
|
@ -11,70 +9,20 @@ from fastapi.middleware.cors import CORSMiddleware
|
|||
|
||||
from app.api.routes import api_router
|
||||
from app.core.config import settings
|
||||
from app.services.meal_plan.affiliates import register_kiwi_programs
|
||||
|
||||
# Structured key=value log lines — grep/awk-friendly for log-based analytics.
|
||||
# Without basicConfig, app-level INFO logs are silently dropped.
|
||||
logging.basicConfig(level=logging.INFO, format="%(levelname)s:%(name)s: %(message)s")
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
_BROWSE_REFRESH_INTERVAL_H = 24
|
||||
|
||||
|
||||
async def _browse_counts_refresh_loop(corpus_path: str) -> None:
|
||||
"""Refresh browse counts every 24 h while the container is running."""
|
||||
from app.db.store import _COUNT_CACHE
|
||||
from app.services.recipe.browse_counts_cache import load_into_memory, refresh
|
||||
|
||||
while True:
|
||||
await asyncio.sleep(_BROWSE_REFRESH_INTERVAL_H * 3600)
|
||||
try:
|
||||
logger.info("browse_counts: starting scheduled refresh...")
|
||||
computed = await asyncio.to_thread(
|
||||
refresh, corpus_path, settings.BROWSE_COUNTS_PATH
|
||||
)
|
||||
load_into_memory(settings.BROWSE_COUNTS_PATH, _COUNT_CACHE, corpus_path)
|
||||
logger.info("browse_counts: scheduled refresh complete (%d sets)", computed)
|
||||
except Exception as exc:
|
||||
logger.warning("browse_counts: scheduled refresh failed: %s", exc)
|
||||
|
||||
|
||||
@asynccontextmanager
|
||||
async def lifespan(app: FastAPI):
|
||||
logger.info("Starting Kiwi API...")
|
||||
settings.ensure_dirs()
|
||||
register_kiwi_programs()
|
||||
|
||||
# Start LLM background task scheduler
|
||||
from app.tasks.scheduler import get_scheduler
|
||||
get_scheduler(settings.DB_PATH)
|
||||
logger.info("Task scheduler started.")
|
||||
|
||||
# Initialize community store (no-op if COMMUNITY_DB_URL is not set)
|
||||
from app.api.endpoints.community import init_community_store
|
||||
init_community_store(settings.COMMUNITY_DB_URL)
|
||||
|
||||
# Browse counts cache — warm in-memory cache from disk, refresh if stale.
|
||||
# Uses the corpus path the store will attach to at request time.
|
||||
corpus_path = os.environ.get("RECIPE_DB_PATH", str(settings.DB_PATH))
|
||||
try:
|
||||
from app.db.store import _COUNT_CACHE
|
||||
from app.services.recipe.browse_counts_cache import (
|
||||
is_stale, load_into_memory, refresh,
|
||||
)
|
||||
if is_stale(settings.BROWSE_COUNTS_PATH):
|
||||
logger.info("browse_counts: cache stale — refreshing in background...")
|
||||
asyncio.create_task(
|
||||
asyncio.to_thread(refresh, corpus_path, settings.BROWSE_COUNTS_PATH)
|
||||
)
|
||||
else:
|
||||
load_into_memory(settings.BROWSE_COUNTS_PATH, _COUNT_CACHE, corpus_path)
|
||||
except Exception as exc:
|
||||
logger.warning("browse_counts: startup init failed (live FTS fallback active): %s", exc)
|
||||
|
||||
# Nightly background refresh loop
|
||||
asyncio.create_task(_browse_counts_refresh_loop(corpus_path))
|
||||
|
||||
yield
|
||||
|
||||
# Graceful scheduler shutdown
|
||||
|
|
@ -87,7 +35,7 @@ async def lifespan(app: FastAPI):
|
|||
app = FastAPI(
|
||||
title=settings.PROJECT_NAME,
|
||||
description="Pantry tracking + leftover recipe suggestions",
|
||||
version="0.2.0",
|
||||
version="0.1.0",
|
||||
lifespan=lifespan,
|
||||
)
|
||||
|
||||
|
|
|
|||
|
|
@ -1,47 +0,0 @@
|
|||
"""Pydantic schemas for household management endpoints."""
|
||||
from __future__ import annotations
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
|
||||
class HouseholdCreateResponse(BaseModel):
|
||||
household_id: str
|
||||
message: str
|
||||
|
||||
|
||||
class HouseholdMember(BaseModel):
|
||||
user_id: str
|
||||
joined_at: str
|
||||
is_owner: bool
|
||||
|
||||
|
||||
class HouseholdStatusResponse(BaseModel):
|
||||
in_household: bool
|
||||
household_id: str | None = None
|
||||
is_owner: bool = False
|
||||
members: list[HouseholdMember] = Field(default_factory=list)
|
||||
max_seats: int = 4
|
||||
|
||||
|
||||
class HouseholdInviteResponse(BaseModel):
|
||||
invite_url: str
|
||||
token: str
|
||||
expires_at: str
|
||||
|
||||
|
||||
class HouseholdAcceptRequest(BaseModel):
|
||||
household_id: str
|
||||
token: str
|
||||
|
||||
|
||||
class HouseholdAcceptResponse(BaseModel):
|
||||
message: str
|
||||
household_id: str
|
||||
|
||||
|
||||
class HouseholdRemoveMemberRequest(BaseModel):
|
||||
user_id: str
|
||||
|
||||
|
||||
class MessageResponse(BaseModel):
|
||||
message: str
|
||||
|
|
@ -89,20 +89,9 @@ class InventoryItemUpdate(BaseModel):
|
|||
unit: Optional[str] = None
|
||||
location: Optional[str] = None
|
||||
sublocation: Optional[str] = None
|
||||
purchase_date: Optional[date] = None
|
||||
expiration_date: Optional[date] = None
|
||||
opened_date: Optional[date] = None
|
||||
status: Optional[str] = None
|
||||
notes: Optional[str] = None
|
||||
disposal_reason: Optional[str] = None
|
||||
|
||||
|
||||
class PartialConsumeRequest(BaseModel):
|
||||
quantity: float = Field(..., gt=0, description="Amount to consume from this item")
|
||||
|
||||
|
||||
class DiscardRequest(BaseModel):
|
||||
reason: Optional[str] = Field(None, max_length=200)
|
||||
|
||||
|
||||
class InventoryItemResponse(BaseModel):
|
||||
|
|
@ -117,15 +106,8 @@ class InventoryItemResponse(BaseModel):
|
|||
sublocation: Optional[str]
|
||||
purchase_date: Optional[str]
|
||||
expiration_date: Optional[str]
|
||||
opened_date: Optional[str] = None
|
||||
opened_expiry_date: Optional[str] = None
|
||||
secondary_state: Optional[str] = None
|
||||
secondary_uses: Optional[List[str]] = None
|
||||
secondary_warning: Optional[str] = None
|
||||
secondary_discard_signs: Optional[str] = None
|
||||
status: str
|
||||
notes: Optional[str]
|
||||
disposal_reason: Optional[str] = None
|
||||
source: str
|
||||
created_at: str
|
||||
updated_at: str
|
||||
|
|
@ -141,8 +123,6 @@ class BarcodeScanResult(BaseModel):
|
|||
product: Optional[ProductResponse]
|
||||
inventory_item: Optional[InventoryItemResponse]
|
||||
added_to_inventory: bool
|
||||
needs_manual_entry: bool = False
|
||||
needs_visual_capture: bool = False # Paid tier offer when no product data found
|
||||
message: str
|
||||
|
||||
|
||||
|
|
@ -153,32 +133,6 @@ class BarcodeScanResponse(BaseModel):
|
|||
message: str
|
||||
|
||||
|
||||
# ── Bulk add by name ─────────────────────────────────────────────────────────
|
||||
|
||||
class BulkAddItem(BaseModel):
|
||||
name: str = Field(..., min_length=1, max_length=200)
|
||||
quantity: float = Field(default=1.0, gt=0)
|
||||
unit: str = "count"
|
||||
location: str = "pantry"
|
||||
|
||||
|
||||
class BulkAddByNameRequest(BaseModel):
|
||||
items: List[BulkAddItem] = Field(..., min_length=1)
|
||||
|
||||
|
||||
class BulkAddItemResult(BaseModel):
|
||||
name: str
|
||||
ok: bool
|
||||
item_id: Optional[int] = None
|
||||
error: Optional[str] = None
|
||||
|
||||
|
||||
class BulkAddByNameResponse(BaseModel):
|
||||
added: int
|
||||
failed: int
|
||||
results: List[BulkAddItemResult]
|
||||
|
||||
|
||||
# ── Stats ─────────────────────────────────────────────────────────────────────
|
||||
|
||||
class InventoryStats(BaseModel):
|
||||
|
|
|
|||
|
|
@ -1,59 +0,0 @@
|
|||
"""Pydantic schemas for visual label capture (kiwi#79)."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
|
||||
class LabelCaptureResponse(BaseModel):
|
||||
"""Extraction result returned after the user photographs a nutrition label."""
|
||||
barcode: str
|
||||
product_name: Optional[str] = None
|
||||
brand: Optional[str] = None
|
||||
serving_size_g: Optional[float] = None
|
||||
calories: Optional[float] = None
|
||||
fat_g: Optional[float] = None
|
||||
saturated_fat_g: Optional[float] = None
|
||||
carbs_g: Optional[float] = None
|
||||
sugar_g: Optional[float] = None
|
||||
fiber_g: Optional[float] = None
|
||||
protein_g: Optional[float] = None
|
||||
sodium_mg: Optional[float] = None
|
||||
ingredient_names: List[str] = Field(default_factory=list)
|
||||
allergens: List[str] = Field(default_factory=list)
|
||||
confidence: float = 0.0
|
||||
needs_review: bool = True # True when confidence < REVIEW_THRESHOLD
|
||||
|
||||
|
||||
class LabelConfirmRequest(BaseModel):
|
||||
"""User-confirmed extraction to save to the local product cache."""
|
||||
barcode: str
|
||||
product_name: Optional[str] = None
|
||||
brand: Optional[str] = None
|
||||
serving_size_g: Optional[float] = None
|
||||
calories: Optional[float] = None
|
||||
fat_g: Optional[float] = None
|
||||
saturated_fat_g: Optional[float] = None
|
||||
carbs_g: Optional[float] = None
|
||||
sugar_g: Optional[float] = None
|
||||
fiber_g: Optional[float] = None
|
||||
protein_g: Optional[float] = None
|
||||
sodium_mg: Optional[float] = None
|
||||
ingredient_names: List[str] = Field(default_factory=list)
|
||||
allergens: List[str] = Field(default_factory=list)
|
||||
confidence: float = 0.0
|
||||
# When True the confirmed product is also added to inventory
|
||||
location: str = "pantry"
|
||||
quantity: float = 1.0
|
||||
auto_add: bool = True
|
||||
|
||||
|
||||
class LabelConfirmResponse(BaseModel):
|
||||
"""Result of confirming a captured product."""
|
||||
ok: bool
|
||||
barcode: str
|
||||
product_id: Optional[int] = None
|
||||
inventory_item_id: Optional[int] = None
|
||||
message: str
|
||||
|
|
@ -1,100 +0,0 @@
|
|||
# app/models/schemas/meal_plan.py
|
||||
"""Pydantic schemas for meal planning endpoints."""
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import date as _date
|
||||
|
||||
from pydantic import BaseModel, Field, field_validator
|
||||
|
||||
|
||||
VALID_MEAL_TYPES = {"breakfast", "lunch", "dinner", "snack"}
|
||||
|
||||
|
||||
class CreatePlanRequest(BaseModel):
|
||||
week_start: _date
|
||||
meal_types: list[str] = Field(default_factory=lambda: ["dinner"])
|
||||
|
||||
@field_validator("week_start")
|
||||
@classmethod
|
||||
def must_be_monday(cls, v: _date) -> _date:
|
||||
if v.weekday() != 0:
|
||||
raise ValueError("week_start must be a Monday (weekday 0)")
|
||||
return v
|
||||
|
||||
|
||||
class UpdatePlanRequest(BaseModel):
|
||||
meal_types: list[str]
|
||||
|
||||
|
||||
class UpsertSlotRequest(BaseModel):
|
||||
recipe_id: int | None = None
|
||||
servings: float = Field(2.0, gt=0)
|
||||
custom_label: str | None = None
|
||||
|
||||
|
||||
class SlotSummary(BaseModel):
|
||||
id: int
|
||||
plan_id: int
|
||||
day_of_week: int
|
||||
meal_type: str
|
||||
recipe_id: int | None
|
||||
recipe_title: str | None
|
||||
servings: float
|
||||
custom_label: str | None
|
||||
|
||||
|
||||
class PlanSummary(BaseModel):
|
||||
id: int
|
||||
week_start: str
|
||||
meal_types: list[str]
|
||||
slots: list[SlotSummary]
|
||||
created_at: str
|
||||
|
||||
|
||||
class RetailerLink(BaseModel):
|
||||
retailer: str
|
||||
label: str
|
||||
url: str
|
||||
|
||||
|
||||
class GapItem(BaseModel):
|
||||
ingredient_name: str
|
||||
needed_raw: str | None # e.g. "2 cups" from recipe text
|
||||
have_quantity: float | None # from pantry
|
||||
have_unit: str | None
|
||||
covered: bool # True = pantry has it
|
||||
retailer_links: list[RetailerLink] = Field(default_factory=list)
|
||||
|
||||
|
||||
class ShoppingListResponse(BaseModel):
|
||||
plan_id: int
|
||||
gap_items: list[GapItem]
|
||||
covered_items: list[GapItem]
|
||||
disclosure: str | None = None # affiliate disclosure text when links present
|
||||
|
||||
|
||||
class PrepTaskSummary(BaseModel):
|
||||
id: int
|
||||
recipe_id: int | None
|
||||
task_label: str
|
||||
duration_minutes: int | None
|
||||
sequence_order: int
|
||||
equipment: str | None
|
||||
is_parallel: bool
|
||||
notes: str | None
|
||||
user_edited: bool
|
||||
|
||||
|
||||
class PrepSessionSummary(BaseModel):
|
||||
id: int
|
||||
plan_id: int
|
||||
scheduled_date: str
|
||||
status: str
|
||||
tasks: list[PrepTaskSummary]
|
||||
|
||||
|
||||
class UpdatePrepTaskRequest(BaseModel):
|
||||
duration_minutes: int | None = None
|
||||
sequence_order: int | None = None
|
||||
notes: str | None = None
|
||||
equipment: str | None = None
|
||||
|
|
@ -32,7 +32,6 @@ class RecipeSuggestion(BaseModel):
|
|||
match_count: int
|
||||
element_coverage: dict[str, float] = Field(default_factory=dict)
|
||||
swap_candidates: list[SwapCandidate] = Field(default_factory=list)
|
||||
matched_ingredients: list[str] = Field(default_factory=list)
|
||||
missing_ingredients: list[str] = Field(default_factory=list)
|
||||
directions: list[str] = Field(default_factory=list)
|
||||
prep_notes: list[str] = Field(default_factory=list)
|
||||
|
|
@ -40,10 +39,6 @@ class RecipeSuggestion(BaseModel):
|
|||
level: int = 1
|
||||
is_wildcard: bool = False
|
||||
nutrition: NutritionPanel | None = None
|
||||
source_url: str | None = None
|
||||
complexity: str | None = None # 'easy' | 'moderate' | 'involved'
|
||||
estimated_time_min: int | None = None # derived from step count + method signals
|
||||
rerank_score: float | None = None # cross-encoder relevance score (paid+ only, None for free tier)
|
||||
|
||||
|
||||
class GroceryLink(BaseModel):
|
||||
|
|
@ -59,19 +54,6 @@ class RecipeResult(BaseModel):
|
|||
grocery_links: list[GroceryLink] = Field(default_factory=list)
|
||||
rate_limited: bool = False
|
||||
rate_limit_count: int = 0
|
||||
orch_fallback: bool = False # True when orch budget exhausted; fell back to local LLM
|
||||
|
||||
|
||||
class RecipeJobQueued(BaseModel):
    """Acknowledgement that a recipe-generation job was accepted."""

    job_id: str
    status: str = "queued"
|
||||
|
||||
|
||||
class RecipeJobStatus(BaseModel):
    """Polling status for a recipe job; result/error populated when finished."""

    job_id: str
    status: str
    result: RecipeResult | None = None
    error: str | None = None
|
||||
|
||||
|
||||
class NutritionFilters(BaseModel):
|
||||
|
|
@ -84,10 +66,6 @@ class NutritionFilters(BaseModel):
|
|||
|
||||
class RecipeRequest(BaseModel):
|
||||
pantry_items: list[str]
|
||||
# Maps product name → secondary state label for items past nominal expiry
|
||||
# but still within their secondary use window (e.g. {"Bread": "stale"}).
|
||||
# Used by the recipe engine to boost recipes suited to those specific states.
|
||||
secondary_pantry_items: dict[str, str] = Field(default_factory=dict)
|
||||
level: int = Field(default=1, ge=1, le=4)
|
||||
constraints: list[str] = Field(default_factory=list)
|
||||
expiry_first: bool = False
|
||||
|
|
@ -101,76 +79,3 @@ class RecipeRequest(BaseModel):
|
|||
allergies: list[str] = Field(default_factory=list)
|
||||
nutrition_filters: NutritionFilters = Field(default_factory=NutritionFilters)
|
||||
excluded_ids: list[int] = Field(default_factory=list)
|
||||
exclude_ingredients: list[str] = Field(default_factory=list)
|
||||
shopping_mode: bool = False
|
||||
pantry_match_only: bool = False # when True, only return recipes with zero missing ingredients
|
||||
complexity_filter: str | None = None # 'easy' | 'moderate' | 'involved' — None = any
|
||||
max_time_min: int | None = None # filter by estimated cooking time ceiling
|
||||
max_total_min: int | None = None # filter by parsed total time from recipe directions
|
||||
unit_system: str = "metric" # "metric" | "imperial"
|
||||
|
||||
|
||||
# ── Build Your Own schemas ──────────────────────────────────────────────────
|
||||
|
||||
|
||||
class AssemblyRoleOut(BaseModel):
    """One role slot in a template, as returned by GET /api/recipes/templates."""

    display: str
    required: bool
    keywords: list[str]
    hint: str = ""  # optional helper text for the wizard UI
|
||||
|
||||
|
||||
class AssemblyTemplateOut(BaseModel):
    """One assembly template, as returned by GET /api/recipes/templates."""

    id: str  # slug, e.g. "burrito_taco"
    title: str
    icon: str
    descriptor: str
    role_sequence: list[AssemblyRoleOut]
|
||||
|
||||
|
||||
class RoleCandidateItem(BaseModel):
    """One candidate ingredient for a wizard picker step."""

    name: str
    in_pantry: bool
    tags: list[str] = Field(default_factory=list)
|
||||
|
||||
|
||||
class RoleCandidatesResponse(BaseModel):
    """Response from GET /api/recipes/template-candidates."""

    compatible: list[RoleCandidateItem] = Field(default_factory=list)
    other: list[RoleCandidateItem] = Field(default_factory=list)
    available_tags: list[str] = Field(default_factory=list)
|
||||
|
||||
|
||||
class BuildRequest(BaseModel):
    """Request body for POST /api/recipes/build."""

    template_id: str
    # Maps role slug -> chosen ingredient name, overriding the default pick.
    role_overrides: dict[str, str] = Field(default_factory=dict)
|
||||
|
||||
|
||||
class StreamTokenRequest(BaseModel):
    """Request body for POST /recipes/stream-token.

    Pantry items and dietary constraints are fetched from the DB at request
    time — the client does not supply them here.
    """

    level: int = Field(4, ge=3, le=4, description="Recipe level: 3=styled, 4=wildcard")
    wildcard_confirmed: bool = Field(False, description="Required true for level 4")
|
||||
|
||||
|
||||
class StreamTokenResponse(BaseModel):
    """Response from POST /recipes/stream-token.

    The frontend opens EventSource at stream_url?token=<token> to receive
    SSE chunks directly from the coordinator.
    """

    stream_url: str
    token: str
    expires_in_s: int  # token lifetime in seconds
|
||||
|
|
|
|||
|
|
@ -1,44 +0,0 @@
|
|||
"""Pydantic schemas for saved recipes and collections."""
|
||||
from __future__ import annotations
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
|
||||
class SaveRecipeRequest(BaseModel):
    """Save a recipe with optional notes and a 0–5 rating."""

    recipe_id: int
    notes: str | None = None
    rating: int | None = Field(None, ge=0, le=5)
|
||||
|
||||
|
||||
class UpdateSavedRecipeRequest(BaseModel):
    """Update notes, rating, or style tags on a saved recipe."""

    notes: str | None = None
    rating: int | None = Field(None, ge=0, le=5)
    style_tags: list[str] = Field(default_factory=list)
|
||||
|
||||
|
||||
class SavedRecipeSummary(BaseModel):
    """Serialized saved-recipe row, including its collection memberships."""

    id: int
    recipe_id: int
    title: str
    saved_at: str
    notes: str | None
    rating: int | None
    style_tags: list[str]
    collection_ids: list[int] = Field(default_factory=list)
|
||||
|
||||
|
||||
class CollectionSummary(BaseModel):
    """Serialized recipe collection with its member count."""

    id: int
    name: str
    description: str | None
    member_count: int
    created_at: str
|
||||
|
||||
|
||||
class CollectionRequest(BaseModel):
    """Create or rename a collection."""

    name: str
    description: str | None = None
|
||||
|
||||
|
||||
class CollectionMemberRequest(BaseModel):
    """Add a saved recipe to a collection."""

    saved_recipe_id: int
|
||||
|
|
@ -1,60 +0,0 @@
|
|||
"""Pydantic schemas for the shopping list endpoints."""
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Optional
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
|
||||
class ShoppingItemCreate(BaseModel):
    """Create one shopping-list item."""

    name: str = Field(..., min_length=1, max_length=200)
    quantity: Optional[float] = None
    unit: Optional[str] = None
    category: Optional[str] = None
    notes: Optional[str] = None
    source: str = "manual"  # where the item came from (default: manually added)
    recipe_id: Optional[int] = None
    sort_order: int = 0
|
||||
|
||||
|
||||
class ShoppingItemUpdate(BaseModel):
    """Partial update for a shopping-list item; None means leave unchanged."""

    name: Optional[str] = Field(None, min_length=1, max_length=200)
    quantity: Optional[float] = None
    unit: Optional[str] = None
    category: Optional[str] = None
    checked: Optional[bool] = None
    notes: Optional[str] = None
    sort_order: Optional[int] = None
|
||||
|
||||
|
||||
class GroceryLinkOut(BaseModel):
    """One retailer link attached to a shopping-list item."""

    ingredient: str
    retailer: str
    url: str
|
||||
|
||||
|
||||
class ShoppingItemResponse(BaseModel):
    """Serialized shopping-list item, including any resolved grocery links."""

    id: int
    name: str
    quantity: Optional[float]
    unit: Optional[str]
    category: Optional[str]
    checked: bool
    notes: Optional[str]
    source: str
    recipe_id: Optional[int]
    sort_order: int
    created_at: str
    updated_at: str
    # default_factory avoids a shared mutable default and matches the
    # Field(default_factory=list) convention used by the sibling schemas.
    grocery_links: list[GroceryLinkOut] = Field(default_factory=list)
|
||||
|
||||
|
||||
class BulkAddFromRecipeRequest(BaseModel):
    """Add a recipe's ingredients to the shopping list in one call."""

    recipe_id: int
    include_covered: bool = False  # if True, add pantry-covered items too
|
||||
|
||||
|
||||
class ConfirmPurchaseRequest(BaseModel):
    """Move a checked item into pantry inventory."""

    location: str = "pantry"
    quantity: Optional[float] = None  # override the list quantity
    unit: Optional[str] = None
|
||||
|
|
@ -3,11 +3,6 @@
|
|||
Business logic services for Kiwi.
|
||||
"""
|
||||
|
||||
from app.services.receipt_service import ReceiptService
|
||||
|
||||
__all__ = ["ReceiptService"]
|
||||
|
||||
|
||||
def __getattr__(name: str):
    """PEP 562 lazy-attribute hook for the services package.

    NOTE(review): ReceiptService is also imported eagerly at the top of this
    module, which pre-populates the module namespace and makes this lazy path
    unreachable for it — confirm whether the eager import should be dropped.
    """
    if name == "ReceiptService":
        from app.services.receipt_service import ReceiptService
        return ReceiptService
    raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
|
||||
|
|
|
|||
|
|
@ -1,44 +0,0 @@
|
|||
# app/services/community/ap_compat.py
|
||||
# MIT License — AP scaffold only (no actor, inbox, outbox)
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime, timezone
|
||||
|
||||
|
||||
def post_to_ap_json_ld(post: dict, base_url: str) -> dict:
    """Serialize a community post dict to an ActivityPub-compatible JSON-LD Note.

    Read-only scaffold: no AP actor, inbox, or outbox. The slug URI is stable
    so a future full AP implementation can reuse posts without a DB migration.
    """
    when = post.get("published")
    if isinstance(when, datetime):
        published = when.astimezone(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
    else:
        published = str(when)

    hashtags = [{"type": "Hashtag", "name": "#kiwi"}]
    for label in post.get("dietary_tags") or []:
        compact = label.replace("-", "").replace(" ", "")
        hashtags.append({"type": "Hashtag", "name": f"#{compact}"})

    return {
        "@context": "https://www.w3.org/ns/activitystreams",
        "type": "Note",
        "id": f"{base_url}/api/v1/community/posts/{post['slug']}",
        "attributedTo": post.get("pseudonym", "anonymous"),
        "content": _build_content(post),
        "published": published,
        "tag": hashtags,
    }


def _build_content(post: dict) -> str:
    """Render the Note body as "title — description", or just the title."""
    title = post.get("title") or "Untitled"
    description = post.get("description")
    return f"{title} — {description}" if description else title
|
||||
|
|
@ -1,90 +0,0 @@
|
|||
# app/services/community/community_store.py
|
||||
# MIT License
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
|
||||
from circuitforge_core.community import CommunityPost, SharedStore
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class KiwiCommunityStore(SharedStore):
    """Kiwi-specific community store: adds kiwi-domain query methods on top of SharedStore."""

    def list_meal_plans(
        self,
        limit: int = 20,
        offset: int = 0,
        dietary_tags: list[str] | None = None,
        allergen_exclude: list[str] | None = None,
    ) -> list[CommunityPost]:
        """List published kiwi meal-plan posts, optionally filtered."""
        return self.list_posts(
            limit=limit,
            offset=offset,
            post_type="plan",
            dietary_tags=dietary_tags,
            allergen_exclude=allergen_exclude,
            source_product="kiwi",
        )

    def list_outcomes(
        self,
        limit: int = 20,
        offset: int = 0,
        post_type: str | None = None,
    ) -> list[CommunityPost]:
        """List recipe outcomes; a specific post_type passes through, otherwise
        successes and bloopers are merged newest-first."""
        if post_type in ("recipe_success", "recipe_blooper"):
            return self.list_posts(
                limit=limit,
                offset=offset,
                post_type=post_type,
                source_product="kiwi",
            )
        combined: list[CommunityPost] = []
        for kind in ("recipe_success", "recipe_blooper"):
            combined.extend(
                self.list_posts(
                    limit=limit,
                    offset=0,
                    post_type=kind,
                    source_product="kiwi",
                )
            )
        combined.sort(key=lambda p: p.published, reverse=True)
        return combined[:limit]
|
||||
|
||||
|
||||
def get_or_create_pseudonym(
|
||||
store,
|
||||
directus_user_id: str,
|
||||
requested_name: str | None,
|
||||
) -> str:
|
||||
"""Return the user's current pseudonym, creating it if it doesn't exist.
|
||||
|
||||
If the user has an existing pseudonym, return it (ignore requested_name).
|
||||
If not, create using requested_name (must be provided for first-time setup).
|
||||
|
||||
Raises ValueError if no existing pseudonym and requested_name is None or blank.
|
||||
"""
|
||||
existing = store.get_current_pseudonym(directus_user_id)
|
||||
if existing:
|
||||
return existing
|
||||
|
||||
if not requested_name or not requested_name.strip():
|
||||
raise ValueError(
|
||||
"A pseudonym is required for first publish. "
|
||||
"Pass requested_name with the user's chosen display name."
|
||||
)
|
||||
|
||||
name = requested_name.strip()
|
||||
if "@" in name:
|
||||
raise ValueError(
|
||||
"Pseudonym must not contain '@' — use a display name, not an email address."
|
||||
)
|
||||
|
||||
store.set_pseudonym(directus_user_id, name)
|
||||
return name
|
||||
|
|
@ -1,138 +0,0 @@
|
|||
# app/services/community/element_snapshot.py
|
||||
# MIT License
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
|
||||
# Ingredient name substrings → allergen flag
|
||||
_ALLERGEN_MAP: dict[str, str] = {
|
||||
"milk": "dairy", "cream": "dairy", "cheese": "dairy", "butter": "dairy",
|
||||
"yogurt": "dairy", "whey": "dairy",
|
||||
"egg": "eggs",
|
||||
"wheat": "gluten", "pasta": "gluten", "flour": "gluten", "bread": "gluten",
|
||||
"barley": "gluten", "rye": "gluten",
|
||||
"peanut": "nuts", "almond": "nuts", "cashew": "nuts", "walnut": "nuts",
|
||||
"pecan": "nuts", "hazelnut": "nuts", "pistachio": "nuts", "macadamia": "nuts",
|
||||
"soy": "soy", "tofu": "soy", "edamame": "soy", "miso": "soy", "tempeh": "soy",
|
||||
"shrimp": "shellfish", "crab": "shellfish", "lobster": "shellfish",
|
||||
"clam": "shellfish", "mussel": "shellfish", "scallop": "shellfish",
|
||||
"fish": "fish", "salmon": "fish", "tuna": "fish", "cod": "fish",
|
||||
"tilapia": "fish", "halibut": "fish",
|
||||
"sesame": "sesame",
|
||||
}
|
||||
|
||||
_MEAT_KEYWORDS = frozenset([
|
||||
"chicken", "beef", "pork", "lamb", "turkey", "bacon", "ham", "sausage",
|
||||
"salami", "prosciutto", "guanciale", "pancetta", "steak", "ground meat",
|
||||
"mince", "veal", "duck", "venison", "bison", "lard",
|
||||
])
|
||||
_SEAFOOD_KEYWORDS = frozenset([
|
||||
"fish", "shrimp", "crab", "lobster", "tuna", "salmon", "clam", "mussel",
|
||||
"scallop", "anchovy", "sardine", "cod", "tilapia",
|
||||
])
|
||||
_ANIMAL_PRODUCT_KEYWORDS = frozenset([
|
||||
"milk", "cream", "cheese", "butter", "egg", "honey", "yogurt", "whey",
|
||||
])
|
||||
|
||||
|
||||
def _detect_allergens(ingredient_names: list[str]) -> list[str]:
    """Return sorted allergen flags triggered by substring matches on ingredient names."""
    lowered = [name.lower() for name in ingredient_names]
    flags = {
        flag
        for ingredient in lowered
        for keyword, flag in _ALLERGEN_MAP.items()
        if keyword in ingredient
    }
    return sorted(flags)
|
||||
|
||||
|
||||
def _detect_dietary_tags(ingredient_names: list[str]) -> list[str]:
    """Classify the combined ingredient list as vegetarian and/or vegan."""
    haystack = " ".join(name.lower() for name in ingredient_names)

    meaty = any(word in haystack for word in _MEAT_KEYWORDS)
    fishy = any(word in haystack for word in _SEAFOOD_KEYWORDS)
    dairy_or_eggs = any(word in haystack for word in _ANIMAL_PRODUCT_KEYWORDS)

    tags: list[str] = []
    if not (meaty or fishy):
        tags.append("vegetarian")
        # Vegan is strictly narrower: also no dairy/eggs/honey-type products.
        if not dairy_or_eggs:
            tags.append("vegan")
    return tags
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class ElementSnapshot:
    """Denormalized flavor/diet summary for a set of recipes, stored on community posts."""

    seasoning_score: float
    richness_score: float
    brightness_score: float
    depth_score: float
    aroma_score: float
    structure_score: float
    texture_profile: str
    dietary_tags: tuple
    allergen_flags: tuple
    flavor_molecules: tuple
    fat_pct: float | None
    protein_pct: float | None
    moisture_pct: float | None
|
||||
|
||||
|
||||
def compute_snapshot(recipe_ids: list[int], store) -> ElementSnapshot:
    """Compute an element snapshot from a list of recipe IDs.

    Pulls SFAH scores, ingredient lists, and USDA FDC macros from the corpus.
    Averages numeric scores across all recipes. Unions allergen flags and dietary tags.
    Call at publish time only — snapshot is stored denormalized in community_posts.

    Returns an all-empty snapshot when no IDs are given or none resolve to rows.
    """
    # Skip the store round-trip entirely for an empty ID list.
    rows = store.get_recipes_by_ids(recipe_ids) if recipe_ids else []
    if not rows:
        # Single empty-snapshot literal (was duplicated for both guard paths).
        return ElementSnapshot(
            seasoning_score=0.0, richness_score=0.0, brightness_score=0.0,
            depth_score=0.0, aroma_score=0.0, structure_score=0.0,
            texture_profile="", dietary_tags=(), allergen_flags=(),
            flavor_molecules=(), fat_pct=None, protein_pct=None, moisture_pct=None,
        )

    def _avg(field: str) -> float:
        # Missing/None scores count as 0.0 in the average.
        vals = [r.get(field) or 0.0 for r in rows]
        return sum(vals) / len(vals)

    def _mean_or_none(field: str) -> float | None:
        # Mean over only the rows that have the macro; None when no row does.
        vals = [r.get(field) for r in rows if r.get(field) is not None]
        return (sum(vals) / len(vals)) if vals else None

    all_ingredients: list[str] = []
    for r in rows:
        names = r.get("ingredient_names") or []
        all_ingredients.extend(names if isinstance(names, list) else [])

    return ElementSnapshot(
        seasoning_score=_avg("seasoning_score"),
        richness_score=_avg("richness_score"),
        brightness_score=_avg("brightness_score"),
        depth_score=_avg("depth_score"),
        aroma_score=_avg("aroma_score"),
        structure_score=_avg("structure_score"),
        # Texture is taken from the first recipe — averaging a label makes no sense.
        texture_profile=rows[0].get("texture_profile") or "",
        dietary_tags=tuple(_detect_dietary_tags(all_ingredients)),
        allergen_flags=tuple(_detect_allergens(all_ingredients)),
        flavor_molecules=(),
        fat_pct=_mean_or_none("fat"),
        protein_pct=_mean_or_none("protein"),
        moisture_pct=_mean_or_none("moisture"),
    )
|
||||
|
|
@ -1,43 +0,0 @@
|
|||
# app/services/community/feed.py
|
||||
# MIT License
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime, timezone
|
||||
from email.utils import format_datetime
|
||||
from xml.etree.ElementTree import Element, SubElement, tostring
|
||||
|
||||
|
||||
def posts_to_rss(posts: list[dict], base_url: str) -> str:
    """Generate an RSS 2.0 feed from a list of community post dicts.

    base_url: the root URL of this Kiwi instance (no trailing slash).
    Returns UTF-8 XML string.
    """
    root = Element("rss", version="2.0")
    channel = SubElement(root, "channel")

    for tag, text in (
        ("title", "Kiwi Community Feed"),
        ("link", f"{base_url}/community"),
        ("description", "Meal plans and recipe outcomes from the Kiwi community"),
        ("language", "en"),
        ("lastBuildDate", format_datetime(datetime.now(timezone.utc))),
    ):
        _sub(channel, tag, text)

    for post in posts:
        entry = SubElement(channel, "item")
        permalink = f"{base_url}/api/v1/community/posts/{post['slug']}"
        _sub(entry, "title", post.get("title") or "Untitled")
        _sub(entry, "link", permalink)
        _sub(entry, "guid", permalink)
        description = post.get("description")
        if description:
            _sub(entry, "description", description)
        published = post.get("published")
        if isinstance(published, datetime):
            _sub(entry, "pubDate", format_datetime(published))

    return '<?xml version="1.0" encoding="UTF-8"?>\n' + tostring(root, encoding="unicode")


def _sub(parent: Element, tag: str, text: str) -> Element:
    """Append a child element with the given text and return it."""
    child = SubElement(parent, tag)
    child.text = text
    return child
|
||||
|
|
@ -1,72 +0,0 @@
|
|||
# app/services/community/mdns.py
|
||||
# MIT License
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import socket
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Import deferred to avoid hard failure when zeroconf is not installed
|
||||
try:
|
||||
from zeroconf import ServiceInfo, Zeroconf
|
||||
_ZEROCONF_AVAILABLE = True
|
||||
except ImportError:
|
||||
_ZEROCONF_AVAILABLE = False
|
||||
|
||||
|
||||
class KiwiMDNS:
    """Advertise this Kiwi instance on the LAN via mDNS (_kiwi._tcp.local).

    Defaults to disabled (enabled=False). User must explicitly opt in via the
    Settings page. This matches the CF a11y requirement: no surprise broadcasting.

    Usage:
        mdns = KiwiMDNS(enabled=settings.MDNS_ENABLED, port=settings.PORT,
                        feed_url=f"http://{hostname}:{settings.PORT}/api/v1/community/local-feed")
        mdns.start()  # in lifespan startup
        mdns.stop()  # in lifespan shutdown
    """

    # mDNS service type under which Kiwi instances announce themselves.
    SERVICE_TYPE = "_kiwi._tcp.local."

    def __init__(self, enabled: bool, port: int, feed_url: str) -> None:
        # enabled: user opt-in flag — start() is a no-op when False.
        # port: TCP port advertised in the service record.
        # feed_url: community feed URL published as a TXT property.
        self._enabled = enabled
        self._port = port
        self._feed_url = feed_url
        self._zc: "Zeroconf | None" = None
        self._info: "ServiceInfo | None" = None

    def start(self) -> None:
        # Both guards log-and-return instead of raising: mDNS is best-effort.
        if not self._enabled:
            logger.debug("mDNS advertisement disabled (user has not opted in)")
            return
        if not _ZEROCONF_AVAILABLE:
            logger.warning("zeroconf package not installed — mDNS advertisement unavailable")
            return

        hostname = socket.gethostname()
        service_name = f"kiwi-{hostname}.{self.SERVICE_TYPE}"
        self._info = ServiceInfo(
            type_=self.SERVICE_TYPE,
            name=service_name,
            port=self._port,
            properties={
                b"feed_url": self._feed_url.encode(),
                b"version": b"1",
            },
            # NOTE(review): this advertises the loopback address — LAN peers
            # cannot reach 127.0.0.1; confirm whether a LAN interface IP was
            # intended here.
            addresses=[socket.inet_aton("127.0.0.1")],
        )
        self._zc = Zeroconf()
        self._zc.register_service(self._info)
        logger.info("mDNS: advertising %s on port %d", service_name, self._port)

    def stop(self) -> None:
        # Idempotent: safe to call when start() never registered anything.
        if self._zc is None or self._info is None:
            return
        self._zc.unregister_service(self._info)
        self._zc.close()
        self._zc = None
        self._info = None
        logger.info("mDNS: advertisement stopped")
|
||||
|
|
@ -1,94 +0,0 @@
|
|||
"""cf-orch coordinator proxy client.
|
||||
|
||||
Calls the coordinator's /proxy/authorize endpoint to obtain a one-time
|
||||
stream URL + token for LLM streaming. Always raises CoordinatorError on
|
||||
failure — callers decide how to handle it (stream-token endpoint returns
|
||||
503 or 403 as appropriate).
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import os
|
||||
from dataclasses import dataclass
|
||||
|
||||
import httpx
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class CoordinatorError(Exception):
    """Raised when the coordinator returns an error or is unreachable."""

    def __init__(self, message: str, status_code: int = 503):
        # Keep the HTTP-style status so callers can map it to a response code.
        self.status_code = status_code
        super().__init__(message)
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class StreamTokenResult:
    """One-time streaming grant returned by the coordinator's /proxy/authorize."""

    stream_url: str    # URL the client opens to receive the SSE stream
    token: str         # one-time token for that stream
    expires_in_s: int  # token lifetime in seconds
|
||||
|
||||
|
||||
def _coordinator_url() -> str:
    """Coordinator base URL; overridable via the COORDINATOR_URL env var."""
    return os.getenv("COORDINATOR_URL", "http://10.1.10.71:7700")
|
||||
|
||||
|
||||
def _product_key() -> str:
    """Kiwi product key for the coordinator; empty string when unset."""
    return os.getenv("COORDINATOR_KIWI_KEY", "")
|
||||
|
||||
|
||||
async def coordinator_authorize(
    prompt: str,
    caller: str = "kiwi-recipe",
    ttl_s: int = 300,
) -> StreamTokenResult:
    """Call POST /proxy/authorize on the coordinator.

    Args:
        prompt: The LLM prompt the stream will run.
        caller: Logical caller name sent to the coordinator.
        ttl_s: Requested token lifetime in seconds.

    Returns a StreamTokenResult with the stream URL and one-time token.
    Raises CoordinatorError on any failure (network, auth, capacity).
    """
    url = f"{_coordinator_url()}/proxy/authorize"
    key = _product_key()
    if not key:
        raise CoordinatorError(
            "COORDINATOR_KIWI_KEY env var is not set — streaming unavailable",
            status_code=503,
        )

    payload = {
        "product": "kiwi",
        "product_key": key,
        "caller": caller,
        "prompt": prompt,
        "params": {},
        "ttl_s": ttl_s,
    }

    try:
        async with httpx.AsyncClient(timeout=10.0) as client:
            resp = await client.post(url, json=payload)
    except httpx.RequestError as exc:
        log.warning("coordinator_authorize network error: %s", exc)
        # Chain the cause so the original network error survives in tracebacks.
        raise CoordinatorError(f"Coordinator unreachable: {exc}", status_code=503) from exc

    # Map the coordinator's well-known failure codes to typed errors.
    if resp.status_code == 401:
        raise CoordinatorError("Invalid product key", status_code=401)
    if resp.status_code == 429:
        raise CoordinatorError("Too many concurrent streams", status_code=429)
    if resp.status_code == 503:
        raise CoordinatorError("No GPU available for streaming", status_code=503)
    if not resp.is_success:
        raise CoordinatorError(
            f"Coordinator error {resp.status_code}: {resp.text[:200]}",
            status_code=503,
        )

    data = resp.json()
    # Use public_stream_url if coordinator provides it (cloud mode), else stream_url
    stream_url = data.get("public_stream_url") or data["stream_url"]
    return StreamTokenResult(
        stream_url=stream_url,
        token=data["token"],
        expires_in_s=data["expires_in_s"],
    )
|
||||
|
|
@ -116,270 +116,6 @@ class ExpirationPredictor:
|
|||
'prepared_foods': {'fridge': 4, 'freezer': 90},
|
||||
}
|
||||
|
||||
# Secondary shelf life in days after a package is opened.
|
||||
# Sources: USDA FoodKeeper app, FDA consumer guides.
|
||||
# Only categories where opening significantly shortens shelf life are listed.
|
||||
# Items not listed default to None (no secondary window tracked).
|
||||
SHELF_LIFE_AFTER_OPENING: dict[str, int] = {
|
||||
# Dairy — once opened, clock ticks fast
|
||||
'dairy': 5,
|
||||
'milk': 5,
|
||||
'cream': 3,
|
||||
'yogurt': 7,
|
||||
'cheese': 14,
|
||||
'butter': 30,
|
||||
# Condiments — refrigerated after opening
|
||||
'condiments': 30,
|
||||
'ketchup': 30,
|
||||
'mustard': 30,
|
||||
'mayo': 14,
|
||||
'salad_dressing': 30,
|
||||
'soy_sauce': 90,
|
||||
# Canned goods — once opened, very short
|
||||
'canned_goods': 4,
|
||||
# Beverages
|
||||
'juice': 7,
|
||||
'soda': 4,
|
||||
# Bread / Bakery
|
||||
'bread': 5,
|
||||
'bakery': 3,
|
||||
# Produce
|
||||
'leafy_greens': 3,
|
||||
'berries': 3,
|
||||
# Pantry staples (open bag)
|
||||
'chips': 14,
|
||||
'cookies': 14,
|
||||
'cereal': 30,
|
||||
'flour': 90,
|
||||
}
|
||||
|
||||
# Post-expiry secondary use window.
|
||||
# These are NOT spoilage extensions — they describe a qualitative state
|
||||
# change where the ingredient is specifically suited for certain preparations.
|
||||
# Sources: USDA FoodKeeper, food science, culinary tradition.
|
||||
#
|
||||
# Fields:
|
||||
# window_days — days past nominal expiry still usable in secondary state
|
||||
# label — short UI label for the state
|
||||
# uses — recipe contexts suited to this state (shown in UI)
|
||||
# warning — safety note, calm tone, None if none needed
|
||||
# discard_signs — qualitative signs the item has gone past the secondary window
|
||||
# constraints_exclude — dietary constraint labels that suppress this entry entirely
|
||||
# (e.g. alcohol-containing items suppressed for halal/alcohol-free)
|
||||
SECONDARY_WINDOW: dict[str, dict] = {
|
||||
'bread': {
|
||||
'window_days': 5,
|
||||
'label': 'stale',
|
||||
'uses': ['croutons', 'stuffing', 'bread pudding', 'French toast', 'panzanella'],
|
||||
'warning': 'Check for mold before use — discard if any is visible.',
|
||||
'discard_signs': 'Visible mold (any colour), or unpleasant smell beyond dry/yeasty.',
|
||||
'constraints_exclude': [],
|
||||
},
|
||||
'bakery': {
|
||||
'window_days': 3,
|
||||
'label': 'day-old',
|
||||
'uses': ['French toast', 'bread pudding', 'crumbles', 'trifle base', 'cake pops', 'streusel topping', 'bread crumbs'],
|
||||
'warning': 'Check for mold before use — discard if any is visible.',
|
||||
'discard_signs': 'Visible mold, sliminess, or strong sour smell.',
|
||||
'constraints_exclude': [],
|
||||
},
|
||||
'bananas': {
|
||||
'window_days': 5,
|
||||
'label': 'overripe',
|
||||
'uses': ['banana bread', 'smoothies', 'pancakes', 'muffins'],
|
||||
'warning': None,
|
||||
'discard_signs': 'Leaking liquid, fermented smell, or mold on skin.',
|
||||
'constraints_exclude': [],
|
||||
},
|
||||
'milk': {
|
||||
'window_days': 3,
|
||||
'label': 'sour',
|
||||
'uses': ['pancakes', 'scones', 'waffles', 'muffins', 'quick breads', 'béchamel', 'baked mac and cheese'],
|
||||
'warning': 'Use only in cooked recipes — do not drink.',
|
||||
'discard_signs': 'Chunky texture, strong unpleasant smell beyond tangy, or visible separation with grey colour.',
|
||||
'constraints_exclude': [],
|
||||
},
|
||||
'dairy': {
|
||||
'window_days': 2,
|
||||
'label': 'sour',
|
||||
'uses': ['pancakes', 'scones', 'quick breads', 'muffins', 'waffles'],
|
||||
'warning': 'Use only in cooked recipes — do not drink.',
|
||||
'discard_signs': 'Strong unpleasant smell, unusual colour, or chunky texture.',
|
||||
'constraints_exclude': [],
|
||||
},
|
||||
'cheese': {
|
||||
'window_days': 14,
|
||||
'label': 'rind-ready',
|
||||
'uses': ['parmesan broth', 'minestrone', 'ribollita', 'risotto', 'polenta', 'bean soups', 'gratins'],
|
||||
'warning': None,
|
||||
'discard_signs': 'Soft or wet texture on hard cheese, pink or black mold (white/green surface mold on hard cheese can be cut off with 1cm margin).',
|
||||
'constraints_exclude': [],
|
||||
},
|
||||
'rice': {
|
||||
'window_days': 2,
|
||||
'label': 'day-old',
|
||||
'uses': ['fried rice', 'onigiri', 'rice porridge', 'congee', 'arancini', 'stuffed peppers', 'rice fritters'],
|
||||
'warning': 'Refrigerate immediately after cooking — do not leave at room temp.',
|
||||
'discard_signs': 'Slimy texture, unusual smell, or more than 4 days since cooking.',
|
||||
'constraints_exclude': [],
|
||||
},
|
||||
'tortillas': {
|
||||
'window_days': 5,
|
||||
'label': 'stale',
|
||||
'uses': ['chilaquiles', 'migas', 'tortilla soup', 'casserole'],
|
||||
'warning': 'Check for mold, especially if stored in a sealed bag — discard if any is visible.',
|
||||
'discard_signs': 'Visible mold (check seams and edges), or strong sour smell.',
|
||||
'constraints_exclude': [],
|
||||
},
|
||||
# ── New entries ──────────────────────────────────────────────────────
|
||||
'apples': {
|
||||
'window_days': 7,
|
||||
'label': 'soft',
|
||||
'uses': ['applesauce', 'apple butter', 'baked apples', 'apple crisp', 'smoothies', 'chutney'],
|
||||
'warning': None,
|
||||
'discard_signs': 'Large bruised areas with fermented smell, visible mold, or liquid leaking from skin.',
|
||||
'constraints_exclude': [],
|
||||
},
|
||||
'leafy_greens': {
|
||||
'window_days': 2,
|
||||
'label': 'wilting',
|
||||
'uses': ['sautéed greens', 'soups', 'smoothies', 'frittata', 'pasta add-in', 'stir fry'],
|
||||
'warning': None,
|
||||
'discard_signs': 'Slimy texture, strong unpleasant smell, or yellowed and mushy leaves.',
|
||||
'constraints_exclude': [],
|
||||
},
|
||||
'tomatoes': {
|
||||
'window_days': 4,
|
||||
'label': 'soft',
|
||||
'uses': ['roasted tomatoes', 'tomato sauce', 'shakshuka', 'bruschetta', 'soup', 'salsa'],
|
||||
'warning': None,
|
||||
'discard_signs': 'Broken skin with liquid pooling, mold, or fermented smell.',
|
||||
'constraints_exclude': [],
|
||||
},
|
||||
'cooked_pasta': {
|
||||
'window_days': 3,
|
||||
'label': 'day-old',
|
||||
'uses': ['pasta frittata', 'pasta salad', 'baked pasta', 'soup add-in', 'fried pasta cakes'],
|
||||
'warning': 'Refrigerate within 2 hours of cooking.',
|
||||
'discard_signs': 'Slimy texture, off smell, or more than 4 days since cooking.',
|
||||
'constraints_exclude': [],
|
||||
},
|
||||
'cooked_potatoes': {
|
||||
'window_days': 3,
|
||||
'label': 'day-old',
|
||||
'uses': ['potato pancakes', 'hash browns', 'potato soup', 'gnocchi', 'twice-baked potatoes', 'croquettes'],
|
||||
'warning': 'Refrigerate within 2 hours of cooking.',
|
||||
'discard_signs': 'Slimy texture, off smell, or more than 4 days since cooking.',
|
||||
'constraints_exclude': [],
|
||||
},
|
||||
'yogurt': {
|
||||
'window_days': 7,
|
||||
'label': 'tangy',
|
||||
'uses': ['marinades', 'flatbreads', 'smoothies', 'tzatziki', 'baked goods', 'salad dressings'],
|
||||
'warning': None,
|
||||
'discard_signs': 'Pink or orange discolouration, visible mold, or strongly unpleasant smell (not just tangy).',
|
||||
'constraints_exclude': [],
|
||||
},
|
||||
'cream': {
|
||||
'window_days': 2,
|
||||
'label': 'sour',
|
||||
'uses': ['soups', 'sauces', 'scones', 'quick breads', 'mashed potatoes'],
|
||||
'warning': 'Use in cooked recipes only. Discard if the smell is strongly unpleasant rather than tangy.',
|
||||
'discard_signs': 'Strong unpleasant smell beyond tangy, unusual colour, or chunky texture.',
|
||||
'constraints_exclude': [],
|
||||
},
|
||||
'wine': {
|
||||
'window_days': 4,
|
||||
'label': 'open',
|
||||
'uses': ['pan sauces', 'braises', 'risotto', 'marinades', 'poaching liquid', 'wine reduction'],
|
||||
'warning': None,
|
||||
'discard_signs': 'Strong vinegar smell (still usable in braises/marinades), or visible cloudiness with off-smell.',
|
||||
'constraints_exclude': ['halal', 'alcohol-free'],
|
||||
},
|
||||
'cooked_beans': {
|
||||
'window_days': 3,
|
||||
'label': 'day-old',
|
||||
'uses': ['refried beans', 'bean soup', 'bean fritters', 'hummus', 'bean dip', 'grain bowls'],
|
||||
'warning': 'Refrigerate within 2 hours of cooking.',
|
||||
'discard_signs': 'Slimy texture, off smell, or more than 4 days since cooking.',
|
||||
'constraints_exclude': [],
|
||||
},
|
||||
'cooked_meat': {
|
||||
'window_days': 2,
|
||||
'label': 'leftover',
|
||||
'uses': ['grain bowls', 'tacos', 'soups', 'fried rice', 'sandwiches', 'hash', 'pasta add-in'],
|
||||
'warning': 'Refrigerate within 2 hours of cooking.',
|
||||
'discard_signs': 'Off smell, slimy texture, or more than 3–4 days since cooking.',
|
||||
'constraints_exclude': [],
|
||||
},
|
||||
}
|
||||
|
||||
def days_after_opening(self, category: str | None) -> int | None:
    """Return days of shelf life remaining once a package is opened.

    None when the category is missing, unknown, or not tracked after
    opening (e.g. frozen items, raw meat — the check is irrelevant once
    those are opened).
    """
    if category:
        return self.SHELF_LIFE_AFTER_OPENING.get(category.lower())
    return None
|
||||
|
||||
def secondary_state(
    self, category: str | None, expiry_date: str | None
) -> dict | None:
    """Return secondary-use info when an item sits in its post-expiry window.

    An item qualifies when its category has a SECONDARY_WINDOW entry and
    today falls between the expiry date and expiry + window_days
    (inclusive). The returned dict carries label, uses, warning,
    discard_signs, constraints_exclude, days_past and window_days.

    Every other case yields None: missing inputs, unknown category, no
    window defined, unparseable date, not yet expired, or past the window.

    Callers should apply constraints_exclude against user dietary
    constraints and suppress the result entirely if any excluded
    constraint is active. See filter_secondary_by_constraints().
    """
    if not category or not expiry_date:
        return None

    entry = self.SECONDARY_WINDOW.get(category.lower())
    if not entry:
        return None

    from datetime import date

    try:
        expires = date.fromisoformat(expiry_date)
    except ValueError:
        # Malformed date strings are treated as "no secondary state".
        return None

    elapsed = (date.today() - expires).days
    if not 0 <= elapsed <= entry['window_days']:
        return None

    return {
        'label': entry['label'],
        'uses': list(entry['uses']),
        'warning': entry['warning'],
        'discard_signs': entry.get('discard_signs'),
        'constraints_exclude': list(entry.get('constraints_exclude') or []),
        'days_past': elapsed,
        'window_days': entry['window_days'],
    }
|
||||
|
||||
@staticmethod
|
||||
def filter_secondary_by_constraints(
|
||||
sec: dict | None,
|
||||
user_constraints: list[str],
|
||||
) -> dict | None:
|
||||
"""Suppress secondary state entirely if any excluded constraint is active.
|
||||
|
||||
Call after secondary_state() when user dietary constraints are available.
|
||||
Returns sec unchanged when no constraints match, or None when suppressed.
|
||||
"""
|
||||
if sec is None:
|
||||
return None
|
||||
excluded = sec.get('constraints_exclude') or []
|
||||
if any(c.lower() in [e.lower() for e in excluded] for c in user_constraints):
|
||||
return None
|
||||
return sec
|
||||
|
||||
# Keyword lists are checked in declaration order — most specific first.
|
||||
# Rules:
|
||||
# - canned/processed goods BEFORE raw-meat terms (canned chicken != raw chicken)
|
||||
|
|
|
|||
|
|
@ -1,80 +0,0 @@
|
|||
"""Heimdall cf-orch budget client.
|
||||
|
||||
Calls Heimdall's /orch/* endpoints to gate and record cf-orch usage for
|
||||
lifetime/founders license holders. Always fails open on network errors —
|
||||
a Heimdall outage should never block the user.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import os
|
||||
|
||||
import requests
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
HEIMDALL_URL: str = os.environ.get("HEIMDALL_URL", "https://license.circuitforge.tech")
|
||||
HEIMDALL_ADMIN_TOKEN: str = os.environ.get("HEIMDALL_ADMIN_TOKEN", "")
|
||||
|
||||
|
||||
def _headers() -> dict[str, str]:
|
||||
if HEIMDALL_ADMIN_TOKEN:
|
||||
return {"Authorization": f"Bearer {HEIMDALL_ADMIN_TOKEN}"}
|
||||
return {}
|
||||
|
||||
|
||||
def check_orch_budget(key_display: str, product: str) -> dict:
    """Call POST /orch/check and return the budget-gate response dict.

    Fail-open by design: on any error (network, auth, bad JSON) a
    permissive dict is returned so a Heimdall outage never blocks the user.
    """
    # Fail open — Heimdall outage must never block the user
    permissive = {
        "allowed": True,
        "calls_used": 0,
        "calls_total": 0,
        "topup_calls": 0,
        "period_start": "",
        "resets_on": "",
    }
    try:
        resp = requests.post(
            f"{HEIMDALL_URL}/orch/check",
            json={"key_display": key_display, "product": product},
            headers=_headers(),
            timeout=5,
        )
    except Exception as exc:
        log.warning("Heimdall orch/check failed (fail-open): %s", exc)
        return permissive
    if not resp.ok:
        log.warning("Heimdall orch/check returned %s for key %s", resp.status_code, key_display[:12])
        return permissive
    try:
        return resp.json()
    except Exception as exc:
        # Body was not valid JSON — still fail open.
        log.warning("Heimdall orch/check failed (fail-open): %s", exc)
        return permissive
|
||||
|
||||
|
||||
def get_orch_usage(key_display: str, product: str) -> dict:
    """Call GET /orch/usage and return the usage counters dict.

    Non-blocking: any failure (network, non-2xx status, bad JSON) returns
    an all-zero dict instead of raising.
    """
    zeroed = {
        "calls_used": 0,
        "topup_calls": 0,
        "calls_total": 0,
        "period_start": "",
        "resets_on": "",
    }
    try:
        resp = requests.get(
            f"{HEIMDALL_URL}/orch/usage",
            params={"key_display": key_display, "product": product},
            headers=_headers(),
            timeout=5,
        )
    except Exception as exc:
        log.warning("Heimdall orch/usage failed: %s", exc)
        return zeroed
    if not resp.ok:
        log.warning("Heimdall orch/usage returned %s", resp.status_code)
        return zeroed
    try:
        return resp.json()
    except Exception as exc:
        # Body was not valid JSON — degrade to zeros.
        log.warning("Heimdall orch/usage failed: %s", exc)
        return zeroed
|
||||
|
|
@ -1,140 +0,0 @@
|
|||
"""Visual label capture service for unenriched products (kiwi#79).
|
||||
|
||||
Wraps the cf-core VisionRouter to extract structured nutrition data from a
|
||||
photographed nutrition facts panel. When the VisionRouter is not yet wired
|
||||
(NotImplementedError) the service falls back to a mock extraction so the
|
||||
barcode scan flow can be exercised end-to-end in development.
|
||||
|
||||
JSON contract returned by the vision model (and mock):
|
||||
{
|
||||
"product_name": str | null,
|
||||
"brand": str | null,
|
||||
"serving_size_g": number | null,
|
||||
"calories": number | null,
|
||||
"fat_g": number | null,
|
||||
"saturated_fat_g": number | null,
|
||||
"carbs_g": number | null,
|
||||
"sugar_g": number | null,
|
||||
"fiber_g": number | null,
|
||||
"protein_g": number | null,
|
||||
"sodium_mg": number | null,
|
||||
"ingredient_names": [str],
|
||||
"allergens": [str],
|
||||
"confidence": number (0.0–1.0)
|
||||
}
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
from typing import Any
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
# Confidence below this threshold surfaces amber highlights in the UI.
|
||||
REVIEW_THRESHOLD = 0.7
|
||||
|
||||
_MOCK_EXTRACTION: dict[str, Any] = {
|
||||
"product_name": "Unknown Product",
|
||||
"brand": None,
|
||||
"serving_size_g": None,
|
||||
"calories": None,
|
||||
"fat_g": None,
|
||||
"saturated_fat_g": None,
|
||||
"carbs_g": None,
|
||||
"sugar_g": None,
|
||||
"fiber_g": None,
|
||||
"protein_g": None,
|
||||
"sodium_mg": None,
|
||||
"ingredient_names": [],
|
||||
"allergens": [],
|
||||
"confidence": 0.0,
|
||||
}
|
||||
|
||||
_EXTRACTION_PROMPT = """You are reading a nutrition facts label photograph.
|
||||
Extract the following fields as a JSON object with no extra text:
|
||||
|
||||
{
|
||||
"product_name": <product name or null>,
|
||||
"brand": <brand name or null>,
|
||||
"serving_size_g": <serving size in grams as a number or null>,
|
||||
"calories": <calories per serving as a number or null>,
|
||||
"fat_g": <total fat grams or null>,
|
||||
"saturated_fat_g": <saturated fat grams or null>,
|
||||
"carbs_g": <total carbohydrates grams or null>,
|
||||
"sugar_g": <sugars grams or null>,
|
||||
"fiber_g": <dietary fiber grams or null>,
|
||||
"protein_g": <protein grams or null>,
|
||||
"sodium_mg": <sodium milligrams or null>,
|
||||
"ingredient_names": [list of individual ingredients as strings],
|
||||
"allergens": [list of allergens explicitly stated on label],
|
||||
"confidence": <your confidence this extraction is correct, 0.0 to 1.0>
|
||||
}
|
||||
|
||||
Use null for any field you cannot read clearly. Do not guess values.
|
||||
Respond with JSON only."""
|
||||
|
||||
|
||||
def extract_label(image_bytes: bytes) -> dict[str, Any]:
    """Run vision-model extraction on raw label image bytes.

    Returns a dict matching the module's nutrition JSON contract. Any
    failure — VisionRouter stub, transport error, or unparseable model
    output — degrades to the zero-confidence mock so the barcode scan flow
    keeps working end-to-end.
    """
    # Unit tests can bypass the vision model entirely.
    if os.environ.get("KIWI_LABEL_CAPTURE_MOCK") == "1":
        log.debug("label_capture: mock mode active")
        return dict(_MOCK_EXTRACTION)

    try:
        from circuitforge_core.vision import caption as vision_caption

        reply = vision_caption(image_bytes, prompt=_EXTRACTION_PROMPT)
        return _parse_extraction(reply.caption or "")
    except Exception as exc:
        log.warning("label_capture: extraction failed (%s) — returning mock extraction", exc)
        return dict(_MOCK_EXTRACTION)
|
||||
|
||||
|
||||
def _parse_extraction(raw: str) -> dict[str, Any]:
|
||||
"""Parse the JSON string returned by the vision model.
|
||||
|
||||
Strips markdown code fences if present. Validates required shape.
|
||||
Returns the mock on any parse error.
|
||||
"""
|
||||
text = raw.strip()
|
||||
if text.startswith("```"):
|
||||
# Strip ```json ... ``` fences
|
||||
lines = text.splitlines()
|
||||
text = "\n".join(lines[1:-1] if lines[-1].strip() == "```" else lines[1:])
|
||||
|
||||
try:
|
||||
data = json.loads(text)
|
||||
except json.JSONDecodeError as exc:
|
||||
log.warning("label_capture: could not parse vision response: %s", exc)
|
||||
return dict(_MOCK_EXTRACTION)
|
||||
|
||||
if not isinstance(data, dict):
|
||||
log.warning("label_capture: vision response is not a dict")
|
||||
return dict(_MOCK_EXTRACTION)
|
||||
|
||||
# Normalise list fields — model may return None instead of []
|
||||
for list_key in ("ingredient_names", "allergens"):
|
||||
if not isinstance(data.get(list_key), list):
|
||||
data[list_key] = []
|
||||
|
||||
# Clamp confidence to [0, 1]
|
||||
confidence = data.get("confidence")
|
||||
if not isinstance(confidence, (int, float)):
|
||||
confidence = 0.0
|
||||
data["confidence"] = max(0.0, min(1.0, float(confidence)))
|
||||
|
||||
return data
|
||||
|
||||
|
||||
def needs_review(extraction: dict[str, Any]) -> bool:
    """True when confidence is below REVIEW_THRESHOLD (UI shows amber highlights)."""
    confidence = float(extraction.get("confidence", 0.0))
    return confidence < REVIEW_THRESHOLD
|
||||
|
|
@ -1 +0,0 @@
|
|||
"""Meal planning service layer — no FastAPI imports (extraction-ready for cf-core)."""
|
||||
|
|
@ -1,108 +0,0 @@
|
|||
# app/services/meal_plan/affiliates.py
|
||||
"""Register Kiwi-specific affiliate programs and provide search URL builders.
|
||||
|
||||
Called once at API startup. Programs not yet in core.affiliates are registered
|
||||
here. The actual affiliate IDs are read from environment variables at call
|
||||
time, so the process can start before accounts are approved (plain URLs
|
||||
returned when env vars are absent).
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from urllib.parse import quote_plus
|
||||
|
||||
from circuitforge_core.affiliates import AffiliateProgram, register_program, wrap_url
|
||||
|
||||
|
||||
# ── URL builders ──────────────────────────────────────────────────────────────
|
||||
|
||||
def _walmart_search(url: str, affiliate_id: str) -> str:
|
||||
sep = "&" if "?" in url else "?"
|
||||
return f"{url}{sep}affil=apa&affiliateId={affiliate_id}"
|
||||
|
||||
|
||||
def _target_search(url: str, affiliate_id: str) -> str:
|
||||
sep = "&" if "?" in url else "?"
|
||||
return f"{url}{sep}afid={affiliate_id}"
|
||||
|
||||
|
||||
def _thrive_search(url: str, affiliate_id: str) -> str:
|
||||
sep = "&" if "?" in url else "?"
|
||||
return f"{url}{sep}raf={affiliate_id}"
|
||||
|
||||
|
||||
def _misfits_search(url: str, affiliate_id: str) -> str:
|
||||
sep = "&" if "?" in url else "?"
|
||||
return f"{url}{sep}ref={affiliate_id}"
|
||||
|
||||
|
||||
# ── Registration ──────────────────────────────────────────────────────────────
|
||||
|
||||
def register_kiwi_programs() -> None:
    """Register Kiwi retailer programs. Safe to call multiple times (idempotent)."""
    # (display name, retailer key, env var holding the affiliate id, URL builder)
    programs = (
        ("Walmart", "walmart", "WALMART_AFFILIATE_ID", _walmart_search),
        ("Target", "target", "TARGET_AFFILIATE_ID", _target_search),
        ("Thrive Market", "thrive", "THRIVE_AFFILIATE_ID", _thrive_search),
        ("Misfits Market", "misfits", "MISFITS_AFFILIATE_ID", _misfits_search),
    )
    for display_name, retailer_key, env_var, builder in programs:
        register_program(AffiliateProgram(
            name=display_name,
            retailer_key=retailer_key,
            env_var=env_var,
            build_url=builder,
        ))
|
||||
|
||||
|
||||
# ── Search URL helpers ─────────────────────────────────────────────────────────
|
||||
|
||||
# Plain search URL per retailer; {q} is the URL-encoded ingredient name.
# Keys double as the retailer_key values used by core.affiliates.
_SEARCH_TEMPLATES: dict[str, str] = {
    "amazon": "https://www.amazon.com/s?k={q}",
    "instacart": "https://www.instacart.com/store/search_v3/term?term={q}",
    "walmart": "https://www.walmart.com/search?q={q}",
    "target": "https://www.target.com/s?searchTerm={q}",
    "thrive": "https://thrivemarket.com/search?q={q}",
    "misfits": "https://www.misfitsmarket.com/shop?search={q}",
}

# Public list of supported retailer keys, in template declaration order.
KIWI_RETAILERS = list(_SEARCH_TEMPLATES.keys())
|
||||
|
||||
|
||||
def get_retailer_links(ingredient_name: str) -> list[dict]:
    """Return affiliate-wrapped search links for *ingredient_name*.

    Each entry is {"retailer": str, "label": str, "url": str}. When no
    affiliate ID is configured (or wrapping fails) the plain search URL is
    used instead.
    """
    encoded = quote_plus(ingredient_name)
    results: list[dict] = []
    for retailer_key, template in _SEARCH_TEMPLATES.items():
        search_url = template.format(q=encoded)
        try:
            final_url = wrap_url(search_url, retailer=retailer_key)
        except Exception:
            # No program / no affiliate id — fall back to the plain URL.
            final_url = search_url
        results.append({
            "retailer": retailer_key,
            "label": _label(retailer_key),
            "url": final_url,
        })
    return results
|
||||
|
||||
|
||||
def _label(key: str) -> str:
|
||||
return {
|
||||
"amazon": "Amazon",
|
||||
"instacart": "Instacart",
|
||||
"walmart": "Walmart",
|
||||
"target": "Target",
|
||||
"thrive": "Thrive Market",
|
||||
"misfits": "Misfits Market",
|
||||
}.get(key, key.title())
|
||||
|
|
@ -1,91 +0,0 @@
|
|||
# app/services/meal_plan/llm_planner.py
|
||||
# BSL 1.1 — LLM feature
|
||||
"""LLM-assisted full-week meal plan generation.
|
||||
|
||||
Returns suggestions for human review — never writes to the DB directly.
|
||||
The API endpoint presents the suggestions and waits for user approval
|
||||
before calling store.upsert_slot().
|
||||
|
||||
Routing: pass a router from get_meal_plan_router() in llm_router.py.
|
||||
Cloud: cf-text via cf-orch (3B-7B GGUF, ~2GB VRAM).
|
||||
Local: LLMRouter (ollama / vllm / openai-compat per llm.yaml).
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import logging
|
||||
from dataclasses import dataclass
|
||||
|
||||
logger = logging.getLogger(__name__)

_PLAN_SYSTEM = """\
You are a practical meal planning assistant. Given a pantry inventory and
dietary preferences, suggest a week of dinners (or other configured meals).

Prioritise ingredients that are expiring soon. Prefer variety across the week.
Respect all dietary restrictions.

Respond with a JSON array only — no prose, no markdown fences.
Each item: {"day": 0-6, "meal_type": "dinner", "recipe_id": <int or null>, "suggestion": "<recipe name>"}

day 0 = Monday, day 6 = Sunday.
If you cannot match a known recipe_id, set recipe_id to null and provide a suggestion name.
"""


@dataclass(frozen=True)
class PlanSuggestion:
    """One proposed meal slot, pending user approval before any DB write."""
    day: int  # 0 = Monday
    meal_type: str
    recipe_id: int | None  # None when no known recipe matched
    suggestion: str  # human-readable name


def generate_plan(
    pantry_items: list[str],
    meal_types: list[str],
    dietary_notes: str,
    router,
) -> list[PlanSuggestion]:
    """Return a list of PlanSuggestion for user review.

    Never writes to DB — caller must upsert slots after user approves.
    Returns an empty list if router is None or the response is unparseable.
    Malformed items (non-dicts, day outside 0-6) are silently dropped.
    """
    if router is None:
        return []

    # Cap the pantry at 50 items to keep the prompt small.
    inventory_block = "\n".join(f"- {entry}" for entry in pantry_items[:50])
    user_msg = (
        f"Meal types: {', '.join(meal_types)}\n"
        f"Dietary notes: {dietary_notes or 'none'}\n\n"
        f"Pantry (partial):\n{inventory_block}"
    )

    try:
        reply = router.complete(
            system=_PLAN_SYSTEM,
            user=user_msg,
            max_tokens=512,
            temperature=0.7,
        )
        parsed = json.loads(reply.strip())
    except Exception as exc:
        logger.debug("LLM plan generation failed: %s", exc)
        return []

    if not isinstance(parsed, list):
        return []

    suggestions: list[PlanSuggestion] = []
    for entry in parsed:
        if not isinstance(entry, dict):
            continue
        day_index = entry.get("day")
        if not isinstance(day_index, int) or not 0 <= day_index <= 6:
            continue
        suggestions.append(PlanSuggestion(
            day=day_index,
            meal_type=entry.get("meal_type", "dinner"),
            recipe_id=entry.get("recipe_id"),
            suggestion=str(entry.get("suggestion", "")),
        ))
    return suggestions
|
||||
|
|
@ -1,96 +0,0 @@
|
|||
# app/services/meal_plan/llm_router.py
|
||||
# BSL 1.1 — LLM feature
|
||||
"""Provide a router-compatible LLM client for meal plan generation tasks.
|
||||
|
||||
Cloud (CF_ORCH_URL set):
|
||||
Allocates a cf-text service via cf-orch (3B-7B GGUF, ~2GB VRAM).
|
||||
Returns an _OrchTextRouter that wraps the cf-text HTTP endpoint
|
||||
with a .complete(system, user, **kwargs) interface.
|
||||
|
||||
Local / self-hosted (no CF_ORCH_URL):
|
||||
Returns an LLMRouter instance which tries ollama, vllm, or any
|
||||
backend configured in ~/.config/circuitforge/llm.yaml.
|
||||
|
||||
Both paths expose the same interface so llm_timing.py and llm_planner.py
|
||||
need no knowledge of the backend.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import os
|
||||
from contextlib import nullcontext
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# cf-orch service name and VRAM budget for meal plan LLM tasks.
# These are lighter than recipe_llm (4.0 GB) — cf-text handles them.
_SERVICE_TYPE = "cf-text"   # cf-orch service type to allocate
_TTL_S = 120.0              # allocation time-to-live in seconds
_CALLER = "kiwi-meal-plan"  # caller tag reported to cf-orch
|
||||
|
||||
|
||||
class _OrchTextRouter:
    """Thin adapter that makes a cf-text HTTP endpoint look like LLMRouter."""

    def __init__(self, base_url: str) -> None:
        # Normalise once so the "/v1" join below never produces "//".
        self._base_url = base_url.rstrip("/")

    def complete(
        self,
        system: str = "",
        user: str = "",
        max_tokens: int = 512,
        temperature: float = 0.7,
        **_kwargs,
    ) -> str:
        """Send one chat completion to the cf-text endpoint and return the text."""
        from openai import OpenAI

        client = OpenAI(base_url=self._base_url + "/v1", api_key="any")

        chat = [{"role": "system", "content": system}] if system else []
        chat.append({"role": "user", "content": user})

        # cf-text serves exactly one model; fall back to a placeholder name
        # when the model-listing endpoint is unavailable.
        try:
            model_name = client.models.list().data[0].id
        except Exception:
            model_name = "local"

        completion = client.chat.completions.create(
            model=model_name,
            messages=chat,
            max_tokens=max_tokens,
            temperature=temperature,
        )
        return completion.choices[0].message.content or ""
|
||||
|
||||
|
||||
def get_meal_plan_router():
    """Return an (llm_client, context) pair for meal plan tasks.

    Tries cf-orch cf-text allocation first (cloud); falls back to LLMRouter
    (local ollama/vllm). The first element is None when no backend is
    available. The second element is the context manager guarding the
    cf-orch lease (a nullcontext on the local / no-backend paths); the
    caller must exit it when done.
    """
    cf_orch_url = os.environ.get("CF_ORCH_URL")
    if cf_orch_url:
        try:
            from circuitforge_orch.client import CFOrchClient

            client = CFOrchClient(cf_orch_url)
            ctx = client.allocate(
                service=_SERVICE_TYPE,
                ttl_s=_TTL_S,
                caller=_CALLER,
            )
            alloc = ctx.__enter__()
            if alloc is not None:
                return _OrchTextRouter(alloc.url), ctx
            # Allocation came back empty — exit the context before falling
            # back so the entered lease is not leaked.
            ctx.__exit__(None, None, None)
        except Exception as exc:
            logger.debug("cf-orch cf-text allocation failed, falling back to LLMRouter: %s", exc)

    # Local fallback: LLMRouter (ollama / vllm / openai-compat)
    try:
        from circuitforge_core.llm.router import LLMRouter
        return LLMRouter(), nullcontext(None)
    except FileNotFoundError:
        logger.debug("LLMRouter: no llm.yaml and no LLM env vars — meal plan LLM disabled")
        return None, nullcontext(None)
    except Exception as exc:
        logger.debug("LLMRouter init failed: %s", exc)
        return None, nullcontext(None)
|
||||
|
|
@ -1,65 +0,0 @@
|
|||
# app/services/meal_plan/llm_timing.py
|
||||
# BSL 1.1 — LLM feature
|
||||
"""Estimate cook times for recipes missing corpus prep/cook time fields.
|
||||
|
||||
Used only when tier allows `meal_plan_llm_timing`. Falls back gracefully
|
||||
when no LLM backend is available.
|
||||
|
||||
Routing: pass a router from get_meal_plan_router() in llm_router.py.
|
||||
Cloud: cf-text via cf-orch (3B GGUF, ~2GB VRAM).
|
||||
Local: LLMRouter (ollama / vllm / openai-compat per llm.yaml).
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)

_TIMING_PROMPT = """\
You are a practical cook. Given a recipe name and its ingredients, estimate:
1. prep_time: minutes of active prep work (chopping, mixing, etc.)
2. cook_time: minutes of cooking (oven, stovetop, etc.)

Respond with ONLY two integers on separate lines:
prep_time
cook_time

If you cannot estimate, respond with:
0
0
"""


def estimate_timing(recipe_name: str, ingredients: list[str], router) -> tuple[int | None, int | None]:
    """Estimate (prep_minutes, cook_minutes) for a recipe via the LLM router.

    Never raises. Returns (None, None) when the router is unavailable, the
    response is unparseable, or the model answered 0/0 ("cannot estimate").

    Args:
        recipe_name: Name of the recipe.
        ingredients: Raw ingredient strings from the corpus (may be None).
        router: Object exposing .complete(system=..., user=..., ...).
    """
    if router is None:
        return None, None

    # Cap at 15 ingredients to keep the prompt short.
    bullet_list = "\n".join(f"- {line}" for line in (ingredients or [])[:15])
    user_prompt = f"Recipe: {recipe_name}\n\nIngredients:\n{bullet_list}"

    try:
        raw = router.complete(
            system=_TIMING_PROMPT,
            user=user_prompt,
            max_tokens=16,
            temperature=0.0,
        )
        numbers = raw.strip().splitlines()
        prep_minutes = int(numbers[0].strip()) if numbers else 0
        cook_minutes = int(numbers[1].strip()) if len(numbers) > 1 else 0
    except Exception as exc:
        logger.debug("LLM timing estimation failed for %r: %s", recipe_name, exc)
        return None, None

    if prep_minutes == 0 and cook_minutes == 0:
        return None, None
    return prep_minutes or None, cook_minutes or None
|
||||
|
|
@ -1,26 +0,0 @@
|
|||
# app/services/meal_plan/planner.py
|
||||
"""Plan and slot orchestration — thin layer over Store.
|
||||
|
||||
No FastAPI imports. Provides helpers used by the API endpoint.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from app.db.store import Store
|
||||
from app.models.schemas.meal_plan import VALID_MEAL_TYPES
|
||||
|
||||
|
||||
def create_plan(store: Store, week_start: str, meal_types: list[str]) -> dict:
    """Create a meal plan, keeping only recognised meal types.

    Unknown meal types are dropped; when nothing survives the filter the
    plan defaults to ["dinner"].
    """
    chosen = [meal for meal in meal_types if meal in VALID_MEAL_TYPES]
    return store.create_meal_plan(week_start, chosen or ["dinner"])
|
||||
|
||||
|
||||
def get_plan_with_slots(store: Store, plan_id: int) -> dict | None:
    """Return the plan row with its slots attached under "slots", or None when missing."""
    plan = store.get_meal_plan(plan_id)
    if plan is None:
        return None
    return {**plan, "slots": store.get_plan_slots(plan_id)}
|
||||
|
|
@ -1,91 +0,0 @@
|
|||
# app/services/meal_plan/prep_scheduler.py
|
||||
"""Sequence prep tasks for a batch cooking session.
|
||||
|
||||
Pure function — no DB or network calls. Sorts tasks by equipment priority
|
||||
(oven first to maximise oven utilisation) then assigns sequence_order.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
|
||||
# Lower number = earlier in the prep sequence; oven dishes go first so the
# oven is loaded while stovetop/cold tasks proceed.
_EQUIPMENT_PRIORITY = {"oven": 0, "stovetop": 1, "cold": 2, "no-heat": 3}
_DEFAULT_PRIORITY = 4  # unknown equipment sorts last
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class PrepTask:
    """One sequenced step in a batch-cooking session (immutable)."""
    recipe_id: int | None         # corpus recipe id; None for ad-hoc tasks
    slot_id: int | None           # meal-plan slot this task serves
    task_label: str               # human-readable description of the step
    duration_minutes: int | None  # prep + cook estimate; None when unknown
    sequence_order: int           # 1-based position in the cooking sequence
    equipment: str | None         # e.g. "oven" / "stovetop" / "cold" / "no-heat"
    is_parallel: bool = False     # can run alongside the previous task
    notes: str | None = None
    user_edited: bool = False     # True once the user has modified this task
|
||||
|
||||
|
||||
def _total_minutes(recipe: dict) -> int | None:
|
||||
prep = recipe.get("prep_time")
|
||||
cook = recipe.get("cook_time")
|
||||
if prep is None and cook is None:
|
||||
return None
|
||||
return (prep or 0) + (cook or 0)
|
||||
|
||||
|
||||
def _equipment(recipe: dict) -> str | None:
|
||||
# Corpus recipes don't have an explicit equipment field; use test helper
|
||||
# field if present, otherwise infer from cook_time (long = oven heuristic).
|
||||
if "_equipment" in recipe:
|
||||
return recipe["_equipment"]
|
||||
minutes = _total_minutes(recipe)
|
||||
if minutes and minutes >= 45:
|
||||
return "oven"
|
||||
return "stovetop"
|
||||
|
||||
|
||||
def build_prep_tasks(slots: list[dict], recipes: list[dict]) -> list[PrepTask]:
    """Return a sequenced list of PrepTask objects from plan slots + recipe rows.

    One task per slot that has a matching recipe assigned. Tasks are sorted
    by equipment priority (oven first, to maximise oven utilisation) and
    given contiguous 1-based sequence_order values. Slots without a
    recipe_id, or whose recipe is not in *recipes*, are skipped.
    """
    if not slots or not recipes:
        return []

    by_id: dict[int, dict] = {row["id"]: row for row in recipes}
    staged: list[tuple[int, dict]] = []  # (equipment priority, PrepTask kwargs)

    for slot in slots:
        rid = slot.get("recipe_id")
        recipe = by_id.get(rid) if rid else None
        if recipe is None:
            continue

        equipment = _equipment(recipe)
        staged.append((
            _EQUIPMENT_PRIORITY.get(equipment or "", _DEFAULT_PRIORITY),
            {
                "recipe_id": rid,
                "slot_id": slot.get("id"),
                "task_label": recipe.get("name", f"Recipe {rid}"),
                "duration_minutes": _total_minutes(recipe),
                "equipment": equipment,
            },
        ))

    # Stable sort keeps slot order within the same equipment class.
    staged.sort(key=lambda pair: pair[0])
    return [
        PrepTask(sequence_order=order, **fields)
        for order, (_, fields) in enumerate(staged, start=1)
    ]
|
||||
|
|
@ -1,88 +0,0 @@
|
|||
# app/services/meal_plan/shopping_list.py
|
||||
"""Compute a shopping list from a meal plan and current pantry inventory.
|
||||
|
||||
Pure function — no DB or network calls. Takes plain dicts from the Store
|
||||
and returns GapItem dataclasses.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import re
|
||||
from dataclasses import dataclass, field
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class GapItem:
    """One shopping-list line: an ingredient either covered by the pantry or missing."""
    ingredient_name: str  # normalised (lowercased, de-pluralised) name
    needed_raw: str | None  # first quantity token from recipe text, e.g. "300g"
    have_quantity: float | None  # pantry quantity when partial match
    have_unit: str | None
    covered: bool  # True when the pantry already has this ingredient
    retailer_links: list = field(default_factory=list)  # filled by API layer
|
||||
|
||||
|
||||
_QUANTITY_RE = re.compile(r"^(\d+[\d./]*\s*(?:g|kg|ml|l|oz|lb|cup|cups|tsp|tbsp|tbsps|tsps)?)\b", re.I)
|
||||
|
||||
|
||||
def _extract_quantity(ingredient_text: str) -> str | None:
|
||||
"""Pull the leading quantity string from a raw ingredient line."""
|
||||
m = _QUANTITY_RE.match(ingredient_text.strip())
|
||||
return m.group(1).strip() if m else None
|
||||
|
||||
|
||||
def _normalise(name: str) -> str:
|
||||
"""Lowercase, strip possessives and plural -s for fuzzy matching."""
|
||||
return name.lower().strip().rstrip("s")
|
||||
|
||||
|
||||
def compute_shopping_list(
    recipes: list[dict],
    inventory: list[dict],
) -> tuple[list[GapItem], list[GapItem]]:
    """Return (gap_items, covered_items) for recipe dicts against inventory dicts.

    Deduplicates by normalised ingredient name — the first recipe's quantity
    string wins when the same ingredient appears in multiple recipes.
    """
    if not recipes:
        return [], []

    # Pantry lookup: normalised name -> inventory row (last duplicate wins).
    pantry = {_normalise(row["name"]): row for row in inventory}

    # Unique ingredients with the first recipe's quantity token.
    needed: dict[str, str | None] = {}
    for recipe in recipes:
        ingredient_names: list[str] = recipe.get("ingredient_names") or []
        raw_lines: list[str] = recipe.get("ingredients") or []
        for idx, ingredient in enumerate(ingredient_names):
            key = _normalise(ingredient)
            if key not in needed:
                raw = raw_lines[idx] if idx < len(raw_lines) else ""
                needed[key] = _extract_quantity(raw)

    gaps: list[GapItem] = []
    covered: list[GapItem] = []
    for key, quantity_token in needed.items():
        pantry_row = pantry.get(key)
        if pantry_row:
            covered.append(GapItem(
                ingredient_name=key,
                needed_raw=quantity_token,
                have_quantity=pantry_row.get("quantity"),
                have_unit=pantry_row.get("unit"),
                covered=True,
            ))
        else:
            gaps.append(GapItem(
                ingredient_name=key,
                needed_raw=quantity_token,
                have_quantity=None,
                have_unit=None,
                covered=False,
            ))
    return gaps, covered
|
||||
|
|
@ -33,7 +33,7 @@ def _try_docuvision(image_path: str | Path) -> str | None:
|
|||
if not cf_orch_url:
|
||||
return None
|
||||
try:
|
||||
from circuitforge_orch.client import CFOrchClient
|
||||
from circuitforge_core.resources import CFOrchClient
|
||||
from app.services.ocr.docuvision_client import DocuvisionClient
|
||||
|
||||
client = CFOrchClient(cf_orch_url)
|
||||
|
|
|
|||
|
|
@ -15,72 +15,63 @@ logger = logging.getLogger(__name__)
|
|||
|
||||
class OpenFoodFactsService:
|
||||
"""
|
||||
Service for interacting with the Open*Facts family of databases.
|
||||
Service for interacting with the OpenFoodFacts API.
|
||||
|
||||
Primary: OpenFoodFacts (food products).
|
||||
Fallback chain: Open Beauty Facts (personal care) → Open Products Facts (household).
|
||||
All three databases share the same API path and JSON format.
|
||||
OpenFoodFacts is a free, open database of food products with
|
||||
ingredients, allergens, and nutrition facts.
|
||||
"""
|
||||
|
||||
BASE_URL = "https://world.openfoodfacts.org/api/v2"
|
||||
USER_AGENT = "Kiwi/0.1.0 (https://circuitforge.tech)"
|
||||
|
||||
# Fallback databases tried in order when OFFs returns no match.
|
||||
# Same API format as OFFs — only the host differs.
|
||||
_FALLBACK_DATABASES = [
|
||||
"https://world.openbeautyfacts.org/api/v2",
|
||||
"https://world.openproductsfacts.org/api/v2",
|
||||
]
|
||||
|
||||
async def _lookup_in_database(
|
||||
self, barcode: str, base_url: str, client: httpx.AsyncClient
|
||||
) -> Optional[Dict[str, Any]]:
|
||||
"""Try one Open*Facts database using an existing client. Returns parsed product dict or None."""
|
||||
try:
|
||||
response = await client.get(
|
||||
f"{base_url}/product/{barcode}.json",
|
||||
headers={"User-Agent": self.USER_AGENT},
|
||||
timeout=10.0,
|
||||
)
|
||||
if response.status_code == 404:
|
||||
return None
|
||||
response.raise_for_status()
|
||||
data = response.json()
|
||||
if data.get("status") != 1:
|
||||
return None
|
||||
return self._parse_product_data(data, barcode)
|
||||
except httpx.HTTPError as e:
|
||||
logger.debug("HTTP error for %s at %s: %s", barcode, base_url, e)
|
||||
return None
|
||||
except Exception as e:
|
||||
logger.debug("Lookup failed for %s at %s: %s", barcode, base_url, e)
|
||||
return None
|
||||
|
||||
async def lookup_product(self, barcode: str) -> Optional[Dict[str, Any]]:
|
||||
"""
|
||||
Look up a product by barcode, trying OFFs then fallback databases.
|
||||
|
||||
A single httpx.AsyncClient is created for the whole lookup chain so that
|
||||
connection pooling and TLS session reuse apply across all database attempts.
|
||||
Look up a product by barcode in the OpenFoodFacts database.
|
||||
|
||||
Args:
|
||||
barcode: UPC/EAN barcode (8-13 digits)
|
||||
|
||||
Returns:
|
||||
Dictionary with product information, or None if not found in any database.
|
||||
Dictionary with product information, or None if not found
|
||||
|
||||
Example response:
|
||||
{
|
||||
"name": "Organic Milk",
|
||||
"brand": "Horizon",
|
||||
"categories": ["Dairy", "Milk"],
|
||||
"image_url": "https://...",
|
||||
"nutrition_data": {...},
|
||||
"raw_data": {...} # Full API response
|
||||
}
|
||||
"""
|
||||
try:
|
||||
async with httpx.AsyncClient() as client:
|
||||
result = await self._lookup_in_database(barcode, self.BASE_URL, client)
|
||||
if result:
|
||||
return result
|
||||
url = f"{self.BASE_URL}/product/{barcode}.json"
|
||||
|
||||
for db_url in self._FALLBACK_DATABASES:
|
||||
result = await self._lookup_in_database(barcode, db_url, client)
|
||||
if result:
|
||||
logger.info("Barcode %s found in fallback database: %s", barcode, db_url)
|
||||
return result
|
||||
response = await client.get(
|
||||
url,
|
||||
headers={"User-Agent": self.USER_AGENT},
|
||||
timeout=10.0,
|
||||
)
|
||||
|
||||
logger.info("Barcode %s not found in any Open*Facts database", barcode)
|
||||
if response.status_code == 404:
|
||||
logger.info(f"Product not found in OpenFoodFacts: {barcode}")
|
||||
return None
|
||||
|
||||
response.raise_for_status()
|
||||
data = response.json()
|
||||
|
||||
if data.get("status") != 1:
|
||||
logger.info(f"Product not found in OpenFoodFacts: {barcode}")
|
||||
return None
|
||||
|
||||
return self._parse_product_data(data, barcode)
|
||||
|
||||
except httpx.HTTPError as e:
|
||||
logger.error(f"HTTP error looking up barcode {barcode}: {e}")
|
||||
return None
|
||||
except Exception as e:
|
||||
logger.error(f"Error looking up barcode {barcode}: {e}")
|
||||
return None
|
||||
|
||||
def _parse_product_data(self, data: Dict[str, Any], barcode: str) -> Dict[str, Any]:
|
||||
|
|
@ -123,9 +114,6 @@ class OpenFoodFactsService:
|
|||
allergens = product.get("allergens_tags", [])
|
||||
labels = product.get("labels_tags", [])
|
||||
|
||||
# Pack size detection: prefer explicit unit_count, fall back to serving count
|
||||
pack_quantity, pack_unit = self._extract_pack_size(product)
|
||||
|
||||
return {
|
||||
"name": name,
|
||||
"brand": brand,
|
||||
|
|
@ -136,47 +124,9 @@ class OpenFoodFactsService:
|
|||
"nutrition_data": nutrition_data,
|
||||
"allergens": allergens,
|
||||
"labels": labels,
|
||||
"pack_quantity": pack_quantity,
|
||||
"pack_unit": pack_unit,
|
||||
"raw_data": product, # Store full response for debugging
|
||||
}
|
||||
|
||||
def _extract_pack_size(self, product: Dict[str, Any]) -> tuple[float | None, str | None]:
|
||||
"""Return (quantity, unit) for multi-pack products, or (None, None).
|
||||
|
||||
OFFs fields tried in order:
|
||||
1. `number_of_units` (explicit count, highest confidence)
|
||||
2. `serving_quantity` + `product_quantity_unit` (e.g. 6 x 150g yoghurt)
|
||||
3. Parse `quantity` string like "4 x 113 g" or "6 pack"
|
||||
|
||||
Returns None, None when data is absent, ambiguous, or single-unit.
|
||||
"""
|
||||
import re
|
||||
|
||||
# Field 1: explicit unit count
|
||||
unit_count = product.get("number_of_units")
|
||||
if unit_count:
|
||||
try:
|
||||
n = float(unit_count)
|
||||
if n > 1:
|
||||
return n, product.get("serving_size_unit") or "unit"
|
||||
except (ValueError, TypeError):
|
||||
pass
|
||||
|
||||
# Field 2: parse quantity string for "N x ..." pattern
|
||||
qty_str = product.get("quantity", "")
|
||||
if qty_str:
|
||||
m = re.match(r"^(\d+(?:\.\d+)?)\s*[xX×]\s*", qty_str.strip())
|
||||
if m:
|
||||
n = float(m.group(1))
|
||||
if n > 1:
|
||||
# Try to get a sensible sub-unit label from the rest
|
||||
rest = qty_str[m.end():].strip()
|
||||
unit_label = re.sub(r"[\d.,\s]+", "", rest).strip()[:20] or "unit"
|
||||
return n, unit_label
|
||||
|
||||
return None, None
|
||||
|
||||
def _extract_nutrition_data(self, product: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""
|
||||
Extract nutrition facts from product data.
|
||||
|
|
|
|||
|
|
@ -42,21 +42,11 @@ class AssemblyRole:
|
|||
class AssemblyTemplate:
|
||||
"""A template assembly dish."""
|
||||
id: int
|
||||
slug: str # URL-safe identifier, e.g. "burrito_taco"
|
||||
icon: str # emoji
|
||||
descriptor: str # one-line description shown in template grid
|
||||
title: str
|
||||
required: list[AssemblyRole]
|
||||
optional: list[AssemblyRole]
|
||||
directions: list[str]
|
||||
notes: str = ""
|
||||
# Per-role hints shown in the wizard picker header
|
||||
# keys match role.display values; missing keys fall back to ""
|
||||
role_hints: dict[str, str] = None # type: ignore[assignment]
|
||||
|
||||
def __post_init__(self) -> None:
|
||||
if self.role_hints is None:
|
||||
self.role_hints = {}
|
||||
|
||||
|
||||
def _matches_role(role: AssemblyRole, pantry_set: set[str]) -> list[str]:
|
||||
|
|
@ -148,9 +138,6 @@ def _personalized_title(tmpl: AssemblyTemplate, pantry_set: set[str], seed: int)
|
|||
ASSEMBLY_TEMPLATES: list[AssemblyTemplate] = [
|
||||
AssemblyTemplate(
|
||||
id=-1,
|
||||
slug="burrito_taco",
|
||||
icon="🌯",
|
||||
descriptor="Protein, veg, and sauce in a tortilla or over rice",
|
||||
title="Burrito / Taco",
|
||||
required=[
|
||||
AssemblyRole("tortilla or wrap", [
|
||||
|
|
@ -183,21 +170,9 @@ ASSEMBLY_TEMPLATES: list[AssemblyTemplate] = [
|
|||
"Fold in the sides and roll tightly. Optionally toast seam-side down 1-2 minutes.",
|
||||
],
|
||||
notes="Works as a burrito (rolled), taco (folded), or quesadilla (cheese only, pressed flat).",
|
||||
role_hints={
|
||||
"tortilla or wrap": "The foundation -- what holds everything",
|
||||
"protein": "The main filling",
|
||||
"rice or starch": "Optional base layer",
|
||||
"cheese": "Optional -- melts into the filling",
|
||||
"salsa or sauce": "Optional -- adds moisture and heat",
|
||||
"sour cream or yogurt": "Optional -- cool contrast to heat",
|
||||
"vegetables": "Optional -- adds texture and colour",
|
||||
},
|
||||
),
|
||||
AssemblyTemplate(
|
||||
id=-2,
|
||||
slug="fried_rice",
|
||||
icon="🍳",
|
||||
descriptor="Rice + egg + whatever's in the fridge",
|
||||
title="Fried Rice",
|
||||
required=[
|
||||
AssemblyRole("cooked rice", [
|
||||
|
|
@ -230,21 +205,9 @@ ASSEMBLY_TEMPLATES: list[AssemblyTemplate] = [
|
|||
"Season with soy sauce and any other sauces. Toss to combine.",
|
||||
],
|
||||
notes="Add a fried egg on top. A drizzle of sesame oil at the end adds a lot.",
|
||||
role_hints={
|
||||
"cooked rice": "Day-old cold rice works best",
|
||||
"protein": "Pre-cooked or raw -- cook before adding rice",
|
||||
"soy sauce or seasoning": "The primary flavour driver",
|
||||
"oil": "High smoke-point oil for high heat",
|
||||
"egg": "Scrambled in the same pan",
|
||||
"vegetables": "Add crunch and colour",
|
||||
"garlic or ginger": "Aromatic base -- add first",
|
||||
},
|
||||
),
|
||||
AssemblyTemplate(
|
||||
id=-3,
|
||||
slug="omelette_scramble",
|
||||
icon="🥚",
|
||||
descriptor="Eggs with fillings, pan-cooked",
|
||||
title="Omelette / Scramble",
|
||||
required=[
|
||||
AssemblyRole("eggs", ["egg"]),
|
||||
|
|
@ -275,19 +238,9 @@ ASSEMBLY_TEMPLATES: list[AssemblyTemplate] = [
|
|||
"Season and serve immediately.",
|
||||
],
|
||||
notes="Works for breakfast, lunch, or a quick dinner. Any leftover vegetables work well.",
|
||||
role_hints={
|
||||
"eggs": "The base -- beat with a splash of water",
|
||||
"cheese": "Fold in just before serving",
|
||||
"vegetables": "Saute first, then add eggs",
|
||||
"protein": "Cook through before adding eggs",
|
||||
"herbs or seasoning": "Season at the end",
|
||||
},
|
||||
),
|
||||
AssemblyTemplate(
|
||||
id=-4,
|
||||
slug="stir_fry",
|
||||
icon="🥢",
|
||||
descriptor="High-heat protein + veg in sauce",
|
||||
title="Stir Fry",
|
||||
required=[
|
||||
AssemblyRole("vegetables", [
|
||||
|
|
@ -295,8 +248,6 @@ ASSEMBLY_TEMPLATES: list[AssemblyTemplate] = [
|
|||
"zucchini", "mushroom", "corn", "onion", "bean sprout",
|
||||
"cabbage", "spinach", "asparagus",
|
||||
]),
|
||||
# Starch base required — prevents this from firing on any pantry with vegetables
|
||||
AssemblyRole("starch base", ["rice", "noodle", "pasta", "ramen", "cauliflower rice"]),
|
||||
],
|
||||
optional=[
|
||||
AssemblyRole("protein", [
|
||||
|
|
@ -306,6 +257,7 @@ ASSEMBLY_TEMPLATES: list[AssemblyTemplate] = [
|
|||
"soy sauce", "teriyaki", "oyster sauce", "hoisin",
|
||||
"stir fry sauce", "sesame",
|
||||
]),
|
||||
AssemblyRole("starch base", ["rice", "noodle", "pasta", "ramen"]),
|
||||
AssemblyRole("garlic or ginger", ["garlic", "ginger"]),
|
||||
AssemblyRole("oil", ["oil", "sesame"]),
|
||||
],
|
||||
|
|
@ -318,20 +270,9 @@ ASSEMBLY_TEMPLATES: list[AssemblyTemplate] = [
|
|||
"Serve over rice or noodles.",
|
||||
],
|
||||
notes="High heat is the key. Do not crowd the pan -- cook in batches if needed.",
|
||||
role_hints={
|
||||
"vegetables": "Cut to similar size for even cooking",
|
||||
"starch base": "Serve under or toss with the stir fry",
|
||||
"protein": "Cook first, remove, add back at end",
|
||||
"sauce": "Add last -- toss for 1-2 minutes only",
|
||||
"garlic or ginger": "Add early for aromatic base",
|
||||
"oil": "High smoke-point oil only",
|
||||
},
|
||||
),
|
||||
AssemblyTemplate(
|
||||
id=-5,
|
||||
slug="pasta",
|
||||
icon="🍝",
|
||||
descriptor="Pantry pasta with flexible sauce",
|
||||
title="Pasta with Whatever You Have",
|
||||
required=[
|
||||
AssemblyRole("pasta", [
|
||||
|
|
@ -365,20 +306,9 @@ ASSEMBLY_TEMPLATES: list[AssemblyTemplate] = [
|
|||
"Toss cooked pasta with sauce. Finish with cheese if using.",
|
||||
],
|
||||
notes="Pasta water is the secret -- the starch thickens and binds any sauce.",
|
||||
role_hints={
|
||||
"pasta": "The base -- cook al dente, reserve pasta water",
|
||||
"sauce base": "Simmer 5 min; pasta water loosens it",
|
||||
"protein": "Cook through before adding sauce",
|
||||
"cheese": "Finish off heat to avoid graininess",
|
||||
"vegetables": "Saute until tender before adding sauce",
|
||||
"garlic": "Saute in oil first -- the flavour foundation",
|
||||
},
|
||||
),
|
||||
AssemblyTemplate(
|
||||
id=-6,
|
||||
slug="sandwich_wrap",
|
||||
icon="🥪",
|
||||
descriptor="Protein + veg between bread or in a wrap",
|
||||
title="Sandwich / Wrap",
|
||||
required=[
|
||||
AssemblyRole("bread or wrap", [
|
||||
|
|
@ -410,19 +340,9 @@ ASSEMBLY_TEMPLATES: list[AssemblyTemplate] = [
|
|||
"Press together and cut diagonally.",
|
||||
],
|
||||
notes="Leftovers, deli meat, canned fish -- nearly anything works between bread.",
|
||||
role_hints={
|
||||
"bread or wrap": "Toast for better texture",
|
||||
"protein": "Layer on first after condiments",
|
||||
"cheese": "Goes on top of protein",
|
||||
"condiment": "Spread on both inner surfaces",
|
||||
"vegetables": "Top layer -- keeps bread from getting soggy",
|
||||
},
|
||||
),
|
||||
AssemblyTemplate(
|
||||
id=-7,
|
||||
slug="grain_bowl",
|
||||
icon="🥗",
|
||||
descriptor="Grain base + protein + toppings + dressing",
|
||||
title="Grain Bowl",
|
||||
required=[
|
||||
AssemblyRole("grain base", [
|
||||
|
|
@ -456,25 +376,14 @@ ASSEMBLY_TEMPLATES: list[AssemblyTemplate] = [
|
|||
"Drizzle with dressing and add toppings.",
|
||||
],
|
||||
notes="Great for meal prep -- cook grains and proteins in bulk, assemble bowls all week.",
|
||||
role_hints={
|
||||
"grain base": "Season while cooking -- bland grains sink the bowl",
|
||||
"protein": "Slice or shred; arrange on top",
|
||||
"vegetables": "Roast or saute for best flavour",
|
||||
"dressing or sauce": "Drizzle last -- ties everything together",
|
||||
"toppings": "Add crunch and contrast",
|
||||
},
|
||||
),
|
||||
AssemblyTemplate(
|
||||
id=-8,
|
||||
slug="soup_stew",
|
||||
icon="🥣",
|
||||
descriptor="Liquid-based, flexible ingredients",
|
||||
title="Soup / Stew",
|
||||
required=[
|
||||
# Narrow to dedicated soup bases — tomato sauce and coconut milk are
|
||||
# pantry staples used in too many non-soup dishes to serve as anchors.
|
||||
AssemblyRole("broth or stock", [
|
||||
"broth", "stock", "bouillon", "cream of",
|
||||
AssemblyRole("broth or liquid base", [
|
||||
"broth", "stock", "bouillon",
|
||||
"tomato sauce", "coconut milk", "cream of",
|
||||
]),
|
||||
],
|
||||
optional=[
|
||||
|
|
@ -504,19 +413,9 @@ ASSEMBLY_TEMPLATES: list[AssemblyTemplate] = [
|
|||
"Season to taste and simmer at least 20 minutes for flavors to develop.",
|
||||
],
|
||||
notes="Soups and stews improve overnight in the fridge. Almost any combination works.",
|
||||
role_hints={
|
||||
"broth or stock": "The liquid base -- determines overall flavour",
|
||||
"protein": "Brown first for deeper flavour",
|
||||
"vegetables": "Dense veg first; quick-cooking veg last",
|
||||
"starch thickener": "Adds body and turns soup into stew",
|
||||
"seasoning": "Taste and adjust after 20 min simmer",
|
||||
},
|
||||
),
|
||||
AssemblyTemplate(
|
||||
id=-9,
|
||||
slug="casserole_bake",
|
||||
icon="🫙",
|
||||
descriptor="Oven bake with protein, veg, starch",
|
||||
title="Casserole / Bake",
|
||||
required=[
|
||||
AssemblyRole("starch or base", [
|
||||
|
|
@ -556,20 +455,9 @@ ASSEMBLY_TEMPLATES: list[AssemblyTemplate] = [
|
|||
"Bake covered 25 minutes, then uncovered 15 minutes until golden and bubbly.",
|
||||
],
|
||||
notes="Classic pantry dump dinner. Cream of anything soup is the universal binder.",
|
||||
role_hints={
|
||||
"starch or base": "Cook slightly underdone -- finishes in oven",
|
||||
"binder or sauce": "Coats everything and holds the bake together",
|
||||
"protein": "Pre-cook before mixing in",
|
||||
"vegetables": "Chop small for even distribution",
|
||||
"cheese topping": "Goes on last -- browns in the final 15 min",
|
||||
"seasoning": "Casseroles need more salt than you think",
|
||||
},
|
||||
),
|
||||
AssemblyTemplate(
|
||||
id=-10,
|
||||
slug="pancakes_quickbread",
|
||||
icon="🥞",
|
||||
descriptor="Batter-based; sweet or savory",
|
||||
title="Pancakes / Waffles / Quick Bread",
|
||||
required=[
|
||||
AssemblyRole("flour or baking mix", [
|
||||
|
|
@ -605,20 +493,9 @@ ASSEMBLY_TEMPLATES: list[AssemblyTemplate] = [
|
|||
"For muffins or quick bread: pour into greased pan, bake at 375 F until a toothpick comes out clean.",
|
||||
],
|
||||
notes="Overmixing develops gluten and makes pancakes tough. Stop when just combined.",
|
||||
role_hints={
|
||||
"flour or baking mix": "Whisk dry ingredients together first",
|
||||
"leavening or egg": "Activates rise -- don't skip",
|
||||
"liquid": "Add to dry ingredients; lumps are fine",
|
||||
"fat": "Adds richness and prevents sticking",
|
||||
"sweetener": "Mix into wet ingredients",
|
||||
"mix-ins": "Fold in last -- gently",
|
||||
},
|
||||
),
|
||||
AssemblyTemplate(
|
||||
id=-11,
|
||||
slug="porridge_oatmeal",
|
||||
icon="🌾",
|
||||
descriptor="Oat or grain base with toppings",
|
||||
title="Porridge / Oatmeal",
|
||||
required=[
|
||||
AssemblyRole("oats or grain porridge", [
|
||||
|
|
@ -641,20 +518,9 @@ ASSEMBLY_TEMPLATES: list[AssemblyTemplate] = [
|
|||
"Top with fruit, nuts, or seeds and serve immediately.",
|
||||
],
|
||||
notes="Overnight oats: skip cooking — soak oats in cold milk overnight in the fridge.",
|
||||
role_hints={
|
||||
"oats or grain porridge": "1 part oats to 2 parts liquid",
|
||||
"liquid": "Use milk for creamier result",
|
||||
"sweetener": "Stir in after cooking",
|
||||
"fruit": "Add fresh on top or simmer dried fruit in",
|
||||
"toppings": "Add last for crunch",
|
||||
"spice": "Stir in with sweetener",
|
||||
},
|
||||
),
|
||||
AssemblyTemplate(
|
||||
id=-12,
|
||||
slug="pie_pot_pie",
|
||||
icon="🥧",
|
||||
descriptor="Pastry or biscuit crust with filling",
|
||||
title="Pie / Pot Pie",
|
||||
required=[
|
||||
AssemblyRole("pastry or crust", [
|
||||
|
|
@ -693,20 +559,9 @@ ASSEMBLY_TEMPLATES: list[AssemblyTemplate] = [
|
|||
"For sweet pie: fill unbaked crust with fruit filling, top with second crust or crumble, bake similarly.",
|
||||
],
|
||||
notes="Puff pastry from the freezer is the shortcut to impressive pot pies. Thaw in the fridge overnight.",
|
||||
role_hints={
|
||||
"pastry or crust": "Thaw puff pastry overnight in fridge",
|
||||
"protein filling": "Cook through before adding to filling",
|
||||
"vegetables": "Chop small; cook until just tender",
|
||||
"sauce or binder": "Holds the filling together in the crust",
|
||||
"seasoning": "Fillings need generous seasoning",
|
||||
"sweet filling": "For dessert pies -- fruit + sugar",
|
||||
},
|
||||
),
|
||||
AssemblyTemplate(
|
||||
id=-13,
|
||||
slug="pudding_custard",
|
||||
icon="🍮",
|
||||
descriptor="Dairy-based set dessert",
|
||||
title="Pudding / Custard",
|
||||
required=[
|
||||
AssemblyRole("dairy or dairy-free milk", [
|
||||
|
|
@ -717,12 +572,6 @@ ASSEMBLY_TEMPLATES: list[AssemblyTemplate] = [
|
|||
"egg", "cornstarch", "custard powder", "gelatin",
|
||||
"agar", "tapioca", "arrowroot",
|
||||
]),
|
||||
# Require a clear dessert-intent signal — milk + eggs alone is too generic
|
||||
# (also covers white sauce, quiche, etc.)
|
||||
AssemblyRole("sweetener or flavouring", [
|
||||
"sugar", "honey", "maple syrup", "condensed milk",
|
||||
"vanilla", "chocolate", "cocoa", "caramel", "custard powder",
|
||||
]),
|
||||
],
|
||||
optional=[
|
||||
AssemblyRole("sweetener", ["sugar", "honey", "maple syrup", "condensed milk"]),
|
||||
|
|
@ -744,58 +593,10 @@ ASSEMBLY_TEMPLATES: list[AssemblyTemplate] = [
|
|||
"Pour into dishes and refrigerate at least 2 hours to set.",
|
||||
],
|
||||
notes="UK-style pudding is broad — bread pudding, rice pudding, spotted dick, treacle sponge all count.",
|
||||
role_hints={
|
||||
"dairy or dairy-free milk": "Heat until steaming before adding to eggs",
|
||||
"thickener or set": "Cornstarch for stovetop; eggs for baked custard",
|
||||
"sweetener or flavouring": "Signals dessert intent -- required",
|
||||
"sweetener": "Adjust to taste",
|
||||
"flavouring": "Add off-heat to preserve aroma",
|
||||
"starchy base": "For bread pudding or rice pudding",
|
||||
"fruit": "Layer in or fold through before setting",
|
||||
},
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
# Slug to template lookup (built once at import time)
|
||||
_TEMPLATE_BY_SLUG: dict[str, AssemblyTemplate] = {
|
||||
t.slug: t for t in ASSEMBLY_TEMPLATES
|
||||
}
|
||||
|
||||
|
||||
def get_templates_for_api() -> list[dict]:
|
||||
"""Serialise all 13 templates for GET /api/recipes/templates.
|
||||
|
||||
Combines required and optional roles into a single ordered role_sequence
|
||||
with required roles first.
|
||||
"""
|
||||
out = []
|
||||
for tmpl in ASSEMBLY_TEMPLATES:
|
||||
roles = []
|
||||
for role in tmpl.required:
|
||||
roles.append({
|
||||
"display": role.display,
|
||||
"required": True,
|
||||
"keywords": role.keywords,
|
||||
"hint": tmpl.role_hints.get(role.display, ""),
|
||||
})
|
||||
for role in tmpl.optional:
|
||||
roles.append({
|
||||
"display": role.display,
|
||||
"required": False,
|
||||
"keywords": role.keywords,
|
||||
"hint": tmpl.role_hints.get(role.display, ""),
|
||||
})
|
||||
out.append({
|
||||
"id": tmpl.slug,
|
||||
"title": tmpl.title,
|
||||
"icon": tmpl.icon,
|
||||
"descriptor": tmpl.descriptor,
|
||||
"role_sequence": roles,
|
||||
})
|
||||
return out
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Public API
|
||||
# ---------------------------------------------------------------------------
|
||||
|
|
@ -804,20 +605,14 @@ def match_assembly_templates(
|
|||
pantry_items: list[str],
|
||||
pantry_set: set[str],
|
||||
excluded_ids: list[int],
|
||||
expiring_set: set[str] | None = None,
|
||||
) -> list[RecipeSuggestion]:
|
||||
"""Return assembly-dish suggestions whose required roles are all satisfied.
|
||||
|
||||
Titles are personalized with specific pantry items (deterministically chosen
|
||||
from the pantry contents so the same pantry always produces the same title).
|
||||
Skips templates whose id is in excluded_ids (dismiss/load-more support).
|
||||
|
||||
expiring_set: expanded pantry set of items close to expiry. Templates that
|
||||
use an expiring item in a required role get +2 added to match_count so they
|
||||
rank higher when the caller sorts the combined result list.
|
||||
"""
|
||||
excluded = set(excluded_ids)
|
||||
expiring = expiring_set or set()
|
||||
seed = _pantry_hash(pantry_set)
|
||||
results: list[RecipeSuggestion] = []
|
||||
|
||||
|
|
@ -825,40 +620,20 @@ def match_assembly_templates(
|
|||
if tmpl.id in excluded:
|
||||
continue
|
||||
|
||||
# All required roles must be satisfied; collect matched items for required roles
|
||||
required_matches: list[str] = []
|
||||
skip = False
|
||||
for role in tmpl.required:
|
||||
hits = _matches_role(role, pantry_set)
|
||||
if not hits:
|
||||
skip = True
|
||||
break
|
||||
required_matches.append(_pick_one(hits, seed + tmpl.id))
|
||||
if skip:
|
||||
# All required roles must be satisfied
|
||||
if any(not _matches_role(role, pantry_set) for role in tmpl.required):
|
||||
continue
|
||||
|
||||
# Collect matched items for optional roles (one representative per matched role)
|
||||
optional_matches: list[str] = []
|
||||
for role in tmpl.optional:
|
||||
hits = _matches_role(role, pantry_set)
|
||||
if hits:
|
||||
optional_matches.append(_pick_one(hits, seed + tmpl.id))
|
||||
|
||||
matched = required_matches + optional_matches
|
||||
|
||||
# Expiry boost: +2 if any required ingredient is in the expiring set,
|
||||
# so time-sensitive templates surface first in the merged ranking.
|
||||
expiry_bonus = 2 if expiring and any(
|
||||
item.lower() in expiring for item in required_matches
|
||||
) else 0
|
||||
optional_hit_count = sum(
|
||||
1 for role in tmpl.optional if _matches_role(role, pantry_set)
|
||||
)
|
||||
|
||||
results.append(RecipeSuggestion(
|
||||
id=tmpl.id,
|
||||
title=_personalized_title(tmpl, pantry_set, seed + tmpl.id),
|
||||
match_count=len(matched) + expiry_bonus,
|
||||
match_count=len(tmpl.required) + optional_hit_count,
|
||||
element_coverage={},
|
||||
swap_candidates=[],
|
||||
matched_ingredients=matched,
|
||||
missing_ingredients=[],
|
||||
directions=tmpl.directions,
|
||||
notes=tmpl.notes,
|
||||
|
|
@ -870,148 +645,3 @@ def match_assembly_templates(
|
|||
# Sort by optional coverage descending — best-matched templates first
|
||||
results.sort(key=lambda s: s.match_count, reverse=True)
|
||||
return results
|
||||
|
||||
|
||||
def get_role_candidates(
|
||||
template_slug: str,
|
||||
role_display: str,
|
||||
pantry_set: set[str],
|
||||
prior_picks: list[str],
|
||||
profile_index: dict[str, list[str]],
|
||||
) -> dict:
|
||||
"""Return ingredient candidates for one wizard step.
|
||||
|
||||
Splits candidates into 'compatible' (element overlap with prior picks)
|
||||
and 'other' (valid for role but no overlap).
|
||||
|
||||
profile_index: {ingredient_name: [element_tag, ...]} -- pre-loaded from
|
||||
Store.get_element_profiles() by the caller so this function stays DB-free.
|
||||
|
||||
Returns {"compatible": [...], "other": [...], "available_tags": [...]}
|
||||
where each item is {"name": str, "in_pantry": bool, "tags": [str]}.
|
||||
"""
|
||||
tmpl = _TEMPLATE_BY_SLUG.get(template_slug)
|
||||
if tmpl is None:
|
||||
return {"compatible": [], "other": [], "available_tags": []}
|
||||
|
||||
# Find the AssemblyRole for this display name
|
||||
target_role: AssemblyRole | None = None
|
||||
for role in tmpl.required + tmpl.optional:
|
||||
if role.display == role_display:
|
||||
target_role = role
|
||||
break
|
||||
if target_role is None:
|
||||
return {"compatible": [], "other": [], "available_tags": []}
|
||||
|
||||
# Build prior-pick element set for compatibility scoring
|
||||
prior_elements: set[str] = set()
|
||||
for pick in prior_picks:
|
||||
prior_elements.update(profile_index.get(pick, []))
|
||||
|
||||
# Find pantry items that match this role
|
||||
pantry_matches = _matches_role(target_role, pantry_set)
|
||||
|
||||
# Build keyword-based "other" candidates from role keywords not in pantry
|
||||
pantry_lower = {p.lower() for p in pantry_set}
|
||||
other_names: list[str] = []
|
||||
for kw in target_role.keywords:
|
||||
if not any(kw in item.lower() for item in pantry_lower):
|
||||
if len(kw) >= 4:
|
||||
other_names.append(kw.title())
|
||||
|
||||
def _make_item(name: str, in_pantry: bool) -> dict:
|
||||
tags = profile_index.get(name, profile_index.get(name.lower(), []))
|
||||
return {"name": name, "in_pantry": in_pantry, "tags": tags}
|
||||
|
||||
# Score: compatible if shares any element with prior picks (or no prior picks yet)
|
||||
compatible: list[dict] = []
|
||||
other: list[dict] = []
|
||||
for name in pantry_matches:
|
||||
item_elements = set(profile_index.get(name, []))
|
||||
item = _make_item(name, in_pantry=True)
|
||||
if not prior_elements or item_elements & prior_elements:
|
||||
compatible.append(item)
|
||||
else:
|
||||
other.append(item)
|
||||
|
||||
for name in other_names:
|
||||
other.append(_make_item(name, in_pantry=False))
|
||||
|
||||
# available_tags: union of all tags in the full candidate set
|
||||
all_tags: set[str] = set()
|
||||
for item in compatible + other:
|
||||
all_tags.update(item["tags"])
|
||||
|
||||
return {
|
||||
"compatible": compatible,
|
||||
"other": other,
|
||||
"available_tags": sorted(all_tags),
|
||||
}
|
||||
|
||||
|
||||
def build_from_selection(
|
||||
template_slug: str,
|
||||
role_overrides: dict[str, str],
|
||||
pantry_set: set[str],
|
||||
) -> "RecipeSuggestion | None":
|
||||
"""Build a RecipeSuggestion from explicit role selections.
|
||||
|
||||
role_overrides: {role.display -> chosen pantry item name}
|
||||
|
||||
Returns None if template not found or any required role is uncovered.
|
||||
"""
|
||||
tmpl = _TEMPLATE_BY_SLUG.get(template_slug)
|
||||
if tmpl is None:
|
||||
return None
|
||||
|
||||
seed = _pantry_hash(pantry_set)
|
||||
|
||||
# Validate required roles: covered by override OR pantry match
|
||||
matched_required: list[str] = []
|
||||
for role in tmpl.required:
|
||||
chosen = role_overrides.get(role.display)
|
||||
if chosen:
|
||||
matched_required.append(chosen)
|
||||
else:
|
||||
hits = _matches_role(role, pantry_set)
|
||||
if not hits:
|
||||
return None
|
||||
matched_required.append(_pick_one(hits, seed + tmpl.id))
|
||||
|
||||
# Collect optional matches (override preferred, then pantry match)
|
||||
matched_optional: list[str] = []
|
||||
for role in tmpl.optional:
|
||||
chosen = role_overrides.get(role.display)
|
||||
if chosen:
|
||||
matched_optional.append(chosen)
|
||||
else:
|
||||
hits = _matches_role(role, pantry_set)
|
||||
if hits:
|
||||
matched_optional.append(_pick_one(hits, seed + tmpl.id))
|
||||
|
||||
all_matched = matched_required + matched_optional
|
||||
|
||||
# Build title: prefer override items for personalisation
|
||||
effective_pantry = pantry_set | set(role_overrides.values())
|
||||
title = _personalized_title(tmpl, effective_pantry, seed + tmpl.id)
|
||||
|
||||
# Items in role_overrides that aren't in the user's pantry = shopping list
|
||||
missing = [
|
||||
item for item in role_overrides.values()
|
||||
if item and item not in pantry_set
|
||||
]
|
||||
|
||||
return RecipeSuggestion(
|
||||
id=tmpl.id,
|
||||
title=title,
|
||||
match_count=len(all_matched),
|
||||
element_coverage={},
|
||||
swap_candidates=[],
|
||||
matched_ingredients=all_matched,
|
||||
missing_ingredients=missing,
|
||||
directions=tmpl.directions,
|
||||
notes=tmpl.notes,
|
||||
level=1,
|
||||
is_wildcard=False,
|
||||
nutrition=None,
|
||||
)
|
||||
|
|
|
|||
|
|
@ -1,256 +0,0 @@
|
|||
"""
|
||||
Browse counts cache — pre-computes and persists recipe counts for all
|
||||
browse domain keyword sets so category/subcategory page loads never
|
||||
hit the 3.8 GB FTS index at request time.
|
||||
|
||||
Counts change only when the corpus changes (after a pipeline run).
|
||||
The cache is a small SQLite file separate from both the read-only
|
||||
corpus DB and per-user kiwi.db files, so the container can write it.
|
||||
|
||||
Refresh triggers:
|
||||
1. Startup — if cache is missing or older than STALE_DAYS
|
||||
2. Nightly — asyncio background task started in main.py lifespan
|
||||
3. Pipeline — infer_recipe_tags.py calls refresh() at end of run
|
||||
|
||||
The in-memory _COUNT_CACHE in store.py is pre-warmed from this file
|
||||
on startup, so FTS queries are never needed for known keyword sets.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import sqlite3
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
STALE_DAYS = 7
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Internal helpers
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def _kw_key(keywords: list[str]) -> str:
|
||||
"""Stable string key for a keyword list — sorted and pipe-joined."""
|
||||
return "|".join(sorted(keywords))
|
||||
|
||||
|
||||
def _fts_match_expr(keywords: list[str]) -> str:
|
||||
phrases = ['"' + kw.replace('"', '""') + '"' for kw in keywords]
|
||||
return " OR ".join(phrases)
|
||||
|
||||
|
||||
def _ensure_schema(conn: sqlite3.Connection) -> None:
    """Create the cache tables if they do not already exist (idempotent)."""
    ddl_statements = (
        """
        CREATE TABLE IF NOT EXISTS browse_counts (
            keywords_key TEXT PRIMARY KEY,
            count INTEGER NOT NULL,
            computed_at TEXT NOT NULL
        )
        """,
        """
        CREATE TABLE IF NOT EXISTS browse_counts_meta (
            key TEXT PRIMARY KEY,
            value TEXT NOT NULL
        )
        """,
    )
    for ddl in ddl_statements:
        conn.execute(ddl)
    conn.commit()
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Public API
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def is_stale(cache_path: Path, max_age_days: int = STALE_DAYS) -> bool:
    """Return True if the cache is missing, unreadable, or older than max_age_days.

    Any failure (corrupt file, missing meta row, unparsable timestamp,
    naive-vs-aware datetime mismatch) is treated as "stale" so callers
    simply rebuild rather than crash.

    Args:
        cache_path: Path to the browse-counts SQLite cache file.
        max_age_days: Age threshold; an exactly-max_age_days-old cache
            is considered stale (>=, matching the original behavior).
    """
    if not cache_path.exists():
        return True
    try:
        conn = sqlite3.connect(cache_path)
        try:
            row = conn.execute(
                "SELECT value FROM browse_counts_meta WHERE key = 'refreshed_at'"
            ).fetchone()
        finally:
            # Fix: always release the handle — the previous version leaked the
            # connection when the query raised (e.g. table missing) because
            # close() was only reached on the success path.
            conn.close()
        if row is None:
            return True
        # refreshed_at is written by refresh() as an aware UTC ISO timestamp;
        # if a foreign/naive value sneaks in, the subtraction raises and we
        # fall through to "stale".
        age = (datetime.now(timezone.utc) - datetime.fromisoformat(row[0])).days
        return age >= max_age_days
    except Exception:
        return True
|
||||
|
||||
|
||||
def load_into_memory(cache_path: Path, count_cache: dict, corpus_path: str) -> int:
    """
    Load all rows from the cache file into the in-memory count_cache dict.

    Uses corpus_path (the current RECIPE_DB_PATH env value) as the cache key,
    not what was stored in the file — the file may have been built against a
    different mount path (e.g. pipeline ran on host, container sees a different
    path). Counts are corpus-content-derived and path-independent.

    Returns the number of entries loaded (0 on any failure — best effort).
    """
    if not cache_path.exists():
        return 0
    try:
        conn = sqlite3.connect(cache_path)
        try:
            rows = conn.execute(
                "SELECT keywords_key, count FROM browse_counts"
            ).fetchall()
        finally:
            # Fix: close in finally — previously a failing query jumped to the
            # outer except and left the connection open.
            conn.close()
        loaded = 0
        for kw_key, count in rows:
            # Reverse of _kw_key(): empty key means an empty keyword list.
            keywords = kw_key.split("|") if kw_key else []
            # In-memory key shape must match what store.py's _COUNT_CACHE uses:
            # (corpus_path, *sorted(keywords)).
            cache_key = (corpus_path, *sorted(keywords))
            count_cache[cache_key] = count
            loaded += 1
        logger.info("browse_counts: warmed %d entries from %s", loaded, cache_path)
        return loaded
    except Exception as exc:
        # Best-effort warm-up: a broken cache file must never block startup.
        logger.warning("browse_counts: load failed: %s", exc)
        return 0
|
||||
|
||||
|
||||
def refresh(corpus_path: str, cache_path: Path) -> int:
    """
    Run FTS5 queries for every keyword set in browser_domains.DOMAINS
    and write results to cache_path.

    Safe to call from both the host pipeline script and the in-container
    nightly task. The corpus_path must be reachable and readable from
    the calling process.

    Returns the number of keyword sets computed.
    """
    from app.services.recipe.browser_domains import DOMAINS  # local import — avoid circular

    cache_path.parent.mkdir(parents=True, exist_ok=True)
    cache_conn = sqlite3.connect(cache_path)
    _ensure_schema(cache_conn)

    # Collect every unique keyword list across all domains/categories/subcategories.
    # DOMAINS structure: {domain: {label: str, categories: {cat_name: {keywords, subcategories}}}}
    # Keyed by _kw_key() so identical keyword sets are queried only once.
    # NOTE(review): flat list-valued categories (e.g. dietary lists) fail the
    # isinstance(dict) check below and therefore never get cached counts —
    # confirm this is intentional.
    seen: dict[str, list[str]] = {}
    for domain_data in DOMAINS.values():
        for cat_data in domain_data.get("categories", {}).values():
            if not isinstance(cat_data, dict):
                continue
            top_kws = cat_data.get("keywords", [])
            if top_kws:
                seen[_kw_key(top_kws)] = top_kws
            for subcat_kws in cat_data.get("subcategories", {}).values():
                if subcat_kws:
                    seen[_kw_key(subcat_kws)] = subcat_kws

    # Open the corpus read-only so a concurrent pipeline write can't be harmed.
    try:
        corpus_conn = sqlite3.connect(f"file:{corpus_path}?mode=ro", uri=True)
    except Exception as exc:
        logger.error("browse_counts: cannot open corpus %s: %s", corpus_path, exc)
        cache_conn.close()
        return 0

    # Single timestamp stamped on every row and on the meta table.
    now = datetime.now(timezone.utc).isoformat()
    computed = 0

    try:
        for kw_key, kws in seen.items():
            # Per-keyword-set isolation: one bad FTS expression must not
            # abort the whole refresh.
            try:
                row = corpus_conn.execute(
                    "SELECT count(*) FROM recipe_browser_fts WHERE recipe_browser_fts MATCH ?",
                    (_fts_match_expr(kws),),
                ).fetchone()
                count = row[0] if row else 0
                cache_conn.execute(
                    "INSERT OR REPLACE INTO browse_counts (keywords_key, count, computed_at)"
                    " VALUES (?, ?, ?)",
                    (kw_key, count, now),
                )
                computed += 1
            except Exception as exc:
                logger.warning("browse_counts: query failed key=%r: %s", kw_key[:60], exc)

        # Merge accepted community tags into counts.
        # For each (domain, category, subcategory) that has accepted community
        # tags, add the count of distinct tagged recipe_ids to the FTS count.
        # The two overlap rarely (community tags exist precisely because FTS
        # missed those recipes), so simple addition is accurate enough.
        try:
            _merge_community_tag_counts(cache_conn, DOMAINS, now)
        except Exception as exc:
            logger.warning("browse_counts: community merge skipped: %s", exc)

        # Meta rows let is_stale()/diagnostics see when and against what
        # corpus path this cache was built.
        cache_conn.execute(
            "INSERT OR REPLACE INTO browse_counts_meta (key, value) VALUES ('refreshed_at', ?)",
            (now,),
        )
        cache_conn.execute(
            "INSERT OR REPLACE INTO browse_counts_meta (key, value) VALUES ('corpus_path', ?)",
            (corpus_path,),
        )
        # Single commit: counts + meta land atomically.
        cache_conn.commit()
        logger.info("browse_counts: wrote %d counts → %s", computed, cache_path)
    finally:
        corpus_conn.close()
        cache_conn.close()

    return computed
|
||||
|
||||
|
||||
def _merge_community_tag_counts(
    cache_conn: sqlite3.Connection,
    domains: dict,
    now: str,  # NOTE(review): currently unused in the body; kept for signature stability — confirm
    threshold: int = 2,
) -> None:
    """Add accepted community tag counts on top of FTS counts in the cache.

    Queries the community PostgreSQL store (if available) for accepted tags
    grouped by (domain, category, subcategory), maps each back to its keyword
    set key, then increments the cached count.

    Silently skips if community features are unavailable.

    Note: uses UPDATE, so a keyword set with no existing browse_counts row is
    silently skipped. The caller (refresh) is responsible for committing.
    """
    # Import lazily so environments without the community stack never pay
    # for (or fail on) the dependency; any failure means "feature off".
    try:
        from app.api.endpoints.community import _get_community_store
        store = _get_community_store()
        if store is None:
            return
    except Exception:
        return

    for domain_id, domain_data in domains.items():
        for cat_name, cat_data in domain_data.get("categories", {}).items():
            # Flat list-valued categories carry no subcategory structure to merge.
            if not isinstance(cat_data, dict):
                continue
            # Check subcategories
            for subcat_name, subcat_kws in cat_data.get("subcategories", {}).items():
                if not subcat_kws:
                    continue
                ids = store.get_accepted_recipe_ids_for_subcategory(
                    domain=domain_id,
                    category=cat_name,
                    subcategory=subcat_name,
                    threshold=threshold,
                )
                if not ids:
                    continue
                # Same key derivation as refresh() so the UPDATE hits the
                # row written for this keyword set.
                kw_key = _kw_key(subcat_kws)
                cache_conn.execute(
                    "UPDATE browse_counts SET count = count + ? WHERE keywords_key = ?",
                    (len(ids), kw_key),
                )
            # Check category-level tags (subcategory IS NULL)
            top_kws = cat_data.get("keywords", [])
            if top_kws:
                ids = store.get_accepted_recipe_ids_for_subcategory(
                    domain=domain_id,
                    category=cat_name,
                    subcategory=None,
                    threshold=threshold,
                )
                if ids:
                    kw_key = _kw_key(top_kws)
                    cache_conn.execute(
                        "UPDATE browse_counts SET count = count + ? WHERE keywords_key = ?",
                        (len(ids), kw_key),
                    )
    logger.info("browse_counts: community tag counts merged")
|
||||
|
|
@ -1,645 +0,0 @@
|
|||
"""
|
||||
Recipe browser domain schemas.
|
||||
|
||||
Each domain provides a two-level category hierarchy for browsing the recipe corpus.
|
||||
Keyword matching is case-insensitive against the recipes.category column and the
|
||||
recipes.keywords JSON array. A recipe may appear in multiple categories (correct).
|
||||
|
||||
Category values are either:
|
||||
- list[str] — flat keyword list (no subcategories)
|
||||
- dict — {"keywords": list[str], "subcategories": {name: list[str]}}
|
||||
keywords covers the whole category (used for "All X" browse);
|
||||
subcategories each have their own narrower keyword list.
|
||||
|
||||
These are starter mappings based on the food.com dataset structure. Run:
|
||||
|
||||
SELECT category, count(*) FROM recipes
|
||||
GROUP BY category ORDER BY count(*) DESC LIMIT 50;
|
||||
|
||||
against the corpus to verify coverage and refine keyword lists before the first
|
||||
production deploy.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
# Two data fixes vs. the previous revision:
#   * "sheperd's pie" → "shepherd's pie" in meal_type/Dinner/Casseroles
#     (the typo could never match correctly-spelled corpus text; the
#     European/British list already used the correct spelling)
#   * duplicate "spanakopita" removed from the Mediterranean/Greek list
DOMAINS: dict[str, dict] = {
    "cuisine": {
        "label": "Cuisine",
        "categories": {
            "Italian": {
                "keywords": ["italian", "pasta", "pizza", "risotto", "lasagna", "carbonara"],
                "subcategories": {
                    "Sicilian": ["sicilian", "sicily", "arancini", "caponata",
                                 "involtini", "cannoli"],
                    "Neapolitan": ["neapolitan", "naples", "pizza napoletana",
                                   "sfogliatelle", "ragù"],
                    "Tuscan": ["tuscan", "tuscany", "ribollita", "bistecca",
                               "pappardelle", "crostini"],
                    "Roman": ["roman", "rome", "cacio e pepe", "carbonara",
                              "amatriciana", "gricia", "supplì"],
                    "Venetian": ["venetian", "venice", "risotto", "bigoli",
                                 "baccalà", "sarde in saor"],
                    "Ligurian": ["ligurian", "liguria", "pesto", "focaccia",
                                 "trofie", "farinata"],
                },
            },
            "Mexican": {
                "keywords": ["mexican", "taco", "enchilada", "burrito", "salsa",
                             "guacamole", "mole", "tamale"],
                "subcategories": {
                    "Oaxacan": ["oaxacan", "oaxaca", "mole negro", "tlayuda",
                                "chapulines", "mezcal", "tasajo", "memelas"],
                    "Yucatecan": ["yucatecan", "yucatan", "cochinita pibil", "poc chuc",
                                  "sopa de lima", "panuchos", "papadzules"],
                    "Veracruz": ["veracruz", "veracruzana", "huachinango",
                                 "picadas", "enfrijoladas", "caldo de mariscos"],
                    "Street Food": ["taco", "elote", "tlacoyos", "torta", "tamale",
                                    "quesadilla", "tostada", "sope", "gordita"],
                    "Mole": ["mole", "mole negro", "mole rojo", "mole verde",
                             "mole poblano", "mole amarillo", "pipián"],
                    "Baja / Cal-Mex": ["baja", "baja california", "cal-mex", "baja fish taco",
                                       "fish taco", "carne asada fries", "california burrito",
                                       "birria", "birria tacos", "quesabirria",
                                       "lobster puerto nuevo", "tijuana", "ensenada",
                                       "agua fresca", "caesar salad tijuana"],
                    "Mexico City": ["mexico city", "chilaquiles", "tlayuda cdmx",
                                    "tacos de canasta", "torta ahogada", "pozole",
                                    "chiles en nogada"],
                },
            },
            "Asian": {
                "keywords": ["asian", "chinese", "japanese", "thai", "korean", "vietnamese",
                             "stir fry", "stir-fry", "ramen", "sushi", "malaysian",
                             "taiwanese", "singaporean", "burmese", "cambodian",
                             "laotian", "mongolian", "hong kong"],
                "subcategories": {
                    "Korean": ["korean", "kimchi", "bibimbap", "bulgogi", "japchae",
                               "doenjang", "gochujang", "tteokbokki", "sundubu",
                               "galbi", "jjigae", "kbbq", "korean fried chicken"],
                    "Japanese": ["japanese", "sushi", "ramen", "tempura", "miso",
                                 "teriyaki", "udon", "soba", "bento", "yakitori",
                                 "tonkatsu", "onigiri", "okonomiyaki", "takoyaki",
                                 "kaiseki", "izakaya"],
                    "Chinese": ["chinese", "dim sum", "fried rice", "dumplings", "wonton",
                                "spring roll", "szechuan", "sichuan", "cantonese",
                                "chow mein", "mapo tofu", "lo mein", "hot pot",
                                "peking duck", "char siu", "congee"],
                    "Thai": ["thai", "pad thai", "green curry", "red curry",
                             "coconut milk", "lemongrass", "satay", "tom yum",
                             "larb", "khao man gai", "massaman", "pad see ew"],
                    "Vietnamese": ["vietnamese", "pho", "banh mi", "spring rolls",
                                   "vermicelli", "nuoc cham", "bun bo hue",
                                   "banh xeo", "com tam", "bun cha"],
                    "Filipino": ["filipino", "adobo", "sinigang", "pancit", "lumpia",
                                 "kare-kare", "lechon", "sisig", "halo-halo",
                                 "dinuguan", "tinola", "bistek"],
                    "Indonesian": ["indonesian", "rendang", "nasi goreng", "gado-gado",
                                   "tempeh", "sambal", "soto", "opor ayam",
                                   "bakso", "mie goreng", "nasi uduk"],
                    "Malaysian": ["malaysian", "laksa", "nasi lemak", "char kway teow",
                                  "satay malaysia", "roti canai", "bak kut teh",
                                  "cendol", "mee goreng mamak", "curry laksa"],
                    "Taiwanese": ["taiwanese", "beef noodle soup", "lu rou fan",
                                  "oyster vermicelli", "scallion pancake taiwan",
                                  "pork chop rice", "three cup chicken",
                                  "bubble tea", "stinky tofu", "ba wan"],
                    "Singaporean": ["singaporean", "chicken rice", "chili crab",
                                    "singaporean laksa", "bak chor mee", "rojak",
                                    "kaya toast", "nasi padang", "satay singapore"],
                    "Burmese": ["burmese", "myanmar", "mohinga", "laphet thoke",
                                "tea leaf salad", "ohn no khao swe",
                                "mont di", "nangyi thoke"],
                    "Hong Kong": ["hong kong", "hk style", "pineapple bun",
                                  "wonton noodle soup", "hk milk tea", "egg tart",
                                  "typhoon shelter crab", "char siu bao", "jook",
                                  "congee hk", "silk stocking tea", "dan tat",
                                  "siu mai hk", "cheung fun"],
                    "Cambodian": ["cambodian", "khmer", "amok", "lok lak",
                                  "kuy teav", "bai sach chrouk", "nom banh chok",
                                  "samlor korko", "beef loc lac"],
                    "Laotian": ["laotian", "lao", "larb", "tam mak hoong",
                                "or lam", "khao niaw", "ping kai",
                                "naem khao", "khao piak sen", "mok pa"],
                    "Mongolian": ["mongolian", "buuz", "khuushuur", "tsuivan",
                                  "boodog", "airag", "khorkhog", "bansh",
                                  "guriltai shol", "suutei tsai"],
                    "South Asian Fusion": ["south asian fusion", "indo-chinese",
                                           "hakka chinese", "chilli chicken",
                                           "manchurian", "schezwan"],
                },
            },
            "Indian": {
                "keywords": ["indian", "curry", "lentil", "dal", "tikka", "masala",
                             "biryani", "naan", "chutney", "pakistani", "sri lankan",
                             "bangladeshi", "nepali"],
                "subcategories": {
                    "North Indian": ["north indian", "punjabi", "mughal", "tikka masala",
                                     "naan", "tandoori", "butter chicken", "palak paneer",
                                     "chole", "rajma", "aloo gobi"],
                    "South Indian": ["south indian", "tamil", "kerala", "dosa", "idli",
                                     "sambar", "rasam", "coconut chutney", "appam",
                                     "fish curry kerala", "puttu", "payasam"],
                    "Bengali": ["bengali", "mustard fish", "hilsa", "shorshe ilish",
                                "mishti doi", "rasgulla", "kosha mangsho"],
                    "Gujarati": ["gujarati", "dhokla", "thepla", "undhiyu",
                                 "khandvi", "fafda", "gujarati dal"],
                    "Pakistani": ["pakistani", "nihari", "haleem", "seekh kebab",
                                  "karahi", "biryani karachi", "chapli kebab",
                                  "halwa puri", "paya"],
                    "Sri Lankan": ["sri lankan", "kottu roti", "hoppers", "pol sambol",
                                   "sri lankan curry", "lamprais", "string hoppers",
                                   "wambatu moju"],
                    "Bangladeshi": ["bangladeshi", "bangladesh", "dhaka biryani",
                                    "shutki", "pitha", "hilsa curry", "kacchi biryani",
                                    "bhuna khichuri", "doi maach", "rezala"],
                    "Nepali": ["nepali", "dal bhat", "momos", "sekuwa",
                               "sel roti", "gundruk", "thukpa"],
                },
            },
            "Mediterranean": {
                "keywords": ["mediterranean", "greek", "middle eastern", "turkish",
                             "lebanese", "jewish", "palestinian", "yemeni", "egyptian",
                             "syrian", "iraqi", "jordanian"],
                "subcategories": {
                    "Greek": ["greek", "feta", "tzatziki", "moussaka", "spanakopita",
                              "souvlaki", "dolmades", "tiropita",
                              "galaktoboureko"],
                    "Turkish": ["turkish", "kebab", "borek", "meze", "baklava",
                                "lahmacun", "menemen", "pide", "iskender",
                                "kisir", "simit"],
                    "Syrian": ["syrian", "fattet hummus", "kibbeh syria",
                               "muhammara", "maklouba syria", "sfeeha",
                               "halawet el jibn"],
                    "Lebanese": ["lebanese", "middle eastern", "hummus", "falafel",
                                 "tabbouleh", "kibbeh", "fattoush", "manakish",
                                 "kafta", "sfiha"],
                    "Jewish": ["jewish", "israeli", "ashkenazi", "sephardic",
                               "shakshuka", "sabich", "za'atar", "tahini",
                               "zhug", "zhoug", "s'khug", "z'houg",
                               "hawaiij", "hawaij", "hawayej",
                               "matzo", "latke", "rugelach", "babka", "challah",
                               "cholent", "gefilte fish", "brisket", "kugel",
                               "new york jewish", "new york deli", "pastrami",
                               "knish", "lox", "bagel and lox", "jewish deli"],
                    "Palestinian": ["palestinian", "musakhan", "maqluba", "knafeh",
                                    "maftoul", "freekeh", "sumac chicken"],
                    "Yemeni": ["yemeni", "saltah", "lahoh", "bint al-sahn",
                               "zhug", "zhoug", "hulba", "fahsa",
                               "hawaiij", "hawaij", "hawayej"],
                    "Egyptian": ["egyptian", "koshari", "molokhia", "mahshi",
                                 "ful medames", "ta'ameya", "feteer meshaltet"],
                },
            },
            "American": {
                "keywords": ["american", "southern", "comfort food", "cajun", "creole",
                             "hawaiian", "tex-mex", "soul food"],
                "subcategories": {
                    "Southern": ["southern", "soul food", "fried chicken",
                                 "collard greens", "cornbread", "biscuits and gravy",
                                 "mac and cheese", "sweet potato pie", "okra"],
                    "Cajun/Creole": ["cajun", "creole", "new orleans", "gumbo",
                                     "jambalaya", "etouffee", "dirty rice", "po'boy",
                                     "muffuletta", "red beans and rice"],
                    "Tex-Mex": ["tex-mex", "southwestern", "chili", "fajita",
                                "queso", "breakfast taco", "chile con carne"],
                    "New England": ["new england", "chowder", "lobster", "clam",
                                    "maple", "yankee", "boston baked beans",
                                    "johnnycake", "fish and chips"],
                    "Pacific Northwest": ["pacific northwest", "pnw", "dungeness crab",
                                          "salmon", "cedar plank", "razor clam",
                                          "geoduck", "chanterelle", "marionberry"],
                    "Hawaiian": ["hawaiian", "hawaii", "plate lunch", "loco moco",
                                 "poke", "spam musubi", "kalua pig", "lau lau",
                                 "haupia", "poi", "manapua", "garlic shrimp",
                                 "saimin", "huli huli", "malasada"],
                },
            },
            "BBQ & Smoke": {
                # Top-level keywords use broad corpus-friendly terms that appear in
                # food.com keyword/category fields (e.g. "BBQ", "Oven BBQ", "Smoker").
                # Subcategory keywords remain specific for drill-down filtering.
                "keywords": ["bbq", "barbecue", "barbeque", "smoked", "smoky",
                             "smoke", "pit", "smoke ring", "low and slow",
                             "brisket", "pulled pork", "ribs", "spare ribs",
                             "baby back", "baby back ribs", "dry rub", "wet rub",
                             "cookout", "smoker", "smoked meat", "smoked chicken",
                             "smoked pork", "smoked beef", "smoked turkey",
                             "pit smoked", "wood smoked", "slow smoked",
                             "charcoal", "chargrilled", "burnt ends"],
                "subcategories": {
                    "Texas BBQ": ["texas bbq", "central texas bbq", "brisket",
                                  "beef brisket", "beef ribs", "smoked brisket",
                                  "post oak", "salt and pepper rub",
                                  "east texas bbq", "lockhart", "franklin style"],
                    "Carolina BBQ": ["carolina bbq", "north carolina bbq", "whole hog",
                                     "vinegar sauce", "vinegar bbq", "lexington style",
                                     "eastern nc", "south carolina bbq", "mustard sauce",
                                     "carolina pulled pork"],
                    "Kansas City BBQ": ["kansas city bbq", "kc bbq", "burnt ends",
                                        "sweet bbq sauce", "tomato molasses sauce",
                                        "baby back ribs", "kansas city ribs"],
                    "Memphis BBQ": ["memphis bbq", "dry rub ribs", "wet ribs",
                                    "memphis style", "dry rub pork", "memphis ribs"],
                    "Alabama BBQ": ["alabama bbq", "white sauce", "alabama white sauce",
                                    "smoked chicken", "white bbq sauce"],
                    "Kentucky BBQ": ["kentucky bbq", "mutton bbq", "owensboro bbq",
                                     "black dip", "western kentucky barbecue", "mutton"],
                    "St. Louis BBQ": ["st louis bbq", "st louis ribs", "st. louis ribs",
                                      "st louis cut ribs", "spare ribs st louis"],
                    "Backyard Grill": ["backyard bbq", "cookout", "grilled burgers",
                                       "charcoal grill", "kettle grill", "tailgate",
                                       "grill out", "backyard grilling"],
                },
            },
            "European": {
                "keywords": ["french", "german", "spanish", "british", "irish", "scottish",
                             "welsh", "scandinavian", "nordic", "eastern european"],
                "subcategories": {
                    "French": ["french", "provencal", "beurre", "crepe",
                               "ratatouille", "cassoulet", "bouillabaisse"],
                    "Spanish": ["spanish", "paella", "tapas", "gazpacho",
                                "tortilla espanola", "chorizo"],
                    "German": ["german", "bratwurst", "sauerkraut", "schnitzel",
                               "pretzel", "strudel"],
                    "British": ["british", "english", "pub food", "cornish",
                                "shepherd's pie", "bangers", "toad in the hole",
                                "coronation chicken", "london", "londoner",
                                "cornish pasty", "ploughman's"],
                    "Irish": ["irish", "ireland", "colcannon", "coddle",
                              "irish stew", "soda bread", "boxty", "champ"],
                    "Scottish": ["scottish", "scotland", "haggis", "cullen skink",
                                 "cranachan", "scotch broth", "glaswegian",
                                 "neeps and tatties", "tablet"],
                    "Scandinavian": ["scandinavian", "nordic", "swedish", "norwegian",
                                     "danish", "finnish", "gravlax", "swedish meatballs",
                                     "lefse", "smörgåsbord", "fika", "crispbread",
                                     "cardamom bun", "herring", "æbleskiver",
                                     "lingonberry", "lutefisk", "janssons frestelse",
                                     "knäckebröd", "kladdkaka"],
                    "Eastern European": ["eastern european", "polish", "russian", "ukrainian",
                                         "czech", "hungarian", "pierogi", "borscht",
                                         "goulash", "kielbasa", "varenyky", "pelmeni"],
                },
            },
            "Latin American": {
                "keywords": ["latin american", "peruvian", "argentinian", "colombian",
                             "cuban", "caribbean", "brazilian", "venezuelan", "chilean"],
                "subcategories": {
                    "Peruvian": ["peruvian", "ceviche", "lomo saltado", "anticucho",
                                 "aji amarillo", "causa", "leche de tigre",
                                 "arroz con leche peru", "pollo a la brasa"],
                    "Brazilian": ["brazilian", "churrasco", "feijoada", "pao de queijo",
                                  "brigadeiro", "coxinha", "moqueca", "vatapa",
                                  "caipirinha", "acai bowl"],
                    "Colombian": ["colombian", "bandeja paisa", "arepas", "empanadas",
                                  "sancocho", "ajiaco", "buñuelos", "changua"],
                    "Argentinian": ["argentinian", "asado", "chimichurri", "empanadas argentina",
                                    "milanesa", "locro", "dulce de leche", "medialunas"],
                    "Venezuelan": ["venezuelan", "pabellón criollo", "arepas venezuela",
                                   "hallacas", "cachapas", "tequeños", "caraotas"],
                    "Chilean": ["chilean", "cazuela", "pastel de choclo", "curanto",
                                "sopaipillas", "charquicán", "completo"],
                    "Cuban": ["cuban", "ropa vieja", "moros y cristianos",
                              "picadillo", "lechon cubano", "vaca frita",
                              "tostones", "platanos maduros"],
                    "Jamaican": ["jamaican", "jerk chicken", "jerk pork", "ackee saltfish",
                                 "curry goat", "rice and peas", "escovitch",
                                 "jamaican patty", "callaloo jamaica", "festival"],
                    "Puerto Rican": ["puerto rican", "mofongo", "pernil", "arroz con gandules",
                                     "sofrito", "pasteles", "tostones pr", "tembleque",
                                     "coquito", "asopao"],
                    "Dominican": ["dominican", "mangu", "sancocho dominicano",
                                  "pollo guisado", "habichuelas guisadas",
                                  "tostones dominicanos", "morir soñando"],
                    "Haitian": ["haitian", "griot", "pikliz", "riz et pois",
                                "joumou", "akra", "pain patate", "labouyi"],
                    "Trinidad": ["trinidadian", "doubles", "roti trinidad", "pelau",
                                 "callaloo trinidad", "bake and shark",
                                 "curry duck", "oil down"],
                },
            },
            "Central American": {
                "keywords": ["central american", "salvadoran", "guatemalan",
                             "honduran", "nicaraguan", "costa rican", "panamanian"],
                "subcategories": {
                    "Salvadoran": ["salvadoran", "el salvador", "pupusas", "curtido",
                                   "sopa de pata", "nuégados", "atol shuco"],
                    "Guatemalan": ["guatemalan", "pepián", "jocon", "kak'ik",
                                   "hilachas", "rellenitos", "fiambre"],
                    "Costa Rican": ["costa rican", "gallo pinto", "casado",
                                    "olla de carne", "arroz con leche cr",
                                    "tres leches cr"],
                    "Honduran": ["honduran", "baleadas", "sopa de caracol",
                                 "tapado", "machuca", "catrachitas"],
                    "Nicaraguan": ["nicaraguan", "nacatamal", "vigorón", "indio viejo",
                                   "gallo pinto nicaragua", "güirilas"],
                },
            },
            "African": {
                "keywords": ["african", "west african", "east african", "ethiopian",
                             "nigerian", "ghanaian", "kenyan", "south african",
                             "senegalese", "tunisian"],
                "subcategories": {
                    "West African": ["west african", "nigerian", "ghanaian",
                                     "jollof rice", "egusi soup", "fufu", "suya",
                                     "groundnut stew", "kelewele", "kontomire",
                                     "waakye", "ofam", "bitterleaf soup"],
                    "Senegalese": ["senegalese", "senegal", "thieboudienne",
                                   "yassa", "mafe", "thiou", "ceebu jen",
                                   "domoda"],
                    "Ethiopian & Eritrean": ["ethiopian", "eritrean", "injera", "doro wat",
                                             "kitfo", "tibs", "shiro", "misir wat",
                                             "gomen", "ful ethiopian", "tegamino"],
                    "East African": ["east african", "kenyan", "tanzanian", "ugandan",
                                     "nyama choma", "ugali", "sukuma wiki",
                                     "pilau kenya", "mandazi", "matoke",
                                     "githeri", "irio"],
                    "North African": ["north african", "tunisian", "algerian", "libyan",
                                      "brik", "lablabi", "merguez", "shakshuka tunisian",
                                      "harissa tunisian", "couscous algerian"],
                    "South African": ["south african", "braai", "bobotie", "boerewors",
                                      "bunny chow", "pap", "chakalaka", "biltong",
                                      "malva pudding", "koeksister", "potjiekos"],
                    "Moroccan": ["moroccan", "tagine", "couscous morocco",
                                 "harissa", "chermoula", "preserved lemon",
                                 "pastilla", "mechoui", "bastilla"],
                },
            },
            "Pacific & Oceania": {
                "keywords": ["pacific", "oceania", "polynesian", "melanesian",
                             "micronesian", "maori", "fijian", "samoan", "tongan",
                             "hawaiian", "australian", "new zealand"],
                "subcategories": {
                    "Māori / New Zealand": ["maori", "new zealand", "hangi", "rewena bread",
                                            "boil-up", "paua", "kumara", "pavlova nz",
                                            "whitebait fritter", "kina", "hokey pokey"],
                    "Australian": ["australian", "meat pie", "lamington",
                                   "anzac biscuits", "damper", "barramundi",
                                   "vegemite", "pavlova australia", "tim tam",
                                   "sausage sizzle", "chiko roll", "fairy bread"],
                    "Fijian": ["fijian", "fiji", "kokoda", "lovo",
                               "rourou", "palusami fiji", "duruka",
                               "vakalolo"],
                    "Samoan": ["samoan", "samoa", "palusami", "oka",
                               "fa'ausi", "chop suey samoa", "sapasui",
                               "koko alaisa", "supo esi"],
                    "Tongan": ["tongan", "tonga", "lu pulu", "'ota 'ika",
                               "fekkai", "faikakai topai", "kapisi pulu"],
                    "Papua New Guinean": ["papua new guinea", "png", "mumu",
                                          "sago", "aibika", "kaukau",
                                          "taro png", "coconut crab"],
                    "Hawaiian": ["hawaiian", "hawaii", "poke", "loco moco",
                                 "plate lunch", "kalua pig", "haupia",
                                 "spam musubi", "poi", "malasada"],
                },
            },
            "Central Asian & Caucasus": {
                "keywords": ["central asian", "caucasus", "georgian", "armenian", "uzbek",
                             "afghan", "persian", "iranian", "azerbaijani", "kazakh"],
                "subcategories": {
                    "Persian / Iranian": ["persian", "iranian", "ghormeh sabzi", "fesenjan",
                                          "tahdig", "joojeh kabab", "ash reshteh",
                                          "zereshk polo", "khoresh", "mast o khiar",
                                          "kashk-e-bademjan", "mirza ghasemi",
                                          "baghali polo"],
                    "Georgian": ["georgian", "georgia", "khachapuri", "khinkali",
                                 "churchkhela", "ajapsandali", "satsivi",
                                 "pkhali", "lobiani", "badrijani nigvzit"],
                    "Armenian": ["armenian", "dolma armenia", "lahmajoun",
                                 "manti armenia", "ghapama", "basturma",
                                 "harissa armenia", "nazook", "tolma"],
                    "Azerbaijani": ["azerbaijani", "azerbaijan", "plov azerbaijan",
                                    "dolma azeri", "dushbara", "levengi",
                                    "shah plov", "gutab"],
                    "Uzbek": ["uzbek", "uzbekistan", "plov", "samsa",
                              "lagman", "shashlik", "manti uzbek",
                              "non bread", "dimlama", "sumalak"],
                    "Afghan": ["afghan", "afghanistan", "kabuli pulao", "mantu",
                               "bolani", "qorma", "ashak", "shorwa",
                               "aushak", "borani banjan"],
                    "Kazakh": ["kazakh", "beshbarmak", "kuyrdak", "baursak",
                               "kurt", "shubat", "kazy"],
                },
            },
        },
    },
    "meal_type": {
        "label": "Meal Type",
        "categories": {
            "Breakfast": {
                "keywords": ["breakfast", "brunch", "eggs", "pancakes", "waffles",
                             "oatmeal", "muffin"],
                "subcategories": {
                    "Eggs": ["egg", "omelette", "frittata", "quiche",
                             "scrambled", "benedict", "shakshuka"],
                    "Pancakes & Waffles": ["pancake", "waffle", "crepe", "french toast"],
                    "Baked Goods": ["muffin", "scone", "biscuit", "quick bread",
                                    "coffee cake", "danish"],
                    "Oats & Grains": ["oatmeal", "granola", "porridge", "muesli",
                                      "overnight oats"],
                },
            },
            "Lunch": {
                "keywords": ["lunch", "sandwich", "wrap", "salad", "soup", "light meal"],
                "subcategories": {
                    "Sandwiches": ["sandwich", "sub", "hoagie", "panini", "club",
                                   "grilled cheese", "blt"],
                    "Salads": ["salad", "grain bowl", "chopped", "caesar",
                               "niçoise", "cobb"],
                    "Soups": ["soup", "bisque", "chowder", "gazpacho",
                              "minestrone", "lentil soup"],
                    "Wraps": ["wrap", "burrito bowl", "pita", "lettuce wrap",
                              "quesadilla"],
                },
            },
            "Dinner": {
                "keywords": ["dinner", "main dish", "entree", "main course", "supper"],
                "subcategories": {
                    "Casseroles": ["casserole", "bake", "gratin", "lasagna",
                                   "shepherd's pie", "pot pie"],
                    "Stews": ["stew", "braise", "slow cooker", "pot roast",
                              "daube", "ragù"],
                    "Grilled": ["grilled", "grill", "barbecue", "charred",
                                "kebab", "skewer"],
                    "Stir-Fries": ["stir fry", "stir-fry", "wok", "sauté",
                                   "sauteed"],
                    "Roasts": ["roast", "roasted", "oven", "baked chicken",
                               "pot roast"],
                },
            },
            "Snack": {
                "keywords": ["snack", "appetizer", "finger food", "dip", "bite",
                             "starter"],
                "subcategories": {
                    "Dips & Spreads": ["dip", "spread", "hummus", "guacamole",
                                       "salsa", "pate"],
                    "Finger Foods": ["finger food", "bite", "skewer", "slider",
                                     "wing", "nugget"],
                    "Chips & Crackers": ["chip", "cracker", "crisp", "popcorn",
                                         "pretzel"],
                },
            },
            "Dessert": {
                "keywords": ["dessert", "cake", "cookie", "pie", "sweet", "pudding",
                             "ice cream", "brownie"],
                "subcategories": {
                    "Cakes": ["cake", "cupcake", "layer cake", "bundt",
                              "cheesecake", "torte"],
                    "Cookies & Bars": ["cookie", "brownie", "blondie", "bar",
                                       "biscotti", "shortbread"],
                    "Pies & Tarts": ["pie", "tart", "galette", "cobbler", "crisp",
                                     "crumble"],
                    "Frozen": ["ice cream", "gelato", "sorbet", "frozen dessert",
                               "popsicle", "granita"],
                    "Puddings": ["pudding", "custard", "mousse", "panna cotta",
                                 "flan", "creme brulee"],
                    "Candy": ["candy", "fudge", "truffle", "brittle",
                              "caramel", "toffee"],
                },
            },
            "Beverage": ["drink", "smoothie", "cocktail", "beverage", "juice", "shake"],
            "Side Dish": ["side dish", "side", "accompaniment", "garnish"],
        },
    },
    "dietary": {
        "label": "Dietary",
        "categories": {
            "Vegetarian": ["vegetarian"],
            "Vegan": ["vegan", "plant-based", "plant based"],
            "Gluten-Free": ["gluten-free", "gluten free", "celiac"],
            "Low-Carb": ["low-carb", "low carb", "keto", "ketogenic"],
            "High-Protein": ["high protein", "high-protein"],
            "Low-Fat": ["low-fat", "low fat", "light"],
            "Dairy-Free": ["dairy-free", "dairy free", "lactose"],
        },
    },
    "main_ingredient": {
        "label": "Main Ingredient",
        "categories": {
            # keywords use exact inferred_tag strings (main:X) — indexed into recipe_browser_fts.
            "Chicken": {
                "keywords": ["main:Chicken"],
                "subcategories": {
                    "Baked": ["baked chicken", "roast chicken", "chicken casserole",
                              "chicken bake"],
                    "Grilled": ["grilled chicken", "chicken kebab", "bbq chicken",
                                "chicken skewer"],
                    "Fried": ["fried chicken", "chicken cutlet", "chicken schnitzel",
                              "crispy chicken"],
                    "Stewed": ["chicken stew", "chicken soup", "coq au vin",
                               "chicken curry", "chicken braise"],
                },
            },
            "Beef": {
                "keywords": ["main:Beef"],
                "subcategories": {
                    "Ground Beef": ["ground beef", "hamburger", "meatball", "meatloaf",
                                    "bolognese", "burger"],
                    "Steak": ["steak", "sirloin", "ribeye", "flank steak",
                              "filet mignon", "t-bone"],
                    "Roasts": ["beef roast", "pot roast", "brisket", "prime rib",
                               "chuck roast"],
                    "Stews": ["beef stew", "beef braise", "beef bourguignon",
                              "short ribs"],
                },
            },
            "Pork": {
                "keywords": ["main:Pork"],
                "subcategories": {
                    "Chops": ["pork chop", "pork loin", "pork cutlet"],
                    "Pulled/Slow": ["pulled pork", "pork shoulder", "pork butt",
                                    "carnitas", "slow cooker pork"],
                    "Sausage": ["sausage", "bratwurst", "chorizo", "andouille",
                                "Italian sausage"],
                    "Ribs": ["pork ribs", "baby back ribs", "spare ribs",
                             "pork belly"],
                },
            },
            "Fish": {
                "keywords": ["main:Fish"],
                "subcategories": {
                    "Salmon": ["salmon", "smoked salmon", "gravlax"],
                    "Tuna": ["tuna", "albacore", "ahi"],
                    "White Fish": ["cod", "tilapia", "halibut", "sole", "snapper",
                                   "flounder", "bass"],
                    "Shellfish": ["shrimp", "prawn", "crab", "lobster", "scallop",
                                  "mussel", "clam", "oyster"],
                },
            },
            "Pasta": ["main:Pasta"],
            "Vegetables": {
                "keywords": ["main:Vegetables"],
                "subcategories": {
                    "Root Veg": ["potato", "sweet potato", "carrot", "beet",
                                 "parsnip", "turnip"],
                    "Leafy": ["spinach", "kale", "chard", "arugula",
                              "collard greens", "lettuce"],
                    "Brassicas": ["broccoli", "cauliflower", "brussels sprouts",
                                  "cabbage", "bok choy"],
                    "Nightshades": ["tomato", "eggplant", "bell pepper", "zucchini",
                                    "squash"],
                    "Mushrooms": ["mushroom", "portobello", "shiitake", "oyster mushroom",
                                  "chanterelle"],
                },
            },
            "Eggs": ["main:Eggs"],
            "Legumes": ["main:Legumes"],
            "Grains": ["main:Grains"],
            "Cheese": ["main:Cheese"],
        },
    },
}
|
||||
|
||||
|
||||
def _get_category_def(domain: str, category: str) -> list[str] | dict | None:
|
||||
"""Return the raw category definition, or None if not found."""
|
||||
return DOMAINS.get(domain, {}).get("categories", {}).get(category)
|
||||
|
||||
|
||||
def get_domain_labels() -> list[dict]:
|
||||
"""Return [{id, label}] for all available domains."""
|
||||
return [{"id": k, "label": v["label"]} for k, v in DOMAINS.items()]
|
||||
|
||||
|
||||
def get_keywords_for_category(domain: str, category: str) -> list[str]:
|
||||
"""Return the keyword list for the category (top-level, covers all subcategories).
|
||||
|
||||
For flat categories returns the list directly.
|
||||
For nested categories returns the 'keywords' key.
|
||||
Returns [] if category or domain not found.
|
||||
"""
|
||||
cat_def = _get_category_def(domain, category)
|
||||
if cat_def is None:
|
||||
return []
|
||||
if isinstance(cat_def, list):
|
||||
return cat_def
|
||||
return cat_def.get("keywords", [])
|
||||
|
||||
|
||||
def category_has_subcategories(domain: str, category: str) -> bool:
|
||||
"""Return True when a category has a subcategory level."""
|
||||
cat_def = _get_category_def(domain, category)
|
||||
if not isinstance(cat_def, dict):
|
||||
return False
|
||||
return bool(cat_def.get("subcategories"))
|
||||
|
||||
|
||||
def get_subcategory_names(domain: str, category: str) -> list[str]:
|
||||
"""Return subcategory names for a category, or [] if none exist."""
|
||||
cat_def = _get_category_def(domain, category)
|
||||
if not isinstance(cat_def, dict):
|
||||
return []
|
||||
return list(cat_def.get("subcategories", {}).keys())
|
||||
|
||||
|
||||
def get_keywords_for_subcategory(domain: str, category: str, subcategory: str) -> list[str]:
|
||||
"""Return keyword list for a specific subcategory, or [] if not found."""
|
||||
cat_def = _get_category_def(domain, category)
|
||||
if not isinstance(cat_def, dict):
|
||||
return []
|
||||
return cat_def.get("subcategories", {}).get(subcategory, [])
|
||||
|
||||
|
||||
def get_category_names(domain: str) -> list[str]:
|
||||
"""Return category names for a domain, or [] if domain unknown."""
|
||||
domain_data = DOMAINS.get(domain, {})
|
||||
return list(domain_data.get("categories", {}).keys())
|
||||
|
|
@ -84,9 +84,8 @@ class ElementClassifier:
|
|||
name = ingredient_name.lower().strip()
|
||||
if not name:
|
||||
return IngredientProfile(name="", elements=[], source="heuristic")
|
||||
c = self._store._cp
|
||||
row = self._store._fetch_one(
|
||||
f"SELECT * FROM {c}ingredient_profiles WHERE name = ?", (name,)
|
||||
"SELECT * FROM ingredient_profiles WHERE name = ?", (name,)
|
||||
)
|
||||
if row:
|
||||
return self._row_to_profile(row)
|
||||
|
|
|
|||
|
|
@ -1,87 +1,69 @@
|
|||
"""
|
||||
GroceryLinkBuilder — affiliate deeplinks for missing ingredient grocery lists.
|
||||
|
||||
Delegates URL wrapping to circuitforge_core.affiliates.wrap_url, which handles
|
||||
the full resolution chain: opt-out → BYOK id → CF env var → plain URL.
|
||||
Free tier: URL construction only (Amazon Fresh, Walmart, Instacart).
|
||||
Paid+: live product search API (stubbed — future task).
|
||||
|
||||
Registered programs (via cf-core):
|
||||
amazon — Amazon Associates (env: AMAZON_ASSOCIATES_TAG)
|
||||
instacart — Instacart (env: INSTACART_AFFILIATE_ID)
|
||||
|
||||
Walmart is kept inline until cf-core adds Impact network support:
|
||||
env: WALMART_AFFILIATE_ID
|
||||
|
||||
Links are always generated (plain URLs are useful even without affiliate IDs).
|
||||
Walmart links only appear when WALMART_AFFILIATE_ID is set.
|
||||
Instacart and Walmart are US/CA-only; other locales get Amazon only.
|
||||
Config (env vars, all optional — missing = retailer disabled):
|
||||
AMAZON_AFFILIATE_TAG — e.g. "circuitforge-20"
|
||||
INSTACART_AFFILIATE_ID — e.g. "circuitforge"
|
||||
WALMART_AFFILIATE_ID — e.g. "circuitforge" (Impact affiliate network)
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import os
|
||||
from urllib.parse import quote_plus
|
||||
|
||||
from circuitforge_core.affiliates import wrap_url
|
||||
|
||||
from app.models.schemas.recipe import GroceryLink
|
||||
from app.services.recipe.locale_config import get_locale
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _amazon_link(ingredient: str, locale: str) -> GroceryLink:
|
||||
cfg = get_locale(locale)
|
||||
def _amazon_link(ingredient: str, tag: str) -> GroceryLink:
|
||||
q = quote_plus(ingredient)
|
||||
domain = cfg["amazon_domain"]
|
||||
dept = cfg["amazon_grocery_dept"]
|
||||
base = f"https://www.{domain}/s?k={q}&{dept}"
|
||||
retailer = "Amazon" if locale != "us" else "Amazon Fresh"
|
||||
return GroceryLink(ingredient=ingredient, retailer=retailer, url=wrap_url(base, "amazon"))
|
||||
|
||||
|
||||
def _instacart_link(ingredient: str, locale: str) -> GroceryLink:
|
||||
q = quote_plus(ingredient)
|
||||
if locale == "ca":
|
||||
base = f"https://www.instacart.ca/store/s?k={q}"
|
||||
else:
|
||||
base = f"https://www.instacart.com/store/s?k={q}"
|
||||
return GroceryLink(ingredient=ingredient, retailer="Instacart", url=wrap_url(base, "instacart"))
|
||||
url = f"https://www.amazon.com/s?k={q}&i=amazonfresh&tag={tag}"
|
||||
return GroceryLink(ingredient=ingredient, retailer="Amazon Fresh", url=url)
|
||||
|
||||
|
||||
def _walmart_link(ingredient: str, affiliate_id: str) -> GroceryLink:
|
||||
q = quote_plus(ingredient)
|
||||
# Walmart uses Impact network — affiliate ID is in the redirect path, not a param
|
||||
url = (
|
||||
f"https://goto.walmart.com/c/{affiliate_id}/walmart"
|
||||
f"?u=https://www.walmart.com/search?q={q}"
|
||||
)
|
||||
# Walmart Impact affiliate deeplink pattern
|
||||
url = f"https://goto.walmart.com/c/{affiliate_id}/walmart?u=https://www.walmart.com/search?q={q}"
|
||||
return GroceryLink(ingredient=ingredient, retailer="Walmart Grocery", url=url)
|
||||
|
||||
|
||||
def _instacart_link(ingredient: str, affiliate_id: str) -> GroceryLink:
|
||||
q = quote_plus(ingredient)
|
||||
url = f"https://www.instacart.com/store/s?k={q}&aff={affiliate_id}"
|
||||
return GroceryLink(ingredient=ingredient, retailer="Instacart", url=url)
|
||||
|
||||
|
||||
class GroceryLinkBuilder:
|
||||
def __init__(self, tier: str = "free", has_byok: bool = False, locale: str = "us") -> None:
|
||||
def __init__(self, tier: str = "free", has_byok: bool = False) -> None:
|
||||
self._tier = tier
|
||||
self._locale = locale
|
||||
self._locale_cfg = get_locale(locale)
|
||||
self._walmart_id = os.environ.get("WALMART_AFFILIATE_ID", "").strip()
|
||||
self._has_byok = has_byok
|
||||
self._amazon_tag = os.environ.get("AMAZON_AFFILIATE_TAG", "")
|
||||
self._instacart_id = os.environ.get("INSTACART_AFFILIATE_ID", "")
|
||||
self._walmart_id = os.environ.get("WALMART_AFFILIATE_ID", "")
|
||||
|
||||
def build_links(self, ingredient: str) -> list[GroceryLink]:
|
||||
"""Build grocery deeplinks for a single ingredient.
|
||||
"""Build affiliate deeplinks for a single ingredient.
|
||||
|
||||
Amazon link is always included, routed to the user's locale domain.
|
||||
Instacart and Walmart are only shown where they operate (US/CA).
|
||||
wrap_url handles affiliate ID injection for supported programs.
|
||||
Free tier: URL construction only.
|
||||
Paid+: would call live product search APIs (stubbed).
|
||||
"""
|
||||
if not ingredient.strip():
|
||||
return []
|
||||
links: list[GroceryLink] = []
|
||||
|
||||
links: list[GroceryLink] = [_amazon_link(ingredient, self._locale)]
|
||||
|
||||
if self._locale_cfg["instacart"]:
|
||||
links.append(_instacart_link(ingredient, self._locale))
|
||||
|
||||
if self._locale_cfg["walmart"] and self._walmart_id:
|
||||
if self._amazon_tag:
|
||||
links.append(_amazon_link(ingredient, self._amazon_tag))
|
||||
if self._walmart_id:
|
||||
links.append(_walmart_link(ingredient, self._walmart_id))
|
||||
if self._instacart_id:
|
||||
links.append(_instacart_link(ingredient, self._instacart_id))
|
||||
|
||||
# Paid+: live API stub (future task)
|
||||
# if self._tier in ("paid", "premium") and not self._has_byok:
|
||||
# links.extend(self._search_kroger_api(ingredient))
|
||||
|
||||
return links
|
||||
|
||||
|
|
|
|||
|
|
@ -68,9 +68,6 @@ class LLMRecipeGenerator:
|
|||
if allergy_list:
|
||||
lines.append(f"IMPORTANT — must NOT contain: {', '.join(allergy_list)}")
|
||||
|
||||
if req.exclude_ingredients:
|
||||
lines.append(f"IMPORTANT — user does not want these today: {', '.join(req.exclude_ingredients)}. Do not include them.")
|
||||
|
||||
lines.append("")
|
||||
lines.append(f"Covered culinary elements: {', '.join(covered_elements) or 'none'}")
|
||||
|
||||
|
|
@ -87,13 +84,7 @@ class LLMRecipeGenerator:
|
|||
if template.aromatics:
|
||||
lines.append(f"Preferred aromatics: {', '.join(template.aromatics[:4])}")
|
||||
|
||||
unit_line = (
|
||||
"Use metric units (grams, ml, Celsius) for all quantities and temperatures."
|
||||
if req.unit_system == "metric"
|
||||
else "Use imperial units (oz, cups, Fahrenheit) for all quantities and temperatures."
|
||||
)
|
||||
lines += [
|
||||
unit_line,
|
||||
"",
|
||||
"Reply using EXACTLY this plain-text format — no markdown, no bold, no extra commentary:",
|
||||
"Title: <name of the dish>",
|
||||
|
|
@ -127,17 +118,8 @@ class LLMRecipeGenerator:
|
|||
if allergy_list:
|
||||
lines.append(f"Must NOT contain: {', '.join(allergy_list)}")
|
||||
|
||||
if req.exclude_ingredients:
|
||||
lines.append(f"Do not use today: {', '.join(req.exclude_ingredients)}")
|
||||
|
||||
unit_line = (
|
||||
"Use metric units (grams, ml, Celsius) for all quantities and temperatures."
|
||||
if req.unit_system == "metric"
|
||||
else "Use imperial units (oz, cups, Fahrenheit) for all quantities and temperatures."
|
||||
)
|
||||
lines += [
|
||||
"Treat any mystery ingredient as a wildcard — use your imagination.",
|
||||
unit_line,
|
||||
"Reply using EXACTLY this plain-text format — no markdown, no bold:",
|
||||
"Title: <name of the dish>",
|
||||
"Ingredients: <comma-separated list>",
|
||||
|
|
@ -149,29 +131,25 @@ class LLMRecipeGenerator:
|
|||
|
||||
return "\n".join(lines)
|
||||
|
||||
_SERVICE_TYPE = "vllm"
|
||||
_MODEL_CANDIDATES = ["Qwen2.5-3B-Instruct", "Phi-4-mini-instruct"]
|
||||
_TTL_S = 300.0
|
||||
_CALLER = "kiwi-recipe"
|
||||
_MODEL_CANDIDATES: list[str] = ["Ouro-2.6B-Thinking", "Ouro-1.4B"]
|
||||
|
||||
def _get_llm_context(self):
|
||||
"""Return a sync context manager that yields an Allocation or None.
|
||||
|
||||
When CF_ORCH_URL is set, uses CFOrchClient to acquire a cf-text allocation
|
||||
When CF_ORCH_URL is set, uses CFOrchClient to acquire a vLLM allocation
|
||||
(which handles service lifecycle and VRAM). Falls back to nullcontext(None)
|
||||
when the env var is absent or CFOrchClient raises on construction.
|
||||
"""
|
||||
cf_orch_url = os.environ.get("CF_ORCH_URL")
|
||||
if cf_orch_url:
|
||||
try:
|
||||
from circuitforge_orch.client import CFOrchClient
|
||||
from circuitforge_core.resources import CFOrchClient
|
||||
client = CFOrchClient(cf_orch_url)
|
||||
return client.allocate(
|
||||
service=self._SERVICE_TYPE,
|
||||
service="vllm",
|
||||
model_candidates=self._MODEL_CANDIDATES,
|
||||
ttl_s=self._TTL_S,
|
||||
caller=self._CALLER,
|
||||
pipeline=os.environ.get("CF_APP_NAME") or None,
|
||||
ttl_s=300.0,
|
||||
caller="kiwi-recipe",
|
||||
)
|
||||
except Exception as exc:
|
||||
logger.debug("CFOrchClient init failed, falling back to direct URL: %s", exc)
|
||||
|
|
@ -182,31 +160,10 @@ class LLMRecipeGenerator:
|
|||
|
||||
With CF_ORCH_URL set: acquires a vLLM allocation via CFOrchClient and
|
||||
calls the OpenAI-compatible API directly against the allocated service URL.
|
||||
Allocation failure falls through to LLMRouter rather than silently returning "".
|
||||
Without CF_ORCH_URL: uses LLMRouter directly.
|
||||
Without CF_ORCH_URL: falls back to LLMRouter using its configured backends.
|
||||
"""
|
||||
ctx = self._get_llm_context()
|
||||
alloc = None
|
||||
try:
|
||||
alloc = ctx.__enter__()
|
||||
except Exception as exc:
|
||||
msg = str(exc)
|
||||
# 429 = coordinator at capacity (all nodes at max_concurrent limit).
|
||||
# Don't fall back to LLMRouter — it's also overloaded and the slow
|
||||
# fallback causes nginx 504s. Return "" fast so the caller degrades
|
||||
# gracefully (empty recipe result) rather than timing out.
|
||||
if "429" in msg or "max_concurrent" in msg.lower():
|
||||
logger.info("cf-orch at capacity — returning empty result (graceful degradation)")
|
||||
if ctx is not None:
|
||||
try:
|
||||
ctx.__exit__(None, None, None)
|
||||
except Exception:
|
||||
pass
|
||||
return ""
|
||||
logger.debug("cf-orch allocation failed, falling back to LLMRouter: %s", exc)
|
||||
ctx = None # __enter__ raised — do not call __exit__
|
||||
|
||||
try:
|
||||
with self._get_llm_context() as alloc:
|
||||
if alloc is not None:
|
||||
base_url = alloc.url.rstrip("/") + "/v1"
|
||||
client = OpenAI(base_url=base_url, api_key="any")
|
||||
|
|
@ -220,16 +177,11 @@ class LLMRecipeGenerator:
|
|||
return resp.choices[0].message.content or ""
|
||||
else:
|
||||
from circuitforge_core.llm.router import LLMRouter
|
||||
return LLMRouter().complete(prompt)
|
||||
router = LLMRouter()
|
||||
return router.complete(prompt)
|
||||
except Exception as exc:
|
||||
logger.error("LLM call failed: %s", exc)
|
||||
return ""
|
||||
finally:
|
||||
if ctx is not None:
|
||||
try:
|
||||
ctx.__exit__(None, None, None)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
# Strips markdown bold/italic markers so "**Directions:**" parses like "Directions:"
|
||||
_MD_BOLD = re.compile(r"\*{1,2}([^*]+)\*{1,2}")
|
||||
|
|
|
|||
|
|
@ -1,160 +0,0 @@
|
|||
"""
|
||||
Shopping locale configuration.
|
||||
|
||||
Maps a locale key to Amazon domain, currency metadata, and retailer availability.
|
||||
Instacart and Walmart are US/CA-only; all other locales get Amazon only.
|
||||
Amazon Fresh (&i=amazonfresh) is US-only — international domains use the general
|
||||
grocery department (&rh=n:16310101) where available, plain search elsewhere.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import TypedDict
|
||||
|
||||
|
||||
class LocaleConfig(TypedDict):
|
||||
amazon_domain: str
|
||||
amazon_grocery_dept: str # URL fragment for grocery department on this locale's site
|
||||
currency_code: str
|
||||
currency_symbol: str
|
||||
instacart: bool
|
||||
walmart: bool
|
||||
|
||||
|
||||
LOCALES: dict[str, LocaleConfig] = {
|
||||
"us": {
|
||||
"amazon_domain": "amazon.com",
|
||||
"amazon_grocery_dept": "i=amazonfresh",
|
||||
"currency_code": "USD",
|
||||
"currency_symbol": "$",
|
||||
"instacart": True,
|
||||
"walmart": True,
|
||||
},
|
||||
"ca": {
|
||||
"amazon_domain": "amazon.ca",
|
||||
"amazon_grocery_dept": "rh=n:6967215011", # Grocery dept on .ca # gitleaks:allow
|
||||
"currency_code": "CAD",
|
||||
"currency_symbol": "CA$",
|
||||
"instacart": True,
|
||||
"walmart": False,
|
||||
},
|
||||
"gb": {
|
||||
"amazon_domain": "amazon.co.uk",
|
||||
"amazon_grocery_dept": "rh=n:340831031", # Grocery dept on .co.uk
|
||||
"currency_code": "GBP",
|
||||
"currency_symbol": "£",
|
||||
"instacart": False,
|
||||
"walmart": False,
|
||||
},
|
||||
"au": {
|
||||
"amazon_domain": "amazon.com.au",
|
||||
"amazon_grocery_dept": "rh=n:5765081051", # Pantry/grocery on .com.au # gitleaks:allow
|
||||
"currency_code": "AUD",
|
||||
"currency_symbol": "A$",
|
||||
"instacart": False,
|
||||
"walmart": False,
|
||||
},
|
||||
"nz": {
|
||||
# NZ has no Amazon storefront — route to .com.au as nearest option
|
||||
"amazon_domain": "amazon.com.au",
|
||||
"amazon_grocery_dept": "rh=n:5765081051", # gitleaks:allow
|
||||
"currency_code": "NZD",
|
||||
"currency_symbol": "NZ$",
|
||||
"instacart": False,
|
||||
"walmart": False,
|
||||
},
|
||||
"de": {
|
||||
"amazon_domain": "amazon.de",
|
||||
"amazon_grocery_dept": "rh=n:340843031", # Lebensmittel & Getränke
|
||||
"currency_code": "EUR",
|
||||
"currency_symbol": "€",
|
||||
"instacart": False,
|
||||
"walmart": False,
|
||||
},
|
||||
"fr": {
|
||||
"amazon_domain": "amazon.fr",
|
||||
"amazon_grocery_dept": "rh=n:197858031",
|
||||
"currency_code": "EUR",
|
||||
"currency_symbol": "€",
|
||||
"instacart": False,
|
||||
"walmart": False,
|
||||
},
|
||||
"it": {
|
||||
"amazon_domain": "amazon.it",
|
||||
"amazon_grocery_dept": "rh=n:525616031",
|
||||
"currency_code": "EUR",
|
||||
"currency_symbol": "€",
|
||||
"instacart": False,
|
||||
"walmart": False,
|
||||
},
|
||||
"es": {
|
||||
"amazon_domain": "amazon.es",
|
||||
"amazon_grocery_dept": "rh=n:599364031",
|
||||
"currency_code": "EUR",
|
||||
"currency_symbol": "€",
|
||||
"instacart": False,
|
||||
"walmart": False,
|
||||
},
|
||||
"nl": {
|
||||
"amazon_domain": "amazon.nl",
|
||||
"amazon_grocery_dept": "rh=n:16584827031",
|
||||
"currency_code": "EUR",
|
||||
"currency_symbol": "€",
|
||||
"instacart": False,
|
||||
"walmart": False,
|
||||
},
|
||||
"se": {
|
||||
"amazon_domain": "amazon.se",
|
||||
"amazon_grocery_dept": "rh=n:20741393031",
|
||||
"currency_code": "SEK",
|
||||
"currency_symbol": "kr",
|
||||
"instacart": False,
|
||||
"walmart": False,
|
||||
},
|
||||
"jp": {
|
||||
"amazon_domain": "amazon.co.jp",
|
||||
"amazon_grocery_dept": "rh=n:2246283051", # gitleaks:allow
|
||||
"currency_code": "JPY",
|
||||
"currency_symbol": "¥",
|
||||
"instacart": False,
|
||||
"walmart": False,
|
||||
},
|
||||
"in": {
|
||||
"amazon_domain": "amazon.in",
|
||||
"amazon_grocery_dept": "rh=n:2454178031", # gitleaks:allow
|
||||
"currency_code": "INR",
|
||||
"currency_symbol": "₹",
|
||||
"instacart": False,
|
||||
"walmart": False,
|
||||
},
|
||||
"mx": {
|
||||
"amazon_domain": "amazon.com.mx",
|
||||
"amazon_grocery_dept": "rh=n:10737659011",
|
||||
"currency_code": "MXN",
|
||||
"currency_symbol": "MX$",
|
||||
"instacart": False,
|
||||
"walmart": False,
|
||||
},
|
||||
"br": {
|
||||
"amazon_domain": "amazon.com.br",
|
||||
"amazon_grocery_dept": "rh=n:17878420011",
|
||||
"currency_code": "BRL",
|
||||
"currency_symbol": "R$",
|
||||
"instacart": False,
|
||||
"walmart": False,
|
||||
},
|
||||
"sg": {
|
||||
"amazon_domain": "amazon.sg",
|
||||
"amazon_grocery_dept": "rh=n:6981647051", # gitleaks:allow
|
||||
"currency_code": "SGD",
|
||||
"currency_symbol": "S$",
|
||||
"instacart": False,
|
||||
"walmart": False,
|
||||
},
|
||||
}
|
||||
|
||||
DEFAULT_LOCALE = "us"
|
||||
|
||||
|
||||
def get_locale(key: str) -> LocaleConfig:
|
||||
"""Return locale config for *key*, falling back to US if unknown."""
|
||||
return LOCALES.get(key, LOCALES[DEFAULT_LOCALE])
|
||||
|
|
@ -21,12 +21,10 @@ if TYPE_CHECKING:
|
|||
from app.db.store import Store
|
||||
|
||||
from app.models.schemas.recipe import GroceryLink, NutritionPanel, RecipeRequest, RecipeResult, RecipeSuggestion, SwapCandidate
|
||||
from app.services.recipe.assembly_recipes import match_assembly_templates
|
||||
from app.services.recipe.element_classifier import ElementClassifier
|
||||
from app.services.recipe.grocery_links import GroceryLinkBuilder
|
||||
from app.services.recipe.substitution_engine import SubstitutionEngine
|
||||
from app.services.recipe.sensory import SensoryExclude, build_sensory_exclude, passes_sensory_filter
|
||||
from app.services.recipe.time_effort import parse_time_effort
|
||||
from app.services.recipe.reranker import rerank_suggestions
|
||||
|
||||
_LEFTOVER_DAILY_MAX_FREE = 5
|
||||
|
||||
|
|
@ -158,56 +156,6 @@ _PANTRY_LABEL_SYNONYMS: dict[str, str] = {
|
|||
}
|
||||
|
||||
|
||||
# When a pantry item is in a secondary state (e.g. bread → "stale"), expand
|
||||
# the pantry set with terms that recipe ingredients commonly use to describe
|
||||
# that state. This lets "stale bread" in a recipe ingredient match a pantry
|
||||
# entry that is simply called "Bread" but is past its nominal use-by date.
|
||||
# Each key is (category_in_SECONDARY_WINDOW, label_returned_by_secondary_state).
|
||||
# Values are additional strings added to the pantry set for FTS coverage.
|
||||
_SECONDARY_STATE_SYNONYMS: dict[tuple[str, str], list[str]] = {
|
||||
# ── Existing entries (corrected) ─────────────────────────────────────────
|
||||
("bread", "stale"): ["stale bread", "day-old bread", "old bread", "dried bread"],
|
||||
("bakery", "day-old"): ["day-old bread", "stale bread", "stale pastry",
|
||||
"day-old croissant", "stale croissant", "day-old muffin",
|
||||
"stale cake", "old pastry", "day-old baguette"],
|
||||
("bananas", "overripe"): ["overripe bananas", "very ripe bananas", "spotty bananas",
|
||||
"brown bananas", "black bananas", "mushy bananas",
|
||||
"mashed banana", "ripe bananas"],
|
||||
("milk", "sour"): ["sour milk", "slightly sour milk", "buttermilk",
|
||||
"soured milk", "off milk", "milk gone sour"],
|
||||
("dairy", "sour"): ["sour milk", "slightly sour milk", "soured milk"],
|
||||
("cheese", "rind-ready"): ["parmesan rind", "cheese rind", "aged cheese",
|
||||
"hard cheese rind", "parmigiano rind", "grana padano rind",
|
||||
"pecorino rind", "dry cheese"],
|
||||
("rice", "day-old"): ["day-old rice", "leftover rice", "cold rice", "cooked rice",
|
||||
"old rice"],
|
||||
("tortillas", "stale"): ["stale tortillas", "dried tortillas", "day-old tortillas"],
|
||||
# ── New entries ──────────────────────────────────────────────────────────
|
||||
("apples", "soft"): ["soft apples", "mealy apples", "overripe apples",
|
||||
"bruised apples", "mushy apple"],
|
||||
("leafy_greens", "wilting"):["wilted spinach", "wilted greens", "limp lettuce",
|
||||
"wilted kale", "tired greens"],
|
||||
("tomatoes", "soft"): ["overripe tomatoes", "very ripe tomatoes", "ripe tomatoes",
|
||||
"soft tomatoes", "bruised tomatoes"],
|
||||
("cooked_pasta", "day-old"):["leftover pasta", "cooked pasta", "day-old pasta",
|
||||
"cold pasta", "pre-cooked pasta"],
|
||||
("cooked_potatoes", "day-old"): ["leftover potatoes", "cooked potatoes", "day-old potatoes",
|
||||
"mashed potatoes", "baked potatoes"],
|
||||
("yogurt", "tangy"): ["sour yogurt", "tangy yogurt", "past-date yogurt",
|
||||
"older yogurt", "well-cultured yogurt"],
|
||||
("cream", "sour"): ["slightly soured cream", "cultured cream",
|
||||
"heavy cream gone sour", "soured cream"],
|
||||
("wine", "open"): ["open wine", "leftover wine", "day-old wine",
|
||||
"cooking wine", "red wine", "white wine"],
|
||||
("cooked_beans", "day-old"):["leftover beans", "cooked beans", "day-old beans",
|
||||
"cold beans", "pre-cooked beans",
|
||||
"cooked chickpeas", "cooked lentils"],
|
||||
("cooked_meat", "leftover"):["leftover chicken", "shredded chicken", "leftover beef",
|
||||
"cooked chicken", "pulled chicken", "leftover pork",
|
||||
"cooked meat", "rotisserie chicken"],
|
||||
}
|
||||
|
||||
|
||||
# Matches leading quantity/unit prefixes in recipe ingredient strings,
|
||||
# e.g. "2 cups flour" → "flour", "1/2 c. ketchup" → "ketchup",
|
||||
# "3 oz. butter" → "butter"
|
||||
|
|
@ -337,24 +285,14 @@ def _prep_note_for(ingredient: str) -> str | None:
|
|||
return template.format(ingredient=ingredient_name)
|
||||
|
||||
|
||||
def _expand_pantry_set(
|
||||
pantry_items: list[str],
|
||||
secondary_pantry_items: dict[str, str] | None = None,
|
||||
) -> set[str]:
|
||||
def _expand_pantry_set(pantry_items: list[str]) -> set[str]:
|
||||
"""Return pantry_set expanded with canonical recipe-corpus synonyms.
|
||||
|
||||
For each pantry item, checks _PANTRY_LABEL_SYNONYMS for substring matches
|
||||
and adds the canonical form. This lets single-word recipe ingredients
|
||||
("hamburger", "chicken") match product-label pantry entries
|
||||
("burger patties", "rotisserie chicken").
|
||||
|
||||
If secondary_pantry_items is provided (product_name → state label), items
|
||||
in a secondary state also receive state-specific synonym expansion so that
|
||||
recipe ingredients like "stale bread" or "day-old rice" are matched.
|
||||
"""
|
||||
from app.services.expiration_predictor import ExpirationPredictor
|
||||
_predictor = ExpirationPredictor()
|
||||
|
||||
expanded: set[str] = set()
|
||||
for item in pantry_items:
|
||||
lower = item.lower().strip()
|
||||
|
|
@ -362,15 +300,6 @@ def _expand_pantry_set(
|
|||
for pattern, canonical in _PANTRY_LABEL_SYNONYMS.items():
|
||||
if pattern in lower:
|
||||
expanded.add(canonical)
|
||||
|
||||
# Secondary state expansion — adds terms like "stale bread", "day-old rice"
|
||||
if secondary_pantry_items and item in secondary_pantry_items:
|
||||
state_label = secondary_pantry_items[item]
|
||||
category = _predictor.get_category_from_product(item)
|
||||
if category:
|
||||
synonyms = _SECONDARY_STATE_SYNONYMS.get((category, state_label), [])
|
||||
expanded.update(synonyms)
|
||||
|
||||
return expanded
|
||||
|
||||
|
||||
|
|
@ -438,156 +367,6 @@ def _pantry_creative_swap(required: str, pantry_items: set[str]) -> str | None:
|
|||
return best
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Functional-category swap table (Level 2 only)
|
||||
# ---------------------------------------------------------------------------
|
||||
# Maps cleaned ingredient names → functional category label. Used as a
|
||||
# fallback when _pantry_creative_swap returns None (which always happens for
|
||||
# single-token ingredients, because that function requires ≥2 shared tokens).
|
||||
# A pantry item that belongs to the same category is offered as a substitute.
|
||||
_FUNCTIONAL_SWAP_CATEGORIES: dict[str, str] = {
|
||||
# Solid fats
|
||||
"butter": "solid_fat",
|
||||
"margarine": "solid_fat",
|
||||
"shortening": "solid_fat",
|
||||
"lard": "solid_fat",
|
||||
"ghee": "solid_fat",
|
||||
# Liquid/neutral cooking oils
|
||||
"oil": "liquid_fat",
|
||||
"vegetable oil": "liquid_fat",
|
||||
"olive oil": "liquid_fat",
|
||||
"canola oil": "liquid_fat",
|
||||
"sunflower oil": "liquid_fat",
|
||||
"avocado oil": "liquid_fat",
|
||||
# Sweeteners
|
||||
"sugar": "sweetener",
|
||||
"brown sugar": "sweetener",
|
||||
"honey": "sweetener",
|
||||
"maple syrup": "sweetener",
|
||||
"agave": "sweetener",
|
||||
"molasses": "sweetener",
|
||||
"stevia": "sweetener",
|
||||
"powdered sugar": "sweetener",
|
||||
# All-purpose flours and baking bases
|
||||
"flour": "flour",
|
||||
"all-purpose flour": "flour",
|
||||
"whole wheat flour": "flour",
|
||||
"bread flour": "flour",
|
||||
"self-rising flour": "flour",
|
||||
"cake flour": "flour",
|
||||
# Dairy and non-dairy milk
|
||||
"milk": "dairy_milk",
|
||||
"whole milk": "dairy_milk",
|
||||
"skim milk": "dairy_milk",
|
||||
"2% milk": "dairy_milk",
|
||||
"oat milk": "dairy_milk",
|
||||
"almond milk": "dairy_milk",
|
||||
"soy milk": "dairy_milk",
|
||||
"rice milk": "dairy_milk",
|
||||
# Heavy/whipping creams
|
||||
"cream": "heavy_cream",
|
||||
"heavy cream": "heavy_cream",
|
||||
"whipping cream": "heavy_cream",
|
||||
"double cream": "heavy_cream",
|
||||
"coconut cream": "heavy_cream",
|
||||
# Cultured dairy (acid + thick)
|
||||
"sour cream": "cultured_dairy",
|
||||
"greek yogurt": "cultured_dairy",
|
||||
"yogurt": "cultured_dairy",
|
||||
"buttermilk": "cultured_dairy",
|
||||
# Starch thickeners
|
||||
"cornstarch": "thickener",
|
||||
"arrowroot": "thickener",
|
||||
"tapioca starch": "thickener",
|
||||
"potato starch": "thickener",
|
||||
"rice flour": "thickener",
|
||||
# Egg binders
|
||||
"egg": "egg_binder",
|
||||
"eggs": "egg_binder",
|
||||
# Acids
|
||||
"vinegar": "acid",
|
||||
"apple cider vinegar": "acid",
|
||||
"white vinegar": "acid",
|
||||
"red wine vinegar": "acid",
|
||||
"lemon juice": "acid",
|
||||
"lime juice": "acid",
|
||||
# Stocks and broths
|
||||
"broth": "stock",
|
||||
"stock": "stock",
|
||||
"chicken broth": "stock",
|
||||
"beef broth": "stock",
|
||||
"vegetable broth": "stock",
|
||||
"chicken stock": "stock",
|
||||
"beef stock": "stock",
|
||||
"bouillon": "stock",
|
||||
# Hard cheeses (grating / melting interchangeable)
|
||||
"parmesan": "hard_cheese",
|
||||
"romano": "hard_cheese",
|
||||
"pecorino": "hard_cheese",
|
||||
"asiago": "hard_cheese",
|
||||
# Melting cheeses
|
||||
"cheddar": "melting_cheese",
|
||||
"mozzarella": "melting_cheese",
|
||||
"swiss": "melting_cheese",
|
||||
"gouda": "melting_cheese",
|
||||
"monterey jack": "melting_cheese",
|
||||
"colby": "melting_cheese",
|
||||
"provolone": "melting_cheese",
|
||||
# Canned tomato products
|
||||
"tomato sauce": "canned_tomato",
|
||||
"tomato paste": "canned_tomato",
|
||||
"crushed tomatoes": "canned_tomato",
|
||||
"diced tomatoes": "canned_tomato",
|
||||
"marinara": "canned_tomato",
|
||||
}
|
||||
|
||||
|
||||
def _category_swap(ingredient: str, pantry_items: set[str]) -> str | None:
|
||||
"""Level-2 fallback: find a same-category pantry substitute for a single-token ingredient.
|
||||
|
||||
_pantry_creative_swap requires ≥2 shared content tokens, so it always returns
|
||||
None for single-word ingredients like 'butter' or 'flour'. This function looks
|
||||
up the ingredient's functional category and returns any pantry item in that
|
||||
same category, enabling swaps like butter → ghee, milk → oat milk.
|
||||
"""
|
||||
clean = _strip_quantity(ingredient).lower()
|
||||
category = _FUNCTIONAL_SWAP_CATEGORIES.get(clean)
|
||||
if not category:
|
||||
return None
|
||||
for item in pantry_items:
|
||||
if item.lower() == clean:
|
||||
continue
|
||||
item_lower = item.lower()
|
||||
# Direct match: pantry item name is a known member of the same category
|
||||
if _FUNCTIONAL_SWAP_CATEGORIES.get(item_lower) == category:
|
||||
return item
|
||||
# Substring match: handles "organic oat milk" containing "oat milk"
|
||||
for known_ing, cat in _FUNCTIONAL_SWAP_CATEGORIES.items():
|
||||
if cat == category and known_ing in item_lower and item_lower != clean:
|
||||
return item
|
||||
return None
|
||||
|
||||
|
||||
# Assembly template caps by tier — prevents flooding results with templates
|
||||
# when a well-stocked pantry satisfies every required role.
|
||||
_SOURCE_URL_BUILDERS: dict[str, str] = {
|
||||
"foodcom": "https://www.food.com/recipe/{id}",
|
||||
}
|
||||
|
||||
|
||||
def _build_source_url(row: dict) -> str | None:
|
||||
"""Construct a canonical source URL from DB row fields, or None for generated recipes."""
|
||||
source = row.get("source") or ""
|
||||
external_id = row.get("external_id")
|
||||
template = _SOURCE_URL_BUILDERS.get(source)
|
||||
if not template or not external_id:
|
||||
return None
|
||||
try:
|
||||
return template.format(id=int(float(external_id)))
|
||||
except (ValueError, TypeError):
|
||||
return None
|
||||
|
||||
|
||||
# Method complexity classification patterns
|
||||
_EASY_METHODS = re.compile(
|
||||
r"\b(microwave|mix|stir|blend|toast|assemble|heat)\b", re.IGNORECASE
|
||||
|
|
@ -596,71 +375,6 @@ _INVOLVED_METHODS = re.compile(
|
|||
r"\b(braise|roast|knead|deep.?fry|fry|sauté|saute|bake|boil)\b", re.IGNORECASE
|
||||
)
|
||||
|
||||
# Hard day mode sort tier patterns
|
||||
_PREMADE_TITLE_RE = re.compile(
|
||||
r"\b(frozen|instant|microwave|ready.?made|pre.?made|packaged|heat.?and.?eat)\b",
|
||||
re.IGNORECASE,
|
||||
)
|
||||
_HEAT_ONLY_RE = re.compile(r"\b(microwave|heat|warm|thaw)\b", re.IGNORECASE)
|
||||
|
||||
|
||||
def _hard_day_sort_tier(
|
||||
title: str,
|
||||
ingredient_names: list[str],
|
||||
directions: list[str],
|
||||
) -> int:
|
||||
"""Return a sort priority tier for hard day mode.
|
||||
|
||||
0 — premade / heat-only (frozen dinner, quesadilla, microwave meal)
|
||||
1 — super simple (≤3 ingredients, easy method)
|
||||
2 — easy/moderate (everything else that passed the 'involved' filter)
|
||||
|
||||
Lower tier surfaces first.
|
||||
"""
|
||||
dir_text = " ".join(directions)
|
||||
n_ingredients = len(ingredient_names)
|
||||
n_steps = len(directions)
|
||||
|
||||
# Tier 0: title signals premade, OR very few ingredients with heat-only steps
|
||||
if _PREMADE_TITLE_RE.search(title):
|
||||
return 0
|
||||
if n_ingredients <= 2 and n_steps <= 3 and _HEAT_ONLY_RE.search(dir_text):
|
||||
return 0
|
||||
|
||||
# Tier 1: ≤3 ingredients with any easy method (quesadilla, cheese toast, etc.)
|
||||
if n_ingredients <= 3 and _EASY_METHODS.search(dir_text):
|
||||
return 1
|
||||
|
||||
return 2
|
||||
|
||||
|
||||
def _estimate_time_min(directions: list[str], complexity: str) -> int:
|
||||
"""Rough cooking time estimate from step count and method complexity.
|
||||
|
||||
Not precise — intended for filtering and display hints only.
|
||||
"""
|
||||
steps = len(directions)
|
||||
if complexity == "easy":
|
||||
return max(5, 10 + steps * 3)
|
||||
if complexity == "involved":
|
||||
return max(20, 30 + steps * 6)
|
||||
return max(10, 20 + steps * 4) # moderate
|
||||
|
||||
|
||||
def _within_time(directions: list[str], max_total_min: int) -> bool:
    """Return True when parsed total time (active + passive) fits max_total_min.

    Graceful degradation — never hide a recipe for lack of data:
      - empty directions        -> True (nothing to parse)
      - total_min == 0          -> True (no time signals found)
    """
    if not directions:
        return True
    total = parse_time_effort(directions).total_min
    # 0 means the parser found no time signals at all — treat as unparseable.
    return total == 0 or total <= max_total_min
|
||||
|
||||
|
||||
def _classify_method_complexity(
|
||||
directions: list[str],
|
||||
|
|
@ -721,8 +435,7 @@ class RecipeEngine:
|
|||
|
||||
profiles = self._classifier.classify_batch(req.pantry_items)
|
||||
gaps = self._classifier.identify_gaps(profiles)
|
||||
pantry_set = _expand_pantry_set(req.pantry_items, req.secondary_pantry_items or None)
|
||||
exclude_set = _expand_pantry_set(req.exclude_ingredients) if req.exclude_ingredients else set()
|
||||
pantry_set = _expand_pantry_set(req.pantry_items)
|
||||
|
||||
if req.level >= 3:
|
||||
from app.services.recipe.llm_recipe import LLMRecipeGenerator
|
||||
|
|
@ -730,11 +443,6 @@ class RecipeEngine:
|
|||
return gen.generate(req, profiles, gaps)
|
||||
|
||||
# Level 1 & 2: deterministic path
|
||||
# L1 ("Use What I Have") applies strict quality gates:
|
||||
# - exclude_generic: filter catch-all recipes at the DB level
|
||||
# - effective_max_missing: default to 2 when user hasn't set a cap
|
||||
# - match ratio: require ≥60% ingredient coverage to avoid low-signal results
|
||||
_l1 = req.level == 1 and not req.shopping_mode
|
||||
nf = req.nutrition_filters
|
||||
rows = self._store.search_recipes_by_ingredients(
|
||||
req.pantry_items,
|
||||
|
|
@ -745,22 +453,8 @@ class RecipeEngine:
|
|||
max_carbs_g=nf.max_carbs_g,
|
||||
max_sodium_mg=nf.max_sodium_mg,
|
||||
excluded_ids=req.excluded_ids or [],
|
||||
exclude_generic=_l1,
|
||||
)
|
||||
|
||||
# L1 strict defaults: cap missing ingredients and require a minimum ratio.
|
||||
_L1_MAX_MISSING_DEFAULT = 2
|
||||
_L1_MIN_MATCH_RATIO = 0.6
|
||||
effective_max_missing = req.max_missing
|
||||
if _l1 and effective_max_missing is None:
|
||||
effective_max_missing = _L1_MAX_MISSING_DEFAULT
|
||||
|
||||
# Load sensory preferences -- applied as silent post-score filter
|
||||
_sensory_prefs_json = self._store.get_setting("sensory_preferences")
|
||||
_sensory_exclude = build_sensory_exclude(_sensory_prefs_json)
|
||||
|
||||
suggestions = []
|
||||
hard_day_tier_map: dict[int, int] = {} # recipe_id -> tier when hard_day_mode
|
||||
|
||||
for row in rows:
|
||||
ingredient_names: list[str] = row.get("ingredient_names") or []
|
||||
|
|
@ -770,34 +464,19 @@ class RecipeEngine:
|
|||
except Exception:
|
||||
ingredient_names = []
|
||||
|
||||
# Skip recipes that require any ingredient the user has excluded.
|
||||
if exclude_set and any(_ingredient_in_pantry(n, exclude_set) for n in ingredient_names):
|
||||
continue
|
||||
|
||||
# Sensory filter -- silent exclusion of recipes exceeding user tolerance
|
||||
if not _sensory_exclude.is_empty():
|
||||
if not passes_sensory_filter(row.get("sensory_tags"), _sensory_exclude):
|
||||
continue
|
||||
|
||||
# Compute missing ingredients, detecting pantry coverage first.
|
||||
# When covered, collect any prep-state annotations (e.g. "melted butter"
|
||||
# → note "Melt the butter before starting.") to surface separately.
|
||||
swap_candidates: list[SwapCandidate] = []
|
||||
matched: list[str] = []
|
||||
missing: list[str] = []
|
||||
prep_note_set: set[str] = set()
|
||||
for n in ingredient_names:
|
||||
if _ingredient_in_pantry(n, pantry_set):
|
||||
matched.append(_strip_quantity(n))
|
||||
note = _prep_note_for(n)
|
||||
if note:
|
||||
prep_note_set.add(note)
|
||||
continue
|
||||
swap_item = _pantry_creative_swap(n, pantry_set)
|
||||
# L2: also try functional-category swap for single-token ingredients
|
||||
# that _pantry_creative_swap can't match (requires ≥2 shared tokens).
|
||||
if swap_item is None and req.level == 2:
|
||||
swap_item = _category_swap(n, pantry_set)
|
||||
if swap_item:
|
||||
swap_candidates.append(SwapCandidate(
|
||||
original_name=n,
|
||||
|
|
@ -809,55 +488,20 @@ class RecipeEngine:
|
|||
else:
|
||||
missing.append(n)
|
||||
|
||||
# Filter by max_missing — skipped in shopping mode (user is willing to buy)
|
||||
if not req.shopping_mode and effective_max_missing is not None and len(missing) > effective_max_missing:
|
||||
# Filter by max_missing (pantry swaps don't count as missing)
|
||||
if req.max_missing is not None and len(missing) > req.max_missing:
|
||||
continue
|
||||
|
||||
# "Can make now" toggle: drop any recipe that still has missing ingredients
|
||||
# after swaps are applied. Swapped items count as covered.
|
||||
if req.pantry_match_only and missing:
|
||||
continue
|
||||
|
||||
# L1 match ratio gate: drop results where less than 60% of the recipe's
|
||||
# ingredients are in the pantry. Prevents low-signal results like a
|
||||
# 10-ingredient recipe matching on only one common item.
|
||||
if _l1 and ingredient_names:
|
||||
match_ratio = len(matched) / len(ingredient_names)
|
||||
if match_ratio < _L1_MIN_MATCH_RATIO:
|
||||
continue
|
||||
|
||||
# Parse directions — needed for complexity, hard_day_mode, and time estimate.
|
||||
# Filter by hard_day_mode
|
||||
if req.hard_day_mode:
|
||||
directions: list[str] = row.get("directions") or []
|
||||
if isinstance(directions, str):
|
||||
try:
|
||||
directions = json.loads(directions)
|
||||
except Exception:
|
||||
directions = [directions]
|
||||
|
||||
# Compute complexity for every suggestion (used for badge + filter).
|
||||
row_complexity = _classify_method_complexity(directions, available_equipment)
|
||||
row_time_min = _estimate_time_min(directions, row_complexity)
|
||||
|
||||
# Filter and tier-rank by hard_day_mode
|
||||
if req.hard_day_mode:
|
||||
if row_complexity == "involved":
|
||||
continue
|
||||
hard_day_tier_map[row["id"]] = _hard_day_sort_tier(
|
||||
title=row.get("title", ""),
|
||||
ingredient_names=ingredient_names,
|
||||
directions=directions,
|
||||
)
|
||||
|
||||
# Complexity filter (#58)
|
||||
if req.complexity_filter and row_complexity != req.complexity_filter:
|
||||
continue
|
||||
|
||||
# Max time filter (#58)
|
||||
if req.max_time_min is not None and row_time_min > req.max_time_min:
|
||||
continue
|
||||
|
||||
# Total time filter (kiwi#52) — uses parsed time from directions
|
||||
if req.max_total_min is not None and not _within_time(directions, req.max_total_min):
|
||||
complexity = _classify_method_complexity(directions, available_equipment)
|
||||
if complexity == "involved":
|
||||
continue
|
||||
|
||||
# Level 2: also add dietary constraint swaps from substitution_pairs
|
||||
|
|
@ -903,37 +547,20 @@ class RecipeEngine:
|
|||
match_count=int(row.get("match_count") or 0),
|
||||
element_coverage=coverage_raw,
|
||||
swap_candidates=swap_candidates,
|
||||
matched_ingredients=matched,
|
||||
missing_ingredients=missing,
|
||||
prep_notes=sorted(prep_note_set),
|
||||
level=req.level,
|
||||
nutrition=nutrition if has_nutrition else None,
|
||||
source_url=_build_source_url(row),
|
||||
complexity=row_complexity,
|
||||
estimated_time_min=row_time_min,
|
||||
))
|
||||
|
||||
# Sort corpus results.
|
||||
# Paid+ tier: cross-encoder reranker orders by full pantry + dietary fit.
|
||||
# Free tier (or reranker failure): overlap sort with hard_day_mode tier grouping.
|
||||
reranked = rerank_suggestions(req, suggestions)
|
||||
if reranked is not None:
|
||||
# Reranker provided relevance order. In hard_day_mode, still respect
|
||||
# tier grouping as primary sort; reranker order applies within each tier.
|
||||
if req.hard_day_mode and hard_day_tier_map:
|
||||
suggestions = sorted(
|
||||
reranked,
|
||||
key=lambda s: hard_day_tier_map.get(s.id, 1),
|
||||
# Prepend assembly-dish templates (burrito, stir fry, omelette, etc.)
|
||||
# These fire regardless of corpus coverage — any pantry can make a burrito.
|
||||
assembly = match_assembly_templates(
|
||||
pantry_items=req.pantry_items,
|
||||
pantry_set=pantry_set,
|
||||
excluded_ids=req.excluded_ids or [],
|
||||
)
|
||||
else:
|
||||
suggestions = reranked
|
||||
elif req.hard_day_mode and hard_day_tier_map:
|
||||
suggestions = sorted(
|
||||
suggestions,
|
||||
key=lambda s: (hard_day_tier_map.get(s.id, 1), -s.match_count),
|
||||
)
|
||||
else:
|
||||
suggestions = sorted(suggestions, key=lambda s: -s.match_count)
|
||||
suggestions = assembly + suggestions
|
||||
|
||||
# Build grocery list — deduplicated union of all missing ingredients
|
||||
seen: set[str] = set()
|
||||
|
|
|
|||
|
|
@ -1,175 +0,0 @@
|
|||
"""
|
||||
Reranker integration for recipe suggestions.
|
||||
|
||||
Wraps circuitforge_core.reranker to score recipe candidates against a
|
||||
natural-language query built from the user's pantry, constraints, and
|
||||
preferences. Paid+ tier only; free tier returns None (caller keeps
|
||||
existing sort). All exceptions are caught and logged — the reranker
|
||||
must never break recipe suggestions.
|
||||
|
||||
Environment:
|
||||
CF_RERANKER_MOCK=1 — force mock backend (tests, no model required)
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from dataclasses import dataclass, field
|
||||
|
||||
from app.models.schemas.recipe import RecipeRequest, RecipeSuggestion
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
# Tiers that get reranker access.
|
||||
_RERANKER_TIERS: frozenset[str] = frozenset({"paid", "premium", "local"})
|
||||
|
||||
# Minimum candidates worth reranking — below this the cross-encoder
|
||||
# overhead is not justified and the overlap sort is fine.
|
||||
_MIN_CANDIDATES: int = 3
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class RerankerInput:
    """Immutable bundle handed to the cross-encoder reranker.

    suggestion_ids runs parallel to candidates so scored results can be
    mapped back onto their originating RecipeSuggestion objects.
    """

    query: str                 # natural-language query built from the request
    candidates: list[str]      # one candidate string per suggestion
    suggestion_ids: list[int]  # parallel to candidates, for re-mapping
|
||||
|
||||
|
||||
# ── Query builder ─────────────────────────────────────────────────────────────
|
||||
|
||||
def build_query(req: "RecipeRequest") -> str:
    """Build a natural-language query string from the recipe request.

    Encodes the user's full context (pantry, exclusions, allergies,
    dietary constraints, category/style, effort preferences, expiring
    items) so the cross-encoder can score relevance, dietary fit, and
    expiry urgency in one pass. Empty segments are skipped; with no
    segments at all the query degrades to the bare "Recipe." string.
    """
    segments: list[str] = []
    add = segments.append

    if req.pantry_items:
        add("Recipe using: " + ", ".join(req.pantry_items))
    if req.exclude_ingredients:
        add("Avoid: " + ", ".join(req.exclude_ingredients))
    if req.allergies:
        add("Allergies: " + ", ".join(req.allergies))
    if req.constraints:
        add("Dietary: " + ", ".join(req.constraints))
    if req.category:
        add(f"Category: {req.category}")
    if req.style_id:
        add(f"Style: {req.style_id}")
    if req.complexity_filter:
        add(f"Prefer: {req.complexity_filter}")
    if req.hard_day_mode:
        add("Prefer: easy, minimal effort")

    # Secondary pantry items carry a state label (e.g. "stale", "overripe")
    # that steers the reranker toward recipes suited to those states.
    if req.secondary_pantry_items:
        labelled = [
            f"{name} ({state})"
            for name, state in req.secondary_pantry_items.items()
        ]
        add("Use soon: " + ", ".join(labelled))
    elif req.expiry_first:
        add("Prefer: recipes that use expiring items first")

    if not segments:
        return "Recipe."
    return ". ".join(segments) + "."
|
||||
|
||||
|
||||
# ── Candidate builder ─────────────────────────────────────────────────────────
|
||||
|
||||
def build_candidate_string(suggestion: "RecipeSuggestion") -> str:
    """Build the reranker candidate string for one recipe suggestion.

    Format: "{title}. Ingredients: {comma-joined ingredients}", with
    matched ingredients listed before missing ones. With no ingredients
    at all the bare title is returned. Directions are deliberately
    excluded to stay within BGE's 512-token window.
    """
    all_ingredients = [
        *suggestion.matched_ingredients,
        *suggestion.missing_ingredients,
    ]
    if not all_ingredients:
        return suggestion.title
    return f"{suggestion.title}. Ingredients: {', '.join(all_ingredients)}"
|
||||
|
||||
|
||||
# ── Input assembler ───────────────────────────────────────────────────────────
|
||||
|
||||
def build_reranker_input(
    req: "RecipeRequest",
    suggestions: "list[RecipeSuggestion]",
) -> RerankerInput:
    """Assemble the query and parallel candidate/id lists for the reranker."""
    return RerankerInput(
        query=build_query(req),
        candidates=[build_candidate_string(s) for s in suggestions],
        suggestion_ids=[s.id for s in suggestions],
    )
|
||||
|
||||
|
||||
# ── cf-core seam (isolated for monkeypatching in tests) ──────────────────────
|
||||
|
||||
def _do_rerank(query: str, candidates: list[str], top_n: int = 0):
    """Seam around cf-core's rerank(); kept separate so tests can monkeypatch it."""
    from circuitforge_core.reranker import rerank as _cf_rerank

    return _cf_rerank(query, candidates, top_n=top_n)
|
||||
|
||||
|
||||
# ── Public entry point ────────────────────────────────────────────────────────
|
||||
|
||||
def rerank_suggestions(
    req: "RecipeRequest",
    suggestions: "list[RecipeSuggestion]",
) -> "list[RecipeSuggestion] | None":
    """Reorder suggestions with the cf-core cross-encoder.

    Returns a new list with rerank_score populated on copies (the input
    suggestions are never mutated), or None — meaning "keep the existing
    sort order" — when:
      - the tier is not paid+ (free tier keeps the overlap sort),
      - there are fewer than _MIN_CANDIDATES suggestions, or
      - anything at all raises (reranking must never break suggest).
    """
    if req.tier not in _RERANKER_TIERS:
        return None
    if len(suggestions) < _MIN_CANDIDATES:
        return None

    try:
        rinput = build_reranker_input(req, suggestions)
        scored = _do_rerank(rinput.query, rinput.candidates, top_n=0)

        # build_candidate_string is deterministic, so the candidate text
        # itself serves as the key for mapping results back to suggestions.
        by_candidate = {build_candidate_string(s): s for s in suggestions}

        reordered = []
        for item in scored:
            match = by_candidate.get(item.candidate)
            if match is None:
                continue
            reordered.append(
                match.model_copy(
                    update={"rerank_score": round(float(item.score), 4)}
                )
            )

        # A lossy mapping means something is off — fall back rather than
        # silently dropping recipes from the result set.
        if len(reordered) < len(suggestions):
            log.warning(
                "Reranker lost %d/%d suggestions during mapping, falling back",
                len(suggestions) - len(reordered),
                len(suggestions),
            )
            return None

        return reordered

    except Exception:
        log.exception("Reranker failed, falling back to overlap sort")
        return None
|
||||
|
|
@ -1,133 +0,0 @@
|
|||
"""
|
||||
Sensory filter dataclass and helpers.
|
||||
|
||||
SensoryExclude bridges user preferences (from user_settings) to the
|
||||
store browse methods and recipe engine suggest flow.
|
||||
|
||||
Recipes with sensory_tags = '{}' (untagged) pass ALL filters --
|
||||
graceful degradation when tag_sensory_profiles.py has not run.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
from dataclasses import dataclass, field
|
||||
|
||||
# Ordered spectrums: later entries are stronger. Index comparisons below
# rely on this ordering.
_SMELL_LEVELS: tuple[str, ...] = ("mild", "aromatic", "pungent", "fermented")
_NOISE_LEVELS: tuple[str, ...] = ("quiet", "moderate", "loud", "very_loud")


@dataclass(frozen=True)
class SensoryExclude:
    """Derived filter criteria from user sensory preferences.

    textures: texture tags to exclude (empty tuple = no texture filter)
    smell_above: if set, exclude recipes whose smell level sits strictly
        above this level in the smell spectrum
    noise_above: if set, exclude recipes whose noise level sits strictly
        above this level in the noise spectrum
    """

    textures: tuple = ()
    smell_above: "str | None" = None
    noise_above: "str | None" = None

    @classmethod
    def empty(cls) -> "SensoryExclude":
        """No filtering — pass-through for users with no preferences set."""
        return cls()

    def is_empty(self) -> bool:
        """True when applying this filter would exclude nothing."""
        if self.textures:
            return False
        return self.smell_above is None and self.noise_above is None


def build_sensory_exclude(prefs_json: "str | None") -> SensoryExclude:
    """Parse the 'sensory_preferences' user_settings value into a SensoryExclude.

    Expected JSON shape:
        {
          "avoid_textures": ["mushy", "slimy"],
          "max_smell": "pungent",
          "max_noise": "loud"
        }

    Returns SensoryExclude.empty() on missing, null, or malformed input;
    out-of-spectrum smell/noise caps are dropped individually.
    """
    if not prefs_json:
        return SensoryExclude.empty()
    try:
        parsed = json.loads(prefs_json)
    except (json.JSONDecodeError, TypeError):
        return SensoryExclude.empty()
    if not isinstance(parsed, dict):
        return SensoryExclude.empty()

    textures = tuple(
        t for t in (parsed.get("avoid_textures") or []) if isinstance(t, str)
    )

    def _valid_cap(key: str, spectrum: tuple) -> "str | None":
        # Falsy or unrecognised values collapse to "no cap".
        value = parsed.get(key) or None
        return value if value in spectrum else None

    return SensoryExclude(
        textures=textures,
        smell_above=_valid_cap("max_smell", _SMELL_LEVELS),
        noise_above=_valid_cap("max_noise", _NOISE_LEVELS),
    )


def passes_sensory_filter(
    sensory_tags_raw: "str | dict | None",
    exclude: SensoryExclude,
) -> bool:
    """Return True when the recipe passes the sensory exclude criteria.

    sensory_tags_raw: the sensory_tags column value (JSON string or
        already-parsed dict).
    exclude: derived filter criteria.

    Untagged recipes (None, '{}', empty dict, or unparseable JSON) always
    pass — graceful degradation when tagging has not run. An empty
    SensoryExclude also always passes.
    """
    if exclude.is_empty() or sensory_tags_raw is None:
        return True

    if isinstance(sensory_tags_raw, str):
        try:
            tags = json.loads(sensory_tags_raw)
        except (json.JSONDecodeError, TypeError):
            return True  # unparseable tags — don't hide the recipe
    else:
        tags = sensory_tags_raw

    if not tags:
        return True

    if exclude.textures:
        recipe_textures = tags.get("textures") or []
        if any(t in exclude.textures for t in recipe_textures):
            return False

    # Smell and noise use the same "strictly above the cap" rule on their
    # respective ordered spectrums; unknown recipe levels are ignored.
    for cap, key, spectrum in (
        (exclude.smell_above, "smell", _SMELL_LEVELS),
        (exclude.noise_above, "noise", _NOISE_LEVELS),
    ):
        if cap is None:
            continue
        level = tags.get(key)
        if level in spectrum and spectrum.index(level) > spectrum.index(cap):
            return False

    return True
|
||||
|
|
@ -55,12 +55,11 @@ class SubstitutionEngine:
|
|||
ingredient_name: str,
|
||||
constraint: str,
|
||||
) -> list[SubstitutionSwap]:
|
||||
c = self._store._cp
|
||||
rows = self._store._fetch_all(f"""
|
||||
rows = self._store._fetch_all("""
|
||||
SELECT substitute_name, constraint_label,
|
||||
fat_delta, moisture_delta, glutamate_delta, protein_delta,
|
||||
occurrence_count, compensation_hints
|
||||
FROM {c}substitution_pairs
|
||||
FROM substitution_pairs
|
||||
WHERE original_name = ? AND constraint_label = ?
|
||||
ORDER BY occurrence_count DESC
|
||||
""", (ingredient_name.lower(), constraint))
|
||||
|
|
|
|||
|
|
@ -1,325 +0,0 @@
|
|||
"""
|
||||
Recipe tag inference engine.
|
||||
|
||||
Derives normalized tags from a recipe's title, ingredient names, existing corpus
|
||||
tags (category + keywords), enriched ingredient profile data, and optional
|
||||
nutrition data.
|
||||
|
||||
Tags are organized into five namespaces:
|
||||
cuisine:* -- cuisine/region classification
|
||||
dietary:* -- dietary restriction / nutrition profile
|
||||
flavor:* -- flavor profile (spicy, smoky, sweet, etc.)
|
||||
time:* -- effort / time signals
|
||||
meal:* -- meal type
|
||||
can_be:* -- achievable with substitutions (e.g. can_be:Gluten-Free)
|
||||
|
||||
Output is a flat sorted list of strings, e.g.:
|
||||
["can_be:Gluten-Free", "cuisine:Italian", "dietary:Low-Carb",
|
||||
"flavor:Savory", "flavor:Umami", "time:Quick"]
|
||||
|
||||
These populate recipes.inferred_tags and are FTS5-indexed so browse domain
|
||||
queries find recipes the food.com corpus tags alone would miss.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Text-signal tables
|
||||
# (tag, [case-insensitive substrings to search in combined title+ingredient text])
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
_CUISINE_SIGNALS: list[tuple[str, list[str]]] = [
|
||||
("cuisine:Japanese", ["miso", "dashi", "ramen", "sushi", "teriyaki", "sake", "mirin",
|
||||
"wasabi", "panko", "edamame", "tonkatsu", "yakitori", "ponzu"]),
|
||||
("cuisine:Korean", ["gochujang", "kimchi", "doenjang", "gochugaru",
|
||||
"bulgogi", "bibimbap", "japchae"]),
|
||||
("cuisine:Thai", ["fish sauce", "lemongrass", "galangal", "pad thai", "thai basil",
|
||||
"kaffir lime", "tom yum", "green curry", "red curry", "nam pla"]),
|
||||
("cuisine:Chinese", ["hoisin", "oyster sauce", "five spice", "bok choy", "chow mein",
|
||||
"dumpling", "wonton", "mapo", "char siu", "sichuan"]),
|
||||
("cuisine:Vietnamese", ["pho", "banh mi", "nuoc cham", "rice paper", "vietnamese"]),
|
||||
("cuisine:Indian", ["garam masala", "turmeric", "cardamom", "fenugreek", "paneer",
|
||||
"tikka", "masala", "biryani", "dal", "naan", "tandoori",
|
||||
"curry leaf", "tamarind", "chutney"]),
|
||||
("cuisine:Middle Eastern", ["tahini", "harissa", "za'atar", "sumac", "baharat", "rose water",
|
||||
"pomegranate molasses", "freekeh", "fattoush", "shakshuka"]),
|
||||
("cuisine:Greek", ["feta", "tzatziki", "moussaka", "spanakopita", "orzo",
|
||||
"kalamata", "gyro", "souvlaki", "dolma"]),
|
||||
("cuisine:Mediterranean", ["hummus", "pita", "couscous", "preserved lemon"]),
|
||||
("cuisine:Italian", ["pasta", "pizza", "risotto", "lasagna", "carbonara", "gnocchi",
|
||||
"parmesan", "mozzarella", "ricotta", "prosciutto", "pancetta",
|
||||
"arancini", "osso buco", "tiramisu", "pesto", "bolognese",
|
||||
"cannoli", "polenta", "bruschetta", "focaccia"]),
|
||||
("cuisine:French", ["croissant", "quiche", "crepe", "coq au vin",
|
||||
"ratatouille", "bearnaise", "hollandaise", "bouillabaisse",
|
||||
"herbes de provence", "dijon", "gruyere", "brie", "cassoulet"]),
|
||||
("cuisine:Spanish", ["paella", "chorizo", "gazpacho", "tapas", "patatas bravas",
|
||||
"sofrito", "manchego", "albondigas"]),
|
||||
("cuisine:German", ["sauerkraut", "bratwurst", "schnitzel", "pretzel", "strudel",
|
||||
"spaetzle", "sauerbraten"]),
|
||||
("cuisine:Mexican", ["taco", "burrito", "enchilada", "salsa", "guacamole", "chipotle",
|
||||
"queso", "tamale", "mole", "jalapeno", "tortilla", "carnitas",
|
||||
"chile verde", "posole", "tostada", "quesadilla"]),
|
||||
("cuisine:Latin American", ["plantain", "yuca", "chimichurri", "ceviche", "adobo", "empanada"]),
|
||||
("cuisine:American", ["bbq sauce", "buffalo sauce", "ranch dressing", "coleslaw",
|
||||
"cornbread", "mac and cheese", "brisket", "cheeseburger"]),
|
||||
("cuisine:Southern", ["collard greens", "black-eyed peas", "okra", "grits", "catfish",
|
||||
"hush puppies", "pecan pie"]),
|
||||
("cuisine:Cajun", ["cajun", "creole", "gumbo", "jambalaya", "andouille", "etouffee"]),
|
||||
("cuisine:African", ["injera", "berbere", "jollof", "suya", "egusi", "fufu", "tagine"]),
|
||||
("cuisine:Caribbean", ["jerk", "scotch bonnet", "callaloo", "ackee"]),
|
||||
# BBQ detection: match on title terms and key ingredients; these rarely appear
|
||||
# in food.com's own keyword/category taxonomy so we derive the tag from content.
|
||||
("cuisine:BBQ", ["brisket", "pulled pork", "spare ribs", "baby back ribs",
|
||||
"baby back", "burnt ends", "pit smoked", "smoke ring",
|
||||
"low and slow", "hickory", "mesquite", "liquid smoke",
|
||||
"bbq brisket", "smoked brisket", "barbecue brisket",
|
||||
"carolina bbq", "texas bbq", "kansas city bbq",
|
||||
"memphis bbq", "smoked ribs", "smoked pulled pork",
|
||||
"dry rub ribs", "wet rub ribs", "beer can chicken smoked"]),
|
||||
]
|
||||
|
||||
_DIETARY_SIGNALS: list[tuple[str, list[str]]] = [
|
||||
("dietary:Vegan", ["vegan", "plant-based", "plant based"]),
|
||||
("dietary:Vegetarian", ["vegetarian", "meatless"]),
|
||||
("dietary:Gluten-Free", ["gluten-free", "gluten free", "celiac"]),
|
||||
("dietary:Dairy-Free", ["dairy-free", "dairy free", "lactose free", "non-dairy"]),
|
||||
("dietary:Low-Carb", ["low-carb", "low carb", "keto", "ketogenic", "very low carbs"]),
|
||||
("dietary:High-Protein", ["high protein", "high-protein"]),
|
||||
("dietary:Low-Fat", ["low-fat", "low fat", "fat-free", "reduced fat"]),
|
||||
("dietary:Paleo", ["paleo", "whole30"]),
|
||||
("dietary:Nut-Free", ["nut-free", "nut free", "peanut free"]),
|
||||
("dietary:Egg-Free", ["egg-free", "egg free"]),
|
||||
("dietary:Low-Sodium", ["low sodium", "no salt"]),
|
||||
("dietary:Healthy", ["healthy", "low cholesterol", "heart healthy", "wholesome"]),
|
||||
]
|
||||
|
||||
_FLAVOR_SIGNALS: list[tuple[str, list[str]]] = [
|
||||
("flavor:Spicy", ["jalapeno", "habanero", "ghost pepper", "sriracha",
|
||||
"chili flake", "red pepper flake", "cayenne", "hot sauce",
|
||||
"gochujang", "harissa", "scotch bonnet", "szechuan pepper", "spicy"]),
|
||||
("flavor:Smoky", ["smoked", "liquid smoke", "smoked paprika",
|
||||
"bbq sauce", "barbecue", "hickory", "mesquite"]),
|
||||
("flavor:Sweet", ["honey", "maple syrup", "brown sugar", "caramel", "chocolate",
|
||||
"vanilla", "condensed milk", "molasses", "agave"]),
|
||||
("flavor:Savory", ["soy sauce", "fish sauce", "miso", "worcestershire", "anchovy",
|
||||
"parmesan", "blue cheese", "bone broth"]),
|
||||
("flavor:Tangy", ["lemon juice", "lime juice", "vinegar", "balsamic", "buttermilk",
|
||||
"sour cream", "fermented", "pickled", "tamarind", "sumac"]),
|
||||
("flavor:Herby", ["fresh basil", "fresh cilantro", "fresh dill", "fresh mint",
|
||||
"fresh tarragon", "fresh thyme", "herbes de provence"]),
|
||||
("flavor:Rich", ["heavy cream", "creme fraiche", "mascarpone", "double cream",
|
||||
"ghee", "coconut cream", "cream cheese"]),
|
||||
("flavor:Umami", ["mushroom", "nutritional yeast", "tomato paste",
|
||||
"parmesan rind", "bonito", "kombu"]),
|
||||
]
|
||||
|
||||
_TIME_SIGNALS: list[tuple[str, list[str]]] = [
|
||||
("time:Quick", ["< 15 mins", "< 30 mins", "weeknight", "easy"]),
|
||||
("time:Under 1 Hour", ["< 60 mins"]),
|
||||
("time:Make-Ahead", ["freezer", "overnight", "refrigerator", "make-ahead", "make ahead"]),
|
||||
("time:Slow Cook", ["slow cooker", "crockpot", "< 4 hours", "braise"]),
|
||||
]
|
||||
|
||||
_MAIN_INGREDIENT_SIGNALS: list[tuple[str, list[str]]] = [
|
||||
("main:Chicken", ["chicken", "poultry", "turkey"]),
|
||||
("main:Beef", ["beef", "ground beef", "steak", "brisket", "pot roast"]),
|
||||
("main:Pork", ["pork", "bacon", "ham", "sausage", "prosciutto"]),
|
||||
("main:Fish", ["salmon", "tuna", "tilapia", "cod", "halibut", "shrimp", "seafood", "fish"]),
|
||||
("main:Pasta", ["pasta", "noodle", "spaghetti", "penne", "fettuccine", "linguine"]),
|
||||
("main:Vegetables", ["broccoli", "cauliflower", "zucchini", "eggplant", "carrot",
|
||||
"vegetable", "veggie"]),
|
||||
("main:Eggs", ["egg", "frittata", "omelette", "omelet", "quiche"]),
|
||||
("main:Legumes", ["bean", "lentil", "chickpea", "tofu", "tempeh", "edamame"]),
|
||||
("main:Grains", ["rice", "quinoa", "barley", "farro", "oat", "grain"]),
|
||||
("main:Cheese", ["cheddar", "mozzarella", "parmesan", "ricotta", "brie",
|
||||
"cheese"]),
|
||||
]
|
||||
|
||||
# food.com corpus tag -> normalized tags
|
||||
_CORPUS_TAG_MAP: dict[str, list[str]] = {
|
||||
"european": ["cuisine:Italian", "cuisine:French", "cuisine:German",
|
||||
"cuisine:Spanish"],
|
||||
"asian": ["cuisine:Chinese", "cuisine:Japanese", "cuisine:Thai",
|
||||
"cuisine:Korean", "cuisine:Vietnamese"],
|
||||
"chinese": ["cuisine:Chinese"],
|
||||
"japanese": ["cuisine:Japanese"],
|
||||
"thai": ["cuisine:Thai"],
|
||||
"vietnamese": ["cuisine:Vietnamese"],
|
||||
"indian": ["cuisine:Indian"],
|
||||
"greek": ["cuisine:Greek"],
|
||||
"mexican": ["cuisine:Mexican"],
|
||||
"african": ["cuisine:African"],
|
||||
"caribbean": ["cuisine:Caribbean"],
|
||||
"vegan": ["dietary:Vegan", "dietary:Vegetarian"],
|
||||
"vegetarian": ["dietary:Vegetarian"],
|
||||
"healthy": ["dietary:Healthy"],
|
||||
"low cholesterol": ["dietary:Healthy"],
|
||||
"very low carbs": ["dietary:Low-Carb"],
|
||||
"high in...": ["dietary:High-Protein"],
|
||||
"lactose free": ["dietary:Dairy-Free"],
|
||||
"egg free": ["dietary:Egg-Free"],
|
||||
"< 15 mins": ["time:Quick"],
|
||||
"< 30 mins": ["time:Quick"],
|
||||
"< 60 mins": ["time:Under 1 Hour"],
|
||||
"< 4 hours": ["time:Slow Cook"],
|
||||
"weeknight": ["time:Quick"],
|
||||
"freezer": ["time:Make-Ahead"],
|
||||
"dessert": ["meal:Dessert"],
|
||||
"breakfast": ["meal:Breakfast"],
|
||||
"lunch/snacks": ["meal:Lunch", "meal:Snack"],
|
||||
"beverages": ["meal:Beverage"],
|
||||
"cookie & brownie": ["meal:Dessert"],
|
||||
"breads": ["meal:Bread"],
|
||||
}
|
||||
|
||||
# ingredient_profiles.elements value -> flavor tag
|
||||
_ELEMENT_TO_FLAVOR: dict[str, str] = {
|
||||
"Aroma": "flavor:Herby",
|
||||
"Richness": "flavor:Rich",
|
||||
"Structure": "", # no flavor tag
|
||||
"Binding": "",
|
||||
"Crust": "flavor:Smoky",
|
||||
"Lift": "",
|
||||
"Emulsion": "flavor:Rich",
|
||||
"Acid": "flavor:Tangy",
|
||||
}
|
||||
|
||||
|
||||
def _build_text(title: str, ingredient_names: list[str]) -> str:
|
||||
parts = [title.lower()]
|
||||
parts.extend(i.lower() for i in ingredient_names)
|
||||
return " ".join(parts)
|
||||
|
||||
|
||||
def _match_signals(text: str, table: list[tuple[str, list[str]]]) -> list[str]:
|
||||
return [tag for tag, pats in table if any(p in text for p in pats)]
|
||||
|
||||
|
||||
def infer_tags(
    title: str,
    ingredient_names: list[str],
    corpus_keywords: list[str],
    corpus_category: str = "",
    # Enriched ingredient profile signals (from ingredient_profiles cross-ref)
    element_coverage: dict[str, float] | None = None,
    fermented_count: int = 0,
    glutamate_total: float = 0.0,
    ph_min: float | None = None,
    available_sub_constraints: list[str] | None = None,
    # Nutrition data for macro-based tags
    calories: float | None = None,
    protein_g: float | None = None,
    fat_g: float | None = None,
    carbs_g: float | None = None,
    servings: float | None = None,
) -> list[str]:
    """
    Derive normalized tags for a recipe.

    Parameters
    ----------
    title, ingredient_names, corpus_keywords, corpus_category
        : Primary recipe data.
    element_coverage
        : Mapping of element name to coverage ratio, e.g. {"Aroma": 0.6}.
          Derived from ingredient_profiles (recipes.element_coverage).
    fermented_count
        : Number of fermented ingredients (ingredient_profiles.is_fermented).
    glutamate_total
        : Sum of glutamate_mg across profiled ingredients; high values signal umami.
    ph_min
        : Minimum ph_estimate across profiled ingredients; low values signal acidity.
    available_sub_constraints
        : Substitution constraint labels achievable for this recipe
          (e.g. ["gluten_free", "low_carb"]), from the substitution_pairs
          cross-ref. These become can_be:* tags.
    calories, protein_g, fat_g, carbs_g, servings
        : Nutrition data for macro-based dietary tags.

    Returns
    -------
    Sorted list of unique normalized tag strings.
    """
    tags: set[str] = set()

    # 1. Map corpus keywords (plus optional category) to normalized vocabulary.
    lookup_terms = [kw.lower() for kw in corpus_keywords]
    if corpus_category:
        lookup_terms.append(corpus_category.lower())
    for term in lookup_terms:
        tags.update(_CORPUS_TAG_MAP.get(term, []))

    # 2. Substring-signal matching over title + ingredient text.
    text = _build_text(title, ingredient_names)
    for table in (
        _CUISINE_SIGNALS,
        _DIETARY_SIGNALS,
        _FLAVOR_SIGNALS,
        _MAIN_INGREDIENT_SIGNALS,
    ):
        tags.update(_match_signals(text, table))

    # 3. Time signals, checked against corpus keywords and recipe text alike.
    corpus_text = " ".join(kw.lower() for kw in corpus_keywords)
    tags.update(_match_signals(corpus_text, _TIME_SIGNALS))
    tags.update(_match_signals(text, _TIME_SIGNALS))

    # 4. Enriched ingredient-profile signals.
    for element, share in (element_coverage or {}).items():
        # >20% of ingredients must carry the element before it tags a flavor;
        # elements mapped to "" carry no flavor tag.
        if share > 0.2 and _ELEMENT_TO_FLAVOR.get(element, ""):
            tags.add(_ELEMENT_TO_FLAVOR[element])

    if glutamate_total > 50:
        tags.add("flavor:Umami")

    # Fermentation or a sufficiently acidic ingredient both read as tangy.
    if fermented_count > 0 or (ph_min is not None and ph_min < 4.5):
        tags.add("flavor:Tangy")

    # 5. Achievable-via-substitution tags.
    sub_tag_by_label = {
        "gluten_free": "can_be:Gluten-Free",
        "low_calorie": "can_be:Low-Calorie",
        "low_carb": "can_be:Low-Carb",
        "vegan": "can_be:Vegan",
        "dairy_free": "can_be:Dairy-Free",
        "low_sodium": "can_be:Low-Sodium",
    }
    for label in available_sub_constraints or []:
        mapped = sub_tag_by_label.get(label)
        if mapped is not None:
            tags.add(mapped)

    # 6. Macro-based dietary tags (per-serving when a serving count is known).
    has_macro_data = any(
        v is not None for v in (protein_g, fat_g, carbs_g, calories)
    )
    if servings and servings > 0 and has_macro_data:
        per_serving_rules = (
            (protein_g, lambda v: v >= 20, "dietary:High-Protein"),
            (fat_g, lambda v: v <= 5, "dietary:Low-Fat"),
            (carbs_g, lambda v: v <= 10, "dietary:Low-Carb"),
            (calories, lambda v: v <= 250, "dietary:Light"),
        )
        for total, passes, tag in per_serving_rules:
            if total is not None and passes(total / servings):
                tags.add(tag)
    elif protein_g is not None and protein_g >= 20:
        # No serving count: fall back to whole-recipe protein only.
        tags.add("dietary:High-Protein")

    # 7. Vegan implies vegetarian.
    if "dietary:Vegan" in tags:
        tags.add("dietary:Vegetarian")

    return sorted(tags)
|
||||
|
|
@ -1,197 +0,0 @@
|
|||
"""
|
||||
Runtime parser for active/passive time split and equipment detection.
|
||||
|
||||
Operates over a list of direction strings. No I/O — pure Python functions.
|
||||
Sub-millisecond for up to 20 recipes (20 × ~10 steps each = 200 regex calls).
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import math
|
||||
import re
|
||||
from dataclasses import dataclass
|
||||
from typing import Final
|
||||
|
||||
# ── Passive step keywords (whole-word, case-insensitive) ──────────────────
|
||||
|
||||
# Keyword list for hands-off steps. Plain entries are literal words; the
# raw-string entries are regex fragments (multi-word phrases joined by \s+).
# NOTE(review): short entries like "set", "let", "cool", "rest" match broadly
# (e.g. "set aside") and may over-classify steps as passive — confirm intent.
_PASSIVE_PATTERNS: Final[list[str]] = [
    "simmer", "bake", "roast", "broil", "refrigerate", "marinate",
    "chill", "cool", "freeze", "rest", "stand", "set", "soak",
    "steep", "proof", "rise", "let", "wait", "overnight", "braise",
    r"slow\s+cook", r"pressure\s+cook",
]

# Pre-compiled as a single alternation — avoids re-compiling on every call.
# The \b anchors make every entry a whole-word match; IGNORECASE covers
# capitalized step text ("Bake for 20 minutes").
_PASSIVE_RE: re.Pattern[str] = re.compile(
    r"\b(?:" + "|".join(_PASSIVE_PATTERNS) + r")\b",
    re.IGNORECASE,
)
|
||||
|
||||
# ── Time extraction regex ─────────────────────────────────────────────────
|
||||
|
||||
# Two-branch pattern:
|
||||
# Branch A (groups 1-3): range "15-20 minutes", "15–20 min"
|
||||
# Branch B (groups 4-5): single "10 minutes", "2 hours", "30 sec"
|
||||
#
|
||||
# Separator characters: plain hyphen (-), en-dash (–), or literal "-to-"
|
||||
# Compiled once at import time; consumed by _extract_minutes via .search(),
# so only the first (leftmost) time mention in a step is used — a compound
# span such as "1 hour 30 minutes" contributes only its first component.
_TIME_RE: re.Pattern[str] = re.compile(
    # Branch A (groups 1-3): range, e.g. "15-20 minutes", "15–20 min", "15-to-20 min"
    r"(\d+)\s*(?:[-\u2013]|-to-)\s*(\d+)\s*(hour|hr|minute|min|second|sec)s?"
    r"|"
    # Branch B (groups 4-5): single value, e.g. "10 minutes", "2 hours", "30 sec"
    r"(\d+)\s*(hour|hr|minute|min|second|sec)s?",
    re.IGNORECASE,
)

# Upper bound applied to any single step's parsed minutes (sanity cap).
_MAX_MINUTES_PER_STEP: Final[int] = 480  # 8 hours sanity cap
|
||||
|
||||
# ── Equipment detection (keyword → label, in detection priority order) ────
|
||||
|
||||
# Each rule pairs a pre-compiled whole-word pattern with a display label.
# _detect_equipment walks this list top-to-bottom, so list order fixes the
# order of labels in the output; each label is emitted at most once.
_EQUIPMENT_RULES: Final[list[tuple[re.Pattern[str], str]]] = [
    (re.compile(r"\b(?:chop|dice|mince|slice|julienne)\b", re.IGNORECASE), "Knife"),
    (re.compile(r"\b(?:skillet|sauté|saute|fry|sear|pan-fry|pan fry)\b", re.IGNORECASE), "Skillet"),
    (re.compile(r"\b(?:wooden spoon|spatula|stir|fold)\b", re.IGNORECASE), "Spoon"),
    (re.compile(r"\b(?:pot|boil|simmer|blanch|stock)\b", re.IGNORECASE), "Pot"),
    (re.compile(r"\b(?:oven|bake|roast|preheat|broil)\b", re.IGNORECASE), "Oven"),
    (re.compile(r"\b(?:blender|blend|purée|puree|food processor)\b", re.IGNORECASE), "Blender"),
    (re.compile(r"\b(?:stand mixer|hand mixer|whip|beat)\b", re.IGNORECASE), "Mixer"),
    (re.compile(r"\b(?:grill|barbecue|char|griddle)\b", re.IGNORECASE), "Grill"),
    (re.compile(r"\b(?:slow cooker|crockpot|low and slow)\b", re.IGNORECASE), "Slow cooker"),
    (re.compile(r"\b(?:pressure cooker|instant pot)\b", re.IGNORECASE), "Pressure cooker"),
    (re.compile(r"\b(?:drain|strain|colander|rinse pasta)\b", re.IGNORECASE), "Colander"),
]
|
||||
|
||||
# ── Dataclasses ───────────────────────────────────────────────────────────
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class StepAnalysis:
    """Analysis result for a single direction step (immutable value object)."""
    # True when the step text matched a passive keyword (cook can step away).
    is_passive: bool
    # Minutes parsed from the step text; None when no time mention found in text.
    detected_minutes: int | None
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class TimeEffortProfile:
    """Aggregated time and effort profile for a full recipe.

    Minute totals only accumulate from steps whose text contained an explicit
    time mention; steps with no detectable duration contribute zero.
    """
    active_min: int                    # total minutes requiring active attention
    passive_min: int                   # total minutes the cook can step away
    total_min: int                     # active_min + passive_min
    step_analyses: list[StepAnalysis]  # one entry per direction step
    equipment: list[str]               # ordered, deduplicated equipment labels
    effort_label: str                  # "quick" | "moderate" | "involved"
|
||||
|
||||
|
||||
# ── Core parsing logic ────────────────────────────────────────────────────
|
||||
|
||||
|
||||
def _extract_minutes(text: str) -> int | None:
    """Return the number of minutes mentioned in text, or None.

    Range values (e.g. "15-20 minutes") use the midpoint (truncated to int).
    Hours are converted to minutes; seconds round up to at least 1 minute.
    The result is capped at _MAX_MINUTES_PER_STEP.
    """
    match = _TIME_RE.search(text)
    if not match:
        return None

    range_low, range_high, range_unit, single_val, single_unit = match.groups()

    if range_low is not None:
        # Range branch: midpoint of the two endpoints.
        value: float = (int(range_low) + int(range_high)) / 2
        unit = range_unit.lower()
    else:
        # Single-value branch.
        value = float(single_val)
        unit = single_unit.lower()

    if unit in ("hour", "hr"):
        total = value * 60
    elif unit in ("second", "sec"):
        # Round seconds up, but never below one minute.
        total = max(1.0, math.ceil(value / 60))
    else:
        total = value

    return min(int(total), _MAX_MINUTES_PER_STEP)
|
||||
|
||||
|
||||
def _classify_passive(text: str) -> bool:
    """Return True when any passive-cooking keyword occurs in text (whole-word)."""
    return bool(_PASSIVE_RE.search(text))
|
||||
|
||||
|
||||
def _detect_equipment(all_text: str, has_passive: bool) -> list[str]:
    """Return ordered, deduplicated equipment labels detected in all_text.

    all_text should be every direction step joined with spaces; has_passive
    controls whether a trailing "Timer" entry is appended.
    """
    found: list[str] = []
    for pattern, label in _EQUIPMENT_RULES:
        # Rule order fixes output order; skip labels already collected.
        if label in found:
            continue
        if pattern.search(all_text):
            found.append(label)
    if has_passive and "Timer" not in found:
        found.append("Timer")
    return found
|
||||
|
||||
|
||||
def _effort_label(step_count: int) -> str:
|
||||
"""Derive effort label from step count."""
|
||||
if step_count <= 3:
|
||||
return "quick"
|
||||
if step_count <= 7:
|
||||
return "moderate"
|
||||
return "involved"
|
||||
|
||||
|
||||
def parse_time_effort(directions: list[str]) -> TimeEffortProfile:
    """Parse a list of direction strings into a TimeEffortProfile.

    An empty directions list yields a zero-valued profile with empty lists.
    Never raises — all failures silently produce sensible defaults.
    """
    if not directions:
        return TimeEffortProfile(
            active_min=0,
            passive_min=0,
            total_min=0,
            step_analyses=[],
            equipment=[],
            effort_label="quick",
        )

    analyses: list[StepAnalysis] = []
    active_total = 0
    passive_total = 0
    any_passive = False

    for step_text in directions:
        passive = _classify_passive(step_text)
        minutes = _extract_minutes(step_text)

        any_passive = any_passive or passive
        # Steps without a detectable duration contribute zero minutes.
        if minutes is not None:
            if passive:
                passive_total += minutes
            else:
                active_total += minutes

        analyses.append(StepAnalysis(is_passive=passive, detected_minutes=minutes))

    gear = _detect_equipment(" ".join(directions), any_passive)

    return TimeEffortProfile(
        active_min=active_total,
        passive_min=passive_total,
        total_min=active_total + passive_total,
        step_analyses=analyses,
        equipment=gear,
        effort_label=_effort_label(len(directions)),
    )
|
||||
Some files were not shown because too many files have changed in this diff Show more
Loading…
Reference in a new issue