Compare commits
No commits in common. "main" and "a17dcc8c55bfcd8aaa09bd717bd90e8e1700716c" have entirely different histories.
main
...
a17dcc8c55
236 changed files with 1481 additions and 29112 deletions
44
.cliff.toml
44
.cliff.toml
|
|
@ -1,44 +0,0 @@
|
|||
# git-cliff changelog configuration for Kiwi
|
||||
# See: https://git-cliff.org/docs/configuration
|
||||
|
||||
[changelog]
|
||||
header = """
|
||||
# Changelog\n
|
||||
"""
|
||||
body = """
|
||||
{% if version %}\
|
||||
## [{{ version | trim_start_matches(pat="v") }}] - {{ timestamp | date(format="%Y-%m-%d") }}
|
||||
{% else %}\
|
||||
## [Unreleased]
|
||||
{% endif %}\
|
||||
{% for group, commits in commits | group_by(attribute="group") %}
|
||||
### {{ group | upper_first }}
|
||||
{% for commit in commits %}
|
||||
- {% if commit.scope %}**{{ commit.scope }}:** {% endif %}{{ commit.message | upper_first }}\
|
||||
{% endfor %}
|
||||
{% endfor %}\n
|
||||
"""
|
||||
trim = true
|
||||
|
||||
[git]
|
||||
conventional_commits = true
|
||||
filter_unconventional = true
|
||||
split_commits = false
|
||||
commit_preprocessors = []
|
||||
commit_parsers = [
|
||||
{ message = "^feat", group = "Features" },
|
||||
{ message = "^fix", group = "Bug Fixes" },
|
||||
{ message = "^perf", group = "Performance" },
|
||||
{ message = "^refactor", group = "Refactoring" },
|
||||
{ message = "^docs", group = "Documentation" },
|
||||
{ message = "^test", group = "Testing" },
|
||||
{ message = "^chore", group = "Chores" },
|
||||
{ message = "^ci", group = "CI/CD" },
|
||||
{ message = "^revert", group = "Reverts" },
|
||||
]
|
||||
filter_commits = false
|
||||
tag_pattern = "v[0-9].*"
|
||||
skip_tags = ""
|
||||
ignore_tags = ""
|
||||
topo_order = false
|
||||
sort_commits = "oldest"
|
||||
68
.env.example
68
.env.example
|
|
@ -11,33 +11,6 @@ DATA_DIR=./data
|
|||
# Database (defaults to DATA_DIR/kiwi.db)
|
||||
# DB_PATH=./data/kiwi.db
|
||||
|
||||
# Pipeline data directory for downloaded parquets (used by download_datasets.py)
|
||||
# Override to store large datasets on a separate drive or NAS
|
||||
# KIWI_PIPELINE_DATA_DIR=./data/pipeline
|
||||
|
||||
# CF-core resource coordinator (VRAM lease management)
|
||||
# Set to the coordinator URL when running alongside cf-core orchestration
|
||||
# COORDINATOR_URL=http://localhost:7700
|
||||
# IP this machine advertises to the coordinator (must be reachable from coordinator host)
|
||||
# CF_ORCH_ADVERTISE_HOST=10.1.10.71
|
||||
|
||||
# CF-core hosted coordinator (managed cloud GPU inference — Paid+ tier)
|
||||
# Set CF_ORCH_URL to use a hosted cf-orch coordinator instead of self-hosting.
|
||||
# CF_LICENSE_KEY is read automatically by CFOrchClient for bearer auth.
|
||||
# CF_ORCH_URL=https://orch.circuitforge.tech
|
||||
# CF_LICENSE_KEY=CFG-KIWI-xxxx-xxxx-xxxx
|
||||
|
||||
# LLM backend — env-var auto-config (no llm.yaml needed for bare-metal users)
|
||||
# LLMRouter checks these in priority order:
|
||||
# 1. Anthropic cloud — set ANTHROPIC_API_KEY
|
||||
# 2. OpenAI cloud — set OPENAI_API_KEY
|
||||
# 3. Local Ollama — set OLLAMA_HOST (+ optionally OLLAMA_MODEL)
|
||||
# All three are optional; leave unset to rely on a local llm.yaml instead.
|
||||
# ANTHROPIC_API_KEY=sk-ant-...
|
||||
# OPENAI_API_KEY=sk-...
|
||||
# OLLAMA_HOST=http://localhost:11434
|
||||
# OLLAMA_MODEL=llama3.2
|
||||
|
||||
# Processing
|
||||
USE_GPU=true
|
||||
GPU_MEMORY_LIMIT=6144
|
||||
|
|
@ -51,53 +24,14 @@ ENABLE_OCR=false
|
|||
DEBUG=false
|
||||
CLOUD_MODE=false
|
||||
DEMO_MODE=false
|
||||
# Product identifier reported in cf-orch coordinator analytics for per-app breakdown
|
||||
CF_APP_NAME=kiwi
|
||||
# USE_ORCH_SCHEDULER: use coordinator-aware multi-GPU scheduler instead of local FIFO.
|
||||
# Unset = auto-detect: true if CLOUD_MODE or circuitforge_orch is installed (paid+ local).
|
||||
# Set false to force LocalScheduler even when cf-orch is present.
|
||||
# USE_ORCH_SCHEDULER=false
|
||||
|
||||
# Cloud mode (set in compose.cloud.yml; also set here for reference)
|
||||
# CLOUD_DATA_ROOT=/devl/kiwi-cloud-data
|
||||
# KIWI_DB=data/kiwi.db # local-mode DB path override
|
||||
# DEV ONLY: bypass JWT auth for these IPs/CIDRs (LAN testing without Caddy in the path).
|
||||
# NEVER set in production.
|
||||
# IMPORTANT: Docker port mapping NATs source IPs to the bridge gateway. When hitting
|
||||
# localhost:8515 (host → Docker → nginx → API), nginx sees 192.168.80.1, not 127.0.0.1.
|
||||
# Include the Docker bridge CIDR to allow localhost and LAN access through nginx.
|
||||
# Run: docker network inspect kiwi-cloud_kiwi-cloud-net | grep Subnet
|
||||
# Example: CLOUD_AUTH_BYPASS_IPS=10.1.10.0/24,127.0.0.1,::1,192.168.80.0/20
|
||||
# CLOUD_AUTH_BYPASS_IPS=
|
||||
|
||||
# Heimdall license server (required for cloud tier resolution)
|
||||
# HEIMDALL_URL=https://license.circuitforge.tech
|
||||
# HEIMDALL_ADMIN_TOKEN=
|
||||
|
||||
# Directus JWT (must match cf-directus SECRET env var exactly, including base64 == padding)
|
||||
# Directus JWT (must match cf-directus SECRET env var)
|
||||
# DIRECTUS_JWT_SECRET=
|
||||
|
||||
# E2E test account (Directus — free tier, used by automated tests)
|
||||
# E2E_TEST_EMAIL=e2e@circuitforge.tech
|
||||
# E2E_TEST_PASSWORD=
|
||||
# E2E_TEST_USER_ID=
|
||||
|
||||
# In-app feedback → Forgejo issue creation
|
||||
# FORGEJO_API_TOKEN=
|
||||
# FORGEJO_REPO=Circuit-Forge/kiwi
|
||||
# FORGEJO_API_URL=https://git.opensourcesolarpunk.com/api/v1
|
||||
|
||||
# Affiliate links (optional — plain URLs are shown if unset)
|
||||
# Amazon Associates tag (circuitforge_core.affiliates, retailer="amazon")
|
||||
# AMAZON_ASSOCIATES_TAG=circuitforge-20
|
||||
# Instacart affiliate ID (circuitforge_core.affiliates, retailer="instacart")
|
||||
# INSTACART_AFFILIATE_ID=circuitforge
|
||||
# Walmart Impact network affiliate ID (inline, path-based redirect)
|
||||
# WALMART_AFFILIATE_ID=
|
||||
|
||||
|
||||
# Community PostgreSQL — shared across CF products (cloud only; leave unset for local dev)
|
||||
# Points at cf-orch's cf-community-postgres container (port 5434 on the orch host).
|
||||
# When unset, community write paths fail soft with a plain-language message.
|
||||
# COMMUNITY_DB_URL=postgresql://cf_community:changeme@cf-orch-host:5434/cf_community
|
||||
# COMMUNITY_PSEUDONYM_SALT=change-this-to-a-random-32-char-string
|
||||
|
|
|
|||
|
|
@ -1,62 +0,0 @@
|
|||
# Kiwi CI — lint, type-check, test on PR/push
|
||||
# Full-stack: FastAPI (Python) + Vue 3 SPA (Node)
|
||||
# Adapted from Circuit-Forge/cf-agents workflows/ci.yml (cf-agents#4 tracks the
|
||||
# upstream ci-fullstack.yml variant; update this file when that lands).
|
||||
#
|
||||
# Note: frontend has no test suite yet — CI runs typecheck only.
|
||||
# Add `npm run test` when vitest is wired (kiwi#XX).
|
||||
#
|
||||
# circuitforge-core is not on PyPI — installed from Forgejo git (public repo).
|
||||
|
||||
name: CI
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [main, 'feature/**', 'fix/**']
|
||||
pull_request:
|
||||
branches: [main]
|
||||
|
||||
jobs:
|
||||
backend:
|
||||
name: Backend (Python)
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: '3.11'
|
||||
cache: pip
|
||||
|
||||
- name: Install circuitforge-core
|
||||
run: pip install git+https://git.opensourcesolarpunk.com/Circuit-Forge/circuitforge-core.git@main
|
||||
|
||||
- name: Install dependencies
|
||||
run: pip install -e ".[dev]" || pip install -e . pytest pytest-asyncio httpx ruff
|
||||
|
||||
- name: Lint
|
||||
run: ruff check .
|
||||
|
||||
- name: Test
|
||||
run: pytest tests/ -v --tb=short
|
||||
|
||||
frontend:
|
||||
name: Frontend (Vue)
|
||||
runs-on: ubuntu-latest
|
||||
defaults:
|
||||
run:
|
||||
working-directory: frontend
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: '20'
|
||||
cache: npm
|
||||
cache-dependency-path: frontend/package-lock.json
|
||||
|
||||
- name: Install dependencies
|
||||
run: npm ci
|
||||
|
||||
- name: Type check
|
||||
run: npx vue-tsc --noEmit
|
||||
|
|
@ -1,34 +0,0 @@
|
|||
# Mirror push to GitHub and Codeberg on every push to main or tag.
|
||||
# Copied from Circuit-Forge/cf-agents workflows/mirror.yml
|
||||
# Required secrets: GITHUB_MIRROR_TOKEN, CODEBERG_MIRROR_TOKEN
|
||||
|
||||
name: Mirror
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [main]
|
||||
tags: ['v*']
|
||||
|
||||
jobs:
|
||||
mirror:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Mirror to GitHub
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_MIRROR_TOKEN }}
|
||||
REPO: ${{ github.event.repository.name }}
|
||||
run: |
|
||||
git remote add github "https://x-access-token:${GITHUB_TOKEN}@github.com/CircuitForgeLLC/${REPO}.git"
|
||||
git push github --mirror
|
||||
|
||||
- name: Mirror to Codeberg
|
||||
env:
|
||||
CODEBERG_TOKEN: ${{ secrets.CODEBERG_MIRROR_TOKEN }}
|
||||
REPO: ${{ github.event.repository.name }}
|
||||
run: |
|
||||
git remote add codeberg "https://CircuitForge:${CODEBERG_TOKEN}@codeberg.org/CircuitForge/${REPO}.git"
|
||||
git push codeberg --mirror
|
||||
|
|
@ -1,71 +0,0 @@
|
|||
# Tag-triggered release workflow.
|
||||
# Generates changelog and creates Forgejo release on v* tags.
|
||||
# Copied from Circuit-Forge/cf-agents workflows/release.yml
|
||||
#
|
||||
# Docker push is intentionally disabled — BSL 1.1 registry policy not yet resolved.
|
||||
# Tracked in Circuit-Forge/cf-agents#3. Re-enable the Docker steps when that lands.
|
||||
#
|
||||
# Required secrets: FORGEJO_RELEASE_TOKEN
|
||||
# (GHCR_TOKEN not needed until Docker push is enabled)
|
||||
|
||||
name: Release
|
||||
|
||||
on:
|
||||
push:
|
||||
tags: ['v*']
|
||||
|
||||
jobs:
|
||||
release:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
# ── Changelog ────────────────────────────────────────────────────────────
|
||||
- name: Generate changelog
|
||||
uses: orhun/git-cliff-action@v3
|
||||
id: cliff
|
||||
with:
|
||||
config: .cliff.toml
|
||||
args: --latest --strip header
|
||||
env:
|
||||
OUTPUT: CHANGES.md
|
||||
|
||||
# ── Docker (disabled — BSL registry policy pending cf-agents#3) ──────────
|
||||
# - name: Set up QEMU
|
||||
# uses: docker/setup-qemu-action@v3
|
||||
# - name: Set up Buildx
|
||||
# uses: docker/setup-buildx-action@v3
|
||||
# - name: Log in to GHCR
|
||||
# uses: docker/login-action@v3
|
||||
# with:
|
||||
# registry: ghcr.io
|
||||
# username: ${{ github.actor }}
|
||||
# password: ${{ secrets.GHCR_TOKEN }}
|
||||
# - name: Build and push Docker image
|
||||
# uses: docker/build-push-action@v6
|
||||
# with:
|
||||
# context: .
|
||||
# push: true
|
||||
# platforms: linux/amd64,linux/arm64
|
||||
# tags: |
|
||||
# ghcr.io/circuitforgellc/kiwi:${{ github.ref_name }}
|
||||
# ghcr.io/circuitforgellc/kiwi:latest
|
||||
# cache-from: type=gha
|
||||
# cache-to: type=gha,mode=max
|
||||
|
||||
# ── Forgejo Release ───────────────────────────────────────────────────────
|
||||
- name: Create Forgejo release
|
||||
env:
|
||||
FORGEJO_TOKEN: ${{ secrets.FORGEJO_RELEASE_TOKEN }}
|
||||
REPO: ${{ github.event.repository.name }}
|
||||
TAG: ${{ github.ref_name }}
|
||||
NOTES: ${{ steps.cliff.outputs.content }}
|
||||
run: |
|
||||
curl -sS -X POST \
|
||||
"https://git.opensourcesolarpunk.com/api/v1/repos/Circuit-Forge/${REPO}/releases" \
|
||||
-H "Authorization: token ${FORGEJO_TOKEN}" \
|
||||
-H "Content-Type: application/json" \
|
||||
-d "$(jq -n --arg tag "$TAG" --arg body "$NOTES" \
|
||||
'{tag_name: $tag, name: $tag, body: $body}')"
|
||||
59
.github/workflows/ci.yml
vendored
59
.github/workflows/ci.yml
vendored
|
|
@ -1,59 +0,0 @@
|
|||
# Kiwi CI — runs on GitHub mirror for public credibility badge.
|
||||
# Forgejo (.forgejo/workflows/ci.yml) is the canonical CI — keep these in sync.
|
||||
# No Forgejo-specific secrets used here; circuitforge-core is public on Forgejo.
|
||||
#
|
||||
# Note: frontend has no test suite yet — CI runs typecheck only.
|
||||
# Add 'npm run test' when vitest is wired.
|
||||
|
||||
name: CI
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [main]
|
||||
pull_request:
|
||||
branches: [main]
|
||||
|
||||
jobs:
|
||||
backend:
|
||||
name: Backend (Python)
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: '3.11'
|
||||
cache: pip
|
||||
|
||||
- name: Install circuitforge-core
|
||||
run: pip install git+https://git.opensourcesolarpunk.com/Circuit-Forge/circuitforge-core.git@main
|
||||
|
||||
- name: Install dependencies
|
||||
run: pip install -e . pytest pytest-asyncio httpx ruff
|
||||
|
||||
- name: Lint
|
||||
run: ruff check .
|
||||
|
||||
- name: Test
|
||||
run: pytest tests/ -v --tb=short
|
||||
|
||||
frontend:
|
||||
name: Frontend (Vue)
|
||||
runs-on: ubuntu-latest
|
||||
defaults:
|
||||
run:
|
||||
working-directory: frontend
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: '20'
|
||||
cache: npm
|
||||
cache-dependency-path: frontend/package-lock.json
|
||||
|
||||
- name: Install dependencies
|
||||
run: npm ci
|
||||
|
||||
- name: Type check
|
||||
run: npx vue-tsc --noEmit
|
||||
9
.gitignore
vendored
9
.gitignore
vendored
|
|
@ -1,7 +1,4 @@
|
|||
|
||||
# CLAUDE.md — gitignored per BSL 1.1 commercial policy
|
||||
CLAUDE.md
|
||||
|
||||
# Superpowers brainstorming artifacts
|
||||
.superpowers/
|
||||
|
||||
|
|
@ -22,9 +19,3 @@ dist/
|
|||
|
||||
# Data directories
|
||||
data/
|
||||
|
||||
# Test artifacts (MagicMock sqlite files from pytest)
|
||||
<MagicMock*
|
||||
|
||||
# Playwright / debug screenshots
|
||||
debug-screenshots/
|
||||
|
|
|
|||
|
|
@ -1,34 +0,0 @@
|
|||
# Kiwi gitleaks config — extends base CircuitForge config with local rules
|
||||
|
||||
[extend]
|
||||
path = "/Library/Development/CircuitForge/circuitforge-hooks/gitleaks.toml"
|
||||
|
||||
# ── Global allowlist ──────────────────────────────────────────────────────────
|
||||
# Amazon grocery department IDs (rh=n:<10-digit>) false-positive as phone
|
||||
# numbers. locale_config.py is a static lookup table with no secrets.
|
||||
|
||||
[allowlist]
|
||||
# Amazon grocery dept IDs (rh=n:<digits>) false-positive as phone numbers.
|
||||
regexes = [
|
||||
'''rh=n:\d{8,12}''',
|
||||
]
|
||||
|
||||
# ── Test fixture allowlists ───────────────────────────────────────────────────
|
||||
|
||||
[[rules]]
|
||||
id = "cf-generic-env-token"
|
||||
description = "Generic KEY=<token> in env-style assignment — catches FORGEJO_API_TOKEN=hex etc."
|
||||
regex = '''(?i)(token|secret|key|password|passwd|pwd|api_key)\s*[=:]\s*['"]?[A-Za-z0-9\-_]{20,}['"]?'''
|
||||
[rules.allowlist]
|
||||
paths = [
|
||||
'.*test.*',
|
||||
]
|
||||
regexes = [
|
||||
'api_key:\s*ollama',
|
||||
'api_key:\s*any',
|
||||
'your-[a-z\-]+-here',
|
||||
'replace-with-',
|
||||
'xxxx',
|
||||
'test-fixture-',
|
||||
'CFG-KIWI-TEST-',
|
||||
]
|
||||
|
|
@ -1,7 +0,0 @@
|
|||
# Findings suppressed here are historical false positives or already-rotated secrets.
|
||||
# .env was accidentally included in the initial commit; it is now gitignored.
|
||||
# Rotate DIRECTUS_JWT_SECRET if it has not been changed since 2026-03-30.
|
||||
|
||||
# c166e5216 (chore: initial commit) — .env included by mistake
|
||||
c166e5216af532a08112ef87e8542cd51c184115:.env:generic-api-key:25
|
||||
c166e5216af532a08112ef87e8542cd51c184115:.env:cf-generic-env-token:25
|
||||
12
Dockerfile
12
Dockerfile
|
|
@ -11,23 +11,13 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
|
|||
COPY circuitforge-core/ ./circuitforge-core/
|
||||
RUN conda run -n base pip install --no-cache-dir -e ./circuitforge-core
|
||||
|
||||
# Install circuitforge-orch — needed for the cf-orch-agent sidecar (compose.override.yml)
|
||||
COPY circuitforge-orch/ ./circuitforge-orch/
|
||||
|
||||
# Create kiwi conda env and install app
|
||||
COPY kiwi/environment.yml .
|
||||
RUN conda env create -f environment.yml
|
||||
|
||||
COPY kiwi/ ./kiwi/
|
||||
|
||||
# Remove gitignored config files that may exist locally — defense-in-depth.
|
||||
# The parent .dockerignore should exclude these, but an explicit rm guarantees
|
||||
# they never end up in the cloud image regardless of .dockerignore placement.
|
||||
RUN rm -f /app/kiwi/.env
|
||||
|
||||
# Install cf-core and cf-orch into the kiwi env BEFORE installing kiwi
|
||||
# Install cf-core into the kiwi env BEFORE installing kiwi (kiwi lists it as a dep)
|
||||
RUN conda run -n kiwi pip install --no-cache-dir -e /app/circuitforge-core
|
||||
RUN conda run -n kiwi pip install --no-cache-dir -e /app/circuitforge-orch
|
||||
WORKDIR /app/kiwi
|
||||
RUN conda run -n kiwi pip install --no-cache-dir -e .
|
||||
|
||||
|
|
|
|||
28
LICENSE-BSL
28
LICENSE-BSL
|
|
@ -1,28 +0,0 @@
|
|||
Business Source License 1.1
|
||||
|
||||
Licensor: Circuit Forge LLC
|
||||
Licensed Work: Kiwi — Pantry tracking and leftover recipe suggestions
|
||||
Copyright (c) 2026 Circuit Forge LLC
|
||||
Additional Use Grant: You may use the Licensed Work for personal,
|
||||
non-commercial pantry tracking and recipe suggestion
|
||||
purposes only.
|
||||
Change Date: 2030-01-01
|
||||
Change License: MIT License
|
||||
|
||||
For the full Business Source License 1.1 text, see:
|
||||
https://mariadb.com/bsl11/
|
||||
|
||||
---
|
||||
|
||||
This license applies to the following components of Kiwi:
|
||||
|
||||
- app/services/recipe/recipe_engine.py
|
||||
- app/services/recipe/assembly_recipes.py
|
||||
- app/services/recipe/llm_recipe.py
|
||||
- app/services/expiration_predictor.py
|
||||
- app/tasks/scheduler.py
|
||||
- app/tasks/runner.py
|
||||
- app/tiers.py
|
||||
- app/cloud_session.py
|
||||
- frontend/src/components/RecipesView.vue
|
||||
- frontend/src/stores/recipes.ts
|
||||
34
LICENSE-MIT
34
LICENSE-MIT
|
|
@ -1,34 +0,0 @@
|
|||
MIT License
|
||||
|
||||
Copyright (c) 2026 Circuit Forge LLC
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
|
||||
---
|
||||
|
||||
This license applies to the following components of Kiwi:
|
||||
|
||||
- app/api/endpoints/inventory.py
|
||||
- app/api/endpoints/ocr.py
|
||||
- app/db/store.py
|
||||
- app/db/migrations/
|
||||
- app/core/config.py
|
||||
- scripts/pipeline/
|
||||
- scripts/download_datasets.py
|
||||
- scripts/backfill_texture_profiles.py
|
||||
26
README.md
26
README.md
|
|
@ -6,11 +6,7 @@
|
|||
|
||||
Scan barcodes, photograph receipts, and get recipe ideas based on what you already have — before it expires.
|
||||
|
||||
**LLM support is optional.** Inventory tracking, barcode scanning, expiry alerts, CSV export, and receipt upload all work without any LLM configured. AI features (receipt OCR, recipe suggestions, meal planning) activate when a backend is available and are BYOK-unlockable at any tier.
|
||||
|
||||
**Status:** Beta · CircuitForge LLC
|
||||
|
||||
**[Documentation](https://docs.circuitforge.tech/kiwi/)** · [circuitforge.tech](https://circuitforge.tech)
|
||||
**Status:** Pre-alpha · CircuitForge LLC
|
||||
|
||||
---
|
||||
|
||||
|
|
@ -18,14 +14,9 @@ Scan barcodes, photograph receipts, and get recipe ideas based on what you alrea
|
|||
|
||||
- **Inventory tracking** — add items by barcode scan, receipt upload, or manually
|
||||
- **Expiry alerts** — know what's about to go bad
|
||||
- **Recipe browser** — browse the full recipe corpus by cuisine, meal type, dietary preference, or main ingredient; pantry match percentage shown inline (Free)
|
||||
- **Saved recipes** — bookmark any recipe with notes, a 0–5 star rating, and free-text style tags (Free); organize into named collections (Paid)
|
||||
- **Receipt OCR** — extract line items from receipt photos automatically (Paid tier, BYOK-unlockable)
|
||||
- **Recipe suggestions** — four levels from pantry-match to full LLM generation (Paid tier, BYOK-unlockable)
|
||||
- **Style auto-classifier** — LLM suggests style tags (comforting, hands-off, quick, etc.) for saved recipes (Paid tier, BYOK-unlockable)
|
||||
- **Leftover mode** — prioritize nearly-expired items in recipe ranking (Free, 5/day; unlimited at Paid+)
|
||||
- **LLM backend config** — configure inference via `circuitforge-core` env-var system; BYOK unlocks Paid AI features at any tier
|
||||
- **Feedback FAB** — in-app feedback button; status probed on load, hidden if CF feedback endpoint unreachable
|
||||
- **Receipt OCR** — extract line items from receipt photos automatically (Paid tier)
|
||||
- **Recipe suggestions** — LLM-powered ideas based on what's expiring (Paid tier, BYOK-unlockable)
|
||||
- **Leftover mode** — prioritize nearly-expired items in recipe ranking (Premium tier)
|
||||
|
||||
## Stack
|
||||
|
||||
|
|
@ -61,16 +52,11 @@ cp .env.example .env
|
|||
| Receipt upload | ✓ | ✓ | ✓ |
|
||||
| Expiry alerts | ✓ | ✓ | ✓ |
|
||||
| CSV export | ✓ | ✓ | ✓ |
|
||||
| Recipe browser (domain/category) | ✓ | ✓ | ✓ |
|
||||
| Save recipes + notes + star rating | ✓ | ✓ | ✓ |
|
||||
| Style tags (manual, free-text) | ✓ | ✓ | ✓ |
|
||||
| Receipt OCR | BYOK | ✓ | ✓ |
|
||||
| Recipe suggestions (L1–L4) | BYOK | ✓ | ✓ |
|
||||
| Named recipe collections | — | ✓ | ✓ |
|
||||
| LLM style auto-classifier | — | BYOK | ✓ |
|
||||
| Recipe suggestions | BYOK | ✓ | ✓ |
|
||||
| Meal planning | — | ✓ | ✓ |
|
||||
| Multi-household | — | — | ✓ |
|
||||
| Leftover mode (5/day) | ✓ | ✓ | ✓ |
|
||||
| Leftover mode | — | — | ✓ |
|
||||
|
||||
BYOK = bring your own LLM backend (configure `~/.config/circuitforge/llm.yaml`)
|
||||
|
||||
|
|
|
|||
|
|
@ -3,5 +3,5 @@
|
|||
Kiwi: Pantry tracking and leftover recipe suggestions.
|
||||
"""
|
||||
|
||||
__version__ = "0.2.0"
|
||||
__version__ = "0.1.0"
|
||||
__author__ = "Alan 'pyr0ball' Weinstock"
|
||||
|
|
@ -1,358 +0,0 @@
|
|||
# app/api/endpoints/community.py
|
||||
# MIT License
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
import re
|
||||
import sqlite3
|
||||
from datetime import datetime, timezone
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Request, Response
|
||||
|
||||
from app.cloud_session import CloudUser, get_session
|
||||
from app.core.config import settings
|
||||
from app.db.store import Store
|
||||
from app.services.community.feed import posts_to_rss
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
router = APIRouter(prefix="/community", tags=["community"])
|
||||
|
||||
_community_store = None
|
||||
|
||||
|
||||
def _get_community_store():
|
||||
return _community_store
|
||||
|
||||
|
||||
def init_community_store(community_db_url: str | None) -> None:
|
||||
global _community_store
|
||||
if not community_db_url:
|
||||
logger.info(
|
||||
"COMMUNITY_DB_URL not set — community write features disabled. "
|
||||
"Browse still works via cloud feed."
|
||||
)
|
||||
return
|
||||
from circuitforge_core.community import CommunityDB
|
||||
from app.services.community.community_store import KiwiCommunityStore
|
||||
db = CommunityDB(dsn=community_db_url)
|
||||
db.run_migrations()
|
||||
_community_store = KiwiCommunityStore(db)
|
||||
logger.info("Community store initialized.")
|
||||
|
||||
|
||||
def _visible(post, session=None) -> bool:
|
||||
"""Return False for premium-tier posts when the session is not paid/premium."""
|
||||
tier = getattr(post, "tier", None)
|
||||
if tier == "premium":
|
||||
if session is None or getattr(session, "tier", None) not in ("paid", "premium"):
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
@router.get("/posts")
|
||||
async def list_posts(
|
||||
post_type: str | None = None,
|
||||
dietary_tags: str | None = None,
|
||||
allergen_exclude: str | None = None,
|
||||
page: int = 1,
|
||||
page_size: int = 20,
|
||||
):
|
||||
store = _get_community_store()
|
||||
if store is None:
|
||||
return {
|
||||
"posts": [],
|
||||
"total": 0,
|
||||
"page": page,
|
||||
"page_size": page_size,
|
||||
"note": "Community DB not available on this instance.",
|
||||
}
|
||||
|
||||
dietary = [t.strip() for t in dietary_tags.split(",")] if dietary_tags else None
|
||||
allergen_ex = [t.strip() for t in allergen_exclude.split(",")] if allergen_exclude else None
|
||||
offset = (page - 1) * min(page_size, 100)
|
||||
|
||||
posts = await asyncio.to_thread(
|
||||
store.list_posts,
|
||||
limit=min(page_size, 100),
|
||||
offset=offset,
|
||||
post_type=post_type,
|
||||
dietary_tags=dietary,
|
||||
allergen_exclude=allergen_ex,
|
||||
)
|
||||
visible = [_post_to_dict(p) for p in posts if _visible(p)]
|
||||
return {"posts": visible, "total": len(visible), "page": page, "page_size": page_size}
|
||||
|
||||
|
||||
@router.get("/posts/{slug}")
|
||||
async def get_post(slug: str, request: Request):
|
||||
store = _get_community_store()
|
||||
if store is None:
|
||||
raise HTTPException(status_code=503, detail="Community DB not available on this instance.")
|
||||
|
||||
post = await asyncio.to_thread(store.get_post_by_slug, slug)
|
||||
if post is None:
|
||||
raise HTTPException(status_code=404, detail="Post not found.")
|
||||
|
||||
accept = request.headers.get("accept", "")
|
||||
if "application/activity+json" in accept or "application/ld+json" in accept:
|
||||
from app.services.community.ap_compat import post_to_ap_json_ld
|
||||
base_url = str(request.base_url).rstrip("/")
|
||||
return post_to_ap_json_ld(_post_to_dict(post), base_url=base_url)
|
||||
|
||||
return _post_to_dict(post)
|
||||
|
||||
|
||||
@router.get("/feed.rss")
|
||||
async def get_rss_feed(request: Request):
|
||||
store = _get_community_store()
|
||||
posts_data: list[dict] = []
|
||||
if store is not None:
|
||||
posts = await asyncio.to_thread(store.list_posts, limit=50)
|
||||
posts_data = [_post_to_dict(p) for p in posts]
|
||||
|
||||
base_url = str(request.base_url).rstrip("/")
|
||||
rss = posts_to_rss(posts_data, base_url=base_url)
|
||||
return Response(content=rss, media_type="application/rss+xml; charset=utf-8")
|
||||
|
||||
|
||||
@router.get("/local-feed")
|
||||
async def local_feed():
|
||||
store = _get_community_store()
|
||||
if store is None:
|
||||
return []
|
||||
posts = await asyncio.to_thread(store.list_posts, limit=50)
|
||||
return [_post_to_dict(p) for p in posts]
|
||||
|
||||
|
||||
@router.get("/hall-of-chaos")
|
||||
async def hall_of_chaos():
|
||||
"""Hidden easter egg endpoint -- returns the 10 most chaotic bloopers."""
|
||||
store = _get_community_store()
|
||||
if store is None:
|
||||
return {"posts": [], "chaos_level": 0}
|
||||
posts = await asyncio.to_thread(
|
||||
store.list_posts, limit=10, post_type="recipe_blooper"
|
||||
)
|
||||
return {
|
||||
"posts": [_post_to_dict(p) for p in posts],
|
||||
"chaos_level": len(posts),
|
||||
}
|
||||
|
||||
|
||||
_VALID_POST_TYPES = {"plan", "recipe_success", "recipe_blooper"}
|
||||
_MAX_TITLE_LEN = 200
|
||||
_MAX_TEXT_LEN = 2000
|
||||
|
||||
|
||||
def _validate_publish_body(body: dict) -> None:
|
||||
"""Raise HTTPException(422) for any invalid fields in a publish request."""
|
||||
post_type = body.get("post_type", "plan")
|
||||
if post_type not in _VALID_POST_TYPES:
|
||||
raise HTTPException(
|
||||
status_code=422,
|
||||
detail=f"post_type must be one of: {', '.join(sorted(_VALID_POST_TYPES))}",
|
||||
)
|
||||
title = body.get("title") or ""
|
||||
if len(title) > _MAX_TITLE_LEN:
|
||||
raise HTTPException(status_code=422, detail=f"title exceeds {_MAX_TITLE_LEN} character limit.")
|
||||
for field in ("description", "outcome_notes", "recipe_name"):
|
||||
value = body.get(field)
|
||||
if value and len(str(value)) > _MAX_TEXT_LEN:
|
||||
raise HTTPException(status_code=422, detail=f"{field} exceeds {_MAX_TEXT_LEN} character limit.")
|
||||
photo_url = body.get("photo_url")
|
||||
if photo_url and not str(photo_url).startswith("https://"):
|
||||
raise HTTPException(status_code=422, detail="photo_url must be an https:// URL.")
|
||||
|
||||
|
||||
@router.post("/posts", status_code=201)
async def publish_post(body: dict, session: CloudUser = Depends(get_session)):
    """Publish a community post (meal plan or recipe outcome) under the user's pseudonym.

    Flow: tier gate -> payload validation -> pseudonym lookup/creation ->
    element-profile snapshot -> slug generation -> insert into the community DB.

    Raises 402 (tier gate), 422 (invalid payload or rejected pseudonym),
    503 (no community DB attached), 409 (duplicate slug on insert).
    """
    from app.tiers import can_use
    if not can_use("community_publish", session.tier, session.has_byok):
        raise HTTPException(status_code=402, detail="Community publishing requires Paid tier.")

    _validate_publish_body(body)

    store = _get_community_store()
    if store is None:
        raise HTTPException(
            status_code=503,
            detail="This Kiwi instance is not connected to a community database. "
            "Publishing is only available on cloud instances.",
        )

    from app.services.community.community_store import get_or_create_pseudonym
    def _get_pseudonym():
        # Runs in a worker thread: open a short-lived Store handle for this request.
        s = Store(session.db)
        try:
            return get_or_create_pseudonym(
                store=s,
                directus_user_id=session.user_id,
                requested_name=body.get("pseudonym_name"),
            )
        finally:
            s.close()
    try:
        pseudonym = await asyncio.to_thread(_get_pseudonym)
    except ValueError as exc:
        # e.g. the requested pseudonym name was rejected by the community store.
        raise HTTPException(status_code=422, detail=str(exc)) from exc

    # Aggregate element scores over every recipe referenced by the post's slots.
    recipe_ids = [slot["recipe_id"] for slot in body.get("slots", []) if slot.get("recipe_id")]
    from app.services.community.element_snapshot import compute_snapshot
    def _snapshot():
        s = Store(session.db)
        try:
            return compute_snapshot(recipe_ids=recipe_ids, store=s)
        finally:
            s.close()
    snapshot = await asyncio.to_thread(_snapshot)

    # Slug shape: kiwi-<type>-<pseudonym>-<YYYY-MM-DD>-<title>, capped at 120 chars.
    post_type = body.get("post_type", "plan")
    slug_title = re.sub(r"[^a-z0-9]+", "-", (body.get("title") or "plan").lower()).strip("-")
    today = datetime.now(timezone.utc).strftime("%Y-%m-%d")
    slug = f"kiwi-{_post_type_prefix(post_type)}-{pseudonym.lower().replace(' ', '')}-{today}-{slug_title}"[:120]

    from circuitforge_core.community.models import CommunityPost
    post = CommunityPost(
        slug=slug,
        pseudonym=pseudonym,
        post_type=post_type,
        published=datetime.now(timezone.utc),
        title=(body.get("title") or "Untitled")[:_MAX_TITLE_LEN],
        description=body.get("description"),
        photo_url=body.get("photo_url"),
        slots=body.get("slots", []),
        recipe_id=body.get("recipe_id"),
        recipe_name=body.get("recipe_name"),
        level=body.get("level"),
        outcome_notes=body.get("outcome_notes"),
        seasoning_score=snapshot.seasoning_score,
        richness_score=snapshot.richness_score,
        brightness_score=snapshot.brightness_score,
        depth_score=snapshot.depth_score,
        aroma_score=snapshot.aroma_score,
        structure_score=snapshot.structure_score,
        texture_profile=snapshot.texture_profile,
        dietary_tags=list(snapshot.dietary_tags),
        allergen_flags=list(snapshot.allergen_flags),
        flavor_molecules=list(snapshot.flavor_molecules),
        fat_pct=snapshot.fat_pct,
        protein_pct=snapshot.protein_pct,
        moisture_pct=snapshot.moisture_pct,
    )

    try:
        inserted = await asyncio.to_thread(store.insert_post, post)
    except sqlite3.IntegrityError as exc:
        # Insert conflict on the slug (same author/type/date/title) maps to 409.
        raise HTTPException(
            status_code=409,
            detail="A post with this title already exists today. Try a different title.",
        ) from exc
    return _post_to_dict(inserted)
|
||||
|
||||
|
||||
@router.delete("/posts/{slug}", status_code=204)
async def delete_post(slug: str, session: CloudUser = Depends(get_session)):
    """Delete one of the caller's own community posts, matched by slug and pseudonym."""
    store = _get_community_store()
    if store is None:
        raise HTTPException(status_code=503, detail="Community DB not available.")

    def _lookup_pseudonym():
        # Worker-thread helper: short-lived Store handle for this request only.
        user_store = Store(session.db)
        try:
            return user_store.get_current_pseudonym(session.user_id)
        finally:
            user_store.close()

    pseudonym = await asyncio.to_thread(_lookup_pseudonym)
    if not pseudonym:
        raise HTTPException(status_code=400, detail="No pseudonym set. Cannot delete posts.")

    removed = await asyncio.to_thread(store.delete_post, slug=slug, pseudonym=pseudonym)
    if not removed:
        raise HTTPException(status_code=404, detail="Post not found or you are not the author.")
|
||||
|
||||
|
||||
@router.post("/posts/{slug}/fork", status_code=201)
async def fork_post(slug: str, session: CloudUser = Depends(get_session)):
    """Fork a community plan post into a new meal plan in the caller's own DB.

    Raises 503 (no community DB), 404 (unknown slug), 400 (non-plan post or
    malformed slots). The forked plan's week starts at today's date.
    """
    store = _get_community_store()
    if store is None:
        raise HTTPException(status_code=503, detail="Community DB not available.")

    post = await asyncio.to_thread(store.get_post_by_slug, slug)
    if post is None:
        raise HTTPException(status_code=404, detail="Post not found.")
    if post.post_type != "plan":
        raise HTTPException(status_code=400, detail="Only plan posts can be forked as a meal plan.")

    # Validate slot shape up front so the plan is never created half-filled.
    required_slot_keys = {"day", "meal_type", "recipe_id"}
    if any(not required_slot_keys.issubset(slot) for slot in post.slots):
        raise HTTPException(status_code=400, detail="Post contains malformed slots and cannot be forked.")

    from datetime import date
    week_start = date.today().strftime("%Y-%m-%d")

    def _create_plan():
        # Runs in a worker thread with its own Store handle.
        s = Store(session.db)
        try:
            # Deduplicated meal types drive the plan's grid; default to dinner.
            meal_types = list({slot["meal_type"] for slot in post.slots})
            plan = s.create_meal_plan(week_start=week_start, meal_types=meal_types or ["dinner"])
            for slot in post.slots:
                s.assign_recipe_to_slot(
                    plan_id=plan["id"],
                    day_of_week=slot["day"],
                    meal_type=slot["meal_type"],
                    recipe_id=slot["recipe_id"],
                )
            return plan
        finally:
            s.close()

    plan = await asyncio.to_thread(_create_plan)
    return {"plan_id": plan["id"], "week_start": plan["week_start"], "forked_from": slug}
|
||||
|
||||
|
||||
@router.post("/posts/{slug}/fork-adapt", status_code=201)
async def fork_adapt_post(slug: str, session: CloudUser = Depends(get_session)):
    """Fork a post with LLM adaptation (stub — tier-gated but not implemented)."""
    from app.tiers import can_use
    allowed = can_use("community_fork_adapt", session.tier, session.has_byok)
    if not allowed:
        raise HTTPException(status_code=402, detail="Fork with adaptation requires Paid tier or BYOK.")
    # Full LLM adaptation is deferred; the tier gate runs first so callers see
    # the correct 402 before hitting the 501 stub.
    raise HTTPException(status_code=501, detail="Fork-adapt not yet implemented.")
|
||||
|
||||
|
||||
def _post_to_dict(post) -> dict:
|
||||
return {
|
||||
"slug": post.slug,
|
||||
"pseudonym": post.pseudonym,
|
||||
"post_type": post.post_type,
|
||||
"published": post.published.isoformat() if hasattr(post.published, "isoformat") else str(post.published),
|
||||
"title": post.title,
|
||||
"description": post.description,
|
||||
"photo_url": post.photo_url,
|
||||
"slots": list(post.slots),
|
||||
"recipe_id": post.recipe_id,
|
||||
"recipe_name": post.recipe_name,
|
||||
"level": post.level,
|
||||
"outcome_notes": post.outcome_notes,
|
||||
"element_profiles": {
|
||||
"seasoning_score": post.seasoning_score,
|
||||
"richness_score": post.richness_score,
|
||||
"brightness_score": post.brightness_score,
|
||||
"depth_score": post.depth_score,
|
||||
"aroma_score": post.aroma_score,
|
||||
"structure_score": post.structure_score,
|
||||
"texture_profile": post.texture_profile,
|
||||
},
|
||||
"dietary_tags": list(post.dietary_tags),
|
||||
"allergen_flags": list(post.allergen_flags),
|
||||
"flavor_molecules": list(post.flavor_molecules),
|
||||
"fat_pct": post.fat_pct,
|
||||
"protein_pct": post.protein_pct,
|
||||
"moisture_pct": post.moisture_pct,
|
||||
}
|
||||
|
||||
|
||||
def _post_type_prefix(post_type: str) -> str:
|
||||
return {"plan": "plan", "recipe_success": "success", "recipe_blooper": "blooper"}.get(post_type, "post")
|
||||
|
|
@ -1,11 +1,9 @@
|
|||
"""Export endpoints — CSV and JSON export of user data."""
|
||||
"""Export endpoints — CSV/Excel of receipt and inventory data."""
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import csv
|
||||
import io
|
||||
import json
|
||||
from datetime import datetime, timezone
|
||||
|
||||
from fastapi import APIRouter, Depends
|
||||
from fastapi.responses import StreamingResponse
|
||||
|
|
@ -47,33 +45,3 @@ async def export_inventory_csv(store: Store = Depends(get_store)):
|
|||
media_type="text/csv",
|
||||
headers={"Content-Disposition": "attachment; filename=inventory.csv"},
|
||||
)
|
||||
|
||||
|
||||
@router.get("/json")
async def export_full_json(store: Store = Depends(get_store)):
    """Export full pantry inventory + saved recipes as a single JSON file.

    Intended for data portability — users can import this into another
    Kiwi instance or keep it as an offline backup.

    Returns a streamed application/json attachment named
    kiwi-export-YYYYMMDD.json.
    """
    # Fetch both datasets concurrently on worker threads.
    inventory, saved = await asyncio.gather(
        asyncio.to_thread(store.list_inventory),
        asyncio.to_thread(store.get_saved_recipes),
    )

    export_doc = {
        "kiwi_export": {
            "version": "1.0",
            "exported_at": datetime.now(timezone.utc).isoformat(),
            "inventory": [dict(row) for row in inventory],
            "saved_recipes": [dict(row) for row in saved],
        }
    }

    # default=str covers non-JSON types (dates etc.) in DB rows.
    body = json.dumps(export_doc, default=str, indent=2)
    filename = f"kiwi-export-{datetime.now(timezone.utc).strftime('%Y%m%d')}.json"
    return StreamingResponse(
        iter([body]),
        media_type="application/json",
        # Bug fix: the header previously contained literal placeholder text
        # instead of the computed dated filename, which was left unused.
        headers={"Content-Disposition": f"attachment; filename={filename}"},
    )
|
||||
|
|
|
|||
|
|
@ -1,9 +0,0 @@
|
|||
"""Feedback router — provided by circuitforge-core."""
from circuitforge_core.api import make_feedback_router
from app.core.config import settings

# Shared feedback router from cf-core, bound to this product's repo.
# demo_mode_fn is passed as a callable so the current settings.DEMO_MODE
# value is read when the router needs it, not once at import time.
router = make_feedback_router(
    repo="Circuit-Forge/kiwi",
    product="kiwi",
    demo_mode_fn=lambda: settings.DEMO_MODE,
)
|
||||
|
|
@ -1,103 +0,0 @@
|
|||
"""Screenshot attachment endpoint for in-app feedback.
|
||||
|
||||
After the cf-core feedback router creates a Forgejo issue, the frontend
|
||||
can call POST /feedback/attach to upload a screenshot and pin it as a
|
||||
comment on that issue.
|
||||
|
||||
The endpoint is separate from the cf-core router so Kiwi owns it
|
||||
without modifying shared infrastructure.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import base64
|
||||
import os
|
||||
|
||||
import requests
|
||||
from fastapi import APIRouter, HTTPException
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
router = APIRouter()

# Forgejo REST base URL; overridable via env for self-hosted/test instances.
_FORGEJO_BASE = os.environ.get(
    "FORGEJO_API_URL", "https://git.opensourcesolarpunk.com/api/v1"
)
# Repository whose issues receive the screenshot comments.
_REPO = "Circuit-Forge/kiwi"
_MAX_BYTES = 5 * 1024 * 1024  # 5 MB
||||
|
||||
|
||||
class AttachRequest(BaseModel):
    """Request body for POST /feedback/attach."""
    issue_number: int  # Forgejo issue number the screenshot belongs to
    filename: str = Field(default="screenshot.png", max_length=80)
    image_b64: str  # data URI or raw base64
|
||||
|
||||
|
||||
class AttachResponse(BaseModel):
    """Response body: URL of the issue comment that embeds the screenshot."""
    comment_url: str
|
||||
|
||||
|
||||
def _forgejo_headers() -> dict[str, str]:
|
||||
token = os.environ.get("FORGEJO_API_TOKEN", "")
|
||||
return {"Authorization": f"token {token}"}
|
||||
|
||||
|
||||
def _decode_image(image_b64: str) -> tuple[bytes, str]:
|
||||
"""Return (raw_bytes, mime_type) from a base64 string or data URI."""
|
||||
if image_b64.startswith("data:"):
|
||||
header, _, data = image_b64.partition(",")
|
||||
mime = header.split(";")[0].split(":")[1] if ":" in header else "image/png"
|
||||
else:
|
||||
data = image_b64
|
||||
mime = "image/png"
|
||||
return base64.b64decode(data), mime
|
||||
|
||||
|
||||
@router.post("/attach", response_model=AttachResponse)
def attach_screenshot(payload: AttachRequest) -> AttachResponse:
    """Upload a screenshot to a Forgejo issue as a comment with embedded image.

    The image is uploaded as an issue asset, then referenced in a comment
    so it is visible inline when the issue is viewed.

    Raises 503 when no API token is configured, 413 for oversized images,
    and 502 when either Forgejo call fails.
    """
    token = os.environ.get("FORGEJO_API_TOKEN", "")
    if not token:
        raise HTTPException(status_code=503, detail="Feedback not configured.")

    raw_bytes, mime = _decode_image(payload.image_b64)

    if len(raw_bytes) > _MAX_BYTES:
        raise HTTPException(
            status_code=413,
            detail=f"Screenshot exceeds 5 MB limit ({len(raw_bytes) // 1024} KB received).",
        )

    # Upload image as issue asset
    asset_resp = requests.post(
        f"{_FORGEJO_BASE}/repos/{_REPO}/issues/{payload.issue_number}/assets",
        headers=_forgejo_headers(),
        files={"attachment": (payload.filename, raw_bytes, mime)},
        timeout=20,
    )
    if not asset_resp.ok:
        raise HTTPException(
            status_code=502,
            detail=f"Forgejo asset upload failed: {asset_resp.text[:200]}",
        )

    asset_url = asset_resp.json().get("browser_download_url", "")

    # Pin as a comment so the image is visible inline.
    # Bug fix: asset_url was fetched but never embedded, so the comment showed
    # no image. Reference it with a markdown image link.
    comment_body = f"**Screenshot attached by reporter:**\n\n"
    comment_resp = requests.post(
        f"{_FORGEJO_BASE}/repos/{_REPO}/issues/{payload.issue_number}/comments",
        headers={**_forgejo_headers(), "Content-Type": "application/json"},
        json={"body": comment_body},
        timeout=15,
    )
    if not comment_resp.ok:
        raise HTTPException(
            status_code=502,
            detail=f"Forgejo comment failed: {comment_resp.text[:200]}",
        )

    comment_url = comment_resp.json().get("html_url", "")
    return AttachResponse(comment_url=comment_url)
|
||||
|
|
@ -1,217 +0,0 @@
|
|||
"""Household management endpoints — shared pantry for Premium users."""
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import os
|
||||
import secrets
|
||||
from datetime import datetime, timedelta, timezone
|
||||
|
||||
import sqlite3
|
||||
|
||||
import requests
|
||||
from fastapi import APIRouter, Depends, HTTPException
|
||||
|
||||
from app.cloud_session import CloudUser, CLOUD_DATA_ROOT, HEIMDALL_URL, HEIMDALL_ADMIN_TOKEN, get_session
|
||||
from app.db.store import Store
|
||||
from app.models.schemas.household import (
|
||||
HouseholdAcceptRequest,
|
||||
HouseholdAcceptResponse,
|
||||
HouseholdCreateResponse,
|
||||
HouseholdInviteResponse,
|
||||
HouseholdMember,
|
||||
HouseholdRemoveMemberRequest,
|
||||
HouseholdStatusResponse,
|
||||
MessageResponse,
|
||||
)
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
router = APIRouter()
|
||||
|
||||
_INVITE_TTL_DAYS = 7
|
||||
_KIWI_BASE_URL = os.environ.get("KIWI_BASE_URL", "https://menagerie.circuitforge.tech/kiwi")
|
||||
|
||||
|
||||
def _require_premium(session: CloudUser = Depends(get_session)) -> CloudUser:
    """Dependency: reject requests from tiers without household access (403)."""
    if session.tier in ("premium", "ultra", "local"):
        return session
    raise HTTPException(status_code=403, detail="Household features require Premium tier.")
|
||||
|
||||
|
||||
def _require_household_owner(session: CloudUser = Depends(_require_premium)) -> CloudUser:
    """Dependency: additionally require that the caller owns a household (403)."""
    if session.is_household_owner and session.household_id:
        return session
    raise HTTPException(status_code=403, detail="Only the household owner can perform this action.")
|
||||
|
||||
|
||||
def _household_store(household_id: str) -> Store:
    """Open the household DB directly (used during invite acceptance).

    Sets row_factory so dict-style column access works on raw conn queries.
    """
    path = CLOUD_DATA_ROOT / f"household_{household_id}" / "kiwi.db"
    path.parent.mkdir(parents=True, exist_ok=True)
    hh_store = Store(path)
    hh_store.conn.row_factory = sqlite3.Row
    return hh_store
|
||||
|
||||
|
||||
def _heimdall_post(path: str, body: dict) -> dict:
    """Call Heimdall admin API. Returns response dict or raises HTTPException.

    When HEIMDALL_ADMIN_TOKEN is unset (dev mode) the call is skipped and an
    empty dict is returned so callers can proceed without Heimdall.
    Raises 502 for non-OK responses or connection failures.
    """
    if not HEIMDALL_ADMIN_TOKEN:
        log.warning("HEIMDALL_ADMIN_TOKEN not set — household Heimdall call skipped")
        return {}
    try:
        resp = requests.post(
            f"{HEIMDALL_URL}{path}",
            json=body,
            headers={"Authorization": f"Bearer {HEIMDALL_ADMIN_TOKEN}"},
            timeout=10,
        )
        if not resp.ok:
            raise HTTPException(status_code=502, detail=f"Heimdall error: {resp.text}")
        return resp.json()
    except requests.RequestException as exc:
        # Fix: chain the original cause so connection failures are debuggable.
        raise HTTPException(status_code=502, detail=f"Heimdall unreachable: {exc}") from exc
|
||||
|
||||
|
||||
@router.post("/create", response_model=HouseholdCreateResponse)
async def create_household(session: CloudUser = Depends(_require_premium)):
    """Create a new household. The calling user becomes owner.

    Raises 409 if the user already belongs to a household, and 500 when
    Heimdall responds without a household_id while a token is configured.
    """
    if session.household_id:
        raise HTTPException(status_code=409, detail="You are already in a household.")
    data = _heimdall_post("/admin/household/create", {"owner_user_id": session.user_id})
    household_id = data.get("household_id")
    if not household_id:
        # Heimdall returned OK but without a household_id — treat as server error.
        # Fall back to a local stub only when HEIMDALL_ADMIN_TOKEN is unset (dev mode).
        if HEIMDALL_ADMIN_TOKEN:
            raise HTTPException(status_code=500, detail="Heimdall did not return a household_id.")
        household_id = "local-household"
    return HouseholdCreateResponse(
        household_id=household_id,
        message="Household created. Share an invite link to add members.",
    )
|
||||
|
||||
|
||||
@router.get("/status", response_model=HouseholdStatusResponse)
async def household_status(session: CloudUser = Depends(_require_premium)):
    """Return current user's household membership status.

    Member details come from Heimdall; any failure there degrades to an
    empty member list instead of an error response.
    """
    if not session.household_id:
        return HouseholdStatusResponse(in_household=False)

    members: list[HouseholdMember] = []
    if HEIMDALL_ADMIN_TOKEN:
        try:
            resp = requests.get(
                f"{HEIMDALL_URL}/admin/household/{session.household_id}",
                headers={"Authorization": f"Bearer {HEIMDALL_ADMIN_TOKEN}"},
                timeout=5,
            )
            if resp.ok:
                raw = resp.json()
                for m in raw.get("members", []):
                    members.append(HouseholdMember(
                        user_id=m["user_id"],
                        joined_at=m.get("joined_at", ""),
                        # Ownership is derived by matching against owner_user_id.
                        is_owner=m["user_id"] == raw.get("owner_user_id"),
                    ))
        except Exception as exc:
            # Best-effort: the member list is informational only.
            log.warning("Could not fetch household members: %s", exc)

    return HouseholdStatusResponse(
        in_household=True,
        household_id=session.household_id,
        is_owner=session.is_household_owner,
        members=members,
    )
|
||||
|
||||
|
||||
@router.post("/invite", response_model=HouseholdInviteResponse)
async def create_invite(session: CloudUser = Depends(_require_household_owner)):
    """Generate a one-time invite token valid for 7 days.

    The token row is written to the session DB's household_invites table and
    is validated there when /accept is called.
    """
    token = secrets.token_hex(32)
    expires_at = (datetime.now(timezone.utc) + timedelta(days=_INVITE_TTL_DAYS)).isoformat()
    store = Store(session.db)
    try:
        store.conn.execute(
            """INSERT INTO household_invites (token, household_id, created_by, expires_at)
               VALUES (?, ?, ?, ?)""",
            (token, session.household_id, session.user_id, expires_at),
        )
        store.conn.commit()
    finally:
        store.close()
    # Deep link the frontend's join screen can parse.
    invite_url = f"{_KIWI_BASE_URL}/#/join?household_id={session.household_id}&token={token}"
    return HouseholdInviteResponse(token=token, invite_url=invite_url, expires_at=expires_at)
|
||||
|
||||
|
||||
@router.post("/accept", response_model=HouseholdAcceptResponse)
async def accept_invite(
    body: HouseholdAcceptRequest,
    session: CloudUser = Depends(get_session),
):
    """Accept a household invite. Opens the household DB directly to validate the token.

    Raises 409 (already in a household), 404 (unknown token), 410 (already
    used or expired). On success the invite is marked used, then Heimdall is
    told to add the member.
    """
    if session.household_id:
        raise HTTPException(status_code=409, detail="You are already in a household.")

    hh_store = _household_store(body.household_id)
    now = datetime.now(timezone.utc).isoformat()
    try:
        row = hh_store.conn.execute(
            """SELECT token, expires_at, used_at FROM household_invites
               WHERE token = ? AND household_id = ?""",
            (body.token, body.household_id),
        ).fetchone()

        if not row:
            raise HTTPException(status_code=404, detail="Invite not found.")
        if row["used_at"] is not None:
            raise HTTPException(status_code=410, detail="Invite already used.")
        # expires_at is written by create_invite with the same UTC isoformat,
        # so a lexicographic string comparison orders timestamps correctly.
        if row["expires_at"] < now:
            raise HTTPException(status_code=410, detail="Invite has expired.")

        # Burn the token before notifying Heimdall so it stays strictly one-time.
        hh_store.conn.execute(
            "UPDATE household_invites SET used_at = ?, used_by = ? WHERE token = ?",
            (now, session.user_id, body.token),
        )
        hh_store.conn.commit()
    finally:
        hh_store.close()

    _heimdall_post("/admin/household/add-member", {
        "household_id": body.household_id,
        "user_id": session.user_id,
    })

    return HouseholdAcceptResponse(
        message="You have joined the household. Reload the app to switch to the shared pantry.",
        household_id=body.household_id,
    )
|
||||
|
||||
|
||||
@router.post("/leave", response_model=MessageResponse)
async def leave_household(session: CloudUser = Depends(_require_premium)) -> MessageResponse:
    """Remove the calling (non-owner) member from their current household."""
    if not session.household_id:
        raise HTTPException(status_code=400, detail="You are not in a household.")
    if session.is_household_owner:
        raise HTTPException(status_code=400, detail="The household owner cannot leave. Delete the household instead.")
    removal = {
        "household_id": session.household_id,
        "user_id": session.user_id,
    }
    _heimdall_post("/admin/household/remove-member", removal)
    return MessageResponse(message="You have left the household. Reload the app to return to your personal pantry.")
|
||||
|
||||
|
||||
@router.post("/remove-member", response_model=MessageResponse)
async def remove_member(
    body: HouseholdRemoveMemberRequest,
    session: CloudUser = Depends(_require_household_owner),
) -> MessageResponse:
    """Evict a member from the household (owner only; self-removal uses /leave)."""
    if body.user_id == session.user_id:
        raise HTTPException(status_code=400, detail="Use /leave to remove yourself.")
    removal = {
        "household_id": session.household_id,
        "user_id": body.user_id,
    }
    _heimdall_post("/admin/household/remove-member", removal)
    return MessageResponse(message=f"Member {body.user_id} removed from household.")
|
||||
|
|
@ -1,185 +0,0 @@
|
|||
"""Kiwi — /api/v1/imitate/samples endpoint for Avocet Imitate tab.
|
||||
|
||||
Returns the actual assembled prompt Kiwi sends to its LLM for recipe generation,
|
||||
including the full pantry context (expiry-first ordering), dietary constraints
|
||||
(from user_settings if present), and the Level 3 format instructions.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from fastapi import APIRouter, Depends
|
||||
|
||||
from app.cloud_session import get_session, CloudUser
|
||||
from app.db.store import Store
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
# Plain-text response template appended to Level 3 (structured) prompts.
_LEVEL3_FORMAT = [
    "",
    "Reply using EXACTLY this plain-text format — no markdown, no bold, no extra commentary:",
    "Title: <name of the dish>",
    "Ingredients: <comma-separated list>",
    "Directions:",
    "1. <first step>",
    "2. <second step>",
    "3. <continue for each step>",
    "Notes: <optional tips>",
]

# Shorter template appended to Level 4 (wildcard creative) prompts.
_LEVEL4_FORMAT = [
    "",
    "Reply using EXACTLY this plain-text format — no markdown, no bold:",
    "Title: <name of the dish>",
    "Ingredients: <comma-separated list>",
    "Directions:",
    "1. <first step>",
    "2. <second step>",
    "Notes: <optional tips>",
]
|
||||
|
||||
|
||||
def _read_user_settings(store: Store) -> dict:
|
||||
"""Read all key/value pairs from user_settings table."""
|
||||
try:
|
||||
rows = store.conn.execute("SELECT key, value FROM user_settings").fetchall()
|
||||
return {r["key"]: r["value"] for r in rows}
|
||||
except Exception:
|
||||
return {}
|
||||
|
||||
|
||||
def _build_recipe_prompt(
    pantry_names: list[str],
    expiring_names: list[str],
    constraints: list[str],
    allergies: list[str],
    level: int = 3,
) -> str:
    """Assemble the recipe generation prompt matching Kiwi's Level 3/4 format."""
    # Expiring items lead the ingredient list; remaining pantry items follow (deduped).
    expiring_lookup = set(expiring_names)
    ordered = list(expiring_names)
    ordered.extend(name for name in pantry_names if name not in expiring_lookup)
    if not ordered:
        ordered = pantry_names

    joined = ", ".join(ordered)
    if level == 4:
        prompt_lines = [
            "Surprise me with a creative, unexpected recipe.",
            "Only use ingredients that make culinary sense together. "
            "Do not force flavoured/sweetened items (vanilla yoghurt, flavoured syrups, jam) into savoury dishes.",
            f"Ingredients available: {joined}",
        ]
        if constraints:
            prompt_lines.append(f"Constraints: {', '.join(constraints)}")
        if allergies:
            prompt_lines.append(f"Must NOT contain: {', '.join(allergies)}")
        prompt_lines.append("Treat any mystery ingredient as a wildcard — use your imagination.")
        prompt_lines.extend(_LEVEL4_FORMAT)
    else:
        prompt_lines = [
            "You are a creative chef. Generate a recipe using the ingredients below.",
            "IMPORTANT: When you use a pantry item, list it in Ingredients using its exact name "
            "from the pantry list. Do not add adjectives, quantities, or cooking states "
            "(e.g. use 'butter', not 'unsalted butter' or '2 tbsp butter').",
            "IMPORTANT: Only use pantry items that make culinary sense for the dish. "
            "Do NOT force flavoured/sweetened items (vanilla yoghurt, fruit yoghurt, jam, "
            "dessert sauces, flavoured syrups) into savoury dishes.",
            "IMPORTANT: Do not default to the same ingredient repeatedly across dishes. "
            "If a pantry item does not genuinely improve this specific dish, leave it out.",
            "",
            f"Pantry items: {joined}",
        ]
        if expiring_names:
            prompt_lines.append(
                f"Priority — use these soon (expiring): {', '.join(expiring_names)}"
            )
        if constraints:
            prompt_lines.append(f"Dietary constraints: {', '.join(constraints)}")
        if allergies:
            prompt_lines.append(f"IMPORTANT — must NOT contain: {', '.join(allergies)}")
        prompt_lines.extend(_LEVEL3_FORMAT)

    return "\n".join(prompt_lines)
|
||||
|
||||
|
||||
@router.get("/samples")
async def imitate_samples(
    limit: int = 5,
    level: int = 3,
    session: CloudUser = Depends(get_session),
):
    """Return assembled recipe generation prompts for Avocet's Imitate tab.

    Each sample includes:
        system_prompt empty (Kiwi uses no system context)
        input_text full Level 3/4 prompt with pantry items, expiring items,
                   dietary constraints, and format instructions
        output_text empty (no prior LLM output stored per-request)

    level: 3 (structured with element biasing context) or 4 (wildcard creative)
    limit: max number of distinct prompt variants to return (varies by pantry state)
    """
    limit = max(1, min(limit, 10))  # clamp to 1..10
    # NOTE(review): this Store handle is never closed, unlike sibling endpoints
    # that close per-request handles — confirm whether that is intentional.
    store = Store(session.db)

    # Full pantry for context
    all_items = store.list_inventory()
    pantry_names = [r["product_name"] for r in all_items if r.get("product_name")]

    # Expiring items as priority ingredients
    expiring = store.expiring_soon(days=14)
    expiring_names = [r["product_name"] for r in expiring if r.get("product_name")]

    # Dietary constraints from user_settings (keys: constraints, allergies)
    settings = _read_user_settings(store)
    import json as _json
    try:
        constraints = _json.loads(settings.get("dietary_constraints", "[]")) or []
    except Exception:
        constraints = []
    try:
        allergies = _json.loads(settings.get("dietary_allergies", "[]")) or []
    except Exception:
        allergies = []

    # Empty pantry: nothing to build prompts from.
    if not pantry_names:
        return {"samples": [], "total": 0, "type": f"recipe_level{level}"}

    # Build prompt variants: one per expiring item as the "anchor" ingredient,
    # plus one general pantry prompt. Cap at limit.
    samples = []
    seen_anchors: set[str] = set()

    for item in (expiring[:limit - 1] if expiring else []):
        anchor = item.get("product_name", "")
        if not anchor or anchor in seen_anchors:
            continue
        seen_anchors.add(anchor)

        # Put this item first in the list for the prompt
        ordered_expiring = [anchor] + [n for n in expiring_names if n != anchor]
        prompt = _build_recipe_prompt(pantry_names, ordered_expiring, constraints, allergies, level)

        samples.append({
            "id": item.get("id", 0),
            "anchor_item": anchor,
            "expiring_count": len(expiring_names),
            "pantry_count": len(pantry_names),
            "system_prompt": "",
            "input_text": prompt,
            "output_text": "",
        })

    # One general prompt using all expiring as priority
    if len(samples) < limit:
        prompt = _build_recipe_prompt(pantry_names, expiring_names, constraints, allergies, level)
        samples.append({
            "id": 0,
            "anchor_item": "full pantry",
            "expiring_count": len(expiring_names),
            "pantry_count": len(pantry_names),
            "system_prompt": "",
            "input_text": prompt,
            "output_text": "",
        })

    return {"samples": samples, "total": len(samples), "type": f"recipe_level{level}"}
|
||||
|
|
@ -3,7 +3,6 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
import uuid
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
|
@ -12,25 +11,15 @@ import aiofiles
|
|||
from fastapi import APIRouter, Depends, File, Form, HTTPException, UploadFile, status
|
||||
from pydantic import BaseModel
|
||||
|
||||
from app.cloud_session import CloudUser, _auth_label, get_session
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
from app.cloud_session import CloudUser, get_session
|
||||
from app.db.session import get_store
|
||||
from app.services.expiration_predictor import ExpirationPredictor
|
||||
|
||||
_predictor = ExpirationPredictor()
|
||||
from app.db.store import Store
|
||||
from app.models.schemas.inventory import (
|
||||
BarcodeScanResponse,
|
||||
BulkAddByNameRequest,
|
||||
BulkAddByNameResponse,
|
||||
BulkAddItemResult,
|
||||
DiscardRequest,
|
||||
InventoryItemCreate,
|
||||
InventoryItemResponse,
|
||||
InventoryItemUpdate,
|
||||
InventoryStats,
|
||||
PartialConsumeRequest,
|
||||
ProductCreate,
|
||||
ProductResponse,
|
||||
ProductUpdate,
|
||||
|
|
@ -41,34 +30,6 @@ from app.models.schemas.inventory import (
|
|||
router = APIRouter()
|
||||
|
||||
|
||||
# ── Helpers ───────────────────────────────────────────────────────────────────
|
||||
|
||||
def _enrich_item(item: dict) -> dict:
    """Attach computed fields: opened_expiry_date, secondary_state/uses/warning.

    Returns a new dict; the input is never mutated (every update builds a
    fresh dict via {**item, ...}).
    """
    from datetime import date, timedelta
    opened = item.get("opened_date")
    if opened:
        days = _predictor.days_after_opening(item.get("category"))
        if days is not None:
            try:
                # opened_date is parsed as ISO (YYYY-MM-DD); an unparsable value
                # just leaves the field unset instead of failing the request.
                opened_expiry = date.fromisoformat(opened) + timedelta(days=days)
                item = {**item, "opened_expiry_date": str(opened_expiry)}
            except ValueError:
                pass
    # Ensure the key always exists (None when not computed above).
    if "opened_expiry_date" not in item:
        item = {**item, "opened_expiry_date": None}

    # Secondary use window — check sell-by date (not opened expiry)
    sec = _predictor.secondary_state(item.get("category"), item.get("expiration_date"))
    item = {
        **item,
        "secondary_state": sec["label"] if sec else None,
        "secondary_uses": sec["uses"] if sec else None,
        "secondary_warning": sec["warning"] if sec else None,
    }
    return item
|
||||
|
||||
|
||||
# ── Products ──────────────────────────────────────────────────────────────────
|
||||
|
||||
@router.post("/products", response_model=ProductResponse, status_code=status.HTTP_201_CREATED)
|
||||
|
|
@ -153,12 +114,7 @@ async def delete_product(product_id: int, store: Store = Depends(get_store)):
|
|||
# ── Inventory items ───────────────────────────────────────────────────────────
|
||||
|
||||
@router.post("/items", response_model=InventoryItemResponse, status_code=status.HTTP_201_CREATED)
|
||||
async def create_inventory_item(
|
||||
body: InventoryItemCreate,
|
||||
store: Store = Depends(get_store),
|
||||
session: CloudUser = Depends(get_session),
|
||||
):
|
||||
log.info("add_item auth=%s tier=%s product_id=%s", _auth_label(session.user_id), session.tier, body.product_id)
|
||||
async def create_inventory_item(body: InventoryItemCreate, store: Store = Depends(get_store)):
|
||||
item = await asyncio.to_thread(
|
||||
store.add_inventory_item,
|
||||
body.product_id,
|
||||
|
|
@ -171,38 +127,7 @@ async def create_inventory_item(
|
|||
notes=body.notes,
|
||||
source=body.source,
|
||||
)
|
||||
# RETURNING * omits joined columns (product_name, barcode, category).
|
||||
# Re-fetch with the products JOIN so the response is fully populated (#99).
|
||||
full_item = await asyncio.to_thread(store.get_inventory_item, item["id"])
|
||||
return InventoryItemResponse.model_validate(full_item)
|
||||
|
||||
|
||||
@router.post("/items/bulk-add-by-name", response_model=BulkAddByNameResponse)
async def bulk_add_items_by_name(body: BulkAddByNameRequest, store: Store = Depends(get_store)):
    """Create pantry items from a list of ingredient names (no barcode required).

    Each name is resolved via get_or_create_product, so re-adding an existing
    product is idempotent; per-item failures are reported, not raised.
    """
    outcomes: list[BulkAddItemResult] = []
    for req in body.items:
        try:
            product, _ = await asyncio.to_thread(
                store.get_or_create_product, req.name, None, source="manual"
            )
            created = await asyncio.to_thread(
                store.add_inventory_item,
                product["id"],
                req.location,
                quantity=req.quantity,
                unit=req.unit,
                source="manual",
            )
            outcomes.append(BulkAddItemResult(name=req.name, ok=True, item_id=created["id"]))
        except Exception as exc:
            outcomes.append(BulkAddItemResult(name=req.name, ok=False, error=str(exc)))

    ok_count = sum(r.ok for r in outcomes)
    return BulkAddByNameResponse(added=ok_count, failed=len(outcomes) - ok_count, results=outcomes)
|
||||
return InventoryItemResponse.model_validate(item)
|
||||
|
||||
|
||||
@router.get("/items", response_model=List[InventoryItemResponse])
|
||||
|
|
@ -212,13 +137,13 @@ async def list_inventory_items(
|
|||
store: Store = Depends(get_store),
|
||||
):
|
||||
items = await asyncio.to_thread(store.list_inventory, location, item_status)
|
||||
return [InventoryItemResponse.model_validate(_enrich_item(i)) for i in items]
|
||||
return [InventoryItemResponse.model_validate(i) for i in items]
|
||||
|
||||
|
||||
@router.get("/items/expiring", response_model=List[InventoryItemResponse])
|
||||
async def get_expiring_items(days: int = 7, store: Store = Depends(get_store)):
|
||||
items = await asyncio.to_thread(store.expiring_soon, days)
|
||||
return [InventoryItemResponse.model_validate(_enrich_item(i)) for i in items]
|
||||
return [InventoryItemResponse.model_validate(i) for i in items]
|
||||
|
||||
|
||||
@router.get("/items/{item_id}", response_model=InventoryItemResponse)
|
||||
|
|
@ -226,7 +151,7 @@ async def get_inventory_item(item_id: int, store: Store = Depends(get_store)):
|
|||
item = await asyncio.to_thread(store.get_inventory_item, item_id)
|
||||
if not item:
|
||||
raise HTTPException(status_code=404, detail="Inventory item not found")
|
||||
return InventoryItemResponse.model_validate(_enrich_item(item))
|
||||
return InventoryItemResponse.model_validate(item)
|
||||
|
||||
|
||||
@router.patch("/items/{item_id}", response_model=InventoryItemResponse)
|
||||
|
|
@ -238,79 +163,24 @@ async def update_inventory_item(
|
|||
updates["purchase_date"] = str(updates["purchase_date"])
|
||||
if "expiration_date" in updates and updates["expiration_date"]:
|
||||
updates["expiration_date"] = str(updates["expiration_date"])
|
||||
if "opened_date" in updates and updates["opened_date"]:
|
||||
updates["opened_date"] = str(updates["opened_date"])
|
||||
item = await asyncio.to_thread(store.update_inventory_item, item_id, **updates)
|
||||
if not item:
|
||||
raise HTTPException(status_code=404, detail="Inventory item not found")
|
||||
return InventoryItemResponse.model_validate(_enrich_item(item))
|
||||
|
||||
|
||||
@router.post("/items/{item_id}/open", response_model=InventoryItemResponse)
|
||||
async def mark_item_opened(item_id: int, store: Store = Depends(get_store)):
|
||||
"""Record that this item was opened today, triggering secondary shelf-life tracking."""
|
||||
from datetime import date
|
||||
item = await asyncio.to_thread(
|
||||
store.update_inventory_item,
|
||||
item_id,
|
||||
opened_date=str(date.today()),
|
||||
)
|
||||
if not item:
|
||||
raise HTTPException(status_code=404, detail="Inventory item not found")
|
||||
return InventoryItemResponse.model_validate(_enrich_item(item))
|
||||
return InventoryItemResponse.model_validate(item)
|
||||
|
||||
|
||||
@router.post("/items/{item_id}/consume", response_model=InventoryItemResponse)
|
||||
async def consume_item(
|
||||
item_id: int,
|
||||
body: Optional[PartialConsumeRequest] = None,
|
||||
store: Store = Depends(get_store),
|
||||
):
|
||||
"""Consume an inventory item fully or partially.
|
||||
|
||||
When body.quantity is provided, decrements by that amount and only marks
|
||||
status=consumed when quantity reaches zero. Omit body to consume all.
|
||||
"""
|
||||
from datetime import datetime, timezone
|
||||
now = datetime.now(timezone.utc).isoformat()
|
||||
if body is not None:
|
||||
item = await asyncio.to_thread(
|
||||
store.partial_consume_item, item_id, body.quantity, now
|
||||
)
|
||||
else:
|
||||
item = await asyncio.to_thread(
|
||||
store.update_inventory_item,
|
||||
item_id,
|
||||
status="consumed",
|
||||
consumed_at=now,
|
||||
)
|
||||
if not item:
|
||||
raise HTTPException(status_code=404, detail="Inventory item not found")
|
||||
return InventoryItemResponse.model_validate(_enrich_item(item))
|
||||
|
||||
|
||||
@router.post("/items/{item_id}/discard", response_model=InventoryItemResponse)
|
||||
async def discard_item(
|
||||
item_id: int,
|
||||
body: DiscardRequest = DiscardRequest(),
|
||||
store: Store = Depends(get_store),
|
||||
):
|
||||
"""Mark an item as discarded (not used, spoiled, etc).
|
||||
|
||||
Optional reason field accepts free text or a preset label
|
||||
('not used', 'spoiled', 'excess', 'other').
|
||||
"""
|
||||
async def consume_item(item_id: int, store: Store = Depends(get_store)):
|
||||
from datetime import datetime, timezone
|
||||
item = await asyncio.to_thread(
|
||||
store.update_inventory_item,
|
||||
item_id,
|
||||
status="discarded",
|
||||
status="consumed",
|
||||
consumed_at=datetime.now(timezone.utc).isoformat(),
|
||||
disposal_reason=body.reason,
|
||||
)
|
||||
if not item:
|
||||
raise HTTPException(status_code=404, detail="Inventory item not found")
|
||||
return InventoryItemResponse.model_validate(_enrich_item(item))
|
||||
return InventoryItemResponse.model_validate(item)
|
||||
|
||||
|
||||
@router.delete("/items/{item_id}", status_code=status.HTTP_204_NO_CONTENT)
|
||||
|
|
@ -340,7 +210,6 @@ async def scan_barcode_text(
|
|||
session: CloudUser = Depends(get_session),
|
||||
):
|
||||
"""Scan a barcode from a text string (e.g. from a hardware scanner or manual entry)."""
|
||||
log.info("scan auth=%s tier=%s barcode=%r", _auth_label(session.user_id), session.tier, body.barcode)
|
||||
from app.services.openfoodfacts import OpenFoodFactsService
|
||||
from app.services.expiration_predictor import ExpirationPredictor
|
||||
|
||||
|
|
@ -367,14 +236,10 @@ async def scan_barcode_text(
|
|||
tier=session.tier,
|
||||
has_byok=session.has_byok,
|
||||
)
|
||||
# Use OFFs pack size when detected; caller-supplied quantity is a fallback
|
||||
resolved_qty = product_info.get("pack_quantity") or body.quantity
|
||||
resolved_unit = product_info.get("pack_unit") or "count"
|
||||
inventory_item = await asyncio.to_thread(
|
||||
store.add_inventory_item,
|
||||
product["id"], body.location,
|
||||
quantity=resolved_qty,
|
||||
unit=resolved_unit,
|
||||
quantity=body.quantity,
|
||||
expiration_date=str(exp) if exp else None,
|
||||
source="barcode_scan",
|
||||
)
|
||||
|
|
@ -382,7 +247,6 @@ async def scan_barcode_text(
|
|||
else:
|
||||
result_product = None
|
||||
|
||||
product_found = product_info is not None
|
||||
return BarcodeScanResponse(
|
||||
success=True,
|
||||
barcodes_found=1,
|
||||
|
|
@ -392,8 +256,7 @@ async def scan_barcode_text(
|
|||
"product": result_product,
|
||||
"inventory_item": InventoryItemResponse.model_validate(inventory_item) if inventory_item else None,
|
||||
"added_to_inventory": inventory_item is not None,
|
||||
"needs_manual_entry": not product_found,
|
||||
"message": "Added to inventory" if inventory_item else "Not found in any product database — add manually",
|
||||
"message": "Added to inventory" if inventory_item else "Product not found in database",
|
||||
}],
|
||||
message="Barcode processed",
|
||||
)
|
||||
|
|
@ -409,7 +272,6 @@ async def scan_barcode_image(
|
|||
session: CloudUser = Depends(get_session),
|
||||
):
|
||||
"""Scan a barcode from an uploaded image. Requires Phase 2 scanner integration."""
|
||||
log.info("scan_image auth=%s tier=%s", _auth_label(session.user_id), session.tier)
|
||||
temp_dir = Path("/tmp/kiwi_barcode_scans")
|
||||
temp_dir.mkdir(parents=True, exist_ok=True)
|
||||
temp_file = temp_dir / f"{uuid.uuid4()}_{file.filename}"
|
||||
|
|
@ -452,13 +314,10 @@ async def scan_barcode_image(
|
|||
tier=session.tier,
|
||||
has_byok=session.has_byok,
|
||||
)
|
||||
resolved_qty = product_info.get("pack_quantity") or quantity
|
||||
resolved_unit = product_info.get("pack_unit") or "count"
|
||||
inventory_item = await asyncio.to_thread(
|
||||
store.add_inventory_item,
|
||||
product["id"], location,
|
||||
quantity=resolved_qty,
|
||||
unit=resolved_unit,
|
||||
quantity=quantity,
|
||||
expiration_date=str(exp) if exp else None,
|
||||
source="barcode_scan",
|
||||
)
|
||||
|
|
@ -510,23 +369,6 @@ async def list_tags(
|
|||
|
||||
# ── Stats ─────────────────────────────────────────────────────────────────────
|
||||
|
||||
@router.post("/recalculate-expiry")
|
||||
async def recalculate_expiry(
|
||||
session: CloudUser = Depends(get_session),
|
||||
store: Store = Depends(get_store),
|
||||
) -> dict:
|
||||
"""Re-run the expiration predictor over all available inventory items.
|
||||
|
||||
Uses each item's stored purchase_date and current location. Safe to call
|
||||
multiple times — idempotent per session.
|
||||
"""
|
||||
def _run(s: Store) -> tuple[int, int]:
|
||||
return s.recalculate_expiry(tier=session.tier, has_byok=session.has_byok)
|
||||
|
||||
updated, skipped = await asyncio.to_thread(_run, store)
|
||||
return {"updated": updated, "skipped": skipped}
|
||||
|
||||
|
||||
@router.get("/stats", response_model=InventoryStats)
|
||||
async def get_inventory_stats(store: Store = Depends(get_store)):
|
||||
def _stats():
|
||||
|
|
|
|||
|
|
@ -1,325 +0,0 @@
|
|||
# app/api/endpoints/meal_plans.py
|
||||
"""Meal plan CRUD, shopping list, and prep session endpoints."""
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import json
|
||||
from datetime import date
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException
|
||||
|
||||
from app.cloud_session import CloudUser, get_session
|
||||
from app.db.session import get_store
|
||||
from app.db.store import Store
|
||||
from app.models.schemas.meal_plan import (
|
||||
CreatePlanRequest,
|
||||
GapItem,
|
||||
PlanSummary,
|
||||
PrepSessionSummary,
|
||||
PrepTaskSummary,
|
||||
ShoppingListResponse,
|
||||
SlotSummary,
|
||||
UpdatePlanRequest,
|
||||
UpdatePrepTaskRequest,
|
||||
UpsertSlotRequest,
|
||||
VALID_MEAL_TYPES,
|
||||
)
|
||||
from app.services.meal_plan.affiliates import get_retailer_links
|
||||
from app.services.meal_plan.prep_scheduler import build_prep_tasks
|
||||
from app.services.meal_plan.shopping_list import compute_shopping_list
|
||||
from app.tiers import can_use
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
# ── helpers ───────────────────────────────────────────────────────────────────
|
||||
|
||||
def _slot_summary(row: dict) -> SlotSummary:
|
||||
return SlotSummary(
|
||||
id=row["id"],
|
||||
plan_id=row["plan_id"],
|
||||
day_of_week=row["day_of_week"],
|
||||
meal_type=row["meal_type"],
|
||||
recipe_id=row.get("recipe_id"),
|
||||
recipe_title=row.get("recipe_title"),
|
||||
servings=row["servings"],
|
||||
custom_label=row.get("custom_label"),
|
||||
)
|
||||
|
||||
|
||||
def _plan_summary(plan: dict, slots: list[dict]) -> PlanSummary:
|
||||
meal_types = plan.get("meal_types") or ["dinner"]
|
||||
if isinstance(meal_types, str):
|
||||
meal_types = json.loads(meal_types)
|
||||
return PlanSummary(
|
||||
id=plan["id"],
|
||||
week_start=plan["week_start"],
|
||||
meal_types=meal_types,
|
||||
slots=[_slot_summary(s) for s in slots],
|
||||
created_at=plan["created_at"],
|
||||
)
|
||||
|
||||
|
||||
def _prep_task_summary(row: dict) -> PrepTaskSummary:
|
||||
return PrepTaskSummary(
|
||||
id=row["id"],
|
||||
recipe_id=row.get("recipe_id"),
|
||||
task_label=row["task_label"],
|
||||
duration_minutes=row.get("duration_minutes"),
|
||||
sequence_order=row["sequence_order"],
|
||||
equipment=row.get("equipment"),
|
||||
is_parallel=bool(row.get("is_parallel", False)),
|
||||
notes=row.get("notes"),
|
||||
user_edited=bool(row.get("user_edited", False)),
|
||||
)
|
||||
|
||||
|
||||
# ── plan CRUD ─────────────────────────────────────────────────────────────────
|
||||
|
||||
@router.post("/", response_model=PlanSummary)
|
||||
async def create_plan(
|
||||
req: CreatePlanRequest,
|
||||
session: CloudUser = Depends(get_session),
|
||||
store: Store = Depends(get_store),
|
||||
) -> PlanSummary:
|
||||
import sqlite3
|
||||
|
||||
# Free tier is locked to dinner-only; paid+ may configure meal types
|
||||
if can_use("meal_plan_config", session.tier):
|
||||
meal_types = [t for t in req.meal_types if t in VALID_MEAL_TYPES] or ["dinner"]
|
||||
else:
|
||||
meal_types = ["dinner"]
|
||||
|
||||
try:
|
||||
plan = await asyncio.to_thread(store.create_meal_plan, str(req.week_start), meal_types)
|
||||
except sqlite3.IntegrityError:
|
||||
raise HTTPException(
|
||||
status_code=409,
|
||||
detail=f"A meal plan for the week of {req.week_start} already exists.",
|
||||
)
|
||||
slots = await asyncio.to_thread(store.get_plan_slots, plan["id"])
|
||||
return _plan_summary(plan, slots)
|
||||
|
||||
|
||||
@router.get("/", response_model=list[PlanSummary])
|
||||
async def list_plans(
|
||||
session: CloudUser = Depends(get_session),
|
||||
store: Store = Depends(get_store),
|
||||
) -> list[PlanSummary]:
|
||||
plans = await asyncio.to_thread(store.list_meal_plans)
|
||||
result = []
|
||||
for p in plans:
|
||||
slots = await asyncio.to_thread(store.get_plan_slots, p["id"])
|
||||
result.append(_plan_summary(p, slots))
|
||||
return result
|
||||
|
||||
|
||||
@router.patch("/{plan_id}", response_model=PlanSummary)
|
||||
async def update_plan(
|
||||
plan_id: int,
|
||||
req: UpdatePlanRequest,
|
||||
session: CloudUser = Depends(get_session),
|
||||
store: Store = Depends(get_store),
|
||||
) -> PlanSummary:
|
||||
plan = await asyncio.to_thread(store.get_meal_plan, plan_id)
|
||||
if plan is None:
|
||||
raise HTTPException(status_code=404, detail="Plan not found.")
|
||||
# Free tier stays dinner-only; paid+ may add meal types
|
||||
if can_use("meal_plan_config", session.tier):
|
||||
meal_types = [t for t in req.meal_types if t in VALID_MEAL_TYPES] or ["dinner"]
|
||||
else:
|
||||
meal_types = ["dinner"]
|
||||
updated = await asyncio.to_thread(store.update_meal_plan_types, plan_id, meal_types)
|
||||
if updated is None:
|
||||
raise HTTPException(status_code=404, detail="Plan not found.")
|
||||
slots = await asyncio.to_thread(store.get_plan_slots, plan_id)
|
||||
return _plan_summary(updated, slots)
|
||||
|
||||
|
||||
@router.get("/{plan_id}", response_model=PlanSummary)
|
||||
async def get_plan(
|
||||
plan_id: int,
|
||||
session: CloudUser = Depends(get_session),
|
||||
store: Store = Depends(get_store),
|
||||
) -> PlanSummary:
|
||||
plan = await asyncio.to_thread(store.get_meal_plan, plan_id)
|
||||
if plan is None:
|
||||
raise HTTPException(status_code=404, detail="Plan not found.")
|
||||
slots = await asyncio.to_thread(store.get_plan_slots, plan_id)
|
||||
return _plan_summary(plan, slots)
|
||||
|
||||
|
||||
# ── slots ─────────────────────────────────────────────────────────────────────
|
||||
|
||||
@router.put("/{plan_id}/slots/{day_of_week}/{meal_type}", response_model=SlotSummary)
|
||||
async def upsert_slot(
|
||||
plan_id: int,
|
||||
day_of_week: int,
|
||||
meal_type: str,
|
||||
req: UpsertSlotRequest,
|
||||
session: CloudUser = Depends(get_session),
|
||||
store: Store = Depends(get_store),
|
||||
) -> SlotSummary:
|
||||
if day_of_week < 0 or day_of_week > 6:
|
||||
raise HTTPException(status_code=422, detail="day_of_week must be 0-6.")
|
||||
if meal_type not in VALID_MEAL_TYPES:
|
||||
raise HTTPException(status_code=422, detail=f"Invalid meal_type '{meal_type}'.")
|
||||
plan = await asyncio.to_thread(store.get_meal_plan, plan_id)
|
||||
if plan is None:
|
||||
raise HTTPException(status_code=404, detail="Plan not found.")
|
||||
row = await asyncio.to_thread(
|
||||
store.upsert_slot,
|
||||
plan_id, day_of_week, meal_type,
|
||||
req.recipe_id, req.servings, req.custom_label,
|
||||
)
|
||||
return _slot_summary(row)
|
||||
|
||||
|
||||
@router.delete("/{plan_id}/slots/{slot_id}", status_code=204)
|
||||
async def delete_slot(
|
||||
plan_id: int,
|
||||
slot_id: int,
|
||||
session: CloudUser = Depends(get_session),
|
||||
store: Store = Depends(get_store),
|
||||
) -> None:
|
||||
plan = await asyncio.to_thread(store.get_meal_plan, plan_id)
|
||||
if plan is None:
|
||||
raise HTTPException(status_code=404, detail="Plan not found.")
|
||||
await asyncio.to_thread(store.delete_slot, slot_id)
|
||||
|
||||
|
||||
# ── shopping list ─────────────────────────────────────────────────────────────
|
||||
|
||||
@router.get("/{plan_id}/shopping-list", response_model=ShoppingListResponse)
|
||||
async def get_shopping_list(
|
||||
plan_id: int,
|
||||
session: CloudUser = Depends(get_session),
|
||||
store: Store = Depends(get_store),
|
||||
) -> ShoppingListResponse:
|
||||
plan = await asyncio.to_thread(store.get_meal_plan, plan_id)
|
||||
if plan is None:
|
||||
raise HTTPException(status_code=404, detail="Plan not found.")
|
||||
|
||||
recipes = await asyncio.to_thread(store.get_plan_recipes, plan_id)
|
||||
inventory = await asyncio.to_thread(store.list_inventory)
|
||||
|
||||
gaps, covered = compute_shopping_list(recipes, inventory)
|
||||
|
||||
# Enrich gap items with retailer links
|
||||
def _to_schema(item, enrich: bool) -> GapItem:
|
||||
links = get_retailer_links(item.ingredient_name) if enrich else []
|
||||
return GapItem(
|
||||
ingredient_name=item.ingredient_name,
|
||||
needed_raw=item.needed_raw,
|
||||
have_quantity=item.have_quantity,
|
||||
have_unit=item.have_unit,
|
||||
covered=item.covered,
|
||||
retailer_links=links,
|
||||
)
|
||||
|
||||
gap_items = [_to_schema(g, enrich=True) for g in gaps]
|
||||
covered_items = [_to_schema(c, enrich=False) for c in covered]
|
||||
|
||||
disclosure = (
|
||||
"Some links may be affiliate links. Purchases through them support Kiwi development."
|
||||
if gap_items else None
|
||||
)
|
||||
|
||||
return ShoppingListResponse(
|
||||
plan_id=plan_id,
|
||||
gap_items=gap_items,
|
||||
covered_items=covered_items,
|
||||
disclosure=disclosure,
|
||||
)
|
||||
|
||||
|
||||
# ── prep session ──────────────────────────────────────────────────────────────
|
||||
|
||||
@router.get("/{plan_id}/prep-session", response_model=PrepSessionSummary)
|
||||
async def get_prep_session(
|
||||
plan_id: int,
|
||||
session: CloudUser = Depends(get_session),
|
||||
store: Store = Depends(get_store),
|
||||
) -> PrepSessionSummary:
|
||||
plan = await asyncio.to_thread(store.get_meal_plan, plan_id)
|
||||
if plan is None:
|
||||
raise HTTPException(status_code=404, detail="Plan not found.")
|
||||
prep_session = await asyncio.to_thread(store.get_prep_session_for_plan, plan_id)
|
||||
if prep_session is None:
|
||||
raise HTTPException(status_code=404, detail="No prep session for this plan.")
|
||||
raw_tasks = await asyncio.to_thread(store.get_prep_tasks, prep_session["id"])
|
||||
return PrepSessionSummary(
|
||||
id=prep_session["id"],
|
||||
plan_id=plan_id,
|
||||
scheduled_date=prep_session["scheduled_date"],
|
||||
status=prep_session["status"],
|
||||
tasks=[_prep_task_summary(t) for t in raw_tasks],
|
||||
)
|
||||
|
||||
|
||||
@router.post("/{plan_id}/prep-session", response_model=PrepSessionSummary)
|
||||
async def create_prep_session(
|
||||
plan_id: int,
|
||||
session: CloudUser = Depends(get_session),
|
||||
store: Store = Depends(get_store),
|
||||
) -> PrepSessionSummary:
|
||||
plan = await asyncio.to_thread(store.get_meal_plan, plan_id)
|
||||
if plan is None:
|
||||
raise HTTPException(status_code=404, detail="Plan not found.")
|
||||
|
||||
slots = await asyncio.to_thread(store.get_plan_slots, plan_id)
|
||||
recipes = await asyncio.to_thread(store.get_plan_recipes, plan_id)
|
||||
prep_tasks = build_prep_tasks(slots=slots, recipes=recipes)
|
||||
|
||||
scheduled_date = date.today().isoformat()
|
||||
prep_session = await asyncio.to_thread(
|
||||
store.create_prep_session, plan_id, scheduled_date
|
||||
)
|
||||
session_id = prep_session["id"]
|
||||
|
||||
task_dicts = [
|
||||
{
|
||||
"recipe_id": t.recipe_id,
|
||||
"slot_id": t.slot_id,
|
||||
"task_label": t.task_label,
|
||||
"duration_minutes": t.duration_minutes,
|
||||
"sequence_order": t.sequence_order,
|
||||
"equipment": t.equipment,
|
||||
"is_parallel": t.is_parallel,
|
||||
"notes": t.notes,
|
||||
}
|
||||
for t in prep_tasks
|
||||
]
|
||||
inserted = await asyncio.to_thread(store.bulk_insert_prep_tasks, session_id, task_dicts)
|
||||
|
||||
return PrepSessionSummary(
|
||||
id=prep_session["id"],
|
||||
plan_id=prep_session["plan_id"],
|
||||
scheduled_date=prep_session["scheduled_date"],
|
||||
status=prep_session["status"],
|
||||
tasks=[_prep_task_summary(r) for r in inserted],
|
||||
)
|
||||
|
||||
|
||||
@router.patch(
|
||||
"/{plan_id}/prep-session/tasks/{task_id}",
|
||||
response_model=PrepTaskSummary,
|
||||
)
|
||||
async def update_prep_task(
|
||||
plan_id: int,
|
||||
task_id: int,
|
||||
req: UpdatePrepTaskRequest,
|
||||
session: CloudUser = Depends(get_session),
|
||||
store: Store = Depends(get_store),
|
||||
) -> PrepTaskSummary:
|
||||
updated = await asyncio.to_thread(
|
||||
store.update_prep_task,
|
||||
task_id,
|
||||
duration_minutes=req.duration_minutes,
|
||||
sequence_order=req.sequence_order,
|
||||
notes=req.notes,
|
||||
equipment=req.equipment,
|
||||
)
|
||||
if updated is None:
|
||||
raise HTTPException(status_code=404, detail="Task not found.")
|
||||
return _prep_task_summary(updated)
|
||||
|
|
@ -219,7 +219,7 @@ def _commit_items(
|
|||
receipt_id=receipt_id,
|
||||
purchase_date=str(purchase_date) if purchase_date else None,
|
||||
expiration_date=str(exp) if exp else None,
|
||||
source="receipt",
|
||||
source="receipt_ocr",
|
||||
)
|
||||
|
||||
created.append(ApprovedInventoryItem(
|
||||
|
|
|
|||
|
|
@ -1,27 +0,0 @@
|
|||
"""Proxy endpoint: exposes cf-orch call budget to the Kiwi frontend.
|
||||
|
||||
Only lifetime/founders users have a license_key — subscription and free
|
||||
users receive null (no budget UI shown).
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from fastapi import APIRouter, Depends
|
||||
|
||||
from app.cloud_session import CloudUser, get_session
|
||||
from app.services.heimdall_orch import get_orch_usage
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
@router.get("")
|
||||
async def orch_usage_endpoint(
|
||||
session: CloudUser = Depends(get_session),
|
||||
) -> dict | None:
|
||||
"""Return the current period's orch usage for the authenticated user.
|
||||
|
||||
Returns null if the user has no lifetime/founders license key (i.e. they
|
||||
are on a subscription or free plan — no budget cap applies to them).
|
||||
"""
|
||||
if session.license_key is None:
|
||||
return None
|
||||
return get_orch_usage(session.license_key, "kiwi")
|
||||
|
|
@ -42,11 +42,9 @@ async def upload_receipt(
|
|||
)
|
||||
# Only queue OCR if the feature is enabled server-side AND the user's tier allows it.
|
||||
# Check tier here, not inside the background task — once dispatched it can't be cancelled.
|
||||
# Pass session.db (a Path) rather than store — the store dependency closes before
|
||||
# background tasks run, so the task opens its own store from the DB path.
|
||||
ocr_allowed = settings.ENABLE_OCR and can_use("receipt_ocr", session.tier, session.has_byok)
|
||||
if ocr_allowed:
|
||||
background_tasks.add_task(_process_receipt_ocr, receipt["id"], saved, session.db)
|
||||
background_tasks.add_task(_process_receipt_ocr, receipt["id"], saved, store)
|
||||
return ReceiptResponse.model_validate(receipt)
|
||||
|
||||
|
||||
|
|
@ -66,7 +64,7 @@ async def upload_receipts_batch(
|
|||
store.create_receipt, file.filename, str(saved)
|
||||
)
|
||||
if ocr_allowed:
|
||||
background_tasks.add_task(_process_receipt_ocr, receipt["id"], saved, session.db)
|
||||
background_tasks.add_task(_process_receipt_ocr, receipt["id"], saved, store)
|
||||
results.append(ReceiptResponse.model_validate(receipt))
|
||||
return results
|
||||
|
||||
|
|
@ -99,13 +97,8 @@ async def get_receipt_quality(receipt_id: int, store: Store = Depends(get_store)
|
|||
return QualityAssessment.model_validate(qa)
|
||||
|
||||
|
||||
async def _process_receipt_ocr(receipt_id: int, image_path: Path, db_path: Path) -> None:
|
||||
"""Background task: run OCR pipeline on an uploaded receipt.
|
||||
|
||||
Accepts db_path (not a Store instance) because FastAPI closes the request-scoped
|
||||
store before background tasks execute. This task owns its store lifecycle.
|
||||
"""
|
||||
store = Store(db_path)
|
||||
async def _process_receipt_ocr(receipt_id: int, image_path: Path, store: Store) -> None:
|
||||
"""Background task: run OCR pipeline on an uploaded receipt."""
|
||||
try:
|
||||
await asyncio.to_thread(store.update_receipt_status, receipt_id, "processing")
|
||||
from app.services.receipt_service import ReceiptService
|
||||
|
|
@ -115,5 +108,3 @@ async def _process_receipt_ocr(receipt_id: int, image_path: Path, db_path: Path)
|
|||
await asyncio.to_thread(
|
||||
store.update_receipt_status, receipt_id, "error", str(exc)
|
||||
)
|
||||
finally:
|
||||
store.close()
|
||||
|
|
|
|||
|
|
@ -1,409 +0,0 @@
|
|||
"""Recipe suggestion and browser endpoints."""
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
from pathlib import Path
|
||||
from typing import Annotated
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query
|
||||
|
||||
from app.cloud_session import CloudUser, _auth_label, get_session
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
from app.db.session import get_store
|
||||
from app.db.store import Store
|
||||
from app.models.schemas.recipe import (
|
||||
AssemblyTemplateOut,
|
||||
BuildRequest,
|
||||
RecipeJobStatus,
|
||||
RecipeRequest,
|
||||
RecipeResult,
|
||||
RecipeSuggestion,
|
||||
RoleCandidatesResponse,
|
||||
)
|
||||
from app.services.recipe.assembly_recipes import (
|
||||
build_from_selection,
|
||||
get_role_candidates,
|
||||
get_templates_for_api,
|
||||
)
|
||||
from app.services.recipe.browser_domains import (
|
||||
DOMAINS,
|
||||
category_has_subcategories,
|
||||
get_category_names,
|
||||
get_domain_labels,
|
||||
get_keywords_for_category,
|
||||
get_keywords_for_subcategory,
|
||||
get_subcategory_names,
|
||||
)
|
||||
from app.services.recipe.recipe_engine import RecipeEngine
|
||||
from app.services.heimdall_orch import check_orch_budget
|
||||
from app.tiers import can_use
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
def _suggest_in_thread(db_path: Path, req: RecipeRequest) -> RecipeResult:
|
||||
"""Run recipe suggestion in a worker thread with its own Store connection.
|
||||
|
||||
SQLite connections cannot be shared across threads. This function creates
|
||||
a fresh Store (and therefore a fresh sqlite3.Connection) in the same thread
|
||||
where it will be used, avoiding ProgrammingError: SQLite objects created in
|
||||
a thread can only be used in that same thread.
|
||||
"""
|
||||
store = Store(db_path)
|
||||
try:
|
||||
return RecipeEngine(store).suggest(req)
|
||||
finally:
|
||||
store.close()
|
||||
|
||||
|
||||
async def _enqueue_recipe_job(session: CloudUser, req: RecipeRequest):
|
||||
"""Queue an async recipe_llm job and return 202 with job_id.
|
||||
|
||||
Falls back to synchronous generation in CLOUD_MODE (scheduler polls only
|
||||
the shared settings DB, not per-user DBs — see snipe#45 / kiwi backlog).
|
||||
"""
|
||||
import json
|
||||
import uuid
|
||||
from fastapi.responses import JSONResponse
|
||||
from app.cloud_session import CLOUD_MODE
|
||||
from app.tasks.runner import insert_task
|
||||
|
||||
if CLOUD_MODE:
|
||||
log.warning("recipe_llm async jobs not supported in CLOUD_MODE — falling back to sync")
|
||||
result = await asyncio.to_thread(_suggest_in_thread, session.db, req)
|
||||
return result
|
||||
|
||||
job_id = f"rec_{uuid.uuid4().hex}"
|
||||
|
||||
def _create(db_path: Path) -> int:
|
||||
store = Store(db_path)
|
||||
try:
|
||||
row = store.create_recipe_job(job_id, session.user_id, req.model_dump_json())
|
||||
return row["id"]
|
||||
finally:
|
||||
store.close()
|
||||
|
||||
int_id = await asyncio.to_thread(_create, session.db)
|
||||
params_json = json.dumps({"job_id": job_id})
|
||||
task_id, is_new = insert_task(session.db, "recipe_llm", int_id, params=params_json)
|
||||
if is_new:
|
||||
from app.tasks.scheduler import get_scheduler
|
||||
get_scheduler(session.db).enqueue(task_id, "recipe_llm", int_id, params_json)
|
||||
|
||||
return JSONResponse(content={"job_id": job_id, "status": "queued"}, status_code=202)
|
||||
|
||||
|
||||
@router.post("/suggest")
|
||||
async def suggest_recipes(
|
||||
req: RecipeRequest,
|
||||
async_mode: bool = Query(default=False, alias="async"),
|
||||
session: CloudUser = Depends(get_session),
|
||||
store: Store = Depends(get_store),
|
||||
):
|
||||
log.info("recipes auth=%s tier=%s level=%s", _auth_label(session.user_id), session.tier, req.level)
|
||||
# Inject session-authoritative tier/byok immediately — client-supplied values are ignored.
|
||||
# Also read stored unit_system preference; default to metric if not set.
|
||||
unit_system = store.get_setting("unit_system") or "metric"
|
||||
req = req.model_copy(update={"tier": session.tier, "has_byok": session.has_byok, "unit_system": unit_system})
|
||||
if req.level == 4 and not req.wildcard_confirmed:
|
||||
raise HTTPException(
|
||||
status_code=400,
|
||||
detail="Level 4 (Wildcard) requires wildcard_confirmed=true.",
|
||||
)
|
||||
if req.level in (3, 4) and not can_use("recipe_suggestions", req.tier, req.has_byok):
|
||||
raise HTTPException(
|
||||
status_code=403,
|
||||
detail="LLM recipe levels require Paid tier or a configured LLM backend.",
|
||||
)
|
||||
if req.style_id and not can_use("style_picker", req.tier):
|
||||
raise HTTPException(status_code=403, detail="Style picker requires Paid tier.")
|
||||
|
||||
# Orch budget check for lifetime/founders keys — downgrade to L2 (local) if exhausted.
|
||||
# Subscription and local/BYOK users skip this check entirely.
|
||||
orch_fallback = False
|
||||
if (
|
||||
req.level in (3, 4)
|
||||
and session.license_key is not None
|
||||
and not session.has_byok
|
||||
and session.tier != "local"
|
||||
):
|
||||
budget = check_orch_budget(session.license_key, "kiwi")
|
||||
if not budget.get("allowed", True):
|
||||
req = req.model_copy(update={"level": 2})
|
||||
orch_fallback = True
|
||||
|
||||
if req.level in (3, 4) and async_mode:
|
||||
return await _enqueue_recipe_job(session, req)
|
||||
|
||||
result = await asyncio.to_thread(_suggest_in_thread, session.db, req)
|
||||
if orch_fallback:
|
||||
result = result.model_copy(update={"orch_fallback": True})
|
||||
return result
|
||||
|
||||
|
||||
@router.get("/jobs/{job_id}", response_model=RecipeJobStatus)
|
||||
async def get_recipe_job_status(
|
||||
job_id: str,
|
||||
session: CloudUser = Depends(get_session),
|
||||
) -> RecipeJobStatus:
|
||||
"""Poll the status of an async recipe generation job.
|
||||
|
||||
Returns 404 when job_id is unknown or belongs to a different user.
|
||||
On status='done' with suggestions=[], the LLM returned empty — client
|
||||
should show a 'no recipe generated, try again' message.
|
||||
"""
|
||||
def _get(db_path: Path) -> dict | None:
|
||||
store = Store(db_path)
|
||||
try:
|
||||
return store.get_recipe_job(job_id, session.user_id)
|
||||
finally:
|
||||
store.close()
|
||||
|
||||
row = await asyncio.to_thread(_get, session.db)
|
||||
if row is None:
|
||||
raise HTTPException(status_code=404, detail="Job not found.")
|
||||
|
||||
result = None
|
||||
if row["status"] == "done" and row["result"]:
|
||||
result = RecipeResult.model_validate_json(row["result"])
|
||||
|
||||
return RecipeJobStatus(
|
||||
job_id=row["job_id"],
|
||||
status=row["status"],
|
||||
result=result,
|
||||
error=row["error"],
|
||||
)
|
||||
|
||||
|
||||
@router.get("/browse/domains")
|
||||
async def list_browse_domains(
|
||||
session: CloudUser = Depends(get_session),
|
||||
) -> list[dict]:
|
||||
"""Return available domain schemas for the recipe browser."""
|
||||
return get_domain_labels()
|
||||
|
||||
|
||||
@router.get("/browse/{domain}")
|
||||
async def list_browse_categories(
|
||||
domain: str,
|
||||
session: CloudUser = Depends(get_session),
|
||||
) -> list[dict]:
|
||||
"""Return categories with recipe counts for a given domain."""
|
||||
if domain not in DOMAINS:
|
||||
raise HTTPException(status_code=404, detail=f"Unknown domain '{domain}'.")
|
||||
|
||||
cat_names = get_category_names(domain)
|
||||
keywords_by_category = {cat: get_keywords_for_category(domain, cat) for cat in cat_names}
|
||||
has_subs = {cat: category_has_subcategories(domain, cat) for cat in cat_names}
|
||||
|
||||
def _get(db_path: Path) -> list[dict]:
|
||||
store = Store(db_path)
|
||||
try:
|
||||
return store.get_browser_categories(domain, keywords_by_category, has_subs)
|
||||
finally:
|
||||
store.close()
|
||||
|
||||
return await asyncio.to_thread(_get, session.db)
|
||||
|
||||
|
||||
@router.get("/browse/{domain}/{category}/subcategories")
|
||||
async def list_browse_subcategories(
|
||||
domain: str,
|
||||
category: str,
|
||||
session: CloudUser = Depends(get_session),
|
||||
) -> list[dict]:
|
||||
"""Return [{subcategory, recipe_count}] for a category that supports subcategories."""
|
||||
if domain not in DOMAINS:
|
||||
raise HTTPException(status_code=404, detail=f"Unknown domain '{domain}'.")
|
||||
if not category_has_subcategories(domain, category):
|
||||
return []
|
||||
|
||||
subcat_names = get_subcategory_names(domain, category)
|
||||
keywords_by_subcat = {
|
||||
sub: get_keywords_for_subcategory(domain, category, sub)
|
||||
for sub in subcat_names
|
||||
}
|
||||
|
||||
def _get(db_path: Path) -> list[dict]:
|
||||
store = Store(db_path)
|
||||
try:
|
||||
return store.get_browser_subcategories(domain, keywords_by_subcat)
|
||||
finally:
|
||||
store.close()
|
||||
|
||||
return await asyncio.to_thread(_get, session.db)
|
||||
|
||||
|
||||
@router.get("/browse/{domain}/{category}")
|
||||
async def browse_recipes(
|
||||
domain: str,
|
||||
category: str,
|
||||
page: Annotated[int, Query(ge=1)] = 1,
|
||||
page_size: Annotated[int, Query(ge=1, le=100)] = 20,
|
||||
pantry_items: Annotated[str | None, Query()] = None,
|
||||
subcategory: Annotated[str | None, Query()] = None,
|
||||
q: Annotated[str | None, Query(max_length=200)] = None,
|
||||
sort: Annotated[str, Query(pattern="^(default|alpha|alpha_desc)$")] = "default",
|
||||
session: CloudUser = Depends(get_session),
|
||||
) -> dict:
|
||||
"""Return a paginated list of recipes for a domain/category.
|
||||
|
||||
Pass pantry_items as a comma-separated string to receive match_pct badges.
|
||||
Pass subcategory to narrow within a category that has subcategories.
|
||||
Pass q to filter by title substring. Pass sort for ordering (default/alpha/alpha_desc).
|
||||
"""
|
||||
if domain not in DOMAINS:
|
||||
raise HTTPException(status_code=404, detail=f"Unknown domain '{domain}'.")
|
||||
|
||||
if category == "_all":
|
||||
keywords = None # unfiltered browse
|
||||
elif subcategory:
|
||||
keywords = get_keywords_for_subcategory(domain, category, subcategory)
|
||||
if not keywords:
|
||||
raise HTTPException(
|
||||
status_code=404,
|
||||
detail=f"Unknown subcategory '{subcategory}' in '{category}'.",
|
||||
)
|
||||
else:
|
||||
keywords = get_keywords_for_category(domain, category)
|
||||
if not keywords:
|
||||
raise HTTPException(
|
||||
status_code=404,
|
||||
detail=f"Unknown category '{category}' in domain '{domain}'.",
|
||||
)
|
||||
|
||||
pantry_list = (
|
||||
[p.strip() for p in pantry_items.split(",") if p.strip()]
|
||||
if pantry_items
|
||||
else None
|
||||
)
|
||||
|
||||
def _browse(db_path: Path) -> dict:
|
||||
store = Store(db_path)
|
||||
try:
|
||||
result = store.browse_recipes(
|
||||
keywords=keywords,
|
||||
page=page,
|
||||
page_size=page_size,
|
||||
pantry_items=pantry_list,
|
||||
q=q or None,
|
||||
sort=sort,
|
||||
)
|
||||
store.log_browser_telemetry(
|
||||
domain=domain,
|
||||
category=category,
|
||||
page=page,
|
||||
result_count=result["total"],
|
||||
)
|
||||
return result
|
||||
finally:
|
||||
store.close()
|
||||
|
||||
return await asyncio.to_thread(_browse, session.db)
|
||||
|
||||
|
||||
@router.get("/templates", response_model=list[AssemblyTemplateOut])
|
||||
async def list_assembly_templates() -> list[dict]:
|
||||
"""Return all 13 assembly templates with ordered role sequences.
|
||||
|
||||
Cache-friendly: static data, no per-user state.
|
||||
"""
|
||||
return get_templates_for_api()
|
||||
|
||||
|
||||
@router.get("/template-candidates", response_model=RoleCandidatesResponse)
|
||||
async def get_template_role_candidates(
|
||||
template_id: str = Query(..., description="Template slug, e.g. 'burrito_taco'"),
|
||||
role: str = Query(..., description="Role display name, e.g. 'protein'"),
|
||||
prior_picks: str = Query(default="", description="Comma-separated prior selections"),
|
||||
session: CloudUser = Depends(get_session),
|
||||
) -> dict:
|
||||
"""Return pantry-matched candidates for one wizard step."""
|
||||
def _get(db_path: Path) -> dict:
|
||||
store = Store(db_path)
|
||||
try:
|
||||
items = store.list_inventory(status="available")
|
||||
pantry_set = {
|
||||
item["product_name"]
|
||||
for item in items
|
||||
if item.get("product_name")
|
||||
}
|
||||
pantry_list = list(pantry_set)
|
||||
prior = [p.strip() for p in prior_picks.split(",") if p.strip()]
|
||||
profile_index = store.get_element_profiles(pantry_list + prior)
|
||||
return get_role_candidates(
|
||||
template_slug=template_id,
|
||||
role_display=role,
|
||||
pantry_set=pantry_set,
|
||||
prior_picks=prior,
|
||||
profile_index=profile_index,
|
||||
)
|
||||
finally:
|
||||
store.close()
|
||||
|
||||
return await asyncio.to_thread(_get, session.db)
|
||||
|
||||
|
||||
@router.post("/build", response_model=RecipeSuggestion)
|
||||
async def build_recipe(
|
||||
req: BuildRequest,
|
||||
session: CloudUser = Depends(get_session),
|
||||
) -> RecipeSuggestion:
|
||||
"""Build a recipe from explicit role selections."""
|
||||
def _build(db_path: Path) -> RecipeSuggestion | None:
|
||||
store = Store(db_path)
|
||||
try:
|
||||
items = store.list_inventory(status="available")
|
||||
pantry_set = {
|
||||
item["product_name"]
|
||||
for item in items
|
||||
if item.get("product_name")
|
||||
}
|
||||
suggestion = build_from_selection(
|
||||
template_slug=req.template_id,
|
||||
role_overrides=req.role_overrides,
|
||||
pantry_set=pantry_set,
|
||||
)
|
||||
if suggestion is None:
|
||||
return None
|
||||
# Persist to recipes table so the result can be saved/bookmarked.
|
||||
# external_id encodes template + selections for stable dedup.
|
||||
import hashlib as _hl, json as _js
|
||||
sel_hash = _hl.md5(
|
||||
_js.dumps(req.role_overrides, sort_keys=True).encode()
|
||||
).hexdigest()[:8]
|
||||
external_id = f"assembly:{req.template_id}:{sel_hash}"
|
||||
real_id = store.upsert_built_recipe(
|
||||
external_id=external_id,
|
||||
title=suggestion.title,
|
||||
ingredients=suggestion.matched_ingredients,
|
||||
directions=suggestion.directions,
|
||||
)
|
||||
return suggestion.model_copy(update={"id": real_id})
|
||||
finally:
|
||||
store.close()
|
||||
|
||||
result = await asyncio.to_thread(_build, session.db)
|
||||
if result is None:
|
||||
raise HTTPException(
|
||||
status_code=404,
|
||||
detail="Template not found or required ingredient missing.",
|
||||
)
|
||||
return result
|
||||
|
||||
|
||||
@router.get("/{recipe_id}")
|
||||
async def get_recipe(recipe_id: int, session: CloudUser = Depends(get_session)) -> dict:
|
||||
def _get(db_path: Path, rid: int) -> dict | None:
|
||||
store = Store(db_path)
|
||||
try:
|
||||
return store.get_recipe(rid)
|
||||
finally:
|
||||
store.close()
|
||||
|
||||
recipe = await asyncio.to_thread(_get, session.db, recipe_id)
|
||||
if not recipe:
|
||||
raise HTTPException(status_code=404, detail="Recipe not found.")
|
||||
return recipe
|
||||
|
|
@ -1,188 +0,0 @@
|
|||
"""Saved recipe bookmark endpoints."""
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from pathlib import Path
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException
|
||||
|
||||
from app.cloud_session import CloudUser, get_session
|
||||
from app.db.store import Store
|
||||
from app.models.schemas.saved_recipe import (
|
||||
CollectionMemberRequest,
|
||||
CollectionRequest,
|
||||
CollectionSummary,
|
||||
SavedRecipeSummary,
|
||||
SaveRecipeRequest,
|
||||
UpdateSavedRecipeRequest,
|
||||
)
|
||||
from app.tiers import can_use
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
def _in_thread(db_path: Path, fn):
    """Open a dedicated Store connection, apply *fn* to it, always close it."""
    conn = Store(db_path)
    try:
        result = fn(conn)
    finally:
        conn.close()
    return result
|
||||
|
||||
|
||||
def _to_summary(row: dict, store: Store) -> SavedRecipeSummary:
    """Shape a saved-recipe row, plus its collection memberships, into the API model."""
    fields = {
        "id": row["id"],
        "recipe_id": row["recipe_id"],
        "title": row.get("title", ""),
        "saved_at": row["saved_at"],
        "notes": row.get("notes"),
        "rating": row.get("rating"),
        "style_tags": row.get("style_tags") or [],
        "collection_ids": store.get_saved_recipe_collection_ids(row["id"]),
    }
    return SavedRecipeSummary(**fields)
|
||||
|
||||
|
||||
# ── save / unsave ─────────────────────────────────────────────────────────────
|
||||
|
||||
@router.post("", response_model=SavedRecipeSummary)
|
||||
async def save_recipe(
|
||||
req: SaveRecipeRequest,
|
||||
session: CloudUser = Depends(get_session),
|
||||
) -> SavedRecipeSummary:
|
||||
def _run(store: Store) -> SavedRecipeSummary:
|
||||
row = store.save_recipe(req.recipe_id, req.notes, req.rating)
|
||||
return _to_summary(row, store)
|
||||
|
||||
return await asyncio.to_thread(_in_thread, session.db, _run)
|
||||
|
||||
|
||||
@router.delete("/{recipe_id}", status_code=204)
|
||||
async def unsave_recipe(
|
||||
recipe_id: int,
|
||||
session: CloudUser = Depends(get_session),
|
||||
) -> None:
|
||||
await asyncio.to_thread(
|
||||
_in_thread, session.db, lambda s: s.unsave_recipe(recipe_id)
|
||||
)
|
||||
|
||||
|
||||
@router.patch("/{recipe_id}", response_model=SavedRecipeSummary)
|
||||
async def update_saved_recipe(
|
||||
recipe_id: int,
|
||||
req: UpdateSavedRecipeRequest,
|
||||
session: CloudUser = Depends(get_session),
|
||||
) -> SavedRecipeSummary:
|
||||
def _run(store: Store) -> SavedRecipeSummary:
|
||||
if not store.is_recipe_saved(recipe_id):
|
||||
raise HTTPException(status_code=404, detail="Recipe not saved.")
|
||||
row = store.update_saved_recipe(
|
||||
recipe_id, req.notes, req.rating, req.style_tags
|
||||
)
|
||||
return _to_summary(row, store)
|
||||
|
||||
return await asyncio.to_thread(_in_thread, session.db, _run)
|
||||
|
||||
|
||||
@router.get("", response_model=list[SavedRecipeSummary])
|
||||
async def list_saved_recipes(
|
||||
sort_by: str = "saved_at",
|
||||
collection_id: int | None = None,
|
||||
session: CloudUser = Depends(get_session),
|
||||
) -> list[SavedRecipeSummary]:
|
||||
def _run(store: Store) -> list[SavedRecipeSummary]:
|
||||
rows = store.get_saved_recipes(sort_by=sort_by, collection_id=collection_id)
|
||||
return [_to_summary(r, store) for r in rows]
|
||||
|
||||
return await asyncio.to_thread(_in_thread, session.db, _run)
|
||||
|
||||
|
||||
# ── collections (Paid) ────────────────────────────────────────────────────────
|
||||
|
||||
@router.get("/collections", response_model=list[CollectionSummary])
|
||||
async def list_collections(
|
||||
session: CloudUser = Depends(get_session),
|
||||
) -> list[CollectionSummary]:
|
||||
if not can_use("recipe_collections", session.tier):
|
||||
raise HTTPException(status_code=403, detail="Collections require Paid tier.")
|
||||
rows = await asyncio.to_thread(
|
||||
_in_thread, session.db, lambda s: s.get_collections()
|
||||
)
|
||||
return [CollectionSummary(**r) for r in rows]
|
||||
|
||||
|
||||
@router.post("/collections", response_model=CollectionSummary)
|
||||
async def create_collection(
|
||||
req: CollectionRequest,
|
||||
session: CloudUser = Depends(get_session),
|
||||
) -> CollectionSummary:
|
||||
if not can_use("recipe_collections", session.tier):
|
||||
raise HTTPException(
|
||||
status_code=403,
|
||||
detail="Collections require Paid tier.",
|
||||
)
|
||||
row = await asyncio.to_thread(
|
||||
_in_thread, session.db,
|
||||
lambda s: s.create_collection(req.name, req.description),
|
||||
)
|
||||
return CollectionSummary(**row)
|
||||
|
||||
|
||||
@router.delete("/collections/{collection_id}", status_code=204)
|
||||
async def delete_collection(
|
||||
collection_id: int,
|
||||
session: CloudUser = Depends(get_session),
|
||||
) -> None:
|
||||
if not can_use("recipe_collections", session.tier):
|
||||
raise HTTPException(status_code=403, detail="Collections require Paid tier.")
|
||||
await asyncio.to_thread(
|
||||
_in_thread, session.db, lambda s: s.delete_collection(collection_id)
|
||||
)
|
||||
|
||||
|
||||
@router.patch("/collections/{collection_id}", response_model=CollectionSummary)
|
||||
async def rename_collection(
|
||||
collection_id: int,
|
||||
req: CollectionRequest,
|
||||
session: CloudUser = Depends(get_session),
|
||||
) -> CollectionSummary:
|
||||
if not can_use("recipe_collections", session.tier):
|
||||
raise HTTPException(status_code=403, detail="Collections require Paid tier.")
|
||||
row = await asyncio.to_thread(
|
||||
_in_thread, session.db,
|
||||
lambda s: s.rename_collection(collection_id, req.name, req.description),
|
||||
)
|
||||
if not row:
|
||||
raise HTTPException(status_code=404, detail="Collection not found.")
|
||||
return CollectionSummary(**row)
|
||||
|
||||
|
||||
@router.post("/collections/{collection_id}/members", status_code=204)
|
||||
async def add_to_collection(
|
||||
collection_id: int,
|
||||
req: CollectionMemberRequest,
|
||||
session: CloudUser = Depends(get_session),
|
||||
) -> None:
|
||||
if not can_use("recipe_collections", session.tier):
|
||||
raise HTTPException(status_code=403, detail="Collections require Paid tier.")
|
||||
await asyncio.to_thread(
|
||||
_in_thread, session.db,
|
||||
lambda s: s.add_to_collection(collection_id, req.saved_recipe_id),
|
||||
)
|
||||
|
||||
|
||||
@router.delete(
|
||||
"/collections/{collection_id}/members/{saved_recipe_id}", status_code=204
|
||||
)
|
||||
async def remove_from_collection(
|
||||
collection_id: int,
|
||||
saved_recipe_id: int,
|
||||
session: CloudUser = Depends(get_session),
|
||||
) -> None:
|
||||
if not can_use("recipe_collections", session.tier):
|
||||
raise HTTPException(status_code=403, detail="Collections require Paid tier.")
|
||||
await asyncio.to_thread(
|
||||
_in_thread, session.db,
|
||||
lambda s: s.remove_from_collection(collection_id, saved_recipe_id),
|
||||
)
|
||||
|
|
@ -1,37 +0,0 @@
|
|||
"""Session bootstrap endpoint — called once per app load by the frontend.
|
||||
|
||||
Logs auth= + tier= for log-based analytics without client-side tracking.
|
||||
See Circuit-Forge/kiwi#86.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
|
||||
from fastapi import APIRouter, Depends
|
||||
|
||||
from app.cloud_session import CloudUser, _auth_label, get_session
|
||||
from app.core.config import settings
|
||||
|
||||
router = APIRouter()
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@router.get("/bootstrap")
|
||||
def session_bootstrap(session: CloudUser = Depends(get_session)) -> dict:
|
||||
"""Record auth type and tier for log-based analytics.
|
||||
|
||||
Expected log output:
|
||||
INFO:app.api.endpoints.session: session auth=authed tier=paid
|
||||
INFO:app.api.endpoints.session: session auth=anon tier=free
|
||||
|
||||
E2E test sessions (E2E_TEST_USER_ID) are logged at DEBUG so they don't
|
||||
pollute analytics counts while still being visible when DEBUG=true.
|
||||
"""
|
||||
is_test = bool(settings.E2E_TEST_USER_ID and session.user_id == settings.E2E_TEST_USER_ID)
|
||||
logger = log.debug if is_test else log.info
|
||||
logger("session auth=%s tier=%s%s", _auth_label(session.user_id), session.tier, " e2e=true" if is_test else "")
|
||||
return {
|
||||
"auth": _auth_label(session.user_id),
|
||||
"tier": session.tier,
|
||||
"has_byok": session.has_byok,
|
||||
}
|
||||
|
|
@ -1,46 +0,0 @@
|
|||
"""User settings endpoints."""
|
||||
from __future__ import annotations
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException
|
||||
from pydantic import BaseModel
|
||||
|
||||
from app.cloud_session import CloudUser, get_session
|
||||
from app.db.session import get_store
|
||||
from app.db.store import Store
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
_ALLOWED_KEYS = frozenset({"cooking_equipment", "unit_system"})
|
||||
|
||||
|
||||
class SettingBody(BaseModel):
    """Request body for PUT /settings/{key}: the raw value to store."""

    # Stored verbatim; interpretation depends on the settings key.
    value: str
|
||||
|
||||
|
||||
@router.get("/{key}")
|
||||
async def get_setting(
|
||||
key: str,
|
||||
session: CloudUser = Depends(get_session),
|
||||
store: Store = Depends(get_store),
|
||||
) -> dict:
|
||||
"""Return the stored value for a settings key."""
|
||||
if key not in _ALLOWED_KEYS:
|
||||
raise HTTPException(status_code=422, detail=f"Unknown settings key: '{key}'.")
|
||||
value = store.get_setting(key)
|
||||
if value is None:
|
||||
raise HTTPException(status_code=404, detail=f"Setting '{key}' not found.")
|
||||
return {"key": key, "value": value}
|
||||
|
||||
|
||||
@router.put("/{key}")
|
||||
async def set_setting(
|
||||
key: str,
|
||||
body: SettingBody,
|
||||
session: CloudUser = Depends(get_session),
|
||||
store: Store = Depends(get_store),
|
||||
) -> dict:
|
||||
"""Upsert a settings key-value pair."""
|
||||
if key not in _ALLOWED_KEYS:
|
||||
raise HTTPException(status_code=422, detail=f"Unknown settings key: '{key}'.")
|
||||
store.set_setting(key, body.value)
|
||||
return {"key": key, "value": body.value}
|
||||
|
|
@ -1,224 +0,0 @@
|
|||
"""Shopping list endpoints.
|
||||
|
||||
Free tier for all users (anonymous guests included — shopping list is the
|
||||
primary affiliate revenue surface). Confirm-purchase action is also Free:
|
||||
it moves a checked item into pantry inventory without a tier gate so the
|
||||
flow works for anyone who signs up or browses without an account.
|
||||
|
||||
Routes:
|
||||
GET /shopping — list items (with affiliate links)
|
||||
POST /shopping — add item manually
|
||||
PATCH /shopping/{id} — update (check/uncheck, rename, qty)
|
||||
DELETE /shopping/{id} — remove single item
|
||||
DELETE /shopping/checked — clear all checked items
|
||||
DELETE /shopping/all — clear entire list
|
||||
POST /shopping/from-recipe — bulk add gaps from a recipe
|
||||
POST /shopping/{id}/confirm — confirm purchase → add to pantry inventory
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, status
|
||||
|
||||
from app.cloud_session import CloudUser, get_session
|
||||
from app.db.session import get_store
|
||||
from app.db.store import Store
|
||||
from app.models.schemas.shopping import (
|
||||
BulkAddFromRecipeRequest,
|
||||
ConfirmPurchaseRequest,
|
||||
ShoppingItemCreate,
|
||||
ShoppingItemResponse,
|
||||
ShoppingItemUpdate,
|
||||
)
|
||||
from app.services.recipe.grocery_links import GroceryLinkBuilder
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
def _enrich(item: dict, builder: GroceryLinkBuilder) -> ShoppingItemResponse:
    """Convert a raw store row into a response model with affiliate links."""
    payload = dict(item)
    payload["checked"] = bool(item.get("checked", 0))  # DB stores 0/1
    link_dicts = []
    for link in builder.build_links(item["name"]):
        link_dicts.append(
            {"ingredient": link.ingredient, "retailer": link.retailer, "url": link.url}
        )
    return ShoppingItemResponse(**payload, grocery_links=link_dicts)
|
||||
|
||||
|
||||
def _in_thread(db_path, fn):
    """Open a short-lived Store, run *fn* against it, and always close it."""
    conn = Store(db_path)
    try:
        result = fn(conn)
    finally:
        conn.close()
    return result
|
||||
|
||||
|
||||
# ── List ──────────────────────────────────────────────────────────────────────
|
||||
|
||||
@router.get("", response_model=list[ShoppingItemResponse])
|
||||
async def list_shopping_items(
|
||||
include_checked: bool = True,
|
||||
session: CloudUser = Depends(get_session),
|
||||
):
|
||||
builder = GroceryLinkBuilder(tier=session.tier, has_byok=session.has_byok)
|
||||
items = await asyncio.to_thread(
|
||||
_in_thread, session.db, lambda s: s.list_shopping_items(include_checked)
|
||||
)
|
||||
return [_enrich(i, builder) for i in items]
|
||||
|
||||
|
||||
# ── Add manually ──────────────────────────────────────────────────────────────
|
||||
|
||||
@router.post("", response_model=ShoppingItemResponse, status_code=status.HTTP_201_CREATED)
|
||||
async def add_shopping_item(
|
||||
body: ShoppingItemCreate,
|
||||
session: CloudUser = Depends(get_session),
|
||||
):
|
||||
builder = GroceryLinkBuilder(tier=session.tier, has_byok=session.has_byok)
|
||||
item = await asyncio.to_thread(
|
||||
_in_thread,
|
||||
session.db,
|
||||
lambda s: s.add_shopping_item(
|
||||
name=body.name,
|
||||
quantity=body.quantity,
|
||||
unit=body.unit,
|
||||
category=body.category,
|
||||
notes=body.notes,
|
||||
source=body.source,
|
||||
recipe_id=body.recipe_id,
|
||||
sort_order=body.sort_order,
|
||||
),
|
||||
)
|
||||
return _enrich(item, builder)
|
||||
|
||||
|
||||
# ── Bulk add from recipe ───────────────────────────────────────────────────────
|
||||
|
||||
@router.post("/from-recipe", response_model=list[ShoppingItemResponse], status_code=status.HTTP_201_CREATED)
|
||||
async def add_from_recipe(
|
||||
body: BulkAddFromRecipeRequest,
|
||||
session: CloudUser = Depends(get_session),
|
||||
):
|
||||
"""Add missing ingredients from a recipe to the shopping list.
|
||||
|
||||
Runs pantry gap analysis and adds only the items the user doesn't have
|
||||
(unless include_covered=True). Skips duplicates already on the list.
|
||||
"""
|
||||
from app.services.meal_plan.shopping_list import compute_shopping_list
|
||||
|
||||
def _run(store: Store):
|
||||
recipe = store.get_recipe(body.recipe_id)
|
||||
if not recipe:
|
||||
raise HTTPException(status_code=404, detail="Recipe not found")
|
||||
inventory = store.list_inventory()
|
||||
gaps, covered = compute_shopping_list([recipe], inventory)
|
||||
targets = (gaps + covered) if body.include_covered else gaps
|
||||
|
||||
# Avoid duplicates already on the list
|
||||
existing = {i["name"].lower() for i in store.list_shopping_items()}
|
||||
added = []
|
||||
for gap in targets:
|
||||
if gap.ingredient_name.lower() in existing:
|
||||
continue
|
||||
item = store.add_shopping_item(
|
||||
name=gap.ingredient_name,
|
||||
quantity=None,
|
||||
unit=gap.have_unit,
|
||||
source="recipe",
|
||||
recipe_id=body.recipe_id,
|
||||
)
|
||||
added.append(item)
|
||||
return added
|
||||
|
||||
builder = GroceryLinkBuilder(tier=session.tier, has_byok=session.has_byok)
|
||||
items = await asyncio.to_thread(_in_thread, session.db, _run)
|
||||
return [_enrich(i, builder) for i in items]
|
||||
|
||||
|
||||
# ── Update ────────────────────────────────────────────────────────────────────
|
||||
|
||||
@router.patch("/{item_id}", response_model=ShoppingItemResponse)
|
||||
async def update_shopping_item(
|
||||
item_id: int,
|
||||
body: ShoppingItemUpdate,
|
||||
session: CloudUser = Depends(get_session),
|
||||
):
|
||||
builder = GroceryLinkBuilder(tier=session.tier, has_byok=session.has_byok)
|
||||
item = await asyncio.to_thread(
|
||||
_in_thread,
|
||||
session.db,
|
||||
lambda s: s.update_shopping_item(item_id, **body.model_dump(exclude_none=True)),
|
||||
)
|
||||
if not item:
|
||||
raise HTTPException(status_code=404, detail="Shopping item not found")
|
||||
return _enrich(item, builder)
|
||||
|
||||
|
||||
# ── Confirm purchase → pantry ─────────────────────────────────────────────────
|
||||
|
||||
@router.post("/{item_id}/confirm", status_code=status.HTTP_201_CREATED)
|
||||
async def confirm_purchase(
|
||||
item_id: int,
|
||||
body: ConfirmPurchaseRequest,
|
||||
session: CloudUser = Depends(get_session),
|
||||
):
|
||||
"""Confirm a checked item was purchased and add it to pantry inventory.
|
||||
|
||||
Human approval step: the user explicitly confirms what they actually bought
|
||||
before it lands in their pantry. Returns the new inventory item.
|
||||
"""
|
||||
def _run(store: Store):
|
||||
shopping_item = store.get_shopping_item(item_id)
|
||||
if not shopping_item:
|
||||
raise HTTPException(status_code=404, detail="Shopping item not found")
|
||||
|
||||
qty = body.quantity if body.quantity is not None else (shopping_item.get("quantity") or 1.0)
|
||||
unit = body.unit or shopping_item.get("unit") or "count"
|
||||
category = shopping_item.get("category")
|
||||
|
||||
product = store.get_or_create_product(
|
||||
name=shopping_item["name"],
|
||||
category=category,
|
||||
)
|
||||
inv_item = store.add_inventory_item(
|
||||
product_id=product["id"],
|
||||
location=body.location,
|
||||
quantity=qty,
|
||||
unit=unit,
|
||||
source="manual",
|
||||
)
|
||||
# Mark the shopping item checked and leave it for the user to clear
|
||||
store.update_shopping_item(item_id, checked=True)
|
||||
return inv_item
|
||||
|
||||
return await asyncio.to_thread(_in_thread, session.db, _run)
|
||||
|
||||
|
||||
# ── Delete ────────────────────────────────────────────────────────────────────
|
||||
|
||||
@router.delete("/{item_id}", status_code=status.HTTP_204_NO_CONTENT)
|
||||
async def delete_shopping_item(
|
||||
item_id: int,
|
||||
session: CloudUser = Depends(get_session),
|
||||
):
|
||||
deleted = await asyncio.to_thread(
|
||||
_in_thread, session.db, lambda s: s.delete_shopping_item(item_id)
|
||||
)
|
||||
if not deleted:
|
||||
raise HTTPException(status_code=404, detail="Shopping item not found")
|
||||
|
||||
|
||||
@router.delete("/checked", status_code=status.HTTP_204_NO_CONTENT)
|
||||
async def clear_checked(session: CloudUser = Depends(get_session)):
|
||||
await asyncio.to_thread(
|
||||
_in_thread, session.db, lambda s: s.clear_checked_shopping_items()
|
||||
)
|
||||
|
||||
|
||||
@router.delete("/all", status_code=status.HTTP_204_NO_CONTENT)
|
||||
async def clear_all(session: CloudUser = Depends(get_session)):
|
||||
await asyncio.to_thread(
|
||||
_in_thread, session.db, lambda s: s.clear_all_shopping_items()
|
||||
)
|
||||
|
|
@ -1,42 +0,0 @@
|
|||
"""Staple library endpoints."""
|
||||
from __future__ import annotations
|
||||
|
||||
from fastapi import APIRouter, HTTPException
|
||||
|
||||
from app.services.recipe.staple_library import StapleLibrary
|
||||
|
||||
router = APIRouter()
|
||||
_lib = StapleLibrary()
|
||||
|
||||
|
||||
@router.get("/")
|
||||
async def list_staples(dietary: str | None = None) -> list[dict]:
|
||||
staples = _lib.filter_by_dietary(dietary) if dietary else _lib.list_all()
|
||||
return [
|
||||
{
|
||||
"slug": s.slug,
|
||||
"name": s.name,
|
||||
"description": s.description,
|
||||
"dietary_labels": s.dietary_labels,
|
||||
"yield_formats": list(s.yield_formats.keys()),
|
||||
}
|
||||
for s in staples
|
||||
]
|
||||
|
||||
|
||||
@router.get("/{slug}")
|
||||
async def get_staple(slug: str) -> dict:
|
||||
staple = _lib.get(slug)
|
||||
if not staple:
|
||||
raise HTTPException(status_code=404, detail=f"Staple '{slug}' not found.")
|
||||
return {
|
||||
"slug": staple.slug,
|
||||
"name": staple.name,
|
||||
"description": staple.description,
|
||||
"dietary_labels": staple.dietary_labels,
|
||||
"base_ingredients": staple.base_ingredients,
|
||||
"base_method": staple.base_method,
|
||||
"base_time_minutes": staple.base_time_minutes,
|
||||
"yield_formats": staple.yield_formats,
|
||||
"compatible_styles": staple.compatible_styles,
|
||||
}
|
||||
|
|
@ -1,24 +1,10 @@
|
|||
from fastapi import APIRouter
|
||||
from app.api.endpoints import health, receipts, export, inventory, ocr, recipes, settings, staples, feedback, feedback_attach, household, saved_recipes, imitate, meal_plans, orch_usage, session, shopping
|
||||
from app.api.endpoints.community import router as community_router
|
||||
from app.api.endpoints import health, receipts, export, inventory, ocr
|
||||
|
||||
api_router = APIRouter()
|
||||
|
||||
api_router.include_router(session.router, prefix="/session", tags=["session"])
|
||||
api_router.include_router(health.router, prefix="/health", tags=["health"])
|
||||
api_router.include_router(receipts.router, prefix="/receipts", tags=["receipts"])
|
||||
api_router.include_router(ocr.router, prefix="/receipts", tags=["ocr"])
|
||||
api_router.include_router(export.router, tags=["export"])
|
||||
api_router.include_router(inventory.router, prefix="/inventory", tags=["inventory"])
|
||||
api_router.include_router(saved_recipes.router, prefix="/recipes/saved", tags=["saved-recipes"])
|
||||
api_router.include_router(recipes.router, prefix="/recipes", tags=["recipes"])
|
||||
api_router.include_router(settings.router, prefix="/settings", tags=["settings"])
|
||||
api_router.include_router(staples.router, prefix="/staples", tags=["staples"])
|
||||
api_router.include_router(feedback.router, prefix="/feedback", tags=["feedback"])
|
||||
api_router.include_router(feedback_attach.router, prefix="/feedback", tags=["feedback"])
|
||||
api_router.include_router(household.router, prefix="/household", tags=["household"])
|
||||
api_router.include_router(imitate.router, prefix="/imitate", tags=["imitate"])
|
||||
api_router.include_router(meal_plans.router, prefix="/meal-plans", tags=["meal-plans"])
|
||||
api_router.include_router(orch_usage.router, prefix="/orch-usage", tags=["orch-usage"])
|
||||
api_router.include_router(shopping.router, prefix="/shopping", tags=["shopping"])
|
||||
api_router.include_router(community_router)
|
||||
api_router.include_router(health.router, prefix="/health", tags=["health"])
|
||||
api_router.include_router(receipts.router, prefix="/receipts", tags=["receipts"])
|
||||
api_router.include_router(ocr.router, prefix="/receipts", tags=["ocr"]) # OCR endpoints under /receipts
|
||||
api_router.include_router(export.router, tags=["export"]) # No prefix, uses /export in the router
|
||||
api_router.include_router(inventory.router, prefix="/inventory", tags=["inventory"])
|
||||
|
|
@ -22,12 +22,10 @@ import time
|
|||
from dataclasses import dataclass
|
||||
from pathlib import Path
|
||||
|
||||
import uuid
|
||||
|
||||
import jwt as pyjwt
|
||||
import requests
|
||||
import yaml
|
||||
from fastapi import Depends, HTTPException, Request, Response
|
||||
from fastapi import Depends, HTTPException, Request
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
|
@ -39,60 +37,14 @@ DIRECTUS_JWT_SECRET: str = os.environ.get("DIRECTUS_JWT_SECRET", "")
|
|||
HEIMDALL_URL: str = os.environ.get("HEIMDALL_URL", "https://license.circuitforge.tech")
|
||||
HEIMDALL_ADMIN_TOKEN: str = os.environ.get("HEIMDALL_ADMIN_TOKEN", "")
|
||||
|
||||
# Dev bypass: comma-separated IPs or CIDR ranges that skip JWT auth.
|
||||
# NEVER set this in production. Intended only for LAN developer testing when
|
||||
# the request doesn't pass through Caddy (which normally injects X-CF-Session).
|
||||
# Example: CLOUD_AUTH_BYPASS_IPS=10.1.10.0/24,127.0.0.1
|
||||
import ipaddress as _ipaddress
|
||||
|
||||
_BYPASS_RAW: list[str] = [
|
||||
e.strip()
|
||||
for e in os.environ.get("CLOUD_AUTH_BYPASS_IPS", "").split(",")
|
||||
if e.strip()
|
||||
]
|
||||
|
||||
_BYPASS_NETS: list[_ipaddress.IPv4Network | _ipaddress.IPv6Network] = []
|
||||
_BYPASS_IPS: frozenset[str] = frozenset()
|
||||
|
||||
if _BYPASS_RAW:
|
||||
_nets, _ips = [], set()
|
||||
for entry in _BYPASS_RAW:
|
||||
try:
|
||||
_nets.append(_ipaddress.ip_network(entry, strict=False))
|
||||
except ValueError:
|
||||
_ips.add(entry) # treat non-parseable entries as bare IPs
|
||||
_BYPASS_NETS = _nets
|
||||
_BYPASS_IPS = frozenset(_ips)
|
||||
|
||||
|
||||
def _is_bypass_ip(ip: str) -> bool:
|
||||
if not ip:
|
||||
return False
|
||||
if ip in _BYPASS_IPS:
|
||||
return True
|
||||
try:
|
||||
addr = _ipaddress.ip_address(ip)
|
||||
return any(addr in net for net in _BYPASS_NETS)
|
||||
except ValueError:
|
||||
return False
|
||||
|
||||
_LOCAL_KIWI_DB: Path = Path(os.environ.get("KIWI_DB", "data/kiwi.db"))
|
||||
|
||||
_TIER_CACHE: dict[str, tuple[dict, float]] = {}
|
||||
_TIER_CACHE: dict[str, tuple[str, float]] = {}
|
||||
_TIER_CACHE_TTL = 300 # 5 minutes
|
||||
|
||||
TIERS = ["free", "paid", "premium", "ultra"]
|
||||
|
||||
|
||||
def _auth_label(user_id: str) -> str:
|
||||
"""Classify a user_id into a short tag for structured log lines. No PII emitted."""
|
||||
if user_id in ("local", "local-dev"):
|
||||
return "local"
|
||||
if user_id.startswith("anon-"):
|
||||
return "anon"
|
||||
return "authed"
|
||||
|
||||
|
||||
# ── Domain ────────────────────────────────────────────────────────────────────
|
||||
|
||||
@dataclass(frozen=True)
|
||||
|
|
@ -101,9 +53,6 @@ class CloudUser:
|
|||
tier: str # free | paid | premium | ultra | local
|
||||
db: Path # per-user SQLite DB path
|
||||
has_byok: bool # True if a configured LLM backend is present in llm.yaml
|
||||
household_id: str | None = None
|
||||
is_household_owner: bool = False
|
||||
license_key: str | None = None # key_display for lifetime/founders keys; None for subscription/free
|
||||
|
||||
|
||||
# ── JWT validation ─────────────────────────────────────────────────────────────
|
||||
|
|
@ -144,16 +93,14 @@ def _ensure_provisioned(user_id: str) -> None:
|
|||
log.warning("Heimdall provision failed for user %s: %s", user_id, exc)
|
||||
|
||||
|
||||
def _fetch_cloud_tier(user_id: str) -> tuple[str, str | None, bool, str | None]:
|
||||
"""Returns (tier, household_id | None, is_household_owner, license_key | None)."""
|
||||
def _fetch_cloud_tier(user_id: str) -> str:
|
||||
now = time.monotonic()
|
||||
cached = _TIER_CACHE.get(user_id)
|
||||
if cached and (now - cached[1]) < _TIER_CACHE_TTL:
|
||||
entry = cached[0]
|
||||
return entry["tier"], entry.get("household_id"), entry.get("is_household_owner", False), entry.get("license_key")
|
||||
return cached[0]
|
||||
|
||||
if not HEIMDALL_ADMIN_TOKEN:
|
||||
return "free", None, False, None
|
||||
return "free"
|
||||
try:
|
||||
resp = requests.post(
|
||||
f"{HEIMDALL_URL}/admin/cloud/resolve",
|
||||
|
|
@ -161,35 +108,17 @@ def _fetch_cloud_tier(user_id: str) -> tuple[str, str | None, bool, str | None]:
|
|||
headers={"Authorization": f"Bearer {HEIMDALL_ADMIN_TOKEN}"},
|
||||
timeout=5,
|
||||
)
|
||||
data = resp.json() if resp.ok else {}
|
||||
tier = data.get("tier", "free")
|
||||
household_id = data.get("household_id")
|
||||
is_owner = data.get("is_household_owner", False)
|
||||
license_key = data.get("key_display")
|
||||
tier = resp.json().get("tier", "free") if resp.ok else "free"
|
||||
except Exception as exc:
|
||||
log.warning("Heimdall tier resolve failed for user %s: %s", user_id, exc)
|
||||
tier, household_id, is_owner, license_key = "free", None, False, None
|
||||
tier = "free"
|
||||
|
||||
_TIER_CACHE[user_id] = ({"tier": tier, "household_id": household_id, "is_household_owner": is_owner, "license_key": license_key}, now)
|
||||
return tier, household_id, is_owner, license_key
|
||||
_TIER_CACHE[user_id] = (tier, now)
|
||||
return tier
|
||||
|
||||
|
||||
def _user_db_path(user_id: str, household_id: str | None = None) -> Path:
|
||||
if household_id:
|
||||
path = CLOUD_DATA_ROOT / f"household_{household_id}" / "kiwi.db"
|
||||
else:
|
||||
path = CLOUD_DATA_ROOT / user_id / "kiwi.db"
|
||||
path.parent.mkdir(parents=True, exist_ok=True)
|
||||
return path
|
||||
|
||||
|
||||
def _anon_guest_db_path(guest_id: str) -> Path:
|
||||
"""Per-session DB for unauthenticated guest visitors.
|
||||
|
||||
Each anonymous visitor gets an isolated SQLite DB keyed by their guest UUID
|
||||
cookie, so shopping lists and affiliate interactions never bleed across sessions.
|
||||
"""
|
||||
path = CLOUD_DATA_ROOT / f"anon-{guest_id}" / "kiwi.db"
|
||||
def _user_db_path(user_id: str) -> Path:
|
||||
path = CLOUD_DATA_ROOT / user_id / "kiwi.db"
|
||||
path.parent.mkdir(parents=True, exist_ok=True)
|
||||
return path
|
||||
|
||||
|
|
@ -219,89 +148,32 @@ def _detect_byok(config_path: Path = _LLM_CONFIG_PATH) -> bool:
|
|||
|
||||
# ── FastAPI dependency ────────────────────────────────────────────────────────
|
||||
|
||||
_GUEST_COOKIE = "kiwi_guest_id"
|
||||
_GUEST_COOKIE_MAX_AGE = 60 * 60 * 24 * 90 # 90 days
|
||||
|
||||
|
||||
def _resolve_guest_session(request: Request, response: Response, has_byok: bool) -> CloudUser:
|
||||
"""Return a per-session anonymous CloudUser, creating a guest UUID cookie if needed."""
|
||||
guest_id = request.cookies.get(_GUEST_COOKIE, "").strip()
|
||||
is_new = not guest_id
|
||||
if is_new:
|
||||
guest_id = str(uuid.uuid4())
|
||||
log.debug("New guest session assigned: anon-%s", guest_id[:8])
|
||||
# Secure flag only when the request actually arrived over HTTPS
|
||||
# (Caddy sets X-Forwarded-Proto=https in cloud; absent on direct port access).
|
||||
# Avoids losing the session cookie on HTTP direct-port testing of the cloud stack.
|
||||
is_https = request.headers.get("x-forwarded-proto", "http").lower() == "https"
|
||||
response.set_cookie(
|
||||
key=_GUEST_COOKIE,
|
||||
value=guest_id,
|
||||
max_age=_GUEST_COOKIE_MAX_AGE,
|
||||
httponly=True,
|
||||
samesite="lax",
|
||||
secure=is_https,
|
||||
)
|
||||
return CloudUser(
|
||||
user_id=f"anon-{guest_id}",
|
||||
tier="free",
|
||||
db=_anon_guest_db_path(guest_id),
|
||||
has_byok=has_byok,
|
||||
)
|
||||
|
||||
|
||||
def get_session(request: Request, response: Response) -> CloudUser:
|
||||
def get_session(request: Request) -> CloudUser:
|
||||
"""FastAPI dependency — resolves the current user from the request.
|
||||
|
||||
Local mode: fully-privileged "local" user pointing at local DB.
|
||||
Cloud mode: validates X-CF-Session JWT, provisions license, resolves tier.
|
||||
Dev bypass: if CLOUD_AUTH_BYPASS_IPS is set and the client IP matches,
|
||||
returns a "local" session without JWT validation (dev/LAN use only).
|
||||
Anonymous: per-session UUID cookie isolates each guest visitor's data.
|
||||
"""
|
||||
has_byok = _detect_byok()
|
||||
|
||||
if not CLOUD_MODE:
|
||||
return CloudUser(user_id="local", tier="local", db=_LOCAL_KIWI_DB, has_byok=has_byok)
|
||||
|
||||
# Prefer X-Real-IP (set by Caddy from the actual client address) over the
|
||||
# TCP peer address (which is nginx's container IP when behind the proxy).
|
||||
client_ip = (
|
||||
request.headers.get("x-real-ip", "")
|
||||
or (request.client.host if request.client else "")
|
||||
raw_header = (
|
||||
request.headers.get("x-cf-session", "")
|
||||
or request.headers.get("cookie", "")
|
||||
)
|
||||
if (_BYPASS_IPS or _BYPASS_NETS) and _is_bypass_ip(client_ip):
|
||||
log.debug("CLOUD_AUTH_BYPASS_IPS match for %s — returning local session", client_ip)
|
||||
# Use a dev DB under CLOUD_DATA_ROOT so the container has a writable path.
|
||||
dev_db = _user_db_path("local-dev")
|
||||
return CloudUser(user_id="local-dev", tier="local", db=dev_db, has_byok=has_byok)
|
||||
if not raw_header:
|
||||
raise HTTPException(status_code=401, detail="Not authenticated")
|
||||
|
||||
# Resolve cf_session JWT: prefer the explicit header injected by Caddy, then
|
||||
# fall back to the cf_session cookie value. Other cookies (e.g. kiwi_guest_id)
|
||||
# must never be treated as auth tokens.
|
||||
raw_session = request.headers.get("x-cf-session", "").strip()
|
||||
if not raw_session:
|
||||
raw_session = request.cookies.get("cf_session", "").strip()
|
||||
|
||||
if not raw_session:
|
||||
return _resolve_guest_session(request, response, has_byok)
|
||||
|
||||
token = _extract_session_token(raw_session) # gitleaks:allow — function name, not a secret
|
||||
token = _extract_session_token(raw_header)
|
||||
if not token:
|
||||
return _resolve_guest_session(request, response, has_byok)
|
||||
raise HTTPException(status_code=401, detail="Not authenticated")
|
||||
|
||||
user_id = validate_session_jwt(token)
|
||||
_ensure_provisioned(user_id)
|
||||
tier, household_id, is_household_owner, license_key = _fetch_cloud_tier(user_id)
|
||||
return CloudUser(
|
||||
user_id=user_id,
|
||||
tier=tier,
|
||||
db=_user_db_path(user_id, household_id=household_id),
|
||||
has_byok=has_byok,
|
||||
household_id=household_id,
|
||||
is_household_owner=is_household_owner,
|
||||
license_key=license_key,
|
||||
)
|
||||
tier = _fetch_cloud_tier(user_id)
|
||||
return CloudUser(user_id=user_id, tier=tier, db=_user_db_path(user_id), has_byok=has_byok)
|
||||
|
||||
|
||||
def require_tier(min_tier: str):
|
||||
|
|
|
|||
|
|
@ -35,16 +35,6 @@ class Settings:
|
|||
# Database
|
||||
DB_PATH: Path = Path(os.environ.get("DB_PATH", str(DATA_DIR / "kiwi.db")))
|
||||
|
||||
# Community feature settings
|
||||
COMMUNITY_DB_URL: str | None = os.environ.get("COMMUNITY_DB_URL") or None
|
||||
COMMUNITY_PSEUDONYM_SALT: str = os.environ.get(
|
||||
"COMMUNITY_PSEUDONYM_SALT", "kiwi-default-salt-change-in-prod"
|
||||
)
|
||||
COMMUNITY_CLOUD_FEED_URL: str = os.environ.get(
|
||||
"COMMUNITY_CLOUD_FEED_URL",
|
||||
"https://menagerie.circuitforge.tech/kiwi/api/v1/community/posts",
|
||||
)
|
||||
|
||||
# Processing
|
||||
MAX_CONCURRENT_JOBS: int = int(os.environ.get("MAX_CONCURRENT_JOBS", "4"))
|
||||
USE_GPU: bool = os.environ.get("USE_GPU", "true").lower() in ("1", "true", "yes")
|
||||
|
|
@ -53,26 +43,8 @@ class Settings:
|
|||
# Quality
|
||||
MIN_QUALITY_SCORE: float = float(os.environ.get("MIN_QUALITY_SCORE", "50.0"))
|
||||
|
||||
# CF-core resource coordinator (VRAM lease management)
|
||||
COORDINATOR_URL: str = os.environ.get("COORDINATOR_URL", "http://localhost:7700")
|
||||
|
||||
# Hosted cf-orch coordinator — bearer token for managed cloud GPU inference (Paid+)
|
||||
# CFOrchClient reads CF_LICENSE_KEY automatically; exposed here for startup validation.
|
||||
CF_LICENSE_KEY: str | None = os.environ.get("CF_LICENSE_KEY")
|
||||
|
||||
# E2E test account — analytics logging is suppressed for this user_id so test
|
||||
# runs don't pollute session counts. Set to the Directus UUID of the test user.
|
||||
E2E_TEST_USER_ID: str | None = os.environ.get("E2E_TEST_USER_ID") or None
|
||||
|
||||
# Feature flags
|
||||
ENABLE_OCR: bool = os.environ.get("ENABLE_OCR", "false").lower() in ("1", "true", "yes")
|
||||
# Use OrchestratedScheduler (coordinator-aware, multi-GPU fan-out) instead of
|
||||
# LocalScheduler. Defaults to true in CLOUD_MODE; can be set independently
|
||||
# for multi-GPU local rigs that don't need full cloud auth.
|
||||
USE_ORCH_SCHEDULER: bool | None = (
|
||||
None if os.environ.get("USE_ORCH_SCHEDULER") is None
|
||||
else os.environ.get("USE_ORCH_SCHEDULER", "").lower() in ("1", "true", "yes")
|
||||
)
|
||||
|
||||
# Runtime
|
||||
DEBUG: bool = os.environ.get("DEBUG", "false").lower() in ("1", "true", "yes")
|
||||
|
|
|
|||
|
|
@ -9,7 +9,6 @@ CREATE TABLE receipts_new (
|
|||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
filename TEXT NOT NULL,
|
||||
original_path TEXT NOT NULL,
|
||||
processed_path TEXT,
|
||||
status TEXT NOT NULL DEFAULT 'uploaded'
|
||||
CHECK (status IN (
|
||||
'uploaded',
|
||||
|
|
|
|||
|
|
@ -1,48 +0,0 @@
|
|||
-- Migration 006: Ingredient element profiles + FlavorGraph molecule index.
|
||||
|
||||
CREATE TABLE ingredient_profiles (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
name TEXT NOT NULL,
|
||||
name_variants TEXT NOT NULL DEFAULT '[]', -- JSON array of aliases/alternate spellings
|
||||
elements TEXT NOT NULL DEFAULT '[]', -- JSON array: ["Richness","Depth"]
|
||||
-- Functional submetadata (from USDA FDC)
|
||||
fat_pct REAL DEFAULT 0.0,
|
||||
fat_saturated_pct REAL DEFAULT 0.0,
|
||||
moisture_pct REAL DEFAULT 0.0,
|
||||
protein_pct REAL DEFAULT 0.0,
|
||||
starch_pct REAL DEFAULT 0.0,
|
||||
binding_score INTEGER DEFAULT 0 CHECK (binding_score BETWEEN 0 AND 3),
|
||||
glutamate_mg REAL DEFAULT 0.0,
|
||||
ph_estimate REAL,
|
||||
sodium_mg_per_100g REAL DEFAULT 0.0,
|
||||
smoke_point_c REAL,
|
||||
is_fermented INTEGER NOT NULL DEFAULT 0,
|
||||
is_emulsifier INTEGER NOT NULL DEFAULT 0,
|
||||
-- Aroma submetadata
|
||||
flavor_molecule_ids TEXT NOT NULL DEFAULT '[]', -- JSON array of FlavorGraph compound IDs
|
||||
heat_stable INTEGER NOT NULL DEFAULT 1,
|
||||
add_timing TEXT NOT NULL DEFAULT 'any'
|
||||
CHECK (add_timing IN ('early','finish','any')),
|
||||
-- Brightness submetadata
|
||||
acid_type TEXT CHECK (acid_type IN ('citric','acetic','lactic',NULL)),
|
||||
-- Texture submetadata
|
||||
texture_profile TEXT NOT NULL DEFAULT 'neutral',
|
||||
water_activity REAL,
|
||||
-- Source
|
||||
usda_fdc_id TEXT,
|
||||
source TEXT NOT NULL DEFAULT 'usda',
|
||||
created_at TEXT NOT NULL DEFAULT (datetime('now'))
|
||||
);
|
||||
|
||||
CREATE UNIQUE INDEX idx_ingredient_profiles_name ON ingredient_profiles (name);
|
||||
CREATE INDEX idx_ingredient_profiles_elements ON ingredient_profiles (elements);
|
||||
|
||||
CREATE TABLE flavor_molecules (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
compound_id TEXT NOT NULL UNIQUE, -- FlavorGraph node ID
|
||||
compound_name TEXT NOT NULL,
|
||||
ingredient_names TEXT NOT NULL DEFAULT '[]', -- JSON array of ingredient names
|
||||
created_at TEXT NOT NULL DEFAULT (datetime('now'))
|
||||
);
|
||||
|
||||
CREATE INDEX idx_flavor_molecules_compound_id ON flavor_molecules (compound_id);
|
||||
|
|
@ -1,24 +0,0 @@
|
|||
-- Migration 007: Recipe corpus index (food.com dataset).
|
||||
|
||||
CREATE TABLE recipes (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
external_id TEXT,
|
||||
title TEXT NOT NULL,
|
||||
ingredients TEXT NOT NULL DEFAULT '[]', -- JSON array of raw ingredient strings
|
||||
ingredient_names TEXT NOT NULL DEFAULT '[]', -- JSON array of normalized names
|
||||
directions TEXT NOT NULL DEFAULT '[]', -- JSON array of step strings
|
||||
category TEXT,
|
||||
keywords TEXT NOT NULL DEFAULT '[]', -- JSON array
|
||||
calories REAL,
|
||||
fat_g REAL,
|
||||
protein_g REAL,
|
||||
sodium_mg REAL,
|
||||
-- Element coverage scores computed at import time
|
||||
element_coverage TEXT NOT NULL DEFAULT '{}', -- JSON {element: 0.0-1.0}
|
||||
source TEXT NOT NULL DEFAULT 'foodcom',
|
||||
created_at TEXT NOT NULL DEFAULT (datetime('now'))
|
||||
);
|
||||
|
||||
CREATE INDEX idx_recipes_title ON recipes (title);
|
||||
CREATE INDEX idx_recipes_category ON recipes (category);
|
||||
CREATE UNIQUE INDEX idx_recipes_external_id ON recipes (external_id);
|
||||
|
|
@ -1,22 +0,0 @@
|
|||
-- Migration 008: Derived substitution pairs.
|
||||
-- Source: diff of lishuyang/recipepairs (GPL-3.0 derivation — raw data not shipped).
|
||||
|
||||
CREATE TABLE substitution_pairs (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
original_name TEXT NOT NULL,
|
||||
substitute_name TEXT NOT NULL,
|
||||
constraint_label TEXT NOT NULL, -- 'vegan'|'vegetarian'|'dairy_free'|'gluten_free'|'low_fat'|'low_sodium'
|
||||
fat_delta REAL DEFAULT 0.0,
|
||||
moisture_delta REAL DEFAULT 0.0,
|
||||
glutamate_delta REAL DEFAULT 0.0,
|
||||
protein_delta REAL DEFAULT 0.0,
|
||||
occurrence_count INTEGER DEFAULT 1,
|
||||
compensation_hints TEXT NOT NULL DEFAULT '[]', -- JSON [{ingredient, reason, element}]
|
||||
source TEXT NOT NULL DEFAULT 'derived',
|
||||
created_at TEXT NOT NULL DEFAULT (datetime('now'))
|
||||
);
|
||||
|
||||
CREATE INDEX idx_substitution_pairs_original ON substitution_pairs (original_name);
|
||||
CREATE INDEX idx_substitution_pairs_constraint ON substitution_pairs (constraint_label);
|
||||
CREATE UNIQUE INDEX idx_substitution_pairs_pair
|
||||
ON substitution_pairs (original_name, substitute_name, constraint_label);
|
||||
|
|
@ -1,27 +0,0 @@
|
|||
-- Migration 009: Staple library (bulk-preparable base components).
|
||||
|
||||
CREATE TABLE staples (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
slug TEXT NOT NULL UNIQUE,
|
||||
name TEXT NOT NULL,
|
||||
description TEXT,
|
||||
base_ingredients TEXT NOT NULL DEFAULT '[]', -- JSON array of ingredient strings
|
||||
base_method TEXT,
|
||||
base_time_minutes INTEGER,
|
||||
yield_formats TEXT NOT NULL DEFAULT '{}', -- JSON {format_name: {elements, shelf_days, methods, texture}}
|
||||
dietary_labels TEXT NOT NULL DEFAULT '[]', -- JSON ['vegan','high-protein']
|
||||
compatible_styles TEXT NOT NULL DEFAULT '[]', -- JSON [style_id]
|
||||
created_at TEXT NOT NULL DEFAULT (datetime('now'))
|
||||
);
|
||||
|
||||
CREATE TABLE user_staples (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
staple_slug TEXT NOT NULL REFERENCES staples(slug) ON DELETE CASCADE,
|
||||
active_format TEXT NOT NULL,
|
||||
quantity_g REAL,
|
||||
prepared_at TEXT,
|
||||
notes TEXT,
|
||||
created_at TEXT NOT NULL DEFAULT (datetime('now'))
|
||||
);
|
||||
|
||||
CREATE INDEX idx_user_staples_slug ON user_staples (staple_slug);
|
||||
|
|
@ -1,15 +0,0 @@
|
|||
-- Migration 010: User substitution approval log (opt-in dataset moat).
|
||||
|
||||
CREATE TABLE substitution_feedback (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
original_name TEXT NOT NULL,
|
||||
substitute_name TEXT NOT NULL,
|
||||
constraint_label TEXT,
|
||||
compensation_used TEXT NOT NULL DEFAULT '[]', -- JSON array of compensation ingredient names
|
||||
approved INTEGER NOT NULL DEFAULT 0,
|
||||
opted_in INTEGER NOT NULL DEFAULT 0, -- user consented to anonymized sharing
|
||||
created_at TEXT NOT NULL DEFAULT (datetime('now'))
|
||||
);
|
||||
|
||||
CREATE INDEX idx_substitution_feedback_original ON substitution_feedback (original_name);
|
||||
CREATE INDEX idx_substitution_feedback_opted_in ON substitution_feedback (opted_in);
|
||||
|
|
@ -1,11 +0,0 @@
|
|||
-- Migration 011: Daily rate limits (leftover mode: 5/day free tier).
|
||||
|
||||
CREATE TABLE rate_limits (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
feature TEXT NOT NULL,
|
||||
window_date TEXT NOT NULL, -- YYYY-MM-DD
|
||||
count INTEGER NOT NULL DEFAULT 0,
|
||||
UNIQUE (feature, window_date)
|
||||
);
|
||||
|
||||
CREATE INDEX idx_rate_limits_feature_date ON rate_limits (feature, window_date);
|
||||
|
|
@ -1,6 +0,0 @@
|
|||
-- Migration 012: User settings key-value store.
|
||||
|
||||
CREATE TABLE IF NOT EXISTS user_settings (
|
||||
key TEXT PRIMARY KEY,
|
||||
value TEXT NOT NULL
|
||||
);
|
||||
|
|
@ -1,18 +0,0 @@
|
|||
-- Migration 014: Add macro nutrition columns to recipes and ingredient_profiles.
|
||||
--
|
||||
-- recipes: sugar, carbs, fiber, servings, and an estimated flag.
|
||||
-- ingredient_profiles: carbs, fiber, calories, sugar per 100g (for estimation fallback).
|
||||
|
||||
ALTER TABLE recipes ADD COLUMN sugar_g REAL;
|
||||
ALTER TABLE recipes ADD COLUMN carbs_g REAL;
|
||||
ALTER TABLE recipes ADD COLUMN fiber_g REAL;
|
||||
ALTER TABLE recipes ADD COLUMN servings REAL;
|
||||
ALTER TABLE recipes ADD COLUMN nutrition_estimated INTEGER NOT NULL DEFAULT 0;
|
||||
|
||||
ALTER TABLE ingredient_profiles ADD COLUMN carbs_g_per_100g REAL DEFAULT 0.0;
|
||||
ALTER TABLE ingredient_profiles ADD COLUMN fiber_g_per_100g REAL DEFAULT 0.0;
|
||||
ALTER TABLE ingredient_profiles ADD COLUMN calories_per_100g REAL DEFAULT 0.0;
|
||||
ALTER TABLE ingredient_profiles ADD COLUMN sugar_g_per_100g REAL DEFAULT 0.0;
|
||||
|
||||
CREATE INDEX idx_recipes_sugar_g ON recipes (sugar_g);
|
||||
CREATE INDEX idx_recipes_carbs_g ON recipes (carbs_g);
|
||||
|
|
@ -1,38 +0,0 @@
|
|||
-- Migration 015: FTS5 inverted index for recipe ingredient lookup.
|
||||
--
|
||||
-- Content table backed by `recipes` — stores only the inverted index, no text duplication.
|
||||
-- MATCH queries replace O(N) LIKE scans with O(log N) token lookups.
|
||||
--
|
||||
-- One-time rebuild cost on 3.2M rows: ~15-30 seconds at startup.
|
||||
-- Subsequent startups skip this migration entirely.
|
||||
|
||||
CREATE VIRTUAL TABLE IF NOT EXISTS recipes_fts USING fts5(
|
||||
ingredient_names,
|
||||
content=recipes,
|
||||
content_rowid=id,
|
||||
tokenize="unicode61"
|
||||
);
|
||||
|
||||
INSERT INTO recipes_fts(recipes_fts) VALUES('rebuild');
|
||||
|
||||
-- Triggers to keep the FTS index in sync with the recipes table.
|
||||
-- Without these, rows inserted after the initial rebuild are invisible to FTS queries.
|
||||
CREATE TRIGGER IF NOT EXISTS recipes_fts_ai
|
||||
AFTER INSERT ON recipes BEGIN
|
||||
INSERT INTO recipes_fts(rowid, ingredient_names)
|
||||
VALUES (new.id, new.ingredient_names);
|
||||
END;
|
||||
|
||||
CREATE TRIGGER IF NOT EXISTS recipes_fts_ad
|
||||
AFTER DELETE ON recipes BEGIN
|
||||
INSERT INTO recipes_fts(recipes_fts, rowid, ingredient_names)
|
||||
VALUES ('delete', old.id, old.ingredient_names);
|
||||
END;
|
||||
|
||||
CREATE TRIGGER IF NOT EXISTS recipes_fts_au
|
||||
AFTER UPDATE ON recipes BEGIN
|
||||
INSERT INTO recipes_fts(recipes_fts, rowid, ingredient_names)
|
||||
VALUES ('delete', old.id, old.ingredient_names);
|
||||
INSERT INTO recipes_fts(rowid, ingredient_names)
|
||||
VALUES (new.id, new.ingredient_names);
|
||||
END;
|
||||
|
|
@ -1,27 +0,0 @@
|
|||
-- Migration 016: Add FTS5 sync triggers for the recipes_fts content table.
|
||||
--
|
||||
-- Migration 015 created recipes_fts and did a one-time rebuild, but omitted
|
||||
-- triggers. Without them, INSERT/UPDATE/DELETE on recipes does not update the
|
||||
-- FTS index, so new rows are invisible to MATCH queries.
|
||||
--
|
||||
-- CREATE TRIGGER IF NOT EXISTS is idempotent — safe to re-run.
|
||||
|
||||
CREATE TRIGGER IF NOT EXISTS recipes_fts_ai
|
||||
AFTER INSERT ON recipes BEGIN
|
||||
INSERT INTO recipes_fts(rowid, ingredient_names)
|
||||
VALUES (new.id, new.ingredient_names);
|
||||
END;
|
||||
|
||||
CREATE TRIGGER IF NOT EXISTS recipes_fts_ad
|
||||
AFTER DELETE ON recipes BEGIN
|
||||
INSERT INTO recipes_fts(recipes_fts, rowid, ingredient_names)
|
||||
VALUES ('delete', old.id, old.ingredient_names);
|
||||
END;
|
||||
|
||||
CREATE TRIGGER IF NOT EXISTS recipes_fts_au
|
||||
AFTER UPDATE ON recipes BEGIN
|
||||
INSERT INTO recipes_fts(recipes_fts, rowid, ingredient_names)
|
||||
VALUES ('delete', old.id, old.ingredient_names);
|
||||
INSERT INTO recipes_fts(rowid, ingredient_names)
|
||||
VALUES (new.id, new.ingredient_names);
|
||||
END;
|
||||
|
|
@ -1,10 +0,0 @@
|
|||
-- 017_household_invites.sql
|
||||
CREATE TABLE IF NOT EXISTS household_invites (
|
||||
token TEXT PRIMARY KEY,
|
||||
household_id TEXT NOT NULL,
|
||||
created_by TEXT NOT NULL,
|
||||
created_at TEXT NOT NULL DEFAULT (datetime('now')),
|
||||
expires_at TEXT NOT NULL,
|
||||
used_at TEXT,
|
||||
used_by TEXT
|
||||
);
|
||||
|
|
@ -1,14 +0,0 @@
|
|||
-- Migration 018: saved recipes bookmarks.
|
||||
|
||||
CREATE TABLE saved_recipes (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
recipe_id INTEGER NOT NULL REFERENCES recipes(id) ON DELETE CASCADE,
|
||||
saved_at TEXT NOT NULL DEFAULT (datetime('now')),
|
||||
notes TEXT,
|
||||
rating INTEGER CHECK (rating IS NULL OR (rating >= 0 AND rating <= 5)),
|
||||
style_tags TEXT NOT NULL DEFAULT '[]',
|
||||
UNIQUE (recipe_id)
|
||||
);
|
||||
|
||||
CREATE INDEX idx_saved_recipes_saved_at ON saved_recipes (saved_at DESC);
|
||||
CREATE INDEX idx_saved_recipes_rating ON saved_recipes (rating);
|
||||
|
|
@ -1,16 +0,0 @@
|
|||
-- Migration 019: recipe collections (Paid tier organisation).
|
||||
|
||||
CREATE TABLE recipe_collections (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
name TEXT NOT NULL,
|
||||
description TEXT,
|
||||
created_at TEXT NOT NULL DEFAULT (datetime('now')),
|
||||
updated_at TEXT NOT NULL DEFAULT (datetime('now'))
|
||||
);
|
||||
|
||||
CREATE TABLE recipe_collection_members (
|
||||
collection_id INTEGER NOT NULL REFERENCES recipe_collections(id) ON DELETE CASCADE,
|
||||
saved_recipe_id INTEGER NOT NULL REFERENCES saved_recipes(id) ON DELETE CASCADE,
|
||||
added_at TEXT NOT NULL DEFAULT (datetime('now')),
|
||||
PRIMARY KEY (collection_id, saved_recipe_id)
|
||||
);
|
||||
|
|
@ -1,13 +0,0 @@
|
|||
-- Migration 020: recipe browser navigation telemetry.
|
||||
-- Used to determine whether category nesting depth needs increasing.
|
||||
-- Review: if any category has page > 5 and result_count > 100 consistently,
|
||||
-- consider adding a third nesting level for that category.
|
||||
|
||||
CREATE TABLE browser_telemetry (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
domain TEXT NOT NULL,
|
||||
category TEXT NOT NULL,
|
||||
page INTEGER NOT NULL,
|
||||
result_count INTEGER NOT NULL,
|
||||
recorded_at TEXT NOT NULL DEFAULT (datetime('now'))
|
||||
);
|
||||
|
|
@ -1,43 +0,0 @@
|
|||
-- Migration 021: FTS5 inverted index for the recipe browser (category + keywords).
|
||||
--
|
||||
-- The browser domain queries were using LIKE '%keyword%' against category and
|
||||
-- keywords columns — a leading wildcard prevents any B-tree index use, so every
|
||||
-- query was a full sequential scan of 3.1M rows. This FTS5 index replaces those
|
||||
-- scans with O(log N) token lookups.
|
||||
--
|
||||
-- Content-table backed: stores only the inverted index, no text duplication.
|
||||
-- The keywords column is a JSON array; FTS5 tokenises it as plain text, stripping
|
||||
-- the punctuation, which gives correct per-word matching.
|
||||
--
|
||||
-- One-time rebuild cost on 3.1M rows: ~20-40 seconds at first startup.
|
||||
-- Subsequent startups skip this migration (IF NOT EXISTS guard).
|
||||
|
||||
CREATE VIRTUAL TABLE IF NOT EXISTS recipe_browser_fts USING fts5(
|
||||
category,
|
||||
keywords,
|
||||
content=recipes,
|
||||
content_rowid=id,
|
||||
tokenize="unicode61"
|
||||
);
|
||||
|
||||
INSERT INTO recipe_browser_fts(recipe_browser_fts) VALUES('rebuild');
|
||||
|
||||
CREATE TRIGGER IF NOT EXISTS recipe_browser_fts_ai
|
||||
AFTER INSERT ON recipes BEGIN
|
||||
INSERT INTO recipe_browser_fts(rowid, category, keywords)
|
||||
VALUES (new.id, new.category, new.keywords);
|
||||
END;
|
||||
|
||||
CREATE TRIGGER IF NOT EXISTS recipe_browser_fts_ad
|
||||
AFTER DELETE ON recipes BEGIN
|
||||
INSERT INTO recipe_browser_fts(recipe_browser_fts, rowid, category, keywords)
|
||||
VALUES ('delete', old.id, old.category, old.keywords);
|
||||
END;
|
||||
|
||||
CREATE TRIGGER IF NOT EXISTS recipe_browser_fts_au
|
||||
AFTER UPDATE ON recipes BEGIN
|
||||
INSERT INTO recipe_browser_fts(recipe_browser_fts, rowid, category, keywords)
|
||||
VALUES ('delete', old.id, old.category, old.keywords);
|
||||
INSERT INTO recipe_browser_fts(rowid, category, keywords)
|
||||
VALUES (new.id, new.category, new.keywords);
|
||||
END;
|
||||
|
|
@ -1,8 +0,0 @@
|
|||
-- 022_meal_plans.sql
|
||||
CREATE TABLE meal_plans (
|
||||
id INTEGER PRIMARY KEY,
|
||||
week_start TEXT NOT NULL,
|
||||
meal_types TEXT NOT NULL DEFAULT '["dinner"]',
|
||||
created_at TEXT NOT NULL DEFAULT (datetime('now')),
|
||||
updated_at TEXT NOT NULL DEFAULT (datetime('now'))
|
||||
);
|
||||
|
|
@ -1,5 +0,0 @@
|
|||
-- Migration 022: Add is_generic flag to recipes
|
||||
-- Generic recipes are catch-all/dump recipes with loose ingredient lists
|
||||
-- that should not appear in Level 1 (deterministic "use what I have") results.
|
||||
-- Admins can mark recipes via the recipe editor or a bulk backfill script.
|
||||
ALTER TABLE recipes ADD COLUMN is_generic INTEGER NOT NULL DEFAULT 0;
|
||||
|
|
@ -1,11 +0,0 @@
|
|||
-- 023_meal_plan_slots.sql
|
||||
CREATE TABLE meal_plan_slots (
|
||||
id INTEGER PRIMARY KEY,
|
||||
plan_id INTEGER NOT NULL REFERENCES meal_plans(id) ON DELETE CASCADE,
|
||||
day_of_week INTEGER NOT NULL CHECK(day_of_week BETWEEN 0 AND 6),
|
||||
meal_type TEXT NOT NULL,
|
||||
recipe_id INTEGER REFERENCES recipes(id),
|
||||
servings REAL NOT NULL DEFAULT 2.0,
|
||||
custom_label TEXT,
|
||||
UNIQUE(plan_id, day_of_week, meal_type)
|
||||
);
|
||||
|
|
@ -1,10 +0,0 @@
|
|||
-- 024_prep_sessions.sql
|
||||
CREATE TABLE prep_sessions (
|
||||
id INTEGER PRIMARY KEY,
|
||||
plan_id INTEGER NOT NULL REFERENCES meal_plans(id) ON DELETE CASCADE,
|
||||
scheduled_date TEXT NOT NULL,
|
||||
status TEXT NOT NULL DEFAULT 'draft'
|
||||
CHECK(status IN ('draft','reviewed','done')),
|
||||
created_at TEXT NOT NULL DEFAULT (datetime('now')),
|
||||
updated_at TEXT NOT NULL DEFAULT (datetime('now'))
|
||||
);
|
||||
|
|
@ -1,15 +0,0 @@
|
|||
-- 025_prep_tasks.sql
|
||||
CREATE TABLE prep_tasks (
|
||||
id INTEGER PRIMARY KEY,
|
||||
session_id INTEGER NOT NULL REFERENCES prep_sessions(id) ON DELETE CASCADE,
|
||||
recipe_id INTEGER REFERENCES recipes(id),
|
||||
slot_id INTEGER REFERENCES meal_plan_slots(id),
|
||||
task_label TEXT NOT NULL,
|
||||
duration_minutes INTEGER,
|
||||
sequence_order INTEGER NOT NULL,
|
||||
equipment TEXT,
|
||||
is_parallel INTEGER NOT NULL DEFAULT 0,
|
||||
notes TEXT,
|
||||
user_edited INTEGER NOT NULL DEFAULT 0,
|
||||
created_at TEXT NOT NULL DEFAULT (datetime('now'))
|
||||
);
|
||||
|
|
@ -1,21 +0,0 @@
|
|||
-- 028_community_pseudonyms.sql
|
||||
-- Per-user pseudonym store: maps the user's chosen community display name
|
||||
-- to their Directus user ID. This table lives in per-user kiwi.db only.
|
||||
-- It is NEVER replicated to the community PostgreSQL — pseudonym isolation is by design.
|
||||
--
|
||||
-- A user may have one active pseudonym. Old pseudonyms are retained for reference
|
||||
-- (posts published under them keep their pseudonym attribution) but only one is
|
||||
-- flagged as current (is_current = 1).
|
||||
|
||||
CREATE TABLE IF NOT EXISTS community_pseudonyms (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
pseudonym TEXT NOT NULL,
|
||||
directus_user_id TEXT NOT NULL,
|
||||
is_current INTEGER NOT NULL DEFAULT 1 CHECK (is_current IN (0, 1)),
|
||||
created_at TEXT NOT NULL DEFAULT (datetime('now'))
|
||||
);
|
||||
|
||||
-- Only one pseudonym can be current at a time per user
|
||||
CREATE UNIQUE INDEX IF NOT EXISTS idx_community_pseudonyms_current
|
||||
ON community_pseudonyms (directus_user_id)
|
||||
WHERE is_current = 1;
|
||||
|
|
@ -1,49 +0,0 @@
|
|||
-- Migration 029: Add inferred_tags column and update FTS index to include it.
|
||||
--
|
||||
-- inferred_tags holds a JSON array of normalized tag strings derived by
|
||||
-- scripts/pipeline/infer_recipe_tags.py (e.g. ["cuisine:Italian",
|
||||
-- "dietary:Low-Carb", "flavor:Umami", "can_be:Gluten-Free"]).
|
||||
--
|
||||
-- The FTS5 browser table is rebuilt to index inferred_tags alongside
|
||||
-- category and keywords so browse domain queries match against all signals.
|
||||
|
||||
-- 1. Add inferred_tags column (empty array default; populated by pipeline run)
|
||||
ALTER TABLE recipes ADD COLUMN inferred_tags TEXT NOT NULL DEFAULT '[]';
|
||||
|
||||
-- 2. Drop old FTS table and triggers that only covered category + keywords
|
||||
DROP TRIGGER IF EXISTS recipes_ai;
|
||||
DROP TRIGGER IF EXISTS recipes_ad;
|
||||
DROP TRIGGER IF EXISTS recipes_au;
|
||||
DROP TABLE IF EXISTS recipe_browser_fts;
|
||||
|
||||
-- 3. Recreate FTS5 table: now indexes category, keywords, AND inferred_tags
|
||||
CREATE VIRTUAL TABLE recipe_browser_fts USING fts5(
|
||||
category,
|
||||
keywords,
|
||||
inferred_tags,
|
||||
content=recipes,
|
||||
content_rowid=id
|
||||
);
|
||||
|
||||
-- 4. Triggers to keep FTS in sync with recipes table changes
|
||||
CREATE TRIGGER recipes_ai AFTER INSERT ON recipes BEGIN
|
||||
INSERT INTO recipe_browser_fts(rowid, category, keywords, inferred_tags)
|
||||
VALUES (new.id, new.category, new.keywords, new.inferred_tags);
|
||||
END;
|
||||
|
||||
CREATE TRIGGER recipes_ad AFTER DELETE ON recipes BEGIN
|
||||
INSERT INTO recipe_browser_fts(recipe_browser_fts, rowid, category, keywords, inferred_tags)
|
||||
VALUES ('delete', old.id, old.category, old.keywords, old.inferred_tags);
|
||||
END;
|
||||
|
||||
CREATE TRIGGER recipes_au AFTER UPDATE ON recipes BEGIN
|
||||
INSERT INTO recipe_browser_fts(recipe_browser_fts, rowid, category, keywords, inferred_tags)
|
||||
VALUES ('delete', old.id, old.category, old.keywords, old.inferred_tags);
|
||||
INSERT INTO recipe_browser_fts(rowid, category, keywords, inferred_tags)
|
||||
VALUES (new.id, new.category, new.keywords, new.inferred_tags);
|
||||
END;
|
||||
|
||||
-- 5. Populate FTS from current table state
|
||||
-- (inferred_tags is '[]' for all rows at this point; run infer_recipe_tags.py
|
||||
-- to populate, then the FTS will be rebuilt as part of that script.)
|
||||
INSERT INTO recipe_browser_fts(recipe_browser_fts) VALUES('rebuild');
|
||||
|
|
@ -1,5 +0,0 @@
|
|||
-- Migration 030: open-package tracking
|
||||
-- Adds opened_date to track when a multi-use item was first opened,
|
||||
-- enabling secondary shelf-life windows (e.g. salsa: 1 year sealed → 2 weeks opened).
|
||||
|
||||
ALTER TABLE inventory_items ADD COLUMN opened_date TEXT;
|
||||
|
|
@ -1,4 +0,0 @@
|
|||
-- Migration 031: add disposal_reason for waste logging (#60)
|
||||
-- status='discarded' already exists in the CHECK constraint from migration 002.
|
||||
-- This column stores free-text reason (optional) and calm-framing presets.
|
||||
ALTER TABLE inventory_items ADD COLUMN disposal_reason TEXT;
|
||||
|
|
@ -1,4 +0,0 @@
|
|||
-- 032_meal_plan_unique_week.sql
|
||||
-- Prevent duplicate plans for the same week.
|
||||
-- Existing duplicates must be resolved before applying (keep MIN(id) per week_start).
|
||||
CREATE UNIQUE INDEX IF NOT EXISTS idx_meal_plans_week_start ON meal_plans (week_start);
|
||||
|
|
@ -1,21 +0,0 @@
|
|||
-- Migration 033: standalone shopping list
|
||||
-- Items can be added manually, from recipe gap analysis, or from the recipe browser.
|
||||
-- Affiliate links are computed at query time by the API layer (never stored).
|
||||
|
||||
CREATE TABLE IF NOT EXISTS shopping_list_items (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
name TEXT NOT NULL,
|
||||
quantity REAL,
|
||||
unit TEXT,
|
||||
category TEXT,
|
||||
checked INTEGER NOT NULL DEFAULT 0, -- 0=want, 1=in-cart/checked off
|
||||
notes TEXT,
|
||||
source TEXT NOT NULL DEFAULT 'manual', -- manual | recipe | meal_plan
|
||||
recipe_id INTEGER REFERENCES recipes(id) ON DELETE SET NULL,
|
||||
sort_order INTEGER NOT NULL DEFAULT 0,
|
||||
created_at TEXT NOT NULL DEFAULT (datetime('now')),
|
||||
updated_at TEXT NOT NULL DEFAULT (datetime('now'))
|
||||
);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS idx_shopping_list_checked
|
||||
ON shopping_list_items (checked, sort_order);
|
||||
|
|
@ -1,14 +0,0 @@
|
|||
-- Migration 034: async recipe generation job queue
|
||||
CREATE TABLE IF NOT EXISTS recipe_jobs (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
job_id TEXT NOT NULL UNIQUE,
|
||||
user_id TEXT NOT NULL,
|
||||
status TEXT NOT NULL DEFAULT 'queued',
|
||||
request TEXT NOT NULL,
|
||||
result TEXT,
|
||||
error TEXT,
|
||||
created_at TEXT NOT NULL DEFAULT (datetime('now')),
|
||||
updated_at TEXT NOT NULL DEFAULT (datetime('now'))
|
||||
);
|
||||
CREATE INDEX IF NOT EXISTS idx_recipe_jobs_job_id ON recipe_jobs (job_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_recipe_jobs_user_id ON recipe_jobs (user_id, created_at DESC);
|
||||
1215
app/db/store.py
1215
app/db/store.py
File diff suppressed because it is too large
Load diff
11
app/main.py
11
app/main.py
|
|
@ -9,11 +9,7 @@ from fastapi.middleware.cors import CORSMiddleware
|
|||
|
||||
from app.api.routes import api_router
|
||||
from app.core.config import settings
|
||||
from app.services.meal_plan.affiliates import register_kiwi_programs
|
||||
|
||||
# Structured key=value log lines — grep/awk-friendly for log-based analytics.
|
||||
# Without basicConfig, app-level INFO logs are silently dropped.
|
||||
logging.basicConfig(level=logging.INFO, format="%(levelname)s:%(name)s: %(message)s")
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
|
|
@ -21,17 +17,12 @@ logger = logging.getLogger(__name__)
|
|||
async def lifespan(app: FastAPI):
|
||||
logger.info("Starting Kiwi API...")
|
||||
settings.ensure_dirs()
|
||||
register_kiwi_programs()
|
||||
|
||||
# Start LLM background task scheduler
|
||||
from app.tasks.scheduler import get_scheduler
|
||||
get_scheduler(settings.DB_PATH)
|
||||
logger.info("Task scheduler started.")
|
||||
|
||||
# Initialize community store (no-op if COMMUNITY_DB_URL is not set)
|
||||
from app.api.endpoints.community import init_community_store
|
||||
init_community_store(settings.COMMUNITY_DB_URL)
|
||||
|
||||
yield
|
||||
|
||||
# Graceful scheduler shutdown
|
||||
|
|
@ -44,7 +35,7 @@ async def lifespan(app: FastAPI):
|
|||
app = FastAPI(
|
||||
title=settings.PROJECT_NAME,
|
||||
description="Pantry tracking + leftover recipe suggestions",
|
||||
version="0.2.0",
|
||||
version="0.1.0",
|
||||
lifespan=lifespan,
|
||||
)
|
||||
|
||||
|
|
|
|||
|
|
@ -1,47 +0,0 @@
|
|||
"""Pydantic schemas for household management endpoints."""
|
||||
from __future__ import annotations
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
|
||||
class HouseholdCreateResponse(BaseModel):
|
||||
household_id: str
|
||||
message: str
|
||||
|
||||
|
||||
class HouseholdMember(BaseModel):
|
||||
user_id: str
|
||||
joined_at: str
|
||||
is_owner: bool
|
||||
|
||||
|
||||
class HouseholdStatusResponse(BaseModel):
|
||||
in_household: bool
|
||||
household_id: str | None = None
|
||||
is_owner: bool = False
|
||||
members: list[HouseholdMember] = Field(default_factory=list)
|
||||
max_seats: int = 4
|
||||
|
||||
|
||||
class HouseholdInviteResponse(BaseModel):
|
||||
invite_url: str
|
||||
token: str
|
||||
expires_at: str
|
||||
|
||||
|
||||
class HouseholdAcceptRequest(BaseModel):
|
||||
household_id: str
|
||||
token: str
|
||||
|
||||
|
||||
class HouseholdAcceptResponse(BaseModel):
|
||||
message: str
|
||||
household_id: str
|
||||
|
||||
|
||||
class HouseholdRemoveMemberRequest(BaseModel):
|
||||
user_id: str
|
||||
|
||||
|
||||
class MessageResponse(BaseModel):
|
||||
message: str
|
||||
|
|
@ -89,20 +89,9 @@ class InventoryItemUpdate(BaseModel):
|
|||
unit: Optional[str] = None
|
||||
location: Optional[str] = None
|
||||
sublocation: Optional[str] = None
|
||||
purchase_date: Optional[date] = None
|
||||
expiration_date: Optional[date] = None
|
||||
opened_date: Optional[date] = None
|
||||
status: Optional[str] = None
|
||||
notes: Optional[str] = None
|
||||
disposal_reason: Optional[str] = None
|
||||
|
||||
|
||||
class PartialConsumeRequest(BaseModel):
|
||||
quantity: float = Field(..., gt=0, description="Amount to consume from this item")
|
||||
|
||||
|
||||
class DiscardRequest(BaseModel):
|
||||
reason: Optional[str] = Field(None, max_length=200)
|
||||
|
||||
|
||||
class InventoryItemResponse(BaseModel):
|
||||
|
|
@ -117,14 +106,8 @@ class InventoryItemResponse(BaseModel):
|
|||
sublocation: Optional[str]
|
||||
purchase_date: Optional[str]
|
||||
expiration_date: Optional[str]
|
||||
opened_date: Optional[str] = None
|
||||
opened_expiry_date: Optional[str] = None
|
||||
secondary_state: Optional[str] = None
|
||||
secondary_uses: Optional[List[str]] = None
|
||||
secondary_warning: Optional[str] = None
|
||||
status: str
|
||||
notes: Optional[str]
|
||||
disposal_reason: Optional[str] = None
|
||||
source: str
|
||||
created_at: str
|
||||
updated_at: str
|
||||
|
|
@ -140,7 +123,6 @@ class BarcodeScanResult(BaseModel):
|
|||
product: Optional[ProductResponse]
|
||||
inventory_item: Optional[InventoryItemResponse]
|
||||
added_to_inventory: bool
|
||||
needs_manual_entry: bool = False
|
||||
message: str
|
||||
|
||||
|
||||
|
|
@ -151,32 +133,6 @@ class BarcodeScanResponse(BaseModel):
|
|||
message: str
|
||||
|
||||
|
||||
# ── Bulk add by name ─────────────────────────────────────────────────────────
|
||||
|
||||
class BulkAddItem(BaseModel):
|
||||
name: str = Field(..., min_length=1, max_length=200)
|
||||
quantity: float = Field(default=1.0, gt=0)
|
||||
unit: str = "count"
|
||||
location: str = "pantry"
|
||||
|
||||
|
||||
class BulkAddByNameRequest(BaseModel):
|
||||
items: List[BulkAddItem] = Field(..., min_length=1)
|
||||
|
||||
|
||||
class BulkAddItemResult(BaseModel):
|
||||
name: str
|
||||
ok: bool
|
||||
item_id: Optional[int] = None
|
||||
error: Optional[str] = None
|
||||
|
||||
|
||||
class BulkAddByNameResponse(BaseModel):
|
||||
added: int
|
||||
failed: int
|
||||
results: List[BulkAddItemResult]
|
||||
|
||||
|
||||
# ── Stats ─────────────────────────────────────────────────────────────────────
|
||||
|
||||
class InventoryStats(BaseModel):
|
||||
|
|
|
|||
|
|
@ -1,100 +0,0 @@
|
|||
# app/models/schemas/meal_plan.py
|
||||
"""Pydantic schemas for meal planning endpoints."""
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import date as _date
|
||||
|
||||
from pydantic import BaseModel, Field, field_validator
|
||||
|
||||
|
||||
VALID_MEAL_TYPES = {"breakfast", "lunch", "dinner", "snack"}
|
||||
|
||||
|
||||
class CreatePlanRequest(BaseModel):
|
||||
week_start: _date
|
||||
meal_types: list[str] = Field(default_factory=lambda: ["dinner"])
|
||||
|
||||
@field_validator("week_start")
|
||||
@classmethod
|
||||
def must_be_monday(cls, v: _date) -> _date:
|
||||
if v.weekday() != 0:
|
||||
raise ValueError("week_start must be a Monday (weekday 0)")
|
||||
return v
|
||||
|
||||
|
||||
class UpdatePlanRequest(BaseModel):
|
||||
meal_types: list[str]
|
||||
|
||||
|
||||
class UpsertSlotRequest(BaseModel):
|
||||
recipe_id: int | None = None
|
||||
servings: float = Field(2.0, gt=0)
|
||||
custom_label: str | None = None
|
||||
|
||||
|
||||
class SlotSummary(BaseModel):
|
||||
id: int
|
||||
plan_id: int
|
||||
day_of_week: int
|
||||
meal_type: str
|
||||
recipe_id: int | None
|
||||
recipe_title: str | None
|
||||
servings: float
|
||||
custom_label: str | None
|
||||
|
||||
|
||||
class PlanSummary(BaseModel):
|
||||
id: int
|
||||
week_start: str
|
||||
meal_types: list[str]
|
||||
slots: list[SlotSummary]
|
||||
created_at: str
|
||||
|
||||
|
||||
class RetailerLink(BaseModel):
|
||||
retailer: str
|
||||
label: str
|
||||
url: str
|
||||
|
||||
|
||||
class GapItem(BaseModel):
|
||||
ingredient_name: str
|
||||
needed_raw: str | None # e.g. "2 cups" from recipe text
|
||||
have_quantity: float | None # from pantry
|
||||
have_unit: str | None
|
||||
covered: bool # True = pantry has it
|
||||
retailer_links: list[RetailerLink] = Field(default_factory=list)
|
||||
|
||||
|
||||
class ShoppingListResponse(BaseModel):
|
||||
plan_id: int
|
||||
gap_items: list[GapItem]
|
||||
covered_items: list[GapItem]
|
||||
disclosure: str | None = None # affiliate disclosure text when links present
|
||||
|
||||
|
||||
class PrepTaskSummary(BaseModel):
|
||||
id: int
|
||||
recipe_id: int | None
|
||||
task_label: str
|
||||
duration_minutes: int | None
|
||||
sequence_order: int
|
||||
equipment: str | None
|
||||
is_parallel: bool
|
||||
notes: str | None
|
||||
user_edited: bool
|
||||
|
||||
|
||||
class PrepSessionSummary(BaseModel):
|
||||
id: int
|
||||
plan_id: int
|
||||
scheduled_date: str
|
||||
status: str
|
||||
tasks: list[PrepTaskSummary]
|
||||
|
||||
|
||||
class UpdatePrepTaskRequest(BaseModel):
|
||||
duration_minutes: int | None = None
|
||||
sequence_order: int | None = None
|
||||
notes: str | None = None
|
||||
equipment: str | None = None
|
||||
|
|
@ -1,152 +0,0 @@
|
|||
"""Pydantic schemas for the recipe engine API."""
|
||||
from __future__ import annotations
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
|
||||
class SwapCandidate(BaseModel):
|
||||
original_name: str
|
||||
substitute_name: str
|
||||
constraint_label: str
|
||||
explanation: str
|
||||
compensation_hints: list[dict] = Field(default_factory=list)
|
||||
|
||||
|
||||
class NutritionPanel(BaseModel):
|
||||
"""Per-recipe macro summary. All values are per-serving when servings is known,
|
||||
otherwise for the full recipe. None means data is unavailable."""
|
||||
calories: float | None = None
|
||||
fat_g: float | None = None
|
||||
protein_g: float | None = None
|
||||
carbs_g: float | None = None
|
||||
fiber_g: float | None = None
|
||||
sugar_g: float | None = None
|
||||
sodium_mg: float | None = None
|
||||
servings: float | None = None
|
||||
estimated: bool = False # True when nutrition was inferred from ingredient profiles
|
||||
|
||||
|
||||
class RecipeSuggestion(BaseModel):
|
||||
id: int
|
||||
title: str
|
||||
match_count: int
|
||||
element_coverage: dict[str, float] = Field(default_factory=dict)
|
||||
swap_candidates: list[SwapCandidate] = Field(default_factory=list)
|
||||
matched_ingredients: list[str] = Field(default_factory=list)
|
||||
missing_ingredients: list[str] = Field(default_factory=list)
|
||||
directions: list[str] = Field(default_factory=list)
|
||||
prep_notes: list[str] = Field(default_factory=list)
|
||||
notes: str = ""
|
||||
level: int = 1
|
||||
is_wildcard: bool = False
|
||||
nutrition: NutritionPanel | None = None
|
||||
source_url: str | None = None
|
||||
complexity: str | None = None # 'easy' | 'moderate' | 'involved'
|
||||
estimated_time_min: int | None = None # derived from step count + method signals
|
||||
|
||||
|
||||
class GroceryLink(BaseModel):
|
||||
ingredient: str
|
||||
retailer: str
|
||||
url: str
|
||||
|
||||
|
||||
class RecipeResult(BaseModel):
|
||||
suggestions: list[RecipeSuggestion]
|
||||
element_gaps: list[str]
|
||||
grocery_list: list[str] = Field(default_factory=list)
|
||||
grocery_links: list[GroceryLink] = Field(default_factory=list)
|
||||
rate_limited: bool = False
|
||||
rate_limit_count: int = 0
|
||||
orch_fallback: bool = False # True when orch budget exhausted; fell back to local LLM
|
||||
|
||||
|
||||
class RecipeJobQueued(BaseModel):
|
||||
job_id: str
|
||||
status: str = "queued"
|
||||
|
||||
|
||||
class RecipeJobStatus(BaseModel):
|
||||
job_id: str
|
||||
status: str
|
||||
result: RecipeResult | None = None
|
||||
error: str | None = None
|
||||
|
||||
|
||||
class NutritionFilters(BaseModel):
|
||||
"""Optional per-serving upper bounds for macro filtering. None = no filter."""
|
||||
max_calories: float | None = None
|
||||
max_sugar_g: float | None = None
|
||||
max_carbs_g: float | None = None
|
||||
max_sodium_mg: float | None = None
|
||||
|
||||
|
||||
class RecipeRequest(BaseModel):
|
||||
pantry_items: list[str]
|
||||
# Maps product name → secondary state label for items past nominal expiry
|
||||
# but still within their secondary use window (e.g. {"Bread": "stale"}).
|
||||
# Used by the recipe engine to boost recipes suited to those specific states.
|
||||
secondary_pantry_items: dict[str, str] = Field(default_factory=dict)
|
||||
level: int = Field(default=1, ge=1, le=4)
|
||||
constraints: list[str] = Field(default_factory=list)
|
||||
expiry_first: bool = False
|
||||
hard_day_mode: bool = False
|
||||
max_missing: int | None = None
|
||||
style_id: str | None = None
|
||||
category: str | None = None
|
||||
tier: str = "free"
|
||||
has_byok: bool = False
|
||||
wildcard_confirmed: bool = False
|
||||
allergies: list[str] = Field(default_factory=list)
|
||||
nutrition_filters: NutritionFilters = Field(default_factory=NutritionFilters)
|
||||
excluded_ids: list[int] = Field(default_factory=list)
|
||||
shopping_mode: bool = False
|
||||
pantry_match_only: bool = False # when True, only return recipes with zero missing ingredients
|
||||
complexity_filter: str | None = None # 'easy' | 'moderate' | 'involved' — None = any
|
||||
max_time_min: int | None = None # filter by estimated cooking time ceiling
|
||||
unit_system: str = "metric" # "metric" | "imperial"
|
||||
|
||||
|
||||
# ── Build Your Own schemas ──────────────────────────────────────────────────
|
||||
|
||||
|
||||
class AssemblyRoleOut(BaseModel):
|
||||
"""One role slot in a template, as returned by GET /api/recipes/templates."""
|
||||
|
||||
display: str
|
||||
required: bool
|
||||
keywords: list[str]
|
||||
hint: str = ""
|
||||
|
||||
|
||||
class AssemblyTemplateOut(BaseModel):
|
||||
"""One assembly template, as returned by GET /api/recipes/templates."""
|
||||
|
||||
id: str # slug, e.g. "burrito_taco"
|
||||
title: str
|
||||
icon: str
|
||||
descriptor: str
|
||||
role_sequence: list[AssemblyRoleOut]
|
||||
|
||||
|
||||
class RoleCandidateItem(BaseModel):
|
||||
"""One candidate ingredient for a wizard picker step."""
|
||||
|
||||
name: str
|
||||
in_pantry: bool
|
||||
tags: list[str] = Field(default_factory=list)
|
||||
|
||||
|
||||
class RoleCandidatesResponse(BaseModel):
|
||||
"""Response from GET /api/recipes/template-candidates."""
|
||||
|
||||
compatible: list[RoleCandidateItem] = Field(default_factory=list)
|
||||
other: list[RoleCandidateItem] = Field(default_factory=list)
|
||||
available_tags: list[str] = Field(default_factory=list)
|
||||
|
||||
|
||||
class BuildRequest(BaseModel):
|
||||
"""Request body for POST /api/recipes/build."""
|
||||
|
||||
template_id: str
|
||||
role_overrides: dict[str, str] = Field(default_factory=dict)
|
||||
|
|
@ -1,44 +0,0 @@
|
|||
"""Pydantic schemas for saved recipes and collections."""
|
||||
from __future__ import annotations
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
|
||||
class SaveRecipeRequest(BaseModel):
|
||||
recipe_id: int
|
||||
notes: str | None = None
|
||||
rating: int | None = Field(None, ge=0, le=5)
|
||||
|
||||
|
||||
class UpdateSavedRecipeRequest(BaseModel):
|
||||
notes: str | None = None
|
||||
rating: int | None = Field(None, ge=0, le=5)
|
||||
style_tags: list[str] = Field(default_factory=list)
|
||||
|
||||
|
||||
class SavedRecipeSummary(BaseModel):
|
||||
id: int
|
||||
recipe_id: int
|
||||
title: str
|
||||
saved_at: str
|
||||
notes: str | None
|
||||
rating: int | None
|
||||
style_tags: list[str]
|
||||
collection_ids: list[int] = Field(default_factory=list)
|
||||
|
||||
|
||||
class CollectionSummary(BaseModel):
|
||||
id: int
|
||||
name: str
|
||||
description: str | None
|
||||
member_count: int
|
||||
created_at: str
|
||||
|
||||
|
||||
class CollectionRequest(BaseModel):
|
||||
name: str
|
||||
description: str | None = None
|
||||
|
||||
|
||||
class CollectionMemberRequest(BaseModel):
|
||||
saved_recipe_id: int
|
||||
|
|
@ -1,60 +0,0 @@
|
|||
"""Pydantic schemas for the shopping list endpoints."""
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Optional
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
|
||||
class ShoppingItemCreate(BaseModel):
|
||||
name: str = Field(..., min_length=1, max_length=200)
|
||||
quantity: Optional[float] = None
|
||||
unit: Optional[str] = None
|
||||
category: Optional[str] = None
|
||||
notes: Optional[str] = None
|
||||
source: str = "manual"
|
||||
recipe_id: Optional[int] = None
|
||||
sort_order: int = 0
|
||||
|
||||
|
||||
class ShoppingItemUpdate(BaseModel):
|
||||
name: Optional[str] = Field(None, min_length=1, max_length=200)
|
||||
quantity: Optional[float] = None
|
||||
unit: Optional[str] = None
|
||||
category: Optional[str] = None
|
||||
checked: Optional[bool] = None
|
||||
notes: Optional[str] = None
|
||||
sort_order: Optional[int] = None
|
||||
|
||||
|
||||
class GroceryLinkOut(BaseModel):
|
||||
ingredient: str
|
||||
retailer: str
|
||||
url: str
|
||||
|
||||
|
||||
class ShoppingItemResponse(BaseModel):
|
||||
id: int
|
||||
name: str
|
||||
quantity: Optional[float]
|
||||
unit: Optional[str]
|
||||
category: Optional[str]
|
||||
checked: bool
|
||||
notes: Optional[str]
|
||||
source: str
|
||||
recipe_id: Optional[int]
|
||||
sort_order: int
|
||||
created_at: str
|
||||
updated_at: str
|
||||
grocery_links: list[GroceryLinkOut] = []
|
||||
|
||||
|
||||
class BulkAddFromRecipeRequest(BaseModel):
|
||||
recipe_id: int
|
||||
include_covered: bool = False # if True, add pantry-covered items too
|
||||
|
||||
|
||||
class ConfirmPurchaseRequest(BaseModel):
|
||||
"""Move a checked item into pantry inventory."""
|
||||
location: str = "pantry"
|
||||
quantity: Optional[float] = None # override the list quantity
|
||||
unit: Optional[str] = None
|
||||
|
|
@ -5,8 +5,6 @@ This module provides functionality to detect and decode barcodes
|
|||
from images (UPC, EAN, QR codes, etc.).
|
||||
"""
|
||||
|
||||
import io
|
||||
|
||||
import cv2
|
||||
import numpy as np
|
||||
from pyzbar import pyzbar
|
||||
|
|
@ -14,12 +12,6 @@ from pathlib import Path
|
|||
from typing import List, Dict, Any, Optional
|
||||
import logging
|
||||
|
||||
try:
|
||||
from PIL import Image as _PILImage
|
||||
_HAS_PIL = True
|
||||
except ImportError:
|
||||
_HAS_PIL = False
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
|
|
@ -84,7 +76,9 @@ class BarcodeScanner:
|
|||
# 4. Try rotations if still no barcodes found (handles tilted/rotated barcodes)
|
||||
if not barcodes:
|
||||
logger.info("No barcodes found in standard orientation, trying rotations...")
|
||||
for angle in [90, 180, 270, 45, 135]:
|
||||
# Try incremental angles: 30°, 60°, 90° (covers 0-90° range)
|
||||
# 0° already tried, 180° is functionally same as 0°, 90°/270° are same axis
|
||||
for angle in [30, 60, 90]:
|
||||
rotated_gray = self._rotate_image(gray, angle)
|
||||
rotated_color = self._rotate_image(image, angle)
|
||||
detected = self._detect_barcodes(rotated_gray, rotated_color)
|
||||
|
|
@ -270,26 +264,6 @@ class BarcodeScanner:
|
|||
|
||||
return list(seen.values())
|
||||
|
||||
def _fix_exif_orientation(self, image_bytes: bytes) -> bytes:
|
||||
"""Apply EXIF orientation correction so cv2 sees an upright image.
|
||||
|
||||
Phone cameras embed rotation in EXIF; cv2.imdecode ignores it,
|
||||
so a photo taken in portrait may arrive physically sideways in memory.
|
||||
"""
|
||||
if not _HAS_PIL:
|
||||
return image_bytes
|
||||
try:
|
||||
pil = _PILImage.open(io.BytesIO(image_bytes))
|
||||
pil = _PILImage.fromarray(np.array(pil)) # strips EXIF but applies orientation via PIL
|
||||
# Use ImageOps.exif_transpose for proper EXIF-aware rotation
|
||||
import PIL.ImageOps
|
||||
pil = PIL.ImageOps.exif_transpose(pil)
|
||||
buf = io.BytesIO()
|
||||
pil.save(buf, format="JPEG")
|
||||
return buf.getvalue()
|
||||
except Exception:
|
||||
return image_bytes
|
||||
|
||||
def scan_from_bytes(self, image_bytes: bytes) -> List[Dict[str, Any]]:
|
||||
"""
|
||||
Scan barcodes from image bytes (uploaded file).
|
||||
|
|
@ -301,10 +275,6 @@ class BarcodeScanner:
|
|||
List of detected barcodes
|
||||
"""
|
||||
try:
|
||||
# Apply EXIF orientation correction first (phone cameras embed rotation in EXIF;
|
||||
# cv2.imdecode ignores it, causing sideways barcodes to appear rotated in memory).
|
||||
image_bytes = self._fix_exif_orientation(image_bytes)
|
||||
|
||||
# Convert bytes to numpy array
|
||||
nparr = np.frombuffer(image_bytes, np.uint8)
|
||||
image = cv2.imdecode(nparr, cv2.IMREAD_COLOR)
|
||||
|
|
@ -330,12 +300,11 @@ class BarcodeScanner:
|
|||
)
|
||||
barcodes.extend(self._detect_barcodes(thresh, image))
|
||||
|
||||
# 3. Try all 90° rotations + common tilt angles
|
||||
# 90/270 catches truly sideways barcodes; 180 catches upside-down;
|
||||
# 45/135 catches tilted barcodes on flat surfaces.
|
||||
# 3. Try rotations if still no barcodes found
|
||||
if not barcodes:
|
||||
logger.info("No barcodes found in uploaded image, trying rotations...")
|
||||
for angle in [90, 180, 270, 45, 135]:
|
||||
# Try incremental angles: 30°, 60°, 90° (covers 0-90° range)
|
||||
for angle in [30, 60, 90]:
|
||||
rotated_gray = self._rotate_image(gray, angle)
|
||||
rotated_color = self._rotate_image(image, angle)
|
||||
detected = self._detect_barcodes(rotated_gray, rotated_color)
|
||||
|
|
|
|||
|
|
@ -1,44 +0,0 @@
|
|||
# app/services/community/ap_compat.py
|
||||
# MIT License — AP scaffold only (no actor, inbox, outbox)
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime, timezone
|
||||
|
||||
|
||||
def post_to_ap_json_ld(post: dict, base_url: str) -> dict:
|
||||
"""Serialize a community post dict to an ActivityPub-compatible JSON-LD Note.
|
||||
|
||||
This is a read-only scaffold. No AP actor, inbox, or outbox.
|
||||
The slug URI is stable so a future full AP implementation can reuse posts
|
||||
without a DB migration.
|
||||
"""
|
||||
slug = post["slug"]
|
||||
published = post.get("published")
|
||||
if isinstance(published, datetime):
|
||||
published_str = published.astimezone(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
|
||||
else:
|
||||
published_str = str(published)
|
||||
|
||||
dietary_tags: list[str] = post.get("dietary_tags") or []
|
||||
tags = [{"type": "Hashtag", "name": "#kiwi"}]
|
||||
for tag in dietary_tags:
|
||||
tags.append({"type": "Hashtag", "name": f"#{tag.replace('-', '').replace(' ', '')}"})
|
||||
|
||||
return {
|
||||
"@context": "https://www.w3.org/ns/activitystreams",
|
||||
"type": "Note",
|
||||
"id": f"{base_url}/api/v1/community/posts/{slug}",
|
||||
"attributedTo": post.get("pseudonym", "anonymous"),
|
||||
"content": _build_content(post),
|
||||
"published": published_str,
|
||||
"tag": tags,
|
||||
}
|
||||
|
||||
|
||||
def _build_content(post: dict) -> str:
|
||||
title = post.get("title") or "Untitled"
|
||||
desc = post.get("description")
|
||||
if desc:
|
||||
return f"{title} — {desc}"
|
||||
return title
|
||||
|
|
@ -1,90 +0,0 @@
|
|||
# app/services/community/community_store.py
|
||||
# MIT License
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
|
||||
from circuitforge_core.community import CommunityPost, SharedStore
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class KiwiCommunityStore(SharedStore):
|
||||
"""Kiwi-specific community store: adds kiwi-domain query methods on top of SharedStore."""
|
||||
|
||||
def list_meal_plans(
|
||||
self,
|
||||
limit: int = 20,
|
||||
offset: int = 0,
|
||||
dietary_tags: list[str] | None = None,
|
||||
allergen_exclude: list[str] | None = None,
|
||||
) -> list[CommunityPost]:
|
||||
return self.list_posts(
|
||||
limit=limit,
|
||||
offset=offset,
|
||||
post_type="plan",
|
||||
dietary_tags=dietary_tags,
|
||||
allergen_exclude=allergen_exclude,
|
||||
source_product="kiwi",
|
||||
)
|
||||
|
||||
def list_outcomes(
|
||||
self,
|
||||
limit: int = 20,
|
||||
offset: int = 0,
|
||||
post_type: str | None = None,
|
||||
) -> list[CommunityPost]:
|
||||
if post_type in ("recipe_success", "recipe_blooper"):
|
||||
return self.list_posts(
|
||||
limit=limit,
|
||||
offset=offset,
|
||||
post_type=post_type,
|
||||
source_product="kiwi",
|
||||
)
|
||||
success = self.list_posts(
|
||||
limit=limit,
|
||||
offset=0,
|
||||
post_type="recipe_success",
|
||||
source_product="kiwi",
|
||||
)
|
||||
bloopers = self.list_posts(
|
||||
limit=limit,
|
||||
offset=0,
|
||||
post_type="recipe_blooper",
|
||||
source_product="kiwi",
|
||||
)
|
||||
merged = sorted(success + bloopers, key=lambda p: p.published, reverse=True)
|
||||
return merged[:limit]
|
||||
|
||||
|
||||
def get_or_create_pseudonym(
|
||||
store,
|
||||
directus_user_id: str,
|
||||
requested_name: str | None,
|
||||
) -> str:
|
||||
"""Return the user's current pseudonym, creating it if it doesn't exist.
|
||||
|
||||
If the user has an existing pseudonym, return it (ignore requested_name).
|
||||
If not, create using requested_name (must be provided for first-time setup).
|
||||
|
||||
Raises ValueError if no existing pseudonym and requested_name is None or blank.
|
||||
"""
|
||||
existing = store.get_current_pseudonym(directus_user_id)
|
||||
if existing:
|
||||
return existing
|
||||
|
||||
if not requested_name or not requested_name.strip():
|
||||
raise ValueError(
|
||||
"A pseudonym is required for first publish. "
|
||||
"Pass requested_name with the user's chosen display name."
|
||||
)
|
||||
|
||||
name = requested_name.strip()
|
||||
if "@" in name:
|
||||
raise ValueError(
|
||||
"Pseudonym must not contain '@' — use a display name, not an email address."
|
||||
)
|
||||
|
||||
store.set_pseudonym(directus_user_id, name)
|
||||
return name
|
||||
|
|
@ -1,138 +0,0 @@
|
|||
# app/services/community/element_snapshot.py
|
||||
# MIT License
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
|
||||
# Ingredient name substrings → allergen flag
|
||||
_ALLERGEN_MAP: dict[str, str] = {
|
||||
"milk": "dairy", "cream": "dairy", "cheese": "dairy", "butter": "dairy",
|
||||
"yogurt": "dairy", "whey": "dairy",
|
||||
"egg": "eggs",
|
||||
"wheat": "gluten", "pasta": "gluten", "flour": "gluten", "bread": "gluten",
|
||||
"barley": "gluten", "rye": "gluten",
|
||||
"peanut": "nuts", "almond": "nuts", "cashew": "nuts", "walnut": "nuts",
|
||||
"pecan": "nuts", "hazelnut": "nuts", "pistachio": "nuts", "macadamia": "nuts",
|
||||
"soy": "soy", "tofu": "soy", "edamame": "soy", "miso": "soy", "tempeh": "soy",
|
||||
"shrimp": "shellfish", "crab": "shellfish", "lobster": "shellfish",
|
||||
"clam": "shellfish", "mussel": "shellfish", "scallop": "shellfish",
|
||||
"fish": "fish", "salmon": "fish", "tuna": "fish", "cod": "fish",
|
||||
"tilapia": "fish", "halibut": "fish",
|
||||
"sesame": "sesame",
|
||||
}
|
||||
|
||||
_MEAT_KEYWORDS = frozenset([
|
||||
"chicken", "beef", "pork", "lamb", "turkey", "bacon", "ham", "sausage",
|
||||
"salami", "prosciutto", "guanciale", "pancetta", "steak", "ground meat",
|
||||
"mince", "veal", "duck", "venison", "bison", "lard",
|
||||
])
|
||||
_SEAFOOD_KEYWORDS = frozenset([
|
||||
"fish", "shrimp", "crab", "lobster", "tuna", "salmon", "clam", "mussel",
|
||||
"scallop", "anchovy", "sardine", "cod", "tilapia",
|
||||
])
|
||||
_ANIMAL_PRODUCT_KEYWORDS = frozenset([
|
||||
"milk", "cream", "cheese", "butter", "egg", "honey", "yogurt", "whey",
|
||||
])
|
||||
|
||||
|
||||
def _detect_allergens(ingredient_names: list[str]) -> list[str]:
|
||||
found: set[str] = set()
|
||||
lowered = [n.lower() for n in ingredient_names]
|
||||
for ingredient in lowered:
|
||||
for keyword, flag in _ALLERGEN_MAP.items():
|
||||
if keyword in ingredient:
|
||||
found.add(flag)
|
||||
return sorted(found)
|
||||
|
||||
|
||||
def _detect_dietary_tags(ingredient_names: list[str]) -> list[str]:
|
||||
lowered = [n.lower() for n in ingredient_names]
|
||||
all_text = " ".join(lowered)
|
||||
|
||||
has_meat = any(k in all_text for k in _MEAT_KEYWORDS)
|
||||
has_seafood = any(k in all_text for k in _SEAFOOD_KEYWORDS)
|
||||
has_animal_products = any(k in all_text for k in _ANIMAL_PRODUCT_KEYWORDS)
|
||||
|
||||
tags: list[str] = []
|
||||
if not has_meat and not has_seafood:
|
||||
tags.append("vegetarian")
|
||||
if not has_meat and not has_seafood and not has_animal_products:
|
||||
tags.append("vegan")
|
||||
return tags
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class ElementSnapshot:
    """Denormalized flavor/nutrition summary for a set of recipes.

    Built once at publish time by compute_snapshot() and stored on the
    community post; numeric scores are averages across the recipes.
    """

    # SFAH-style element scores, averaged across the recipes (0.0 when unknown).
    seasoning_score: float
    richness_score: float
    brightness_score: float
    depth_score: float
    aroma_score: float
    structure_score: float
    # Texture label taken from the first recipe (see compute_snapshot).
    texture_profile: str
    # Union of detected tags/flags across all ingredients (sorted, may be empty).
    dietary_tags: tuple[str, ...]
    allergen_flags: tuple[str, ...]
    # Always empty in compute_snapshot today — reserved; element type not
    # established by this module (TODO confirm intended contents).
    flavor_molecules: tuple
    # Mean USDA FDC macro values; None when no recipe reports the value.
    fat_pct: float | None
    protein_pct: float | None
    moisture_pct: float | None
|
||||
|
||||
|
||||
def compute_snapshot(recipe_ids: list[int], store) -> ElementSnapshot:
    """Compute an element snapshot from a list of recipe IDs.

    Pulls SFAH scores, ingredient lists, and USDA FDC macros from the corpus.
    Averages numeric scores across all recipes. Unions allergen flags and
    dietary tags. Call at publish time only — snapshot is stored denormalized
    in community_posts.

    Args:
        recipe_ids: recipe IDs backing the post; may be empty.
        store: corpus accessor exposing ``get_recipes_by_ids(ids) -> list[dict]``.

    Returns:
        An ElementSnapshot; an all-zero/empty snapshot when no IDs are given
        or none of them resolve to rows.
    """
    rows = store.get_recipes_by_ids(recipe_ids) if recipe_ids else []
    if not rows:
        # Single construction point for the empty snapshot (previously this
        # literal was duplicated for the no-ids and no-rows cases).
        return ElementSnapshot(
            seasoning_score=0.0, richness_score=0.0, brightness_score=0.0,
            depth_score=0.0, aroma_score=0.0, structure_score=0.0,
            texture_profile="", dietary_tags=(), allergen_flags=(),
            flavor_molecules=(), fat_pct=None, protein_pct=None, moisture_pct=None,
        )

    def _avg(field: str) -> float:
        # Missing or None scores count as 0.0 (denominator stays len(rows)).
        vals = [r.get(field) or 0.0 for r in rows]
        return sum(vals) / len(vals)

    def _macro_avg(field: str) -> float | None:
        # Macros average only over rows that actually report the value.
        vals = [r[field] for r in rows if r.get(field) is not None]
        return sum(vals) / len(vals) if vals else None

    all_ingredients: list[str] = []
    for r in rows:
        names = r.get("ingredient_names") or []
        # Defensive: ignore malformed (non-list) ingredient payloads.
        all_ingredients.extend(names if isinstance(names, list) else [])

    return ElementSnapshot(
        seasoning_score=_avg("seasoning_score"),
        richness_score=_avg("richness_score"),
        brightness_score=_avg("brightness_score"),
        depth_score=_avg("depth_score"),
        aroma_score=_avg("aroma_score"),
        structure_score=_avg("structure_score"),
        # Texture comes from the first recipe only — presumably "primary
        # recipe sets the texture"; TODO confirm against callers.
        texture_profile=rows[0].get("texture_profile") or "",
        dietary_tags=tuple(_detect_dietary_tags(all_ingredients)),
        allergen_flags=tuple(_detect_allergens(all_ingredients)),
        # Not populated yet — reserved field.
        flavor_molecules=(),
        fat_pct=_macro_avg("fat"),
        protein_pct=_macro_avg("protein"),
        moisture_pct=_macro_avg("moisture"),
    )
|
||||
|
|
@ -1,43 +0,0 @@
|
|||
# app/services/community/feed.py
|
||||
# MIT License
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime, timezone
|
||||
from email.utils import format_datetime
|
||||
from xml.etree.ElementTree import Element, SubElement, tostring
|
||||
|
||||
|
||||
def posts_to_rss(posts: list[dict], base_url: str) -> str:
    """Render community post dicts as an RSS 2.0 feed.

    base_url is the root URL of this Kiwi instance (no trailing slash).
    Returns the feed as a UTF-8 XML string.
    """

    def _text_child(parent: Element, tag: str, text: str) -> None:
        child = SubElement(parent, tag)
        child.text = text

    rss = Element("rss", version="2.0")
    channel = SubElement(rss, "channel")

    for tag, text in (
        ("title", "Kiwi Community Feed"),
        ("link", f"{base_url}/community"),
        ("description", "Meal plans and recipe outcomes from the Kiwi community"),
        ("language", "en"),
        ("lastBuildDate", format_datetime(datetime.now(timezone.utc))),
    ):
        _text_child(channel, tag, text)

    for post in posts:
        item = SubElement(channel, "item")
        _text_child(item, "title", post.get("title") or "Untitled")
        post_url = f"{base_url}/api/v1/community/posts/{post['slug']}"
        _text_child(item, "link", post_url)
        _text_child(item, "guid", post_url)
        if post.get("description"):
            _text_child(item, "description", post["description"])
        published = post.get("published")
        if isinstance(published, datetime):
            _text_child(item, "pubDate", format_datetime(published))

    return '<?xml version="1.0" encoding="UTF-8"?>\n' + tostring(rss, encoding="unicode")
|
||||
|
||||
|
||||
def _sub(parent: Element, tag: str, text: str) -> Element:
|
||||
el = SubElement(parent, tag)
|
||||
el.text = text
|
||||
return el
|
||||
|
|
@ -1,72 +0,0 @@
|
|||
# app/services/community/mdns.py
|
||||
# MIT License
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import socket
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Import deferred to avoid hard failure when zeroconf is not installed
|
||||
try:
|
||||
from zeroconf import ServiceInfo, Zeroconf
|
||||
_ZEROCONF_AVAILABLE = True
|
||||
except ImportError:
|
||||
_ZEROCONF_AVAILABLE = False
|
||||
|
||||
|
||||
class KiwiMDNS:
    """Advertise this Kiwi instance on the LAN via mDNS (_kiwi._tcp.local).

    Defaults to disabled (enabled=False). User must explicitly opt in via the
    Settings page. This matches the CF a11y requirement: no surprise broadcasting.

    Usage:
        mdns = KiwiMDNS(enabled=settings.MDNS_ENABLED, port=settings.PORT,
                        feed_url=f"http://{hostname}:{settings.PORT}/api/v1/community/local-feed")
        mdns.start()  # in lifespan startup
        mdns.stop()   # in lifespan shutdown
    """

    SERVICE_TYPE = "_kiwi._tcp.local."

    def __init__(self, enabled: bool, port: int, feed_url: str) -> None:
        self._enabled = enabled
        self._port = port
        self._feed_url = feed_url
        self._zc: "Zeroconf | None" = None
        self._info: "ServiceInfo | None" = None

    @staticmethod
    def _lan_address() -> bytes:
        """Best-effort packed IPv4 address of the LAN-facing interface.

        Connecting a UDP socket selects the outbound interface without
        sending any packets; the target (TEST-NET-1) is never contacted.
        Falls back to loopback when no route is available.
        """
        try:
            with socket.socket(socket.AF_INET, socket.SOCK_DGRAM) as probe:
                probe.connect(("192.0.2.1", 1))
                return socket.inet_aton(probe.getsockname()[0])
        except OSError:
            return socket.inet_aton("127.0.0.1")

    def start(self) -> None:
        """Register the mDNS service; no-op when disabled or zeroconf is missing."""
        if not self._enabled:
            logger.debug("mDNS advertisement disabled (user has not opted in)")
            return
        if not _ZEROCONF_AVAILABLE:
            logger.warning("zeroconf package not installed — mDNS advertisement unavailable")
            return

        hostname = socket.gethostname()
        service_name = f"kiwi-{hostname}.{self.SERVICE_TYPE}"
        self._info = ServiceInfo(
            type_=self.SERVICE_TYPE,
            name=service_name,
            port=self._port,
            properties={
                b"feed_url": self._feed_url.encode(),
                b"version": b"1",
            },
            # Fix: previously advertised 127.0.0.1, which LAN peers cannot
            # reach — advertise the host's LAN-facing address instead.
            addresses=[self._lan_address()],
        )
        self._zc = Zeroconf()
        self._zc.register_service(self._info)
        logger.info("mDNS: advertising %s on port %d", service_name, self._port)

    def stop(self) -> None:
        """Unregister and close the advertisement; safe to call when never started."""
        if self._zc is None or self._info is None:
            return
        self._zc.unregister_service(self._info)
        self._zc.close()
        self._zc = None
        self._info = None
        logger.info("mDNS: advertisement stopped")
|
||||
|
|
@ -21,29 +21,6 @@ logger = logging.getLogger(__name__)
|
|||
class ExpirationPredictor:
|
||||
"""Predict expiration dates based on product category and storage location."""
|
||||
|
||||
# Canonical location names and their aliases.
|
||||
# All location strings are normalised through this before table lookup.
|
||||
LOCATION_ALIASES: dict[str, str] = {
|
||||
'garage_freezer': 'freezer',
|
||||
'chest_freezer': 'freezer',
|
||||
'deep_freezer': 'freezer',
|
||||
'upright_freezer': 'freezer',
|
||||
'refrigerator': 'fridge',
|
||||
'frig': 'fridge',
|
||||
'cupboard': 'cabinet',
|
||||
'shelf': 'pantry',
|
||||
'counter': 'pantry',
|
||||
}
|
||||
|
||||
# When a category has no entry for the requested location, try these
|
||||
# alternatives in order — prioritising same-temperature storage first.
|
||||
LOCATION_FALLBACK: dict[str, tuple[str, ...]] = {
|
||||
'freezer': ('freezer', 'fridge', 'pantry', 'cabinet'),
|
||||
'fridge': ('fridge', 'pantry', 'cabinet', 'freezer'),
|
||||
'pantry': ('pantry', 'cabinet', 'fridge', 'freezer'),
|
||||
'cabinet': ('cabinet', 'pantry', 'fridge', 'freezer'),
|
||||
}
|
||||
|
||||
# Default shelf life in days by category and location
|
||||
# Sources: USDA FoodKeeper app, FDA guidelines
|
||||
SHELF_LIFE = {
|
||||
|
|
@ -62,8 +39,6 @@ class ExpirationPredictor:
|
|||
'poultry': {'fridge': 2, 'freezer': 270},
|
||||
'chicken': {'fridge': 2, 'freezer': 270},
|
||||
'turkey': {'fridge': 2, 'freezer': 270},
|
||||
'tempeh': {'fridge': 10, 'freezer': 365},
|
||||
'tofu': {'fridge': 5, 'freezer': 180},
|
||||
'ground_meat': {'fridge': 2, 'freezer': 120},
|
||||
# Seafood
|
||||
'fish': {'fridge': 2, 'freezer': 180},
|
||||
|
|
@ -84,9 +59,9 @@ class ExpirationPredictor:
|
|||
'bread': {'pantry': 5, 'freezer': 90},
|
||||
'bakery': {'pantry': 3, 'fridge': 7, 'freezer': 90},
|
||||
# Frozen
|
||||
'frozen_foods': {'freezer': 180, 'fridge': 3},
|
||||
'frozen_vegetables': {'freezer': 270, 'fridge': 4},
|
||||
'frozen_fruit': {'freezer': 365, 'fridge': 4},
|
||||
'frozen_foods': {'freezer': 180},
|
||||
'frozen_vegetables': {'freezer': 270},
|
||||
'frozen_fruit': {'freezer': 365},
|
||||
'ice_cream': {'freezer': 60},
|
||||
# Pantry Staples
|
||||
'canned_goods': {'pantry': 730, 'cabinet': 730},
|
||||
|
|
@ -116,261 +91,44 @@ class ExpirationPredictor:
|
|||
'prepared_foods': {'fridge': 4, 'freezer': 90},
|
||||
}
|
||||
|
||||
# Secondary shelf life in days after a package is opened.
|
||||
# Sources: USDA FoodKeeper app, FDA consumer guides.
|
||||
# Only categories where opening significantly shortens shelf life are listed.
|
||||
# Items not listed default to None (no secondary window tracked).
|
||||
SHELF_LIFE_AFTER_OPENING: dict[str, int] = {
|
||||
# Dairy — once opened, clock ticks fast
|
||||
'dairy': 5,
|
||||
'milk': 5,
|
||||
'cream': 3,
|
||||
'yogurt': 7,
|
||||
'cheese': 14,
|
||||
'butter': 30,
|
||||
# Condiments — refrigerated after opening
|
||||
'condiments': 30,
|
||||
'ketchup': 30,
|
||||
'mustard': 30,
|
||||
'mayo': 14,
|
||||
'salad_dressing': 30,
|
||||
'soy_sauce': 90,
|
||||
# Canned goods — once opened, very short
|
||||
'canned_goods': 4,
|
||||
# Beverages
|
||||
'juice': 7,
|
||||
'soda': 4,
|
||||
# Bread / Bakery
|
||||
'bread': 5,
|
||||
'bakery': 3,
|
||||
# Produce
|
||||
'leafy_greens': 3,
|
||||
'berries': 3,
|
||||
# Pantry staples (open bag)
|
||||
'chips': 14,
|
||||
'cookies': 14,
|
||||
'cereal': 30,
|
||||
'flour': 90,
|
||||
}
|
||||
|
||||
# Post-expiry secondary use window.
|
||||
# These are NOT spoilage extensions — they describe a qualitative state
|
||||
# change where the ingredient is specifically suited for certain preparations.
|
||||
# Sources: USDA FoodKeeper, food science, culinary tradition.
|
||||
SECONDARY_WINDOW: dict[str, dict] = {
|
||||
'bread': {
|
||||
'window_days': 5,
|
||||
'label': 'stale',
|
||||
'uses': ['croutons', 'stuffing', 'bread pudding', 'French toast', 'panzanella'],
|
||||
'warning': 'Check for mold before use — discard if any is visible.',
|
||||
},
|
||||
'bakery': {
|
||||
'window_days': 3,
|
||||
'label': 'day-old',
|
||||
'uses': ['French toast', 'bread pudding', 'crumbles'],
|
||||
'warning': 'Check for mold before use — discard if any is visible.',
|
||||
},
|
||||
'bananas': {
|
||||
'window_days': 5,
|
||||
'label': 'overripe',
|
||||
'uses': ['banana bread', 'smoothies', 'pancakes', 'muffins'],
|
||||
'warning': None,
|
||||
},
|
||||
'milk': {
|
||||
'window_days': 3,
|
||||
'label': 'sour',
|
||||
'uses': ['pancakes', 'quick breads', 'baking', 'sauces'],
|
||||
'warning': 'Use only in cooked recipes — do not drink.',
|
||||
},
|
||||
'dairy': {
|
||||
'window_days': 2,
|
||||
'label': 'sour',
|
||||
'uses': ['pancakes', 'quick breads', 'baking'],
|
||||
'warning': 'Use only in cooked recipes — do not drink.',
|
||||
},
|
||||
'cheese': {
|
||||
'window_days': 14,
|
||||
'label': 'well-aged',
|
||||
'uses': ['broth', 'soups', 'risotto', 'gratins'],
|
||||
'warning': None,
|
||||
},
|
||||
'rice': {
|
||||
'window_days': 2,
|
||||
'label': 'day-old',
|
||||
'uses': ['fried rice', 'rice bowls', 'rice porridge'],
|
||||
'warning': 'Refrigerate immediately after cooking — do not leave at room temp.',
|
||||
},
|
||||
'tortillas': {
|
||||
'window_days': 5,
|
||||
'label': 'stale',
|
||||
'uses': ['chilaquiles', 'migas', 'tortilla soup', 'casserole'],
|
||||
'warning': None,
|
||||
},
|
||||
}
|
||||
|
||||
def days_after_opening(self, category: str | None) -> int | None:
|
||||
"""Return days of shelf life remaining once a package is opened.
|
||||
|
||||
Returns None if the category is unknown or not tracked after opening
|
||||
(e.g. frozen items, raw meat — category check irrelevant once opened).
|
||||
"""
|
||||
if not category:
|
||||
return None
|
||||
return self.SHELF_LIFE_AFTER_OPENING.get(category.lower())
|
||||
|
||||
def secondary_state(
|
||||
self, category: str | None, expiry_date: str | None
|
||||
) -> dict | None:
|
||||
"""Return secondary use info if the item is in its post-expiry secondary window.
|
||||
|
||||
Returns a dict with label, uses, warning, days_past, and window_days when the
|
||||
item is past its nominal expiry date but still within the secondary use window.
|
||||
Returns None in all other cases (unknown category, no window defined, not yet
|
||||
expired, or past the secondary window).
|
||||
"""
|
||||
if not category or not expiry_date:
|
||||
return None
|
||||
entry = self.SECONDARY_WINDOW.get(category.lower())
|
||||
if not entry:
|
||||
return None
|
||||
try:
|
||||
from datetime import date
|
||||
today = date.today()
|
||||
exp = date.fromisoformat(expiry_date)
|
||||
days_past = (today - exp).days
|
||||
if 0 <= days_past <= entry['window_days']:
|
||||
return {
|
||||
'label': entry['label'],
|
||||
'uses': list(entry['uses']),
|
||||
'warning': entry['warning'],
|
||||
'days_past': days_past,
|
||||
'window_days': entry['window_days'],
|
||||
}
|
||||
except ValueError:
|
||||
pass
|
||||
return None
|
||||
|
||||
# Keyword lists are checked in declaration order — most specific first.
|
||||
# Rules:
|
||||
# - canned/processed goods BEFORE raw-meat terms (canned chicken != raw chicken)
|
||||
# - frozen prepared foods BEFORE generic protein terms
|
||||
# - multi-word phrases before single words where ambiguity exists
|
||||
CATEGORY_KEYWORDS = {
|
||||
# ── Frozen prepared foods ─────────────────────────────────────────────
|
||||
# Before raw protein entries so plant-based frozen products don't
|
||||
# inherit 2–3 day raw-meat shelf lives.
|
||||
'ice_cream': ['ice cream', 'gelato', 'frozen yogurt', 'sorbet', 'sherbet'],
|
||||
'frozen_fruit': [
|
||||
'frozen berries', 'frozen mango', 'frozen strawberries',
|
||||
'frozen blueberries', 'frozen raspberries', 'frozen peaches',
|
||||
'frozen fruit', 'frozen cherries',
|
||||
],
|
||||
'frozen_vegetables': [
|
||||
'frozen veg', 'frozen corn', 'frozen peas', 'frozen broccoli',
|
||||
'frozen spinach', 'frozen edamame', 'frozen green beans',
|
||||
'frozen mixed vegetables', 'frozen carrots',
|
||||
'peas & carrots', 'peas and carrots', 'mixed vegetables',
|
||||
'spring rolls', 'vegetable spring rolls',
|
||||
],
|
||||
'frozen_foods': [
|
||||
'plant-based', 'plant based', 'meatless', 'impossible',
|
||||
"chik'n", 'chikn', 'veggie burger', 'veggie patty',
|
||||
'nugget', 'tater tot', 'waffle fries', 'hash brown',
|
||||
'onion ring', 'fish stick', 'fish fillet', 'potsticker',
|
||||
'dumpling', 'egg roll', 'empanada', 'tamale', 'falafel',
|
||||
'mac & cheese bite', 'cauliflower wing', 'ranchero potato',
|
||||
],
|
||||
# ── Canned / shelf-stable processed goods ─────────────────────────────
|
||||
# Before raw protein keywords so "canned chicken", "cream of chicken",
|
||||
# and "lentil soup" resolve here rather than to raw chicken/cream.
|
||||
'canned_goods': [
|
||||
'canned', 'can of', 'tin of', 'tinned',
|
||||
'cream of ', 'condensed soup', 'condensed cream',
|
||||
'baked beans', 'refried beans',
|
||||
'canned beans', 'canned tomatoes', 'canned corn', 'canned peas',
|
||||
'canned soup', 'canned tuna', 'canned salmon', 'canned chicken',
|
||||
'canned fruit', 'canned peaches', 'canned pears',
|
||||
'enchilada sauce', 'tomato sauce', 'tomato paste',
|
||||
'lentil soup', 'bean soup', 'chicken noodle soup',
|
||||
],
|
||||
# ── Condiments & brined items ─────────────────────────────────────────
|
||||
# Before produce/protein terms so brined olives, jarred peppers, etc.
|
||||
# don't inherit raw vegetable shelf lives.
|
||||
'ketchup': ['ketchup', 'catsup'],
|
||||
'mustard': ['mustard', 'dijon', 'dijion', 'stoneground mustard'],
|
||||
'mayo': ['mayo', 'mayonnaise', 'miracle whip'],
|
||||
'soy_sauce': ['soy sauce', 'tamari', 'shoyu'],
|
||||
'salad_dressing': ['salad dressing', 'ranch', 'italian dressing', 'vinaigrette'],
|
||||
'condiments': [
|
||||
# brined / jarred items
|
||||
'dill chips', 'hamburger chips', 'gherkin',
|
||||
'olive', 'capers', 'jalapeño', 'jalapeno', 'pepperoncini',
|
||||
'pimiento', 'banana pepper', 'cornichon',
|
||||
# sauces
|
||||
'hot sauce', 'hot pepper sauce', 'sriracha', 'cholula',
|
||||
'worcestershire', 'barbecue sauce', 'bbq sauce',
|
||||
'chipotle sauce', 'chipotle mayo', 'chipotle creamy',
|
||||
'salsa', 'chutney', 'relish',
|
||||
'teriyaki', 'hoisin', 'oyster sauce', 'fish sauce',
|
||||
'miso', 'ssamjang', 'gochujang', 'doenjang',
|
||||
'soybean paste', 'fermented soybean',
|
||||
# nut butters / spreads
|
||||
'peanut butter', 'almond butter', 'tahini', 'hummus',
|
||||
# seasoning mixes
|
||||
'seasoning', 'spice blend', 'borracho',
|
||||
# other shelf-stable sauces
|
||||
'yuzu', 'ponzu', 'lizano',
|
||||
],
|
||||
# ── Soy / fermented proteins ──────────────────────────────────────────
|
||||
'tempeh': ['tempeh'],
|
||||
'tofu': ['tofu', 'bean curd'],
|
||||
# ── Dairy ─────────────────────────────────────────────────────────────
|
||||
'milk': ['milk', 'whole milk', '2% milk', 'skim milk', 'almond milk', 'oat milk', 'soy milk'],
|
||||
'cheese': ['cheese', 'cheddar', 'mozzarella', 'swiss', 'parmesan', 'feta', 'gouda', 'velveeta'],
|
||||
'cheese': ['cheese', 'cheddar', 'mozzarella', 'swiss', 'parmesan', 'feta', 'gouda'],
|
||||
'yogurt': ['yogurt', 'greek yogurt', 'yoghurt'],
|
||||
'butter': ['butter', 'margarine'],
|
||||
# Bare 'cream' removed — "cream of X" is canned_goods (matched above).
|
||||
'cream': ['heavy cream', 'whipping cream', 'sour cream', 'crème fraîche',
|
||||
'cream cheese', 'whipped topping', 'whipped cream'],
|
||||
'cream': ['cream', 'heavy cream', 'whipping cream', 'sour cream'],
|
||||
'eggs': ['eggs', 'egg'],
|
||||
# ── Raw proteins ──────────────────────────────────────────────────────
|
||||
# After canned/frozen so "canned chicken" is already resolved above.
|
||||
'beef': ['beef', 'steak', 'roast', 'brisket', 'ribeye', 'sirloin'],
|
||||
'pork': ['pork', 'bacon', 'ham', 'sausage', 'pork chop'],
|
||||
'chicken': ['chicken', 'chicken breast', 'chicken thigh', 'chicken wings'],
|
||||
'turkey': ['turkey', 'turkey breast', 'ground turkey'],
|
||||
'ground_meat': ['ground beef', 'ground pork', 'ground chicken', 'hamburger'],
|
||||
'fish': ['fish', 'cod', 'tilapia', 'halibut'],
|
||||
'salmon': ['salmon'],
|
||||
'shrimp': ['shrimp', 'prawns'],
|
||||
'fish': ['fish', 'cod', 'tilapia', 'halibut', 'pollock'],
|
||||
# Specific chicken cuts only — bare 'chicken' handled in generic fallback
|
||||
'chicken': ['chicken breast', 'chicken thigh', 'chicken wings', 'chicken leg',
|
||||
'whole chicken', 'rotisserie chicken', 'raw chicken'],
|
||||
'turkey': ['turkey breast', 'whole turkey'],
|
||||
'ground_meat': ['ground beef', 'ground pork', 'ground chicken', 'ground turkey',
|
||||
'ground lamb', 'ground bison'],
|
||||
'pork': ['pork', 'bacon', 'ham', 'pork chop', 'pork loin'],
|
||||
'beef': ['beef', 'steak', 'brisket', 'ribeye', 'sirloin', 'roast beef'],
|
||||
'deli_meat': ['deli', 'sliced turkey', 'sliced ham', 'lunch meat', 'cold cuts',
|
||||
'prosciutto', 'salami', 'pepperoni'],
|
||||
# ── Produce ───────────────────────────────────────────────────────────
|
||||
'leafy_greens': ['lettuce', 'spinach', 'kale', 'arugula', 'mixed greens'],
|
||||
'leafy_greens': ['lettuce', 'spinach', 'kale', 'arugula', 'mixed greens', 'salad'],
|
||||
'berries': ['strawberries', 'blueberries', 'raspberries', 'blackberries'],
|
||||
'apples': ['apple', 'apples'],
|
||||
'bananas': ['banana', 'bananas'],
|
||||
'citrus': ['orange', 'lemon', 'lime', 'grapefruit', 'tangerine'],
|
||||
# ── Bakery ────────────────────────────────────────────────────────────
|
||||
'bakery': [
|
||||
'muffin', 'croissant', 'donut', 'danish', 'puff pastry', 'pastry puff',
|
||||
'cinnamon roll', 'dinner roll', 'parkerhouse roll', 'scone',
|
||||
],
|
||||
'bread': ['bread', 'loaf', 'baguette', 'bagel', 'bun', 'pita', 'naan',
|
||||
'english muffin', 'sourdough'],
|
||||
# ── Dry pantry staples ────────────────────────────────────────────────
|
||||
'pasta': ['pasta', 'spaghetti', 'penne', 'macaroni', 'noodles', 'couscous', 'orzo'],
|
||||
'rice': ['rice', 'brown rice', 'white rice', 'jasmine rice', 'basmati',
|
||||
'spanish rice', 'rice mix'],
|
||||
'bread': ['bread', 'loaf', 'baguette', 'roll', 'bagel', 'bun'],
|
||||
'bakery': ['muffin', 'croissant', 'donut', 'danish', 'pastry'],
|
||||
'deli_meat': ['deli', 'sliced turkey', 'sliced ham', 'lunch meat', 'cold cuts'],
|
||||
'frozen_vegetables': ['frozen veg', 'frozen corn', 'frozen peas', 'frozen broccoli'],
|
||||
'frozen_fruit': ['frozen berries', 'frozen mango', 'frozen strawberries'],
|
||||
'ice_cream': ['ice cream', 'gelato', 'frozen yogurt'],
|
||||
'pasta': ['pasta', 'spaghetti', 'penne', 'macaroni', 'noodles'],
|
||||
'rice': ['rice', 'brown rice', 'white rice', 'jasmine'],
|
||||
'cereal': ['cereal', 'granola', 'oatmeal'],
|
||||
'chips': ['chips', 'crisps', 'tortilla chips', 'pretzel', 'popcorn'],
|
||||
'cookies': ['cookies', 'biscuits', 'crackers', 'graham cracker', 'wafer'],
|
||||
# ── Beverages ─────────────────────────────────────────────────────────
|
||||
'juice': ['juice', 'orange juice', 'apple juice', 'lemonade'],
|
||||
'soda': ['soda', 'cola', 'sprite', 'pepsi', 'coke', 'carbonated soft drink'],
|
||||
'chips': ['chips', 'crisps', 'tortilla chips'],
|
||||
'cookies': ['cookies', 'biscuits', 'crackers'],
|
||||
'ketchup': ['ketchup', 'catsup'],
|
||||
'mustard': ['mustard'],
|
||||
'mayo': ['mayo', 'mayonnaise', 'miracle whip'],
|
||||
'salad_dressing': ['salad dressing', 'ranch', 'italian dressing', 'vinaigrette'],
|
||||
'soy_sauce': ['soy sauce', 'tamari'],
|
||||
'juice': ['juice', 'orange juice', 'apple juice'],
|
||||
'soda': ['soda', 'pop', 'cola', 'sprite', 'pepsi', 'coke'],
|
||||
}
|
||||
|
||||
def __init__(self) -> None:
|
||||
|
|
@ -418,13 +176,8 @@ class ExpirationPredictor:
|
|||
product_name: str,
|
||||
product_category: Optional[str] = None,
|
||||
tags: Optional[List[str]] = None,
|
||||
location: Optional[str] = None,
|
||||
) -> Optional[str]:
|
||||
"""Determine category from product name, existing category, and tags.
|
||||
|
||||
location is used as a last-resort hint: unknown items in the freezer
|
||||
default to frozen_foods rather than dry_goods.
|
||||
"""
|
||||
"""Determine category from product name, existing category, and tags."""
|
||||
if product_category:
|
||||
cat = product_category.lower().strip()
|
||||
if cat in self.SHELF_LIFE:
|
||||
|
|
@ -444,36 +197,21 @@ class ExpirationPredictor:
|
|||
if any(kw in name for kw in keywords):
|
||||
return category
|
||||
|
||||
# Generic single-word fallbacks — checked after the keyword dict so
|
||||
# multi-word phrases (e.g. "canned chicken") already matched above.
|
||||
for words, fallback in [
|
||||
(['frozen'], 'frozen_foods'),
|
||||
(['canned', 'tinned'], 'canned_goods'),
|
||||
# bare 'chicken' / 'sausage' / 'ham' kept here so raw-meat names
|
||||
# that don't appear in the specific keyword lists still resolve.
|
||||
(['chicken', 'turkey'], 'poultry'),
|
||||
(['sausage', 'ham', 'bacon'], 'pork'),
|
||||
(['beef', 'steak'], 'beef'),
|
||||
(['meat', 'pork'], 'meat'),
|
||||
(['meat', 'beef', 'pork', 'chicken'], 'meat'),
|
||||
(['vegetable', 'veggie', 'produce'], 'vegetables'),
|
||||
(['fruit'], 'fruits'),
|
||||
(['dairy'], 'dairy'),
|
||||
(['frozen'], 'frozen_foods'),
|
||||
]:
|
||||
if any(w in name for w in words):
|
||||
return fallback
|
||||
|
||||
# Location-aware final fallback: unknown item in a freezer → frozen_foods.
|
||||
# This handles unlabelled frozen products (e.g. "Birthday Littles",
|
||||
# "Pulled BBQ Crumbles") without requiring every brand name to be listed.
|
||||
canon_loc = self._normalize_location(location or '')
|
||||
if canon_loc == 'freezer':
|
||||
return 'frozen_foods'
|
||||
|
||||
return 'dry_goods'
|
||||
|
||||
    def get_shelf_life_info(self, category: str, location: str) -> Optional[int]:
        """Shelf life in days for a given category + location, or None."""
        return self._lookup_days(category, location)
        # NOTE(review): the line below is unreachable dead code — it looks like
        # a leftover of an older implementation (direct table lookup with no
        # alias normalisation or fallback). Confirm and remove.
        return self.SHELF_LIFE.get(category.lower().strip(), {}).get(location)
|
||||
|
||||
def list_categories(self) -> List[str]:
|
||||
return list(self.SHELF_LIFE.keys())
|
||||
|
|
@ -486,18 +224,8 @@ class ExpirationPredictor:
|
|||
|
||||
# ── Private helpers ───────────────────────────────────────────────────────
|
||||
|
||||
def _normalize_location(self, location: str) -> str:
|
||||
"""Resolve location aliases to canonical names."""
|
||||
loc = location.lower().strip()
|
||||
return self.LOCATION_ALIASES.get(loc, loc)
|
||||
|
||||
def _lookup_days(self, category: Optional[str], location: str) -> Optional[int]:
|
||||
"""Pure deterministic lookup — no I/O.
|
||||
|
||||
Normalises location aliases (e.g. garage_freezer → freezer) and uses
|
||||
a context-aware fallback order so pantry items don't accidentally get
|
||||
fridge shelf-life and vice versa.
|
||||
"""
|
||||
"""Pure deterministic lookup — no I/O."""
|
||||
if not category:
|
||||
return None
|
||||
cat = category.lower().strip()
|
||||
|
|
@ -509,19 +237,13 @@ class ExpirationPredictor:
|
|||
else:
|
||||
return None
|
||||
|
||||
canon_loc = self._normalize_location(location)
|
||||
shelf = self.SHELF_LIFE[cat]
|
||||
|
||||
# Try the canonical location first, then work through the
|
||||
# context-aware fallback chain for that location type.
|
||||
fallback_order = self.LOCATION_FALLBACK.get(
|
||||
canon_loc, (canon_loc, 'pantry', 'fridge', 'cabinet', 'freezer')
|
||||
)
|
||||
for loc in fallback_order:
|
||||
days = shelf.get(loc)
|
||||
if days is not None:
|
||||
return days
|
||||
return None
|
||||
days = self.SHELF_LIFE[cat].get(location)
|
||||
if days is None:
|
||||
for loc in ('fridge', 'pantry', 'freezer', 'cabinet'):
|
||||
days = self.SHELF_LIFE[cat].get(loc)
|
||||
if days is not None:
|
||||
break
|
||||
return days
|
||||
|
||||
def _llm_predict_days(
|
||||
self,
|
||||
|
|
|
|||
|
|
@ -1,80 +0,0 @@
|
|||
"""Heimdall cf-orch budget client.
|
||||
|
||||
Calls Heimdall's /orch/* endpoints to gate and record cf-orch usage for
|
||||
lifetime/founders license holders. Always fails open on network errors —
|
||||
a Heimdall outage should never block the user.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import os
|
||||
|
||||
import requests
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
HEIMDALL_URL: str = os.environ.get("HEIMDALL_URL", "https://license.circuitforge.tech")
|
||||
HEIMDALL_ADMIN_TOKEN: str = os.environ.get("HEIMDALL_ADMIN_TOKEN", "")
|
||||
|
||||
|
||||
def _headers() -> dict[str, str]:
    """Authorization headers for Heimdall; empty when no admin token is set."""
    if not HEIMDALL_ADMIN_TOKEN:
        return {}
    return {"Authorization": f"Bearer {HEIMDALL_ADMIN_TOKEN}"}
|
||||
|
||||
|
||||
def check_orch_budget(key_display: str, product: str) -> dict:
    """Call POST /orch/check and return the response dict.

    On any error (network, auth, etc.) returns a permissive dict so the
    caller can proceed without blocking the user.
    """
    # Fail open — a Heimdall outage must never block the user.
    fail_open = {
        "allowed": True,
        "calls_used": 0,
        "calls_total": 0,
        "topup_calls": 0,
        "period_start": "",
        "resets_on": "",
    }
    try:
        # json() stays inside the try so a malformed body also fails open.
        resp = requests.post(
            f"{HEIMDALL_URL}/orch/check",
            json={"key_display": key_display, "product": product},
            headers=_headers(),
            timeout=5,
        )
        if resp.ok:
            return resp.json()
        log.warning("Heimdall orch/check returned %s for key %s", resp.status_code, key_display[:12])
    except Exception as exc:
        log.warning("Heimdall orch/check failed (fail-open): %s", exc)
    return fail_open
|
||||
|
||||
|
||||
def get_orch_usage(key_display: str, product: str) -> dict:
    """Call GET /orch/usage and return the response dict.

    Returns zeros on error (non-blocking).
    """
    zeros = {
        "calls_used": 0,
        "topup_calls": 0,
        "calls_total": 0,
        "period_start": "",
        "resets_on": "",
    }
    try:
        # json() stays inside the try so a malformed body returns zeros too.
        resp = requests.get(
            f"{HEIMDALL_URL}/orch/usage",
            params={"key_display": key_display, "product": product},
            headers=_headers(),
            timeout=5,
        )
        if resp.ok:
            return resp.json()
        log.warning("Heimdall orch/usage returned %s", resp.status_code)
    except Exception as exc:
        log.warning("Heimdall orch/usage failed: %s", exc)
    return zeros
|
||||
|
|
@ -1 +0,0 @@
|
|||
"""Meal planning service layer — no FastAPI imports (extraction-ready for cf-core)."""
|
||||
|
|
@ -1,108 +0,0 @@
|
|||
# app/services/meal_plan/affiliates.py
|
||||
"""Register Kiwi-specific affiliate programs and provide search URL builders.
|
||||
|
||||
Called once at API startup. Programs not yet in core.affiliates are registered
|
||||
here. The actual affiliate IDs are read from environment variables at call
|
||||
time, so the process can start before accounts are approved (plain URLs
|
||||
returned when env vars are absent).
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from urllib.parse import quote_plus
|
||||
|
||||
from circuitforge_core.affiliates import AffiliateProgram, register_program, wrap_url
|
||||
|
||||
|
||||
# ── URL builders ──────────────────────────────────────────────────────────────
|
||||
|
||||
def _walmart_search(url: str, affiliate_id: str) -> str:
|
||||
sep = "&" if "?" in url else "?"
|
||||
return f"{url}{sep}affil=apa&affiliateId={affiliate_id}"
|
||||
|
||||
|
||||
def _target_search(url: str, affiliate_id: str) -> str:
|
||||
sep = "&" if "?" in url else "?"
|
||||
return f"{url}{sep}afid={affiliate_id}"
|
||||
|
||||
|
||||
def _thrive_search(url: str, affiliate_id: str) -> str:
|
||||
sep = "&" if "?" in url else "?"
|
||||
return f"{url}{sep}raf={affiliate_id}"
|
||||
|
||||
|
||||
def _misfits_search(url: str, affiliate_id: str) -> str:
|
||||
sep = "&" if "?" in url else "?"
|
||||
return f"{url}{sep}ref={affiliate_id}"
|
||||
|
||||
|
||||
# ── Registration ──────────────────────────────────────────────────────────────
|
||||
|
||||
def register_kiwi_programs() -> None:
|
||||
"""Register Kiwi retailer programs. Safe to call multiple times (idempotent)."""
|
||||
register_program(AffiliateProgram(
|
||||
name="Walmart",
|
||||
retailer_key="walmart",
|
||||
env_var="WALMART_AFFILIATE_ID",
|
||||
build_url=_walmart_search,
|
||||
))
|
||||
register_program(AffiliateProgram(
|
||||
name="Target",
|
||||
retailer_key="target",
|
||||
env_var="TARGET_AFFILIATE_ID",
|
||||
build_url=_target_search,
|
||||
))
|
||||
register_program(AffiliateProgram(
|
||||
name="Thrive Market",
|
||||
retailer_key="thrive",
|
||||
env_var="THRIVE_AFFILIATE_ID",
|
||||
build_url=_thrive_search,
|
||||
))
|
||||
register_program(AffiliateProgram(
|
||||
name="Misfits Market",
|
||||
retailer_key="misfits",
|
||||
env_var="MISFITS_AFFILIATE_ID",
|
||||
build_url=_misfits_search,
|
||||
))
|
||||
|
||||
|
||||
# ── Search URL helpers ─────────────────────────────────────────────────────────
|
||||
|
||||
_SEARCH_TEMPLATES: dict[str, str] = {
|
||||
"amazon": "https://www.amazon.com/s?k={q}",
|
||||
"instacart": "https://www.instacart.com/store/search_v3/term?term={q}",
|
||||
"walmart": "https://www.walmart.com/search?q={q}",
|
||||
"target": "https://www.target.com/s?searchTerm={q}",
|
||||
"thrive": "https://thrivemarket.com/search?q={q}",
|
||||
"misfits": "https://www.misfitsmarket.com/shop?search={q}",
|
||||
}
|
||||
|
||||
KIWI_RETAILERS = list(_SEARCH_TEMPLATES.keys())
|
||||
|
||||
|
||||
def get_retailer_links(ingredient_name: str) -> list[dict]:
|
||||
"""Return affiliate-wrapped search links for *ingredient_name*.
|
||||
|
||||
Returns a list of dicts: {"retailer": str, "label": str, "url": str}.
|
||||
Falls back to plain search URL when no affiliate ID is configured.
|
||||
"""
|
||||
q = quote_plus(ingredient_name)
|
||||
links = []
|
||||
for key, template in _SEARCH_TEMPLATES.items():
|
||||
plain_url = template.format(q=q)
|
||||
try:
|
||||
affiliate_url = wrap_url(plain_url, retailer=key)
|
||||
except Exception:
|
||||
affiliate_url = plain_url
|
||||
links.append({"retailer": key, "label": _label(key), "url": affiliate_url})
|
||||
return links
|
||||
|
||||
|
||||
def _label(key: str) -> str:
|
||||
return {
|
||||
"amazon": "Amazon",
|
||||
"instacart": "Instacart",
|
||||
"walmart": "Walmart",
|
||||
"target": "Target",
|
||||
"thrive": "Thrive Market",
|
||||
"misfits": "Misfits Market",
|
||||
}.get(key, key.title())
|
||||
|
|
@ -1,91 +0,0 @@
|
|||
# app/services/meal_plan/llm_planner.py
|
||||
# BSL 1.1 — LLM feature
|
||||
"""LLM-assisted full-week meal plan generation.
|
||||
|
||||
Returns suggestions for human review — never writes to the DB directly.
|
||||
The API endpoint presents the suggestions and waits for user approval
|
||||
before calling store.upsert_slot().
|
||||
|
||||
Routing: pass a router from get_meal_plan_router() in llm_router.py.
|
||||
Cloud: cf-text via cf-orch (3B-7B GGUF, ~2GB VRAM).
|
||||
Local: LLMRouter (ollama / vllm / openai-compat per llm.yaml).
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import logging
|
||||
from dataclasses import dataclass
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
_PLAN_SYSTEM = """\
|
||||
You are a practical meal planning assistant. Given a pantry inventory and
|
||||
dietary preferences, suggest a week of dinners (or other configured meals).
|
||||
|
||||
Prioritise ingredients that are expiring soon. Prefer variety across the week.
|
||||
Respect all dietary restrictions.
|
||||
|
||||
Respond with a JSON array only — no prose, no markdown fences.
|
||||
Each item: {"day": 0-6, "meal_type": "dinner", "recipe_id": <int or null>, "suggestion": "<recipe name>"}
|
||||
|
||||
day 0 = Monday, day 6 = Sunday.
|
||||
If you cannot match a known recipe_id, set recipe_id to null and provide a suggestion name.
|
||||
"""
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
class PlanSuggestion:
|
||||
day: int # 0 = Monday
|
||||
meal_type: str
|
||||
recipe_id: int | None
|
||||
suggestion: str # human-readable name
|
||||
|
||||
|
||||
def generate_plan(
|
||||
pantry_items: list[str],
|
||||
meal_types: list[str],
|
||||
dietary_notes: str,
|
||||
router,
|
||||
) -> list[PlanSuggestion]:
|
||||
"""Return a list of PlanSuggestion for user review.
|
||||
|
||||
Never writes to DB — caller must upsert slots after user approves.
|
||||
Returns an empty list if router is None or response is unparseable.
|
||||
"""
|
||||
if router is None:
|
||||
return []
|
||||
|
||||
pantry_text = "\n".join(f"- {item}" for item in pantry_items[:50])
|
||||
meal_text = ", ".join(meal_types)
|
||||
user_msg = (
|
||||
f"Meal types: {meal_text}\n"
|
||||
f"Dietary notes: {dietary_notes or 'none'}\n\n"
|
||||
f"Pantry (partial):\n{pantry_text}"
|
||||
)
|
||||
|
||||
try:
|
||||
response = router.complete(
|
||||
system=_PLAN_SYSTEM,
|
||||
user=user_msg,
|
||||
max_tokens=512,
|
||||
temperature=0.7,
|
||||
)
|
||||
items = json.loads(response.strip())
|
||||
suggestions = []
|
||||
for item in items:
|
||||
if not isinstance(item, dict):
|
||||
continue
|
||||
day = item.get("day")
|
||||
meal_type = item.get("meal_type", "dinner")
|
||||
if not isinstance(day, int) or day < 0 or day > 6:
|
||||
continue
|
||||
suggestions.append(PlanSuggestion(
|
||||
day=day,
|
||||
meal_type=meal_type,
|
||||
recipe_id=item.get("recipe_id"),
|
||||
suggestion=str(item.get("suggestion", "")),
|
||||
))
|
||||
return suggestions
|
||||
except Exception as exc:
|
||||
logger.debug("LLM plan generation failed: %s", exc)
|
||||
return []
|
||||
|
|
@ -1,96 +0,0 @@
|
|||
# app/services/meal_plan/llm_router.py
|
||||
# BSL 1.1 — LLM feature
|
||||
"""Provide a router-compatible LLM client for meal plan generation tasks.
|
||||
|
||||
Cloud (CF_ORCH_URL set):
|
||||
Allocates a cf-text service via cf-orch (3B-7B GGUF, ~2GB VRAM).
|
||||
Returns an _OrchTextRouter that wraps the cf-text HTTP endpoint
|
||||
with a .complete(system, user, **kwargs) interface.
|
||||
|
||||
Local / self-hosted (no CF_ORCH_URL):
|
||||
Returns an LLMRouter instance which tries ollama, vllm, or any
|
||||
backend configured in ~/.config/circuitforge/llm.yaml.
|
||||
|
||||
Both paths expose the same interface so llm_timing.py and llm_planner.py
|
||||
need no knowledge of the backend.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import os
|
||||
from contextlib import nullcontext
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# cf-orch service name and VRAM budget for meal plan LLM tasks.
|
||||
# These are lighter than recipe_llm (4.0 GB) — cf-text handles them.
|
||||
_SERVICE_TYPE = "cf-text"
|
||||
_TTL_S = 120.0
|
||||
_CALLER = "kiwi-meal-plan"
|
||||
|
||||
|
||||
class _OrchTextRouter:
|
||||
"""Thin adapter that makes a cf-text HTTP endpoint look like LLMRouter."""
|
||||
|
||||
def __init__(self, base_url: str) -> None:
|
||||
self._base_url = base_url.rstrip("/")
|
||||
|
||||
def complete(
|
||||
self,
|
||||
system: str = "",
|
||||
user: str = "",
|
||||
max_tokens: int = 512,
|
||||
temperature: float = 0.7,
|
||||
**_kwargs,
|
||||
) -> str:
|
||||
from openai import OpenAI
|
||||
client = OpenAI(base_url=self._base_url + "/v1", api_key="any")
|
||||
messages = []
|
||||
if system:
|
||||
messages.append({"role": "system", "content": system})
|
||||
messages.append({"role": "user", "content": user})
|
||||
try:
|
||||
model = client.models.list().data[0].id
|
||||
except Exception:
|
||||
model = "local"
|
||||
resp = client.chat.completions.create(
|
||||
model=model,
|
||||
messages=messages,
|
||||
max_tokens=max_tokens,
|
||||
temperature=temperature,
|
||||
)
|
||||
return resp.choices[0].message.content or ""
|
||||
|
||||
|
||||
def get_meal_plan_router():
|
||||
"""Return an LLM client for meal plan tasks.
|
||||
|
||||
Tries cf-orch cf-text allocation first (cloud); falls back to LLMRouter
|
||||
(local ollama/vllm). Returns None if no backend is available.
|
||||
"""
|
||||
cf_orch_url = os.environ.get("CF_ORCH_URL")
|
||||
if cf_orch_url:
|
||||
try:
|
||||
from circuitforge_orch.client import CFOrchClient
|
||||
client = CFOrchClient(cf_orch_url)
|
||||
ctx = client.allocate(
|
||||
service=_SERVICE_TYPE,
|
||||
ttl_s=_TTL_S,
|
||||
caller=_CALLER,
|
||||
)
|
||||
alloc = ctx.__enter__()
|
||||
if alloc is not None:
|
||||
return _OrchTextRouter(alloc.url), ctx
|
||||
except Exception as exc:
|
||||
logger.debug("cf-orch cf-text allocation failed, falling back to LLMRouter: %s", exc)
|
||||
|
||||
# Local fallback: LLMRouter (ollama / vllm / openai-compat)
|
||||
try:
|
||||
from circuitforge_core.llm.router import LLMRouter
|
||||
return LLMRouter(), nullcontext(None)
|
||||
except FileNotFoundError:
|
||||
logger.debug("LLMRouter: no llm.yaml and no LLM env vars — meal plan LLM disabled")
|
||||
return None, nullcontext(None)
|
||||
except Exception as exc:
|
||||
logger.debug("LLMRouter init failed: %s", exc)
|
||||
return None, nullcontext(None)
|
||||
|
|
@ -1,65 +0,0 @@
|
|||
# app/services/meal_plan/llm_timing.py
|
||||
# BSL 1.1 — LLM feature
|
||||
"""Estimate cook times for recipes missing corpus prep/cook time fields.
|
||||
|
||||
Used only when tier allows `meal_plan_llm_timing`. Falls back gracefully
|
||||
when no LLM backend is available.
|
||||
|
||||
Routing: pass a router from get_meal_plan_router() in llm_router.py.
|
||||
Cloud: cf-text via cf-orch (3B GGUF, ~2GB VRAM).
|
||||
Local: LLMRouter (ollama / vllm / openai-compat per llm.yaml).
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
_TIMING_PROMPT = """\
|
||||
You are a practical cook. Given a recipe name and its ingredients, estimate:
|
||||
1. prep_time: minutes of active prep work (chopping, mixing, etc.)
|
||||
2. cook_time: minutes of cooking (oven, stovetop, etc.)
|
||||
|
||||
Respond with ONLY two integers on separate lines:
|
||||
prep_time
|
||||
cook_time
|
||||
|
||||
If you cannot estimate, respond with:
|
||||
0
|
||||
0
|
||||
"""
|
||||
|
||||
|
||||
def estimate_timing(recipe_name: str, ingredients: list[str], router) -> tuple[int | None, int | None]:
|
||||
"""Return (prep_minutes, cook_minutes) for a recipe using LLMRouter.
|
||||
|
||||
Returns (None, None) if the router is unavailable or the response is
|
||||
unparseable. Never raises.
|
||||
|
||||
Args:
|
||||
recipe_name: Name of the recipe.
|
||||
ingredients: List of raw ingredient strings from the corpus.
|
||||
router: An LLMRouter instance (from circuitforge_core.llm).
|
||||
"""
|
||||
if router is None:
|
||||
return None, None
|
||||
|
||||
ingredient_list = "\n".join(f"- {i}" for i in (ingredients or [])[:15])
|
||||
prompt = f"Recipe: {recipe_name}\n\nIngredients:\n{ingredient_list}"
|
||||
|
||||
try:
|
||||
response = router.complete(
|
||||
system=_TIMING_PROMPT,
|
||||
user=prompt,
|
||||
max_tokens=16,
|
||||
temperature=0.0,
|
||||
)
|
||||
lines = response.strip().splitlines()
|
||||
prep = int(lines[0].strip()) if lines else 0
|
||||
cook = int(lines[1].strip()) if len(lines) > 1 else 0
|
||||
if prep == 0 and cook == 0:
|
||||
return None, None
|
||||
return prep or None, cook or None
|
||||
except Exception as exc:
|
||||
logger.debug("LLM timing estimation failed for %r: %s", recipe_name, exc)
|
||||
return None, None
|
||||
|
|
@ -1,26 +0,0 @@
|
|||
# app/services/meal_plan/planner.py
|
||||
"""Plan and slot orchestration — thin layer over Store.
|
||||
|
||||
No FastAPI imports. Provides helpers used by the API endpoint.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from app.db.store import Store
|
||||
from app.models.schemas.meal_plan import VALID_MEAL_TYPES
|
||||
|
||||
|
||||
def create_plan(store: Store, week_start: str, meal_types: list[str]) -> dict:
|
||||
"""Create a plan, filtering meal_types to valid values only."""
|
||||
valid = [t for t in meal_types if t in VALID_MEAL_TYPES]
|
||||
if not valid:
|
||||
valid = ["dinner"]
|
||||
return store.create_meal_plan(week_start, valid)
|
||||
|
||||
|
||||
def get_plan_with_slots(store: Store, plan_id: int) -> dict | None:
|
||||
"""Return a plan row with its slots list attached, or None."""
|
||||
plan = store.get_meal_plan(plan_id)
|
||||
if plan is None:
|
||||
return None
|
||||
slots = store.get_plan_slots(plan_id)
|
||||
return {**plan, "slots": slots}
|
||||
|
|
@ -1,91 +0,0 @@
|
|||
# app/services/meal_plan/prep_scheduler.py
|
||||
"""Sequence prep tasks for a batch cooking session.
|
||||
|
||||
Pure function — no DB or network calls. Sorts tasks by equipment priority
|
||||
(oven first to maximise oven utilisation) then assigns sequence_order.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
|
||||
_EQUIPMENT_PRIORITY = {"oven": 0, "stovetop": 1, "cold": 2, "no-heat": 3}
|
||||
_DEFAULT_PRIORITY = 4
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
class PrepTask:
|
||||
recipe_id: int | None
|
||||
slot_id: int | None
|
||||
task_label: str
|
||||
duration_minutes: int | None
|
||||
sequence_order: int
|
||||
equipment: str | None
|
||||
is_parallel: bool = False
|
||||
notes: str | None = None
|
||||
user_edited: bool = False
|
||||
|
||||
|
||||
def _total_minutes(recipe: dict) -> int | None:
|
||||
prep = recipe.get("prep_time")
|
||||
cook = recipe.get("cook_time")
|
||||
if prep is None and cook is None:
|
||||
return None
|
||||
return (prep or 0) + (cook or 0)
|
||||
|
||||
|
||||
def _equipment(recipe: dict) -> str | None:
|
||||
# Corpus recipes don't have an explicit equipment field; use test helper
|
||||
# field if present, otherwise infer from cook_time (long = oven heuristic).
|
||||
if "_equipment" in recipe:
|
||||
return recipe["_equipment"]
|
||||
minutes = _total_minutes(recipe)
|
||||
if minutes and minutes >= 45:
|
||||
return "oven"
|
||||
return "stovetop"
|
||||
|
||||
|
||||
def build_prep_tasks(slots: list[dict], recipes: list[dict]) -> list[PrepTask]:
|
||||
"""Return a sequenced list of PrepTask objects from plan slots + recipe rows.
|
||||
|
||||
Algorithm:
|
||||
1. Build a recipe_id → recipe dict lookup.
|
||||
2. Create one task per slot that has a recipe assigned.
|
||||
3. Sort by equipment priority (oven first).
|
||||
4. Assign contiguous sequence_order starting at 1.
|
||||
"""
|
||||
if not slots or not recipes:
|
||||
return []
|
||||
|
||||
recipe_map: dict[int, dict] = {r["id"]: r for r in recipes}
|
||||
raw_tasks: list[tuple[int, dict]] = [] # (priority, kwargs)
|
||||
|
||||
for slot in slots:
|
||||
recipe_id = slot.get("recipe_id")
|
||||
if not recipe_id:
|
||||
continue
|
||||
recipe = recipe_map.get(recipe_id)
|
||||
if not recipe:
|
||||
continue
|
||||
|
||||
eq = _equipment(recipe)
|
||||
priority = _EQUIPMENT_PRIORITY.get(eq or "", _DEFAULT_PRIORITY)
|
||||
raw_tasks.append((priority, {
|
||||
"recipe_id": recipe_id,
|
||||
"slot_id": slot.get("id"),
|
||||
"task_label": recipe.get("name", f"Recipe {recipe_id}"),
|
||||
"duration_minutes": _total_minutes(recipe),
|
||||
"equipment": eq,
|
||||
}))
|
||||
|
||||
raw_tasks.sort(key=lambda t: t[0])
|
||||
return [
|
||||
PrepTask(
|
||||
recipe_id=kw["recipe_id"],
|
||||
slot_id=kw["slot_id"],
|
||||
task_label=kw["task_label"],
|
||||
duration_minutes=kw["duration_minutes"],
|
||||
sequence_order=i,
|
||||
equipment=kw["equipment"],
|
||||
)
|
||||
for i, (_, kw) in enumerate(raw_tasks, 1)
|
||||
]
|
||||
|
|
@ -1,88 +0,0 @@
|
|||
# app/services/meal_plan/shopping_list.py
|
||||
"""Compute a shopping list from a meal plan and current pantry inventory.
|
||||
|
||||
Pure function — no DB or network calls. Takes plain dicts from the Store
|
||||
and returns GapItem dataclasses.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import re
|
||||
from dataclasses import dataclass, field
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
class GapItem:
|
||||
ingredient_name: str
|
||||
needed_raw: str | None # first quantity token from recipe text, e.g. "300g"
|
||||
have_quantity: float | None # pantry quantity when partial match
|
||||
have_unit: str | None
|
||||
covered: bool
|
||||
retailer_links: list = field(default_factory=list) # filled by API layer
|
||||
|
||||
|
||||
_QUANTITY_RE = re.compile(r"^(\d+[\d./]*\s*(?:g|kg|ml|l|oz|lb|cup|cups|tsp|tbsp|tbsps|tsps)?)\b", re.I)
|
||||
|
||||
|
||||
def _extract_quantity(ingredient_text: str) -> str | None:
|
||||
"""Pull the leading quantity string from a raw ingredient line."""
|
||||
m = _QUANTITY_RE.match(ingredient_text.strip())
|
||||
return m.group(1).strip() if m else None
|
||||
|
||||
|
||||
def _normalise(name: str) -> str:
|
||||
"""Lowercase, strip possessives and plural -s for fuzzy matching."""
|
||||
return name.lower().strip().rstrip("s")
|
||||
|
||||
|
||||
def compute_shopping_list(
|
||||
recipes: list[dict],
|
||||
inventory: list[dict],
|
||||
) -> tuple[list[GapItem], list[GapItem]]:
|
||||
"""Return (gap_items, covered_items) for a list of recipe dicts + inventory dicts.
|
||||
|
||||
Deduplicates by normalised ingredient name — the first recipe's quantity
|
||||
string wins when the same ingredient appears in multiple recipes.
|
||||
"""
|
||||
if not recipes:
|
||||
return [], []
|
||||
|
||||
# Build pantry lookup: normalised_name → inventory row
|
||||
pantry: dict[str, dict] = {}
|
||||
for item in inventory:
|
||||
pantry[_normalise(item["name"])] = item
|
||||
|
||||
# Collect unique ingredients with their first quantity token
|
||||
seen: dict[str, str | None] = {} # normalised_name → needed_raw
|
||||
for recipe in recipes:
|
||||
names: list[str] = recipe.get("ingredient_names") or []
|
||||
raw_lines: list[str] = recipe.get("ingredients") or []
|
||||
for i, name in enumerate(names):
|
||||
key = _normalise(name)
|
||||
if key in seen:
|
||||
continue
|
||||
raw = raw_lines[i] if i < len(raw_lines) else ""
|
||||
seen[key] = _extract_quantity(raw)
|
||||
|
||||
gaps: list[GapItem] = []
|
||||
covered: list[GapItem] = []
|
||||
|
||||
for norm_name, needed_raw in seen.items():
|
||||
pantry_row = pantry.get(norm_name)
|
||||
if pantry_row:
|
||||
covered.append(GapItem(
|
||||
ingredient_name=norm_name,
|
||||
needed_raw=needed_raw,
|
||||
have_quantity=pantry_row.get("quantity"),
|
||||
have_unit=pantry_row.get("unit"),
|
||||
covered=True,
|
||||
))
|
||||
else:
|
||||
gaps.append(GapItem(
|
||||
ingredient_name=norm_name,
|
||||
needed_raw=needed_raw,
|
||||
have_quantity=None,
|
||||
have_unit=None,
|
||||
covered=False,
|
||||
))
|
||||
|
||||
return gaps, covered
|
||||
|
|
@ -1,60 +0,0 @@
|
|||
"""Thin HTTP client for the cf-docuvision document vision service."""
|
||||
from __future__ import annotations
|
||||
|
||||
import base64
|
||||
from dataclasses import dataclass
|
||||
from pathlib import Path
|
||||
|
||||
import httpx
|
||||
|
||||
|
||||
@dataclass
|
||||
class DocuvisionResult:
|
||||
text: str
|
||||
confidence: float | None = None
|
||||
raw: dict | None = None
|
||||
|
||||
|
||||
class DocuvisionClient:
|
||||
"""Thin client for the cf-docuvision service."""
|
||||
|
||||
def __init__(self, base_url: str) -> None:
|
||||
self._base_url = base_url.rstrip("/")
|
||||
|
||||
def extract_text(self, image_path: str | Path) -> DocuvisionResult:
|
||||
"""Send an image to docuvision and return extracted text."""
|
||||
image_bytes = Path(image_path).read_bytes()
|
||||
b64 = base64.b64encode(image_bytes).decode()
|
||||
|
||||
with httpx.Client(timeout=30.0) as client:
|
||||
resp = client.post(
|
||||
f"{self._base_url}/extract",
|
||||
json={"image": b64},
|
||||
)
|
||||
resp.raise_for_status()
|
||||
data = resp.json()
|
||||
|
||||
return DocuvisionResult(
|
||||
text=data.get("text", ""),
|
||||
confidence=data.get("confidence"),
|
||||
raw=data,
|
||||
)
|
||||
|
||||
async def extract_text_async(self, image_path: str | Path) -> DocuvisionResult:
|
||||
"""Async version."""
|
||||
image_bytes = Path(image_path).read_bytes()
|
||||
b64 = base64.b64encode(image_bytes).decode()
|
||||
|
||||
async with httpx.AsyncClient(timeout=30.0) as client:
|
||||
resp = await client.post(
|
||||
f"{self._base_url}/extract",
|
||||
json={"image": b64},
|
||||
)
|
||||
resp.raise_for_status()
|
||||
data = resp.json()
|
||||
|
||||
return DocuvisionResult(
|
||||
text=data.get("text", ""),
|
||||
confidence=data.get("confidence"),
|
||||
raw=data,
|
||||
)
|
||||
|
|
@ -8,7 +8,6 @@ OCR with understanding of receipt structure to extract structured JSON data.
|
|||
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
from pathlib import Path
|
||||
from typing import Dict, Any, Optional, List
|
||||
|
|
@ -27,32 +26,6 @@ from app.core.config import settings
|
|||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _try_docuvision(image_path: str | Path) -> str | None:
|
||||
"""Try to extract text via cf-docuvision. Returns None if unavailable."""
|
||||
cf_orch_url = os.environ.get("CF_ORCH_URL")
|
||||
if not cf_orch_url:
|
||||
return None
|
||||
try:
|
||||
from circuitforge_orch.client import CFOrchClient
|
||||
from app.services.ocr.docuvision_client import DocuvisionClient
|
||||
|
||||
client = CFOrchClient(cf_orch_url)
|
||||
with client.allocate(
|
||||
service="cf-docuvision",
|
||||
model_candidates=["cf-docuvision"],
|
||||
ttl_s=60.0,
|
||||
caller="kiwi-ocr",
|
||||
) as alloc:
|
||||
if alloc is None:
|
||||
return None
|
||||
doc_client = DocuvisionClient(alloc.url)
|
||||
result = doc_client.extract_text(image_path)
|
||||
return result.text if result.text else None
|
||||
except Exception as exc:
|
||||
logger.debug("cf-docuvision fast-path failed, falling back: %s", exc)
|
||||
return None
|
||||
|
||||
|
||||
class VisionLanguageOCR:
|
||||
"""Vision-Language Model for receipt OCR and structured extraction."""
|
||||
|
||||
|
|
@ -67,7 +40,7 @@ class VisionLanguageOCR:
|
|||
self.processor = None
|
||||
self.device = "cuda" if torch.cuda.is_available() and settings.USE_GPU else "cpu"
|
||||
self.use_quantization = use_quantization
|
||||
self.model_name = "Qwen/Qwen2.5-VL-7B-Instruct"
|
||||
self.model_name = "Qwen/Qwen2-VL-2B-Instruct"
|
||||
|
||||
logger.info(f"Initializing VisionLanguageOCR with device: {self.device}")
|
||||
|
||||
|
|
@ -139,18 +112,6 @@ class VisionLanguageOCR:
|
|||
"warnings": [...]
|
||||
}
|
||||
"""
|
||||
# Try docuvision fast path first (skips heavy local VLM if available)
|
||||
docuvision_text = _try_docuvision(image_path)
|
||||
if docuvision_text is not None:
|
||||
parsed = self._parse_json_from_text(docuvision_text)
|
||||
# Only accept the docuvision result if it yielded meaningful content;
|
||||
# an empty-skeleton dict (no items, no merchant) means the text was
|
||||
# garbled and we should fall through to the local VLM instead.
|
||||
if parsed.get("items") or parsed.get("merchant"):
|
||||
parsed["raw_text"] = docuvision_text
|
||||
return self._validate_result(parsed)
|
||||
# Parsed result has no meaningful content — fall through to local VLM
|
||||
|
||||
self._load_model()
|
||||
|
||||
try:
|
||||
|
|
|
|||
|
|
@ -15,73 +15,64 @@ logger = logging.getLogger(__name__)
|
|||
|
||||
class OpenFoodFactsService:
|
||||
"""
|
||||
Service for interacting with the Open*Facts family of databases.
|
||||
Service for interacting with the OpenFoodFacts API.
|
||||
|
||||
Primary: OpenFoodFacts (food products).
|
||||
Fallback chain: Open Beauty Facts (personal care) → Open Products Facts (household).
|
||||
All three databases share the same API path and JSON format.
|
||||
OpenFoodFacts is a free, open database of food products with
|
||||
ingredients, allergens, and nutrition facts.
|
||||
"""
|
||||
|
||||
BASE_URL = "https://world.openfoodfacts.org/api/v2"
|
||||
USER_AGENT = "Kiwi/0.1.0 (https://circuitforge.tech)"
|
||||
|
||||
# Fallback databases tried in order when OFFs returns no match.
|
||||
# Same API format as OFFs — only the host differs.
|
||||
_FALLBACK_DATABASES = [
|
||||
"https://world.openbeautyfacts.org/api/v2",
|
||||
"https://world.openproductsfacts.org/api/v2",
|
||||
]
|
||||
|
||||
async def _lookup_in_database(
|
||||
self, barcode: str, base_url: str, client: httpx.AsyncClient
|
||||
) -> Optional[Dict[str, Any]]:
|
||||
"""Try one Open*Facts database using an existing client. Returns parsed product dict or None."""
|
||||
try:
|
||||
response = await client.get(
|
||||
f"{base_url}/product/{barcode}.json",
|
||||
headers={"User-Agent": self.USER_AGENT},
|
||||
timeout=10.0,
|
||||
)
|
||||
if response.status_code == 404:
|
||||
return None
|
||||
response.raise_for_status()
|
||||
data = response.json()
|
||||
if data.get("status") != 1:
|
||||
return None
|
||||
return self._parse_product_data(data, barcode)
|
||||
except httpx.HTTPError as e:
|
||||
logger.debug("HTTP error for %s at %s: %s", barcode, base_url, e)
|
||||
return None
|
||||
except Exception as e:
|
||||
logger.debug("Lookup failed for %s at %s: %s", barcode, base_url, e)
|
||||
return None
|
||||
|
||||
async def lookup_product(self, barcode: str) -> Optional[Dict[str, Any]]:
|
||||
"""
|
||||
Look up a product by barcode, trying OFFs then fallback databases.
|
||||
|
||||
A single httpx.AsyncClient is created for the whole lookup chain so that
|
||||
connection pooling and TLS session reuse apply across all database attempts.
|
||||
Look up a product by barcode in the OpenFoodFacts database.
|
||||
|
||||
Args:
|
||||
barcode: UPC/EAN barcode (8-13 digits)
|
||||
|
||||
Returns:
|
||||
Dictionary with product information, or None if not found in any database.
|
||||
Dictionary with product information, or None if not found
|
||||
|
||||
Example response:
|
||||
{
|
||||
"name": "Organic Milk",
|
||||
"brand": "Horizon",
|
||||
"categories": ["Dairy", "Milk"],
|
||||
"image_url": "https://...",
|
||||
"nutrition_data": {...},
|
||||
"raw_data": {...} # Full API response
|
||||
}
|
||||
"""
|
||||
async with httpx.AsyncClient() as client:
|
||||
result = await self._lookup_in_database(barcode, self.BASE_URL, client)
|
||||
if result:
|
||||
return result
|
||||
try:
|
||||
async with httpx.AsyncClient() as client:
|
||||
url = f"{self.BASE_URL}/product/{barcode}.json"
|
||||
|
||||
for db_url in self._FALLBACK_DATABASES:
|
||||
result = await self._lookup_in_database(barcode, db_url, client)
|
||||
if result:
|
||||
logger.info("Barcode %s found in fallback database: %s", barcode, db_url)
|
||||
return result
|
||||
response = await client.get(
|
||||
url,
|
||||
headers={"User-Agent": self.USER_AGENT},
|
||||
timeout=10.0,
|
||||
)
|
||||
|
||||
logger.info("Barcode %s not found in any Open*Facts database", barcode)
|
||||
return None
|
||||
if response.status_code == 404:
|
||||
logger.info(f"Product not found in OpenFoodFacts: {barcode}")
|
||||
return None
|
||||
|
||||
response.raise_for_status()
|
||||
data = response.json()
|
||||
|
||||
if data.get("status") != 1:
|
||||
logger.info(f"Product not found in OpenFoodFacts: {barcode}")
|
||||
return None
|
||||
|
||||
return self._parse_product_data(data, barcode)
|
||||
|
||||
except httpx.HTTPError as e:
|
||||
logger.error(f"HTTP error looking up barcode {barcode}: {e}")
|
||||
return None
|
||||
except Exception as e:
|
||||
logger.error(f"Error looking up barcode {barcode}: {e}")
|
||||
return None
|
||||
|
||||
def _parse_product_data(self, data: Dict[str, Any], barcode: str) -> Dict[str, Any]:
|
||||
"""
|
||||
|
|
@ -123,9 +114,6 @@ class OpenFoodFactsService:
|
|||
allergens = product.get("allergens_tags", [])
|
||||
labels = product.get("labels_tags", [])
|
||||
|
||||
# Pack size detection: prefer explicit unit_count, fall back to serving count
|
||||
pack_quantity, pack_unit = self._extract_pack_size(product)
|
||||
|
||||
return {
|
||||
"name": name,
|
||||
"brand": brand,
|
||||
|
|
@ -136,47 +124,9 @@ class OpenFoodFactsService:
|
|||
"nutrition_data": nutrition_data,
|
||||
"allergens": allergens,
|
||||
"labels": labels,
|
||||
"pack_quantity": pack_quantity,
|
||||
"pack_unit": pack_unit,
|
||||
"raw_data": product, # Store full response for debugging
|
||||
}
|
||||
|
||||
def _extract_pack_size(self, product: Dict[str, Any]) -> tuple[float | None, str | None]:
|
||||
"""Return (quantity, unit) for multi-pack products, or (None, None).
|
||||
|
||||
OFFs fields tried in order:
|
||||
1. `number_of_units` (explicit count, highest confidence)
|
||||
2. `serving_quantity` + `product_quantity_unit` (e.g. 6 x 150g yoghurt)
|
||||
3. Parse `quantity` string like "4 x 113 g" or "6 pack"
|
||||
|
||||
Returns None, None when data is absent, ambiguous, or single-unit.
|
||||
"""
|
||||
import re
|
||||
|
||||
# Field 1: explicit unit count
|
||||
unit_count = product.get("number_of_units")
|
||||
if unit_count:
|
||||
try:
|
||||
n = float(unit_count)
|
||||
if n > 1:
|
||||
return n, product.get("serving_size_unit") or "unit"
|
||||
except (ValueError, TypeError):
|
||||
pass
|
||||
|
||||
# Field 2: parse quantity string for "N x ..." pattern
|
||||
qty_str = product.get("quantity", "")
|
||||
if qty_str:
|
||||
m = re.match(r"^(\d+(?:\.\d+)?)\s*[xX×]\s*", qty_str.strip())
|
||||
if m:
|
||||
n = float(m.group(1))
|
||||
if n > 1:
|
||||
# Try to get a sensible sub-unit label from the rest
|
||||
rest = qty_str[m.end():].strip()
|
||||
unit_label = re.sub(r"[\d.,\s]+", "", rest).strip()[:20] or "unit"
|
||||
return n, unit_label
|
||||
|
||||
return None, None
|
||||
|
||||
def _extract_nutrition_data(self, product: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""
|
||||
Extract nutrition facts from product data.
|
||||
|
|
|
|||
File diff suppressed because it is too large
Load diff
|
|
@ -1,633 +0,0 @@
|
|||
"""
|
||||
Recipe browser domain schemas.
|
||||
|
||||
Each domain provides a two-level category hierarchy for browsing the recipe corpus.
|
||||
Keyword matching is case-insensitive against the recipes.category column and the
|
||||
recipes.keywords JSON array. A recipe may appear in multiple categories (correct).
|
||||
|
||||
Category values are either:
|
||||
- list[str] — flat keyword list (no subcategories)
|
||||
- dict — {"keywords": list[str], "subcategories": {name: list[str]}}
|
||||
keywords covers the whole category (used for "All X" browse);
|
||||
subcategories each have their own narrower keyword list.
|
||||
|
||||
These are starter mappings based on the food.com dataset structure. Run:
|
||||
|
||||
SELECT category, count(*) FROM recipes
|
||||
GROUP BY category ORDER BY count(*) DESC LIMIT 50;
|
||||
|
||||
against the corpus to verify coverage and refine keyword lists before the first
|
||||
production deploy.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
DOMAINS: dict[str, dict] = {
|
||||
"cuisine": {
|
||||
"label": "Cuisine",
|
||||
"categories": {
|
||||
"Italian": {
|
||||
"keywords": ["italian", "pasta", "pizza", "risotto", "lasagna", "carbonara"],
|
||||
"subcategories": {
|
||||
"Sicilian": ["sicilian", "sicily", "arancini", "caponata",
|
||||
"involtini", "cannoli"],
|
||||
"Neapolitan": ["neapolitan", "naples", "pizza napoletana",
|
||||
"sfogliatelle", "ragù"],
|
||||
"Tuscan": ["tuscan", "tuscany", "ribollita", "bistecca",
|
||||
"pappardelle", "crostini"],
|
||||
"Roman": ["roman", "rome", "cacio e pepe", "carbonara",
|
||||
"amatriciana", "gricia", "supplì"],
|
||||
"Venetian": ["venetian", "venice", "risotto", "bigoli",
|
||||
"baccalà", "sarde in saor"],
|
||||
"Ligurian": ["ligurian", "liguria", "pesto", "focaccia",
|
||||
"trofie", "farinata"],
|
||||
},
|
||||
},
|
||||
"Mexican": {
|
||||
"keywords": ["mexican", "taco", "enchilada", "burrito", "salsa",
|
||||
"guacamole", "mole", "tamale"],
|
||||
"subcategories": {
|
||||
"Oaxacan": ["oaxacan", "oaxaca", "mole negro", "tlayuda",
|
||||
"chapulines", "mezcal", "tasajo", "memelas"],
|
||||
"Yucatecan": ["yucatecan", "yucatan", "cochinita pibil", "poc chuc",
|
||||
"sopa de lima", "panuchos", "papadzules"],
|
||||
"Veracruz": ["veracruz", "veracruzana", "huachinango",
|
||||
"picadas", "enfrijoladas", "caldo de mariscos"],
|
||||
"Street Food": ["taco", "elote", "tlacoyos", "torta", "tamale",
|
||||
"quesadilla", "tostada", "sope", "gordita"],
|
||||
"Mole": ["mole", "mole negro", "mole rojo", "mole verde",
|
||||
"mole poblano", "mole amarillo", "pipián"],
|
||||
"Baja / Cal-Mex": ["baja", "baja california", "cal-mex", "baja fish taco",
|
||||
"fish taco", "carne asada fries", "california burrito",
|
||||
"birria", "birria tacos", "quesabirria",
|
||||
"lobster puerto nuevo", "tijuana", "ensenada",
|
||||
"agua fresca", "caesar salad tijuana"],
|
||||
"Mexico City": ["mexico city", "chilaquiles", "tlayuda cdmx",
|
||||
"tacos de canasta", "torta ahogada", "pozole",
|
||||
"chiles en nogada"],
|
||||
},
|
||||
},
|
||||
"Asian": {
|
||||
"keywords": ["asian", "chinese", "japanese", "thai", "korean", "vietnamese",
|
||||
"stir fry", "stir-fry", "ramen", "sushi", "malaysian",
|
||||
"taiwanese", "singaporean", "burmese", "cambodian",
|
||||
"laotian", "mongolian", "hong kong"],
|
||||
"subcategories": {
|
||||
"Korean": ["korean", "kimchi", "bibimbap", "bulgogi", "japchae",
|
||||
"doenjang", "gochujang", "tteokbokki", "sundubu",
|
||||
"galbi", "jjigae", "kbbq", "korean fried chicken"],
|
||||
"Japanese": ["japanese", "sushi", "ramen", "tempura", "miso",
|
||||
"teriyaki", "udon", "soba", "bento", "yakitori",
|
||||
"tonkatsu", "onigiri", "okonomiyaki", "takoyaki",
|
||||
"kaiseki", "izakaya"],
|
||||
"Chinese": ["chinese", "dim sum", "fried rice", "dumplings", "wonton",
|
||||
"spring roll", "szechuan", "sichuan", "cantonese",
|
||||
"chow mein", "mapo tofu", "lo mein", "hot pot",
|
||||
"peking duck", "char siu", "congee"],
|
||||
"Thai": ["thai", "pad thai", "green curry", "red curry",
|
||||
"coconut milk", "lemongrass", "satay", "tom yum",
|
||||
"larb", "khao man gai", "massaman", "pad see ew"],
|
||||
"Vietnamese": ["vietnamese", "pho", "banh mi", "spring rolls",
|
||||
"vermicelli", "nuoc cham", "bun bo hue",
|
||||
"banh xeo", "com tam", "bun cha"],
|
||||
"Filipino": ["filipino", "adobo", "sinigang", "pancit", "lumpia",
|
||||
"kare-kare", "lechon", "sisig", "halo-halo",
|
||||
"dinuguan", "tinola", "bistek"],
|
||||
"Indonesian": ["indonesian", "rendang", "nasi goreng", "gado-gado",
|
||||
"tempeh", "sambal", "soto", "opor ayam",
|
||||
"bakso", "mie goreng", "nasi uduk"],
|
||||
"Malaysian": ["malaysian", "laksa", "nasi lemak", "char kway teow",
|
||||
"satay malaysia", "roti canai", "bak kut teh",
|
||||
"cendol", "mee goreng mamak", "curry laksa"],
|
||||
"Taiwanese": ["taiwanese", "beef noodle soup", "lu rou fan",
|
||||
"oyster vermicelli", "scallion pancake taiwan",
|
||||
"pork chop rice", "three cup chicken",
|
||||
"bubble tea", "stinky tofu", "ba wan"],
|
||||
"Singaporean": ["singaporean", "chicken rice", "chili crab",
|
||||
"singaporean laksa", "bak chor mee", "rojak",
|
||||
"kaya toast", "nasi padang", "satay singapore"],
|
||||
"Burmese": ["burmese", "myanmar", "mohinga", "laphet thoke",
|
||||
"tea leaf salad", "ohn no khao swe",
|
||||
"mont di", "nangyi thoke"],
|
||||
"Hong Kong": ["hong kong", "hk style", "pineapple bun",
|
||||
"wonton noodle soup", "hk milk tea", "egg tart",
|
||||
"typhoon shelter crab", "char siu bao", "jook",
|
||||
"congee hk", "silk stocking tea", "dan tat",
|
||||
"siu mai hk", "cheung fun"],
|
||||
"Cambodian": ["cambodian", "khmer", "amok", "lok lak",
|
||||
"kuy teav", "bai sach chrouk", "nom banh chok",
|
||||
"samlor korko", "beef loc lac"],
|
||||
"Laotian": ["laotian", "lao", "larb", "tam mak hoong",
|
||||
"or lam", "khao niaw", "ping kai",
|
||||
"naem khao", "khao piak sen", "mok pa"],
|
||||
"Mongolian": ["mongolian", "buuz", "khuushuur", "tsuivan",
|
||||
"boodog", "airag", "khorkhog", "bansh",
|
||||
"guriltai shol", "suutei tsai"],
|
||||
"South Asian Fusion": ["south asian fusion", "indo-chinese",
|
||||
"hakka chinese", "chilli chicken",
|
||||
"manchurian", "schezwan"],
|
||||
},
|
||||
},
|
||||
"Indian": {
|
||||
"keywords": ["indian", "curry", "lentil", "dal", "tikka", "masala",
|
||||
"biryani", "naan", "chutney", "pakistani", "sri lankan",
|
||||
"bangladeshi", "nepali"],
|
||||
"subcategories": {
|
||||
"North Indian": ["north indian", "punjabi", "mughal", "tikka masala",
|
||||
"naan", "tandoori", "butter chicken", "palak paneer",
|
||||
"chole", "rajma", "aloo gobi"],
|
||||
"South Indian": ["south indian", "tamil", "kerala", "dosa", "idli",
|
||||
"sambar", "rasam", "coconut chutney", "appam",
|
||||
"fish curry kerala", "puttu", "payasam"],
|
||||
"Bengali": ["bengali", "mustard fish", "hilsa", "shorshe ilish",
|
||||
"mishti doi", "rasgulla", "kosha mangsho"],
|
||||
"Gujarati": ["gujarati", "dhokla", "thepla", "undhiyu",
|
||||
"khandvi", "fafda", "gujarati dal"],
|
||||
"Pakistani": ["pakistani", "nihari", "haleem", "seekh kebab",
|
||||
"karahi", "biryani karachi", "chapli kebab",
|
||||
"halwa puri", "paya"],
|
||||
"Sri Lankan": ["sri lankan", "kottu roti", "hoppers", "pol sambol",
|
||||
"sri lankan curry", "lamprais", "string hoppers",
|
||||
"wambatu moju"],
|
||||
"Bangladeshi": ["bangladeshi", "bangladesh", "dhaka biryani",
|
||||
"shutki", "pitha", "hilsa curry", "kacchi biryani",
|
||||
"bhuna khichuri", "doi maach", "rezala"],
|
||||
"Nepali": ["nepali", "dal bhat", "momos", "sekuwa",
|
||||
"sel roti", "gundruk", "thukpa"],
|
||||
},
|
||||
},
|
||||
"Mediterranean": {
|
||||
"keywords": ["mediterranean", "greek", "middle eastern", "turkish",
|
||||
"lebanese", "jewish", "palestinian", "yemeni", "egyptian",
|
||||
"syrian", "iraqi", "jordanian"],
|
||||
"subcategories": {
|
||||
"Greek": ["greek", "feta", "tzatziki", "moussaka", "spanakopita",
|
||||
"souvlaki", "dolmades", "spanakopita", "tiropita",
|
||||
"galaktoboureko"],
|
||||
"Turkish": ["turkish", "kebab", "borek", "meze", "baklava",
|
||||
"lahmacun", "menemen", "pide", "iskender",
|
||||
"kisir", "simit"],
|
||||
"Syrian": ["syrian", "fattet hummus", "kibbeh syria",
|
||||
"muhammara", "maklouba syria", "sfeeha",
|
||||
"halawet el jibn"],
|
||||
"Lebanese": ["lebanese", "middle eastern", "hummus", "falafel",
|
||||
"tabbouleh", "kibbeh", "fattoush", "manakish",
|
||||
"kafta", "sfiha"],
|
||||
"Jewish": ["jewish", "israeli", "ashkenazi", "sephardic",
|
||||
"shakshuka", "sabich", "za'atar", "tahini",
|
||||
"zhug", "zhoug", "s'khug", "z'houg",
|
||||
"hawaiij", "hawaij", "hawayej",
|
||||
"matzo", "latke", "rugelach", "babka", "challah",
|
||||
"cholent", "gefilte fish", "brisket", "kugel",
|
||||
"new york jewish", "new york deli", "pastrami",
|
||||
"knish", "lox", "bagel and lox", "jewish deli"],
|
||||
"Palestinian": ["palestinian", "musakhan", "maqluba", "knafeh",
|
||||
"maftoul", "freekeh", "sumac chicken"],
|
||||
"Yemeni": ["yemeni", "saltah", "lahoh", "bint al-sahn",
|
||||
"zhug", "zhoug", "hulba", "fahsa",
|
||||
"hawaiij", "hawaij", "hawayej"],
|
||||
"Egyptian": ["egyptian", "koshari", "molokhia", "mahshi",
|
||||
"ful medames", "ta'ameya", "feteer meshaltet"],
|
||||
},
|
||||
},
|
||||
"American": {
|
||||
"keywords": ["american", "southern", "comfort food", "cajun", "creole",
|
||||
"hawaiian", "tex-mex", "soul food"],
|
||||
"subcategories": {
|
||||
"Southern": ["southern", "soul food", "fried chicken",
|
||||
"collard greens", "cornbread", "biscuits and gravy",
|
||||
"mac and cheese", "sweet potato pie", "okra"],
|
||||
"Cajun/Creole": ["cajun", "creole", "new orleans", "gumbo",
|
||||
"jambalaya", "etouffee", "dirty rice", "po'boy",
|
||||
"muffuletta", "red beans and rice"],
|
||||
"Tex-Mex": ["tex-mex", "southwestern", "chili", "fajita",
|
||||
"queso", "breakfast taco", "chile con carne"],
|
||||
"New England": ["new england", "chowder", "lobster", "clam",
|
||||
"maple", "yankee", "boston baked beans",
|
||||
"johnnycake", "fish and chips"],
|
||||
"Pacific Northwest": ["pacific northwest", "pnw", "dungeness crab",
|
||||
"salmon", "cedar plank", "razor clam",
|
||||
"geoduck", "chanterelle", "marionberry"],
|
||||
"Hawaiian": ["hawaiian", "hawaii", "plate lunch", "loco moco",
|
||||
"poke", "spam musubi", "kalua pig", "lau lau",
|
||||
"haupia", "poi", "manapua", "garlic shrimp",
|
||||
"saimin", "huli huli", "malasada"],
|
||||
},
|
||||
},
|
||||
"BBQ & Smoke": {
|
||||
"keywords": ["bbq", "barbecue", "smoked", "pit", "smoke ring",
|
||||
"low and slow", "brisket", "pulled pork", "ribs"],
|
||||
"subcategories": {
|
||||
"Texas BBQ": ["texas bbq", "central texas bbq", "brisket",
|
||||
"beef ribs", "post oak", "salt and pepper rub",
|
||||
"east texas bbq", "lockhart", "franklin style"],
|
||||
"Carolina BBQ": ["carolina bbq", "north carolina bbq", "whole hog",
|
||||
"vinegar sauce", "lexington style", "eastern nc",
|
||||
"south carolina bbq", "mustard sauce"],
|
||||
"Kansas City BBQ": ["kansas city bbq", "kc bbq", "burnt ends",
|
||||
"sweet bbq sauce", "tomato molasses sauce",
|
||||
"baby back ribs kc"],
|
||||
"Memphis BBQ": ["memphis bbq", "dry rub ribs", "wet ribs",
|
||||
"memphis style", "dry rub pork"],
|
||||
"Alabama BBQ": ["alabama bbq", "white sauce", "alabama white sauce",
|
||||
"smoked chicken alabama"],
|
||||
"Kentucky BBQ": ["kentucky bbq", "mutton bbq", "owensboro bbq",
|
||||
"black dip", "western kentucky barbecue"],
|
||||
"St. Louis BBQ": ["st louis bbq", "st. louis ribs", "st louis cut ribs",
|
||||
"st louis style spare ribs"],
|
||||
"Backyard Grill": ["backyard bbq", "cookout", "grilled burgers",
|
||||
"charcoal grill", "kettle grill", "tailgate"],
|
||||
},
|
||||
},
|
||||
"European": {
|
||||
"keywords": ["french", "german", "spanish", "british", "irish", "scottish",
|
||||
"welsh", "scandinavian", "nordic", "eastern european"],
|
||||
"subcategories": {
|
||||
"French": ["french", "provencal", "beurre", "crepe",
|
||||
"ratatouille", "cassoulet", "bouillabaisse"],
|
||||
"Spanish": ["spanish", "paella", "tapas", "gazpacho",
|
||||
"tortilla espanola", "chorizo"],
|
||||
"German": ["german", "bratwurst", "sauerkraut", "schnitzel",
|
||||
"pretzel", "strudel"],
|
||||
"British": ["british", "english", "pub food", "cornish",
|
||||
"shepherd's pie", "bangers", "toad in the hole",
|
||||
"coronation chicken", "london", "londoner",
|
||||
"cornish pasty", "ploughman's"],
|
||||
"Irish": ["irish", "ireland", "colcannon", "coddle",
|
||||
"irish stew", "soda bread", "boxty", "champ"],
|
||||
"Scottish": ["scottish", "scotland", "haggis", "cullen skink",
|
||||
"cranachan", "scotch broth", "glaswegian",
|
||||
"neeps and tatties", "tablet"],
|
||||
"Scandinavian": ["scandinavian", "nordic", "swedish", "norwegian",
|
||||
"danish", "finnish", "gravlax", "swedish meatballs",
|
||||
"lefse", "smörgåsbord", "fika", "crispbread",
|
||||
"cardamom bun", "herring", "æbleskiver",
|
||||
"lingonberry", "lutefisk", "janssons frestelse",
|
||||
"knäckebröd", "kladdkaka"],
|
||||
"Eastern European": ["eastern european", "polish", "russian", "ukrainian",
|
||||
"czech", "hungarian", "pierogi", "borscht",
|
||||
"goulash", "kielbasa", "varenyky", "pelmeni"],
|
||||
},
|
||||
},
|
||||
"Latin American": {
|
||||
"keywords": ["latin american", "peruvian", "argentinian", "colombian",
|
||||
"cuban", "caribbean", "brazilian", "venezuelan", "chilean"],
|
||||
"subcategories": {
|
||||
"Peruvian": ["peruvian", "ceviche", "lomo saltado", "anticucho",
|
||||
"aji amarillo", "causa", "leche de tigre",
|
||||
"arroz con leche peru", "pollo a la brasa"],
|
||||
"Brazilian": ["brazilian", "churrasco", "feijoada", "pao de queijo",
|
||||
"brigadeiro", "coxinha", "moqueca", "vatapa",
|
||||
"caipirinha", "acai bowl"],
|
||||
"Colombian": ["colombian", "bandeja paisa", "arepas", "empanadas",
|
||||
"sancocho", "ajiaco", "buñuelos", "changua"],
|
||||
"Argentinian": ["argentinian", "asado", "chimichurri", "empanadas argentina",
|
||||
"milanesa", "locro", "dulce de leche", "medialunas"],
|
||||
"Venezuelan": ["venezuelan", "pabellón criollo", "arepas venezuela",
|
||||
"hallacas", "cachapas", "tequeños", "caraotas"],
|
||||
"Chilean": ["chilean", "cazuela", "pastel de choclo", "curanto",
|
||||
"sopaipillas", "charquicán", "completo"],
|
||||
"Cuban": ["cuban", "ropa vieja", "moros y cristianos",
|
||||
"picadillo", "lechon cubano", "vaca frita",
|
||||
"tostones", "platanos maduros"],
|
||||
"Jamaican": ["jamaican", "jerk chicken", "jerk pork", "ackee saltfish",
|
||||
"curry goat", "rice and peas", "escovitch",
|
||||
"jamaican patty", "callaloo jamaica", "festival"],
|
||||
"Puerto Rican": ["puerto rican", "mofongo", "pernil", "arroz con gandules",
|
||||
"sofrito", "pasteles", "tostones pr", "tembleque",
|
||||
"coquito", "asopao"],
|
||||
"Dominican": ["dominican", "mangu", "sancocho dominicano",
|
||||
"pollo guisado", "habichuelas guisadas",
|
||||
"tostones dominicanos", "morir soñando"],
|
||||
"Haitian": ["haitian", "griot", "pikliz", "riz et pois",
|
||||
"joumou", "akra", "pain patate", "labouyi"],
|
||||
"Trinidad": ["trinidadian", "doubles", "roti trinidad", "pelau",
|
||||
"callaloo trinidad", "bake and shark",
|
||||
"curry duck", "oil down"],
|
||||
},
|
||||
},
|
||||
"Central American": {
|
||||
"keywords": ["central american", "salvadoran", "guatemalan",
|
||||
"honduran", "nicaraguan", "costa rican", "panamanian"],
|
||||
"subcategories": {
|
||||
"Salvadoran": ["salvadoran", "el salvador", "pupusas", "curtido",
|
||||
"sopa de pata", "nuégados", "atol shuco"],
|
||||
"Guatemalan": ["guatemalan", "pepián", "jocon", "kak'ik",
|
||||
"hilachas", "rellenitos", "fiambre"],
|
||||
"Costa Rican": ["costa rican", "gallo pinto", "casado",
|
||||
"olla de carne", "arroz con leche cr",
|
||||
"tres leches cr"],
|
||||
"Honduran": ["honduran", "baleadas", "sopa de caracol",
|
||||
"tapado", "machuca", "catrachitas"],
|
||||
"Nicaraguan": ["nicaraguan", "nacatamal", "vigorón", "indio viejo",
|
||||
"gallo pinto nicaragua", "güirilas"],
|
||||
},
|
||||
},
|
||||
"African": {
|
||||
"keywords": ["african", "west african", "east african", "ethiopian",
|
||||
"nigerian", "ghanaian", "kenyan", "south african",
|
||||
"senegalese", "tunisian"],
|
||||
"subcategories": {
|
||||
"West African": ["west african", "nigerian", "ghanaian",
|
||||
"jollof rice", "egusi soup", "fufu", "suya",
|
||||
"groundnut stew", "kelewele", "kontomire",
|
||||
"waakye", "ofam", "bitterleaf soup"],
|
||||
"Senegalese": ["senegalese", "senegal", "thieboudienne",
|
||||
"yassa", "mafe", "thiou", "ceebu jen",
|
||||
"domoda"],
|
||||
"Ethiopian & Eritrean": ["ethiopian", "eritrean", "injera", "doro wat",
|
||||
"kitfo", "tibs", "shiro", "misir wat",
|
||||
"gomen", "ful ethiopian", "tegamino"],
|
||||
"East African": ["east african", "kenyan", "tanzanian", "ugandan",
|
||||
"nyama choma", "ugali", "sukuma wiki",
|
||||
"pilau kenya", "mandazi", "matoke",
|
||||
"githeri", "irio"],
|
||||
"North African": ["north african", "tunisian", "algerian", "libyan",
|
||||
"brik", "lablabi", "merguez", "shakshuka tunisian",
|
||||
"harissa tunisian", "couscous algerian"],
|
||||
"South African": ["south african", "braai", "bobotie", "boerewors",
|
||||
"bunny chow", "pap", "chakalaka", "biltong",
|
||||
"malva pudding", "koeksister", "potjiekos"],
|
||||
"Moroccan": ["moroccan", "tagine", "couscous morocco",
|
||||
"harissa", "chermoula", "preserved lemon",
|
||||
"pastilla", "mechoui", "bastilla"],
|
||||
},
|
||||
},
|
||||
"Pacific & Oceania": {
|
||||
"keywords": ["pacific", "oceania", "polynesian", "melanesian",
|
||||
"micronesian", "maori", "fijian", "samoan", "tongan",
|
||||
"hawaiian", "australian", "new zealand"],
|
||||
"subcategories": {
|
||||
"Māori / New Zealand": ["maori", "new zealand", "hangi", "rewena bread",
|
||||
"boil-up", "paua", "kumara", "pavlova nz",
|
||||
"whitebait fritter", "kina", "hokey pokey"],
|
||||
"Australian": ["australian", "meat pie", "lamington",
|
||||
"anzac biscuits", "damper", "barramundi",
|
||||
"vegemite", "pavlova australia", "tim tam",
|
||||
"sausage sizzle", "chiko roll", "fairy bread"],
|
||||
"Fijian": ["fijian", "fiji", "kokoda", "lovo",
|
||||
"rourou", "palusami fiji", "duruka",
|
||||
"vakalolo"],
|
||||
"Samoan": ["samoan", "samoa", "palusami", "oka",
|
||||
"fa'ausi", "chop suey samoa", "sapasui",
|
||||
"koko alaisa", "supo esi"],
|
||||
"Tongan": ["tongan", "tonga", "lu pulu", "'ota 'ika",
|
||||
"fekkai", "faikakai topai", "kapisi pulu"],
|
||||
"Papua New Guinean": ["papua new guinea", "png", "mumu",
|
||||
"sago", "aibika", "kaukau",
|
||||
"taro png", "coconut crab"],
|
||||
"Hawaiian": ["hawaiian", "hawaii", "poke", "loco moco",
|
||||
"plate lunch", "kalua pig", "haupia",
|
||||
"spam musubi", "poi", "malasada"],
|
||||
},
|
||||
},
|
||||
"Central Asian & Caucasus": {
|
||||
"keywords": ["central asian", "caucasus", "georgian", "armenian", "uzbek",
|
||||
"afghan", "persian", "iranian", "azerbaijani", "kazakh"],
|
||||
"subcategories": {
|
||||
"Persian / Iranian": ["persian", "iranian", "ghormeh sabzi", "fesenjan",
|
||||
"tahdig", "joojeh kabab", "ash reshteh",
|
||||
"zereshk polo", "khoresh", "mast o khiar",
|
||||
"kashk-e-bademjan", "mirza ghasemi",
|
||||
"baghali polo"],
|
||||
"Georgian": ["georgian", "georgia", "khachapuri", "khinkali",
|
||||
"churchkhela", "ajapsandali", "satsivi",
|
||||
"pkhali", "lobiani", "badrijani nigvzit"],
|
||||
"Armenian": ["armenian", "dolma armenia", "lahmajoun",
|
||||
"manti armenia", "ghapama", "basturma",
|
||||
"harissa armenia", "nazook", "tolma"],
|
||||
"Azerbaijani": ["azerbaijani", "azerbaijan", "plov azerbaijan",
|
||||
"dolma azeri", "dushbara", "levengi",
|
||||
"shah plov", "gutab"],
|
||||
"Uzbek": ["uzbek", "uzbekistan", "plov", "samsa",
|
||||
"lagman", "shashlik", "manti uzbek",
|
||||
"non bread", "dimlama", "sumalak"],
|
||||
"Afghan": ["afghan", "afghanistan", "kabuli pulao", "mantu",
|
||||
"bolani", "qorma", "ashak", "shorwa",
|
||||
"aushak", "borani banjan"],
|
||||
"Kazakh": ["kazakh", "beshbarmak", "kuyrdak", "baursak",
|
||||
"kurt", "shubat", "kazy"],
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
"meal_type": {
|
||||
"label": "Meal Type",
|
||||
"categories": {
|
||||
"Breakfast": {
|
||||
"keywords": ["breakfast", "brunch", "eggs", "pancakes", "waffles",
|
||||
"oatmeal", "muffin"],
|
||||
"subcategories": {
|
||||
"Eggs": ["egg", "omelette", "frittata", "quiche",
|
||||
"scrambled", "benedict", "shakshuka"],
|
||||
"Pancakes & Waffles": ["pancake", "waffle", "crepe", "french toast"],
|
||||
"Baked Goods": ["muffin", "scone", "biscuit", "quick bread",
|
||||
"coffee cake", "danish"],
|
||||
"Oats & Grains": ["oatmeal", "granola", "porridge", "muesli",
|
||||
"overnight oats"],
|
||||
},
|
||||
},
|
||||
"Lunch": {
|
||||
"keywords": ["lunch", "sandwich", "wrap", "salad", "soup", "light meal"],
|
||||
"subcategories": {
|
||||
"Sandwiches": ["sandwich", "sub", "hoagie", "panini", "club",
|
||||
"grilled cheese", "blt"],
|
||||
"Salads": ["salad", "grain bowl", "chopped", "caesar",
|
||||
"niçoise", "cobb"],
|
||||
"Soups": ["soup", "bisque", "chowder", "gazpacho",
|
||||
"minestrone", "lentil soup"],
|
||||
"Wraps": ["wrap", "burrito bowl", "pita", "lettuce wrap",
|
||||
"quesadilla"],
|
||||
},
|
||||
},
|
||||
"Dinner": {
|
||||
"keywords": ["dinner", "main dish", "entree", "main course", "supper"],
|
||||
"subcategories": {
|
||||
"Casseroles": ["casserole", "bake", "gratin", "lasagna",
|
||||
"sheperd's pie", "pot pie"],
|
||||
"Stews": ["stew", "braise", "slow cooker", "pot roast",
|
||||
"daube", "ragù"],
|
||||
"Grilled": ["grilled", "grill", "barbecue", "charred",
|
||||
"kebab", "skewer"],
|
||||
"Stir-Fries": ["stir fry", "stir-fry", "wok", "sauté",
|
||||
"sauteed"],
|
||||
"Roasts": ["roast", "roasted", "oven", "baked chicken",
|
||||
"pot roast"],
|
||||
},
|
||||
},
|
||||
"Snack": {
|
||||
"keywords": ["snack", "appetizer", "finger food", "dip", "bite",
|
||||
"starter"],
|
||||
"subcategories": {
|
||||
"Dips & Spreads": ["dip", "spread", "hummus", "guacamole",
|
||||
"salsa", "pate"],
|
||||
"Finger Foods": ["finger food", "bite", "skewer", "slider",
|
||||
"wing", "nugget"],
|
||||
"Chips & Crackers": ["chip", "cracker", "crisp", "popcorn",
|
||||
"pretzel"],
|
||||
},
|
||||
},
|
||||
"Dessert": {
|
||||
"keywords": ["dessert", "cake", "cookie", "pie", "sweet", "pudding",
|
||||
"ice cream", "brownie"],
|
||||
"subcategories": {
|
||||
"Cakes": ["cake", "cupcake", "layer cake", "bundt",
|
||||
"cheesecake", "torte"],
|
||||
"Cookies & Bars": ["cookie", "brownie", "blondie", "bar",
|
||||
"biscotti", "shortbread"],
|
||||
"Pies & Tarts": ["pie", "tart", "galette", "cobbler", "crisp",
|
||||
"crumble"],
|
||||
"Frozen": ["ice cream", "gelato", "sorbet", "frozen dessert",
|
||||
"popsicle", "granita"],
|
||||
"Puddings": ["pudding", "custard", "mousse", "panna cotta",
|
||||
"flan", "creme brulee"],
|
||||
"Candy": ["candy", "fudge", "truffle", "brittle",
|
||||
"caramel", "toffee"],
|
||||
},
|
||||
},
|
||||
"Beverage": ["drink", "smoothie", "cocktail", "beverage", "juice", "shake"],
|
||||
"Side Dish": ["side dish", "side", "accompaniment", "garnish"],
|
||||
},
|
||||
},
|
||||
"dietary": {
|
||||
"label": "Dietary",
|
||||
"categories": {
|
||||
"Vegetarian": ["vegetarian"],
|
||||
"Vegan": ["vegan", "plant-based", "plant based"],
|
||||
"Gluten-Free": ["gluten-free", "gluten free", "celiac"],
|
||||
"Low-Carb": ["low-carb", "low carb", "keto", "ketogenic"],
|
||||
"High-Protein": ["high protein", "high-protein"],
|
||||
"Low-Fat": ["low-fat", "low fat", "light"],
|
||||
"Dairy-Free": ["dairy-free", "dairy free", "lactose"],
|
||||
},
|
||||
},
|
||||
"main_ingredient": {
|
||||
"label": "Main Ingredient",
|
||||
"categories": {
|
||||
# keywords use exact inferred_tag strings (main:X) — indexed into recipe_browser_fts.
|
||||
"Chicken": {
|
||||
"keywords": ["main:Chicken"],
|
||||
"subcategories": {
|
||||
"Baked": ["baked chicken", "roast chicken", "chicken casserole",
|
||||
"chicken bake"],
|
||||
"Grilled": ["grilled chicken", "chicken kebab", "bbq chicken",
|
||||
"chicken skewer"],
|
||||
"Fried": ["fried chicken", "chicken cutlet", "chicken schnitzel",
|
||||
"crispy chicken"],
|
||||
"Stewed": ["chicken stew", "chicken soup", "coq au vin",
|
||||
"chicken curry", "chicken braise"],
|
||||
},
|
||||
},
|
||||
"Beef": {
|
||||
"keywords": ["main:Beef"],
|
||||
"subcategories": {
|
||||
"Ground Beef": ["ground beef", "hamburger", "meatball", "meatloaf",
|
||||
"bolognese", "burger"],
|
||||
"Steak": ["steak", "sirloin", "ribeye", "flank steak",
|
||||
"filet mignon", "t-bone"],
|
||||
"Roasts": ["beef roast", "pot roast", "brisket", "prime rib",
|
||||
"chuck roast"],
|
||||
"Stews": ["beef stew", "beef braise", "beef bourguignon",
|
||||
"short ribs"],
|
||||
},
|
||||
},
|
||||
"Pork": {
|
||||
"keywords": ["main:Pork"],
|
||||
"subcategories": {
|
||||
"Chops": ["pork chop", "pork loin", "pork cutlet"],
|
||||
"Pulled/Slow": ["pulled pork", "pork shoulder", "pork butt",
|
||||
"carnitas", "slow cooker pork"],
|
||||
"Sausage": ["sausage", "bratwurst", "chorizo", "andouille",
|
||||
"Italian sausage"],
|
||||
"Ribs": ["pork ribs", "baby back ribs", "spare ribs",
|
||||
"pork belly"],
|
||||
},
|
||||
},
|
||||
"Fish": {
|
||||
"keywords": ["main:Fish"],
|
||||
"subcategories": {
|
||||
"Salmon": ["salmon", "smoked salmon", "gravlax"],
|
||||
"Tuna": ["tuna", "albacore", "ahi"],
|
||||
"White Fish": ["cod", "tilapia", "halibut", "sole", "snapper",
|
||||
"flounder", "bass"],
|
||||
"Shellfish": ["shrimp", "prawn", "crab", "lobster", "scallop",
|
||||
"mussel", "clam", "oyster"],
|
||||
},
|
||||
},
|
||||
"Pasta": ["main:Pasta"],
|
||||
"Vegetables": {
|
||||
"keywords": ["main:Vegetables"],
|
||||
"subcategories": {
|
||||
"Root Veg": ["potato", "sweet potato", "carrot", "beet",
|
||||
"parsnip", "turnip"],
|
||||
"Leafy": ["spinach", "kale", "chard", "arugula",
|
||||
"collard greens", "lettuce"],
|
||||
"Brassicas": ["broccoli", "cauliflower", "brussels sprouts",
|
||||
"cabbage", "bok choy"],
|
||||
"Nightshades": ["tomato", "eggplant", "bell pepper", "zucchini",
|
||||
"squash"],
|
||||
"Mushrooms": ["mushroom", "portobello", "shiitake", "oyster mushroom",
|
||||
"chanterelle"],
|
||||
},
|
||||
},
|
||||
"Eggs": ["main:Eggs"],
|
||||
"Legumes": ["main:Legumes"],
|
||||
"Grains": ["main:Grains"],
|
||||
"Cheese": ["main:Cheese"],
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
def _get_category_def(domain: str, category: str) -> list[str] | dict | None:
    """Look up the raw definition of *category* inside *domain*.

    Returns the stored value — a flat keyword list or a nested dict —
    or None when either the domain or the category is unknown.
    """
    domain_entry = DOMAINS.get(domain)
    if domain_entry is None:
        return None
    return domain_entry.get("categories", {}).get(category)
|
||||
|
||||
|
||||
def get_domain_labels() -> list[dict]:
    """List every browse domain as an {"id": ..., "label": ...} mapping."""
    return [
        {"id": domain_id, "label": spec["label"]}
        for domain_id, spec in DOMAINS.items()
    ]
|
||||
|
||||
|
||||
def get_keywords_for_category(domain: str, category: str) -> list[str]:
    """Return the top-level keyword list covering the whole category.

    Flat categories store their keywords directly as a list; nested
    categories keep them under the "keywords" key (covering every
    subcategory for the "All X" browse). An unknown domain or category
    yields an empty list.
    """
    definition = _get_category_def(domain, category)
    if isinstance(definition, list):
        return definition
    if isinstance(definition, dict):
        return definition.get("keywords", [])
    # None: domain or category not found.
    return []
|
||||
|
||||
|
||||
def category_has_subcategories(domain: str, category: str) -> bool:
    """Tell whether *category* defines a nested subcategory level."""
    definition = _get_category_def(domain, category)
    # Flat (list) categories and unknown categories have no sublevel;
    # an empty "subcategories" dict also counts as none.
    return isinstance(definition, dict) and bool(definition.get("subcategories"))
|
||||
|
||||
|
||||
def get_subcategory_names(domain: str, category: str) -> list[str]:
    """Return the subcategory names of *category*, or [] when none exist."""
    definition = _get_category_def(domain, category)
    if not isinstance(definition, dict):
        return []
    # Iterating the dict yields its keys in insertion order.
    return list(definition.get("subcategories", {}))
|
||||
|
||||
|
||||
def get_keywords_for_subcategory(domain: str, category: str, subcategory: str) -> list[str]:
    """Return the keyword list of one subcategory, or [] when not found."""
    definition = _get_category_def(domain, category)
    if not isinstance(definition, dict):
        # Flat categories have no subcategories at all.
        return []
    subcats = definition.get("subcategories", {})
    return subcats.get(subcategory, [])
|
||||
|
||||
|
||||
def get_category_names(domain: str) -> list[str]:
    """Return the category names registered under *domain* ([] if unknown)."""
    # Iterating the "categories" dict yields keys in declaration order.
    return list(DOMAINS.get(domain, {}).get("categories", {}))
|
||||
|
|
@ -1,136 +0,0 @@
|
|||
"""
|
||||
ElementClassifier -- classify pantry items into culinary element tags.
|
||||
|
||||
Lookup order:
|
||||
1. ingredient_profiles table (pre-computed from USDA FDC)
|
||||
2. Keyword heuristic fallback (for unlisted ingredients)
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
from dataclasses import dataclass, field
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from app.db.store import Store
|
||||
|
||||
# All valid ingredient-level element labels (Method is recipe-level, not ingredient-level)
ELEMENTS = frozenset({
    "Seasoning", "Richness", "Brightness", "Depth",
    "Aroma", "Structure", "Texture",
})

# Ordered fallback rules for ingredients missing from ingredient_profiles.
# Each entry is (substring keywords, element label); an ingredient name that
# contains any keyword is tagged with that element. Order here fixes the order
# of elements in the resulting profile. Stems like "anchov" deliberately match
# both "anchovy" and "anchovies"; some keywords (miso, soy, anchov) appear
# under more than one element on purpose.
_HEURISTIC: list[tuple[list[str], str]] = [
    (["vinegar", "lemon", "lime", "citrus", "wine", "yogurt", "kefir",
      "buttermilk", "tomato", "tamarind"], "Brightness"),
    (["oil", "butter", "cream", "lard", "fat", "avocado", "coconut milk",
      "ghee", "shortening", "crisco"], "Richness"),
    (["salt", "soy", "miso", "tamari", "fish sauce", "worcestershire",
      "anchov", "capers", "olive", "brine"], "Seasoning"),
    (["mushroom", "parmesan", "miso", "nutritional yeast", "bouillon",
      "broth", "umami", "anchov", "dried tomato", "soy"], "Depth"),
    (["garlic", "onion", "shallot", "herb", "basil", "oregano", "thyme",
      "rosemary", "spice", "cumin", "coriander", "paprika", "chili",
      "ginger", "cinnamon", "pepper", "cilantro", "dill", "fennel",
      "cardamom", "turmeric", "smoke"], "Aroma"),
    (["flour", "starch", "cornstarch", "arrowroot", "egg", "gelatin",
      "agar", "breadcrumb", "panko", "roux"], "Structure"),
    (["nut", "seed", "cracker", "crisp", "wafer", "chip", "crouton",
      "granola", "tofu", "tempeh"], "Texture"),
]
|
||||
|
||||
|
||||
def _safe_json_list(val) -> list:
|
||||
if isinstance(val, list):
|
||||
return val
|
||||
if isinstance(val, str):
|
||||
try:
|
||||
return json.loads(val)
|
||||
except Exception:
|
||||
return []
|
||||
return []
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class IngredientProfile:
    """Element/nutrition profile for a single pantry ingredient.

    Built either from an ingredient_profiles DB row (source="db") or from
    the keyword-heuristic fallback (source="heuristic"); heuristic profiles
    carry only name/elements and leave every other field at its default.
    Field order is part of the public interface (positional construction).
    """
    name: str                # normalized (lowercased, stripped) ingredient name
    elements: list[str]      # subset of ELEMENTS labels this ingredient provides
    fat_pct: float = 0.0               # presumably percent by mass — TODO confirm units
    fat_saturated_pct: float = 0.0     # saturated fraction, same units as fat_pct
    moisture_pct: float = 0.0
    protein_pct: float = 0.0
    starch_pct: float = 0.0
    binding_score: int = 0             # NOTE(review): scale/range not defined here — verify
    glutamate_mg: float = 0.0          # presumably mg per 100 g — TODO confirm
    ph_estimate: float | None = None   # None = unknown pH
    flavor_molecule_ids: list[str] = field(default_factory=list)
    heat_stable: bool = True
    add_timing: str = "any"            # when to add during cooking; "any" = unconstrained
    acid_type: str | None = None       # None = not an acid source
    sodium_mg_per_100g: float = 0.0
    is_fermented: bool = False
    texture_profile: str = "neutral"
    smoke_point_c: float | None = None  # None = not applicable / unknown
    is_emulsifier: bool = False
    source: str = "heuristic"          # "db" when loaded from ingredient_profiles
|
||||
|
||||
|
||||
class ElementClassifier:
    """Classify pantry ingredients into culinary element tags.

    Prefers the pre-computed ingredient_profiles table; ingredients not
    listed there fall back to the _HEURISTIC keyword rules.
    """

    def __init__(self, store: "Store") -> None:
        self._store = store

    def classify(self, ingredient_name: str) -> IngredientProfile:
        """Return the element profile for a single ingredient name."""
        normalized = ingredient_name.lower().strip()
        if not normalized:
            return IngredientProfile(name="", elements=[], source="heuristic")
        prefix = self._store._cp
        row = self._store._fetch_one(
            f"SELECT * FROM {prefix}ingredient_profiles WHERE name = ?",
            (normalized,),
        )
        if not row:
            return self._heuristic_profile(normalized)
        return self._row_to_profile(row)

    def classify_batch(self, names: list[str]) -> list[IngredientProfile]:
        """Classify every name in *names*, preserving order."""
        return list(map(self.classify, names))

    def identify_gaps(self, profiles: list[IngredientProfile]) -> list[str]:
        """Return element names with no coverage across *profiles*, sorted."""
        covered = {
            element
            for profile in profiles
            for element in profile.elements
        }
        return sorted(ELEMENTS - covered)

    def _row_to_profile(self, row: dict) -> IngredientProfile:
        """Materialize an IngredientProfile from an ingredient_profiles row."""

        def numeric(key, default=0.0):
            # NULL (or missing) columns fall back to the given default.
            return row.get(key) or default

        return IngredientProfile(
            name=row["name"],
            elements=_safe_json_list(row.get("elements")),
            fat_pct=numeric("fat_pct"),
            fat_saturated_pct=numeric("fat_saturated_pct"),
            moisture_pct=numeric("moisture_pct"),
            protein_pct=numeric("protein_pct"),
            starch_pct=numeric("starch_pct"),
            binding_score=numeric("binding_score", 0),
            glutamate_mg=numeric("glutamate_mg"),
            ph_estimate=row.get("ph_estimate"),
            flavor_molecule_ids=_safe_json_list(row.get("flavor_molecule_ids")),
            heat_stable=bool(row.get("heat_stable", 1)),
            add_timing=row.get("add_timing") or "any",
            acid_type=row.get("acid_type"),
            sodium_mg_per_100g=numeric("sodium_mg_per_100g"),
            is_fermented=bool(row.get("is_fermented", 0)),
            texture_profile=row.get("texture_profile") or "neutral",
            smoke_point_c=row.get("smoke_point_c"),
            is_emulsifier=bool(row.get("is_emulsifier", 0)),
            source="db",
        )

    def _heuristic_profile(self, name: str) -> IngredientProfile:
        """Tag *name* via substring keywords; element order follows _HEURISTIC."""
        matched: list[str] = []
        for keywords, element in _HEURISTIC:
            if element in matched:
                continue
            if any(keyword in name for keyword in keywords):
                matched.append(element)
        return IngredientProfile(name=name, elements=matched, source="heuristic")
|
||||
|
|
@ -1,82 +0,0 @@
|
|||
"""
|
||||
GroceryLinkBuilder — affiliate deeplinks for missing ingredient grocery lists.
|
||||
|
||||
Delegates URL wrapping to circuitforge_core.affiliates.wrap_url, which handles
|
||||
the full resolution chain: opt-out → BYOK id → CF env var → plain URL.
|
||||
|
||||
Registered programs (via cf-core):
|
||||
amazon — Amazon Associates (env: AMAZON_ASSOCIATES_TAG)
|
||||
instacart — Instacart (env: INSTACART_AFFILIATE_ID)
|
||||
|
||||
Walmart is kept inline until cf-core adds Impact network support:
|
||||
env: WALMART_AFFILIATE_ID
|
||||
|
||||
Links are always generated (plain URLs are useful even without affiliate IDs).
|
||||
Walmart links only appear when WALMART_AFFILIATE_ID is set.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import os
|
||||
from urllib.parse import quote_plus
|
||||
|
||||
from circuitforge_core.affiliates import wrap_url
|
||||
|
||||
from app.models.schemas.recipe import GroceryLink
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _amazon_fresh_link(ingredient: str) -> GroceryLink:
    """Return an Amazon Fresh search deeplink for *ingredient*, affiliate-wrapped."""
    search_url = "https://www.amazon.com/s?k=" + quote_plus(ingredient) + "&i=amazonfresh"
    return GroceryLink(
        ingredient=ingredient,
        retailer="Amazon Fresh",
        url=wrap_url(search_url, "amazon"),
    )
|
||||
|
||||
|
||||
def _instacart_link(ingredient: str) -> GroceryLink:
    """Return an Instacart store-search deeplink for *ingredient*, affiliate-wrapped."""
    search_url = "https://www.instacart.com/store/s?k=" + quote_plus(ingredient)
    return GroceryLink(
        ingredient=ingredient,
        retailer="Instacart",
        url=wrap_url(search_url, "instacart"),
    )
|
||||
|
||||
|
||||
def _walmart_link(ingredient: str, affiliate_id: str) -> GroceryLink:
    """Return a Walmart Grocery deeplink routed through the Impact redirect.

    Walmart's Impact network carries the affiliate ID in the redirect path
    rather than as a query parameter, so wrap_url is not used here.
    """
    query = quote_plus(ingredient)
    deeplink = (
        f"https://goto.walmart.com/c/{affiliate_id}/walmart"
        f"?u=https://www.walmart.com/search?q={query}"
    )
    return GroceryLink(ingredient=ingredient, retailer="Walmart Grocery", url=deeplink)
|
||||
|
||||
|
||||
class GroceryLinkBuilder:
    """Builds retailer deeplinks for missing-ingredient grocery lists."""

    def __init__(self, tier: str = "free", has_byok: bool = False) -> None:
        # tier is stored for callers; has_byok is accepted for interface
        # parity but not read here (wrap_url handles BYOK resolution).
        self._tier = tier
        self._walmart_id = os.environ.get("WALMART_AFFILIATE_ID", "").strip()

    def build_links(self, ingredient: str) -> list[GroceryLink]:
        """Build grocery deeplinks for a single ingredient.

        Amazon Fresh and Instacart links are always included; wrap_url handles
        affiliate ID injection (or returns a plain URL if none is configured).
        Walmart requires WALMART_AFFILIATE_ID to be set (Impact network uses a
        path-based redirect that doesn't degrade cleanly to a plain URL).
        """
        if not ingredient.strip():
            return []

        results = [
            _amazon_fresh_link(ingredient),
            _instacart_link(ingredient),
        ]
        if self._walmart_id:
            results.append(_walmart_link(ingredient, self._walmart_id))
        return results

    def build_all(self, ingredients: list[str]) -> list[GroceryLink]:
        """Build links for a list of ingredients."""
        collected: list[GroceryLink] = []
        for name in ingredients:
            collected.extend(self.build_links(name))
        return collected
|
||||
|
|
@ -1,352 +0,0 @@
|
|||
"""LLM-driven recipe generator for Levels 3 and 4."""
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
from contextlib import nullcontext
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from openai import OpenAI
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from app.db.store import Store
|
||||
|
||||
from app.models.schemas.recipe import RecipeRequest, RecipeResult, RecipeSuggestion
|
||||
from app.services.recipe.element_classifier import IngredientProfile
|
||||
from app.services.recipe.style_adapter import StyleAdapter
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _filter_allergies(pantry_items: list[str], allergies: list[str]) -> list[str]:
|
||||
"""Return pantry items with allergy matches removed (bidirectional substring)."""
|
||||
if not allergies:
|
||||
return list(pantry_items)
|
||||
return [
|
||||
item for item in pantry_items
|
||||
if not any(
|
||||
allergy.lower() in item.lower() or item.lower() in allergy.lower()
|
||||
for allergy in allergies
|
||||
)
|
||||
]
|
||||
|
||||
|
||||
class LLMRecipeGenerator:
    """Generates recipes via LLM for creativity Levels 3 and 4.

    Level 3 builds an element-scaffold prompt from classified pantry
    profiles; Level 4 builds a minimal wildcard prompt. LLM transport is
    either a cf-orch allocation (when CF_ORCH_URL is set) or LLMRouter.
    """

    def __init__(self, store: "Store") -> None:
        self._store = store
        self._style_adapter = StyleAdapter()

    def build_level3_prompt(
        self,
        req: RecipeRequest,
        profiles: list[IngredientProfile],
        gaps: list[str],
    ) -> str:
        """Build a structured element-scaffold prompt for Level 3."""
        allergy_list = req.allergies
        safe_pantry = _filter_allergies(req.pantry_items, allergy_list)

        # Order-preserving de-duplication of the elements covered by the pantry.
        covered_elements: list[str] = []
        for profile in profiles:
            for element in profile.elements:
                if element not in covered_elements:
                    covered_elements.append(element)

        lines: list[str] = [
            "You are a creative chef. Generate a recipe using the ingredients below.",
            "IMPORTANT: When you use a pantry item, list it in Ingredients using its exact name from the pantry list. Do not add adjectives, quantities, or cooking states (e.g. use 'butter', not 'unsalted butter' or '2 tbsp butter').",
            "IMPORTANT: Only use pantry items that make culinary sense for the dish. Do NOT force flavoured/sweetened items (vanilla yoghurt, fruit yoghurt, jam, dessert sauces, flavoured syrups) into savoury dishes. Plain yoghurt, plain cream, and plain dairy are fine in savoury cooking.",
            "IMPORTANT: Do not default to the same ingredient repeatedly across dishes. If a pantry item does not genuinely improve this specific dish, leave it out.",
            "",
            f"Pantry items: {', '.join(safe_pantry)}",
        ]

        if req.constraints:
            lines.append(f"Dietary constraints: {', '.join(req.constraints)}")

        if allergy_list:
            lines.append(f"IMPORTANT — must NOT contain: {', '.join(allergy_list)}")

        lines.append("")
        lines.append(f"Covered culinary elements: {', '.join(covered_elements) or 'none'}")

        if gaps:
            lines.append(
                f"Missing elements to address: {', '.join(gaps)}. "
                "Incorporate ingredients or techniques to fill these gaps."
            )

        if req.style_id:
            template = self._style_adapter.get(req.style_id)
            if template:
                lines.append(f"Cuisine style: {template.name}")
                if template.aromatics:
                    lines.append(f"Preferred aromatics: {', '.join(template.aromatics[:4])}")

        unit_line = (
            "Use metric units (grams, ml, Celsius) for all quantities and temperatures."
            if req.unit_system == "metric"
            else "Use imperial units (oz, cups, Fahrenheit) for all quantities and temperatures."
        )
        # Output-format contract — _parse_response depends on these exact labels.
        lines += [
            unit_line,
            "",
            "Reply using EXACTLY this plain-text format — no markdown, no bold, no extra commentary:",
            "Title: <name of the dish>",
            "Ingredients: <comma-separated list>",
            "Directions:",
            "1. <first step>",
            "2. <second step>",
            "3. <continue for each step>",
            "Notes: <optional tips>",
        ]

        return "\n".join(lines)

    def build_level4_prompt(
        self,
        req: RecipeRequest,
    ) -> str:
        """Build a minimal wildcard prompt for Level 4."""
        allergy_list = req.allergies
        safe_pantry = _filter_allergies(req.pantry_items, allergy_list)

        lines: list[str] = [
            "Surprise me with a creative, unexpected recipe.",
            "Only use ingredients that make culinary sense together. Do not force flavoured/sweetened items (vanilla yoghurt, flavoured syrups, jam) into savoury dishes.",
            f"Ingredients available: {', '.join(safe_pantry)}",
        ]

        if req.constraints:
            lines.append(f"Constraints: {', '.join(req.constraints)}")

        if allergy_list:
            lines.append(f"Must NOT contain: {', '.join(allergy_list)}")

        unit_line = (
            "Use metric units (grams, ml, Celsius) for all quantities and temperatures."
            if req.unit_system == "metric"
            else "Use imperial units (oz, cups, Fahrenheit) for all quantities and temperatures."
        )
        # Same output-format contract as Level 3 — consumed by _parse_response.
        lines += [
            "Treat any mystery ingredient as a wildcard — use your imagination.",
            unit_line,
            "Reply using EXACTLY this plain-text format — no markdown, no bold:",
            "Title: <name of the dish>",
            "Ingredients: <comma-separated list>",
            "Directions:",
            "1. <first step>",
            "2. <second step>",
            "Notes: <optional tips>",
        ]

        return "\n".join(lines)

    # Allocation parameters passed to CFOrchClient.allocate().
    _SERVICE_TYPE = "cf-text"
    _TTL_S = 300.0  # lease TTL forwarded as ttl_s
    _CALLER = "kiwi-recipe"

    def _get_llm_context(self):
        """Return a sync context manager that yields an Allocation or None.

        When CF_ORCH_URL is set, uses CFOrchClient to acquire a cf-text allocation
        (which handles service lifecycle and VRAM). Falls back to nullcontext(None)
        when the env var is absent or CFOrchClient raises on construction.
        """
        cf_orch_url = os.environ.get("CF_ORCH_URL")
        if cf_orch_url:
            try:
                # Imported lazily so the dependency is only needed when cf-orch
                # is actually configured.
                from circuitforge_orch.client import CFOrchClient
                client = CFOrchClient(cf_orch_url)
                return client.allocate(
                    service=self._SERVICE_TYPE,
                    ttl_s=self._TTL_S,
                    caller=self._CALLER,
                )
            except Exception as exc:
                logger.debug("CFOrchClient init failed, falling back to direct URL: %s", exc)
        return nullcontext(None)

    def _call_llm(self, prompt: str) -> str:
        """Call the LLM, using CFOrchClient allocation when CF_ORCH_URL is set.

        With CF_ORCH_URL set: acquires a vLLM allocation via CFOrchClient and
        calls the OpenAI-compatible API directly against the allocated service URL.
        Allocation failure falls through to LLMRouter rather than silently returning "".
        Without CF_ORCH_URL: uses LLMRouter directly.
        """
        # The context manager protocol is driven manually (not `with`) because
        # the error handling differs depending on whether __enter__ itself raised.
        ctx = self._get_llm_context()
        alloc = None
        try:
            alloc = ctx.__enter__()
        except Exception as exc:
            msg = str(exc)
            # 429 = coordinator at capacity (all nodes at max_concurrent limit).
            # Don't fall back to LLMRouter — it's also overloaded and the slow
            # fallback causes nginx 504s. Return "" fast so the caller degrades
            # gracefully (empty recipe result) rather than timing out.
            if "429" in msg or "max_concurrent" in msg.lower():
                logger.info("cf-orch at capacity — returning empty result (graceful degradation)")
                if ctx is not None:
                    try:
                        ctx.__exit__(None, None, None)
                    except Exception:
                        pass
                return ""
            logger.debug("cf-orch allocation failed, falling back to LLMRouter: %s", exc)
            ctx = None  # __enter__ raised — do not call __exit__

        try:
            if alloc is not None:
                # Direct OpenAI-compatible call against the allocated vLLM URL.
                base_url = alloc.url.rstrip("/") + "/v1"
                client = OpenAI(base_url=base_url, api_key="any")
                model = alloc.model or "__auto__"
                if model == "__auto__":
                    # Ask the server which model it is serving.
                    model = client.models.list().data[0].id
                resp = client.chat.completions.create(
                    model=model,
                    messages=[{"role": "user", "content": prompt}],
                )
                return resp.choices[0].message.content or ""
            else:
                from circuitforge_core.llm.router import LLMRouter
                return LLMRouter().complete(prompt)
        except Exception as exc:
            logger.error("LLM call failed: %s", exc)
            return ""
        finally:
            # Release the allocation if we successfully entered the context.
            if ctx is not None:
                try:
                    ctx.__exit__(None, None, None)
                except Exception:
                    pass

    # Strips markdown bold/italic markers so "**Directions:**" parses like "Directions:"
    _MD_BOLD = re.compile(r"\*{1,2}([^*]+)\*{1,2}")

    def _strip_md(self, text: str) -> str:
        """Remove markdown bold/italic markers and trim surrounding whitespace."""
        return self._MD_BOLD.sub(r"\1", text).strip()

    def _parse_response(self, response: str) -> dict[str, str | list[str]]:
        """Parse LLM response text into structured recipe fields.

        Handles both plain-text and markdown-formatted responses. Directions are
        preserved as newline-separated text so the caller can split on step numbers.
        """
        result: dict[str, str | list[str]] = {
            "title": "",
            "ingredients": [],
            "directions": "",
            "notes": "",
        }

        current_key: str | None = None
        buffer: list[str] = []

        def _flush(key: str | None, buf: list[str]) -> None:
            # Commit the accumulated lines of the previous section into result.
            if key is None or not buf:
                return
            if key == "directions":
                result["directions"] = "\n".join(buf)
            elif key == "ingredients":
                text = " ".join(buf)
                result["ingredients"] = [i.strip() for i in text.split(",") if i.strip()]
            else:
                result[key] = " ".join(buf).strip()

        for raw_line in response.splitlines():
            line = self._strip_md(raw_line)
            lower = line.lower()
            if lower.startswith("title:"):
                _flush(current_key, buffer)
                current_key, buffer = "title", [line.split(":", 1)[1].strip()]
            elif lower.startswith("ingredients:"):
                _flush(current_key, buffer)
                current_key, buffer = "ingredients", [line.split(":", 1)[1].strip()]
            elif lower.startswith("directions:"):
                _flush(current_key, buffer)
                rest = line.split(":", 1)[1].strip()
                current_key, buffer = "directions", ([rest] if rest else [])
            elif lower.startswith("notes:"):
                _flush(current_key, buffer)
                current_key, buffer = "notes", [line.split(":", 1)[1].strip()]
            elif current_key and line.strip():
                buffer.append(line.strip())
            elif current_key is None and line.strip() and ":" not in line:
                # Before any section header: a 2-10 word colon-free line is the dish name
                words = line.split()
                if 2 <= len(words) <= 10 and not result["title"]:
                    result["title"] = line.strip()

        _flush(current_key, buffer)
        return result

    def generate(
        self,
        req: RecipeRequest,
        profiles: list[IngredientProfile],
        gaps: list[str],
    ) -> RecipeResult:
        """Generate a recipe via LLM and return a RecipeResult."""
        if req.level == 4:
            prompt = self.build_level4_prompt(req)
        else:
            prompt = self.build_level3_prompt(req, profiles, gaps)

        response = self._call_llm(prompt)

        # Empty response = transport failure or capacity degradation; surface
        # an empty (but well-formed) result.
        if not response:
            return RecipeResult(suggestions=[], element_gaps=gaps)

        parsed = self._parse_response(response)

        raw_directions = parsed.get("directions", "")
        if isinstance(raw_directions, str):
            # Split on newlines; strip leading step numbers ("1.", "2.", "- ", "* ")
            _step_prefix = re.compile(r"^\s*(?:\d+[.)]\s*|[-*]\s+)")
            directions_list = [
                _step_prefix.sub("", s).strip()
                for s in raw_directions.splitlines()
                if s.strip()
            ]
        else:
            directions_list = list(raw_directions)
        raw_notes = parsed.get("notes", "")
        notes_str: str = raw_notes if isinstance(raw_notes, str) else ""

        all_ingredients: list[str] = list(parsed.get("ingredients", []))
        pantry_set = {item.lower() for item in (req.pantry_items or [])}

        # Strip leading quantities/units (e.g. "2 cups rice" → "rice") before
        # checking against pantry, since LLMs return formatted ingredient strings.
        _qty_re = re.compile(
            r"^\s*[\d½¼¾⅓⅔]+[\s/\-]*"  # leading digits or fractions
            r"(?:cup|cups|tbsp|tsp|tablespoon|teaspoon|oz|lb|lbs|g|kg|"
            r"can|cans|clove|cloves|bunch|package|pkg|slice|slices|"
            r"piece|pieces|pinch|dash|handful|head|heads|large|small|medium"
            r")s?\b[,\s]*",
            re.IGNORECASE,
        )
        missing = []
        for ing in all_ingredients:
            bare = _qty_re.sub("", ing).strip().lower()
            # An ingredient is "missing" only if neither its bare nor its full
            # form matches a pantry item exactly (case-insensitive).
            if bare not in pantry_set and ing.lower() not in pantry_set:
                missing.append(bare or ing)

        suggestion = RecipeSuggestion(
            id=0,  # LLM suggestions are not corpus rows, so no real id
            title=parsed.get("title") or "LLM Recipe",
            match_count=len(req.pantry_items),
            element_coverage={},
            missing_ingredients=missing,
            directions=directions_list,
            notes=notes_str,
            level=req.level,
            is_wildcard=(req.level == 4),
        )

        return RecipeResult(
            suggestions=[suggestion],
            element_gaps=gaps,
        )
|
||||
|
|
@ -1,160 +0,0 @@
|
|||
"""
|
||||
Shopping locale configuration.
|
||||
|
||||
Maps a locale key to Amazon domain, currency metadata, and retailer availability.
|
||||
Instacart and Walmart are US/CA-only; all other locales get Amazon only.
|
||||
Amazon Fresh (&i=amazonfresh) is US-only — international domains use the general
|
||||
grocery department (&rh=n:16310101) where available, plain search elsewhere.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import TypedDict
|
||||
|
||||
|
||||
class LocaleConfig(TypedDict):
    """Per-locale shopping configuration; see the LOCALES registry."""

    amazon_domain: str  # e.g. "amazon.co.uk"
    amazon_grocery_dept: str  # URL fragment for grocery department on this locale's site
    currency_code: str  # ISO 4217 code, e.g. "USD"
    currency_symbol: str
    instacart: bool  # Instacart available in this locale
    walmart: bool  # Walmart available in this locale
|
||||
|
||||
# Registry keyed by lowercase two-letter country code.
LOCALES: dict[str, LocaleConfig] = {
    "us": {
        "amazon_domain": "amazon.com",
        "amazon_grocery_dept": "i=amazonfresh",
        "currency_code": "USD",
        "currency_symbol": "$",
        "instacart": True,
        "walmart": True,
    },
    "ca": {
        "amazon_domain": "amazon.ca",
        "amazon_grocery_dept": "rh=n:6967215011",  # Grocery dept on .ca # gitleaks:allow
        "currency_code": "CAD",
        "currency_symbol": "CA$",
        "instacart": True,
        "walmart": False,
    },
    "gb": {
        "amazon_domain": "amazon.co.uk",
        "amazon_grocery_dept": "rh=n:340831031",  # Grocery dept on .co.uk
        "currency_code": "GBP",
        "currency_symbol": "£",
        "instacart": False,
        "walmart": False,
    },
    "au": {
        "amazon_domain": "amazon.com.au",
        "amazon_grocery_dept": "rh=n:5765081051",  # Pantry/grocery on .com.au # gitleaks:allow
        "currency_code": "AUD",
        "currency_symbol": "A$",
        "instacart": False,
        "walmart": False,
    },
    "nz": {
        # NZ has no Amazon storefront — route to .com.au as nearest option
        "amazon_domain": "amazon.com.au",
        "amazon_grocery_dept": "rh=n:5765081051",  # gitleaks:allow
        "currency_code": "NZD",
        "currency_symbol": "NZ$",
        "instacart": False,
        "walmart": False,
    },
    "de": {
        "amazon_domain": "amazon.de",
        "amazon_grocery_dept": "rh=n:340843031",  # Lebensmittel & Getränke
        "currency_code": "EUR",
        "currency_symbol": "€",
        "instacart": False,
        "walmart": False,
    },
    "fr": {
        "amazon_domain": "amazon.fr",
        "amazon_grocery_dept": "rh=n:197858031",
        "currency_code": "EUR",
        "currency_symbol": "€",
        "instacart": False,
        "walmart": False,
    },
    "it": {
        "amazon_domain": "amazon.it",
        "amazon_grocery_dept": "rh=n:525616031",
        "currency_code": "EUR",
        "currency_symbol": "€",
        "instacart": False,
        "walmart": False,
    },
    "es": {
        "amazon_domain": "amazon.es",
        "amazon_grocery_dept": "rh=n:599364031",
        "currency_code": "EUR",
        "currency_symbol": "€",
        "instacart": False,
        "walmart": False,
    },
    "nl": {
        "amazon_domain": "amazon.nl",
        "amazon_grocery_dept": "rh=n:16584827031",
        "currency_code": "EUR",
        "currency_symbol": "€",
        "instacart": False,
        "walmart": False,
    },
    "se": {
        "amazon_domain": "amazon.se",
        "amazon_grocery_dept": "rh=n:20741393031",
        "currency_code": "SEK",
        "currency_symbol": "kr",
        "instacart": False,
        "walmart": False,
    },
    "jp": {
        "amazon_domain": "amazon.co.jp",
        "amazon_grocery_dept": "rh=n:2246283051",  # gitleaks:allow
        "currency_code": "JPY",
        "currency_symbol": "¥",
        "instacart": False,
        "walmart": False,
    },
    "in": {
        "amazon_domain": "amazon.in",
        "amazon_grocery_dept": "rh=n:2454178031",  # gitleaks:allow
        "currency_code": "INR",
        "currency_symbol": "₹",
        "instacart": False,
        "walmart": False,
    },
    "mx": {
        "amazon_domain": "amazon.com.mx",
        "amazon_grocery_dept": "rh=n:10737659011",
        "currency_code": "MXN",
        "currency_symbol": "MX$",
        "instacart": False,
        "walmart": False,
    },
    "br": {
        "amazon_domain": "amazon.com.br",
        "amazon_grocery_dept": "rh=n:17878420011",
        "currency_code": "BRL",
        "currency_symbol": "R$",
        "instacart": False,
        "walmart": False,
    },
    "sg": {
        "amazon_domain": "amazon.sg",
        "amazon_grocery_dept": "rh=n:6981647051",  # gitleaks:allow
        "currency_code": "SGD",
        "currency_symbol": "S$",
        "instacart": False,
        "walmart": False,
    },
}
|
||||
|
||||
DEFAULT_LOCALE = "us"


def get_locale(key: str) -> LocaleConfig:
    """Return the locale config for *key*; unknown keys fall back to US."""
    try:
        return LOCALES[key]
    except KeyError:
        return LOCALES[DEFAULT_LOCALE]
|
||||
|
|
@ -1,878 +0,0 @@
|
|||
"""
|
||||
RecipeEngine — orchestrates the four creativity levels.
|
||||
|
||||
Level 1: corpus lookup ranked by ingredient match + expiry urgency
|
||||
Level 2: Level 1 + deterministic substitution swaps
|
||||
Level 3: element scaffold → LLM constrained prompt (see llm_recipe.py)
|
||||
Level 4: wildcard LLM (see llm_recipe.py)
|
||||
|
||||
Amendments:
|
||||
- max_missing: filter to recipes missing ≤ N pantry items
|
||||
- hard_day_mode: filter to easy-method recipes only
|
||||
- grocery_list: aggregated missing ingredients across suggestions
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import re
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from app.db.store import Store
|
||||
|
||||
from app.models.schemas.recipe import GroceryLink, NutritionPanel, RecipeRequest, RecipeResult, RecipeSuggestion, SwapCandidate
|
||||
from app.services.recipe.element_classifier import ElementClassifier
|
||||
from app.services.recipe.grocery_links import GroceryLinkBuilder
|
||||
from app.services.recipe.substitution_engine import SubstitutionEngine
|
||||
|
||||
# Free-tier daily maximum for leftover results (per the name; usage is at
# call sites outside this chunk — confirm there).
_LEFTOVER_DAILY_MAX_FREE = 5

# Words that carry no ingredient-identity signal — stripped before overlap scoring
_SWAP_STOPWORDS = frozenset({
    "a", "an", "the", "of", "in", "for", "with", "and", "or",
    "to", "from", "at", "by", "as", "on",
})
|
||||
|
||||
# Maps product-label substrings to recipe-corpus canonical terms.
# Kept in sync with Store._FTS_SYNONYMS — both must agree on canonical names.
# Used to expand pantry_set so single-word recipe ingredients can match
# multi-word product names (e.g. "hamburger" satisfied by "burger patties").
# Keys are lowercase substrings matched against pantry labels (see
# _expand_pantry_set); truncated keys like "burger patt" deliberately cover
# both singular and plural label forms.
_PANTRY_LABEL_SYNONYMS: dict[str, str] = {
    "burger patt": "hamburger",
    "beef patt": "hamburger",
    "ground beef": "hamburger",
    "ground chuck": "hamburger",
    "ground round": "hamburger",
    "mince": "hamburger",
    "veggie burger": "hamburger",
    "beyond burger": "hamburger",
    "impossible burger": "hamburger",
    "plant burger": "hamburger",
    "chicken patt": "chicken patty",
    "kielbasa": "sausage",
    "bratwurst": "sausage",
    "frankfurter": "hotdog",
    "wiener": "hotdog",
    "chicken breast": "chicken",
    "chicken thigh": "chicken",
    "chicken drumstick": "chicken",
    "chicken wing": "chicken",
    "rotisserie chicken": "chicken",
    "chicken tender": "chicken",
    "chicken strip": "chicken",
    "chicken piece": "chicken",
    "fake chicken": "chicken",
    "plant chicken": "chicken",
    "vegan chicken": "chicken",
    "daring": "chicken",
    "gardein chick": "chicken",
    "quorn chick": "chicken",
    "chick'n": "chicken",
    "chikn": "chicken",
    "not-chicken": "chicken",
    "no-chicken": "chicken",
    # Plant-based beef subs → broad "beef" (strips ≠ ground; texture matters)
    "not-beef": "beef",
    "no-beef": "beef",
    "plant beef": "beef",
    "vegan beef": "beef",
    # Plant-based pork subs
    "not-pork": "pork",
    "no-pork": "pork",
    "plant pork": "pork",
    "vegan pork": "pork",
    "omnipork": "pork",
    "omni pork": "pork",
    # Generic alt-meat catch-alls → broad "beef"
    "fake meat": "beef",
    "plant meat": "beef",
    "vegan meat": "beef",
    "meat-free": "beef",
    "meatless": "beef",
    "pork chop": "pork",
    "pork loin": "pork",
    "pork tenderloin": "pork",
    "marinara": "tomato sauce",
    "pasta sauce": "tomato sauce",
    "spaghetti sauce": "tomato sauce",
    "pizza sauce": "tomato sauce",
    "macaroni": "pasta",
    "noodles": "pasta",
    "spaghetti": "pasta",
    "penne": "pasta",
    "fettuccine": "pasta",
    "rigatoni": "pasta",
    "linguine": "pasta",
    "rotini": "pasta",
    "farfalle": "pasta",
    "shredded cheese": "cheese",
    "sliced cheese": "cheese",
    "american cheese": "cheese",
    "cheddar": "cheese",
    "mozzarella": "cheese",
    "heavy cream": "cream",
    "whipping cream": "cream",
    "half and half": "cream",
    "burger bun": "buns",
    "hamburger bun": "buns",
    "hot dog bun": "buns",
    "bread roll": "buns",
    "dinner roll": "buns",
    # Tortillas / wraps — assembly dishes (burritos, tacos, quesadillas)
    "flour tortilla": "tortillas",
    "corn tortilla": "tortillas",
    "tortilla wrap": "tortillas",
    "soft taco shell": "tortillas",
    "taco shell": "taco shells",
    "pita bread": "pita",
    "flatbread": "flatbread",
    # Canned beans — extremely interchangeable in assembly dishes
    "black bean": "beans",
    "pinto bean": "beans",
    "kidney bean": "beans",
    "refried bean": "beans",
    "chickpea": "beans",
    "garbanzo": "beans",
    # Rice variants
    "white rice": "rice",
    "brown rice": "rice",
    "jasmine rice": "rice",
    "basmati rice": "rice",
    "instant rice": "rice",
    "microwavable rice": "rice",
    # Salsa / hot sauce
    "hot sauce": "salsa",
    "taco sauce": "salsa",
    "enchilada sauce": "salsa",
    # Sour cream / Greek yogurt — functional substitutes
    "greek yogurt": "sour cream",
    # Frozen/prepackaged meal token extraction — handled by individual token
    # fallback in _normalize_for_fts; these are the most common single-serve meal types
    "lean cuisine": "casserole",
    "stouffer": "casserole",
    "healthy choice": "casserole",
    "marie callender": "casserole",
}
|
||||
|
||||
|
||||
# When a pantry item is in a secondary state (e.g. bread → "stale"), expand
# the pantry set with terms that recipe ingredients commonly use to describe
# that state. This lets "stale bread" in a recipe ingredient match a pantry
# entry that is simply called "Bread" but is past its nominal use-by date.
# Each key is (category_in_SECONDARY_WINDOW, label_returned_by_secondary_state).
# Values are additional strings added to the pantry set for FTS coverage.
_SECONDARY_STATE_SYNONYMS: dict[tuple[str, str], list[str]] = {
    ("bread", "stale"): ["stale bread", "day-old bread", "old bread", "dried bread"],
    ("bakery", "day-old"): ["day-old bread", "stale bread", "stale pastry"],
    ("bananas", "overripe"): ["overripe bananas", "very ripe banana", "ripe bananas", "mashed banana"],
    ("milk", "sour"): ["sour milk", "slightly sour milk", "buttermilk"],
    ("dairy", "sour"): ["sour milk", "slightly sour milk"],
    ("cheese", "well-aged"): ["parmesan rind", "cheese rind", "aged cheese"],
    ("rice", "day-old"): ["day-old rice", "leftover rice", "cold rice", "cooked rice"],
    ("tortillas", "stale"): ["stale tortillas", "dried tortillas", "day-old tortillas"],
}
|
||||
|
||||
|
||||
# Matches leading quantity/unit prefixes in recipe ingredient strings,
# e.g. "2 cups flour" → "flour", "1/2 c. ketchup" → "ketchup",
# "3 oz. butter" → "butter"
# NOTE: the unit token is required by this pattern — bare counts like
# "3 eggs" are handled by the follow-up r"^\d+\s+" sub in _strip_quantity.
_QUANTITY_PREFIX = re.compile(
    r"^\s*(?:\d+(?:[./]\d+)?\s*)?"  # optional leading number (1, 1/2, 2.5)
    r"(?:to\s+\d+\s*)?"  # optional "to N" range
    r"(?:c\.|cup|cups|tbsp|tsp|oz|lb|lbs|g|kg|ml|l|"
    r"can|cans|pkg|pkg\.|package|slice|slices|clove|cloves|"
    r"small|medium|large|bunch|head|piece|pieces|"
    r"pinch|dash|handful|sprig|sprigs)\s*\b",
    re.IGNORECASE,
)
|
||||
|
||||
|
||||
# Preparation-state words that modify an ingredient without changing what it is.
|
||||
# Stripped from both ends so "melted butter", "butter, melted" both → "butter".
|
||||
_PREP_STATES = re.compile(
|
||||
r"\b(melted|softened|cold|warm|hot|room.temperature|"
|
||||
r"diced|sliced|chopped|minced|grated|shredded|shredded|beaten|whipped|"
|
||||
r"cooked|raw|frozen|canned|dried|dehydrated|marinated|seasoned|"
|
||||
r"roasted|toasted|ground|crushed|pressed|peeled|seeded|pitted|"
|
||||
r"boneless|skinless|trimmed|halved|quartered|julienned|"
|
||||
r"thinly|finely|roughly|coarsely|freshly|lightly|"
|
||||
r"packed|heaping|level|sifted|divided|optional)\b",
|
||||
re.IGNORECASE,
|
||||
)
|
||||
# Trailing comma + optional prep state (e.g. "butter, melted")
|
||||
_TRAILING_PREP = re.compile(r",\s*\w+$")
|
||||
|
||||
|
||||
# Maps prep-state words to human-readable instruction templates.
# {ingredient} is replaced with the actual ingredient name.
# None means the state is passive (frozen, canned) — no note needed.
_PREP_INSTRUCTIONS: dict[str, str | None] = {
    "melted": "Melt the {ingredient} before starting.",
    "softened": "Let the {ingredient} soften to room temperature before using.",
    "room temperature": "Bring the {ingredient} to room temperature before using.",
    "beaten": "Beat the {ingredient} lightly before adding.",
    "whipped": "Whip the {ingredient} until soft peaks form.",
    "sifted": "Sift the {ingredient} before measuring.",
    "toasted": "Toast the {ingredient} in a dry pan until fragrant.",
    "roasted": "Roast the {ingredient} before using.",
    "pressed": "Press the {ingredient} to remove excess moisture.",
    "diced": "Dice the {ingredient} into small pieces.",
    "sliced": "Slice the {ingredient} thinly.",
    "chopped": "Chop the {ingredient} roughly.",
    "minced": "Mince the {ingredient} finely.",
    "grated": "Grate the {ingredient}.",
    "shredded": "Shred the {ingredient}.",
    "ground": "Grind the {ingredient}.",
    "crushed": "Crush the {ingredient}.",
    "peeled": "Peel the {ingredient} before use.",
    "seeded": "Remove seeds from the {ingredient}.",
    "pitted": "Pit the {ingredient} before use.",
    "trimmed": "Trim any excess from the {ingredient}.",
    "julienned": "Cut the {ingredient} into thin matchstick strips.",
    "cooked": "Pre-cook the {ingredient} before adding.",
    # Passive states — ingredient is used as-is, no prep note needed
    "cold": None,
    "warm": None,
    "hot": None,
    "raw": None,
    "frozen": None,
    "canned": None,
    "dried": None,
    "dehydrated": None,
    "marinated": None,
    "seasoned": None,
    "boneless": None,
    "skinless": None,
    "divided": None,
    "optional": None,
    "fresh": None,
    "freshly": None,
    "thinly": None,
    "finely": None,
    "roughly": None,
    "coarsely": None,
    "lightly": None,
    "packed": None,
    "heaping": None,
    "level": None,
}
|
||||
|
||||
# Finds the first actionable prep state in an ingredient string.
# Built from the _PREP_INSTRUCTIONS keys (escaped literally, so multi-word
# keys like "room temperature" match their exact spaced form).
_PREP_STATE_SEARCH = re.compile(
    r"\b(" + "|".join(re.escape(k) for k in _PREP_INSTRUCTIONS) + r")\b",
    re.IGNORECASE,
)
|
||||
|
||||
|
||||
def _strip_quantity(ingredient: str) -> str:
    """Remove leading quantity/unit and preparation-state words from a recipe ingredient.

    e.g. "2 tbsp melted butter" → "butter"
         "butter, melted" → "butter"
         "1/4 cup flour, sifted" → "flour"
    """
    name = _QUANTITY_PREFIX.sub("", ingredient).strip()
    # Drop any remaining leading number (e.g. "3 eggs" → "eggs")
    name = re.sub(r"^\d+\s+", "", name)
    # Remove trailing ", prep_state" first, then prep-state words anywhere
    for cleanup in (_TRAILING_PREP, _PREP_STATES):
        name = cleanup.sub("", name).strip()
    # Collapse any double spaces left behind by the removals
    name = re.sub(r"\s{2,}", " ", name).strip()
    # Fall back to the raw input rather than returning an empty string
    return name or ingredient
|
||||
|
||||
|
||||
def _prep_note_for(ingredient: str) -> str | None:
    """Return a human-readable prep instruction for this ingredient string, or None.

    e.g. "2 tbsp melted butter" → "Melt the butter before starting."
         "onion, diced" → "Dice the onion into small pieces."
         "frozen peas" → None (passive state, no action needed)
    """
    found = _PREP_STATE_SEARCH.search(ingredient)
    if found is None:
        return None
    # Passive states map to None in _PREP_INSTRUCTIONS, so no note is produced.
    template = _PREP_INSTRUCTIONS.get(found.group(1).lower())
    if not template:
        return None
    # The stripped ingredient name becomes the sentence subject.
    return template.format(ingredient=_strip_quantity(ingredient))
|
||||
|
||||
|
||||
def _expand_pantry_set(
    pantry_items: list[str],
    secondary_pantry_items: dict[str, str] | None = None,
) -> set[str]:
    """Return pantry_set expanded with canonical recipe-corpus synonyms.

    For each pantry item, checks _PANTRY_LABEL_SYNONYMS for substring matches
    and adds the canonical form. This lets single-word recipe ingredients
    ("hamburger", "chicken") match product-label pantry entries
    ("burger patties", "rotisserie chicken").

    If secondary_pantry_items is provided (product_name → state label), items
    in a secondary state also receive state-specific synonym expansion so that
    recipe ingredients like "stale bread" or "day-old rice" are matched.

    Args:
        pantry_items: Raw pantry product names.
        secondary_pantry_items: Optional product_name → state-label map.

    Returns:
        Lowercased pantry names plus all applicable synonym expansions.
    """
    # Instantiate ExpirationPredictor lazily: it is only needed for
    # secondary-state expansion, so plain calls skip the import entirely.
    predictor = None

    expanded: set[str] = set()
    for item in pantry_items:
        lower = item.lower().strip()
        expanded.add(lower)
        for pattern, canonical in _PANTRY_LABEL_SYNONYMS.items():
            if pattern in lower:
                expanded.add(canonical)

        # Secondary state expansion — adds terms like "stale bread", "day-old rice"
        if secondary_pantry_items and item in secondary_pantry_items:
            if predictor is None:
                from app.services.expiration_predictor import ExpirationPredictor
                predictor = ExpirationPredictor()
            state_label = secondary_pantry_items[item]
            category = predictor.get_category_from_product(item)
            if category:
                synonyms = _SECONDARY_STATE_SYNONYMS.get((category, state_label), [])
                expanded.update(synonyms)

    return expanded
|
||||
|
||||
|
||||
def _ingredient_in_pantry(ingredient: str, pantry_set: set[str]) -> bool:
    """Return True if the recipe ingredient is satisfied by the pantry.

    Checks three layers in order:
      1. Exact match after quantity stripping
      2. Synonym lookup: ingredient → canonical → in pantry_set
         (handles "ground beef" matched by "burger patties" via shared canonical)
      3. Token subset: all content tokens of the ingredient appear in pantry
         (handles "diced onions" when "onions" is in pantry)
    """
    clean = _strip_quantity(ingredient).lower()

    # Layer 1: direct hit
    if clean in pantry_set:
        return True

    # Layer 2: recipe ingredient maps to a canonical that's in the pantry
    if any(
        pattern in clean and canonical in pantry_set
        for pattern, canonical in _PANTRY_LABEL_SYNONYMS.items()
    ):
        return True

    # Layer 3: every content token appears in the pantry (e.g. "ketchup" in "c. ketchup")
    tokens = [t for t in clean.split() if t not in _SWAP_STOPWORDS and len(t) > 2]
    return bool(tokens) and all(t in pantry_set for t in tokens)
|
||||
|
||||
|
||||
def _content_tokens(text: str) -> frozenset[str]:
    """Lowercased meaningful words of *text* (stopwords and 1-char tokens removed)."""
    words = text.lower().split()
    return frozenset(w for w in words if len(w) > 1 and w not in _SWAP_STOPWORDS)
|
||||
|
||||
|
||||
def _pantry_creative_swap(required: str, pantry_items: set[str]) -> str | None:
    """Return a pantry item that's a plausible creative substitute, or None.

    Requires ≥2 shared content tokens AND ≥50% bidirectional overlap so that
    single-word differences (cream-of-mushroom vs cream-of-potato) qualify while
    single-word ingredients (butter, flour) don't accidentally match supersets
    (peanut butter, bread flour).
    """
    needed = _content_tokens(required)
    # Single-word ingredients must already be in pantry_set — no creative swap.
    if len(needed) < 2:
        return None

    best_item: str | None = None
    best_score = 0.0
    for candidate in pantry_items:
        if candidate.lower() == required.lower():
            continue
        have = _content_tokens(candidate)
        if not have:
            continue
        shared = len(needed & have)
        if shared < 2:
            continue
        # Bidirectional overlap: penalize both supersets and subsets.
        score = min(shared / len(needed), shared / len(have))
        if score >= 0.5 and score > best_score:
            best_score = score
            best_item = candidate
    return best_item
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Functional-category swap table (Level 2 only)
|
||||
# ---------------------------------------------------------------------------
|
||||
# Maps cleaned ingredient names → functional category label. Used as a
|
||||
# fallback when _pantry_creative_swap returns None (which always happens for
|
||||
# single-token ingredients, because that function requires ≥2 shared tokens).
|
||||
# A pantry item that belongs to the same category is offered as a substitute.
|
||||
_FUNCTIONAL_SWAP_CATEGORIES: dict[str, str] = {
|
||||
# Solid fats
|
||||
"butter": "solid_fat",
|
||||
"margarine": "solid_fat",
|
||||
"shortening": "solid_fat",
|
||||
"lard": "solid_fat",
|
||||
"ghee": "solid_fat",
|
||||
# Liquid/neutral cooking oils
|
||||
"oil": "liquid_fat",
|
||||
"vegetable oil": "liquid_fat",
|
||||
"olive oil": "liquid_fat",
|
||||
"canola oil": "liquid_fat",
|
||||
"sunflower oil": "liquid_fat",
|
||||
"avocado oil": "liquid_fat",
|
||||
# Sweeteners
|
||||
"sugar": "sweetener",
|
||||
"brown sugar": "sweetener",
|
||||
"honey": "sweetener",
|
||||
"maple syrup": "sweetener",
|
||||
"agave": "sweetener",
|
||||
"molasses": "sweetener",
|
||||
"stevia": "sweetener",
|
||||
"powdered sugar": "sweetener",
|
||||
# All-purpose flours and baking bases
|
||||
"flour": "flour",
|
||||
"all-purpose flour": "flour",
|
||||
"whole wheat flour": "flour",
|
||||
"bread flour": "flour",
|
||||
"self-rising flour": "flour",
|
||||
"cake flour": "flour",
|
||||
# Dairy and non-dairy milk
|
||||
"milk": "dairy_milk",
|
||||
"whole milk": "dairy_milk",
|
||||
"skim milk": "dairy_milk",
|
||||
"2% milk": "dairy_milk",
|
||||
"oat milk": "dairy_milk",
|
||||
"almond milk": "dairy_milk",
|
||||
"soy milk": "dairy_milk",
|
||||
"rice milk": "dairy_milk",
|
||||
# Heavy/whipping creams
|
||||
"cream": "heavy_cream",
|
||||
"heavy cream": "heavy_cream",
|
||||
"whipping cream": "heavy_cream",
|
||||
"double cream": "heavy_cream",
|
||||
"coconut cream": "heavy_cream",
|
||||
# Cultured dairy (acid + thick)
|
||||
"sour cream": "cultured_dairy",
|
||||
"greek yogurt": "cultured_dairy",
|
||||
"yogurt": "cultured_dairy",
|
||||
"buttermilk": "cultured_dairy",
|
||||
# Starch thickeners
|
||||
"cornstarch": "thickener",
|
||||
"arrowroot": "thickener",
|
||||
"tapioca starch": "thickener",
|
||||
"potato starch": "thickener",
|
||||
"rice flour": "thickener",
|
||||
# Egg binders
|
||||
"egg": "egg_binder",
|
||||
"eggs": "egg_binder",
|
||||
# Acids
|
||||
"vinegar": "acid",
|
||||
"apple cider vinegar": "acid",
|
||||
"white vinegar": "acid",
|
||||
"red wine vinegar": "acid",
|
||||
"lemon juice": "acid",
|
||||
"lime juice": "acid",
|
||||
# Stocks and broths
|
||||
"broth": "stock",
|
||||
"stock": "stock",
|
||||
"chicken broth": "stock",
|
||||
"beef broth": "stock",
|
||||
"vegetable broth": "stock",
|
||||
"chicken stock": "stock",
|
||||
"beef stock": "stock",
|
||||
"bouillon": "stock",
|
||||
# Hard cheeses (grating / melting interchangeable)
|
||||
"parmesan": "hard_cheese",
|
||||
"romano": "hard_cheese",
|
||||
"pecorino": "hard_cheese",
|
||||
"asiago": "hard_cheese",
|
||||
# Melting cheeses
|
||||
"cheddar": "melting_cheese",
|
||||
"mozzarella": "melting_cheese",
|
||||
"swiss": "melting_cheese",
|
||||
"gouda": "melting_cheese",
|
||||
"monterey jack": "melting_cheese",
|
||||
"colby": "melting_cheese",
|
||||
"provolone": "melting_cheese",
|
||||
# Canned tomato products
|
||||
"tomato sauce": "canned_tomato",
|
||||
"tomato paste": "canned_tomato",
|
||||
"crushed tomatoes": "canned_tomato",
|
||||
"diced tomatoes": "canned_tomato",
|
||||
"marinara": "canned_tomato",
|
||||
}
|
||||
|
||||
|
||||
def _category_swap(ingredient: str, pantry_items: set[str]) -> str | None:
    """Level-2 fallback: find a same-category pantry substitute for a single-token ingredient.

    _pantry_creative_swap requires ≥2 shared content tokens, so it always returns
    None for single-word ingredients like 'butter' or 'flour'. This function looks
    up the ingredient's functional category and returns any pantry item in that
    same category, enabling swaps like butter → ghee, milk → oat milk.

    Returns:
        A pantry item name in the same functional category, or None.
    """
    clean = _strip_quantity(ingredient).lower()
    category = _FUNCTIONAL_SWAP_CATEGORIES.get(clean)
    if not category:
        return None
    for item in pantry_items:
        item_lower = item.lower()
        if item_lower == clean:
            continue
        # Direct match: pantry item name is a known member of the same category
        if _FUNCTIONAL_SWAP_CATEGORIES.get(item_lower) == category:
            return item
        # Whole-word match: handles "organic oat milk" containing "oat milk".
        # Bug fix: the previous plain-substring test produced false positives —
        # e.g. "butter" is a substring of "buttermilk", so buttermilk was wrongly
        # offered as a solid-fat swap for butter. \b word boundaries reject those.
        for known_ing, cat in _FUNCTIONAL_SWAP_CATEGORIES.items():
            if cat != category:
                continue
            if re.search(rf"\b{re.escape(known_ing)}\b", item_lower):
                return item
    return None
|
||||
|
||||
|
||||
# Source-URL templates keyed by recipe "source" field. Each template's {id}
# placeholder is filled with the row's numeric external_id to reconstruct a
# canonical link back to the original recipe page (see _build_source_url).
_SOURCE_URL_BUILDERS: dict[str, str] = {
    "foodcom": "https://www.food.com/recipe/{id}",
}
|
||||
|
||||
|
||||
def _build_source_url(row: dict) -> str | None:
    """Construct a canonical source URL from DB row fields, or None for generated recipes."""
    template = _SOURCE_URL_BUILDERS.get(row.get("source") or "")
    external_id = row.get("external_id")
    if not template or not external_id:
        return None
    # external_id may be stored as a float-like string ("12345.0"); normalize to int.
    try:
        numeric_id = int(float(external_id))
    except (ValueError, TypeError):
        return None
    return template.format(id=numeric_id)
|
||||
|
||||
|
||||
# Method complexity classification patterns.
# Low-effort verbs → "easy"; heavy techniques → "involved". Checked against
# the joined direction text in _classify_method_complexity ("involved" wins
# when both match, since it is tested first there).
_EASY_METHODS = re.compile(
    r"\b(microwave|mix|stir|blend|toast|assemble|heat)\b", re.IGNORECASE
)
_INVOLVED_METHODS = re.compile(
    r"\b(braise|roast|knead|deep.?fry|fry|sauté|saute|bake|boil)\b", re.IGNORECASE
)

# Hard day mode sort tier patterns.
# _PREMADE_TITLE_RE flags recipe titles that signal a premade product;
# _HEAT_ONLY_RE flags direction text whose only work is reheating.
# Both feed _hard_day_sort_tier's tier-0 ("premade / heat-only") checks.
_PREMADE_TITLE_RE = re.compile(
    r"\b(frozen|instant|microwave|ready.?made|pre.?made|packaged|heat.?and.?eat)\b",
    re.IGNORECASE,
)
_HEAT_ONLY_RE = re.compile(r"\b(microwave|heat|warm|thaw)\b", re.IGNORECASE)
|
||||
|
||||
|
||||
def _hard_day_sort_tier(
    title: str,
    ingredient_names: list[str],
    directions: list[str],
) -> int:
    """Return a sort priority tier for hard day mode.

    0 — premade / heat-only (frozen dinner, quesadilla, microwave meal)
    1 — super simple (≤3 ingredients, easy method)
    2 — easy/moderate (everything else that passed the 'involved' filter)

    Lower tier surfaces first.
    """
    joined_steps = " ".join(directions)
    num_ingredients = len(ingredient_names)
    num_steps = len(directions)

    # Tier 0: premade title signal, or tiny recipe whose steps are just reheating
    if _PREMADE_TITLE_RE.search(title):
        return 0
    if num_ingredients <= 2 and num_steps <= 3 and _HEAT_ONLY_RE.search(joined_steps):
        return 0

    # Tier 1: ≤3 ingredients with any easy method (quesadilla, cheese toast, etc.)
    if num_ingredients <= 3 and _EASY_METHODS.search(joined_steps):
        return 1

    return 2
|
||||
|
||||
|
||||
def _estimate_time_min(directions: list[str], complexity: str) -> int:
|
||||
"""Rough cooking time estimate from step count and method complexity.
|
||||
|
||||
Not precise — intended for filtering and display hints only.
|
||||
"""
|
||||
steps = len(directions)
|
||||
if complexity == "easy":
|
||||
return max(5, 10 + steps * 3)
|
||||
if complexity == "involved":
|
||||
return max(20, 30 + steps * 6)
|
||||
return max(10, 20 + steps * 4) # moderate
|
||||
|
||||
|
||||
def _classify_method_complexity(
    directions: list[str],
    available_equipment: list[str] | None = None,
) -> str:
    """Classify recipe method complexity from direction strings.

    Returns 'easy', 'moderate', or 'involved'.
    available_equipment can expand the easy set (e.g. ['toaster', 'air fryer']).
    """
    text = " ".join(directions).lower()

    # 'involved' takes precedence over 'easy' when both patterns match.
    if _INVOLVED_METHODS.search(text):
        return "involved"
    if _EASY_METHODS.search(text):
        return "easy"

    # Equipment-specific easy methods: any owned appliance mentioned in the steps.
    owned = {e.lower() for e in (available_equipment or [])}
    if any(appliance in text for appliance in owned):
        return "easy"

    return "moderate"
|
||||
|
||||
|
||||
class RecipeEngine:
    """Suggests recipes from the stored corpus (levels 1–2) or an LLM generator (level 3+).

    Combines the element classifier, the substitution engine, and the store's
    ingredient search; applies pantry matching, swap detection, request
    filters, sorting, and grocery-list assembly to produce a RecipeResult.
    """

    def __init__(self, store: "Store") -> None:
        # The store is the persistence/search backend; both helper engines read from it.
        self._store = store
        self._classifier = ElementClassifier(store)
        self._substitution = SubstitutionEngine(store)

    def suggest(
        self,
        req: RecipeRequest,
        available_equipment: list[str] | None = None,
    ) -> RecipeResult:
        """Produce recipe suggestions for the given request.

        Level >= 3 delegates to LLMRecipeGenerator. Levels 1–2 run the
        deterministic corpus path: search by pantry ingredients, compute
        matched/missing/swappable ingredients per row, apply the request's
        filters, sort, and build a deduplicated grocery list with links.

        Args:
            req: Recipe request (pantry, level, filters, tier, mode flags).
            available_equipment: Optional appliance names; when None and
                hard_day_mode is set, loaded from the "cooking_equipment"
                user setting.

        Returns:
            RecipeResult. When a free-tier user exceeds the daily
            leftover-mode quota, returns rate_limited=True with no suggestions.
        """
        # Load cooking equipment from user settings when hard_day_mode is active
        if req.hard_day_mode and available_equipment is None:
            equipment_json = self._store.get_setting("cooking_equipment")
            if equipment_json:
                try:
                    available_equipment = json.loads(equipment_json)
                except (json.JSONDecodeError, TypeError):
                    available_equipment = []
            else:
                available_equipment = []
        # Rate-limit leftover mode for free tier
        if req.expiry_first and req.tier == "free":
            allowed, count = self._store.check_and_increment_rate_limit(
                "leftover_mode", _LEFTOVER_DAILY_MAX_FREE
            )
            if not allowed:
                return RecipeResult(
                    suggestions=[], element_gaps=[], rate_limited=True, rate_limit_count=count
                )

        profiles = self._classifier.classify_batch(req.pantry_items)
        gaps = self._classifier.identify_gaps(profiles)
        pantry_set = _expand_pantry_set(req.pantry_items, req.secondary_pantry_items or None)

        if req.level >= 3:
            from app.services.recipe.llm_recipe import LLMRecipeGenerator
            gen = LLMRecipeGenerator(self._store)
            return gen.generate(req, profiles, gaps)

        # Level 1 & 2: deterministic path
        # L1 ("Use What I Have") applies strict quality gates:
        #   - exclude_generic: filter catch-all recipes at the DB level
        #   - effective_max_missing: default to 2 when user hasn't set a cap
        #   - match ratio: require ≥60% ingredient coverage to avoid low-signal results
        _l1 = req.level == 1 and not req.shopping_mode
        nf = req.nutrition_filters
        rows = self._store.search_recipes_by_ingredients(
            req.pantry_items,
            limit=20,
            category=req.category or None,
            max_calories=nf.max_calories,
            max_sugar_g=nf.max_sugar_g,
            max_carbs_g=nf.max_carbs_g,
            max_sodium_mg=nf.max_sodium_mg,
            excluded_ids=req.excluded_ids or [],
            exclude_generic=_l1,
        )

        # L1 strict defaults: cap missing ingredients and require a minimum ratio.
        _L1_MAX_MISSING_DEFAULT = 2
        _L1_MIN_MATCH_RATIO = 0.6
        effective_max_missing = req.max_missing
        if _l1 and effective_max_missing is None:
            effective_max_missing = _L1_MAX_MISSING_DEFAULT

        suggestions = []
        hard_day_tier_map: dict[int, int] = {}  # recipe_id → tier when hard_day_mode

        for row in rows:
            # ingredient_names may arrive JSON-encoded from the DB row.
            ingredient_names: list[str] = row.get("ingredient_names") or []
            if isinstance(ingredient_names, str):
                try:
                    ingredient_names = json.loads(ingredient_names)
                except Exception:
                    ingredient_names = []

            # Compute missing ingredients, detecting pantry coverage first.
            # When covered, collect any prep-state annotations (e.g. "melted butter"
            # → note "Melt the butter before starting.") to surface separately.
            swap_candidates: list[SwapCandidate] = []
            matched: list[str] = []
            missing: list[str] = []
            prep_note_set: set[str] = set()
            for n in ingredient_names:
                if _ingredient_in_pantry(n, pantry_set):
                    matched.append(_strip_quantity(n))
                    note = _prep_note_for(n)
                    if note:
                        prep_note_set.add(note)
                    continue
                swap_item = _pantry_creative_swap(n, pantry_set)
                # L2: also try functional-category swap for single-token ingredients
                # that _pantry_creative_swap can't match (requires ≥2 shared tokens).
                if swap_item is None and req.level == 2:
                    swap_item = _category_swap(n, pantry_set)
                if swap_item:
                    swap_candidates.append(SwapCandidate(
                        original_name=n,
                        substitute_name=swap_item,
                        constraint_label="pantry_swap",
                        explanation=f"You have {swap_item} — use it in place of {n}.",
                        compensation_hints=[],
                    ))
                else:
                    missing.append(n)

            # Filter by max_missing — skipped in shopping mode (user is willing to buy)
            if not req.shopping_mode and effective_max_missing is not None and len(missing) > effective_max_missing:
                continue

            # "Can make now" toggle: drop any recipe that still has missing ingredients
            # after swaps are applied. Swapped items count as covered.
            if req.pantry_match_only and missing:
                continue

            # L1 match ratio gate: drop results where less than 60% of the recipe's
            # ingredients are in the pantry. Prevents low-signal results like a
            # 10-ingredient recipe matching on only one common item.
            if _l1 and ingredient_names:
                match_ratio = len(matched) / len(ingredient_names)
                if match_ratio < _L1_MIN_MATCH_RATIO:
                    continue

            # Parse directions — needed for complexity, hard_day_mode, and time estimate.
            directions: list[str] = row.get("directions") or []
            if isinstance(directions, str):
                try:
                    directions = json.loads(directions)
                except Exception:
                    directions = [directions]

            # Compute complexity for every suggestion (used for badge + filter).
            row_complexity = _classify_method_complexity(directions, available_equipment)
            row_time_min = _estimate_time_min(directions, row_complexity)

            # Filter and tier-rank by hard_day_mode
            if req.hard_day_mode:
                if row_complexity == "involved":
                    continue
                hard_day_tier_map[row["id"]] = _hard_day_sort_tier(
                    title=row.get("title", ""),
                    ingredient_names=ingredient_names,
                    directions=directions,
                )

            # Complexity filter (#58)
            if req.complexity_filter and row_complexity != req.complexity_filter:
                continue

            # Max time filter (#58)
            if req.max_time_min is not None and row_time_min > req.max_time_min:
                continue

            # Level 2: also add dietary constraint swaps from substitution_pairs
            if req.level == 2 and req.constraints:
                for ing in ingredient_names:
                    for constraint in req.constraints:
                        swaps = self._substitution.find_substitutes(ing, constraint)
                        # Keep at most one substitute per ingredient/constraint pair.
                        for swap in swaps[:1]:
                            swap_candidates.append(SwapCandidate(
                                original_name=swap.original_name,
                                substitute_name=swap.substitute_name,
                                constraint_label=swap.constraint_label,
                                explanation=swap.explanation,
                                compensation_hints=swap.compensation_hints,
                            ))

            # element_coverage may also arrive JSON-encoded from the DB row.
            coverage_raw = row.get("element_coverage") or {}
            if isinstance(coverage_raw, str):
                try:
                    coverage_raw = json.loads(coverage_raw)
                except Exception:
                    coverage_raw = {}

            servings = row.get("servings") or None
            nutrition = NutritionPanel(
                calories=row.get("calories"),
                fat_g=row.get("fat_g"),
                protein_g=row.get("protein_g"),
                carbs_g=row.get("carbs_g"),
                fiber_g=row.get("fiber_g"),
                sugar_g=row.get("sugar_g"),
                sodium_mg=row.get("sodium_mg"),
                servings=servings,
                estimated=bool(row.get("nutrition_estimated", 0)),
            )
            # Only attach the panel when at least one headline field is present.
            has_nutrition = any(
                v is not None
                for v in (nutrition.calories, nutrition.sugar_g, nutrition.carbs_g)
            )
            suggestions.append(RecipeSuggestion(
                id=row["id"],
                title=row["title"],
                match_count=int(row.get("match_count") or 0),
                element_coverage=coverage_raw,
                swap_candidates=swap_candidates,
                matched_ingredients=matched,
                missing_ingredients=missing,
                prep_notes=sorted(prep_note_set),
                level=req.level,
                nutrition=nutrition if has_nutrition else None,
                source_url=_build_source_url(row),
                complexity=row_complexity,
                estimated_time_min=row_time_min,
            ))

        # Sort corpus results — assembly templates are now served from a dedicated tab.
        # Hard day mode: primary sort by tier (0=premade, 1=simple, 2=moderate),
        # then by match_count within each tier.
        # Normal mode: sort by match_count descending.
        if req.hard_day_mode and hard_day_tier_map:
            suggestions = sorted(
                suggestions,
                key=lambda s: (hard_day_tier_map.get(s.id, 1), -s.match_count),
            )
        else:
            suggestions = sorted(suggestions, key=lambda s: -s.match_count)

        # Build grocery list — deduplicated union of all missing ingredients
        seen: set[str] = set()
        grocery_list: list[str] = []
        for s in suggestions:
            for item in s.missing_ingredients:
                if item not in seen:
                    grocery_list.append(item)
                    seen.add(item)

        # Build grocery links — affiliate deeplinks for each missing ingredient
        link_builder = GroceryLinkBuilder(tier=req.tier, has_byok=req.has_byok)
        grocery_links = link_builder.build_all(grocery_list)

        return RecipeResult(
            suggestions=suggestions,
            element_gaps=gaps,
            grocery_list=grocery_list,
            grocery_links=grocery_links,
        )
|
||||
|
|
@ -1,60 +0,0 @@
|
|||
"""
|
||||
StapleLibrary -- bulk-preparable base component reference data.
|
||||
Loaded from YAML files in app/staples/.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
import yaml
|
||||
|
||||
_STAPLES_DIR = Path(__file__).parents[2] / "staples"
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class StapleEntry:
    """One bulk-preparable base component, loaded from a YAML file in app/staples/."""

    # Stable identifier; used as the lookup key in StapleLibrary.
    slug: str
    # Human-readable display name.
    name: str
    # Free-text description; empty string when the YAML omits it.
    description: str
    # Dietary tags used by StapleLibrary.filter_by_dietary (values come from
    # the YAML files — presumably labels like "vegan"; verify against data).
    dietary_labels: list[str]
    # Ingredient names for the base preparation.
    base_ingredients: list[str]
    # Free-text method for the base preparation; empty when omitted.
    base_method: str
    # Preparation time in minutes; 0 when the YAML omits it.
    base_time_minutes: int
    # Arbitrary mapping describing output formats/quantities (schema defined by YAML).
    yield_formats: dict[str, Any]
    # Style identifiers this staple can be combined with.
    compatible_styles: list[str]
||||
|
||||
|
||||
class StapleLibrary:
    """In-memory registry of StapleEntry objects loaded from *.yaml files.

    Files are loaded eagerly at construction time in sorted file-name order;
    entries with duplicate slugs are overwritten by the later file.
    """

    def __init__(self, staples_dir: Path = _STAPLES_DIR) -> None:
        self._staples: dict[str, StapleEntry] = {}
        for yaml_path in sorted(staples_dir.glob("*.yaml")):
            entry = self._load(yaml_path)
            self._staples[entry.slug] = entry

    def get(self, slug: str) -> StapleEntry | None:
        """Return the staple registered under *slug*, or None when unknown."""
        return self._staples.get(slug)

    def list_all(self) -> list[StapleEntry]:
        """Return every loaded staple (file-name load order)."""
        return list(self._staples.values())

    def filter_by_dietary(self, label: str) -> list[StapleEntry]:
        """Return staples whose dietary_labels contain *label*."""
        return [s for s in self._staples.values() if label in s.dietary_labels]

    def _load(self, path: Path) -> StapleEntry:
        """Parse one YAML file into a StapleEntry.

        Raises:
            ValueError: on malformed YAML, a missing required key, an empty
                document, or a non-numeric base_time_minutes.
        """
        try:
            # Explicit encoding: staple files are authored as UTF-8.
            data = yaml.safe_load(path.read_text(encoding="utf-8"))
            return StapleEntry(
                slug=data["slug"],
                name=data["name"],
                description=data.get("description", ""),
                dietary_labels=data.get("dietary_labels", []),
                base_ingredients=data.get("base_ingredients", []),
                base_method=data.get("base_method", ""),
                base_time_minutes=int(data.get("base_time_minutes", 0)),
                yield_formats=data.get("yield_formats", {}),
                compatible_styles=data.get("compatible_styles", []),
            )
        except (KeyError, TypeError, ValueError, yaml.YAMLError) as exc:
            # TypeError: empty/None YAML document (data["slug"] on None) or a
            # non-mapping top level. ValueError: non-numeric base_time_minutes.
            # Both previously escaped as raw exceptions instead of ValueError.
            raise ValueError(f"Failed to load staple from {path}: {exc}") from exc
|
||||
Some files were not shown because too many files have changed in this diff Show more
Loading…
Reference in a new issue