From 8cbde774e5d4ccb7c267b77f90ab251d28076fe7 Mon Sep 17 00:00:00 2001 From: pyr0ball Date: Mon, 30 Mar 2026 22:20:48 -0700 Subject: [PATCH] =?UTF-8?q?chore:=20initial=20commit=20=E2=80=94=20kiwi=20?= =?UTF-8?q?Phase=202=20complete?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Pantry tracker app with: - FastAPI backend + Vue 3 SPA frontend - SQLite via circuitforge-core (migrations 001-005) - Inventory CRUD, barcode scan, receipt OCR pipeline - Expiry prediction (deterministic + LLM fallback) - CF-core tier system integration - Cloud session support (menagerie) --- .env.example | 37 + .gitignore | 21 + Dockerfile | 26 + PRIVACY.md | 7 + README.md | 66 + app/__init__.py | 7 + app/api/__init__.py | 5 + app/api/endpoints/__init__.py | 4 + app/api/endpoints/export.py | 47 + app/api/endpoints/health.py | 14 + app/api/endpoints/inventory.py | 394 ++++ app/api/endpoints/ocr.py | 233 ++ app/api/endpoints/receipts.py | 110 + app/api/routes.py | 10 + app/cloud_session.py | 196 ++ app/core/__init__.py | 5 + app/core/config.py | 59 + app/db/__init__.py | 1 + app/db/base.py | 1 + app/db/migrations/001_initial_schema.sql | 32 + .../migrations/002_inventory_and_products.sql | 53 + app/db/migrations/003_receipt_data.sql | 38 + app/db/migrations/004_tagging_system.sql | 23 + .../migrations/005_receipt_staged_status.sql | 36 + app/db/models.py | 577 +++++ app/db/session.py | 23 + app/db/store.py | 262 +++ app/main.py | 44 + app/models/__init__.py | 5 + app/models/domain/__init__.py | 5 + app/models/schemas/__init__.py | 4 + app/models/schemas/inventory.py | 143 ++ app/models/schemas/ocr.py | 138 ++ app/models/schemas/quality.py | 17 + app/models/schemas/receipt.py | 46 + app/services/__init__.py | 8 + app/services/barcode_scanner.py | 365 ++++ app/services/expiration_predictor.py | 306 +++ app/services/export/__init__.py | 1 + app/services/export/spreadsheet_export.py | 325 +++ app/services/image_preprocessing/__init__.py | 10 + 
.../image_preprocessing/enhancement.py | 172 ++ .../image_preprocessing/format_conversion.py | 89 + app/services/inventory_service.py | 539 +++++ app/services/ocr/__init__.py | 5 + app/services/ocr/vl_model.py | 371 ++++ app/services/openfoodfacts.py | 234 ++ app/services/quality/__init__.py | 9 + app/services/quality/assessment.py | 332 +++ app/services/receipt_service.py | 126 ++ .../receipt_service_inmemory_backup.py | 295 +++ app/static/index.html | 926 ++++++++ app/static/upload.html | 459 ++++ app/static/upload.html.backup | 459 ++++ app/tiers.py | 61 + app/utils/__init__.py | 5 + app/utils/progress.py | 248 +++ app/utils/units.py | 185 ++ compose.cloud.yml | 43 + compose.yml | 21 + docker/web/Dockerfile | 22 + docker/web/nginx.cloud.conf | 32 + docker/web/nginx.conf | 27 + environment.yml | 18 + frontend/.env | 3 + frontend/.gitignore | 24 + frontend/.vscode/extensions.json | 3 + frontend/THEMING_SYSTEM.md | 458 ++++ frontend/index.html | 13 + frontend/package-lock.json | 1884 +++++++++++++++++ frontend/package.json | 25 + frontend/public/vite.svg | 1 + frontend/src/App.vue | 199 ++ frontend/src/assets/vue.svg | 1 + frontend/src/components/ConfirmDialog.vue | 189 ++ frontend/src/components/EditItemModal.vue | 452 ++++ frontend/src/components/HelloWorld.vue | 41 + frontend/src/components/InventoryList.vue | 1281 +++++++++++ frontend/src/components/ReceiptsView.vue | 454 ++++ frontend/src/components/ToastNotification.vue | 252 +++ frontend/src/main.ts | 11 + frontend/src/services/api.ts | 407 ++++ frontend/src/stores/inventory.ts | 173 ++ frontend/src/style.css | 255 +++ frontend/src/theme.css | 536 +++++ frontend/tsconfig.app.json | 16 + frontend/tsconfig.json | 7 + frontend/tsconfig.node.json | 26 + frontend/vite.config.ts | 26 + manage.sh | 98 + pyproject.toml | 33 + 91 files changed, 15250 insertions(+) create mode 100644 .env.example create mode 100644 .gitignore create mode 100644 Dockerfile create mode 100644 PRIVACY.md create mode 100644 README.md 
create mode 100644 app/__init__.py create mode 100644 app/api/__init__.py create mode 100644 app/api/endpoints/__init__.py create mode 100644 app/api/endpoints/export.py create mode 100644 app/api/endpoints/health.py create mode 100644 app/api/endpoints/inventory.py create mode 100644 app/api/endpoints/ocr.py create mode 100644 app/api/endpoints/receipts.py create mode 100644 app/api/routes.py create mode 100644 app/cloud_session.py create mode 100644 app/core/__init__.py create mode 100644 app/core/config.py create mode 100644 app/db/__init__.py create mode 100644 app/db/base.py create mode 100644 app/db/migrations/001_initial_schema.sql create mode 100644 app/db/migrations/002_inventory_and_products.sql create mode 100644 app/db/migrations/003_receipt_data.sql create mode 100644 app/db/migrations/004_tagging_system.sql create mode 100644 app/db/migrations/005_receipt_staged_status.sql create mode 100644 app/db/models.py create mode 100644 app/db/session.py create mode 100644 app/db/store.py create mode 100644 app/main.py create mode 100644 app/models/__init__.py create mode 100644 app/models/domain/__init__.py create mode 100644 app/models/schemas/__init__.py create mode 100644 app/models/schemas/inventory.py create mode 100644 app/models/schemas/ocr.py create mode 100644 app/models/schemas/quality.py create mode 100644 app/models/schemas/receipt.py create mode 100644 app/services/__init__.py create mode 100644 app/services/barcode_scanner.py create mode 100644 app/services/expiration_predictor.py create mode 100644 app/services/export/__init__.py create mode 100644 app/services/export/spreadsheet_export.py create mode 100644 app/services/image_preprocessing/__init__.py create mode 100644 app/services/image_preprocessing/enhancement.py create mode 100644 app/services/image_preprocessing/format_conversion.py create mode 100644 app/services/inventory_service.py create mode 100644 app/services/ocr/__init__.py create mode 100644 app/services/ocr/vl_model.py create 
mode 100644 app/services/openfoodfacts.py create mode 100644 app/services/quality/__init__.py create mode 100644 app/services/quality/assessment.py create mode 100644 app/services/receipt_service.py create mode 100644 app/services/receipt_service_inmemory_backup.py create mode 100644 app/static/index.html create mode 100644 app/static/upload.html create mode 100644 app/static/upload.html.backup create mode 100644 app/tiers.py create mode 100644 app/utils/__init__.py create mode 100644 app/utils/progress.py create mode 100644 app/utils/units.py create mode 100644 compose.cloud.yml create mode 100644 compose.yml create mode 100644 docker/web/Dockerfile create mode 100644 docker/web/nginx.cloud.conf create mode 100644 docker/web/nginx.conf create mode 100644 environment.yml create mode 100644 frontend/.env create mode 100644 frontend/.gitignore create mode 100644 frontend/.vscode/extensions.json create mode 100644 frontend/THEMING_SYSTEM.md create mode 100644 frontend/index.html create mode 100644 frontend/package-lock.json create mode 100644 frontend/package.json create mode 100644 frontend/public/vite.svg create mode 100644 frontend/src/App.vue create mode 100644 frontend/src/assets/vue.svg create mode 100644 frontend/src/components/ConfirmDialog.vue create mode 100644 frontend/src/components/EditItemModal.vue create mode 100644 frontend/src/components/HelloWorld.vue create mode 100644 frontend/src/components/InventoryList.vue create mode 100644 frontend/src/components/ReceiptsView.vue create mode 100644 frontend/src/components/ToastNotification.vue create mode 100644 frontend/src/main.ts create mode 100644 frontend/src/services/api.ts create mode 100644 frontend/src/stores/inventory.ts create mode 100644 frontend/src/style.css create mode 100644 frontend/src/theme.css create mode 100644 frontend/tsconfig.app.json create mode 100644 frontend/tsconfig.json create mode 100644 frontend/tsconfig.node.json create mode 100644 frontend/vite.config.ts create mode 100755 
manage.sh create mode 100644 pyproject.toml diff --git a/.env.example b/.env.example new file mode 100644 index 0000000..f0f6415 --- /dev/null +++ b/.env.example @@ -0,0 +1,37 @@ +# Kiwi โ€” environment variables +# Copy to .env and fill in values + +# API +API_PREFIX=/api/v1 +CORS_ORIGINS=http://localhost:5173,http://localhost:8509 + +# Storage +DATA_DIR=./data + +# Database (defaults to DATA_DIR/kiwi.db) +# DB_PATH=./data/kiwi.db + +# Processing +USE_GPU=true +GPU_MEMORY_LIMIT=6144 +MAX_CONCURRENT_JOBS=4 +MIN_QUALITY_SCORE=50.0 + +# Feature flags +ENABLE_OCR=false + +# Runtime +DEBUG=false +CLOUD_MODE=false +DEMO_MODE=false + +# Cloud mode (set in compose.cloud.yml; also set here for reference) +# CLOUD_DATA_ROOT=/devl/kiwi-cloud-data +# KIWI_DB=data/kiwi.db # local-mode DB path override + +# Heimdall license server (required for cloud tier resolution) +# HEIMDALL_URL=https://license.circuitforge.tech +# HEIMDALL_ADMIN_TOKEN= + +# Directus JWT (must match cf-directus SECRET env var) +# DIRECTUS_JWT_SECRET= diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..2b8682f --- /dev/null +++ b/.gitignore @@ -0,0 +1,21 @@ + +# Superpowers brainstorming artifacts +.superpowers/ + +# Git worktrees +.worktrees/ + +# Python bytecode +__pycache__/ +*.pyc +*.pyo + +# Environment files (keep .env.example) +.env + +# Node modules +node_modules/ +dist/ + +# Data directories +data/ diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..23f8899 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,26 @@ +FROM continuumio/miniconda3:latest + +WORKDIR /app + +# Install system dependencies for OpenCV + pyzbar +RUN apt-get update && apt-get install -y --no-install-recommends \ + libzbar0 libgl1 libglib2.0-0 \ + && rm -rf /var/lib/apt/lists/* + +# Install circuitforge-core from sibling directory (compose sets context: ..) 
+COPY circuitforge-core/ ./circuitforge-core/ +RUN conda run -n base pip install --no-cache-dir -e ./circuitforge-core + +# Create kiwi conda env and install app +COPY kiwi/environment.yml . +RUN conda env create -f environment.yml + +COPY kiwi/ ./kiwi/ +# Install cf-core into the kiwi env BEFORE installing kiwi (kiwi lists it as a dep) +RUN conda run -n kiwi pip install --no-cache-dir -e /app/circuitforge-core +WORKDIR /app/kiwi +RUN conda run -n kiwi pip install --no-cache-dir -e . + +EXPOSE 8512 +CMD ["conda", "run", "--no-capture-output", "-n", "kiwi", \ + "uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8512"] diff --git a/PRIVACY.md b/PRIVACY.md new file mode 100644 index 0000000..afc7b9f --- /dev/null +++ b/PRIVACY.md @@ -0,0 +1,7 @@ +# Privacy Policy + +CircuitForge LLC's privacy policy applies to this product and is published at: + +**** + +Last reviewed: March 2026. diff --git a/README.md b/README.md new file mode 100644 index 0000000..852a0d2 --- /dev/null +++ b/README.md @@ -0,0 +1,66 @@ +# ๐Ÿฅ Kiwi + +> *Part of the CircuitForge LLC "AI for the tasks the system made hard on purpose" suite.* + +**Pantry tracking and leftover recipe suggestions.** + +Scan barcodes, photograph receipts, and get recipe ideas based on what you already have โ€” before it expires. 
+ +**Status:** Pre-alpha ยท CircuitForge LLC + +--- + +## What it does + +- **Inventory tracking** โ€” add items by barcode scan, receipt upload, or manually +- **Expiry alerts** โ€” know what's about to go bad +- **Receipt OCR** โ€” extract line items from receipt photos automatically (Paid tier) +- **Recipe suggestions** โ€” LLM-powered ideas based on what's expiring (Paid tier, BYOK-unlockable) +- **Leftover mode** โ€” prioritize nearly-expired items in recipe ranking (Premium tier) + +## Stack + +- **Frontend:** Vue 3 SPA (Vite + TypeScript) +- **Backend:** FastAPI + SQLite (via `circuitforge-core`) +- **Auth:** CF session cookie โ†’ Directus JWT (cloud mode) +- **Licensing:** Heimdall (free tier auto-provisioned at signup) + +## Running locally + +```bash +cp .env.example .env +./manage.sh build +./manage.sh start +# Web: http://localhost:8511 +# API: http://localhost:8512 +``` + +## Cloud instance + +```bash +./manage.sh cloud-build +./manage.sh cloud-start +# Served at menagerie.circuitforge.tech/kiwi (JWT-gated) +``` + +## Tiers + +| Feature | Free | Paid | Premium | +|---------|------|------|---------| +| Inventory CRUD | โœ“ | โœ“ | โœ“ | +| Barcode scan | โœ“ | โœ“ | โœ“ | +| Receipt upload | โœ“ | โœ“ | โœ“ | +| Expiry alerts | โœ“ | โœ“ | โœ“ | +| CSV export | โœ“ | โœ“ | โœ“ | +| Receipt OCR | BYOK | โœ“ | โœ“ | +| Recipe suggestions | BYOK | โœ“ | โœ“ | +| Meal planning | โ€” | โœ“ | โœ“ | +| Multi-household | โ€” | โ€” | โœ“ | +| Leftover mode | โ€” | โ€” | โœ“ | + +BYOK = bring your own LLM backend (configure `~/.config/circuitforge/llm.yaml`) + +## License + +Discovery/pipeline layer: MIT +AI features: BSL 1.1 (free for personal non-commercial self-hosting) diff --git a/app/__init__.py b/app/__init__.py new file mode 100644 index 0000000..569f9ce --- /dev/null +++ b/app/__init__.py @@ -0,0 +1,7 @@ +# app/__init__.py +""" +Kiwi: Pantry tracking and leftover recipe suggestions. 
+""" + +__version__ = "0.1.0" +__author__ = "Alan 'pyr0ball' Weinstock" \ No newline at end of file diff --git a/app/api/__init__.py b/app/api/__init__.py new file mode 100644 index 0000000..c501d14 --- /dev/null +++ b/app/api/__init__.py @@ -0,0 +1,5 @@ +# app/api/__init__.py +""" +API package for Kiwi. +Contains all API routes and endpoint handlers. +""" diff --git a/app/api/endpoints/__init__.py b/app/api/endpoints/__init__.py new file mode 100644 index 0000000..ecc2abf --- /dev/null +++ b/app/api/endpoints/__init__.py @@ -0,0 +1,4 @@ +# app/api/endpoints/__init__.py +""" +API endpoint implementations for Kiwi. +""" \ No newline at end of file diff --git a/app/api/endpoints/export.py b/app/api/endpoints/export.py new file mode 100644 index 0000000..3c5849c --- /dev/null +++ b/app/api/endpoints/export.py @@ -0,0 +1,47 @@ +"""Export endpoints โ€” CSV/Excel of receipt and inventory data.""" +from __future__ import annotations + +import asyncio +import csv +import io + +from fastapi import APIRouter, Depends +from fastapi.responses import StreamingResponse + +from app.db.session import get_store +from app.db.store import Store + +router = APIRouter(prefix="/export", tags=["export"]) + + +@router.get("/receipts/csv") +async def export_receipts_csv(store: Store = Depends(get_store)): + receipts = await asyncio.to_thread(store.list_receipts, 1000, 0) + output = io.StringIO() + fields = ["id", "filename", "status", "created_at", "updated_at"] + writer = csv.DictWriter(output, fieldnames=fields, extrasaction="ignore") + writer.writeheader() + writer.writerows(receipts) + output.seek(0) + return StreamingResponse( + iter([output.getvalue()]), + media_type="text/csv", + headers={"Content-Disposition": "attachment; filename=receipts.csv"}, + ) + + +@router.get("/inventory/csv") +async def export_inventory_csv(store: Store = Depends(get_store)): + items = await asyncio.to_thread(store.list_inventory) + output = io.StringIO() + fields = ["id", "product_name", "barcode", 
"category", "quantity", "unit", + "location", "expiration_date", "status", "created_at"] + writer = csv.DictWriter(output, fieldnames=fields, extrasaction="ignore") + writer.writeheader() + writer.writerows(items) + output.seek(0) + return StreamingResponse( + iter([output.getvalue()]), + media_type="text/csv", + headers={"Content-Disposition": "attachment; filename=inventory.csv"}, + ) diff --git a/app/api/endpoints/health.py b/app/api/endpoints/health.py new file mode 100644 index 0000000..c385a5e --- /dev/null +++ b/app/api/endpoints/health.py @@ -0,0 +1,14 @@ +# app/api/endpoints/health.py +from fastapi import APIRouter + +router = APIRouter() + + +@router.get("/") +async def health_check(): + return {"status": "ok", "service": "kiwi-api"} + + +@router.get("/ping") +async def ping(): + return {"ping": "pong"} diff --git a/app/api/endpoints/inventory.py b/app/api/endpoints/inventory.py new file mode 100644 index 0000000..5045dd0 --- /dev/null +++ b/app/api/endpoints/inventory.py @@ -0,0 +1,394 @@ +"""Inventory API endpoints โ€” products, items, barcode scanning, tags, stats.""" + +from __future__ import annotations + +import asyncio +import uuid +from pathlib import Path +from typing import Any, Dict, List, Optional + +import aiofiles +from fastapi import APIRouter, Depends, File, Form, HTTPException, UploadFile, status +from pydantic import BaseModel + +from app.cloud_session import CloudUser, get_session +from app.db.session import get_store +from app.db.store import Store +from app.models.schemas.inventory import ( + BarcodeScanResponse, + InventoryItemCreate, + InventoryItemResponse, + InventoryItemUpdate, + InventoryStats, + ProductCreate, + ProductResponse, + ProductUpdate, + TagCreate, + TagResponse, +) + +router = APIRouter() + + +# โ”€โ”€ Products โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + 
+@router.post("/products", response_model=ProductResponse, status_code=status.HTTP_201_CREATED) +async def create_product(body: ProductCreate, store: Store = Depends(get_store)): + product, _ = await asyncio.to_thread( + store.get_or_create_product, + body.name, + body.barcode, + brand=body.brand, + category=body.category, + description=body.description, + image_url=body.image_url, + nutrition_data=body.nutrition_data, + source=body.source, + source_data=body.source_data, + ) + return ProductResponse.model_validate(product) + + +@router.get("/products", response_model=List[ProductResponse]) +async def list_products(store: Store = Depends(get_store)): + products = await asyncio.to_thread(store.list_products) + return [ProductResponse.model_validate(p) for p in products] + + +@router.get("/products/{product_id}", response_model=ProductResponse) +async def get_product(product_id: int, store: Store = Depends(get_store)): + product = await asyncio.to_thread(store.get_product, product_id) + if not product: + raise HTTPException(status_code=404, detail="Product not found") + return ProductResponse.model_validate(product) + + +@router.get("/products/barcode/{barcode}", response_model=ProductResponse) +async def get_product_by_barcode(barcode: str, store: Store = Depends(get_store)): + from app.db import store as store_module # avoid circular + product = await asyncio.to_thread( + store._fetch_one, "SELECT * FROM products WHERE barcode = ?", (barcode,) + ) + if not product: + raise HTTPException(status_code=404, detail="Product not found") + return ProductResponse.model_validate(product) + + +@router.patch("/products/{product_id}", response_model=ProductResponse) +async def update_product( + product_id: int, body: ProductUpdate, store: Store = Depends(get_store) +): + updates = body.model_dump(exclude_none=True) + if not updates: + product = await asyncio.to_thread(store.get_product, product_id) + else: + import json + sets = ", ".join(f"{k} = ?" 
for k in updates) + values = [] + for k, v in updates.items(): + values.append(json.dumps(v) if isinstance(v, dict) else v) + values.append(product_id) + await asyncio.to_thread( + store.conn.execute, + f"UPDATE products SET {sets}, updated_at = datetime('now') WHERE id = ?", + values, + ) + store.conn.commit() + product = await asyncio.to_thread(store.get_product, product_id) + if not product: + raise HTTPException(status_code=404, detail="Product not found") + return ProductResponse.model_validate(product) + + +@router.delete("/products/{product_id}", status_code=status.HTTP_204_NO_CONTENT) +async def delete_product(product_id: int, store: Store = Depends(get_store)): + existing = await asyncio.to_thread(store.get_product, product_id) + if not existing: + raise HTTPException(status_code=404, detail="Product not found") + await asyncio.to_thread( + store.conn.execute, "DELETE FROM products WHERE id = ?", (product_id,) + ) + store.conn.commit() + + +# โ”€โ”€ Inventory items โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +@router.post("/items", response_model=InventoryItemResponse, status_code=status.HTTP_201_CREATED) +async def create_inventory_item(body: InventoryItemCreate, store: Store = Depends(get_store)): + item = await asyncio.to_thread( + store.add_inventory_item, + body.product_id, + body.location, + quantity=body.quantity, + unit=body.unit, + sublocation=body.sublocation, + purchase_date=str(body.purchase_date) if body.purchase_date else None, + expiration_date=str(body.expiration_date) if body.expiration_date else None, + notes=body.notes, + source=body.source, + ) + return InventoryItemResponse.model_validate(item) + + +@router.get("/items", response_model=List[InventoryItemResponse]) +async def list_inventory_items( + location: Optional[str] = None, + item_status: str = "available", + store: Store = Depends(get_store), 
+): + items = await asyncio.to_thread(store.list_inventory, location, item_status) + return [InventoryItemResponse.model_validate(i) for i in items] + + +@router.get("/items/expiring", response_model=List[InventoryItemResponse]) +async def get_expiring_items(days: int = 7, store: Store = Depends(get_store)): + items = await asyncio.to_thread(store.expiring_soon, days) + return [InventoryItemResponse.model_validate(i) for i in items] + + +@router.get("/items/{item_id}", response_model=InventoryItemResponse) +async def get_inventory_item(item_id: int, store: Store = Depends(get_store)): + item = await asyncio.to_thread(store.get_inventory_item, item_id) + if not item: + raise HTTPException(status_code=404, detail="Inventory item not found") + return InventoryItemResponse.model_validate(item) + + +@router.patch("/items/{item_id}", response_model=InventoryItemResponse) +async def update_inventory_item( + item_id: int, body: InventoryItemUpdate, store: Store = Depends(get_store) +): + updates = body.model_dump(exclude_none=True) + if "purchase_date" in updates and updates["purchase_date"]: + updates["purchase_date"] = str(updates["purchase_date"]) + if "expiration_date" in updates and updates["expiration_date"]: + updates["expiration_date"] = str(updates["expiration_date"]) + item = await asyncio.to_thread(store.update_inventory_item, item_id, **updates) + if not item: + raise HTTPException(status_code=404, detail="Inventory item not found") + return InventoryItemResponse.model_validate(item) + + +@router.post("/items/{item_id}/consume", response_model=InventoryItemResponse) +async def consume_item(item_id: int, store: Store = Depends(get_store)): + from datetime import datetime, timezone + item = await asyncio.to_thread( + store.update_inventory_item, + item_id, + status="consumed", + consumed_at=datetime.now(timezone.utc).isoformat(), + ) + if not item: + raise HTTPException(status_code=404, detail="Inventory item not found") + return 
InventoryItemResponse.model_validate(item) + + +@router.delete("/items/{item_id}", status_code=status.HTTP_204_NO_CONTENT) +async def delete_inventory_item(item_id: int, store: Store = Depends(get_store)): + existing = await asyncio.to_thread(store.get_inventory_item, item_id) + if not existing: + raise HTTPException(status_code=404, detail="Inventory item not found") + await asyncio.to_thread( + store.conn.execute, "DELETE FROM inventory_items WHERE id = ?", (item_id,) + ) + store.conn.commit() + + +# โ”€โ”€ Barcode scanning โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +class BarcodeScanTextRequest(BaseModel): + barcode: str + location: str = "pantry" + quantity: float = 1.0 + auto_add_to_inventory: bool = True + + +@router.post("/scan/text", response_model=BarcodeScanResponse) +async def scan_barcode_text( + body: BarcodeScanTextRequest, + store: Store = Depends(get_store), + session: CloudUser = Depends(get_session), +): + """Scan a barcode from a text string (e.g. 
from a hardware scanner or manual entry).""" + from app.services.openfoodfacts import OpenFoodFactsService + from app.services.expiration_predictor import ExpirationPredictor + + off = OpenFoodFactsService() + predictor = ExpirationPredictor() + product_info = await off.lookup_product(body.barcode) + inventory_item = None + + if product_info and body.auto_add_to_inventory: + product, _ = await asyncio.to_thread( + store.get_or_create_product, + product_info.get("name", body.barcode), + body.barcode, + brand=product_info.get("brand"), + category=product_info.get("category"), + nutrition_data=product_info.get("nutrition_data", {}), + source="openfoodfacts", + source_data=product_info, + ) + exp = predictor.predict_expiration( + product_info.get("category", ""), + body.location, + product_name=product_info.get("name", body.barcode), + tier=session.tier, + has_byok=session.has_byok, + ) + inventory_item = await asyncio.to_thread( + store.add_inventory_item, + product["id"], body.location, + quantity=body.quantity, + expiration_date=str(exp) if exp else None, + source="barcode_scan", + ) + result_product = ProductResponse.model_validate(product) + else: + result_product = None + + return BarcodeScanResponse( + success=True, + barcodes_found=1, + results=[{ + "barcode": body.barcode, + "barcode_type": "text", + "product": result_product, + "inventory_item": InventoryItemResponse.model_validate(inventory_item) if inventory_item else None, + "added_to_inventory": inventory_item is not None, + "message": "Added to inventory" if inventory_item else "Product not found in database", + }], + message="Barcode processed", + ) + + +@router.post("/scan", response_model=BarcodeScanResponse) +async def scan_barcode_image( + file: UploadFile = File(...), + auto_add_to_inventory: bool = Form(True), + location: str = Form("pantry"), + quantity: float = Form(1.0), + store: Store = Depends(get_store), + session: CloudUser = Depends(get_session), +): + """Scan a barcode from an uploaded 
image. Requires Phase 2 scanner integration.""" + temp_dir = Path("/tmp/kiwi_barcode_scans") + temp_dir.mkdir(parents=True, exist_ok=True) + temp_file = temp_dir / f"{uuid.uuid4()}_{file.filename}" + try: + async with aiofiles.open(temp_file, "wb") as f: + await f.write(await file.read()) + from app.services.barcode_scanner import BarcodeScanner + from app.services.openfoodfacts import OpenFoodFactsService + from app.services.expiration_predictor import ExpirationPredictor + + barcodes = await asyncio.to_thread(BarcodeScanner().scan_image, temp_file) + if not barcodes: + return BarcodeScanResponse( + success=False, barcodes_found=0, results=[], + message="No barcodes detected in image" + ) + + off = OpenFoodFactsService() + predictor = ExpirationPredictor() + results = [] + for bc in barcodes: + code = bc["data"] + product_info = await off.lookup_product(code) + inventory_item = None + if product_info and auto_add_to_inventory: + product, _ = await asyncio.to_thread( + store.get_or_create_product, + product_info.get("name", code), + code, + brand=product_info.get("brand"), + category=product_info.get("category"), + nutrition_data=product_info.get("nutrition_data", {}), + source="openfoodfacts", + source_data=product_info, + ) + exp = predictor.predict_expiration( + product_info.get("category", ""), + location, + product_name=product_info.get("name", code), + tier=session.tier, + has_byok=session.has_byok, + ) + inventory_item = await asyncio.to_thread( + store.add_inventory_item, + product["id"], location, + quantity=quantity, + expiration_date=str(exp) if exp else None, + source="barcode_scan", + ) + results.append({ + "barcode": code, + "barcode_type": bc.get("type", "unknown"), + "product": ProductResponse.model_validate(product) if product_info else None, + "inventory_item": InventoryItemResponse.model_validate(inventory_item) if inventory_item else None, + "added_to_inventory": inventory_item is not None, + "message": "Added to inventory" if inventory_item 
else "Barcode scanned", + }) + return BarcodeScanResponse( + success=True, barcodes_found=len(barcodes), results=results, + message=f"Processed {len(barcodes)} barcode(s)" + ) + finally: + if temp_file.exists(): + temp_file.unlink() + + +# โ”€โ”€ Tags โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +@router.post("/tags", response_model=TagResponse, status_code=status.HTTP_201_CREATED) +async def create_tag(body: TagCreate, store: Store = Depends(get_store)): + cur = await asyncio.to_thread( + store.conn.execute, + "INSERT INTO tags (name, slug, description, color, category) VALUES (?,?,?,?,?) RETURNING *", + (body.name, body.slug, body.description, body.color, body.category), + ) + store.conn.commit() + import sqlite3; store.conn.row_factory = sqlite3.Row + return TagResponse.model_validate(store._row_to_dict(cur.fetchone())) + + +@router.get("/tags", response_model=List[TagResponse]) +async def list_tags( + category: Optional[str] = None, store: Store = Depends(get_store) +): + if category: + tags = await asyncio.to_thread( + store._fetch_all, "SELECT * FROM tags WHERE category = ? 
ORDER BY name", (category,) + ) + else: + tags = await asyncio.to_thread( + store._fetch_all, "SELECT * FROM tags ORDER BY name" + ) + return [TagResponse.model_validate(t) for t in tags] + + +# โ”€โ”€ Stats โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +@router.get("/stats", response_model=InventoryStats) +async def get_inventory_stats(store: Store = Depends(get_store)): + def _stats(): + rows = store._fetch_all( + """SELECT status, location, COUNT(*) as cnt + FROM inventory_items GROUP BY status, location""" + ) + total = sum(r["cnt"] for r in rows) + available = sum(r["cnt"] for r in rows if r["status"] == "available") + expired = sum(r["cnt"] for r in rows if r["status"] == "expired") + expiring = len(store.expiring_soon(7)) + locations = {} + for r in rows: + if r["status"] == "available": + locations[r["location"]] = locations.get(r["location"], 0) + r["cnt"] + return { + "total_items": total, + "available_items": available, + "expiring_soon": expiring, + "expired_items": expired, + "locations": locations, + } + return InventoryStats.model_validate(await asyncio.to_thread(_stats)) diff --git a/app/api/endpoints/ocr.py b/app/api/endpoints/ocr.py new file mode 100644 index 0000000..f70d2e8 --- /dev/null +++ b/app/api/endpoints/ocr.py @@ -0,0 +1,233 @@ +"""OCR status, trigger, and approval endpoints.""" +from __future__ import annotations + +import asyncio +import json +import logging +from datetime import date +from pathlib import Path +from typing import Any + +from fastapi import APIRouter, BackgroundTasks, Depends, HTTPException + +from app.cloud_session import CloudUser, get_session +from app.core.config import settings +from app.db.session import get_store +from app.db.store import Store +from app.models.schemas.receipt import ( + ApproveOCRRequest, + ApproveOCRResponse, + ApprovedInventoryItem, 
+) +from app.services.expiration_predictor import ExpirationPredictor +from app.tiers import can_use +from app.utils.units import normalize_to_metric + +logger = logging.getLogger(__name__) +router = APIRouter() + + +# โ”€โ”€ Status โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +@router.get("/{receipt_id}/ocr/status") +async def get_ocr_status(receipt_id: int, store: Store = Depends(get_store)): + receipt = await asyncio.to_thread(store.get_receipt, receipt_id) + if not receipt: + raise HTTPException(status_code=404, detail="Receipt not found") + rd = await asyncio.to_thread( + store._fetch_one, + "SELECT id, processing_time FROM receipt_data WHERE receipt_id = ?", + (receipt_id,), + ) + return { + "receipt_id": receipt_id, + "status": receipt["status"], + "ocr_complete": rd is not None, + "ocr_enabled": settings.ENABLE_OCR, + } + + +# โ”€โ”€ Trigger โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +@router.post("/{receipt_id}/ocr/trigger") +async def trigger_ocr( + receipt_id: int, + background_tasks: BackgroundTasks, + store: Store = Depends(get_store), + session: CloudUser = Depends(get_session), +): + """Manually trigger OCR processing for an already-uploaded receipt.""" + if not can_use("receipt_ocr", session.tier, session.has_byok): + raise HTTPException( + status_code=403, + detail="Receipt OCR requires Paid tier or a configured local LLM backend (BYOK).", + ) + if not settings.ENABLE_OCR: + raise HTTPException(status_code=503, detail="OCR not enabled on this server.") + + receipt = await asyncio.to_thread(store.get_receipt, receipt_id) + if not receipt: + raise HTTPException(status_code=404, detail="Receipt not found") + if receipt["status"] == 
"processing": + raise HTTPException(status_code=409, detail="OCR already in progress for this receipt.") + + image_path = Path(receipt["original_path"]) + if not image_path.exists(): + raise HTTPException(status_code=404, detail="Image file not found on disk.") + + async def _run() -> None: + try: + await asyncio.to_thread(store.update_receipt_status, receipt_id, "processing") + from app.services.receipt_service import ReceiptService + await ReceiptService(store).process(receipt_id, image_path) + except Exception as exc: + logger.exception("OCR pipeline failed for receipt %s", receipt_id) + await asyncio.to_thread(store.update_receipt_status, receipt_id, "error", str(exc)) + + background_tasks.add_task(_run) + return {"receipt_id": receipt_id, "status": "queued"} + + +# โ”€โ”€ Data โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +@router.get("/{receipt_id}/ocr/data") +async def get_ocr_data(receipt_id: int, store: Store = Depends(get_store)): + rd = await asyncio.to_thread( + store._fetch_one, + "SELECT * FROM receipt_data WHERE receipt_id = ?", + (receipt_id,), + ) + if not rd: + raise HTTPException(status_code=404, detail="No OCR data for this receipt") + return rd + + +# โ”€โ”€ Approve โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +@router.post("/{receipt_id}/ocr/approve", response_model=ApproveOCRResponse) +async def approve_ocr_items( + receipt_id: int, + body: ApproveOCRRequest, + store: Store = Depends(get_store), + session: CloudUser = Depends(get_session), +): + """Commit reviewed OCR line items into inventory. + + Reads items from receipt_data, optionally filtered by item_indices, + and creates inventory entries. 
@router.post("/{receipt_id}/ocr/approve", response_model=ApproveOCRResponse)
async def approve_ocr_items(
    receipt_id: int,
    body: ApproveOCRRequest,
    store: Store = Depends(get_store),
    session: CloudUser = Depends(get_session),
):
    """Commit reviewed OCR line items into inventory.

    Reads items from receipt_data, optionally filtered by item_indices,
    and creates inventory entries. Receipt status moves to 'processed'.

    Raises:
        HTTPException 404: unknown receipt, or no OCR data stored for it.
        HTTPException 409: receipt is not in an approvable state.
        HTTPException 422: OCR data has no items, or indices out of range.
    """
    receipt = await asyncio.to_thread(store.get_receipt, receipt_id)
    if not receipt:
        raise HTTPException(status_code=404, detail="Receipt not found")
    if receipt["status"] not in ("staged", "processed"):
        raise HTTPException(
            status_code=409,
            detail=f"Receipt is not staged for approval (status={receipt['status']}).",
        )

    rd = await asyncio.to_thread(
        store._fetch_one,
        "SELECT items, transaction_date FROM receipt_data WHERE receipt_id = ?",
        (receipt_id,),
    )
    if not rd:
        raise HTTPException(status_code=404, detail="No OCR data found for this receipt.")

    raw_items: list[dict[str, Any]] = json.loads(rd["items"] or "[]")
    if not raw_items:
        raise HTTPException(status_code=422, detail="No items found in OCR data.")

    # Filter to requested indices, or use all.
    if body.item_indices is not None:
        invalid = [i for i in body.item_indices if i >= len(raw_items) or i < 0]
        if invalid:
            raise HTTPException(
                status_code=422,
                detail=f"Item indices out of range: {invalid} (receipt has {len(raw_items)} items)",
            )
        selected = [(i, raw_items[i]) for i in body.item_indices]
        skipped = len(raw_items) - len(selected)
    else:
        selected = list(enumerate(raw_items))
        skipped = 0

    # BUG FIX: rows from store._fetch_one are accessed by subscription
    # everywhere else in this module (rd["items"] just above); if rows are
    # sqlite3.Row objects they have no .get(), so the previous
    # rd.get("transaction_date") raised AttributeError at approval time.
    # NOTE(review): assumes _fetch_one returns a mapping-like row supporting
    # [] access — confirm against Store.
    created = await asyncio.to_thread(
        _commit_items, store, receipt_id, selected, body.location, rd["transaction_date"]
    )

    await asyncio.to_thread(store.update_receipt_status, receipt_id, "processed")

    return ApproveOCRResponse(
        receipt_id=receipt_id,
        approved=len(created),
        skipped=skipped,
        inventory_items=created,
    )
+ """ + predictor = ExpirationPredictor() + + purchase_date: date | None = None + if transaction_date: + try: + purchase_date = date.fromisoformat(transaction_date) + except ValueError: + logger.warning("Could not parse transaction_date %r", transaction_date) + + created: list[ApprovedInventoryItem] = [] + + for _idx, item in selected: + name = (item.get("name") or "").strip() + if not name: + logger.debug("Skipping nameless item at index %d", _idx) + continue + + category = (item.get("category") or "").strip() + quantity = float(item.get("quantity") or 1.0) + + raw_unit = (item.get("unit") or "each").strip() + metric_qty, base_unit = normalize_to_metric(quantity, raw_unit) + + product, _ = store.get_or_create_product( + name, + category=category or None, + source="receipt_ocr", + ) + + exp = predictor.predict_expiration( + category, location, + purchase_date=purchase_date, + product_name=name, + ) + + inv = store.add_inventory_item( + product["id"], + location, + quantity=metric_qty, + unit=base_unit, + receipt_id=receipt_id, + purchase_date=str(purchase_date) if purchase_date else None, + expiration_date=str(exp) if exp else None, + source="receipt_ocr", + ) + + created.append(ApprovedInventoryItem( + inventory_id=inv["id"], + product_name=name, + quantity=quantity, + location=location, + expiration_date=str(exp) if exp else None, + )) + + return created diff --git a/app/api/endpoints/receipts.py b/app/api/endpoints/receipts.py new file mode 100644 index 0000000..891c9b7 --- /dev/null +++ b/app/api/endpoints/receipts.py @@ -0,0 +1,110 @@ +"""Receipt upload, OCR, and quality endpoints.""" + +from __future__ import annotations + +import asyncio +import uuid +from pathlib import Path +from typing import List + +import aiofiles +from fastapi import APIRouter, BackgroundTasks, Depends, File, HTTPException, UploadFile + +from app.cloud_session import CloudUser, get_session +from app.core.config import settings +from app.db.session import get_store +from app.db.store 
async def _save_upload(file: UploadFile, dest_dir: Path) -> Path:
    """Persist an uploaded file under dest_dir with a collision-proof name.

    SECURITY FIX: the client-supplied filename is untrusted; strip any
    directory components (e.g. '../../etc/passwd') so the write can never
    escape dest_dir. The uuid4 prefix guarantees uniqueness regardless.

    Returns the path the bytes were written to.
    """
    safe_name = Path(file.filename or "upload").name
    dest = dest_dir / f"{uuid.uuid4()}_{safe_name}"
    async with aiofiles.open(dest, "wb") as f:
        await f.write(await file.read())
    return dest
@router.post("/batch", response_model=List[ReceiptResponse], status_code=201)
async def upload_receipts_batch(
    background_tasks: BackgroundTasks,
    files: List[UploadFile] = File(...),
    store: Store = Depends(get_store),
    session: CloudUser = Depends(get_session),
):
    """Upload several receipts in one request.

    Each file is saved and registered sequentially; OCR is queued per
    receipt only when the server has OCR enabled AND the user's tier
    (or BYOK backend) allows it — checked once for the whole batch.
    """
    settings.ensure_dirs()
    ocr_allowed = settings.ENABLE_OCR and can_use("receipt_ocr", session.tier, session.has_byok)

    responses = []
    for upload in files:
        stored_path = await _save_upload(upload, settings.UPLOAD_DIR)
        record = await asyncio.to_thread(
            store.create_receipt, upload.filename, str(stored_path)
        )
        if ocr_allowed:
            background_tasks.add_task(_process_receipt_ocr, record["id"], stored_path, store)
        responses.append(ReceiptResponse.model_validate(record))

    return responses
async def _process_receipt_ocr(receipt_id: int, image_path: Path, store: Store) -> None:
    """Background task: run OCR pipeline on an uploaded receipt.

    Marks the receipt 'processing' up front; on any failure, records the
    error message on the receipt row so the task runner never crashes.
    """
    # Local import: this module defines no logger of its own, and adding one
    # here keeps this fix self-contained.
    import logging
    try:
        await asyncio.to_thread(store.update_receipt_status, receipt_id, "processing")
        from app.services.receipt_service import ReceiptService
        service = ReceiptService(store)
        await service.process(receipt_id, image_path)
    except Exception as exc:
        # BUG FIX: the failure was stored on the receipt but never logged,
        # leaving no traceback anywhere for debugging. Log it, mirroring the
        # identical handler in app/api/endpoints/ocr.py::trigger_ocr.
        logging.getLogger(__name__).exception(
            "OCR pipeline failed for receipt %s", receipt_id
        )
        await asyncio.to_thread(
            store.update_receipt_status, receipt_id, "error", str(exc)
        )
@dataclass(frozen=True)
class CloudUser:
    """Resolved identity for the current request (immutable).

    Local mode produces a synthetic fully-privileged user; cloud mode builds
    this from the validated cf_session JWT plus a Heimdall tier lookup.
    """
    user_id: str  # Directus UUID, or "local" when CLOUD_MODE is off
    tier: str  # free | paid | premium | ultra | local
    db: Path  # per-user SQLite DB path (local mode: the shared KIWI_DB)
    has_byok: bool  # True if a configured LLM backend is present in llm.yaml
def validate_session_jwt(token: str) -> str:
    """Validate a cf_session JWT and return the Directus user_id.

    The token must be HS256-signed with DIRECTUS_JWT_SECRET and carry both
    'id' and 'exp' claims (enforced via pyjwt's `require` option).

    Raises:
        HTTPException 401: invalid signature, expired/malformed token, or a
            required claim missing.
    """
    try:
        payload = pyjwt.decode(
            token,
            DIRECTUS_JWT_SECRET,
            algorithms=["HS256"],
            options={"require": ["id", "exp"]},
        )
        return payload["id"]
    except pyjwt.PyJWTError as exc:
        # Narrowed from bare `except Exception`: only token problems should
        # map to 401 — genuine programming errors must surface, not be
        # masked as auth failures. `from None` keeps the token internals
        # out of the response traceback chain.
        log.debug("JWT validation failed: %s", exc)
        raise HTTPException(status_code=401, detail="Session invalid or expired") from None
def _detect_byok(config_path: Path = _LLM_CONFIG_PATH) -> bool:
    """Return True if at least one enabled non-vision LLM backend is configured.

    Reads the same llm.yaml that LLMRouter uses. Local (Ollama, vLLM) and
    API-key backends both count — the policy is "user is supplying compute",
    regardless of where that compute lives. Any read or parse failure counts
    as "no BYOK".
    """
    try:
        with open(config_path) as fh:
            parsed = yaml.safe_load(fh) or {}
        for backend in parsed.get("backends", {}).values():
            enabled = backend.get("enabled", True)
            is_vision = backend.get("type") == "vision_service"
            if enabled and not is_vision:
                return True
        return False
    except Exception:
        return False
+ """ + has_byok = _detect_byok() + + if not CLOUD_MODE: + return CloudUser(user_id="local", tier="local", db=_LOCAL_KIWI_DB, has_byok=has_byok) + + raw_header = ( + request.headers.get("x-cf-session", "") + or request.headers.get("cookie", "") + ) + if not raw_header: + raise HTTPException(status_code=401, detail="Not authenticated") + + token = _extract_session_token(raw_header) + if not token: + raise HTTPException(status_code=401, detail="Not authenticated") + + user_id = validate_session_jwt(token) + _ensure_provisioned(user_id) + tier = _fetch_cloud_tier(user_id) + return CloudUser(user_id=user_id, tier=tier, db=_user_db_path(user_id), has_byok=has_byok) + + +def require_tier(min_tier: str): + """Dependency factory โ€” raises 403 if tier is below min_tier.""" + min_idx = TIERS.index(min_tier) + + def _check(session: CloudUser = Depends(get_session)) -> CloudUser: + if session.tier == "local": + return session + try: + if TIERS.index(session.tier) < min_idx: + raise HTTPException( + status_code=403, + detail=f"This feature requires {min_tier} tier or above.", + ) + except ValueError: + raise HTTPException(status_code=403, detail="Unknown tier.") + return session + + return _check diff --git a/app/core/__init__.py b/app/core/__init__.py new file mode 100644 index 0000000..778fa17 --- /dev/null +++ b/app/core/__init__.py @@ -0,0 +1,5 @@ +# app/core/__init__.py +""" +Core components for Kiwi. +Contains configuration, dependencies, and other core functionality. +""" \ No newline at end of file diff --git a/app/core/config.py b/app/core/config.py new file mode 100644 index 0000000..1f8015d --- /dev/null +++ b/app/core/config.py @@ -0,0 +1,59 @@ +""" +Kiwi application config. +Uses circuitforge-core for env loading; no pydantic-settings dependency. 
+""" +from __future__ import annotations + +import os +from pathlib import Path + +from circuitforge_core.config.settings import load_env + +# Load .env from the repo root (two levels up from app/core/) +_ROOT = Path(__file__).resolve().parents[2] +load_env(_ROOT / ".env") + + +class Settings: + # API + API_PREFIX: str = os.environ.get("API_PREFIX", "/api/v1") + PROJECT_NAME: str = "Kiwi โ€” Pantry Intelligence" + + # CORS + CORS_ORIGINS: list[str] = [ + o.strip() + for o in os.environ.get("CORS_ORIGINS", "").split(",") + if o.strip() + ] + + # File storage + DATA_DIR: Path = Path(os.environ.get("DATA_DIR", str(_ROOT / "data"))) + UPLOAD_DIR: Path = DATA_DIR / "uploads" + PROCESSING_DIR: Path = DATA_DIR / "processing" + ARCHIVE_DIR: Path = DATA_DIR / "archive" + + # Database + DB_PATH: Path = Path(os.environ.get("DB_PATH", str(DATA_DIR / "kiwi.db"))) + + # Processing + MAX_CONCURRENT_JOBS: int = int(os.environ.get("MAX_CONCURRENT_JOBS", "4")) + USE_GPU: bool = os.environ.get("USE_GPU", "true").lower() in ("1", "true", "yes") + GPU_MEMORY_LIMIT: int = int(os.environ.get("GPU_MEMORY_LIMIT", "6144")) + + # Quality + MIN_QUALITY_SCORE: float = float(os.environ.get("MIN_QUALITY_SCORE", "50.0")) + + # Feature flags + ENABLE_OCR: bool = os.environ.get("ENABLE_OCR", "false").lower() in ("1", "true", "yes") + + # Runtime + DEBUG: bool = os.environ.get("DEBUG", "false").lower() in ("1", "true", "yes") + CLOUD_MODE: bool = os.environ.get("CLOUD_MODE", "false").lower() in ("1", "true", "yes") + DEMO_MODE: bool = os.environ.get("DEMO_MODE", "false").lower() in ("1", "true", "yes") + + def ensure_dirs(self) -> None: + for d in (self.UPLOAD_DIR, self.PROCESSING_DIR, self.ARCHIVE_DIR): + d.mkdir(parents=True, exist_ok=True) + + +settings = Settings() diff --git a/app/db/__init__.py b/app/db/__init__.py new file mode 100644 index 0000000..b8b7c3e --- /dev/null +++ b/app/db/__init__.py @@ -0,0 +1 @@ +# DB package โ€” use app.db.store.Store for all database access diff --git 
-- Migration 001: receipts + quality assessments (ported from Alembic f31d9044277e)
--
-- receipts: one row per uploaded receipt image. `status` tracks the OCR
-- pipeline lifecycle; `metadata` holds free-form JSON stored as TEXT.
-- quality_assessments: exactly one per receipt (UNIQUE receipt_id),
-- cascade-deleted with its receipt.

CREATE TABLE IF NOT EXISTS receipts (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    filename TEXT NOT NULL,
    original_path TEXT NOT NULL,
    processed_path TEXT,                       -- set after preprocessing, may stay NULL
    status TEXT NOT NULL DEFAULT 'uploaded'
        CHECK (status IN ('uploaded', 'processing', 'processed', 'error')),
    error TEXT,                                -- last pipeline error message, if any
    metadata TEXT NOT NULL DEFAULT '{}',       -- JSON blob as TEXT
    created_at TEXT NOT NULL DEFAULT (datetime('now')),
    updated_at TEXT NOT NULL DEFAULT (datetime('now'))
);

CREATE INDEX IF NOT EXISTS idx_receipts_status ON receipts (status);
CREATE INDEX IF NOT EXISTS idx_receipts_created_at ON receipts (created_at DESC);

CREATE TABLE IF NOT EXISTS quality_assessments (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    receipt_id INTEGER NOT NULL UNIQUE         -- 1:1 with receipts
        REFERENCES receipts (id) ON DELETE CASCADE,
    overall_score REAL NOT NULL CHECK (overall_score >= 0 AND overall_score <= 100),
    is_acceptable INTEGER NOT NULL DEFAULT 0,  -- boolean as 0/1
    metrics TEXT NOT NULL DEFAULT '{}',        -- JSON: per-metric scores
    improvement_suggestions TEXT NOT NULL DEFAULT '[]',  -- JSON array
    created_at TEXT NOT NULL DEFAULT (datetime('now'))
);

CREATE INDEX IF NOT EXISTS idx_quality_receipt_id ON quality_assessments (receipt_id);
CREATE INDEX IF NOT EXISTS idx_quality_score ON quality_assessments (overall_score);
CREATE INDEX IF NOT EXISTS idx_quality_acceptable ON quality_assessments (is_acceptable);
-- Migration 002: products + inventory items (ported from Alembic 8fc1bf4e7a91)
--
-- products: the catalog (one row per distinct product, optionally keyed by
-- barcode). inventory_items: individual on-hand units referencing a product.
-- NOTE: the two tables deliberately have DIFFERENT `source` vocabularies —
-- products allow 'receipt_ocr', inventory_items allow 'receipt'.

CREATE TABLE IF NOT EXISTS products (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    barcode TEXT UNIQUE,                        -- UPC/EAN; NULL for OCR/manual products
    name TEXT NOT NULL,
    brand TEXT,
    category TEXT,
    description TEXT,
    image_url TEXT,
    nutrition_data TEXT NOT NULL DEFAULT '{}',  -- JSON blob as TEXT
    source TEXT NOT NULL DEFAULT 'manual'
        CHECK (source IN ('openfoodfacts', 'manual', 'receipt_ocr')),
    source_data TEXT NOT NULL DEFAULT '{}',     -- raw provider payload, JSON
    created_at TEXT NOT NULL DEFAULT (datetime('now')),
    updated_at TEXT NOT NULL DEFAULT (datetime('now'))
);

CREATE INDEX IF NOT EXISTS idx_products_barcode ON products (barcode);
CREATE INDEX IF NOT EXISTS idx_products_name ON products (name);
CREATE INDEX IF NOT EXISTS idx_products_category ON products (category);
CREATE INDEX IF NOT EXISTS idx_products_source ON products (source);

CREATE TABLE IF NOT EXISTS inventory_items (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    product_id INTEGER NOT NULL
        REFERENCES products (id) ON DELETE RESTRICT,  -- keep catalog rows with stock
    receipt_id INTEGER
        REFERENCES receipts (id) ON DELETE SET NULL,  -- provenance, not ownership
    quantity REAL NOT NULL DEFAULT 1 CHECK (quantity > 0),
    unit TEXT NOT NULL DEFAULT 'count',
    location TEXT NOT NULL,
    sublocation TEXT,
    purchase_date TEXT,                         -- ISO date
    expiration_date TEXT,                       -- ISO date, possibly predicted
    status TEXT NOT NULL DEFAULT 'available'
        CHECK (status IN ('available', 'consumed', 'expired', 'discarded')),
    consumed_at TEXT,
    notes TEXT,
    source TEXT NOT NULL DEFAULT 'manual'
        CHECK (source IN ('barcode_scan', 'manual', 'receipt')),
    created_at TEXT NOT NULL DEFAULT (datetime('now')),
    updated_at TEXT NOT NULL DEFAULT (datetime('now'))
);

CREATE INDEX IF NOT EXISTS idx_inventory_product ON inventory_items (product_id);
CREATE INDEX IF NOT EXISTS idx_inventory_receipt ON inventory_items (receipt_id);
CREATE INDEX IF NOT EXISTS idx_inventory_status ON inventory_items (status);
CREATE INDEX IF NOT EXISTS idx_inventory_location ON inventory_items (location);
CREATE INDEX IF NOT EXISTS idx_inventory_expiration ON inventory_items (expiration_date);
CREATE INDEX IF NOT EXISTS idx_inventory_created ON inventory_items (created_at DESC);
-- Partial index: the hot query path is "available items by location".
CREATE INDEX IF NOT EXISTS idx_inventory_active_loc ON inventory_items (status, location)
    WHERE status = 'available';

-- Migration 003: OCR receipt data table (ported from Alembic 54cddaf4f4e2)
--
-- One row per OCR run, 1:1 with its receipt (UNIQUE receipt_id).
-- Merchant/transaction fields are nullable — OCR may not find them.

CREATE TABLE IF NOT EXISTS receipt_data (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    receipt_id INTEGER NOT NULL UNIQUE
        REFERENCES receipts (id) ON DELETE CASCADE,
    merchant_name TEXT,
    merchant_address TEXT,
    merchant_phone TEXT,
    merchant_email TEXT,
    merchant_website TEXT,
    merchant_tax_id TEXT,
    transaction_date TEXT,                      -- ISO date string
    transaction_time TEXT,
    receipt_number TEXT,
    register_number TEXT,
    cashier_name TEXT,
    transaction_id TEXT,
    items TEXT NOT NULL DEFAULT '[]',           -- JSON array of line items
    subtotal REAL,
    tax REAL,
    discount REAL,
    tip REAL,
    total REAL,
    payment_method TEXT,
    amount_paid REAL,
    change_given REAL,
    raw_text TEXT,                              -- full OCR text for re-processing
    confidence_scores TEXT NOT NULL DEFAULT '{}',  -- JSON: per-field confidences
    warnings TEXT NOT NULL DEFAULT '[]',        -- JSON array
    processing_time REAL,                       -- seconds
    created_at TEXT NOT NULL DEFAULT (datetime('now')),
    updated_at TEXT NOT NULL DEFAULT (datetime('now'))
);

CREATE INDEX IF NOT EXISTS idx_receipt_data_receipt_id ON receipt_data (receipt_id);
CREATE INDEX IF NOT EXISTS idx_receipt_data_merchant ON receipt_data (merchant_name);
CREATE INDEX IF NOT EXISTS idx_receipt_data_date ON receipt_data (transaction_date);
-- Migration 004: tags + product_tags join table (ported from Alembic 14f688cde2ca)
--
-- tags: user- and system-defined labels; `category` groups them
-- (food_type/dietary/allergen/custom) and correctly permits NULL via an
-- explicit `OR category IS NULL` (a bare `IN (..., NULL)` would be a no-op).
-- product_tags: many-to-many join, cascade-deleted from either side.

CREATE TABLE IF NOT EXISTS tags (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    name TEXT NOT NULL UNIQUE,
    slug TEXT NOT NULL UNIQUE,                  -- URL-safe variant of name
    description TEXT,
    color TEXT,                                 -- hex color for the UI, e.g. #FF5733
    category TEXT CHECK (category IN ('food_type', 'dietary', 'allergen', 'custom') OR category IS NULL),
    created_at TEXT NOT NULL DEFAULT (datetime('now')),
    updated_at TEXT NOT NULL DEFAULT (datetime('now'))
);

CREATE INDEX IF NOT EXISTS idx_tags_name ON tags (name);
CREATE INDEX IF NOT EXISTS idx_tags_slug ON tags (slug);
CREATE INDEX IF NOT EXISTS idx_tags_category ON tags (category);

CREATE TABLE IF NOT EXISTS product_tags (
    product_id INTEGER NOT NULL REFERENCES products (id) ON DELETE CASCADE,
    tag_id INTEGER NOT NULL REFERENCES tags (id) ON DELETE CASCADE,
    created_at TEXT NOT NULL DEFAULT (datetime('now')),
    PRIMARY KEY (product_id, tag_id)
);
PRAGMA foreign_keys = OFF;

CREATE TABLE receipts_new (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    filename TEXT NOT NULL,
    original_path TEXT NOT NULL,
    -- BUG FIX: processed_path has existed on receipts since migration 001
    -- but was omitted here, so the `INSERT ... SELECT` below failed with a
    -- column-count mismatch — and would have silently dropped the column
    -- even if SQLite had allowed it. Carry it over.
    processed_path TEXT,
    status TEXT NOT NULL DEFAULT 'uploaded'
        CHECK (status IN (
            'uploaded',
            'processing',
            'processed',
            'staged',
            'low_quality',
            'error'
        )),
    error TEXT,
    metadata TEXT NOT NULL DEFAULT '{}',
    created_at TEXT NOT NULL DEFAULT (datetime('now')),
    updated_at TEXT NOT NULL DEFAULT (datetime('now'))
);

-- Explicit column lists: robust against column-order drift between schemas.
INSERT INTO receipts_new (id, filename, original_path, processed_path, status,
                          error, metadata, created_at, updated_at)
SELECT id, filename, original_path, processed_path, status,
       error, metadata, created_at, updated_at
FROM receipts;

DROP TABLE receipts;

ALTER TABLE receipts_new RENAME TO receipts;

-- Recreate the indexes dropped with the old table.
CREATE INDEX IF NOT EXISTS idx_receipts_status ON receipts (status);
CREATE INDEX IF NOT EXISTS idx_receipts_created_at ON receipts (created_at DESC);

PRAGMA foreign_keys = ON;
+""" +# fmt: off # noqa โ€” dead file, not linted + +from sqlalchemy import ( + Column, + String, + Text, + Boolean, + Numeric, + DateTime, + Date, + ForeignKey, + CheckConstraint, + Index, + Table, +) +from sqlalchemy.dialects.postgresql import UUID, JSONB +from sqlalchemy.orm import relationship +from sqlalchemy.sql import func +from datetime import datetime +import uuid + +from app.db.base import Base + + +# Association table for many-to-many relationship between products and tags +product_tags = Table( + "product_tags", + Base.metadata, + Column( + "product_id", + UUID(as_uuid=True), + ForeignKey("products.id", ondelete="CASCADE"), + primary_key=True, + ), + Column( + "tag_id", + UUID(as_uuid=True), + ForeignKey("tags.id", ondelete="CASCADE"), + primary_key=True, + ), + Column( + "created_at", + DateTime(timezone=True), + nullable=False, + server_default=func.now(), + ), +) + + +class Receipt(Base): + """ + Receipt model - stores receipt metadata and processing status. + + Corresponds to the 'receipts' table in the database schema. 
+ """ + + __tablename__ = "receipts" + + # Primary Key + id = Column( + UUID(as_uuid=True), + primary_key=True, + default=uuid.uuid4, + server_default=func.gen_random_uuid(), + ) + + # File Information + filename = Column(String(255), nullable=False) + original_path = Column(Text, nullable=False) + processed_path = Column(Text, nullable=True) + + # Processing Status + status = Column( + String(50), + nullable=False, + default="uploaded", + server_default="uploaded", + ) + error = Column(Text, nullable=True) + + # Metadata (JSONB for flexibility) + # Using 'receipt_metadata' to avoid conflict with SQLAlchemy's metadata attribute + receipt_metadata = Column("metadata", JSONB, nullable=False, default={}, server_default="{}") + + # Timestamps + created_at = Column( + DateTime(timezone=True), + nullable=False, + default=datetime.utcnow, + server_default=func.now(), + ) + updated_at = Column( + DateTime(timezone=True), + nullable=False, + default=datetime.utcnow, + server_default=func.now(), + onupdate=func.now(), + ) + + # Relationships + quality_assessment = relationship( + "QualityAssessment", + back_populates="receipt", + uselist=False, # One-to-one relationship + cascade="all, delete-orphan", + ) + receipt_data = relationship( + "ReceiptData", + back_populates="receipt", + uselist=False, # One-to-one relationship + cascade="all, delete-orphan", + ) + + # Constraints and Indexes + __table_args__ = ( + CheckConstraint( + "status IN ('uploaded', 'processing', 'processed', 'error')", + name="receipts_status_check", + ), + # Indexes will be created after table definition + ) + + def __repr__(self) -> str: + return f"" + + +# Create indexes for Receipt table +Index("idx_receipts_status", Receipt.status) +Index("idx_receipts_created_at", Receipt.created_at.desc()) +Index("idx_receipts_metadata", Receipt.receipt_metadata, postgresql_using="gin") + + +class QualityAssessment(Base): + """ + Quality Assessment model - stores quality evaluation results. 
+ + One-to-one relationship with Receipt. + Corresponds to the 'quality_assessments' table in the database schema. + """ + + __tablename__ = "quality_assessments" + + # Primary Key + id = Column( + UUID(as_uuid=True), + primary_key=True, + default=uuid.uuid4, + server_default=func.gen_random_uuid(), + ) + + # Foreign Key (1:1 with receipts) + receipt_id = Column( + UUID(as_uuid=True), + ForeignKey("receipts.id", ondelete="CASCADE"), + nullable=False, + unique=True, + ) + + # Quality Scores + overall_score = Column(Numeric(5, 2), nullable=False) + is_acceptable = Column(Boolean, nullable=False, default=False, server_default="false") + + # Detailed Metrics (JSONB) + metrics = Column(JSONB, nullable=False, default={}, server_default="{}") + + # Improvement Suggestions + improvement_suggestions = Column(JSONB, nullable=False, default=[], server_default="[]") + + # Timestamp + created_at = Column( + DateTime(timezone=True), + nullable=False, + default=datetime.utcnow, + server_default=func.now(), + ) + + # Relationships + receipt = relationship("Receipt", back_populates="quality_assessment") + + # Constraints + __table_args__ = ( + CheckConstraint( + "overall_score >= 0 AND overall_score <= 100", + name="quality_assessments_score_range", + ), + Index("idx_quality_assessments_receipt_id", "receipt_id"), + Index("idx_quality_assessments_score", "overall_score"), + Index("idx_quality_assessments_acceptable", "is_acceptable"), + Index("idx_quality_assessments_metrics", "metrics", postgresql_using="gin"), + ) + + def __repr__(self) -> str: + return ( + f"" + ) + + +class Product(Base): + """ + Product model - stores product catalog information. + + Products can come from: + - Barcode scans (OpenFoodFacts API) + - Manual user entries + - Future: OCR extraction from receipts + + One product can have many inventory items. 
+ """ + + __tablename__ = "products" + + # Primary Key + id = Column( + UUID(as_uuid=True), + primary_key=True, + default=uuid.uuid4, + server_default=func.gen_random_uuid(), + ) + + # Identifiers + barcode = Column(String(50), unique=True, nullable=True) # UPC/EAN code + + # Product Information + name = Column(String(500), nullable=False) + brand = Column(String(255), nullable=True) + category = Column(String(255), nullable=True) + + # Additional Details + description = Column(Text, nullable=True) + image_url = Column(Text, nullable=True) + + # Nutritional Data (JSONB for flexibility) + nutrition_data = Column(JSONB, nullable=False, default={}, server_default="{}") + + # Source Tracking + source = Column( + String(50), + nullable=False, + default="manual", + server_default="manual", + ) # 'openfoodfacts', 'manual', 'receipt_ocr' + source_data = Column(JSONB, nullable=False, default={}, server_default="{}") + + # Timestamps + created_at = Column( + DateTime(timezone=True), + nullable=False, + default=datetime.utcnow, + server_default=func.now(), + ) + updated_at = Column( + DateTime(timezone=True), + nullable=False, + default=datetime.utcnow, + server_default=func.now(), + onupdate=func.now(), + ) + + # Relationships + inventory_items = relationship( + "InventoryItem", + back_populates="product", + cascade="all, delete-orphan", + ) + tags = relationship( + "Tag", + secondary=product_tags, + back_populates="products", + ) + + # Constraints + __table_args__ = ( + CheckConstraint( + "source IN ('openfoodfacts', 'manual', 'receipt_ocr')", + name="products_source_check", + ), + ) + + def __repr__(self) -> str: + return f"" + + +class Tag(Base): + """ + Tag model - stores tags/labels for organizing products. + + Tags can be used to categorize products by: + - Food type (dairy, meat, vegetables, fruit, etc.) + - Dietary restrictions (vegan, gluten-free, kosher, halal, etc.) + - Allergens (contains nuts, contains dairy, etc.) 
+ - Custom user categories + + Many-to-many relationship with products. + """ + + __tablename__ = "tags" + + # Primary Key + id = Column( + UUID(as_uuid=True), + primary_key=True, + default=uuid.uuid4, + server_default=func.gen_random_uuid(), + ) + + # Tag Information + name = Column(String(100), nullable=False, unique=True) + slug = Column(String(100), nullable=False, unique=True) # URL-safe version + description = Column(Text, nullable=True) + color = Column(String(7), nullable=True) # Hex color code for UI (#FF5733) + + # Category (optional grouping) + category = Column(String(50), nullable=True) # 'food_type', 'dietary', 'allergen', 'custom' + + # Timestamps + created_at = Column( + DateTime(timezone=True), + nullable=False, + default=datetime.utcnow, + server_default=func.now(), + ) + updated_at = Column( + DateTime(timezone=True), + nullable=False, + default=datetime.utcnow, + server_default=func.now(), + onupdate=func.now(), + ) + + # Relationships + products = relationship( + "Product", + secondary=product_tags, + back_populates="tags", + ) + + # Constraints + __table_args__ = ( + CheckConstraint( + "category IN ('food_type', 'dietary', 'allergen', 'custom', NULL)", + name="tags_category_check", + ), + ) + + def __repr__(self) -> str: + return f"" + + +class InventoryItem(Base): + """ + Inventory Item model - tracks individual items in user's inventory. + + Links to a Product and adds user-specific information like + quantity, location, expiration date, etc. 
+ """ + + __tablename__ = "inventory_items" + + # Primary Key + id = Column( + UUID(as_uuid=True), + primary_key=True, + default=uuid.uuid4, + server_default=func.gen_random_uuid(), + ) + + # Foreign Keys + product_id = Column( + UUID(as_uuid=True), + ForeignKey("products.id", ondelete="RESTRICT"), + nullable=False, + ) + receipt_id = Column( + UUID(as_uuid=True), + ForeignKey("receipts.id", ondelete="SET NULL"), + nullable=True, + ) + + # Quantity + quantity = Column(Numeric(10, 2), nullable=False, default=1) + unit = Column(String(50), nullable=False, default="count", server_default="count") + + # Location + location = Column(String(100), nullable=False) + sublocation = Column(String(255), nullable=True) + + # Dates + purchase_date = Column(Date, nullable=True) + expiration_date = Column(Date, nullable=True) + + # Status + status = Column( + String(50), + nullable=False, + default="available", + server_default="available", + ) + consumed_at = Column(DateTime(timezone=True), nullable=True) + + # Notes + notes = Column(Text, nullable=True) + + # Source Tracking + source = Column( + String(50), + nullable=False, + default="manual", + server_default="manual", + ) # 'barcode_scan', 'manual', 'receipt' + + # Timestamps + created_at = Column( + DateTime(timezone=True), + nullable=False, + default=datetime.utcnow, + server_default=func.now(), + ) + updated_at = Column( + DateTime(timezone=True), + nullable=False, + default=datetime.utcnow, + server_default=func.now(), + onupdate=func.now(), + ) + + # Relationships + product = relationship("Product", back_populates="inventory_items") + receipt = relationship("Receipt") + + # Constraints + __table_args__ = ( + CheckConstraint( + "status IN ('available', 'consumed', 'expired', 'discarded')", + name="inventory_items_status_check", + ), + CheckConstraint( + "source IN ('barcode_scan', 'manual', 'receipt')", + name="inventory_items_source_check", + ), + CheckConstraint( + "quantity > 0", + 
name="inventory_items_quantity_positive", + ), + ) + + def __repr__(self) -> str: + return ( + f"" + ) + + +# Create indexes for Product table +Index("idx_products_barcode", Product.barcode) +Index("idx_products_name", Product.name) +Index("idx_products_category", Product.category) +Index("idx_products_source", Product.source) +Index("idx_products_nutrition_data", Product.nutrition_data, postgresql_using="gin") + +# Create indexes for Tag table +Index("idx_tags_name", Tag.name) +Index("idx_tags_slug", Tag.slug) +Index("idx_tags_category", Tag.category) + +# Create indexes for InventoryItem table +Index("idx_inventory_items_product", InventoryItem.product_id) +Index("idx_inventory_items_receipt", InventoryItem.receipt_id) +Index("idx_inventory_items_status", InventoryItem.status) +Index("idx_inventory_items_location", InventoryItem.location) +Index("idx_inventory_items_expiration", InventoryItem.expiration_date) +Index("idx_inventory_items_created", InventoryItem.created_at.desc()) +# Composite index for common query: active items by location +Index( + "idx_inventory_items_active_by_location", + InventoryItem.status, + InventoryItem.location, + postgresql_where=(InventoryItem.status == "available"), +) + + +class ReceiptData(Base): + """ + Receipt Data model - stores OCR-extracted structured data from receipts. + + One-to-one relationship with Receipt. + Stores merchant info, transaction details, line items, and totals. 
+ """ + + __tablename__ = "receipt_data" + + # Primary Key + id = Column( + UUID(as_uuid=True), + primary_key=True, + default=uuid.uuid4, + server_default=func.gen_random_uuid(), + ) + + # Foreign Key (1:1 with receipts) + receipt_id = Column( + UUID(as_uuid=True), + ForeignKey("receipts.id", ondelete="CASCADE"), + nullable=False, + unique=True, + ) + + # Merchant Information + merchant_name = Column(String(500), nullable=True) + merchant_address = Column(Text, nullable=True) + merchant_phone = Column(String(50), nullable=True) + merchant_email = Column(String(255), nullable=True) + merchant_website = Column(String(255), nullable=True) + merchant_tax_id = Column(String(100), nullable=True) + + # Transaction Information + transaction_date = Column(Date, nullable=True) + transaction_time = Column(String(20), nullable=True) # Store as string for flexibility + receipt_number = Column(String(100), nullable=True) + register_number = Column(String(50), nullable=True) + cashier_name = Column(String(255), nullable=True) + transaction_id = Column(String(100), nullable=True) + + # Line Items (JSONB array) + items = Column(JSONB, nullable=False, default=[], server_default="[]") + + # Financial Totals + subtotal = Column(Numeric(12, 2), nullable=True) + tax = Column(Numeric(12, 2), nullable=True) + discount = Column(Numeric(12, 2), nullable=True) + tip = Column(Numeric(12, 2), nullable=True) + total = Column(Numeric(12, 2), nullable=True) + payment_method = Column(String(100), nullable=True) + amount_paid = Column(Numeric(12, 2), nullable=True) + change_given = Column(Numeric(12, 2), nullable=True) + + # OCR Metadata + raw_text = Column(Text, nullable=True) # Full OCR text output + confidence_scores = Column(JSONB, nullable=False, default={}, server_default="{}") + warnings = Column(JSONB, nullable=False, default=[], server_default="[]") + processing_time = Column(Numeric(8, 3), nullable=True) # seconds + + # Timestamps + created_at = Column( + DateTime(timezone=True), + 
nullable=False, + default=datetime.utcnow, + server_default=func.now(), + ) + updated_at = Column( + DateTime(timezone=True), + nullable=False, + default=datetime.utcnow, + server_default=func.now(), + onupdate=func.now(), + ) + + # Relationships + receipt = relationship("Receipt", back_populates="receipt_data") + + def __repr__(self) -> str: + return ( + f"" + ) + + +# Create indexes for ReceiptData table +Index("idx_receipt_data_receipt_id", ReceiptData.receipt_id) +Index("idx_receipt_data_merchant", ReceiptData.merchant_name) +Index("idx_receipt_data_date", ReceiptData.transaction_date) +Index("idx_receipt_data_items", ReceiptData.items, postgresql_using="gin") +Index("idx_receipt_data_confidence", ReceiptData.confidence_scores, postgresql_using="gin") diff --git a/app/db/session.py b/app/db/session.py new file mode 100644 index 0000000..ea70682 --- /dev/null +++ b/app/db/session.py @@ -0,0 +1,23 @@ +""" +FastAPI dependency that provides a Store instance per request. + +Local mode: opens a Store at settings.DB_PATH. +Cloud mode: opens a Store at the per-user DB path from the CloudUser session. +""" +from __future__ import annotations + +from typing import Generator + +from fastapi import Depends + +from app.cloud_session import CloudUser, get_session +from app.db.store import Store + + +def get_store(session: CloudUser = Depends(get_session)) -> Generator[Store, None, None]: + """FastAPI dependency โ€” yields a Store for the current user, closes on completion.""" + store = Store(session.db) + try: + yield store + finally: + store.close() diff --git a/app/db/store.py b/app/db/store.py new file mode 100644 index 0000000..9a0e366 --- /dev/null +++ b/app/db/store.py @@ -0,0 +1,262 @@ +""" +SQLite data store for Kiwi. +Uses circuitforge-core for connection management and migrations. 
+""" +from __future__ import annotations + +import json +import sqlite3 +from pathlib import Path +from typing import Any + +from circuitforge_core.db.base import get_connection +from circuitforge_core.db.migrations import run_migrations + +MIGRATIONS_DIR = Path(__file__).parent / "migrations" + + +class Store: + def __init__(self, db_path: Path, key: str = "") -> None: + self.conn: sqlite3.Connection = get_connection(db_path, key) + self.conn.execute("PRAGMA journal_mode=WAL") + self.conn.execute("PRAGMA foreign_keys=ON") + run_migrations(self.conn, MIGRATIONS_DIR) + + def close(self) -> None: + self.conn.close() + + # โ”€โ”€ helpers โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + + def _row_to_dict(self, row: sqlite3.Row) -> dict[str, Any]: + d = dict(row) + # Deserialise any TEXT columns that contain JSON + for key in ("metadata", "nutrition_data", "source_data", "items", + "metrics", "improvement_suggestions", "confidence_scores", + "warnings"): + if key in d and isinstance(d[key], str): + try: + d[key] = json.loads(d[key]) + except (json.JSONDecodeError, TypeError): + pass + return d + + def _fetch_one(self, sql: str, params: tuple = ()) -> dict[str, Any] | None: + self.conn.row_factory = sqlite3.Row + row = self.conn.execute(sql, params).fetchone() + return self._row_to_dict(row) if row else None + + def _fetch_all(self, sql: str, params: tuple = ()) -> list[dict[str, Any]]: + self.conn.row_factory = sqlite3.Row + rows = self.conn.execute(sql, params).fetchall() + return [self._row_to_dict(r) for r in rows] + + def _dump(self, value: Any) -> str: + """Serialise a Python object to a JSON string for storage.""" + return json.dumps(value) + + # โ”€โ”€ receipts โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ 
+ + def _insert_returning(self, sql: str, params: tuple = ()) -> dict[str, Any]: + """Execute an INSERT ... RETURNING * and return the new row as a dict. + Fetches the row BEFORE committing โ€” SQLite requires the cursor to be + fully consumed before the transaction is committed.""" + self.conn.row_factory = sqlite3.Row + cur = self.conn.execute(sql, params) + row = self._row_to_dict(cur.fetchone()) + self.conn.commit() + return row + + def create_receipt(self, filename: str, original_path: str) -> dict[str, Any]: + return self._insert_returning( + "INSERT INTO receipts (filename, original_path) VALUES (?, ?) RETURNING *", + (filename, original_path), + ) + + def get_receipt(self, receipt_id: int) -> dict[str, Any] | None: + return self._fetch_one("SELECT * FROM receipts WHERE id = ?", (receipt_id,)) + + def list_receipts(self, limit: int = 50, offset: int = 0) -> list[dict[str, Any]]: + return self._fetch_all( + "SELECT * FROM receipts ORDER BY created_at DESC LIMIT ? OFFSET ?", + (limit, offset), + ) + + def update_receipt_status(self, receipt_id: int, status: str, + error: str | None = None) -> None: + self.conn.execute( + "UPDATE receipts SET status = ?, error = ?, updated_at = datetime('now') WHERE id = ?", + (status, error, receipt_id), + ) + self.conn.commit() + + def update_receipt_metadata(self, receipt_id: int, metadata: dict) -> None: + self.conn.execute( + "UPDATE receipts SET metadata = ?, updated_at = datetime('now') WHERE id = ?", + (self._dump(metadata), receipt_id), + ) + self.conn.commit() + + # โ”€โ”€ quality assessments โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + + def upsert_quality_assessment(self, receipt_id: int, overall_score: float, + is_acceptable: bool, metrics: dict, + suggestions: list) -> dict[str, Any]: + self.conn.execute( + """INSERT INTO quality_assessments + (receipt_id, overall_score, is_acceptable, metrics, 
improvement_suggestions) + VALUES (?, ?, ?, ?, ?) + ON CONFLICT (receipt_id) DO UPDATE SET + overall_score = excluded.overall_score, + is_acceptable = excluded.is_acceptable, + metrics = excluded.metrics, + improvement_suggestions = excluded.improvement_suggestions""", + (receipt_id, overall_score, int(is_acceptable), + self._dump(metrics), self._dump(suggestions)), + ) + self.conn.commit() + return self._fetch_one( + "SELECT * FROM quality_assessments WHERE receipt_id = ?", (receipt_id,) + ) + + # โ”€โ”€ products โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + + def get_or_create_product(self, name: str, barcode: str | None = None, + **kwargs) -> tuple[dict[str, Any], bool]: + """Returns (product, created). Looks up by barcode first, then name.""" + if barcode: + existing = self._fetch_one( + "SELECT * FROM products WHERE barcode = ?", (barcode,) + ) + if existing: + return existing, False + + existing = self._fetch_one("SELECT * FROM products WHERE name = ?", (name,)) + if existing: + return existing, False + + row = self._insert_returning( + """INSERT INTO products (name, barcode, brand, category, description, + image_url, nutrition_data, source, source_data) + VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?) 
RETURNING *""", + ( + name, barcode, + kwargs.get("brand"), kwargs.get("category"), + kwargs.get("description"), kwargs.get("image_url"), + self._dump(kwargs.get("nutrition_data", {})), + kwargs.get("source", "manual"), + self._dump(kwargs.get("source_data", {})), + ), + ) + return row, True + + def get_product(self, product_id: int) -> dict[str, Any] | None: + return self._fetch_one("SELECT * FROM products WHERE id = ?", (product_id,)) + + def list_products(self) -> list[dict[str, Any]]: + return self._fetch_all("SELECT * FROM products ORDER BY name") + + # โ”€โ”€ inventory โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + + def add_inventory_item(self, product_id: int, location: str, + quantity: float = 1.0, unit: str = "count", + **kwargs) -> dict[str, Any]: + return self._insert_returning( + """INSERT INTO inventory_items + (product_id, receipt_id, quantity, unit, location, sublocation, + purchase_date, expiration_date, notes, source) + VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?) RETURNING *""", + ( + product_id, kwargs.get("receipt_id"), + quantity, unit, location, kwargs.get("sublocation"), + kwargs.get("purchase_date"), kwargs.get("expiration_date"), + kwargs.get("notes"), kwargs.get("source", "manual"), + ), + ) + + def get_inventory_item(self, item_id: int) -> dict[str, Any] | None: + return self._fetch_one( + """SELECT i.*, p.name as product_name, p.barcode, p.category + FROM inventory_items i + JOIN products p ON p.id = i.product_id + WHERE i.id = ?""", + (item_id,), + ) + + def list_inventory(self, location: str | None = None, + status: str = "available") -> list[dict[str, Any]]: + if location: + return self._fetch_all( + """SELECT i.*, p.name as product_name, p.barcode, p.category + FROM inventory_items i + JOIN products p ON p.id = i.product_id + WHERE i.status = ? AND i.location = ? 
+ ORDER BY i.expiration_date ASC NULLS LAST""", + (status, location), + ) + return self._fetch_all( + """SELECT i.*, p.name as product_name, p.barcode, p.category + FROM inventory_items i + JOIN products p ON p.id = i.product_id + WHERE i.status = ? + ORDER BY i.expiration_date ASC NULLS LAST""", + (status,), + ) + + def update_inventory_item(self, item_id: int, **kwargs) -> dict[str, Any] | None: + allowed = {"quantity", "unit", "location", "sublocation", + "expiration_date", "status", "notes", "consumed_at"} + updates = {k: v for k, v in kwargs.items() if k in allowed} + if not updates: + return self.get_inventory_item(item_id) + sets = ", ".join(f"{k} = ?" for k in updates) + values = list(updates.values()) + [item_id] + self.conn.execute( + f"UPDATE inventory_items SET {sets}, updated_at = datetime('now') WHERE id = ?", + values, + ) + self.conn.commit() + return self.get_inventory_item(item_id) + + def expiring_soon(self, days: int = 7) -> list[dict[str, Any]]: + return self._fetch_all( + """SELECT i.*, p.name as product_name, p.category + FROM inventory_items i + JOIN products p ON p.id = i.product_id + WHERE i.status = 'available' + AND i.expiration_date IS NOT NULL + AND date(i.expiration_date) <= date('now', ? 
|| ' days') + ORDER BY i.expiration_date ASC""", + (str(days),), + ) + + # โ”€โ”€ receipt_data โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + + def upsert_receipt_data(self, receipt_id: int, data: dict) -> dict[str, Any]: + fields = [ + "merchant_name", "merchant_address", "merchant_phone", "merchant_email", + "merchant_website", "merchant_tax_id", "transaction_date", "transaction_time", + "receipt_number", "register_number", "cashier_name", "transaction_id", + "items", "subtotal", "tax", "discount", "tip", "total", + "payment_method", "amount_paid", "change_given", + "raw_text", "confidence_scores", "warnings", "processing_time", + ] + json_fields = {"items", "confidence_scores", "warnings"} + cols = ", ".join(fields) + placeholders = ", ".join("?" for _ in fields) + values = [ + self._dump(data.get(f)) if f in json_fields and data.get(f) is not None + else data.get(f) + for f in fields + ] + self.conn.execute( + f"""INSERT INTO receipt_data (receipt_id, {cols}) + VALUES (?, {placeholders}) + ON CONFLICT (receipt_id) DO UPDATE SET + {', '.join(f'{f} = excluded.{f}' for f in fields)}, + updated_at = datetime('now')""", + [receipt_id] + values, + ) + self.conn.commit() + return self._fetch_one( + "SELECT * FROM receipt_data WHERE receipt_id = ?", (receipt_id,) + ) diff --git a/app/main.py b/app/main.py new file mode 100644 index 0000000..912660f --- /dev/null +++ b/app/main.py @@ -0,0 +1,44 @@ +#!/usr/bin/env python +# app/main.py + +import logging +from contextlib import asynccontextmanager + +from fastapi import FastAPI +from fastapi.middleware.cors import CORSMiddleware + +from app.api.routes import api_router +from app.core.config import settings + +logger = logging.getLogger(__name__) + + +@asynccontextmanager +async def lifespan(app: FastAPI): + logger.info("Starting Kiwi API...") + settings.ensure_dirs() + yield + logger.info("Kiwi API 
shutting down.") + + +app = FastAPI( + title=settings.PROJECT_NAME, + description="Pantry tracking + leftover recipe suggestions", + version="0.1.0", + lifespan=lifespan, +) + +app.add_middleware( + CORSMiddleware, + allow_origins=settings.CORS_ORIGINS, + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], +) + +app.include_router(api_router, prefix=settings.API_PREFIX) + + +@app.get("/") +async def root(): + return {"service": "kiwi-api", "docs": "/docs"} diff --git a/app/models/__init__.py b/app/models/__init__.py new file mode 100644 index 0000000..bd67b17 --- /dev/null +++ b/app/models/__init__.py @@ -0,0 +1,5 @@ +# app/models/__init__.py +""" +Data models for Kiwi. +Contains domain models and Pydantic schemas. +""" \ No newline at end of file diff --git a/app/models/domain/__init__.py b/app/models/domain/__init__.py new file mode 100644 index 0000000..632ef59 --- /dev/null +++ b/app/models/domain/__init__.py @@ -0,0 +1,5 @@ +# app/models/domain/__init__.py +""" +Domain models for Kiwi. +These represent the core business entities. 
+""" \ No newline at end of file diff --git a/app/models/schemas/__init__.py b/app/models/schemas/__init__.py new file mode 100644 index 0000000..2cc146c --- /dev/null +++ b/app/models/schemas/__init__.py @@ -0,0 +1,4 @@ +from app.models.schemas.receipt import ReceiptResponse +from app.models.schemas.quality import QualityAssessment + +__all__ = ["ReceiptResponse", "QualityAssessment"] diff --git a/app/models/schemas/inventory.py b/app/models/schemas/inventory.py new file mode 100644 index 0000000..4983749 --- /dev/null +++ b/app/models/schemas/inventory.py @@ -0,0 +1,143 @@ +"""Pydantic schemas for inventory management (integer IDs, SQLite-compatible).""" + +from __future__ import annotations + +from datetime import date, datetime +from typing import Any, Dict, List, Optional + +from pydantic import BaseModel, Field + + +# โ”€โ”€ Tags โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +class TagCreate(BaseModel): + name: str = Field(..., max_length=100) + slug: str = Field(..., max_length=100) + description: Optional[str] = None + color: Optional[str] = Field(None, max_length=7) + category: Optional[str] = None + + +class TagResponse(BaseModel): + id: int + name: str + slug: str + description: Optional[str] + color: Optional[str] + category: Optional[str] + created_at: str + updated_at: str + + model_config = {"from_attributes": True} + + +# โ”€โ”€ Products โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +class ProductCreate(BaseModel): + name: str = Field(..., max_length=500) + barcode: Optional[str] = Field(None, max_length=50) + brand: Optional[str] = None + category: Optional[str] = None + description: Optional[str] = None + image_url: Optional[str] = 
None + nutrition_data: Dict[str, Any] = Field(default_factory=dict) + source: str = "manual" + source_data: Dict[str, Any] = Field(default_factory=dict) + + +class ProductUpdate(BaseModel): + name: Optional[str] = None + brand: Optional[str] = None + category: Optional[str] = None + description: Optional[str] = None + image_url: Optional[str] = None + nutrition_data: Optional[Dict[str, Any]] = None + + +class ProductResponse(BaseModel): + id: int + barcode: Optional[str] + name: str + brand: Optional[str] + category: Optional[str] + description: Optional[str] + image_url: Optional[str] + nutrition_data: Dict[str, Any] + source: str + created_at: str + updated_at: str + + model_config = {"from_attributes": True} + + +# โ”€โ”€ Inventory Items โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +class InventoryItemCreate(BaseModel): + product_id: int + quantity: float = Field(default=1.0, gt=0) + unit: str = "count" + location: str + sublocation: Optional[str] = None + purchase_date: Optional[date] = None + expiration_date: Optional[date] = None + notes: Optional[str] = None + source: str = "manual" + + +class InventoryItemUpdate(BaseModel): + quantity: Optional[float] = Field(None, gt=0) + unit: Optional[str] = None + location: Optional[str] = None + sublocation: Optional[str] = None + expiration_date: Optional[date] = None + status: Optional[str] = None + notes: Optional[str] = None + + +class InventoryItemResponse(BaseModel): + id: int + product_id: int + product_name: Optional[str] = None + barcode: Optional[str] = None + category: Optional[str] = None + quantity: float + unit: str + location: str + sublocation: Optional[str] + purchase_date: Optional[str] + expiration_date: Optional[str] + status: str + notes: Optional[str] + source: str + created_at: str + updated_at: str + + model_config = {"from_attributes": True} + + +# โ”€โ”€ 
Barcode scan โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +class BarcodeScanResult(BaseModel): + barcode: str + barcode_type: str + product: Optional[ProductResponse] + inventory_item: Optional[InventoryItemResponse] + added_to_inventory: bool + message: str + + +class BarcodeScanResponse(BaseModel): + success: bool + barcodes_found: int + results: List[BarcodeScanResult] + message: str + + +# โ”€โ”€ Stats โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +class InventoryStats(BaseModel): + total_items: int + available_items: int + expiring_soon: int + expired_items: int + locations: Dict[str, int] diff --git a/app/models/schemas/ocr.py b/app/models/schemas/ocr.py new file mode 100644 index 0000000..2250ecd --- /dev/null +++ b/app/models/schemas/ocr.py @@ -0,0 +1,138 @@ +#!/usr/bin/env python +""" +Pydantic schemas for OCR data models. 
+""" + +from datetime import datetime, date, time +from typing import Optional, List, Dict, Any +from uuid import UUID +from pydantic import BaseModel, Field, validator + + +class MerchantInfo(BaseModel): + """Merchant/store information from receipt.""" + name: Optional[str] = None + address: Optional[str] = None + phone: Optional[str] = None + email: Optional[str] = None + website: Optional[str] = None + tax_id: Optional[str] = None + + +class TransactionInfo(BaseModel): + """Transaction details from receipt.""" + date: Optional[date] = None + time: Optional[time] = None + receipt_number: Optional[str] = None + register: Optional[str] = None + cashier: Optional[str] = None + transaction_id: Optional[str] = None + + +class ReceiptItem(BaseModel): + """Individual line item from receipt.""" + name: str + quantity: float = 1.0 + unit_price: Optional[float] = None + total_price: float + category: Optional[str] = None + tax_code: Optional[str] = None + discount: Optional[float] = 0.0 + barcode: Optional[str] = None + notes: Optional[str] = None + + +class ReceiptTotals(BaseModel): + """Financial totals from receipt.""" + subtotal: float + tax: Optional[float] = 0.0 + discount: Optional[float] = 0.0 + tip: Optional[float] = 0.0 + total: float + payment_method: Optional[str] = None + amount_paid: Optional[float] = None + change: Optional[float] = 0.0 + calculated_subtotal: Optional[float] = None # For validation + + +class ConfidenceScores(BaseModel): + """Confidence scores for extracted data.""" + overall: float = Field(ge=0.0, le=1.0) + merchant: Optional[float] = Field(default=0.5, ge=0.0, le=1.0) + items: Optional[float] = Field(default=0.5, ge=0.0, le=1.0) + totals: Optional[float] = Field(default=0.5, ge=0.0, le=1.0) + transaction: Optional[float] = Field(default=0.5, ge=0.0, le=1.0) + + +class OCRResult(BaseModel): + """Complete OCR extraction result.""" + merchant: MerchantInfo + transaction: TransactionInfo + items: List[ReceiptItem] + totals: ReceiptTotals + 
confidence: ConfidenceScores + raw_text: Optional[str] = None + warnings: List[str] = Field(default_factory=list) + processing_time: Optional[float] = None # seconds + + +class ReceiptDataCreate(BaseModel): + """Schema for creating receipt data.""" + receipt_id: UUID + merchant_name: Optional[str] = None + merchant_address: Optional[str] = None + merchant_phone: Optional[str] = None + transaction_date: Optional[date] = None + transaction_time: Optional[time] = None + receipt_number: Optional[str] = None + items: List[Dict[str, Any]] = Field(default_factory=list) + subtotal: Optional[float] = None + tax: Optional[float] = None + tip: Optional[float] = None + total: Optional[float] = None + payment_method: Optional[str] = None + raw_text: Optional[str] = None + confidence_scores: Optional[Dict[str, float]] = None + warnings: List[str] = Field(default_factory=list) + + +class ReceiptDataResponse(BaseModel): + """Schema for receipt data response.""" + id: UUID + receipt_id: UUID + merchant_name: Optional[str] + merchant_address: Optional[str] + merchant_phone: Optional[str] + transaction_date: Optional[date] + transaction_time: Optional[time] + receipt_number: Optional[str] + items: List[Dict[str, Any]] + subtotal: Optional[float] + tax: Optional[float] + tip: Optional[float] + total: Optional[float] + payment_method: Optional[str] + raw_text: Optional[str] + confidence_scores: Optional[Dict[str, float]] + warnings: List[str] + created_at: datetime + updated_at: datetime + + class Config: + from_attributes = True + + +class OCRStatusResponse(BaseModel): + """OCR processing status response.""" + receipt_id: UUID + ocr_completed: bool + has_data: bool + confidence: Optional[float] = None + item_count: Optional[int] = None + warnings: List[str] = Field(default_factory=list) + + +class OCRTriggerRequest(BaseModel): + """Request to trigger OCR processing.""" + force_reprocess: bool = False + use_quantization: bool = False diff --git a/app/models/schemas/quality.py 
b/app/models/schemas/quality.py new file mode 100644 index 0000000..d316318 --- /dev/null +++ b/app/models/schemas/quality.py @@ -0,0 +1,17 @@ +"""Quality assessment schemas (integer IDs, SQLite-compatible).""" +from __future__ import annotations + +from typing import Any, Dict, List +from pydantic import BaseModel + + +class QualityAssessment(BaseModel): + id: int + receipt_id: int + overall_score: float + is_acceptable: bool + metrics: Dict[str, Any] = {} + improvement_suggestions: List[str] = [] + created_at: str + + model_config = {"from_attributes": True} diff --git a/app/models/schemas/receipt.py b/app/models/schemas/receipt.py new file mode 100644 index 0000000..9834353 --- /dev/null +++ b/app/models/schemas/receipt.py @@ -0,0 +1,46 @@ +"""Receipt schemas (integer IDs, SQLite-compatible).""" +from __future__ import annotations + +from typing import Any, Dict, List, Optional +from pydantic import BaseModel, Field + + +class ReceiptResponse(BaseModel): + id: int + filename: str + status: str + error: Optional[str] = None + metadata: Dict[str, Any] = {} + created_at: str + updated_at: str + + model_config = {"from_attributes": True} + + +class ApproveOCRRequest(BaseModel): + """Approve staged OCR items for inventory population. + + item_indices: which items (by 0-based index) to approve. + Omit or pass null to approve all items. + location: pantry location for created inventory items. + """ + item_indices: Optional[List[int]] = Field( + default=None, + description="0-based indices of items to approve. 
Null = approve all.", + ) + location: str = Field(default="pantry") + + +class ApprovedInventoryItem(BaseModel): + inventory_id: int + product_name: str + quantity: float + location: str + expiration_date: Optional[str] = None + + +class ApproveOCRResponse(BaseModel): + receipt_id: int + approved: int + skipped: int + inventory_items: List[ApprovedInventoryItem] diff --git a/app/services/__init__.py b/app/services/__init__.py new file mode 100644 index 0000000..d4727fd --- /dev/null +++ b/app/services/__init__.py @@ -0,0 +1,8 @@ +# app/services/__init__.py +""" +Business logic services for Kiwi. +""" + +from app.services.receipt_service import ReceiptService + +__all__ = ["ReceiptService"] \ No newline at end of file diff --git a/app/services/barcode_scanner.py b/app/services/barcode_scanner.py new file mode 100644 index 0000000..889e807 --- /dev/null +++ b/app/services/barcode_scanner.py @@ -0,0 +1,365 @@ +""" +Barcode scanning service using pyzbar. + +This module provides functionality to detect and decode barcodes +from images (UPC, EAN, QR codes, etc.). +""" + +import cv2 +import numpy as np +from pyzbar import pyzbar +from pathlib import Path +from typing import List, Dict, Any, Optional +import logging + +logger = logging.getLogger(__name__) + + +class BarcodeScanner: + """ + Service for scanning barcodes from images. + + Supports various barcode formats: + - UPC-A, UPC-E + - EAN-8, EAN-13 + - Code 39, Code 128 + - QR codes + - And more via pyzbar/libzbar + """ + + def scan_image(self, image_path: Path) -> List[Dict[str, Any]]: + """ + Scan an image for barcodes. 
+ + Args: + image_path: Path to the image file + + Returns: + List of detected barcodes, each as a dictionary with: + - data: Barcode data (string) + - type: Barcode type (e.g., 'EAN13', 'QRCODE') + - quality: Quality score (0-100) + - rect: Bounding box (x, y, width, height) + """ + try: + # Read image + image = cv2.imread(str(image_path)) + if image is None: + logger.error(f"Failed to load image: {image_path}") + return [] + + # Convert to grayscale for better detection + gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY) + + # Try multiple preprocessing techniques and rotations for better detection + barcodes = [] + + # 1. Try on original grayscale + barcodes.extend(self._detect_barcodes(gray, image)) + + # 2. Try with adaptive thresholding (helps with poor lighting) + if not barcodes: + thresh = cv2.adaptiveThreshold( + gray, 255, cv2.ADAPTIVE_THRESH_GAUSSIAN_C, + cv2.THRESH_BINARY, 11, 2 + ) + barcodes.extend(self._detect_barcodes(thresh, image)) + + # 3. Try with sharpening (helps with blurry images) + if not barcodes: + kernel = np.array([[-1, -1, -1], + [-1, 9, -1], + [-1, -1, -1]]) + sharpened = cv2.filter2D(gray, -1, kernel) + barcodes.extend(self._detect_barcodes(sharpened, image)) + + # 4. 
Try rotations if still no barcodes found (handles tilted/rotated barcodes) + if not barcodes: + logger.info("No barcodes found in standard orientation, trying rotations...") + # Try incremental angles: 30ยฐ, 60ยฐ, 90ยฐ (covers 0-90ยฐ range) + # 0ยฐ already tried, 180ยฐ is functionally same as 0ยฐ, 90ยฐ/270ยฐ are same axis + for angle in [30, 60, 90]: + rotated_gray = self._rotate_image(gray, angle) + rotated_color = self._rotate_image(image, angle) + detected = self._detect_barcodes(rotated_gray, rotated_color) + if detected: + logger.info(f"Found barcode(s) at {angle}ยฐ rotation") + barcodes.extend(detected) + break # Stop after first successful rotation + + # Remove duplicates (same data) + unique_barcodes = self._deduplicate_barcodes(barcodes) + + logger.info(f"Found {len(unique_barcodes)} barcode(s) in {image_path}") + return unique_barcodes + + except Exception as e: + logger.error(f"Error scanning image {image_path}: {e}") + return [] + + def _detect_barcodes( + self, + image: np.ndarray, + original_image: np.ndarray + ) -> List[Dict[str, Any]]: + """ + Detect barcodes in a preprocessed image. + + Args: + image: Preprocessed image (grayscale) + original_image: Original color image (for quality assessment) + + Returns: + List of detected barcodes + """ + detected = pyzbar.decode(image) + barcodes = [] + + for barcode in detected: + # Decode barcode data + barcode_data = barcode.data.decode("utf-8") + barcode_type = barcode.type + + # Get bounding box + rect = barcode.rect + bbox = { + "x": rect.left, + "y": rect.top, + "width": rect.width, + "height": rect.height, + } + + # Assess quality of barcode region + quality = self._assess_barcode_quality(original_image, bbox) + + barcodes.append({ + "data": barcode_data, + "type": barcode_type, + "quality": quality, + "rect": bbox, + }) + + return barcodes + + def _assess_barcode_quality( + self, + image: np.ndarray, + bbox: Dict[str, int] + ) -> int: + """ + Assess the quality of a detected barcode. 
+ + Args: + image: Original image + bbox: Bounding box of barcode + + Returns: + Quality score (0-100) + """ + try: + # Extract barcode region + x, y, w, h = bbox["x"], bbox["y"], bbox["width"], bbox["height"] + + # Add padding + pad = 10 + y1 = max(0, y - pad) + y2 = min(image.shape[0], y + h + pad) + x1 = max(0, x - pad) + x2 = min(image.shape[1], x + w + pad) + + region = image[y1:y2, x1:x2] + + if region.size == 0: + return 50 + + # Convert to grayscale if needed + if len(region.shape) == 3: + region = cv2.cvtColor(region, cv2.COLOR_BGR2GRAY) + + # Calculate sharpness (Laplacian variance) + laplacian_var = cv2.Laplacian(region, cv2.CV_64F).var() + sharpness_score = min(100, laplacian_var / 10) # Normalize + + # Calculate contrast + min_val, max_val = region.min(), region.max() + contrast = (max_val - min_val) / 255.0 * 100 + + # Calculate size score (larger is better, up to a point) + area = w * h + size_score = min(100, area / 100) # Normalize + + # Weighted average + quality = (sharpness_score * 0.4 + contrast * 0.4 + size_score * 0.2) + + return int(quality) + + except Exception as e: + logger.warning(f"Error assessing barcode quality: {e}") + return 50 + + def _rotate_image(self, image: np.ndarray, angle: float) -> np.ndarray: + """ + Rotate an image by a given angle. 
+ + Args: + image: Input image + angle: Rotation angle in degrees (any angle, but optimized for 90ยฐ increments) + + Returns: + Rotated image + """ + # Use fast optimized rotation for common angles + if angle == 90: + return cv2.rotate(image, cv2.ROTATE_90_CLOCKWISE) + elif angle == 180: + return cv2.rotate(image, cv2.ROTATE_180) + elif angle == 270: + return cv2.rotate(image, cv2.ROTATE_90_COUNTERCLOCKWISE) + elif angle == 0: + return image + else: + # For arbitrary angles, use affine transformation + (h, w) = image.shape[:2] + center = (w // 2, h // 2) + + # Get rotation matrix + M = cv2.getRotationMatrix2D(center, angle, 1.0) + + # Calculate new bounding dimensions + cos = np.abs(M[0, 0]) + sin = np.abs(M[0, 1]) + new_w = int((h * sin) + (w * cos)) + new_h = int((h * cos) + (w * sin)) + + # Adjust rotation matrix for new dimensions + M[0, 2] += (new_w / 2) - center[0] + M[1, 2] += (new_h / 2) - center[1] + + # Perform rotation + return cv2.warpAffine(image, M, (new_w, new_h), + flags=cv2.INTER_CUBIC, + borderMode=cv2.BORDER_REPLICATE) + + def _deduplicate_barcodes( + self, + barcodes: List[Dict[str, Any]] + ) -> List[Dict[str, Any]]: + """ + Remove duplicate barcodes (same data). + + If multiple detections of the same barcode, keep the one + with the highest quality score. + + Args: + barcodes: List of detected barcodes + + Returns: + Deduplicated list + """ + seen = {} + for barcode in barcodes: + data = barcode["data"] + if data not in seen or barcode["quality"] > seen[data]["quality"]: + seen[data] = barcode + + return list(seen.values()) + + def scan_from_bytes(self, image_bytes: bytes) -> List[Dict[str, Any]]: + """ + Scan barcodes from image bytes (uploaded file). 
+ + Args: + image_bytes: Image data as bytes + + Returns: + List of detected barcodes + """ + try: + # Convert bytes to numpy array + nparr = np.frombuffer(image_bytes, np.uint8) + image = cv2.imdecode(nparr, cv2.IMREAD_COLOR) + + if image is None: + logger.error("Failed to decode image from bytes") + return [] + + # Convert to grayscale + gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY) + + # Try multiple approaches for better detection + barcodes = [] + + # 1. Try original orientation + barcodes.extend(self._detect_barcodes(gray, image)) + + # 2. Try with adaptive thresholding + if not barcodes: + thresh = cv2.adaptiveThreshold( + gray, 255, cv2.ADAPTIVE_THRESH_GAUSSIAN_C, + cv2.THRESH_BINARY, 11, 2 + ) + barcodes.extend(self._detect_barcodes(thresh, image)) + + # 3. Try rotations if still no barcodes found + if not barcodes: + logger.info("No barcodes found in uploaded image, trying rotations...") + # Try incremental angles: 30ยฐ, 60ยฐ, 90ยฐ (covers 0-90ยฐ range) + for angle in [30, 60, 90]: + rotated_gray = self._rotate_image(gray, angle) + rotated_color = self._rotate_image(image, angle) + detected = self._detect_barcodes(rotated_gray, rotated_color) + if detected: + logger.info(f"Found barcode(s) in uploaded image at {angle}ยฐ rotation") + barcodes.extend(detected) + break + + return self._deduplicate_barcodes(barcodes) + + except Exception as e: + logger.error(f"Error scanning image from bytes: {e}") + return [] + + def validate_barcode(self, barcode: str, barcode_type: str) -> bool: + """ + Validate a barcode using check digits (for EAN/UPC). 
+ + Args: + barcode: Barcode string + barcode_type: Type of barcode (e.g., 'EAN13', 'UPCA') + + Returns: + True if valid, False otherwise + """ + if barcode_type in ["EAN13", "UPCA"]: + return self._validate_ean13(barcode) + elif barcode_type == "EAN8": + return self._validate_ean8(barcode) + + # For other types, assume valid if detected + return True + + def _validate_ean13(self, barcode: str) -> bool: + """Validate EAN-13 barcode using check digit.""" + if len(barcode) != 13 or not barcode.isdigit(): + return False + + # Calculate check digit + odd_sum = sum(int(barcode[i]) for i in range(0, 12, 2)) + even_sum = sum(int(barcode[i]) for i in range(1, 12, 2)) + total = odd_sum + (even_sum * 3) + check_digit = (10 - (total % 10)) % 10 + + return int(barcode[12]) == check_digit + + def _validate_ean8(self, barcode: str) -> bool: + """Validate EAN-8 barcode using check digit.""" + if len(barcode) != 8 or not barcode.isdigit(): + return False + + # Calculate check digit + odd_sum = sum(int(barcode[i]) for i in range(1, 7, 2)) + even_sum = sum(int(barcode[i]) for i in range(0, 7, 2)) + total = (odd_sum * 3) + even_sum + check_digit = (10 - (total % 10)) % 10 + + return int(barcode[7]) == check_digit diff --git a/app/services/expiration_predictor.py b/app/services/expiration_predictor.py new file mode 100644 index 0000000..d51919d --- /dev/null +++ b/app/services/expiration_predictor.py @@ -0,0 +1,306 @@ +""" +Expiration Date Prediction Service. + +Predicts expiration dates for food items based on category and storage location. +Fast path: deterministic lookup table (USDA FoodKeeper / FDA guidelines). +Fallback path: LLMRouter โ€” only fires for unknown products when tier allows it + and a LLM backend is configured. 
+""" + +import logging +import re +from datetime import date, timedelta +from typing import Optional, List + +from circuitforge_core.llm.router import LLMRouter +from app.tiers import can_use + +logger = logging.getLogger(__name__) + + +class ExpirationPredictor: + """Predict expiration dates based on product category and storage location.""" + + # Default shelf life in days by category and location + # Sources: USDA FoodKeeper app, FDA guidelines + SHELF_LIFE = { + # Dairy + 'dairy': {'fridge': 7, 'freezer': 90}, + 'milk': {'fridge': 7, 'freezer': 90}, + 'cheese': {'fridge': 21, 'freezer': 180}, + 'yogurt': {'fridge': 14, 'freezer': 60}, + 'butter': {'fridge': 30, 'freezer': 365}, + 'cream': {'fridge': 5, 'freezer': 60}, + # Meat & Poultry + 'meat': {'fridge': 3, 'freezer': 180}, + 'beef': {'fridge': 3, 'freezer': 270}, + 'pork': {'fridge': 3, 'freezer': 180}, + 'lamb': {'fridge': 3, 'freezer': 270}, + 'poultry': {'fridge': 2, 'freezer': 270}, + 'chicken': {'fridge': 2, 'freezer': 270}, + 'turkey': {'fridge': 2, 'freezer': 270}, + 'ground_meat': {'fridge': 2, 'freezer': 120}, + # Seafood + 'fish': {'fridge': 2, 'freezer': 180}, + 'seafood': {'fridge': 2, 'freezer': 180}, + 'shrimp': {'fridge': 2, 'freezer': 180}, + 'salmon': {'fridge': 2, 'freezer': 180}, + # Eggs + 'eggs': {'fridge': 35, 'freezer': None}, + # Produce + 'vegetables': {'fridge': 7, 'pantry': 5, 'freezer': 270}, + 'fruits': {'fridge': 7, 'pantry': 5, 'freezer': 365}, + 'leafy_greens': {'fridge': 5, 'freezer': 270}, + 'berries': {'fridge': 5, 'freezer': 270}, + 'apples': {'fridge': 30, 'pantry': 14}, + 'bananas': {'pantry': 5, 'fridge': 7}, + 'citrus': {'fridge': 21, 'pantry': 7}, + # Bread & Bakery + 'bread': {'pantry': 5, 'freezer': 90}, + 'bakery': {'pantry': 3, 'fridge': 7, 'freezer': 90}, + # Frozen + 'frozen_foods': {'freezer': 180}, + 'frozen_vegetables': {'freezer': 270}, + 'frozen_fruit': {'freezer': 365}, + 'ice_cream': {'freezer': 60}, + # Pantry Staples + 'canned_goods': {'pantry': 730, 
'cabinet': 730}, + 'dry_goods': {'pantry': 365, 'cabinet': 365}, + 'pasta': {'pantry': 730, 'cabinet': 730}, + 'rice': {'pantry': 730, 'cabinet': 730}, + 'flour': {'pantry': 180, 'cabinet': 180}, + 'sugar': {'pantry': 730, 'cabinet': 730}, + 'cereal': {'pantry': 180, 'cabinet': 180}, + 'chips': {'pantry': 90, 'cabinet': 90}, + 'cookies': {'pantry': 90, 'cabinet': 90}, + # Condiments + 'condiments': {'fridge': 90, 'pantry': 180}, + 'ketchup': {'fridge': 180, 'pantry': 365}, + 'mustard': {'fridge': 365, 'pantry': 365}, + 'mayo': {'fridge': 60, 'pantry': 180}, + 'salad_dressing': {'fridge': 90, 'pantry': 180}, + 'soy_sauce': {'fridge': 730, 'pantry': 730}, + # Beverages + 'beverages': {'fridge': 14, 'pantry': 180}, + 'juice': {'fridge': 7, 'freezer': 90}, + 'soda': {'fridge': 270, 'pantry': 270}, + 'water': {'fridge': 365, 'pantry': 365}, + # Other + 'deli_meat': {'fridge': 5, 'freezer': 60}, + 'leftovers': {'fridge': 4, 'freezer': 90}, + 'prepared_foods': {'fridge': 4, 'freezer': 90}, + } + + CATEGORY_KEYWORDS = { + 'milk': ['milk', 'whole milk', '2% milk', 'skim milk', 'almond milk', 'oat milk', 'soy milk'], + 'cheese': ['cheese', 'cheddar', 'mozzarella', 'swiss', 'parmesan', 'feta', 'gouda'], + 'yogurt': ['yogurt', 'greek yogurt', 'yoghurt'], + 'butter': ['butter', 'margarine'], + 'cream': ['cream', 'heavy cream', 'whipping cream', 'sour cream'], + 'eggs': ['eggs', 'egg'], + 'beef': ['beef', 'steak', 'roast', 'brisket', 'ribeye', 'sirloin'], + 'pork': ['pork', 'bacon', 'ham', 'sausage', 'pork chop'], + 'chicken': ['chicken', 'chicken breast', 'chicken thigh', 'chicken wings'], + 'turkey': ['turkey', 'turkey breast', 'ground turkey'], + 'ground_meat': ['ground beef', 'ground pork', 'ground chicken', 'hamburger'], + 'fish': ['fish', 'cod', 'tilapia', 'halibut'], + 'salmon': ['salmon'], + 'shrimp': ['shrimp', 'prawns'], + 'leafy_greens': ['lettuce', 'spinach', 'kale', 'arugula', 'mixed greens', 'salad'], + 'berries': ['strawberries', 'blueberries', 'raspberries', 
'blackberries'], + 'apples': ['apple', 'apples'], + 'bananas': ['banana', 'bananas'], + 'citrus': ['orange', 'lemon', 'lime', 'grapefruit', 'tangerine'], + 'bread': ['bread', 'loaf', 'baguette', 'roll', 'bagel', 'bun'], + 'bakery': ['muffin', 'croissant', 'donut', 'danish', 'pastry'], + 'deli_meat': ['deli', 'sliced turkey', 'sliced ham', 'lunch meat', 'cold cuts'], + 'frozen_vegetables': ['frozen veg', 'frozen corn', 'frozen peas', 'frozen broccoli'], + 'frozen_fruit': ['frozen berries', 'frozen mango', 'frozen strawberries'], + 'ice_cream': ['ice cream', 'gelato', 'frozen yogurt'], + 'pasta': ['pasta', 'spaghetti', 'penne', 'macaroni', 'noodles'], + 'rice': ['rice', 'brown rice', 'white rice', 'jasmine'], + 'cereal': ['cereal', 'granola', 'oatmeal'], + 'chips': ['chips', 'crisps', 'tortilla chips'], + 'cookies': ['cookies', 'biscuits', 'crackers'], + 'ketchup': ['ketchup', 'catsup'], + 'mustard': ['mustard'], + 'mayo': ['mayo', 'mayonnaise', 'miracle whip'], + 'salad_dressing': ['salad dressing', 'ranch', 'italian dressing', 'vinaigrette'], + 'soy_sauce': ['soy sauce', 'tamari'], + 'juice': ['juice', 'orange juice', 'apple juice'], + 'soda': ['soda', 'pop', 'cola', 'sprite', 'pepsi', 'coke'], + } + + def __init__(self) -> None: + self._router: Optional[LLMRouter] = None + try: + self._router = LLMRouter() + except FileNotFoundError: + logger.debug("LLM config not found โ€” expiry LLM fallback disabled") + except Exception as e: + logger.warning("LLMRouter init failed (%s) โ€” expiry LLM fallback disabled", e) + + # โ”€โ”€ Public API โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + + def predict_expiration( + self, + category: Optional[str], + location: str, + purchase_date: Optional[date] = None, + product_name: Optional[str] = None, + tier: str = "free", + has_byok: bool = False, + ) -> Optional[date]: + """ + Predict expiration 
date. + + Fast path: deterministic lookup table. + Fallback: LLM query when table has no match, tier allows it, and a + backend is configured. Returns None rather than crashing if + inference fails. + """ + if not purchase_date: + purchase_date = date.today() + + days = self._lookup_days(category, location) + + if days is None and product_name and self._router and can_use("expiry_llm_matching", tier, has_byok): + days = self._llm_predict_days(product_name, category, location) + + if days is None: + return None + return purchase_date + timedelta(days=days) + + def get_category_from_product( + self, + product_name: str, + product_category: Optional[str] = None, + tags: Optional[List[str]] = None, + ) -> Optional[str]: + """Determine category from product name, existing category, and tags.""" + if product_category: + cat = product_category.lower().strip() + if cat in self.SHELF_LIFE: + return cat + for key in self.SHELF_LIFE: + if key in cat or cat in key: + return key + + if tags: + for tag in tags: + t = tag.lower().strip() + if t in self.SHELF_LIFE: + return t + + name = product_name.lower().strip() + for category, keywords in self.CATEGORY_KEYWORDS.items(): + if any(kw in name for kw in keywords): + return category + + for words, fallback in [ + (['meat', 'beef', 'pork', 'chicken'], 'meat'), + (['vegetable', 'veggie', 'produce'], 'vegetables'), + (['fruit'], 'fruits'), + (['dairy'], 'dairy'), + (['frozen'], 'frozen_foods'), + ]: + if any(w in name for w in words): + return fallback + + return 'dry_goods' + + def get_shelf_life_info(self, category: str, location: str) -> Optional[int]: + """Shelf life in days for a given category + location, or None.""" + return self.SHELF_LIFE.get(category.lower().strip(), {}).get(location) + + def list_categories(self) -> List[str]: + return list(self.SHELF_LIFE.keys()) + + def list_locations(self) -> List[str]: + locations: set[str] = set() + for shelf_life in self.SHELF_LIFE.values(): + locations.update(shelf_life.keys()) + 
return sorted(locations) + + # โ”€โ”€ Private helpers โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + + def _lookup_days(self, category: Optional[str], location: str) -> Optional[int]: + """Pure deterministic lookup โ€” no I/O.""" + if not category: + return None + cat = category.lower().strip() + if cat not in self.SHELF_LIFE: + for key in self.SHELF_LIFE: + if key in cat or cat in key: + cat = key + break + else: + return None + + days = self.SHELF_LIFE[cat].get(location) + if days is None: + for loc in ('fridge', 'pantry', 'freezer', 'cabinet'): + days = self.SHELF_LIFE[cat].get(loc) + if days is not None: + break + return days + + def _llm_predict_days( + self, + product_name: str, + category: Optional[str], + location: str, + ) -> Optional[int]: + """ + Ask the LLM how many days this product keeps in the given location. + + TODO: Fill in the prompts below. Good prompts should: + - Give enough context for the LLM to reason about food safety + - Specify output format clearly (just an integer โ€” nothing else) + - Err conservative (shorter shelf life) when uncertain + - Stay concise โ€” this fires on every unknown barcode scan + + Parameters available: + product_name โ€” e.g. "Trader Joe's Organic Tempeh" + category โ€” best-guess from get_category_from_product(), may be None + location โ€” "fridge" | "freezer" | "pantry" | "cabinet" + """ + assert self._router is not None + + system = ( + "You are a food safety expert. Given a food product name, an optional " + "category hint, and a storage location, respond with ONLY a single " + "integer: the number of days the product typically remains safe to eat " + "from purchase when stored as specified. No explanation, no units, no " + "punctuation โ€” just the integer. When uncertain, give the conservative " + "(shorter) estimate." 
+ ) + + parts = [f"Product: {product_name}"] + if category: + parts.append(f"Category: {category}") + parts.append(f"Storage location: {location}") + parts.append("Days until expiry from purchase:") + prompt = "\n".join(parts) + + try: + raw = self._router.complete(prompt, system=system, max_tokens=16) + match = re.search(r'\b(\d+)\b', raw) + if match: + days = int(match.group(1)) + # Sanity cap: >5 years is implausible for a perishable unknown to + # the deterministic table. If the LLM returns something absurd, + # fall back to None rather than storing a misleading date. + if days > 1825: + logger.warning( + "LLM returned implausible shelf life (%d days) for %r โ€” discarding", + days, product_name, + ) + return None + logger.debug( + "LLM shelf life for %r in %s: %d days", product_name, location, days + ) + return days + except Exception as e: + logger.warning("LLM expiry prediction failed for %r: %s", product_name, e) + return None diff --git a/app/services/export/__init__.py b/app/services/export/__init__.py new file mode 100644 index 0000000..3dac33f --- /dev/null +++ b/app/services/export/__init__.py @@ -0,0 +1 @@ +# app/services/export/__init__.py diff --git a/app/services/export/spreadsheet_export.py b/app/services/export/spreadsheet_export.py new file mode 100644 index 0000000..5901e79 --- /dev/null +++ b/app/services/export/spreadsheet_export.py @@ -0,0 +1,325 @@ +# app/services/export/spreadsheet_export.py +""" +Service for exporting receipt data to CSV and Excel formats. + +This module provides functionality to convert receipt and quality assessment +data into spreadsheet formats for easy viewing and analysis. +""" + +import pandas as pd +from datetime import datetime +from typing import List, Dict, Optional +from pathlib import Path + +from app.models.schemas.receipt import ReceiptResponse +from app.models.schemas.quality import QualityAssessment + + +class SpreadsheetExporter: + """ + Service for exporting receipt data to CSV/Excel formats. 
+ + Provides methods to convert receipt and quality assessment data into + spreadsheet formats that can be opened in Excel, Google Sheets, or + LibreOffice Calc. + """ + + def export_to_csv( + self, + receipts: List[ReceiptResponse], + quality_data: Dict[str, QualityAssessment], + ocr_data: Optional[Dict[str, Dict]] = None + ) -> str: + """ + Export receipts to CSV format. + + Args: + receipts: List of receipt responses + quality_data: Dict mapping receipt_id to quality assessment + ocr_data: Optional dict mapping receipt_id to OCR extracted data + + Returns: + CSV string ready for download + """ + df = self._receipts_to_dataframe(receipts, quality_data, ocr_data) + return df.to_csv(index=False) + + def export_to_excel( + self, + receipts: List[ReceiptResponse], + quality_data: Dict[str, QualityAssessment], + output_path: str, + ocr_data: Optional[Dict[str, Dict]] = None + ) -> None: + """ + Export receipts to Excel format with multiple sheets. + + Creates an Excel file with sheets: + - Receipts: Main receipt data with OCR results + - Line Items: Detailed items from all receipts (if OCR available) + - Quality Details: Detailed quality metrics + - Summary: Aggregated statistics + + Args: + receipts: List of receipt responses + quality_data: Dict mapping receipt_id to quality assessment + output_path: Path to save Excel file + ocr_data: Optional dict mapping receipt_id to OCR extracted data + """ + with pd.ExcelWriter(output_path, engine='openpyxl') as writer: + # Sheet 1: Receipts with OCR data + receipts_df = self._receipts_to_dataframe(receipts, quality_data, ocr_data) + receipts_df.to_excel(writer, sheet_name='Receipts', index=False) + + # Sheet 2: Line Items (if OCR data available) + if ocr_data: + items_df = self._items_to_dataframe(receipts, ocr_data) + if not items_df.empty: + items_df.to_excel(writer, sheet_name='Line Items', index=False) + + # Sheet 3: Quality Details + if quality_data: + quality_df = self._quality_to_dataframe(quality_data) + 
quality_df.to_excel(writer, sheet_name='Quality Details', index=False) + + # Sheet 4: Summary + summary_df = self._create_summary(receipts, quality_data, ocr_data) + summary_df.to_excel(writer, sheet_name='Summary', index=False) + + def _receipts_to_dataframe( + self, + receipts: List[ReceiptResponse], + quality_data: Dict[str, QualityAssessment], + ocr_data: Optional[Dict[str, Dict]] = None + ) -> pd.DataFrame: + """ + Convert receipts to pandas DataFrame. + + Args: + receipts: List of receipt responses + quality_data: Dict mapping receipt_id to quality assessment + ocr_data: Optional dict mapping receipt_id to OCR extracted data + + Returns: + DataFrame with receipt data + """ + data = [] + for receipt in receipts: + quality = quality_data.get(receipt.id) + ocr = ocr_data.get(receipt.id) if ocr_data else None + + # Base columns + row = { + 'ID': receipt.id, + 'Filename': receipt.filename, + 'Status': receipt.status, + 'Quality Score': quality.overall_score if quality else None, + } + + # Add OCR data if available + if ocr: + merchant = ocr.get('merchant', {}) + transaction = ocr.get('transaction', {}) + totals = ocr.get('totals', {}) + items = ocr.get('items', []) + + row.update({ + 'Merchant': merchant.get('name', ''), + 'Store Address': merchant.get('address', ''), + 'Store Phone': merchant.get('phone', ''), + 'Date': transaction.get('date', ''), + 'Time': transaction.get('time', ''), + 'Receipt Number': transaction.get('receipt_number', ''), + 'Item Count': len(items), + 'Subtotal': totals.get('subtotal', ''), + 'Tax': totals.get('tax', ''), + 'Total': totals.get('total', ''), + 'Payment Method': totals.get('payment_method', ''), + 'OCR Confidence': ocr.get('confidence', {}).get('overall', ''), + }) + + # Add items as text + items_text = '; '.join([ + f"{item.get('name', 'Unknown')} (${item.get('total_price', 0):.2f})" + for item in items[:10] # Limit to first 10 items for CSV + ]) + if len(items) > 10: + items_text += f'; ... 
and {len(items) - 10} more items' + row['Items'] = items_text + else: + # No OCR data - show image metadata instead + row.update({ + 'Merchant': 'N/A - No OCR', + 'Date': '', + 'Total': '', + 'Item Count': 0, + 'Width': receipt.metadata.get('width'), + 'Height': receipt.metadata.get('height'), + 'File Size (KB)': round(receipt.metadata.get('file_size_bytes', 0) / 1024, 2), + }) + + data.append(row) + + return pd.DataFrame(data) + + def _items_to_dataframe( + self, + receipts: List[ReceiptResponse], + ocr_data: Dict[str, Dict] + ) -> pd.DataFrame: + """ + Convert line items from all receipts to DataFrame. + + Args: + receipts: List of receipt responses + ocr_data: Dict mapping receipt_id to OCR extracted data + + Returns: + DataFrame with all line items from all receipts + """ + data = [] + for receipt in receipts: + ocr = ocr_data.get(receipt.id) + if not ocr: + continue + + merchant = ocr.get('merchant', {}).get('name', 'Unknown') + date = ocr.get('transaction', {}).get('date', '') + items = ocr.get('items', []) + + for item in items: + data.append({ + 'Receipt ID': receipt.id, + 'Receipt File': receipt.filename, + 'Merchant': merchant, + 'Date': date, + 'Item Name': item.get('name', 'Unknown'), + 'Quantity': item.get('quantity', 1), + 'Unit Price': item.get('unit_price', ''), + 'Total Price': item.get('total_price', 0), + 'Category': item.get('category', ''), + 'Tax Code': item.get('tax_code', ''), + 'Discount': item.get('discount', 0), + }) + + return pd.DataFrame(data) + + def _quality_to_dataframe( + self, + quality_data: Dict[str, QualityAssessment] + ) -> pd.DataFrame: + """ + Convert quality assessments to DataFrame. 
+ + Args: + quality_data: Dict mapping receipt_id to quality assessment + + Returns: + DataFrame with quality metrics + """ + data = [] + for receipt_id, quality in quality_data.items(): + metrics = quality.metrics + row = { + 'Receipt ID': receipt_id, + 'Overall Score': round(quality.overall_score, 2), + 'Acceptable': quality.is_acceptable, + 'Blur Score': round(metrics.get('blur_score', 0), 2), + 'Lighting Score': round(metrics.get('lighting_score', 0), 2), + 'Contrast Score': round(metrics.get('contrast_score', 0), 2), + 'Size Score': round(metrics.get('size_score', 0), 2), + 'Fold Detected': metrics.get('fold_detected', False), + 'Fold Severity': round(metrics.get('fold_severity', 0), 2), + 'Suggestions': '; '.join(quality.suggestions) if quality.suggestions else 'None', + } + data.append(row) + + return pd.DataFrame(data) + + def _create_summary( + self, + receipts: List[ReceiptResponse], + quality_data: Dict[str, QualityAssessment], + ocr_data: Optional[Dict[str, Dict]] = None + ) -> pd.DataFrame: + """ + Create summary statistics DataFrame. 
+ + Args: + receipts: List of receipt responses + quality_data: Dict mapping receipt_id to quality assessment + ocr_data: Optional dict mapping receipt_id to OCR extracted data + + Returns: + DataFrame with summary statistics + """ + quality_scores = [q.overall_score for q in quality_data.values() if q] + + # Count statuses + status_counts = {} + for receipt in receipts: + status_counts[receipt.status] = status_counts.get(receipt.status, 0) + 1 + + metrics = [ + 'Total Receipts', + 'Processed', + 'Processing', + 'Uploaded', + 'Failed', + 'Average Quality Score', + 'Best Quality Score', + 'Worst Quality Score', + 'Acceptable Quality Count', + 'Unacceptable Quality Count', + ] + + values = [ + len(receipts), + status_counts.get('processed', 0), + status_counts.get('processing', 0), + status_counts.get('uploaded', 0), + status_counts.get('error', 0), + f"{sum(quality_scores) / len(quality_scores):.2f}" if quality_scores else 'N/A', + f"{max(quality_scores):.2f}" if quality_scores else 'N/A', + f"{min(quality_scores):.2f}" if quality_scores else 'N/A', + len([q for q in quality_data.values() if q and q.is_acceptable]), + len([q for q in quality_data.values() if q and not q.is_acceptable]), + ] + + # Add OCR statistics if available + if ocr_data: + receipts_with_ocr = len([r for r in receipts if r.id in ocr_data]) + total_items = sum(len(ocr.get('items', [])) for ocr in ocr_data.values()) + total_spent = sum( + ocr.get('totals', {}).get('total', 0) or 0 + for ocr in ocr_data.values() + ) + avg_confidence = sum( + ocr.get('confidence', {}).get('overall', 0) or 0 + for ocr in ocr_data.values() + ) / len(ocr_data) if ocr_data else 0 + + metrics.extend([ + '', # Blank row + 'OCR Statistics', + 'Receipts with OCR Data', + 'Total Line Items Extracted', + 'Total Amount Spent', + 'Average OCR Confidence', + ]) + + values.extend([ + '', + '', + receipts_with_ocr, + total_items, + f"${total_spent:.2f}" if total_spent > 0 else 'N/A', + f"{avg_confidence:.2%}" if avg_confidence > 0 
else 'N/A', + ]) + + summary = { + 'Metric': metrics, + 'Value': values + } + + return pd.DataFrame(summary) diff --git a/app/services/image_preprocessing/__init__.py b/app/services/image_preprocessing/__init__.py new file mode 100644 index 0000000..617c1e0 --- /dev/null +++ b/app/services/image_preprocessing/__init__.py @@ -0,0 +1,10 @@ +# app/services/image_preprocessing/__init__.py +""" +Image preprocessing services for Kiwi. +Contains functions for image enhancement, format conversion, and perspective correction. +""" + +from app.services.image_preprocessing.format_conversion import convert_to_standard_format, extract_metadata +from app.services.image_preprocessing.enhancement import enhance_image, correct_perspective + +__all__ = ["convert_to_standard_format", "extract_metadata", "enhance_image", "correct_perspective"] \ No newline at end of file diff --git a/app/services/image_preprocessing/enhancement.py b/app/services/image_preprocessing/enhancement.py new file mode 100644 index 0000000..9080315 --- /dev/null +++ b/app/services/image_preprocessing/enhancement.py @@ -0,0 +1,172 @@ +#!/usr/bin/env python +# app/services/image_preprocessing/ +import cv2 +import numpy as np +import logging +from pathlib import Path +from typing import Tuple, Optional + +logger = logging.getLogger(__name__) + +def enhance_image( + image_path: Path, + output_path: Optional[Path] = None, + adaptive_threshold: bool = True, + denoise: bool = True, +) -> Tuple[bool, str, Optional[Path]]: + """ + Enhance receipt image for better OCR. 
+ + Args: + image_path: Path to input image + output_path: Optional path to save enhanced image + adaptive_threshold: Whether to apply adaptive thresholding + denoise: Whether to apply denoising + + Returns: + Tuple containing (success, message, output_path) + """ + try: + # Check if CUDA is available + use_cuda = cv2.cuda.getCudaEnabledDeviceCount() > 0 + + # Set output path if not provided + if output_path is None: + output_path = image_path.with_stem(f"{image_path.stem}_enhanced") + + # Read image + img = cv2.imread(str(image_path)) + if img is None: + return False, f"Failed to read image: {image_path}", None + + # Convert to grayscale + gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY) + + # Apply denoising if requested + if denoise: + if use_cuda: + # GPU accelerated denoising + gpu_img = cv2.cuda_GpuMat() + gpu_img.upload(gray) + gpu_result = cv2.cuda.createNonLocalMeans().apply(gpu_img) + denoised = gpu_result.download() + else: + # CPU denoising + denoised = cv2.fastNlMeansDenoising(gray, None, 10, 7, 21) + else: + denoised = gray + + # Apply adaptive thresholding if requested + if adaptive_threshold: + # Adaptive thresholding works well for receipts with varying backgrounds + binary = cv2.adaptiveThreshold( + denoised, + 255, + cv2.ADAPTIVE_THRESH_GAUSSIAN_C, + cv2.THRESH_BINARY, + 11, + 2 + ) + processed = binary + else: + processed = denoised + + # Write enhanced image + success = cv2.imwrite(str(output_path), processed) + if not success: + return False, f"Failed to write enhanced image to {output_path}", None + + return True, "Image enhanced successfully", output_path + + except Exception as e: + logger.exception(f"Error enhancing image: {e}") + return False, f"Error enhancing image: {str(e)}", None + +def correct_perspective( + image_path: Path, + output_path: Optional[Path] = None, +) -> Tuple[bool, str, Optional[Path]]: + """ + Correct perspective distortion in receipt image. 
+ + Args: + image_path: Path to input image + output_path: Optional path to save corrected image + + Returns: + Tuple containing (success, message, output_path) + """ + try: + # Set output path if not provided + if output_path is None: + output_path = image_path.with_stem(f"{image_path.stem}_perspective") + + # Read image + img = cv2.imread(str(image_path)) + if img is None: + return False, f"Failed to read image: {image_path}", None + + # Convert to grayscale + gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY) + + # Apply Gaussian blur to reduce noise + blur = cv2.GaussianBlur(gray, (5, 5), 0) + + # Apply edge detection + edges = cv2.Canny(blur, 50, 150, apertureSize=3) + + # Find contours + contours, _ = cv2.findContours(edges, cv2.RETR_LIST, cv2.CHAIN_APPROX_SIMPLE) + + # Find the largest contour by area which is likely the receipt + if not contours: + return False, "No contours found in image", None + + largest_contour = max(contours, key=cv2.contourArea) + + # Approximate the contour to get the corners + epsilon = 0.02 * cv2.arcLength(largest_contour, True) + approx = cv2.approxPolyDP(largest_contour, epsilon, True) + + # If we have a quadrilateral, we can apply perspective transform + if len(approx) == 4: + # Sort the points for the perspective transform + # This is a simplified implementation + src_pts = approx.reshape(4, 2).astype(np.float32) + + # Get width and height for the destination image + width = int(max( + np.linalg.norm(src_pts[0] - src_pts[1]), + np.linalg.norm(src_pts[2] - src_pts[3]) + )) + height = int(max( + np.linalg.norm(src_pts[0] - src_pts[3]), + np.linalg.norm(src_pts[1] - src_pts[2]) + )) + + # Define destination points + dst_pts = np.array([ + [0, 0], + [width - 1, 0], + [width - 1, height - 1], + [0, height - 1] + ], dtype=np.float32) + + # Get perspective transform matrix + M = cv2.getPerspectiveTransform(src_pts, dst_pts) + + # Apply perspective transform + warped = cv2.warpPerspective(img, M, (width, height)) + + # Write corrected image 
+ success = cv2.imwrite(str(output_path), warped) + if not success: + return False, f"Failed to write perspective-corrected image to {output_path}", None + + return True, "Perspective corrected successfully", output_path + else: + return False, "Receipt corners not clearly detected", None + + except Exception as e: + logger.exception(f"Error correcting perspective: {e}") + return False, f"Error correcting perspective: {str(e)}", None \ No newline at end of file diff --git a/app/services/image_preprocessing/format_conversion.py b/app/services/image_preprocessing/format_conversion.py new file mode 100644 index 0000000..7da5fd5 --- /dev/null +++ b/app/services/image_preprocessing/format_conversion.py @@ -0,0 +1,89 @@ +#!/usr/bin/env python +# app/services/image_preprocessing/format_conversion.py +import cv2 +import numpy as np +import logging +from pathlib import Path +from typing import Tuple, Optional + +logger = logging.getLogger(__name__) + +def convert_to_standard_format( + image_path: Path, + output_path: Optional[Path] = None, + target_format: str = "png" +) -> Tuple[bool, str, Optional[Path]]: + """ + Convert image to standard internal format. 
+ + Args: + image_path: Path to input image + output_path: Optional path to save converted image + target_format: Target format (png, jpg) + + Returns: + Tuple containing (success, message, output_path) + """ + try: + # Check if CUDA is available and set up GPU processing + if cv2.cuda.getCudaEnabledDeviceCount() > 0: + logger.info("CUDA available, using GPU acceleration") + use_cuda = True + else: + logger.info("CUDA not available, using CPU processing") + use_cuda = False + + # Read image + img = cv2.imread(str(image_path)) + if img is None: + return False, f"Failed to read image: {image_path}", None + + # If PDF, extract first page (simplified for Phase 1) + if image_path.suffix.lower() == '.pdf': + # This is a placeholder for PDF handling + # In a real implementation, you'd use a PDF processing library + return False, "PDF processing not implemented in Phase 1", None + + # Set output path if not provided + if output_path is None: + output_path = image_path.with_suffix(f".{target_format}") + + # Write converted image + success = cv2.imwrite(str(output_path), img) + if not success: + return False, f"Failed to write converted image to {output_path}", None + + return True, "Image converted successfully", output_path + + except Exception as e: + logger.exception(f"Error converting image: {e}") + return False, f"Error converting image: {str(e)}", None + +def extract_metadata(image_path: Path) -> dict: + """ + Extract metadata from image file. 
+ + Args: + image_path: Path to input image + + Returns: + Dictionary containing metadata + """ + metadata = { + "filename": image_path.name, + "original_format": image_path.suffix.lstrip(".").lower(), + "file_size_bytes": image_path.stat().st_size, + } + + try: + img = cv2.imread(str(image_path)) + if img is not None: + metadata.update({ + "width": img.shape[1], + "height": img.shape[0], + "channels": img.shape[2] if len(img.shape) > 2 else 1, + }) + except Exception as e: + logger.exception(f"Error extracting image metadata: {e}") + + return metadata \ No newline at end of file diff --git a/app/services/inventory_service.py b/app/services/inventory_service.py new file mode 100644 index 0000000..1993481 --- /dev/null +++ b/app/services/inventory_service.py @@ -0,0 +1,539 @@ +""" +Inventory management service. + +This service orchestrates: +- Barcode scanning +- Product lookups (OpenFoodFacts) +- Inventory CRUD operations +- Tag management +""" + +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy import select, func, and_, or_ +from sqlalchemy.orm import selectinload +from typing import List, Optional, Dict, Any +from datetime import date, datetime, timedelta +from pathlib import Path +from uuid import UUID +import uuid +import logging + +from app.db.models import Product, InventoryItem, Tag, product_tags +from app.models.schemas.inventory import ( + ProductCreate, + ProductUpdate, + ProductResponse, + InventoryItemCreate, + InventoryItemUpdate, + InventoryItemResponse, + TagCreate, + TagResponse, + InventoryStats, +) +from app.services.barcode_scanner import BarcodeScanner +from app.services.openfoodfacts import OpenFoodFactsService +from app.services.expiration_predictor import ExpirationPredictor + +logger = logging.getLogger(__name__) + + +class InventoryService: + """Service for managing inventory and products.""" + + def __init__(self): + self.barcode_scanner = BarcodeScanner() + self.openfoodfacts = OpenFoodFactsService() + 
self.expiration_predictor = ExpirationPredictor() + + # ========== Barcode Scanning ========== + + async def scan_barcode_image( + self, + image_path: Path, + db: AsyncSession, + auto_add: bool = True, + location: str = "pantry", + quantity: float = 1.0, + ) -> Dict[str, Any]: + """ + Scan an image for barcodes and optionally add to inventory. + + Args: + image_path: Path to image file + db: Database session + auto_add: Whether to auto-add to inventory + location: Default storage location + quantity: Default quantity + + Returns: + Dictionary with scan results + """ + # Scan for barcodes + barcodes = self.barcode_scanner.scan_image(image_path) + + if not barcodes: + return { + "success": False, + "barcodes_found": 0, + "results": [], + "message": "No barcodes detected in image", + } + + results = [] + for barcode_data in barcodes: + result = await self._process_barcode( + barcode_data, db, auto_add, location, quantity + ) + results.append(result) + + return { + "success": True, + "barcodes_found": len(barcodes), + "results": results, + "message": f"Found {len(barcodes)} barcode(s)", + } + + async def _process_barcode( + self, + barcode_data: Dict[str, Any], + db: AsyncSession, + auto_add: bool, + location: str, + quantity: float, + ) -> Dict[str, Any]: + """Process a single barcode detection.""" + barcode = barcode_data["data"] + barcode_type = barcode_data["type"] + + # Check if product already exists + product = await self.get_product_by_barcode(db, barcode) + + # If not found, lookup in OpenFoodFacts + if not product: + off_data = await self.openfoodfacts.lookup_product(barcode) + + if off_data: + # Create product from OpenFoodFacts data + product_create = ProductCreate( + barcode=barcode, + name=off_data["name"], + brand=off_data.get("brand"), + category=off_data.get("category"), + description=off_data.get("description"), + image_url=off_data.get("image_url"), + nutrition_data=off_data.get("nutrition_data", {}), + source="openfoodfacts", + 
source_data=off_data.get("raw_data", {}), + ) + product = await self.create_product(db, product_create) + source = "openfoodfacts" + else: + # Product not found in OpenFoodFacts + # Create a placeholder product + product_create = ProductCreate( + barcode=barcode, + name=f"Unknown Product ({barcode})", + source="manual", + ) + product = await self.create_product(db, product_create) + source = "manual" + else: + source = product.source + + # Auto-add to inventory if requested + inventory_item = None + predicted_expiration = None + if auto_add: + # Predict expiration date based on product category and location + category = self.expiration_predictor.get_category_from_product( + product.name, + product.category, + [tag.name for tag in product.tags] if product.tags else None + ) + if category: + predicted_expiration = self.expiration_predictor.predict_expiration( + category, + location, + date.today() + ) + + item_create = InventoryItemCreate( + product_id=product.id, + quantity=quantity, + location=location, + purchase_date=date.today(), + expiration_date=predicted_expiration, + source="barcode_scan", + ) + inventory_item = await self.create_inventory_item(db, item_create) + + return { + "barcode": barcode, + "barcode_type": barcode_type, + "quality": barcode_data["quality"], + "product": ProductResponse.from_orm(product), + "inventory_item": ( + InventoryItemResponse.from_orm(inventory_item) if inventory_item else None + ), + "source": source, + "predicted_expiration": predicted_expiration.isoformat() if predicted_expiration else None, + "predicted_category": category if auto_add else None, + } + + # ========== Product Management ========== + + async def create_product( + self, + db: AsyncSession, + product: ProductCreate, + ) -> Product: + """Create a new product.""" + # Create product + db_product = Product( + id=uuid.uuid4(), + barcode=product.barcode, + name=product.name, + brand=product.brand, + category=product.category, + description=product.description, + 
image_url=product.image_url, + nutrition_data=product.nutrition_data, + source=product.source, + source_data=product.source_data, + ) + + db.add(db_product) + await db.flush() + + # Add tags if specified + if product.tag_ids: + for tag_id in product.tag_ids: + tag = await db.get(Tag, tag_id) + if tag: + db_product.tags.append(tag) + + await db.commit() + await db.refresh(db_product, ["tags"]) + + return db_product + + async def get_product(self, db: AsyncSession, product_id: UUID) -> Optional[Product]: + """Get a product by ID.""" + result = await db.execute( + select(Product).where(Product.id == product_id).options(selectinload(Product.tags)) + ) + return result.scalar_one_or_none() + + async def get_product_by_barcode( + self, db: AsyncSession, barcode: str + ) -> Optional[Product]: + """Get a product by barcode.""" + result = await db.execute( + select(Product).where(Product.barcode == barcode).options(selectinload(Product.tags)) + ) + return result.scalar_one_or_none() + + async def list_products( + self, + db: AsyncSession, + skip: int = 0, + limit: int = 100, + category: Optional[str] = None, + ) -> List[Product]: + """List products with optional filtering.""" + query = select(Product).options(selectinload(Product.tags)) + + if category: + query = query.where(Product.category == category) + + query = query.offset(skip).limit(limit).order_by(Product.name) + + result = await db.execute(query) + return list(result.scalars().all()) + + async def update_product( + self, + db: AsyncSession, + product_id: UUID, + product_update: ProductUpdate, + ) -> Optional[Product]: + """Update a product.""" + product = await self.get_product(db, product_id) + if not product: + return None + + # Update fields + for field, value in product_update.dict(exclude_unset=True).items(): + if field == "tag_ids": + # Update tags + product.tags = [] + for tag_id in value: + tag = await db.get(Tag, tag_id) + if tag: + product.tags.append(tag) + else: + setattr(product, field, value) + + 
product.updated_at = datetime.utcnow() + await db.commit() + await db.refresh(product, ["tags"]) + + return product + + async def delete_product(self, db: AsyncSession, product_id: UUID) -> bool: + """Delete a product (will fail if inventory items exist).""" + product = await self.get_product(db, product_id) + if not product: + return False + + await db.delete(product) + await db.commit() + return True + + # ========== Inventory Item Management ========== + + async def create_inventory_item( + self, + db: AsyncSession, + item: InventoryItemCreate, + ) -> InventoryItem: + """Create a new inventory item.""" + db_item = InventoryItem( + id=uuid.uuid4(), + product_id=item.product_id, + quantity=item.quantity, + unit=item.unit, + location=item.location, + sublocation=item.sublocation, + purchase_date=item.purchase_date, + expiration_date=item.expiration_date, + notes=item.notes, + source=item.source, + status="available", + ) + + db.add(db_item) + await db.commit() + await db.refresh(db_item, ["product"]) + + return db_item + + async def get_inventory_item( + self, db: AsyncSession, item_id: UUID + ) -> Optional[InventoryItem]: + """Get an inventory item by ID.""" + result = await db.execute( + select(InventoryItem) + .where(InventoryItem.id == item_id) + .options(selectinload(InventoryItem.product).selectinload(Product.tags)) + ) + return result.scalar_one_or_none() + + async def list_inventory_items( + self, + db: AsyncSession, + skip: int = 0, + limit: int = 100, + location: Optional[str] = None, + status: str = "available", + ) -> List[InventoryItem]: + """List inventory items with filtering.""" + query = select(InventoryItem).options( + selectinload(InventoryItem.product).selectinload(Product.tags) + ) + + query = query.where(InventoryItem.status == status) + + if location: + query = query.where(InventoryItem.location == location) + + query = ( + query.offset(skip) + .limit(limit) + .order_by(InventoryItem.expiration_date.asc().nullsfirst()) + ) + + result = await 
db.execute(query) + return list(result.scalars().all()) + + async def update_inventory_item( + self, + db: AsyncSession, + item_id: UUID, + item_update: InventoryItemUpdate, + ) -> Optional[InventoryItem]: + """Update an inventory item.""" + item = await self.get_inventory_item(db, item_id) + if not item: + return None + + for field, value in item_update.dict(exclude_unset=True).items(): + setattr(item, field, value) + + item.updated_at = datetime.utcnow() + + if item_update.status == "consumed" and not item.consumed_at: + item.consumed_at = datetime.utcnow() + + await db.commit() + await db.refresh(item, ["product"]) + + return item + + async def delete_inventory_item(self, db: AsyncSession, item_id: UUID) -> bool: + """Delete an inventory item.""" + item = await self.get_inventory_item(db, item_id) + if not item: + return False + + await db.delete(item) + await db.commit() + return True + + async def mark_as_consumed( + self, db: AsyncSession, item_id: UUID + ) -> Optional[InventoryItem]: + """Mark an inventory item as consumed.""" + return await self.update_inventory_item( + db, item_id, InventoryItemUpdate(status="consumed") + ) + + # ========== Tag Management ========== + + async def create_tag(self, db: AsyncSession, tag: TagCreate) -> Tag: + """Create a new tag.""" + db_tag = Tag( + id=uuid.uuid4(), + name=tag.name, + slug=tag.slug, + description=tag.description, + color=tag.color, + category=tag.category, + ) + + db.add(db_tag) + await db.commit() + await db.refresh(db_tag) + + return db_tag + + async def get_tag(self, db: AsyncSession, tag_id: UUID) -> Optional[Tag]: + """Get a tag by ID.""" + return await db.get(Tag, tag_id) + + async def list_tags( + self, db: AsyncSession, category: Optional[str] = None + ) -> List[Tag]: + """List all tags, optionally filtered by category.""" + query = select(Tag).order_by(Tag.name) + + if category: + query = query.where(Tag.category == category) + + result = await db.execute(query) + return list(result.scalars().all()) 
+ + # ========== Statistics and Analytics ========== + + async def get_inventory_stats(self, db: AsyncSession) -> InventoryStats: + """Get inventory statistics.""" + # Total items (available only) + total_result = await db.execute( + select(func.count(InventoryItem.id)).where(InventoryItem.status == "available") + ) + total_items = total_result.scalar() or 0 + + # Total unique products + products_result = await db.execute( + select(func.count(func.distinct(InventoryItem.product_id))).where( + InventoryItem.status == "available" + ) + ) + total_products = products_result.scalar() or 0 + + # Items by location + location_result = await db.execute( + select( + InventoryItem.location, + func.count(InventoryItem.id).label("count"), + ) + .where(InventoryItem.status == "available") + .group_by(InventoryItem.location) + ) + items_by_location = {row[0]: row[1] for row in location_result} + + # Items by status + status_result = await db.execute( + select(InventoryItem.status, func.count(InventoryItem.id).label("count")).group_by( + InventoryItem.status + ) + ) + items_by_status = {row[0]: row[1] for row in status_result} + + # Expiring soon (next 7 days) + today = date.today() + week_from_now = today + timedelta(days=7) + + expiring_result = await db.execute( + select(func.count(InventoryItem.id)).where( + and_( + InventoryItem.status == "available", + InventoryItem.expiration_date.isnot(None), + InventoryItem.expiration_date <= week_from_now, + InventoryItem.expiration_date >= today, + ) + ) + ) + expiring_soon = expiring_result.scalar() or 0 + + # Expired + expired_result = await db.execute( + select(func.count(InventoryItem.id)).where( + and_( + InventoryItem.status == "available", + InventoryItem.expiration_date.isnot(None), + InventoryItem.expiration_date < today, + ) + ) + ) + expired = expired_result.scalar() or 0 + + return InventoryStats( + total_items=total_items, + total_products=total_products, + items_by_location=items_by_location, + 
items_by_status=items_by_status, + expiring_soon=expiring_soon, + expired=expired, + ) + + async def get_expiring_items( + self, db: AsyncSession, days: int = 7 + ) -> List[Dict[str, Any]]: + """Get items expiring within N days.""" + today = date.today() + cutoff_date = today + timedelta(days=days) + + result = await db.execute( + select(InventoryItem) + .where( + and_( + InventoryItem.status == "available", + InventoryItem.expiration_date.isnot(None), + InventoryItem.expiration_date <= cutoff_date, + InventoryItem.expiration_date >= today, + ) + ) + .options(selectinload(InventoryItem.product).selectinload(Product.tags)) + .order_by(InventoryItem.expiration_date.asc()) + ) + + items = result.scalars().all() + + return [ + { + "inventory_item": item, + "days_until_expiry": (item.expiration_date - today).days, + } + for item in items + ] diff --git a/app/services/ocr/__init__.py b/app/services/ocr/__init__.py new file mode 100644 index 0000000..e00217c --- /dev/null +++ b/app/services/ocr/__init__.py @@ -0,0 +1,5 @@ +"""OCR services for receipt text extraction.""" + +from .vl_model import VisionLanguageOCR + +__all__ = ["VisionLanguageOCR"] diff --git a/app/services/ocr/vl_model.py b/app/services/ocr/vl_model.py new file mode 100644 index 0000000..feea1f2 --- /dev/null +++ b/app/services/ocr/vl_model.py @@ -0,0 +1,371 @@ +#!/usr/bin/env python +""" +Vision-Language Model service for receipt OCR and structured data extraction. + +Uses Qwen3-VL-2B-Instruct for intelligent receipt processing that combines +OCR with understanding of receipt structure to extract structured JSON data. 
+""" + +import json +import logging +import re +from pathlib import Path +from typing import Dict, Any, Optional, List +from datetime import datetime + +from PIL import Image +import torch +from transformers import ( + Qwen2VLForConditionalGeneration, + AutoProcessor, + BitsAndBytesConfig +) + +from app.core.config import settings + +logger = logging.getLogger(__name__) + + +class VisionLanguageOCR: + """Vision-Language Model for receipt OCR and structured extraction.""" + + def __init__(self, use_quantization: bool = False): + """ + Initialize the VLM OCR service. + + Args: + use_quantization: Use 8-bit quantization to reduce memory usage + """ + self.model = None + self.processor = None + self.device = "cuda" if torch.cuda.is_available() and settings.USE_GPU else "cpu" + self.use_quantization = use_quantization + self.model_name = "Qwen/Qwen2-VL-2B-Instruct" + + logger.info(f"Initializing VisionLanguageOCR with device: {self.device}") + + # Lazy loading - model will be loaded on first use + self._model_loaded = False + + def _load_model(self): + """Load the VLM model (lazy loading).""" + if self._model_loaded: + return + + logger.info(f"Loading VLM model: {self.model_name}") + + try: + if self.use_quantization and self.device == "cuda": + # Use 8-bit quantization for lower memory usage + quantization_config = BitsAndBytesConfig( + load_in_8bit=True, + llm_int8_threshold=6.0 + ) + + self.model = Qwen2VLForConditionalGeneration.from_pretrained( + self.model_name, + quantization_config=quantization_config, + device_map="auto", + low_cpu_mem_usage=True + ) + logger.info("Model loaded with 8-bit quantization") + else: + # Standard FP16 loading + self.model = Qwen2VLForConditionalGeneration.from_pretrained( + self.model_name, + torch_dtype=torch.float16 if self.device == "cuda" else torch.float32, + device_map="auto" if self.device == "cuda" else None, + low_cpu_mem_usage=True + ) + + if self.device == "cpu": + self.model = self.model.to("cpu") + + logger.info(f"Model 
loaded in {'FP16' if self.device == 'cuda' else 'FP32'} mode") + + self.processor = AutoProcessor.from_pretrained(self.model_name) + self.model.eval() # Set to evaluation mode + + self._model_loaded = True + logger.info("VLM model loaded successfully") + + except Exception as e: + logger.error(f"Failed to load VLM model: {e}") + raise RuntimeError(f"Could not load VLM model: {e}") + + def extract_receipt_data(self, image_path: str) -> Dict[str, Any]: + """ + Extract structured data from receipt image. + + Args: + image_path: Path to the receipt image + + Returns: + Dictionary containing extracted receipt data with structure: + { + "merchant": {...}, + "transaction": {...}, + "items": [...], + "totals": {...}, + "confidence": {...}, + "raw_text": "...", + "warnings": [...] + } + """ + self._load_model() + + try: + # Load image + image = Image.open(image_path) + + # Convert to RGB if needed + if image.mode != 'RGB': + image = image.convert('RGB') + + # Build extraction prompt + prompt = self._build_extraction_prompt() + + # Process image and text + logger.info(f"Processing receipt image: {image_path}") + inputs = self.processor( + images=image, + text=prompt, + return_tensors="pt" + ) + + # Move to device + if self.device == "cuda": + inputs = {k: v.to("cuda", torch.float16) if isinstance(v, torch.Tensor) else v + for k, v in inputs.items()} + + # Generate + with torch.no_grad(): + output_ids = self.model.generate( + **inputs, + max_new_tokens=2048, + do_sample=False, # Deterministic for consistency + temperature=0.0, + pad_token_id=self.processor.tokenizer.pad_token_id, + eos_token_id=self.processor.tokenizer.eos_token_id, + ) + + # Decode output + output_text = self.processor.decode( + output_ids[0], + skip_special_tokens=True + ) + + # Remove the prompt from output + output_text = output_text.replace(prompt, "").strip() + + logger.info(f"VLM output length: {len(output_text)} characters") + + # Parse JSON from output + result = 
self._parse_json_from_text(output_text) + + # Add raw text for reference + result["raw_text"] = output_text + + # Validate and enhance result + result = self._validate_result(result) + + return result + + except Exception as e: + logger.error(f"Error extracting receipt data: {e}", exc_info=True) + return { + "error": str(e), + "merchant": {}, + "transaction": {}, + "items": [], + "totals": {}, + "confidence": {"overall": 0.0}, + "warnings": [f"Extraction failed: {str(e)}"] + } + + def _build_extraction_prompt(self) -> str: + """Build the prompt for receipt data extraction.""" + return """You are a receipt OCR specialist. Extract all information from this receipt image and return it in the exact JSON format specified below. + +Return a JSON object with this exact structure: +{ + "merchant": { + "name": "Store Name", + "address": "123 Main St, City, State ZIP", + "phone": "555-1234" + }, + "transaction": { + "date": "2025-10-30", + "time": "14:30:00", + "receipt_number": "12345", + "register": "01", + "cashier": "Jane" + }, + "items": [ + { + "name": "Product name", + "quantity": 2, + "unit_price": 10.99, + "total_price": 21.98, + "category": "grocery", + "tax_code": "F", + "discount": 0.00 + } + ], + "totals": { + "subtotal": 21.98, + "tax": 1.98, + "discount": 0.00, + "total": 23.96, + "payment_method": "Credit Card", + "amount_paid": 23.96, + "change": 0.00 + }, + "confidence": { + "overall": 0.95, + "merchant": 0.98, + "items": 0.92, + "totals": 0.97 + } +} + +Important instructions: +1. Extract ALL items from the receipt, no matter how many there are +2. Use null for fields you cannot find +3. For dates, use YYYY-MM-DD format +4. For times, use HH:MM:SS format +5. For prices, use numeric values (not strings) +6. Estimate confidence scores (0.0-1.0) based on image quality and text clarity +7. 
Return ONLY the JSON object, no other text or explanation""" + + def _parse_json_from_text(self, text: str) -> Dict[str, Any]: + """ + Extract and parse JSON from model output text. + + Args: + text: Raw text output from the model + + Returns: + Parsed JSON dictionary + """ + # Try to find JSON object in text + # Look for content between first { and last } + json_match = re.search(r'\{.*\}', text, re.DOTALL) + + if json_match: + json_str = json_match.group(0) + try: + return json.loads(json_str) + except json.JSONDecodeError as e: + logger.warning(f"Failed to parse JSON: {e}") + # Try to fix common issues + json_str = self._fix_json(json_str) + try: + return json.loads(json_str) + except json.JSONDecodeError: + logger.error("Could not parse JSON even after fixes") + + # Return empty structure if parsing fails + logger.warning("No valid JSON found in output, returning empty structure") + return { + "merchant": {}, + "transaction": {}, + "items": [], + "totals": {}, + "confidence": {"overall": 0.1} + } + + def _fix_json(self, json_str: str) -> str: + """Attempt to fix common JSON formatting issues.""" + # Remove trailing commas + json_str = re.sub(r',\s*}', '}', json_str) + json_str = re.sub(r',\s*]', ']', json_str) + + # Fix single quotes to double quotes + json_str = json_str.replace("'", '"') + + return json_str + + def _validate_result(self, result: Dict[str, Any]) -> Dict[str, Any]: + """ + Validate and enhance extracted data. 
+ + Args: + result: Extracted receipt data + + Returns: + Validated and enhanced result with warnings + """ + warnings = [] + + # Ensure required fields exist + required_fields = ["merchant", "transaction", "items", "totals", "confidence"] + for field in required_fields: + if field not in result: + result[field] = {} if field != "items" else [] + warnings.append(f"Missing required field: {field}") + + # Validate items + if not result.get("items"): + warnings.append("No items found on receipt") + else: + # Validate item structure + for i, item in enumerate(result["items"]): + if "total_price" not in item and "unit_price" in item and "quantity" in item: + item["total_price"] = item["unit_price"] * item["quantity"] + + # Validate totals + if result.get("items") and result.get("totals"): + calculated_subtotal = sum( + item.get("total_price", 0) + for item in result["items"] + ) + reported_subtotal = result["totals"].get("subtotal", 0) + + # Allow small variance (rounding errors) + if abs(calculated_subtotal - reported_subtotal) > 0.10: + warnings.append( + f"Total mismatch: calculated ${calculated_subtotal:.2f}, " + f"reported ${reported_subtotal:.2f}" + ) + result["totals"]["calculated_subtotal"] = calculated_subtotal + + # Validate date format + if result.get("transaction", {}).get("date"): + try: + datetime.strptime(result["transaction"]["date"], "%Y-%m-%d") + except ValueError: + warnings.append(f"Invalid date format: {result['transaction']['date']}") + + # Add warnings to result + if warnings: + result["warnings"] = warnings + + # Ensure confidence exists + if "confidence" not in result or not result["confidence"]: + result["confidence"] = { + "overall": 0.5, + "merchant": 0.5, + "items": 0.5, + "totals": 0.5 + } + + return result + + def get_model_info(self) -> Dict[str, Any]: + """Get information about the loaded model.""" + return { + "model_name": self.model_name, + "device": self.device, + "quantization": self.use_quantization, + "loaded": self._model_loaded, 
+ "gpu_available": torch.cuda.is_available(), + "gpu_memory_allocated": torch.cuda.memory_allocated() if torch.cuda.is_available() else 0, + "gpu_memory_reserved": torch.cuda.memory_reserved() if torch.cuda.is_available() else 0 + } + + def clear_cache(self): + """Clear GPU memory cache.""" + if torch.cuda.is_available(): + torch.cuda.empty_cache() + logger.info("GPU cache cleared") diff --git a/app/services/openfoodfacts.py b/app/services/openfoodfacts.py new file mode 100644 index 0000000..dbe31cd --- /dev/null +++ b/app/services/openfoodfacts.py @@ -0,0 +1,234 @@ +""" +OpenFoodFacts API integration service. + +This module provides functionality to look up product information +from the OpenFoodFacts database using barcodes (UPC/EAN). +""" + +import httpx +from typing import Optional, Dict, Any +from app.core.config import settings +import logging + +logger = logging.getLogger(__name__) + + +class OpenFoodFactsService: + """ + Service for interacting with the OpenFoodFacts API. + + OpenFoodFacts is a free, open database of food products with + ingredients, allergens, and nutrition facts. + """ + + BASE_URL = "https://world.openfoodfacts.org/api/v2" + USER_AGENT = "Kiwi/0.1.0 (https://circuitforge.tech)" + + async def lookup_product(self, barcode: str) -> Optional[Dict[str, Any]]: + """ + Look up a product by barcode in the OpenFoodFacts database. 
+ + Args: + barcode: UPC/EAN barcode (8-13 digits) + + Returns: + Dictionary with product information, or None if not found + + Example response: + { + "name": "Organic Milk", + "brand": "Horizon", + "categories": ["Dairy", "Milk"], + "image_url": "https://...", + "nutrition_data": {...}, + "raw_data": {...} # Full API response + } + """ + try: + async with httpx.AsyncClient() as client: + url = f"{self.BASE_URL}/product/{barcode}.json" + + response = await client.get( + url, + headers={"User-Agent": self.USER_AGENT}, + timeout=10.0, + ) + + if response.status_code == 404: + logger.info(f"Product not found in OpenFoodFacts: {barcode}") + return None + + response.raise_for_status() + data = response.json() + + if data.get("status") != 1: + logger.info(f"Product not found in OpenFoodFacts: {barcode}") + return None + + return self._parse_product_data(data, barcode) + + except httpx.HTTPError as e: + logger.error(f"HTTP error looking up barcode {barcode}: {e}") + return None + except Exception as e: + logger.error(f"Error looking up barcode {barcode}: {e}") + return None + + def _parse_product_data(self, data: Dict[str, Any], barcode: str) -> Dict[str, Any]: + """ + Parse OpenFoodFacts API response into our product format. 
+ + Args: + data: Raw API response + barcode: Original barcode + + Returns: + Parsed product dictionary + """ + product = data.get("product", {}) + + # Extract basic info + name = ( + product.get("product_name") + or product.get("product_name_en") + or f"Unknown Product ({barcode})" + ) + + brand = product.get("brands", "").split(",")[0].strip() if product.get("brands") else None + + # Categories (comma-separated string to list) + categories_str = product.get("categories", "") + categories = [c.strip() for c in categories_str.split(",") if c.strip()] + category = categories[0] if categories else None + + # Description + description = product.get("generic_name") or product.get("generic_name_en") + + # Image + image_url = product.get("image_url") or product.get("image_front_url") + + # Nutrition data + nutrition_data = self._extract_nutrition_data(product) + + # Allergens and dietary info + allergens = product.get("allergens_tags", []) + labels = product.get("labels_tags", []) + + return { + "name": name, + "brand": brand, + "category": category, + "categories": categories, + "description": description, + "image_url": image_url, + "nutrition_data": nutrition_data, + "allergens": allergens, + "labels": labels, + "raw_data": product, # Store full response for debugging + } + + def _extract_nutrition_data(self, product: Dict[str, Any]) -> Dict[str, Any]: + """ + Extract nutrition facts from product data. 
+ + Args: + product: Product data from OpenFoodFacts + + Returns: + Dictionary of nutrition facts + """ + nutriments = product.get("nutriments", {}) + + # Extract common nutrients (per 100g) + nutrition = {} + + # Energy + if "energy-kcal_100g" in nutriments: + nutrition["calories"] = nutriments["energy-kcal_100g"] + elif "energy_100g" in nutriments: + # Convert kJ to kcal (1 kcal = 4.184 kJ) + nutrition["calories"] = round(nutriments["energy_100g"] / 4.184, 1) + + # Macronutrients + if "fat_100g" in nutriments: + nutrition["fat_g"] = nutriments["fat_100g"] + if "saturated-fat_100g" in nutriments: + nutrition["saturated_fat_g"] = nutriments["saturated-fat_100g"] + if "carbohydrates_100g" in nutriments: + nutrition["carbohydrates_g"] = nutriments["carbohydrates_100g"] + if "sugars_100g" in nutriments: + nutrition["sugars_g"] = nutriments["sugars_100g"] + if "fiber_100g" in nutriments: + nutrition["fiber_g"] = nutriments["fiber_100g"] + if "proteins_100g" in nutriments: + nutrition["protein_g"] = nutriments["proteins_100g"] + + # Minerals + if "salt_100g" in nutriments: + nutrition["salt_g"] = nutriments["salt_100g"] + elif "sodium_100g" in nutriments: + # Convert sodium to salt (1g sodium = 2.5g salt) + nutrition["salt_g"] = round(nutriments["sodium_100g"] * 2.5, 2) + + # Serving size + if "serving_size" in product: + nutrition["serving_size"] = product["serving_size"] + + return nutrition + + async def search_products( + self, + query: str, + page: int = 1, + page_size: int = 20 + ) -> Dict[str, Any]: + """ + Search for products by name in OpenFoodFacts. 
+ + Args: + query: Search query + page: Page number (1-indexed) + page_size: Number of results per page + + Returns: + Dictionary with search results and metadata + """ + try: + async with httpx.AsyncClient() as client: + url = f"{self.BASE_URL}/search" + + response = await client.get( + url, + params={ + "search_terms": query, + "page": page, + "page_size": page_size, + "json": 1, + }, + headers={"User-Agent": self.USER_AGENT}, + timeout=10.0, + ) + + response.raise_for_status() + data = response.json() + + products = [ + self._parse_product_data({"product": p}, p.get("code", "")) + for p in data.get("products", []) + ] + + return { + "products": products, + "count": data.get("count", 0), + "page": data.get("page", page), + "page_size": data.get("page_size", page_size), + } + + except Exception as e: + logger.error(f"Error searching OpenFoodFacts: {e}") + return { + "products": [], + "count": 0, + "page": page, + "page_size": page_size, + } diff --git a/app/services/quality/__init__.py b/app/services/quality/__init__.py new file mode 100644 index 0000000..5a873fc --- /dev/null +++ b/app/services/quality/__init__.py @@ -0,0 +1,9 @@ +# app/services/quality/__init__.py +""" +Quality assessment services for Kiwi. +Contains functionality for evaluating receipt image quality. +""" + +from app.services.quality.assessment import QualityAssessor + +__all__ = ["QualityAssessor"] \ No newline at end of file diff --git a/app/services/quality/assessment.py b/app/services/quality/assessment.py new file mode 100644 index 0000000..73aea1f --- /dev/null +++ b/app/services/quality/assessment.py @@ -0,0 +1,332 @@ +#!/usr/bin/env python +# app/services/quality/assessment.py +import cv2 +import numpy as np +import logging +from pathlib import Path +from typing import Dict, Any, Optional, Tuple + +logger = logging.getLogger(__name__) + +class QualityAssessor: + """ + Assesses the quality of receipt images for processing suitability. 
+ """ + + def __init__(self, min_quality_score: float = 50.0): + """ + Initialize the quality assessor. + + Args: + min_quality_score: Minimum acceptable quality score (0-100) + """ + self.min_quality_score = min_quality_score + + def assess_image(self, image_path: Path) -> Dict[str, Any]: + """ + Assess the quality of an image. + + Args: + image_path: Path to the image + + Returns: + Dictionary containing quality metrics + """ + try: + # Read image + img = cv2.imread(str(image_path)) + if img is None: + return { + "success": False, + "error": f"Failed to read image: {image_path}", + "overall_score": 0.0, + } + + # Convert to grayscale for some metrics + gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY) + + # Calculate various quality metrics + blur_score = self._calculate_blur_score(gray) + lighting_score = self._calculate_lighting_score(gray) + contrast_score = self._calculate_contrast_score(gray) + size_score = self._calculate_size_score(img.shape) + + # Check for potential fold lines + fold_detected, fold_severity = self._detect_folds(gray) + + # Calculate overall quality score + overall_score = self._calculate_overall_score({ + "blur": blur_score, + "lighting": lighting_score, + "contrast": contrast_score, + "size": size_score, + "fold": 100.0 if not fold_detected else (100.0 - fold_severity * 20.0) + }) + + # Create assessment result + result = { + "success": True, + "metrics": { + "blur_score": blur_score, + "lighting_score": lighting_score, + "contrast_score": contrast_score, + "size_score": size_score, + "fold_detected": fold_detected, + "fold_severity": fold_severity if fold_detected else 0.0, + }, + "overall_score": overall_score, + "is_acceptable": overall_score >= self.min_quality_score, + "improvement_suggestions": self._generate_suggestions({ + "blur": blur_score, + "lighting": lighting_score, + "contrast": contrast_score, + "size": size_score, + "fold": fold_detected, + "fold_severity": fold_severity if fold_detected else 0.0, + }), + } + + return result 
+ + except Exception as e: + logger.exception(f"Error assessing image quality: {e}") + return { + "success": False, + "error": f"Error assessing image quality: {str(e)}", + "overall_score": 0.0, + } + + def _calculate_blur_score(self, gray_img: np.ndarray) -> float: + """ + Calculate blur score using Laplacian variance. + Higher variance = less blurry (higher score) + + Args: + gray_img: Grayscale image + + Returns: + Blur score (0-100) + """ + # Use Laplacian for edge detection + laplacian = cv2.Laplacian(gray_img, cv2.CV_64F) + + # Calculate variance of Laplacian + variance = laplacian.var() + + # Map variance to a 0-100 score + # These thresholds might need adjustment based on your specific requirements + if variance < 10: + return 0.0 # Very blurry + elif variance < 100: + return (variance - 10) / 90 * 50 # Map 10-100 to 0-50 + elif variance < 1000: + return 50 + (variance - 100) / 900 * 50 # Map 100-1000 to 50-100 + else: + return 100.0 # Very sharp + + def _calculate_lighting_score(self, gray_img: np.ndarray) -> float: + """ + Calculate lighting score based on average brightness and std dev. + + Args: + gray_img: Grayscale image + + Returns: + Lighting score (0-100) + """ + # Calculate mean brightness + mean = gray_img.mean() + + # Calculate standard deviation of brightness + std = gray_img.std() + + # Ideal mean would be around 127 (middle of 0-255) + # Penalize if too dark or too bright + mean_score = 100 - abs(mean - 127) / 127 * 100 + + # Higher std dev generally means better contrast + # But we'll cap at 60 for reasonable balance + std_score = min(std / 60 * 100, 100) + + # Combine scores (weighted) + return 0.6 * mean_score + 0.4 * std_score + + def _calculate_contrast_score(self, gray_img: np.ndarray) -> float: + """ + Calculate contrast score. 
+ + Args: + gray_img: Grayscale image + + Returns: + Contrast score (0-100) + """ + # Calculate histogram + hist = cv2.calcHist([gray_img], [0], None, [256], [0, 256]) + + # Calculate percentage of pixels in each brightness range + total_pixels = gray_img.shape[0] * gray_img.shape[1] + dark_pixels = np.sum(hist[:50]) / total_pixels + mid_pixels = np.sum(hist[50:200]) / total_pixels + bright_pixels = np.sum(hist[200:]) / total_pixels + + # Ideal: good distribution across ranges with emphasis on mid-range + # This is a simplified model - real receipts may need different distributions + score = ( + (0.2 * min(dark_pixels * 500, 100)) + # Want some dark pixels (text) + (0.6 * min(mid_pixels * 200, 100)) + # Want many mid pixels + (0.2 * min(bright_pixels * 500, 100)) # Want some bright pixels (background) + ) + + return score + + def _calculate_size_score(self, shape: Tuple[int, int, int]) -> float: + """ + Calculate score based on image dimensions. + + Args: + shape: Image shape (height, width, channels) + + Returns: + Size score (0-100) + """ + height, width = shape[0], shape[1] + + # Minimum recommended dimensions for good OCR + min_height, min_width = 800, 600 + + # Calculate size score + if height < min_height or width < min_width: + # Penalize if below minimum dimensions + return min(height / min_height, width / min_width) * 100 + else: + # Full score if dimensions are adequate + return 100.0 + + def _detect_folds(self, gray_img: np.ndarray) -> Tuple[bool, float]: + """ + Detect potential fold lines in the image. 
+ + Args: + gray_img: Grayscale image + + Returns: + Tuple of (fold_detected, fold_severity) + fold_severity is a value between 0 and 5, with 5 being the most severe + """ + # Apply edge detection + edges = cv2.Canny(gray_img, 50, 150, apertureSize=3) + + # Apply Hough Line Transform to detect straight lines + lines = cv2.HoughLinesP( + edges, + rho=1, + theta=np.pi/180, + threshold=100, + minLineLength=gray_img.shape[1] // 3, # Look for lines at least 1/3 of image width + maxLineGap=10 + ) + + if lines is None: + return False, 0.0 + + # Check for horizontal or vertical lines that could be folds + potential_folds = [] + height, width = gray_img.shape + + for line in lines: + x1, y1, x2, y2 = line[0] + length = np.sqrt((x2 - x1)**2 + (y2 - y1)**2) + angle = np.abs(np.arctan2(y2 - y1, x2 - x1) * 180 / np.pi) + + # Check if horizontal (0ยฑ10ยฐ) or vertical (90ยฑ10ยฐ) + is_horizontal = angle < 10 or angle > 170 + is_vertical = abs(angle - 90) < 10 + + # Check if length is significant + is_significant = (is_horizontal and length > width * 0.5) or \ + (is_vertical and length > height * 0.5) + + if (is_horizontal or is_vertical) and is_significant: + # Calculate intensity variance along the line + # This helps determine if it's a fold (sharp brightness change) + # Simplified implementation for Phase 1 + potential_folds.append({ + "length": length, + "is_horizontal": is_horizontal, + }) + + # Determine if folds are detected and their severity + if not potential_folds: + return False, 0.0 + + # Severity based on number and length of potential folds + # This is a simplified metric for Phase 1 + total_len = sum(fold["length"] for fold in potential_folds) + if is_horizontal: + severity = min(5.0, total_len / width * 2.5) + else: + severity = min(5.0, total_len / height * 2.5) + + return True, severity + + def _calculate_overall_score(self, scores: Dict[str, float]) -> float: + """ + Calculate overall quality score from individual metrics. 
+ + Args: + scores: Dictionary of individual quality scores + + Returns: + Overall quality score (0-100) + """ + # Weights for different factors + weights = { + "blur": 0.30, + "lighting": 0.25, + "contrast": 0.25, + "size": 0.10, + "fold": 0.10, + } + + # Calculate weighted average + overall = sum(weights[key] * scores[key] for key in weights) + + return overall + + def _generate_suggestions(self, metrics: Dict[str, Any]) -> list: + """ + Generate improvement suggestions based on metrics. + + Args: + metrics: Dictionary of quality metrics + + Returns: + List of improvement suggestions + """ + suggestions = [] + + # Blur suggestions + if metrics["blur"] < 60: + suggestions.append("Hold the camera steady and ensure the receipt is in focus.") + + # Lighting suggestions + if metrics["lighting"] < 60: + suggestions.append("Improve lighting conditions and avoid shadows on the receipt.") + + # Contrast suggestions + if metrics["contrast"] < 60: + suggestions.append("Ensure good contrast between text and background.") + + # Size suggestions + if metrics["size"] < 60: + suggestions.append("Move the camera closer to the receipt for better resolution.") + + # Fold suggestions + if metrics["fold"]: + if metrics["fold_severity"] > 3.0: + suggestions.append("The receipt has severe folds. Try to flatten it before capturing.") + else: + suggestions.append("Flatten the receipt to remove fold lines for better processing.") + + return suggestions \ No newline at end of file diff --git a/app/services/receipt_service.py b/app/services/receipt_service.py new file mode 100644 index 0000000..721bfa0 --- /dev/null +++ b/app/services/receipt_service.py @@ -0,0 +1,126 @@ +""" +Receipt processing service โ€” orchestrates the OCR pipeline. + +Pipeline stages: + 1. Preprocess โ€” enhance image, convert to PNG + 2. Quality โ€” score image; abort to 'low_quality' if below threshold + 3. OCR โ€” VisionLanguageOCR extracts structured data + 4. 
Persist โ€” flatten result into receipt_data table + 5. Stage โ€” set status to 'staged'; items await human approval + +Items are NOT added to inventory automatically. Use the +POST /receipts/{id}/ocr/approve endpoint to commit approved items. +""" +from __future__ import annotations + +import logging +from pathlib import Path +from typing import Any + +from app.db.store import Store + +logger = logging.getLogger(__name__) + + +def _flatten_ocr_result(result: dict[str, Any]) -> dict[str, Any]: + """Map nested VisionLanguageOCR output to the flat receipt_data schema.""" + merchant = result.get("merchant") or {} + transaction = result.get("transaction") or {} + totals = result.get("totals") or {} + return { + "merchant_name": merchant.get("name"), + "merchant_address": merchant.get("address"), + "merchant_phone": merchant.get("phone"), + "transaction_date": transaction.get("date"), + "transaction_time": transaction.get("time"), + "receipt_number": transaction.get("receipt_number"), + "register_number": transaction.get("register"), + "cashier_name": transaction.get("cashier"), + "items": result.get("items") or [], + "subtotal": totals.get("subtotal"), + "tax": totals.get("tax"), + "discount": totals.get("discount"), + "total": totals.get("total"), + "payment_method": totals.get("payment_method"), + "amount_paid": totals.get("amount_paid"), + "change_given": totals.get("change"), + "raw_text": result.get("raw_text"), + "confidence_scores": result.get("confidence") or {}, + "warnings": result.get("warnings") or [], + } + + +class ReceiptService: + def __init__(self, store: Store) -> None: + self.store = store + + async def process(self, receipt_id: int, image_path: Path) -> None: + """Run the full OCR pipeline for a receipt image. + + Stages run synchronously inside asyncio.to_thread so SQLite and the + VLM (which uses torch) both stay off the async event loop. 
+ """ + import asyncio + await asyncio.to_thread(self._run_pipeline, receipt_id, image_path) + + def _run_pipeline(self, receipt_id: int, image_path: Path) -> None: + from app.core.config import settings + from app.services.image_preprocessing.enhancement import ImageEnhancer + from app.services.image_preprocessing.format_conversion import FormatConverter + from app.services.quality.assessment import QualityAssessor + + # โ”€โ”€ Stage 1: Preprocess โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + enhancer = ImageEnhancer() + converter = FormatConverter() + enhanced = enhancer.enhance(image_path) + processed_path = converter.to_png(enhanced) + + # โ”€โ”€ Stage 2: Quality assessment โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + assessor = QualityAssessor() + assessment = assessor.assess(processed_path) + self.store.upsert_quality_assessment( + receipt_id, + overall_score=assessment["overall_score"], + is_acceptable=assessment["is_acceptable"], + metrics=assessment.get("metrics", {}), + suggestions=assessment.get("suggestions", []), + ) + + if not assessment["is_acceptable"]: + self.store.update_receipt_status(receipt_id, "low_quality") + logger.warning( + "Receipt %s: quality too low for OCR (score=%.1f) โ€” %s", + receipt_id, assessment["overall_score"], + "; ".join(assessment.get("suggestions", [])), + ) + return + + if not settings.ENABLE_OCR: + self.store.update_receipt_status(receipt_id, "processed") + logger.info("Receipt %s: quality OK but ENABLE_OCR=false โ€” skipping OCR", receipt_id) + return + + # โ”€โ”€ Stage 3: OCR extraction โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + from app.services.ocr.vl_model import VisionLanguageOCR + ocr = VisionLanguageOCR() + result = 
ocr.extract_receipt_data(str(processed_path)) + + if result.get("error"): + self.store.update_receipt_status(receipt_id, "error", result["error"]) + logger.error("Receipt %s: OCR failed โ€” %s", receipt_id, result["error"]) + return + + # โ”€โ”€ Stage 4: Persist extracted data โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + flat = _flatten_ocr_result(result) + self.store.upsert_receipt_data(receipt_id, flat) + + item_count = len(flat.get("items") or []) + + # โ”€โ”€ Stage 5: Stage for human approval โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + self.store.update_receipt_status(receipt_id, "staged") + logger.info( + "Receipt %s: OCR complete โ€” %d item(s) staged for review " + "(confidence=%.2f)", + receipt_id, item_count, + (result.get("confidence") or {}).get("overall", 0.0), + ) diff --git a/app/services/receipt_service_inmemory_backup.py b/app/services/receipt_service_inmemory_backup.py new file mode 100644 index 0000000..b47afa9 --- /dev/null +++ b/app/services/receipt_service_inmemory_backup.py @@ -0,0 +1,295 @@ +#!/usr/bin/env python +# app/services/receipt_service.py +import os +import uuid +import shutil +import aiofiles +from pathlib import Path +from typing import Optional, List, Dict, Any +from fastapi import UploadFile, BackgroundTasks, HTTPException +import asyncio +import logging +import sys +from app.utils.progress import ProgressIndicator + +from app.services.image_preprocessing.format_conversion import convert_to_standard_format, extract_metadata +from app.services.image_preprocessing.enhancement import enhance_image, correct_perspective +from app.services.quality.assessment import QualityAssessor +from app.models.schemas.receipt import ReceiptCreate, ReceiptResponse +from app.models.schemas.quality import QualityAssessment +from app.core.config import settings + +logger = logging.getLogger(__name__) + +class ReceiptService: + """ + 
Service for handling receipt processing. + """ + + def __init__(self): + """ + Initialize the receipt service. + """ + self.quality_assessor = QualityAssessor() + self.upload_dir = Path(settings.UPLOAD_DIR) + self.processing_dir = Path(settings.PROCESSING_DIR) + + # Create directories if they don't exist + self.upload_dir.mkdir(parents=True, exist_ok=True) + self.processing_dir.mkdir(parents=True, exist_ok=True) + + # In-memory storage for Phase 1 (would be replaced by DB in production) + self.receipts = {} + self.quality_assessments = {} + + async def process_receipt( + self, + file: UploadFile, + background_tasks: BackgroundTasks + ) -> ReceiptResponse: + """ + Process a single receipt file. + + Args: + file: Uploaded receipt file + background_tasks: FastAPI background tasks + + Returns: + ReceiptResponse object + """ + # Generate unique ID for receipt + receipt_id = str(uuid.uuid4()) + + # Save uploaded file + upload_path = self.upload_dir / f"{receipt_id}_{file.filename}" + await self._save_upload_file(file, upload_path) + + # Create receipt entry + receipt = { + "id": receipt_id, + "filename": file.filename, + "status": "uploaded", + "original_path": str(upload_path), + "processed_path": None, + "metadata": {}, + } + + self.receipts[receipt_id] = receipt + + # Add background task for processing + background_tasks.add_task( + self._process_receipt_background, + receipt_id, + upload_path + ) + + return ReceiptResponse( + id=receipt_id, + filename=file.filename, + status="processing", + metadata={}, + quality_score=None, + ) + + async def get_receipt(self, receipt_id: str) -> Optional[ReceiptResponse]: + """ + Get receipt by ID. 
+ + Args: + receipt_id: Receipt ID + + Returns: + ReceiptResponse object or None if not found + """ + receipt = self.receipts.get(receipt_id) + if not receipt: + return None + + quality = self.quality_assessments.get(receipt_id) + quality_score = quality.get("overall_score") if quality else None + + return ReceiptResponse( + id=receipt["id"], + filename=receipt["filename"], + status=receipt["status"], + metadata=receipt["metadata"], + quality_score=quality_score, + ) + + async def get_receipt_quality(self, receipt_id: str) -> Optional[QualityAssessment]: + """ + Get quality assessment for a receipt. + + Args: + receipt_id: Receipt ID + + Returns: + QualityAssessment object or None if not found + """ + quality = self.quality_assessments.get(receipt_id) + if not quality: + return None + + return QualityAssessment( + receipt_id=receipt_id, + overall_score=quality["overall_score"], + is_acceptable=quality["is_acceptable"], + metrics=quality["metrics"], + suggestions=quality["improvement_suggestions"], + ) + + def list_receipts(self) -> List[ReceiptResponse]: + """ + List all receipts. + + Returns: + List of ReceiptResponse objects + """ + result = [] + for receipt_id, receipt in self.receipts.items(): + quality = self.quality_assessments.get(receipt_id) + quality_score = quality.get("overall_score") if quality else None + + result.append(ReceiptResponse( + id=receipt["id"], + filename=receipt["filename"], + status=receipt["status"], + metadata=receipt["metadata"], + quality_score=quality_score, + )) + + return result + + def get_quality_assessments(self) -> Dict[str, QualityAssessment]: + """ + Get all quality assessments. 
+ + Returns: + Dict mapping receipt_id to QualityAssessment object + """ + result = {} + for receipt_id, quality in self.quality_assessments.items(): + result[receipt_id] = QualityAssessment( + receipt_id=receipt_id, + overall_score=quality["overall_score"], + is_acceptable=quality["is_acceptable"], + metrics=quality["metrics"], + suggestions=quality["improvement_suggestions"], + ) + return result + + async def _save_upload_file(self, file: UploadFile, destination: Path) -> None: + """ + Save an uploaded file to disk. + + Args: + file: Uploaded file + destination: Destination path + """ + try: + async with aiofiles.open(destination, 'wb') as out_file: + # Read in chunks to handle large files + content = await file.read(1024 * 1024) # 1MB chunks + while content: + await out_file.write(content) + content = await file.read(1024 * 1024) + + except Exception as e: + logger.exception(f"Error saving upload file: {e}") + raise HTTPException(status_code=500, detail=f"Error saving upload file: {str(e)}") + + async def _process_receipt_background(self, receipt_id: str, upload_path: Path) -> None: + """ + Background task for processing a receipt with progress indicators. 
+ + Args: + receipt_id: Receipt ID + upload_path: Path to uploaded file + """ + try: + # Print a message to indicate start of processing + print(f"\nProcessing receipt {receipt_id}...") + + # Update status + self.receipts[receipt_id]["status"] = "processing" + + # Create processing directory for this receipt + receipt_dir = self.processing_dir / receipt_id + receipt_dir.mkdir(parents=True, exist_ok=True) + + # Step 1: Convert to standard format + print(" Step 1/4: Converting to standard format...") + converted_path = receipt_dir / f"{receipt_id}_converted.png" + success, message, actual_converted_path = convert_to_standard_format( + upload_path, + converted_path + ) + + if not success: + print(f" โœ— Format conversion failed: {message}") + self.receipts[receipt_id]["status"] = "error" + self.receipts[receipt_id]["error"] = message + return + print(" โœ“ Format conversion complete") + + # Step 2: Correct perspective + print(" Step 2/4: Correcting perspective...") + perspective_path = receipt_dir / f"{receipt_id}_perspective.png" + success, message, actual_perspective_path = correct_perspective( + actual_converted_path, + perspective_path + ) + + # Use corrected image if available, otherwise use converted image + current_path = actual_perspective_path if success else actual_converted_path + if success: + print(" โœ“ Perspective correction complete") + else: + print(f" โš  Perspective correction skipped: {message}") + + # Step 3: Enhance image + print(" Step 3/4: Enhancing image...") + enhanced_path = receipt_dir / f"{receipt_id}_enhanced.png" + success, message, actual_enhanced_path = enhance_image( + current_path, + enhanced_path + ) + + if not success: + print(f" โš  Enhancement warning: {message}") + # Continue with current path + else: + current_path = actual_enhanced_path + print(" โœ“ Image enhancement complete") + + # Step 4: Assess quality + print(" Step 4/4: Assessing quality...") + quality_assessment = self.quality_assessor.assess_image(current_path) + 
self.quality_assessments[receipt_id] = quality_assessment + print(f" โœ“ Quality assessment complete: score {quality_assessment['overall_score']:.1f}/100") + + # Step 5: Extract metadata + print(" Extracting metadata...") + metadata = extract_metadata(upload_path) + if current_path != upload_path: + processed_metadata = extract_metadata(current_path) + metadata["processed"] = { + "width": processed_metadata.get("width"), + "height": processed_metadata.get("height"), + "format": processed_metadata.get("original_format"), + } + print(" โœ“ Metadata extraction complete") + + # Update receipt entry + self.receipts[receipt_id].update({ + "status": "processed", + "processed_path": str(current_path), + "metadata": metadata, + }) + + print(f"โœ“ Receipt {receipt_id} processed successfully!") + + except Exception as e: + print(f"โœ— Error processing receipt {receipt_id}: {e}") + self.receipts[receipt_id]["status"] = "error" + self.receipts[receipt_id]["error"] = str(e) \ No newline at end of file diff --git a/app/static/index.html b/app/static/index.html new file mode 100644 index 0000000..7fc1a76 --- /dev/null +++ b/app/static/index.html @@ -0,0 +1,926 @@ + + + + + + Project Thoth - Inventory & Receipt Manager + + + +
+
+

๐Ÿ“ฆ Project Thoth

+

Smart Inventory & Receipt Management

+
+ + +
+ + +
+ + +
+ +
+

๐Ÿ“Š Inventory Overview

+
+
+
0
+
Total Items
+
+
+
0
+
Unique Products
+
+
+
0
+
Expiring Soon
+
+
+
0
+
Expired
+
+
+
+ + +
+

๐Ÿ”ซ Scanner Gun

+

Use your barcode scanner gun below. Scan will auto-submit when Enter is pressed.

+ +
+ + +
+ +
+
+ + +
+
+ + +
+
+ +
+
+

Processing barcode...

+
+ +
+
+ + +
+

๐Ÿ“ท Scan Barcode (Camera/Image)

+
+
๐Ÿ“ธ
+
Click to scan barcode or drag and drop
+
Take a photo of a product barcode (UPC/EAN)
+
+ + +
+
+ + +
+
+ + +
+
+ +
+
+

Scanning barcode...

+
+ +
+
+ + +
+

โž• Add Item Manually

+
+
+ + +
+
+ + +
+
+ +
+
+ + +
+
+ + +
+
+ +
+
+ + +
+
+ + +
+
+ + +
+ + +
+

๐Ÿ“‹ Current Inventory

+
+

No items yet. Scan a barcode or add manually!

+
+
+ + +
+

๐Ÿ“ฅ Export

+ + +
+
+ + +
+
+

๐Ÿ“ธ Upload Receipt

+
+
๐Ÿงพ
+
Click to upload or drag and drop
+
Supports JPG, PNG (max 10MB)
+
+ + +
+
+

Processing receipt...

+
+ +
+
+ +
+

๐Ÿ“‹ Recent Receipts

+
+

No receipts yet. Upload one above!

+
+ +
+ + +
+
+
+
+ + + + diff --git a/app/static/upload.html b/app/static/upload.html new file mode 100644 index 0000000..62c9235 --- /dev/null +++ b/app/static/upload.html @@ -0,0 +1,459 @@ + + + + + + Project Thoth - Receipt Upload + + + +
+
+

๐Ÿ“„ Project Thoth

+

Receipt Processing System

+
+ +
+

Upload Receipt

+
+
๐Ÿ“ธ
+
Click to upload or drag and drop
+
Supports JPG, PNG (max 10MB)
+
+ + +
+ +
+ + +
+
+ +
+
+

Processing receipt...

+
+ +
+
+ +
+

Recent Receipts

+
+

No receipts yet. Upload one above!

+
+ +
+

Export Data

+ + +
+
+
+ + + + diff --git a/app/static/upload.html.backup b/app/static/upload.html.backup new file mode 100644 index 0000000..62c9235 --- /dev/null +++ b/app/static/upload.html.backup @@ -0,0 +1,459 @@ + + + + + + Project Thoth - Receipt Upload + + + +
+
+

๐Ÿ“„ Project Thoth

+

Receipt Processing System

+
+ +
+

Upload Receipt

+
+
๐Ÿ“ธ
+
Click to upload or drag and drop
+
Supports JPG, PNG (max 10MB)
+
+ + +
+ +
+ + +
+
+ +
+
+

Processing receipt...

+
+ +
+
+ +
+

Recent Receipts

+
+

No receipts yet. Upload one above!

+
+ +
+

Export Data

+ + +
+
+
+ + + + diff --git a/app/tiers.py b/app/tiers.py new file mode 100644 index 0000000..133eb45 --- /dev/null +++ b/app/tiers.py @@ -0,0 +1,61 @@ +""" +Kiwi tier gates. + +Tiers: free < paid < premium +(Ultra not used in Kiwi โ€” no human-in-the-loop operations.) + +Uses circuitforge-core can_use() with Kiwi's feature map. +""" +from __future__ import annotations + +from circuitforge_core.tiers.tiers import can_use as _can_use, BYOK_UNLOCKABLE + +# Features that unlock when the user supplies their own LLM backend. +KIWI_BYOK_UNLOCKABLE: frozenset[str] = frozenset({ + "recipe_suggestions", + "expiry_llm_matching", + "receipt_ocr", +}) + +# Feature โ†’ minimum tier required +KIWI_FEATURES: dict[str, str] = { + # Free tier + "inventory_crud": "free", + "barcode_scan": "free", + "receipt_upload": "free", + "expiry_alerts": "free", + "export_csv": "free", + + # Paid tier + "receipt_ocr": "paid", # BYOK-unlockable + "recipe_suggestions": "paid", # BYOK-unlockable + "expiry_llm_matching": "paid", # BYOK-unlockable + "meal_planning": "paid", + "dietary_profiles": "paid", + + # Premium tier + "multi_household": "premium", + "background_monitoring": "premium", + "leftover_mode": "premium", +} + + +def can_use(feature: str, tier: str, has_byok: bool = False) -> bool: + """Return True if the given tier can access the feature.""" + return _can_use( + feature, + tier, + has_byok=has_byok, + _features=KIWI_FEATURES, + ) + + +def require_feature(feature: str, tier: str, has_byok: bool = False) -> None: + """Raise ValueError if the tier cannot access the feature.""" + if not can_use(feature, tier, has_byok): + from circuitforge_core.tiers.tiers import tier_label + needed = tier_label(feature, has_byok=has_byok, _features=KIWI_FEATURES) + raise ValueError( + f"Feature '{feature}' requires {needed} tier. " + f"Current tier: {tier}." 
+ ) diff --git a/app/utils/__init__.py b/app/utils/__init__.py new file mode 100644 index 0000000..0777f75 --- /dev/null +++ b/app/utils/__init__.py @@ -0,0 +1,5 @@ +# app/utils/__init__.py +""" +Utility functions for Kiwi. +Contains common helpers used throughout the application. +""" \ No newline at end of file diff --git a/app/utils/progress.py b/app/utils/progress.py new file mode 100644 index 0000000..a8aca53 --- /dev/null +++ b/app/utils/progress.py @@ -0,0 +1,248 @@ +# app/utils/progress.py +import sys +import time +import asyncio +from typing import Optional, Callable, Any +import threading + +class ProgressIndicator: + """ + A simple progress indicator for long-running operations. + + This class provides different styles of progress indicators: + - dots: Animated dots (. .. ... ....) + - spinner: Spinning cursor (|/-\\) + - percentage: Progress percentage [#### ] 40% + """ + + def __init__(self, + message: str = "Processing", + style: str = "dots", + total: Optional[int] = None): + """ + Initialize the progress indicator. 
+ + Args: + message: The message to display before the indicator + style: The indicator style ('dots', 'spinner', or 'percentage') + total: Total items for percentage style (required for percentage) + """ + self.message = message + self.style = style + self.total = total + self.current = 0 + self.start_time = None + self._running = False + self._thread = None + self._task = None + + # Validate style + if style not in ["dots", "spinner", "percentage"]: + raise ValueError("Style must be 'dots', 'spinner', or 'percentage'") + + # Validate total for percentage style + if style == "percentage" and total is None: + raise ValueError("Total must be specified for percentage style") + + def start(self): + """Start the progress indicator in a separate thread.""" + if self._running: + return + + self._running = True + self.start_time = time.time() + + # Start the appropriate indicator + if self.style == "dots": + self._thread = threading.Thread(target=self._dots_indicator) + elif self.style == "spinner": + self._thread = threading.Thread(target=self._spinner_indicator) + elif self.style == "percentage": + self._thread = threading.Thread(target=self._percentage_indicator) + + self._thread.daemon = True + self._thread.start() + + async def start_async(self): + """Start the progress indicator as an asyncio task.""" + if self._running: + return + + self._running = True + self.start_time = time.time() + + # Start the appropriate indicator + if self.style == "dots": + self._task = asyncio.create_task(self._dots_indicator_async()) + elif self.style == "spinner": + self._task = asyncio.create_task(self._spinner_indicator_async()) + elif self.style == "percentage": + self._task = asyncio.create_task(self._percentage_indicator_async()) + + def update(self, current: int): + """Update the progress (for percentage style).""" + self.current = current + + def stop(self): + """Stop the progress indicator.""" + if not self._running: + return + + self._running = False + + if self._thread: + 
self._thread.join(timeout=1.0) + + # Clear the progress line and write a newline + sys.stdout.write("\r" + " " * 80 + "\r") + sys.stdout.flush() + + async def stop_async(self): + """Stop the progress indicator (async version).""" + if not self._running: + return + + self._running = False + + if self._task: + self._task.cancel() + try: + await self._task + except asyncio.CancelledError: + pass + + # Clear the progress line and write a newline + sys.stdout.write("\r" + " " * 80 + "\r") + sys.stdout.flush() + + def _dots_indicator(self): + """Display an animated dots indicator.""" + i = 0 + while self._running: + dots = "." * (i % 4 + 1) + elapsed = time.time() - self.start_time + sys.stdout.write(f"\r{self.message}{dots:<4} ({elapsed:.1f}s)") + sys.stdout.flush() + time.sleep(0.5) + i += 1 + + async def _dots_indicator_async(self): + """Display an animated dots indicator (async version).""" + i = 0 + while self._running: + dots = "." * (i % 4 + 1) + elapsed = time.time() - self.start_time + sys.stdout.write(f"\r{self.message}{dots:<4} ({elapsed:.1f}s)") + sys.stdout.flush() + await asyncio.sleep(0.5) + i += 1 + + def _spinner_indicator(self): + """Display a spinning cursor indicator.""" + chars = "|/-\\" + i = 0 + while self._running: + char = chars[i % len(chars)] + elapsed = time.time() - self.start_time + sys.stdout.write(f"\r{self.message} {char} ({elapsed:.1f}s)") + sys.stdout.flush() + time.sleep(0.1) + i += 1 + + async def _spinner_indicator_async(self): + """Display a spinning cursor indicator (async version).""" + chars = "|/-\\" + i = 0 + while self._running: + char = chars[i % len(chars)] + elapsed = time.time() - self.start_time + sys.stdout.write(f"\r{self.message} {char} ({elapsed:.1f}s)") + sys.stdout.flush() + await asyncio.sleep(0.1) + i += 1 + + def _percentage_indicator(self): + """Display a percentage progress bar.""" + while self._running: + percentage = min(100, int((self.current / self.total) * 100)) + bar_length = 20 + filled_length = 
int(bar_length * percentage // 100) + bar = '#' * filled_length + ' ' * (bar_length - filled_length) + elapsed = time.time() - self.start_time + + # Estimate time remaining if we have progress + if percentage > 0: + remaining = elapsed * (100 - percentage) / percentage + sys.stdout.write(f"\r{self.message} [{bar}] {percentage}% ({elapsed:.1f}s elapsed, ~{remaining:.1f}s remaining)") + else: + sys.stdout.write(f"\r{self.message} [{bar}] {percentage}% ({elapsed:.1f}s elapsed)") + + sys.stdout.flush() + time.sleep(0.2) + + async def _percentage_indicator_async(self): + """Display a percentage progress bar (async version).""" + while self._running: + percentage = min(100, int((self.current / self.total) * 100)) + bar_length = 20 + filled_length = int(bar_length * percentage // 100) + bar = '#' * filled_length + ' ' * (bar_length - filled_length) + elapsed = time.time() - self.start_time + + # Estimate time remaining if we have progress + if percentage > 0: + remaining = elapsed * (100 - percentage) / percentage + sys.stdout.write(f"\r{self.message} [{bar}] {percentage}% ({elapsed:.1f}s elapsed, ~{remaining:.1f}s remaining)") + else: + sys.stdout.write(f"\r{self.message} [{bar}] {percentage}% ({elapsed:.1f}s elapsed)") + + sys.stdout.flush() + await asyncio.sleep(0.2) + +# Convenience function for running a task with progress indicator +def with_progress(func: Callable, *args, message: str = "Processing", style: str = "dots", **kwargs) -> Any: + """ + Run a function with a progress indicator. 
+ + Args: + func: Function to run + *args: Arguments to pass to the function + message: Message to display + style: Progress indicator style + **kwargs: Keyword arguments to pass to the function + + Returns: + The result of the function + """ + progress = ProgressIndicator(message=message, style=style) + progress.start() + + try: + result = func(*args, **kwargs) + return result + finally: + progress.stop() + +# Async version of with_progress +async def with_progress_async(func: Callable, *args, message: str = "Processing", style: str = "dots", **kwargs) -> Any: + """ + Run an async function with a progress indicator. + + Args: + func: Async function to run + *args: Arguments to pass to the function + message: Message to display + style: Progress indicator style + **kwargs: Keyword arguments to pass to the function + + Returns: + The result of the function + """ + progress = ProgressIndicator(message=message, style=style) + await progress.start_async() + + try: + result = await func(*args, **kwargs) + return result + finally: + await progress.stop_async() \ No newline at end of file diff --git a/app/utils/units.py b/app/utils/units.py new file mode 100644 index 0000000..5acabde --- /dev/null +++ b/app/utils/units.py @@ -0,0 +1,185 @@ +""" +Unit normalization and conversion for Kiwi inventory. + +Source of truth: metric. + - Mass โ†’ grams (g) + - Volume โ†’ milliliters (ml) + - Count โ†’ each (dimensionless) + +All inventory quantities are stored in the base metric unit. +Conversion to display units happens at the API/frontend boundary. 
+ +Usage: + from app.utils.units import normalize_to_metric, convert_from_metric + + # Normalise OCR input + qty, unit = normalize_to_metric(2.0, "lb") # โ†’ (907.18, "g") + qty, unit = normalize_to_metric(1.0, "gal") # โ†’ (3785.41, "ml") + qty, unit = normalize_to_metric(3.0, "each") # โ†’ (3.0, "each") + + # Convert for display + display_qty, display_unit = convert_from_metric(907.18, "g", preferred="imperial") + # โ†’ (2.0, "lb") +""" +from __future__ import annotations + +# โ”€โ”€ Unit categories โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +MASS_UNITS: frozenset[str] = frozenset({"g", "kg", "mg", "lb", "lbs", "oz"}) +VOLUME_UNITS: frozenset[str] = frozenset({ + "ml", "l", + "fl oz", "floz", "fluid oz", "fluid ounce", "fluid ounces", + "cup", "cups", "pt", "pint", "pints", + "qt", "quart", "quarts", "gal", "gallon", "gallons", +}) +COUNT_UNITS: frozenset[str] = frozenset({ + "each", "ea", "pc", "pcs", "piece", "pieces", + "ct", "count", "item", "items", + "pk", "pack", "packs", "bag", "bags", + "bunch", "bunches", "head", "heads", + "can", "cans", "bottle", "bottles", "box", "boxes", + "jar", "jars", "tube", "tubes", "roll", "rolls", + "loaf", "loaves", "dozen", +}) + +# โ”€โ”€ Conversion factors to base metric unit โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ +# All values are: 1 = N + +# Mass โ†’ grams +_TO_GRAMS: dict[str, float] = { + "g": 1.0, + "mg": 0.001, + "kg": 1_000.0, + "oz": 28.3495, + "lb": 453.592, + "lbs": 453.592, +} + +# Volume โ†’ millilitres +_TO_ML: dict[str, float] = { + "ml": 1.0, + "l": 1_000.0, + "fl oz": 29.5735, + "floz": 29.5735, + "fluid oz": 29.5735, + "fluid ounce": 29.5735, + "fluid ounces": 29.5735, + "cup": 236.588, + "cups": 236.588, + "pt": 473.176, + "pint": 473.176, + "pints": 473.176, + "qt": 946.353, + "quart": 946.353, + 
"quarts": 946.353, + "gal": 3_785.41, + "gallon": 3_785.41, + "gallons": 3_785.41, +} + +# โ”€โ”€ Imperial display preferences โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ +# For convert_from_metric โ€” which metric threshold triggers the next +# larger imperial unit. Keeps display numbers human-readable. + +_IMPERIAL_MASS_THRESHOLDS: list[tuple[float, str, float]] = [ + # (min grams, display unit, grams-per-unit) + (453.592, "lb", 453.592), # โ‰ฅ 1 lb โ†’ show in lb + (0.0, "oz", 28.3495), # otherwise โ†’ oz +] + +_METRIC_MASS_THRESHOLDS: list[tuple[float, str, float]] = [ + (1_000.0, "kg", 1_000.0), + (0.0, "g", 1.0), +] + +_IMPERIAL_VOLUME_THRESHOLDS: list[tuple[float, str, float]] = [ + (3_785.41, "gal", 3_785.41), + (946.353, "qt", 946.353), + (473.176, "pt", 473.176), + (236.588, "cup", 236.588), + (0.0, "fl oz", 29.5735), +] + +_METRIC_VOLUME_THRESHOLDS: list[tuple[float, str, float]] = [ + (1_000.0, "l", 1_000.0), + (0.0, "ml", 1.0), +] + + +# โ”€โ”€ Public API โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +def normalize_unit(raw: str) -> str: + """Canonicalize a raw unit string (lowercase, stripped).""" + return raw.strip().lower() + + +def classify_unit(unit: str) -> str: + """Return 'mass', 'volume', or 'count' for a canonical unit string.""" + u = normalize_unit(unit) + if u in MASS_UNITS: + return "mass" + if u in VOLUME_UNITS: + return "volume" + return "count" + + +def normalize_to_metric(quantity: float, unit: str) -> tuple[float, str]: + """Convert quantity + unit to the canonical metric base unit. 
+ + Returns (metric_quantity, base_unit) where base_unit is one of: + 'g' โ€” grams (for all mass units) + 'ml' โ€” millilitres (for all volume units) + 'each' โ€” countable items (for everything else) + + Unknown or ambiguous units (e.g. 'bag', 'bunch') are treated as count. + """ + u = normalize_unit(unit) + + if u in _TO_GRAMS: + return round(quantity * _TO_GRAMS[u], 4), "g" + + if u in _TO_ML: + return round(quantity * _TO_ML[u], 4), "ml" + + # Count / ambiguous โ€” store as-is + return quantity, "each" + + +def convert_from_metric( + quantity: float, + base_unit: str, + preferred: str = "metric", +) -> tuple[float, str]: + """Convert a stored metric quantity to a display unit. + + Args: + quantity: stored metric quantity + base_unit: 'g', 'ml', or 'each' + preferred: 'metric' or 'imperial' + + Returns (display_quantity, display_unit). + Rounds to 2 decimal places. + """ + if base_unit == "each": + return quantity, "each" + + thresholds: list[tuple[float, str, float]] + + if base_unit == "g": + thresholds = ( + _IMPERIAL_MASS_THRESHOLDS if preferred == "imperial" + else _METRIC_MASS_THRESHOLDS + ) + elif base_unit == "ml": + thresholds = ( + _IMPERIAL_VOLUME_THRESHOLDS if preferred == "imperial" + else _METRIC_VOLUME_THRESHOLDS + ) + else: + return quantity, base_unit + + for min_qty, display_unit, factor in thresholds: + if quantity >= min_qty: + return round(quantity / factor, 2), display_unit + + return round(quantity, 2), base_unit diff --git a/compose.cloud.yml b/compose.cloud.yml new file mode 100644 index 0000000..02c0efa --- /dev/null +++ b/compose.cloud.yml @@ -0,0 +1,43 @@ +# Kiwi โ€” cloud managed instance +# Project: kiwi-cloud (docker compose -f compose.cloud.yml -p kiwi-cloud ...) +# Web: http://127.0.0.1:8515 โ†’ menagerie.circuitforge.tech/kiwi (via Caddy + JWT auth) +# API: internal only on kiwi-cloud-net (nginx proxies /api/ โ†’ api:8512) + +services: + api: + build: + context: .. 
+ dockerfile: kiwi/Dockerfile + restart: unless-stopped + env_file: .env + environment: + CLOUD_MODE: "true" + CLOUD_DATA_ROOT: /devl/kiwi-cloud-data + # DIRECTUS_JWT_SECRET, HEIMDALL_URL, HEIMDALL_ADMIN_TOKEN โ€” set in .env + volumes: + - /devl/kiwi-cloud-data:/devl/kiwi-cloud-data + # LLM config โ€” shared with other CF products; read-only in container + - ${HOME}/.config/circuitforge:/root/.config/circuitforge:ro + networks: + - kiwi-cloud-net + + web: + build: + context: . + dockerfile: docker/web/Dockerfile + args: + VITE_BASE_URL: /kiwi + VITE_API_BASE: /kiwi + restart: unless-stopped + ports: + - "8515:80" + volumes: + - ./docker/web/nginx.cloud.conf:/etc/nginx/conf.d/default.conf:ro + networks: + - kiwi-cloud-net + depends_on: + - api + +networks: + kiwi-cloud-net: + driver: bridge diff --git a/compose.yml b/compose.yml new file mode 100644 index 0000000..c26deb5 --- /dev/null +++ b/compose.yml @@ -0,0 +1,21 @@ +services: + api: + build: + context: .. + dockerfile: kiwi/Dockerfile + network_mode: host + env_file: .env + volumes: + - ./data:/app/kiwi/data + - ${HOME}/.config/circuitforge:/root/.config/circuitforge:ro + restart: unless-stopped + + web: + build: + context: . + dockerfile: docker/web/Dockerfile + ports: + - "8511:80" + restart: unless-stopped + depends_on: + - api diff --git a/docker/web/Dockerfile b/docker/web/Dockerfile new file mode 100644 index 0000000..fb2ec13 --- /dev/null +++ b/docker/web/Dockerfile @@ -0,0 +1,22 @@ +# Stage 1: build +FROM node:20-alpine AS build +WORKDIR /app +COPY frontend/package*.json ./ +RUN npm ci --prefer-offline +COPY frontend/ ./ + +# Build-time env vars โ€” Vite bakes these as static strings into the bundle. 
+# VITE_BASE_URL: URL prefix the app is served under (/ for dev, /kiwi for cloud) +# VITE_API_BASE: prefix for all /api/* fetch calls (empty for dev, /kiwi for cloud) +ARG VITE_BASE_URL=/ +ARG VITE_API_BASE= +ENV VITE_BASE_URL=$VITE_BASE_URL +ENV VITE_API_BASE=$VITE_API_BASE + +RUN npm run build + +# Stage 2: serve +FROM nginx:alpine +COPY docker/web/nginx.conf /etc/nginx/conf.d/default.conf +COPY --from=build /app/dist /usr/share/nginx/html +EXPOSE 80 diff --git a/docker/web/nginx.cloud.conf b/docker/web/nginx.cloud.conf new file mode 100644 index 0000000..ea8d37a --- /dev/null +++ b/docker/web/nginx.cloud.conf @@ -0,0 +1,32 @@ +server { + listen 80; + server_name _; + + root /usr/share/nginx/html; + index index.html; + + # Proxy API requests to the FastAPI container via Docker bridge network. + location /api/ { + proxy_pass http://api:8512; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Proto $http_x_forwarded_proto; + # Forward the session header injected by Caddy from cf_session cookie. 
+ proxy_set_header X-CF-Session $http_x_cf_session; + } + + location = /index.html { + add_header Cache-Control "no-cache, no-store, must-revalidate"; + try_files $uri /index.html; + } + + location / { + try_files $uri $uri/ /index.html; + } + + location ~* \.(js|css|png|jpg|jpeg|gif|ico|svg|woff2?)$ { + expires 1y; + add_header Cache-Control "public, immutable"; + } +} diff --git a/docker/web/nginx.conf b/docker/web/nginx.conf new file mode 100644 index 0000000..a987d0f --- /dev/null +++ b/docker/web/nginx.conf @@ -0,0 +1,27 @@ +server { + listen 80; + server_name _; + + root /usr/share/nginx/html; + index index.html; + + location /api/ { + proxy_pass http://172.17.0.1:8512; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + } + + location = /index.html { + add_header Cache-Control "no-cache, no-store, must-revalidate"; + try_files $uri /index.html; + } + + location / { + try_files $uri $uri/ /index.html; + } + + location ~* \.(js|css|png|jpg|jpeg|gif|ico|svg|woff2?)$ { + expires 1y; + add_header Cache-Control "public, immutable"; + } +} diff --git a/environment.yml b/environment.yml new file mode 100644 index 0000000..836a5b9 --- /dev/null +++ b/environment.yml @@ -0,0 +1,18 @@ +name: kiwi +channels: + - conda-forge + - defaults +dependencies: + - python=3.11 + - pip + - pip: + - fastapi>=0.110 + - uvicorn[standard]>=0.27 + - python-multipart>=0.0.9 + - aiofiles>=23.0 + - opencv-python>=4.8 + - numpy>=1.25 + - pyzbar>=0.1.9 + - httpx>=0.27 + - pydantic>=2.5 + - PyJWT>=2.8 diff --git a/frontend/.env b/frontend/.env new file mode 100644 index 0000000..3bde7f3 --- /dev/null +++ b/frontend/.env @@ -0,0 +1,3 @@ +# API Configuration +# Use the server's actual IP instead of localhost for remote access +VITE_API_URL=http://10.1.10.71:8000/api/v1 diff --git a/frontend/.gitignore b/frontend/.gitignore new file mode 100644 index 0000000..a547bf3 --- /dev/null +++ b/frontend/.gitignore @@ -0,0 +1,24 @@ +# Logs +logs +*.log +npm-debug.log* 
+yarn-debug.log* +yarn-error.log* +pnpm-debug.log* +lerna-debug.log* + +node_modules +dist +dist-ssr +*.local + +# Editor directories and files +.vscode/* +!.vscode/extensions.json +.idea +.DS_Store +*.suo +*.ntvs* +*.njsproj +*.sln +*.sw? diff --git a/frontend/.vscode/extensions.json b/frontend/.vscode/extensions.json new file mode 100644 index 0000000..a7cea0b --- /dev/null +++ b/frontend/.vscode/extensions.json @@ -0,0 +1,3 @@ +{ + "recommendations": ["Vue.volar"] +} diff --git a/frontend/THEMING_SYSTEM.md b/frontend/THEMING_SYSTEM.md new file mode 100644 index 0000000..125905e --- /dev/null +++ b/frontend/THEMING_SYSTEM.md @@ -0,0 +1,458 @@ +# Vue Frontend - Theming System Documentation + +**Date**: 2025-10-31 +**Status**: โœ… Fully Implemented - Light/Dark Mode Support + +--- + +## Overview + +The Vue frontend now uses a comprehensive CSS custom properties (variables) system that automatically adapts to the user's system color scheme preference. All components are theme-aware and will automatically switch between light and dark modes. + +--- + +## How It Works + +### Automatic Theme Detection + +The theming system uses the CSS `prefers-color-scheme` media query to detect the user's system preference: + +- **Dark Mode (Default)**: Used when system is set to dark mode or if no preference is detected +- **Light Mode**: Automatically activated when system prefers light mode + +### Color Scheme Declaration + +All theme variables are defined in `/frontend/src/style.css`: + +```css +:root { + color-scheme: light dark; /* Declares support for both schemes */ + + /* Dark mode variables (default) */ + --color-text-primary: rgba(255, 255, 255, 0.87); + --color-bg-primary: #242424; + /* ... */ +} + +@media (prefers-color-scheme: light) { + :root { + /* Light mode overrides */ + --color-text-primary: #213547; + --color-bg-primary: #f5f5f5; + /* ... 
*/ + } +} +``` + +--- + +## Theme Variables Reference + +### Text Colors + +| Variable | Dark Mode | Light Mode | Usage | +|----------|-----------|------------|-------| +| `--color-text-primary` | `rgba(255, 255, 255, 0.87)` | `#213547` | Main text | +| `--color-text-secondary` | `rgba(255, 255, 255, 0.6)` | `#666` | Secondary text, labels | +| `--color-text-muted` | `rgba(255, 255, 255, 0.4)` | `#999` | Disabled, hints | + +### Background Colors + +| Variable | Dark Mode | Light Mode | Usage | +|----------|-----------|------------|-------| +| `--color-bg-primary` | `#242424` | `#f5f5f5` | Page background | +| `--color-bg-secondary` | `#1a1a1a` | `#ffffff` | Secondary surfaces | +| `--color-bg-elevated` | `#2d2d2d` | `#ffffff` | Elevated surfaces, dropdowns | +| `--color-bg-card` | `#2d2d2d` | `#ffffff` | Card backgrounds | +| `--color-bg-input` | `#1a1a1a` | `#ffffff` | Input fields | + +### Border Colors + +| Variable | Dark Mode | Light Mode | Usage | +|----------|-----------|------------|-------| +| `--color-border` | `rgba(255, 255, 255, 0.1)` | `#ddd` | Default borders | +| `--color-border-focus` | `rgba(255, 255, 255, 0.2)` | `#ccc` | Focus state borders | + +### Brand Colors + +These remain consistent across themes: + +| Variable | Value | Usage | +|----------|-------|-------| +| `--color-primary` | `#667eea` | Primary brand color | +| `--color-primary-dark` | `#5568d3` | Darker variant (hover) | +| `--color-primary-light` | `#7d8ff0` | Lighter variant | +| `--color-secondary` | `#764ba2` | Secondary brand color | + +### Status Colors + +Base colors remain the same, but backgrounds adjust for contrast: + +#### Success (Green) + +| Variable | Value | Light Mode Bg | Usage | +|----------|-------|---------------|-------| +| `--color-success` | `#4CAF50` | Same | Success actions | +| `--color-success-dark` | `#45a049` | Same | Hover states | +| `--color-success-light` | `#66bb6a` | Same | Accents | +| `--color-success-bg` | `rgba(76, 175, 80, 0.1)` | `#d4edda` 
| Success backgrounds | +| `--color-success-border` | `rgba(76, 175, 80, 0.3)` | `#c3e6cb` | Success borders | + +#### Warning (Orange) + +| Variable | Value | Light Mode Bg | Usage | +|----------|-------|---------------|-------| +| `--color-warning` | `#ff9800` | Same | Warning states | +| `--color-warning-dark` | `#f57c00` | Same | Hover states | +| `--color-warning-light` | `#ffb74d` | Same | Accents | +| `--color-warning-bg` | `rgba(255, 152, 0, 0.1)` | `#fff3cd` | Warning backgrounds | +| `--color-warning-border` | `rgba(255, 152, 0, 0.3)` | `#ffeaa7` | Warning borders | + +#### Error (Red) + +| Variable | Value | Light Mode Bg | Usage | +|----------|-------|---------------|-------| +| `--color-error` | `#f44336` | Same | Error states | +| `--color-error-dark` | `#d32f2f` | Same | Hover states | +| `--color-error-light` | `#ff6b6b` | Same | Accents | +| `--color-error-bg` | `rgba(244, 67, 54, 0.1)` | `#f8d7da` | Error backgrounds | +| `--color-error-border` | `rgba(244, 67, 54, 0.3)` | `#f5c6cb` | Error borders | + +#### Info (Blue) + +| Variable | Value | Light Mode Bg | Usage | +|----------|-------|---------------|-------| +| `--color-info` | `#2196F3` | Same | Info states | +| `--color-info-dark` | `#1976D2` | Same | Hover states | +| `--color-info-light` | `#64b5f6` | Same | Accents | +| `--color-info-bg` | `rgba(33, 150, 243, 0.1)` | `#d1ecf1` | Info backgrounds | +| `--color-info-border` | `rgba(33, 150, 243, 0.3)` | `#bee5eb` | Info borders | + +### Gradients + +| Variable | Value | Usage | +|----------|-------|-------| +| `--gradient-primary` | `linear-gradient(135deg, var(--color-primary) 0%, var(--color-secondary) 100%)` | Headers, buttons | + +### Shadows + +Adjust opacity for light mode: + +| Variable | Dark Mode | Light Mode | Usage | +|----------|-----------|------------|-------| +| `--shadow-sm` | `0 1px 3px rgba(0,0,0,0.3)` | `0 1px 3px rgba(0,0,0,0.1)` | Small shadows | +| `--shadow-md` | `0 4px 6px rgba(0,0,0,0.3)` | `0 4px 6px 
rgba(0,0,0,0.1)` | Medium shadows | +| `--shadow-lg` | `0 10px 20px rgba(0,0,0,0.4)` | `0 10px 20px rgba(0,0,0,0.15)` | Large shadows | +| `--shadow-xl` | `0 20px 40px rgba(0,0,0,0.5)` | `0 20px 40px rgba(0,0,0,0.2)` | Extra large shadows | + +### Typography + +| Variable | Value | Usage | +|----------|-------|-------| +| `--font-size-xs` | `12px` | Very small text | +| `--font-size-sm` | `14px` | Small text, labels | +| `--font-size-base` | `16px` | Body text | +| `--font-size-lg` | `18px` | Large text | +| `--font-size-xl` | `24px` | Headings | +| `--font-size-2xl` | `32px` | Large headings, stats | + +### Spacing + +| Variable | Value | Usage | +|----------|-------|-------| +| `--spacing-xs` | `4px` | Tiny gaps | +| `--spacing-sm` | `8px` | Small gaps | +| `--spacing-md` | `16px` | Medium gaps | +| `--spacing-lg` | `24px` | Large gaps | +| `--spacing-xl` | `32px` | Extra large gaps | + +### Border Radius + +| Variable | Value | Usage | +|----------|-------|-------| +| `--radius-sm` | `4px` | Small radius | +| `--radius-md` | `6px` | Medium radius | +| `--radius-lg` | `8px` | Large radius | +| `--radius-xl` | `12px` | Extra large radius (cards) | + +--- + +## Usage Examples + +### In Vue Components + +```vue + +``` + +### Status-Specific Styling + +```vue + +``` + +--- + +## Testing Theme Modes + +### On macOS + +1. **System Preferences** โ†’ **General** โ†’ **Appearance** +2. Select "Dark" or "Light" +3. Vue app will automatically switch + +### On Windows + +1. **Settings** โ†’ **Personalization** โ†’ **Colors** +2. Choose "Dark" or "Light" mode +3. Vue app will automatically switch + +### On Linux (GNOME) + +1. **Settings** โ†’ **Appearance** +2. Toggle **Dark Style** +3. Vue app will automatically switch + +### Browser DevTools Testing + +**Chrome/Edge**: +1. Open DevTools (F12) +2. Press Ctrl+Shift+P (Cmd+Shift+P on Mac) +3. Type "Rendering" +4. Select "Emulate CSS media feature prefers-color-scheme" +5. 
Choose "prefers-color-scheme: dark" or "light" + +**Firefox**: +1. Open DevTools (F12) +2. Click the settings gear icon +3. Scroll to "Inspector" +4. Toggle "Disable prefers-color-scheme media queries" + +--- + +## Components Using Theme Variables + +All components have been updated to use theme variables: + +โœ… **App.vue** +- Header gradient +- Footer styling +- Tab navigation + +โœ… **InventoryList.vue** +- All cards and backgrounds +- Status colors (success/warning/error) +- Form inputs and labels +- Buttons and actions +- Upload areas +- Loading spinners + +โœ… **ReceiptsView.vue** +- Upload area +- Receipt cards +- Status indicators +- Stats display + +โœ… **EditItemModal.vue** +- Modal background +- Form fields +- Expiration date color coding +- Buttons + +--- + +## Best Practices + +### DO โœ… + +1. **Always use theme variables** instead of hardcoded colors + ```css + /* Good */ + color: var(--color-text-primary); + + /* Bad */ + color: #333; + ``` + +2. **Use semantic variable names** + ```css + /* Good */ + background: var(--color-bg-card); + + /* Bad */ + background: var(--color-bg-elevated); /* Wrong semantic meaning */ + ``` + +3. **Use spacing variables** for consistency + ```css + /* Good */ + padding: var(--spacing-lg); + + /* Bad */ + padding: 24px; + ``` + +4. **Use status colors appropriately** + ```css + /* Good - Expiration warning */ + .expiring { color: var(--color-warning); } + + /* Bad - Using error for warning */ + .expiring { color: var(--color-error); } + ``` + +### DON'T โŒ + +1. **Don't hardcode colors** + ```css + /* Bad */ + background: #ffffff; + color: #333333; + ``` + +2. **Don't use pixel values for spacing** + ```css + /* Bad */ + margin: 16px; + + /* Good */ + margin: var(--spacing-md); + ``` + +3. **Don't mix theme and non-theme styles** + ```css + /* Bad */ + .card { + background: var(--color-bg-card); + border: 1px solid #ddd; /* Hardcoded! 
*/ + } + + /* Good */ + .card { + background: var(--color-bg-card); + border: 1px solid var(--color-border); + } + ``` + +--- + +## Adding New Theme Variables + +If you need to add new theme variables: + +1. **Add to dark mode (default)** in `:root`: + ```css + :root { + --color-my-new-color: #value; + } + ``` + +2. **Add light mode override** in media query: + ```css + @media (prefers-color-scheme: light) { + :root { + --color-my-new-color: #different-value; + } + } + ``` + +3. **Use in components**: + ```css + .my-element { + color: var(--color-my-new-color); + } + ``` + +--- + +## Future Enhancements + +Potential additions to the theming system: + +1. **Manual Theme Toggle** + - Add a theme switcher button + - Store preference in localStorage + - Override system preference + +2. **Custom Color Schemes** + - Allow users to choose accent colors + - Save theme preferences per user + +3. **High Contrast Mode** + - Support `prefers-contrast: high` + - Increase border widths and color differences + +4. **Reduced Motion** + - Support `prefers-reduced-motion` + - Disable animations for accessibility + +--- + +## Browser Support + +The theming system is supported in: + +โœ… **Chrome/Edge**: 76+ +โœ… **Firefox**: 67+ +โœ… **Safari**: 12.1+ +โœ… **Opera**: 62+ + +CSS Custom Properties (Variables) are supported in all modern browsers. 
+ +--- + +## Summary + +**What We Have**: +- โœ… Automatic light/dark mode detection +- โœ… Comprehensive variable system (50+ variables) +- โœ… All components are theme-aware +- โœ… Semantic, maintainable color system +- โœ… Consistent spacing, typography, and shadows +- โœ… Status colors with proper contrast in both modes + +**Benefits**: +- ๐ŸŽจ Consistent design across the entire app +- ๐ŸŒ“ Automatic theme switching based on system preference +- ๐Ÿ”ง Easy to maintain and update colors globally +- โ™ฟ Better accessibility with proper contrast ratios +- ๐Ÿš€ Future-proof for theme customization + +**The Vue frontend now fully supports light and dark modes! ๐ŸŽ‰** diff --git a/frontend/index.html b/frontend/index.html new file mode 100644 index 0000000..2b36112 --- /dev/null +++ b/frontend/index.html @@ -0,0 +1,13 @@ + + + + + + + frontend + + +
+ + + diff --git a/frontend/package-lock.json b/frontend/package-lock.json new file mode 100644 index 0000000..8cea527 --- /dev/null +++ b/frontend/package-lock.json @@ -0,0 +1,1884 @@ +{ + "name": "frontend", + "version": "0.0.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "frontend", + "version": "0.0.0", + "dependencies": { + "axios": "^1.13.1", + "pinia": "^3.0.3", + "vue": "^3.5.22", + "vue-router": "^4.6.3" + }, + "devDependencies": { + "@types/node": "^24.6.0", + "@vitejs/plugin-vue": "^6.0.1", + "@vue/tsconfig": "^0.8.1", + "typescript": "~5.9.3", + "vite": "^7.1.7", + "vue-tsc": "^3.1.0" + } + }, + "node_modules/@babel/helper-string-parser": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.27.1.tgz", + "integrity": "sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA==", + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-identifier": { + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.28.5.tgz", + "integrity": "sha512-qSs4ifwzKJSV39ucNjsvc6WVHs6b7S03sOh2OcHF9UHfVPqWWALUsNUVzhSBiItjRZoLHx7nIarVjqKVusUZ1Q==", + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/parser": { + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.5.tgz", + "integrity": "sha512-KKBU1VGYR7ORr3At5HAtUQ+TV3SzRCXmA/8OdDZiLDBIZxVyzXuztPjfLd3BV1PRAQGCMWWSHYhL0F8d5uHBDQ==", + "license": "MIT", + "dependencies": { + "@babel/types": "^7.28.5" + }, + "bin": { + "parser": "bin/babel-parser.js" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@babel/types": { + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.5.tgz", + "integrity": 
"sha512-qQ5m48eI/MFLQ5PxQj4PFaprjyCTLI37ElWMmNs0K8Lk3dVeOdNpB3ks8jc7yM5CDmVC73eMVk/trk3fgmrUpA==", + "license": "MIT", + "dependencies": { + "@babel/helper-string-parser": "^7.27.1", + "@babel/helper-validator-identifier": "^7.28.5" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@esbuild/aix-ppc64": { + "version": "0.25.11", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.25.11.tgz", + "integrity": "sha512-Xt1dOL13m8u0WE8iplx9Ibbm+hFAO0GsU2P34UNoDGvZYkY8ifSiy6Zuc1lYxfG7svWE2fzqCUmFp5HCn51gJg==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "aix" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-arm": { + "version": "0.25.11", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.25.11.tgz", + "integrity": "sha512-uoa7dU+Dt3HYsethkJ1k6Z9YdcHjTrSb5NUy66ZfZaSV8hEYGD5ZHbEMXnqLFlbBflLsl89Zke7CAdDJ4JI+Gg==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-arm64": { + "version": "0.25.11", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.25.11.tgz", + "integrity": "sha512-9slpyFBc4FPPz48+f6jyiXOx/Y4v34TUeDDXJpZqAWQn/08lKGeD8aDp9TMn9jDz2CiEuHwfhRmGBvpnd/PWIQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-x64": { + "version": "0.25.11", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.25.11.tgz", + "integrity": "sha512-Sgiab4xBjPU1QoPEIqS3Xx+R2lezu0LKIEcYe6pftr56PqPygbB7+szVnzoShbx64MUupqoE0KyRlN7gezbl8g==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/darwin-arm64": { + 
"version": "0.25.11", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.25.11.tgz", + "integrity": "sha512-VekY0PBCukppoQrycFxUqkCojnTQhdec0vevUL/EDOCnXd9LKWqD/bHwMPzigIJXPhC59Vd1WFIL57SKs2mg4w==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/darwin-x64": { + "version": "0.25.11", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.25.11.tgz", + "integrity": "sha512-+hfp3yfBalNEpTGp9loYgbknjR695HkqtY3d3/JjSRUyPg/xd6q+mQqIb5qdywnDxRZykIHs3axEqU6l1+oWEQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/freebsd-arm64": { + "version": "0.25.11", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.25.11.tgz", + "integrity": "sha512-CmKjrnayyTJF2eVuO//uSjl/K3KsMIeYeyN7FyDBjsR3lnSJHaXlVoAK8DZa7lXWChbuOk7NjAc7ygAwrnPBhA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/freebsd-x64": { + "version": "0.25.11", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.25.11.tgz", + "integrity": "sha512-Dyq+5oscTJvMaYPvW3x3FLpi2+gSZTCE/1ffdwuM6G1ARang/mb3jvjxs0mw6n3Lsw84ocfo9CrNMqc5lTfGOw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-arm": { + "version": "0.25.11", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.25.11.tgz", + "integrity": "sha512-TBMv6B4kCfrGJ8cUPo7vd6NECZH/8hPpBHHlYI3qzoYFvWu2AdTvZNuU/7hsbKWqu/COU7NIK12dHAAqBLLXgw==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], 
+ "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-arm64": { + "version": "0.25.11", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.25.11.tgz", + "integrity": "sha512-Qr8AzcplUhGvdyUF08A1kHU3Vr2O88xxP0Tm8GcdVOUm25XYcMPp2YqSVHbLuXzYQMf9Bh/iKx7YPqECs6ffLA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-ia32": { + "version": "0.25.11", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.25.11.tgz", + "integrity": "sha512-TmnJg8BMGPehs5JKrCLqyWTVAvielc615jbkOirATQvWWB1NMXY77oLMzsUjRLa0+ngecEmDGqt5jiDC6bfvOw==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-loong64": { + "version": "0.25.11", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.25.11.tgz", + "integrity": "sha512-DIGXL2+gvDaXlaq8xruNXUJdT5tF+SBbJQKbWy/0J7OhU8gOHOzKmGIlfTTl6nHaCOoipxQbuJi7O++ldrxgMw==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-mips64el": { + "version": "0.25.11", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.25.11.tgz", + "integrity": "sha512-Osx1nALUJu4pU43o9OyjSCXokFkFbyzjXb6VhGIJZQ5JZi8ylCQ9/LFagolPsHtgw6himDSyb5ETSfmp4rpiKQ==", + "cpu": [ + "mips64el" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-ppc64": { + "version": "0.25.11", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.25.11.tgz", + "integrity": "sha512-nbLFgsQQEsBa8XSgSTSlrnBSrpoWh7ioFDUmwo158gIm5NNP+17IYmNWzaIzWmgCxq56vfr34xGkOcZ7jX6CPw==", + 
"cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-riscv64": { + "version": "0.25.11", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.25.11.tgz", + "integrity": "sha512-HfyAmqZi9uBAbgKYP1yGuI7tSREXwIb438q0nqvlpxAOs3XnZ8RsisRfmVsgV486NdjD7Mw2UrFSw51lzUk1ww==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-s390x": { + "version": "0.25.11", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.25.11.tgz", + "integrity": "sha512-HjLqVgSSYnVXRisyfmzsH6mXqyvj0SA7pG5g+9W7ESgwA70AXYNpfKBqh1KbTxmQVaYxpzA/SvlB9oclGPbApw==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-x64": { + "version": "0.25.11", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.25.11.tgz", + "integrity": "sha512-HSFAT4+WYjIhrHxKBwGmOOSpphjYkcswF449j6EjsjbinTZbp8PJtjsVK1XFJStdzXdy/jaddAep2FGY+wyFAQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/netbsd-arm64": { + "version": "0.25.11", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.25.11.tgz", + "integrity": "sha512-hr9Oxj1Fa4r04dNpWr3P8QKVVsjQhqrMSUzZzf+LZcYjZNqhA3IAfPQdEh1FLVUJSiu6sgAwp3OmwBfbFgG2Xg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/netbsd-x64": { + "version": "0.25.11", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.25.11.tgz", + "integrity": 
"sha512-u7tKA+qbzBydyj0vgpu+5h5AeudxOAGncb8N6C9Kh1N4n7wU1Xw1JDApsRjpShRpXRQlJLb9wY28ELpwdPcZ7A==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openbsd-arm64": { + "version": "0.25.11", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.25.11.tgz", + "integrity": "sha512-Qq6YHhayieor3DxFOoYM1q0q1uMFYb7cSpLD2qzDSvK1NAvqFi8Xgivv0cFC6J+hWVw2teCYltyy9/m/14ryHg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openbsd-x64": { + "version": "0.25.11", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.25.11.tgz", + "integrity": "sha512-CN+7c++kkbrckTOz5hrehxWN7uIhFFlmS/hqziSFVWpAzpWrQoAG4chH+nN3Be+Kzv/uuo7zhX716x3Sn2Jduw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openharmony-arm64": { + "version": "0.25.11", + "resolved": "https://registry.npmjs.org/@esbuild/openharmony-arm64/-/openharmony-arm64-0.25.11.tgz", + "integrity": "sha512-rOREuNIQgaiR+9QuNkbkxubbp8MSO9rONmwP5nKncnWJ9v5jQ4JxFnLu4zDSRPf3x4u+2VN4pM4RdyIzDty/wQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/sunos-x64": { + "version": "0.25.11", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.25.11.tgz", + "integrity": "sha512-nq2xdYaWxyg9DcIyXkZhcYulC6pQ2FuCgem3LI92IwMgIZ69KHeY8T4Y88pcwoLIjbed8n36CyKoYRDygNSGhA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "sunos" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-arm64": { + "version": 
"0.25.11", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.25.11.tgz", + "integrity": "sha512-3XxECOWJq1qMZ3MN8srCJ/QfoLpL+VaxD/WfNRm1O3B4+AZ/BnLVgFbUV3eiRYDMXetciH16dwPbbHqwe1uU0Q==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-ia32": { + "version": "0.25.11", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.25.11.tgz", + "integrity": "sha512-3ukss6gb9XZ8TlRyJlgLn17ecsK4NSQTmdIXRASVsiS2sQ6zPPZklNJT5GR5tE/MUarymmy8kCEf5xPCNCqVOA==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-x64": { + "version": "0.25.11", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.25.11.tgz", + "integrity": "sha512-D7Hpz6A2L4hzsRpPaCYkQnGOotdUpDzSGRIv9I+1ITdHROSFUWW95ZPZWQmGka1Fg7W3zFJowyn9WGwMJ0+KPA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@jridgewell/sourcemap-codec": { + "version": "1.5.5", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.5.tgz", + "integrity": "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==", + "license": "MIT" + }, + "node_modules/@rolldown/pluginutils": { + "version": "1.0.0-beta.29", + "resolved": "https://registry.npmjs.org/@rolldown/pluginutils/-/pluginutils-1.0.0-beta.29.tgz", + "integrity": "sha512-NIJgOsMjbxAXvoGq/X0gD7VPMQ8j9g0BiDaNjVNVjvl+iKXxL3Jre0v31RmBYeLEmkbj2s02v8vFTbUXi5XS2Q==", + "dev": true, + "license": "MIT" + }, + "node_modules/@rollup/rollup-android-arm-eabi": { + "version": "4.52.5", + "resolved": 
"https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.52.5.tgz", + "integrity": "sha512-8c1vW4ocv3UOMp9K+gToY5zL2XiiVw3k7f1ksf4yO1FlDFQ1C2u72iACFnSOceJFsWskc2WZNqeRhFRPzv+wtQ==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@rollup/rollup-android-arm64": { + "version": "4.52.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.52.5.tgz", + "integrity": "sha512-mQGfsIEFcu21mvqkEKKu2dYmtuSZOBMmAl5CFlPGLY94Vlcm+zWApK7F/eocsNzp8tKmbeBP8yXyAbx0XHsFNA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@rollup/rollup-darwin-arm64": { + "version": "4.52.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.52.5.tgz", + "integrity": "sha512-takF3CR71mCAGA+v794QUZ0b6ZSrgJkArC+gUiG6LB6TQty9T0Mqh3m2ImRBOxS2IeYBo4lKWIieSvnEk2OQWA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@rollup/rollup-darwin-x64": { + "version": "4.52.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.52.5.tgz", + "integrity": "sha512-W901Pla8Ya95WpxDn//VF9K9u2JbocwV/v75TE0YIHNTbhqUTv9w4VuQ9MaWlNOkkEfFwkdNhXgcLqPSmHy0fA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@rollup/rollup-freebsd-arm64": { + "version": "4.52.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.52.5.tgz", + "integrity": "sha512-QofO7i7JycsYOWxe0GFqhLmF6l1TqBswJMvICnRUjqCx8b47MTo46W8AoeQwiokAx3zVryVnxtBMcGcnX12LvA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/@rollup/rollup-freebsd-x64": { + "version": "4.52.5", + 
"resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.52.5.tgz", + "integrity": "sha512-jr21b/99ew8ujZubPo9skbrItHEIE50WdV86cdSoRkKtmWa+DDr6fu2c/xyRT0F/WazZpam6kk7IHBerSL7LDQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/@rollup/rollup-linux-arm-gnueabihf": { + "version": "4.52.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.52.5.tgz", + "integrity": "sha512-PsNAbcyv9CcecAUagQefwX8fQn9LQ4nZkpDboBOttmyffnInRy8R8dSg6hxxl2Re5QhHBf6FYIDhIj5v982ATQ==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm-musleabihf": { + "version": "4.52.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.52.5.tgz", + "integrity": "sha512-Fw4tysRutyQc/wwkmcyoqFtJhh0u31K+Q6jYjeicsGJJ7bbEq8LwPWV/w0cnzOqR2m694/Af6hpFayLJZkG2VQ==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm64-gnu": { + "version": "4.52.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.52.5.tgz", + "integrity": "sha512-a+3wVnAYdQClOTlyapKmyI6BLPAFYs0JM8HRpgYZQO02rMR09ZcV9LbQB+NL6sljzG38869YqThrRnfPMCDtZg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm64-musl": { + "version": "4.52.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.52.5.tgz", + "integrity": "sha512-AvttBOMwO9Pcuuf7m9PkC1PUIKsfaAJ4AYhy944qeTJgQOqJYJ9oVl2nYgY7Rk0mkbsuOpCAYSs6wLYB2Xiw0Q==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + 
"node_modules/@rollup/rollup-linux-loong64-gnu": { + "version": "4.52.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-gnu/-/rollup-linux-loong64-gnu-4.52.5.tgz", + "integrity": "sha512-DkDk8pmXQV2wVrF6oq5tONK6UHLz/XcEVow4JTTerdeV1uqPeHxwcg7aFsfnSm9L+OO8WJsWotKM2JJPMWrQtA==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-ppc64-gnu": { + "version": "4.52.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-gnu/-/rollup-linux-ppc64-gnu-4.52.5.tgz", + "integrity": "sha512-W/b9ZN/U9+hPQVvlGwjzi+Wy4xdoH2I8EjaCkMvzpI7wJUs8sWJ03Rq96jRnHkSrcHTpQe8h5Tg3ZzUPGauvAw==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-riscv64-gnu": { + "version": "4.52.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.52.5.tgz", + "integrity": "sha512-sjQLr9BW7R/ZiXnQiWPkErNfLMkkWIoCz7YMn27HldKsADEKa5WYdobaa1hmN6slu9oWQbB6/jFpJ+P2IkVrmw==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-riscv64-musl": { + "version": "4.52.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.52.5.tgz", + "integrity": "sha512-hq3jU/kGyjXWTvAh2awn8oHroCbrPm8JqM7RUpKjalIRWWXE01CQOf/tUNWNHjmbMHg/hmNCwc/Pz3k1T/j/Lg==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-s390x-gnu": { + "version": "4.52.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.52.5.tgz", + "integrity": "sha512-gn8kHOrku8D4NGHMK1Y7NA7INQTRdVOntt1OCYypZPRt6skGbddska44K8iocdpxHTMMNui5oH4elPH4QOLrFQ==", + "cpu": [ + "s390x" + ], + "dev": 
true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-x64-gnu": { + "version": "4.52.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.52.5.tgz", + "integrity": "sha512-hXGLYpdhiNElzN770+H2nlx+jRog8TyynpTVzdlc6bndktjKWyZyiCsuDAlpd+j+W+WNqfcyAWz9HxxIGfZm1Q==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-x64-musl": { + "version": "4.52.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.52.5.tgz", + "integrity": "sha512-arCGIcuNKjBoKAXD+y7XomR9gY6Mw7HnFBv5Rw7wQRvwYLR7gBAgV7Mb2QTyjXfTveBNFAtPt46/36vV9STLNg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-openharmony-arm64": { + "version": "4.52.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-openharmony-arm64/-/rollup-openharmony-arm64-4.52.5.tgz", + "integrity": "sha512-QoFqB6+/9Rly/RiPjaomPLmR/13cgkIGfA40LHly9zcH1S0bN2HVFYk3a1eAyHQyjs3ZJYlXvIGtcCs5tko9Cw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ] + }, + "node_modules/@rollup/rollup-win32-arm64-msvc": { + "version": "4.52.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.52.5.tgz", + "integrity": "sha512-w0cDWVR6MlTstla1cIfOGyl8+qb93FlAVutcor14Gf5Md5ap5ySfQ7R9S/NjNaMLSFdUnKGEasmVnu3lCMqB7w==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-ia32-msvc": { + "version": "4.52.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.52.5.tgz", + "integrity": 
"sha512-Aufdpzp7DpOTULJCuvzqcItSGDH73pF3ko/f+ckJhxQyHtp67rHw3HMNxoIdDMUITJESNE6a8uh4Lo4SLouOUg==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-x64-gnu": { + "version": "4.52.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-gnu/-/rollup-win32-x64-gnu-4.52.5.tgz", + "integrity": "sha512-UGBUGPFp1vkj6p8wCRraqNhqwX/4kNQPS57BCFc8wYh0g94iVIW33wJtQAx3G7vrjjNtRaxiMUylM0ktp/TRSQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-x64-msvc": { + "version": "4.52.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.52.5.tgz", + "integrity": "sha512-TAcgQh2sSkykPRWLrdyy2AiceMckNf5loITqXxFI5VuQjS5tSuw3WlwdN8qv8vzjLAUTvYaH/mVjSFpbkFbpTg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@types/estree": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz", + "integrity": "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/node": { + "version": "24.9.2", + "resolved": "https://registry.npmjs.org/@types/node/-/node-24.9.2.tgz", + "integrity": "sha512-uWN8YqxXxqFMX2RqGOrumsKeti4LlmIMIyV0lgut4jx7KQBcBiW6vkDtIBvHnHIquwNfJhk8v2OtmO8zXWHfPA==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "undici-types": "~7.16.0" + } + }, + "node_modules/@vitejs/plugin-vue": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/@vitejs/plugin-vue/-/plugin-vue-6.0.1.tgz", + "integrity": "sha512-+MaE752hU0wfPFJEUAIxqw18+20euHHdxVtMvbFcOEpjEyfqXH/5DCoTHiVJ0J29EhTJdoTkjEv5YBKU9dnoTw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@rolldown/pluginutils": 
"1.0.0-beta.29" + }, + "engines": { + "node": "^20.19.0 || >=22.12.0" + }, + "peerDependencies": { + "vite": "^5.0.0 || ^6.0.0 || ^7.0.0", + "vue": "^3.2.25" + } + }, + "node_modules/@volar/language-core": { + "version": "2.4.23", + "resolved": "https://registry.npmjs.org/@volar/language-core/-/language-core-2.4.23.tgz", + "integrity": "sha512-hEEd5ET/oSmBC6pi1j6NaNYRWoAiDhINbT8rmwtINugR39loROSlufGdYMF9TaKGfz+ViGs1Idi3mAhnuPcoGQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@volar/source-map": "2.4.23" + } + }, + "node_modules/@volar/source-map": { + "version": "2.4.23", + "resolved": "https://registry.npmjs.org/@volar/source-map/-/source-map-2.4.23.tgz", + "integrity": "sha512-Z1Uc8IB57Lm6k7q6KIDu/p+JWtf3xsXJqAX/5r18hYOTpJyBn0KXUR8oTJ4WFYOcDzWC9n3IflGgHowx6U6z9Q==", + "dev": true, + "license": "MIT" + }, + "node_modules/@volar/typescript": { + "version": "2.4.23", + "resolved": "https://registry.npmjs.org/@volar/typescript/-/typescript-2.4.23.tgz", + "integrity": "sha512-lAB5zJghWxVPqfcStmAP1ZqQacMpe90UrP5RJ3arDyrhy4aCUQqmxPPLB2PWDKugvylmO41ljK7vZ+t6INMTag==", + "dev": true, + "license": "MIT", + "dependencies": { + "@volar/language-core": "2.4.23", + "path-browserify": "^1.0.1", + "vscode-uri": "^3.0.8" + } + }, + "node_modules/@vue/compiler-core": { + "version": "3.5.22", + "resolved": "https://registry.npmjs.org/@vue/compiler-core/-/compiler-core-3.5.22.tgz", + "integrity": "sha512-jQ0pFPmZwTEiRNSb+i9Ow/I/cHv2tXYqsnHKKyCQ08irI2kdF5qmYedmF8si8mA7zepUFmJ2hqzS8CQmNOWOkQ==", + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.28.4", + "@vue/shared": "3.5.22", + "entities": "^4.5.0", + "estree-walker": "^2.0.2", + "source-map-js": "^1.2.1" + } + }, + "node_modules/@vue/compiler-dom": { + "version": "3.5.22", + "resolved": "https://registry.npmjs.org/@vue/compiler-dom/-/compiler-dom-3.5.22.tgz", + "integrity": "sha512-W8RknzUM1BLkypvdz10OVsGxnMAuSIZs9Wdx1vzA3mL5fNMN15rhrSCLiTm6blWeACwUwizzPVqGJgOGBEN/hA==", + "license": "MIT", + 
"dependencies": { + "@vue/compiler-core": "3.5.22", + "@vue/shared": "3.5.22" + } + }, + "node_modules/@vue/compiler-sfc": { + "version": "3.5.22", + "resolved": "https://registry.npmjs.org/@vue/compiler-sfc/-/compiler-sfc-3.5.22.tgz", + "integrity": "sha512-tbTR1zKGce4Lj+JLzFXDq36K4vcSZbJ1RBu8FxcDv1IGRz//Dh2EBqksyGVypz3kXpshIfWKGOCcqpSbyGWRJQ==", + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.28.4", + "@vue/compiler-core": "3.5.22", + "@vue/compiler-dom": "3.5.22", + "@vue/compiler-ssr": "3.5.22", + "@vue/shared": "3.5.22", + "estree-walker": "^2.0.2", + "magic-string": "^0.30.19", + "postcss": "^8.5.6", + "source-map-js": "^1.2.1" + } + }, + "node_modules/@vue/compiler-ssr": { + "version": "3.5.22", + "resolved": "https://registry.npmjs.org/@vue/compiler-ssr/-/compiler-ssr-3.5.22.tgz", + "integrity": "sha512-GdgyLvg4R+7T8Nk2Mlighx7XGxq/fJf9jaVofc3IL0EPesTE86cP/8DD1lT3h1JeZr2ySBvyqKQJgbS54IX1Ww==", + "license": "MIT", + "dependencies": { + "@vue/compiler-dom": "3.5.22", + "@vue/shared": "3.5.22" + } + }, + "node_modules/@vue/devtools-api": { + "version": "7.7.7", + "resolved": "https://registry.npmjs.org/@vue/devtools-api/-/devtools-api-7.7.7.tgz", + "integrity": "sha512-lwOnNBH2e7x1fIIbVT7yF5D+YWhqELm55/4ZKf45R9T8r9dE2AIOy8HKjfqzGsoTHFbWbr337O4E0A0QADnjBg==", + "license": "MIT", + "dependencies": { + "@vue/devtools-kit": "^7.7.7" + } + }, + "node_modules/@vue/devtools-kit": { + "version": "7.7.7", + "resolved": "https://registry.npmjs.org/@vue/devtools-kit/-/devtools-kit-7.7.7.tgz", + "integrity": "sha512-wgoZtxcTta65cnZ1Q6MbAfePVFxfM+gq0saaeytoph7nEa7yMXoi6sCPy4ufO111B9msnw0VOWjPEFCXuAKRHA==", + "license": "MIT", + "dependencies": { + "@vue/devtools-shared": "^7.7.7", + "birpc": "^2.3.0", + "hookable": "^5.5.3", + "mitt": "^3.0.1", + "perfect-debounce": "^1.0.0", + "speakingurl": "^14.0.1", + "superjson": "^2.2.2" + } + }, + "node_modules/@vue/devtools-shared": { + "version": "7.7.7", + "resolved": 
"https://registry.npmjs.org/@vue/devtools-shared/-/devtools-shared-7.7.7.tgz", + "integrity": "sha512-+udSj47aRl5aKb0memBvcUG9koarqnxNM5yjuREvqwK6T3ap4mn3Zqqc17QrBFTqSMjr3HK1cvStEZpMDpfdyw==", + "license": "MIT", + "dependencies": { + "rfdc": "^1.4.1" + } + }, + "node_modules/@vue/language-core": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/@vue/language-core/-/language-core-3.1.2.tgz", + "integrity": "sha512-PyFDCqpdfYUT+oMLqcc61oHfJlC6yjhybaefwQjRdkchIihToOEpJ2Wu/Ebq2yrnJdd1EsaAvZaXVAqcxtnDxQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@volar/language-core": "2.4.23", + "@vue/compiler-dom": "^3.5.0", + "@vue/shared": "^3.5.0", + "alien-signals": "^3.0.0", + "muggle-string": "^0.4.1", + "path-browserify": "^1.0.1", + "picomatch": "^4.0.2" + }, + "peerDependencies": { + "typescript": "*" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@vue/reactivity": { + "version": "3.5.22", + "resolved": "https://registry.npmjs.org/@vue/reactivity/-/reactivity-3.5.22.tgz", + "integrity": "sha512-f2Wux4v/Z2pqc9+4SmgZC1p73Z53fyD90NFWXiX9AKVnVBEvLFOWCEgJD3GdGnlxPZt01PSlfmLqbLYzY/Fw4A==", + "license": "MIT", + "dependencies": { + "@vue/shared": "3.5.22" + } + }, + "node_modules/@vue/runtime-core": { + "version": "3.5.22", + "resolved": "https://registry.npmjs.org/@vue/runtime-core/-/runtime-core-3.5.22.tgz", + "integrity": "sha512-EHo4W/eiYeAzRTN5PCextDUZ0dMs9I8mQ2Fy+OkzvRPUYQEyK9yAjbasrMCXbLNhF7P0OUyivLjIy0yc6VrLJQ==", + "license": "MIT", + "dependencies": { + "@vue/reactivity": "3.5.22", + "@vue/shared": "3.5.22" + } + }, + "node_modules/@vue/runtime-dom": { + "version": "3.5.22", + "resolved": "https://registry.npmjs.org/@vue/runtime-dom/-/runtime-dom-3.5.22.tgz", + "integrity": "sha512-Av60jsryAkI023PlN7LsqrfPvwfxOd2yAwtReCjeuugTJTkgrksYJJstg1e12qle0NarkfhfFu1ox2D+cQotww==", + "license": "MIT", + "dependencies": { + "@vue/reactivity": "3.5.22", + "@vue/runtime-core": "3.5.22", + 
"@vue/shared": "3.5.22", + "csstype": "^3.1.3" + } + }, + "node_modules/@vue/server-renderer": { + "version": "3.5.22", + "resolved": "https://registry.npmjs.org/@vue/server-renderer/-/server-renderer-3.5.22.tgz", + "integrity": "sha512-gXjo+ao0oHYTSswF+a3KRHZ1WszxIqO7u6XwNHqcqb9JfyIL/pbWrrh/xLv7jeDqla9u+LK7yfZKHih1e1RKAQ==", + "license": "MIT", + "dependencies": { + "@vue/compiler-ssr": "3.5.22", + "@vue/shared": "3.5.22" + }, + "peerDependencies": { + "vue": "3.5.22" + } + }, + "node_modules/@vue/shared": { + "version": "3.5.22", + "resolved": "https://registry.npmjs.org/@vue/shared/-/shared-3.5.22.tgz", + "integrity": "sha512-F4yc6palwq3TT0u+FYf0Ns4Tfl9GRFURDN2gWG7L1ecIaS/4fCIuFOjMTnCyjsu/OK6vaDKLCrGAa+KvvH+h4w==", + "license": "MIT" + }, + "node_modules/@vue/tsconfig": { + "version": "0.8.1", + "resolved": "https://registry.npmjs.org/@vue/tsconfig/-/tsconfig-0.8.1.tgz", + "integrity": "sha512-aK7feIWPXFSUhsCP9PFqPyFOcz4ENkb8hZ2pneL6m2UjCkccvaOhC/5KCKluuBufvp2KzkbdA2W2pk20vLzu3g==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "typescript": "5.x", + "vue": "^3.4.0" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + }, + "vue": { + "optional": true + } + } + }, + "node_modules/alien-signals": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/alien-signals/-/alien-signals-3.0.3.tgz", + "integrity": "sha512-2JXjom6R7ZwrISpUphLhf4htUq1aKRCennTJ6u9kFfr3sLmC9+I4CxxVi+McoFnIg+p1HnVrfLT/iCt4Dlz//Q==", + "dev": true, + "license": "MIT" + }, + "node_modules/asynckit": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==", + "license": "MIT" + }, + "node_modules/axios": { + "version": "1.13.1", + "resolved": "https://registry.npmjs.org/axios/-/axios-1.13.1.tgz", + "integrity": 
"sha512-hU4EGxxt+j7TQijx1oYdAjw4xuIp1wRQSsbMFwSthCWeBQur1eF+qJ5iQ5sN3Tw8YRzQNKb8jszgBdMDVqwJcw==", + "license": "MIT", + "dependencies": { + "follow-redirects": "^1.15.6", + "form-data": "^4.0.4", + "proxy-from-env": "^1.1.0" + } + }, + "node_modules/birpc": { + "version": "2.6.1", + "resolved": "https://registry.npmjs.org/birpc/-/birpc-2.6.1.tgz", + "integrity": "sha512-LPnFhlDpdSH6FJhJyn4M0kFO7vtQ5iPw24FnG0y21q09xC7e8+1LeR31S1MAIrDAHp4m7aas4bEkTDTvMAtebQ==", + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/antfu" + } + }, + "node_modules/call-bind-apply-helpers": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz", + "integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/combined-stream": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", + "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", + "license": "MIT", + "dependencies": { + "delayed-stream": "~1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/copy-anything": { + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/copy-anything/-/copy-anything-4.0.5.tgz", + "integrity": "sha512-7Vv6asjS4gMOuILabD3l739tsaxFQmC+a7pLZm02zyvs8p977bL3zEgq3yDk5rn9B0PbYgIv++jmHcuUab4RhA==", + "license": "MIT", + "dependencies": { + "is-what": "^5.2.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/mesqueeb" + } + }, + "node_modules/csstype": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.1.3.tgz", + "integrity": 
"sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw==", + "license": "MIT" + }, + "node_modules/delayed-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==", + "license": "MIT", + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/dunder-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", + "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==", + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.1", + "es-errors": "^1.3.0", + "gopd": "^1.2.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/entities": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz", + "integrity": "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==", + "license": "BSD-2-Clause", + "engines": { + "node": ">=0.12" + }, + "funding": { + "url": "https://github.com/fb55/entities?sponsor=1" + } + }, + "node_modules/es-define-property": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz", + "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-errors": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", + "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-object-atoms": { + "version": "1.1.1", + "resolved": 
"https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz", + "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-set-tostringtag": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz", + "integrity": "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.6", + "has-tostringtag": "^1.0.2", + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/esbuild": { + "version": "0.25.11", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.25.11.tgz", + "integrity": "sha512-KohQwyzrKTQmhXDW1PjCv3Tyspn9n5GcY2RTDqeORIdIJY8yKIF7sTSopFmn/wpMPW4rdPXI0UE5LJLuq3bx0Q==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "bin": { + "esbuild": "bin/esbuild" + }, + "engines": { + "node": ">=18" + }, + "optionalDependencies": { + "@esbuild/aix-ppc64": "0.25.11", + "@esbuild/android-arm": "0.25.11", + "@esbuild/android-arm64": "0.25.11", + "@esbuild/android-x64": "0.25.11", + "@esbuild/darwin-arm64": "0.25.11", + "@esbuild/darwin-x64": "0.25.11", + "@esbuild/freebsd-arm64": "0.25.11", + "@esbuild/freebsd-x64": "0.25.11", + "@esbuild/linux-arm": "0.25.11", + "@esbuild/linux-arm64": "0.25.11", + "@esbuild/linux-ia32": "0.25.11", + "@esbuild/linux-loong64": "0.25.11", + "@esbuild/linux-mips64el": "0.25.11", + "@esbuild/linux-ppc64": "0.25.11", + "@esbuild/linux-riscv64": "0.25.11", + "@esbuild/linux-s390x": "0.25.11", + "@esbuild/linux-x64": "0.25.11", + "@esbuild/netbsd-arm64": "0.25.11", + "@esbuild/netbsd-x64": "0.25.11", + "@esbuild/openbsd-arm64": "0.25.11", + "@esbuild/openbsd-x64": "0.25.11", + "@esbuild/openharmony-arm64": "0.25.11", 
+ "@esbuild/sunos-x64": "0.25.11", + "@esbuild/win32-arm64": "0.25.11", + "@esbuild/win32-ia32": "0.25.11", + "@esbuild/win32-x64": "0.25.11" + } + }, + "node_modules/estree-walker": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-2.0.2.tgz", + "integrity": "sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==", + "license": "MIT" + }, + "node_modules/fdir": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.5.0.tgz", + "integrity": "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12.0.0" + }, + "peerDependencies": { + "picomatch": "^3 || ^4" + }, + "peerDependenciesMeta": { + "picomatch": { + "optional": true + } + } + }, + "node_modules/follow-redirects": { + "version": "1.15.11", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.11.tgz", + "integrity": "sha512-deG2P0JfjrTxl50XGCDyfI97ZGVCxIpfKYmfyrQ54n5FO/0gfIES8C/Psl6kWVDolizcaaxZJnTS0QSMxvnsBQ==", + "funding": [ + { + "type": "individual", + "url": "https://github.com/sponsors/RubenVerborgh" + } + ], + "license": "MIT", + "engines": { + "node": ">=4.0" + }, + "peerDependenciesMeta": { + "debug": { + "optional": true + } + } + }, + "node_modules/form-data": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.4.tgz", + "integrity": "sha512-KrGhL9Q4zjj0kiUt5OO4Mr/A/jlI2jDYs5eHBpYHPcBEVSiipAvn2Ko2HnPe20rmcuuvMHNdZFp+4IlGTMF0Ow==", + "license": "MIT", + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "es-set-tostringtag": "^2.1.0", + "hasown": "^2.0.2", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/fsevents": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "integrity": 
"sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/function-bind": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-intrinsic": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz", + "integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==", + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.2", + "es-define-property": "^1.0.1", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.1.1", + "function-bind": "^1.1.2", + "get-proto": "^1.0.1", + "gopd": "^1.2.0", + "has-symbols": "^1.1.0", + "hasown": "^2.0.2", + "math-intrinsics": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz", + "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==", + "license": "MIT", + "dependencies": { + "dunder-proto": "^1.0.1", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/gopd": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", + "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + 
"url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-symbols": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz", + "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-tostringtag": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz", + "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==", + "license": "MIT", + "dependencies": { + "has-symbols": "^1.0.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/hasown": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", + "license": "MIT", + "dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/hookable": { + "version": "5.5.3", + "resolved": "https://registry.npmjs.org/hookable/-/hookable-5.5.3.tgz", + "integrity": "sha512-Yc+BQe8SvoXH1643Qez1zqLRmbA5rCL+sSmk6TVos0LWVfNIB7PGncdlId77WzLGSIB5KaWgTaNTs2lNVEI6VQ==", + "license": "MIT" + }, + "node_modules/is-what": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/is-what/-/is-what-5.5.0.tgz", + "integrity": "sha512-oG7cgbmg5kLYae2N5IVd3jm2s+vldjxJzK1pcu9LfpGuQ93MQSzo0okvRna+7y5ifrD+20FE8FvjusyGaz14fw==", + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/mesqueeb" + } + }, + "node_modules/magic-string": { + "version": "0.30.21", + "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.21.tgz", + 
"integrity": "sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ==", + "license": "MIT", + "dependencies": { + "@jridgewell/sourcemap-codec": "^1.5.5" + } + }, + "node_modules/math-intrinsics": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", + "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/mime-db": { + "version": "1.52.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "license": "MIT", + "dependencies": { + "mime-db": "1.52.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mitt": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/mitt/-/mitt-3.0.1.tgz", + "integrity": "sha512-vKivATfr97l2/QBCYAkXYDbrIWPM2IIKEl7YPhjCvKlG3kE2gm+uBo6nEXK3M5/Ffh/FLpKExzOQ3JJoJGFKBw==", + "license": "MIT" + }, + "node_modules/muggle-string": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/muggle-string/-/muggle-string-0.4.1.tgz", + "integrity": "sha512-VNTrAak/KhO2i8dqqnqnAHOa3cYBwXEZe9h+D5h/1ZqFSTEFHdM65lR7RoIqq3tBBYavsOXV84NoHXZ0AkPyqQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/nanoid": { + "version": "3.3.11", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.11.tgz", + "integrity": "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==", + "funding": [ + { + "type": 
"github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "bin": { + "nanoid": "bin/nanoid.cjs" + }, + "engines": { + "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" + } + }, + "node_modules/path-browserify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-browserify/-/path-browserify-1.0.1.tgz", + "integrity": "sha512-b7uo2UCUOYZcnF/3ID0lulOJi/bafxa1xPe7ZPsammBSpjSWQkjNxlt635YGS2MiR9GjvuXCtz2emr3jbsz98g==", + "dev": true, + "license": "MIT" + }, + "node_modules/perfect-debounce": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/perfect-debounce/-/perfect-debounce-1.0.0.tgz", + "integrity": "sha512-xCy9V055GLEqoFaHoC1SoLIaLmWctgCUaBaWxDZ7/Zx4CTyX7cJQLJOok/orfjZAh9kEYpjJa4d0KcJmCbctZA==", + "license": "MIT" + }, + "node_modules/picocolors": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", + "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", + "license": "ISC" + }, + "node_modules/picomatch": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", + "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "dev": true, + "license": "MIT", + "peer": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/pinia": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/pinia/-/pinia-3.0.3.tgz", + "integrity": "sha512-ttXO/InUULUXkMHpTdp9Fj4hLpD/2AoJdmAbAeW2yu1iy1k+pkFekQXw5VpC0/5p51IOR/jDaDRfRWRnMMsGOA==", + "license": "MIT", + "dependencies": { + "@vue/devtools-api": "^7.7.2" + }, + "funding": { + "url": "https://github.com/sponsors/posva" + }, + "peerDependencies": { + "typescript": ">=4.4.4", + "vue": "^2.7.0 || ^3.5.11" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + 
"node_modules/postcss": { + "version": "8.5.6", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.6.tgz", + "integrity": "sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg==", + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/postcss" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "nanoid": "^3.3.11", + "picocolors": "^1.1.1", + "source-map-js": "^1.2.1" + }, + "engines": { + "node": "^10 || ^12 || >=14" + } + }, + "node_modules/proxy-from-env": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz", + "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==", + "license": "MIT" + }, + "node_modules/rfdc": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/rfdc/-/rfdc-1.4.1.tgz", + "integrity": "sha512-q1b3N5QkRUWUl7iyylaaj3kOpIT0N2i9MqIEQXP73GVsN9cw3fdx8X63cEmWhJGi2PPCF23Ijp7ktmd39rawIA==", + "license": "MIT" + }, + "node_modules/rollup": { + "version": "4.52.5", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.52.5.tgz", + "integrity": "sha512-3GuObel8h7Kqdjt0gxkEzaifHTqLVW56Y/bjN7PSQtkKr0w3V/QYSdt6QWYtd7A1xUtYQigtdUfgj1RvWVtorw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/estree": "1.0.8" + }, + "bin": { + "rollup": "dist/bin/rollup" + }, + "engines": { + "node": ">=18.0.0", + "npm": ">=8.0.0" + }, + "optionalDependencies": { + "@rollup/rollup-android-arm-eabi": "4.52.5", + "@rollup/rollup-android-arm64": "4.52.5", + "@rollup/rollup-darwin-arm64": "4.52.5", + "@rollup/rollup-darwin-x64": "4.52.5", + "@rollup/rollup-freebsd-arm64": "4.52.5", + "@rollup/rollup-freebsd-x64": "4.52.5", + "@rollup/rollup-linux-arm-gnueabihf": "4.52.5", + 
"@rollup/rollup-linux-arm-musleabihf": "4.52.5", + "@rollup/rollup-linux-arm64-gnu": "4.52.5", + "@rollup/rollup-linux-arm64-musl": "4.52.5", + "@rollup/rollup-linux-loong64-gnu": "4.52.5", + "@rollup/rollup-linux-ppc64-gnu": "4.52.5", + "@rollup/rollup-linux-riscv64-gnu": "4.52.5", + "@rollup/rollup-linux-riscv64-musl": "4.52.5", + "@rollup/rollup-linux-s390x-gnu": "4.52.5", + "@rollup/rollup-linux-x64-gnu": "4.52.5", + "@rollup/rollup-linux-x64-musl": "4.52.5", + "@rollup/rollup-openharmony-arm64": "4.52.5", + "@rollup/rollup-win32-arm64-msvc": "4.52.5", + "@rollup/rollup-win32-ia32-msvc": "4.52.5", + "@rollup/rollup-win32-x64-gnu": "4.52.5", + "@rollup/rollup-win32-x64-msvc": "4.52.5", + "fsevents": "~2.3.2" + } + }, + "node_modules/source-map-js": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz", + "integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==", + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/speakingurl": { + "version": "14.0.1", + "resolved": "https://registry.npmjs.org/speakingurl/-/speakingurl-14.0.1.tgz", + "integrity": "sha512-1POYv7uv2gXoyGFpBCmpDVSNV74IfsWlDW216UPjbWufNf+bSU6GdbDsxdcxtfwb4xlI3yxzOTKClUosxARYrQ==", + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/superjson": { + "version": "2.2.5", + "resolved": "https://registry.npmjs.org/superjson/-/superjson-2.2.5.tgz", + "integrity": "sha512-zWPTX96LVsA/eVYnqOM2+ofcdPqdS1dAF1LN4TS2/MWuUpfitd9ctTa87wt4xrYnZnkLtS69xpBdSxVBP5Rm6w==", + "license": "MIT", + "dependencies": { + "copy-anything": "^4" + }, + "engines": { + "node": ">=16" + } + }, + "node_modules/tinyglobby": { + "version": "0.2.15", + "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.15.tgz", + "integrity": "sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ==", + "dev": true, + 
"license": "MIT", + "dependencies": { + "fdir": "^6.5.0", + "picomatch": "^4.0.3" + }, + "engines": { + "node": ">=12.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/SuperchupuDev" + } + }, + "node_modules/typescript": { + "version": "5.9.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.9.3.tgz", + "integrity": "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==", + "devOptional": true, + "license": "Apache-2.0", + "peer": true, + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, + "node_modules/undici-types": { + "version": "7.16.0", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.16.0.tgz", + "integrity": "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw==", + "dev": true, + "license": "MIT" + }, + "node_modules/vite": { + "version": "7.1.12", + "resolved": "https://registry.npmjs.org/vite/-/vite-7.1.12.tgz", + "integrity": "sha512-ZWyE8YXEXqJrrSLvYgrRP7p62OziLW7xI5HYGWFzOvupfAlrLvURSzv/FyGyy0eidogEM3ujU+kUG1zuHgb6Ug==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "esbuild": "^0.25.0", + "fdir": "^6.5.0", + "picomatch": "^4.0.3", + "postcss": "^8.5.6", + "rollup": "^4.43.0", + "tinyglobby": "^0.2.15" + }, + "bin": { + "vite": "bin/vite.js" + }, + "engines": { + "node": "^20.19.0 || >=22.12.0" + }, + "funding": { + "url": "https://github.com/vitejs/vite?sponsor=1" + }, + "optionalDependencies": { + "fsevents": "~2.3.3" + }, + "peerDependencies": { + "@types/node": "^20.19.0 || >=22.12.0", + "jiti": ">=1.21.0", + "less": "^4.0.0", + "lightningcss": "^1.21.0", + "sass": "^1.70.0", + "sass-embedded": "^1.70.0", + "stylus": ">=0.54.8", + "sugarss": "^5.0.0", + "terser": "^5.16.0", + "tsx": "^4.8.1", + "yaml": "^2.4.2" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + }, + "jiti": { + "optional": true + }, + 
"less": { + "optional": true + }, + "lightningcss": { + "optional": true + }, + "sass": { + "optional": true + }, + "sass-embedded": { + "optional": true + }, + "stylus": { + "optional": true + }, + "sugarss": { + "optional": true + }, + "terser": { + "optional": true + }, + "tsx": { + "optional": true + }, + "yaml": { + "optional": true + } + } + }, + "node_modules/vscode-uri": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/vscode-uri/-/vscode-uri-3.1.0.tgz", + "integrity": "sha512-/BpdSx+yCQGnCvecbyXdxHDkuk55/G3xwnC0GqY4gmQ3j+A+g8kzzgB4Nk/SINjqn6+waqw3EgbVF2QKExkRxQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/vue": { + "version": "3.5.22", + "resolved": "https://registry.npmjs.org/vue/-/vue-3.5.22.tgz", + "integrity": "sha512-toaZjQ3a/G/mYaLSbV+QsQhIdMo9x5rrqIpYRObsJ6T/J+RyCSFwN2LHNVH9v8uIcljDNa3QzPVdv3Y6b9hAJQ==", + "license": "MIT", + "peer": true, + "dependencies": { + "@vue/compiler-dom": "3.5.22", + "@vue/compiler-sfc": "3.5.22", + "@vue/runtime-dom": "3.5.22", + "@vue/server-renderer": "3.5.22", + "@vue/shared": "3.5.22" + }, + "peerDependencies": { + "typescript": "*" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/vue-router": { + "version": "4.6.3", + "resolved": "https://registry.npmjs.org/vue-router/-/vue-router-4.6.3.tgz", + "integrity": "sha512-ARBedLm9YlbvQomnmq91Os7ck6efydTSpRP3nuOKCvgJOHNrhRoJDSKtee8kcL1Vf7nz6U+PMBL+hTvR3bTVQg==", + "license": "MIT", + "dependencies": { + "@vue/devtools-api": "^6.6.4" + }, + "funding": { + "url": "https://github.com/sponsors/posva" + }, + "peerDependencies": { + "vue": "^3.5.0" + } + }, + "node_modules/vue-router/node_modules/@vue/devtools-api": { + "version": "6.6.4", + "resolved": "https://registry.npmjs.org/@vue/devtools-api/-/devtools-api-6.6.4.tgz", + "integrity": "sha512-sGhTPMuXqZ1rVOk32RylztWkfXTRhuS7vgAKv0zjqk8gbsHkJ7xfFf+jbySxt7tWObEJwyKaHMikV/WGDiQm8g==", + "license": "MIT" + }, + "node_modules/vue-tsc": { + "version": 
"3.1.2", + "resolved": "https://registry.npmjs.org/vue-tsc/-/vue-tsc-3.1.2.tgz", + "integrity": "sha512-3fd4DY0rFczs5f+VB3OhcLU83V6+3Puj2yLBe0Ak65k7ERk+STVNKaOAi0EBo6Lc15UiJB6LzU6Mxy4+h/pKew==", + "dev": true, + "license": "MIT", + "dependencies": { + "@volar/typescript": "2.4.23", + "@vue/language-core": "3.1.2" + }, + "bin": { + "vue-tsc": "bin/vue-tsc.js" + }, + "peerDependencies": { + "typescript": ">=5.0.0" + } + } + } +} diff --git a/frontend/package.json b/frontend/package.json new file mode 100644 index 0000000..e1f1a7e --- /dev/null +++ b/frontend/package.json @@ -0,0 +1,25 @@ +{ + "name": "frontend", + "private": true, + "version": "0.0.0", + "type": "module", + "scripts": { + "dev": "vite", + "build": "vue-tsc -b && vite build", + "preview": "vite preview" + }, + "dependencies": { + "axios": "^1.13.1", + "pinia": "^3.0.3", + "vue": "^3.5.22", + "vue-router": "^4.6.3" + }, + "devDependencies": { + "@types/node": "^24.6.0", + "@vitejs/plugin-vue": "^6.0.1", + "@vue/tsconfig": "^0.8.1", + "typescript": "~5.9.3", + "vite": "^7.1.7", + "vue-tsc": "^3.1.0" + } +} diff --git a/frontend/public/vite.svg b/frontend/public/vite.svg new file mode 100644 index 0000000..e7b8dfb --- /dev/null +++ b/frontend/public/vite.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/frontend/src/App.vue b/frontend/src/App.vue new file mode 100644 index 0000000..81aa886 --- /dev/null +++ b/frontend/src/App.vue @@ -0,0 +1,199 @@ + + + + + diff --git a/frontend/src/assets/vue.svg b/frontend/src/assets/vue.svg new file mode 100644 index 0000000..770e9d3 --- /dev/null +++ b/frontend/src/assets/vue.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/frontend/src/components/ConfirmDialog.vue b/frontend/src/components/ConfirmDialog.vue new file mode 100644 index 0000000..7b1d46b --- /dev/null +++ b/frontend/src/components/ConfirmDialog.vue @@ -0,0 +1,189 @@ + + + + + diff --git a/frontend/src/components/EditItemModal.vue b/frontend/src/components/EditItemModal.vue new 
file mode 100644 index 0000000..a426f92 --- /dev/null +++ b/frontend/src/components/EditItemModal.vue @@ -0,0 +1,452 @@ + + + + + diff --git a/frontend/src/components/HelloWorld.vue b/frontend/src/components/HelloWorld.vue new file mode 100644 index 0000000..b58e52b --- /dev/null +++ b/frontend/src/components/HelloWorld.vue @@ -0,0 +1,41 @@ + + + + + diff --git a/frontend/src/components/InventoryList.vue b/frontend/src/components/InventoryList.vue new file mode 100644 index 0000000..5da5e0b --- /dev/null +++ b/frontend/src/components/InventoryList.vue @@ -0,0 +1,1281 @@ + + + + + diff --git a/frontend/src/components/ReceiptsView.vue b/frontend/src/components/ReceiptsView.vue new file mode 100644 index 0000000..9aeb495 --- /dev/null +++ b/frontend/src/components/ReceiptsView.vue @@ -0,0 +1,454 @@ + + + + + diff --git a/frontend/src/components/ToastNotification.vue b/frontend/src/components/ToastNotification.vue new file mode 100644 index 0000000..1681b4e --- /dev/null +++ b/frontend/src/components/ToastNotification.vue @@ -0,0 +1,252 @@ + + + + + diff --git a/frontend/src/main.ts b/frontend/src/main.ts new file mode 100644 index 0000000..84ae29b --- /dev/null +++ b/frontend/src/main.ts @@ -0,0 +1,11 @@ +import { createApp } from 'vue' +import { createPinia } from 'pinia' +import './style.css' +import './theme.css' +import App from './App.vue' + +const app = createApp(App) +const pinia = createPinia() + +app.use(pinia) +app.mount('#app') diff --git a/frontend/src/services/api.ts b/frontend/src/services/api.ts new file mode 100644 index 0000000..02fc61a --- /dev/null +++ b/frontend/src/services/api.ts @@ -0,0 +1,407 @@ +/** + * API Service for Kiwi Backend + * + * VITE_API_BASE is baked in at build time: + * dev: '' (empty โ€” proxy in vite.config.ts handles /api/) + * cloud: '/kiwi' (Caddy strips /kiwi and forwards to nginx, which proxies /api/ โ†’ api container) + */ + +import axios, { type AxiosInstance } from 'axios' + +// API Configuration +const API_BASE_URL = 
(import.meta.env.VITE_API_BASE ?? '') + '/api/v1' + +// Create axios instance +const api: AxiosInstance = axios.create({ + baseURL: API_BASE_URL, + headers: { + 'Content-Type': 'application/json', + }, + timeout: 30000, // 30 seconds +}) + +// Request interceptor for logging +api.interceptors.request.use( + (config) => { + console.log(`[API] ${config.method?.toUpperCase()} ${config.baseURL}${config.url}`, { + params: config.params, + data: config.data instanceof FormData ? '' : config.data, + }) + return config + }, + (error) => { + console.error('[API Request Error]', error) + return Promise.reject(error) + } +) + +// Response interceptor for error handling +api.interceptors.response.use( + (response) => { + console.log(`[API] โœ“ ${response.status} ${response.config.method?.toUpperCase()} ${response.config.url}`) + return response + }, + (error) => { + console.error('[API Error]', { + message: error.message, + url: error.config?.url, + method: error.config?.method?.toUpperCase(), + status: error.response?.status, + statusText: error.response?.statusText, + data: error.response?.data, + baseURL: error.config?.baseURL, + }) + return Promise.reject(error) + } +) + +// ========== Types ========== + +export interface Product { + id: string + barcode: string | null + name: string + brand: string | null + category: string | null + description: string | null + image_url: string | null + nutrition_data: Record + source: string + tags: Tag[] +} + +export interface Tag { + id: string + name: string + slug: string + description: string | null + color: string | null + category: string | null +} + +export interface InventoryItem { + id: string + product_id: string + product: Product + quantity: number + unit: string + location: string + sublocation: string | null + purchase_date: string | null + expiration_date: string | null + status: string + source: string + notes: string | null + created_at: string + updated_at: string +} + +export interface InventoryItemUpdate { + 
quantity?: number + unit?: string + location?: string + sublocation?: string | null + purchase_date?: string | null + expiration_date?: string | null + status?: string + notes?: string | null +} + +export interface InventoryStats { + total_items: number + total_products: number + expiring_soon: number + expired: number + items_by_location: Record + items_by_status: Record +} + +export interface Receipt { + id: string + filename: string + status: string + metadata: Record + quality_score: number | null +} + +export interface ReceiptOCRData { + id: string + receipt_id: string + merchant: { + name: string | null + address: string | null + phone: string | null + } + transaction: { + date: string | null + time: string | null + receipt_number: string | null + register: string | null + cashier: string | null + } + items: Array<{ + name: string + quantity: number + unit_price: number | null + total_price: number + category: string | null + }> + totals: { + subtotal: number | null + tax: number | null + total: number | null + payment_method: string | null + } + confidence: Record + warnings: string[] + processing_time: number | null + created_at: string +} + +// ========== Inventory API ========== + +export const inventoryAPI = { + /** + * List all inventory items + */ + async listItems(params?: { + location?: string + status?: string + limit?: number + offset?: number + }): Promise { + const response = await api.get('/inventory/items', { params }) + return response.data + }, + + /** + * Get a single inventory item + */ + async getItem(itemId: string): Promise { + const response = await api.get(`/inventory/items/${itemId}`) + return response.data + }, + + /** + * Update an inventory item + */ + async updateItem(itemId: string, update: InventoryItemUpdate): Promise { + const response = await api.patch(`/inventory/items/${itemId}`, update) + return response.data + }, + + /** + * Delete an inventory item + */ + async deleteItem(itemId: string): Promise { + await 
api.delete(`/inventory/items/${itemId}`) + }, + + /** + * Get inventory statistics + */ + async getStats(): Promise { + const response = await api.get('/inventory/stats') + return response.data + }, + + /** + * Get items expiring soon + */ + async getExpiring(days: number = 7): Promise { + const response = await api.get(`/inventory/expiring?days=${days}`) + return response.data + }, + + /** + * Scan barcode from text + */ + async scanBarcodeText( + barcode: string, + location: string = 'pantry', + quantity: number = 1.0, + autoAdd: boolean = true + ): Promise { + const response = await api.post('/inventory/scan/text', { + barcode, + location, + quantity, + auto_add_to_inventory: autoAdd, + }) + return response.data + }, + + /** + * Mark item as consumed + */ + async consumeItem(itemId: string): Promise { + await api.post(`/inventory/items/${itemId}/consume`) + }, + + /** + * Create a new product + */ + async createProduct(data: { + name: string + brand?: string + source?: string + }): Promise { + const response = await api.post('/inventory/products', data) + return response.data + }, + + /** + * Create a new inventory item + */ + async createItem(data: { + product_id: string + quantity: number + unit?: string + location: string + expiration_date?: string + source?: string + }): Promise { + const response = await api.post('/inventory/items', data) + return response.data + }, + + /** + * Scan barcode from image + */ + async scanBarcodeImage( + file: File, + location: string = 'pantry', + quantity: number = 1.0, + autoAdd: boolean = true + ): Promise { + const formData = new FormData() + formData.append('file', file) + formData.append('location', location) + formData.append('quantity', quantity.toString()) + formData.append('auto_add_to_inventory', autoAdd.toString()) + + const response = await api.post('/inventory/scan', formData, { + headers: { + 'Content-Type': 'multipart/form-data', + }, + }) + return response.data + }, +} + +// ========== Receipts API ========== 
+ +export const receiptsAPI = { + /** + * List all receipts + */ + async listReceipts(): Promise { + const response = await api.get('/receipts/') + return response.data + }, + + /** + * Get a single receipt + */ + async getReceipt(receiptId: string): Promise { + const response = await api.get(`/receipts/${receiptId}`) + return response.data + }, + + /** + * Upload a receipt + */ + async upload(file: File): Promise { + const formData = new FormData() + formData.append('file', file) + + const response = await api.post('/receipts/', formData, { + headers: { + 'Content-Type': 'multipart/form-data', + }, + }) + return response.data + }, + + /** + * Get receipt statistics + */ + async getStats(): Promise { + const response = await api.get('/export/stats') + return response.data + }, + + /** + * Get OCR data for a receipt + */ + async getOCRData(receiptId: string): Promise { + const response = await api.get(`/receipts/${receiptId}/ocr/data`) + return response.data + }, + + /** + * Get OCR status for a receipt + */ + async getOCRStatus(receiptId: string): Promise { + const response = await api.get(`/receipts/${receiptId}/ocr/status`) + return response.data + }, + + /** + * Trigger OCR processing + */ + async triggerOCR(receiptId: string, forceReprocess: boolean = false): Promise { + const response = await api.post(`/receipts/${receiptId}/ocr/trigger`, { + force_reprocess: forceReprocess, + }) + return response.data + }, +} + +// ========== Export API ========== + +export const exportAPI = { + /** + * Get export statistics + */ + async getStats(): Promise { + const response = await api.get('/export/stats') + return response.data + }, + + /** + * Download inventory CSV + */ + getInventoryCSVUrl(location?: string, status: string = 'available'): string { + const params = new URLSearchParams() + if (location) params.append('location', location) + params.append('status', status) + return `${API_BASE_URL}/export/inventory/csv?${params.toString()}` + }, + + /** + * Download 
inventory Excel + */ + getInventoryExcelUrl(location?: string, status: string = 'available'): string { + const params = new URLSearchParams() + if (location) params.append('location', location) + params.append('status', status) + return `${API_BASE_URL}/export/inventory/excel?${params.toString()}` + }, + + /** + * Download receipts CSV + */ + getReceiptsCSVUrl(): string { + return `${API_BASE_URL}/export/csv` + }, + + /** + * Download receipts Excel + */ + getReceiptsExcelUrl(): string { + return `${API_BASE_URL}/export/excel` + }, +} + +export default api diff --git a/frontend/src/stores/inventory.ts b/frontend/src/stores/inventory.ts new file mode 100644 index 0000000..fe119f5 --- /dev/null +++ b/frontend/src/stores/inventory.ts @@ -0,0 +1,173 @@ +/** + * Inventory Store + * + * Manages inventory items, products, and related state using Pinia. + */ + +import { defineStore } from 'pinia' +import { ref, computed } from 'vue' +import { inventoryAPI, type InventoryItem, type InventoryStats, type InventoryItemUpdate } from '../services/api' + +export const useInventoryStore = defineStore('inventory', () => { + // State + const items = ref([]) + const stats = ref(null) + const loading = ref(false) + const error = ref(null) + + // Filter state + const locationFilter = ref('all') + const statusFilter = ref('available') + + // Computed + const filteredItems = computed(() => { + return items.value.filter((item) => { + const matchesLocation = locationFilter.value === 'all' || item.location === locationFilter.value + const matchesStatus = statusFilter.value === 'all' || item.status === statusFilter.value + return matchesLocation && matchesStatus + }) + }) + + const expiringItems = computed(() => { + const today = new Date() + const weekFromNow = new Date(today.getTime() + 7 * 24 * 60 * 60 * 1000) + + return items.value.filter((item) => { + if (!item.expiration_date || item.status !== 'available') return false + const expiryDate = new Date(item.expiration_date) + return 
expiryDate >= today && expiryDate <= weekFromNow + }) + }) + + const expiredItems = computed(() => { + const today = new Date() + + return items.value.filter((item) => { + if (!item.expiration_date || item.status !== 'available') return false + const expiryDate = new Date(item.expiration_date) + return expiryDate < today + }) + }) + + // Actions + async function fetchItems() { + loading.value = true + error.value = null + + try { + items.value = await inventoryAPI.listItems({ + status: statusFilter.value === 'all' ? undefined : statusFilter.value, + location: locationFilter.value === 'all' ? undefined : locationFilter.value, + limit: 1000, + }) + } catch (err: any) { + error.value = err.response?.data?.detail || 'Failed to fetch inventory items' + console.error('Error fetching inventory:', err) + } finally { + loading.value = false + } + } + + async function fetchStats() { + try { + stats.value = await inventoryAPI.getStats() + } catch (err: any) { + console.error('Error fetching stats:', err) + } + } + + async function updateItem(itemId: string, update: InventoryItemUpdate) { + loading.value = true + error.value = null + + try { + const updatedItem = await inventoryAPI.updateItem(itemId, update) + + // Update in local state + const index = items.value.findIndex((item) => item.id === itemId) + if (index !== -1) { + items.value[index] = updatedItem + } + + return updatedItem + } catch (err: any) { + error.value = err.response?.data?.detail || 'Failed to update item' + console.error('Error updating item:', err) + throw err + } finally { + loading.value = false + } + } + + async function deleteItem(itemId: string) { + loading.value = true + error.value = null + + try { + await inventoryAPI.deleteItem(itemId) + + // Remove from local state + items.value = items.value.filter((item) => item.id !== itemId) + } catch (err: any) { + error.value = err.response?.data?.detail || 'Failed to delete item' + console.error('Error deleting item:', err) + throw err + } finally { + 
loading.value = false + } + } + + async function scanBarcode(barcode: string, location: string = 'pantry', quantity: number = 1) { + loading.value = true + error.value = null + + try { + const result = await inventoryAPI.scanBarcodeText(barcode, location, quantity, true) + + // Refresh items after successful scan + if (result.success) { + await fetchItems() + } + + return result + } catch (err: any) { + error.value = err.response?.data?.detail || 'Failed to scan barcode' + console.error('Error scanning barcode:', err) + throw err + } finally { + loading.value = false + } + } + + function setLocationFilter(location: string) { + locationFilter.value = location + } + + function setStatusFilter(status: string) { + statusFilter.value = status + } + + return { + // State + items, + stats, + loading, + error, + locationFilter, + statusFilter, + + // Computed + filteredItems, + expiringItems, + expiredItems, + + // Actions + fetchItems, + fetchStats, + updateItem, + deleteItem, + scanBarcode, + setLocationFilter, + setStatusFilter, + } +}) diff --git a/frontend/src/style.css b/frontend/src/style.css new file mode 100644 index 0000000..a5d7160 --- /dev/null +++ b/frontend/src/style.css @@ -0,0 +1,255 @@ +:root { + font-family: system-ui, Avenir, Helvetica, Arial, sans-serif; + line-height: 1.5; + font-weight: 400; + + color-scheme: light dark; + + font-synthesis: none; + text-rendering: optimizeLegibility; + -webkit-font-smoothing: antialiased; + -moz-osx-font-smoothing: grayscale; + + /* Theme Colors - Dark Mode (Default) */ + --color-text-primary: rgba(255, 255, 255, 0.87); + --color-text-secondary: rgba(255, 255, 255, 0.6); + --color-text-muted: rgba(255, 255, 255, 0.4); + + --color-bg-primary: #242424; + --color-bg-secondary: #1a1a1a; + --color-bg-elevated: #2d2d2d; + --color-bg-card: #2d2d2d; + --color-bg-input: #1a1a1a; + + --color-border: rgba(255, 255, 255, 0.1); + --color-border-focus: rgba(255, 255, 255, 0.2); + + /* Brand Colors */ + --color-primary: #667eea; + 
--color-primary-dark: #5568d3; + --color-primary-light: #7d8ff0; + --color-secondary: #764ba2; + + /* Status Colors */ + --color-success: #4CAF50; + --color-success-dark: #45a049; + --color-success-light: #66bb6a; + --color-success-bg: rgba(76, 175, 80, 0.1); + --color-success-border: rgba(76, 175, 80, 0.3); + + --color-warning: #ff9800; + --color-warning-dark: #f57c00; + --color-warning-light: #ffb74d; + --color-warning-bg: rgba(255, 152, 0, 0.1); + --color-warning-border: rgba(255, 152, 0, 0.3); + + --color-error: #f44336; + --color-error-dark: #d32f2f; + --color-error-light: #ff6b6b; + --color-error-bg: rgba(244, 67, 54, 0.1); + --color-error-border: rgba(244, 67, 54, 0.3); + + --color-info: #2196F3; + --color-info-dark: #1976D2; + --color-info-light: #64b5f6; + --color-info-bg: rgba(33, 150, 243, 0.1); + --color-info-border: rgba(33, 150, 243, 0.3); + + /* Gradient */ + --gradient-primary: linear-gradient(135deg, var(--color-primary) 0%, var(--color-secondary) 100%); + + /* Shadows */ + --shadow-sm: 0 1px 3px rgba(0, 0, 0, 0.3); + --shadow-md: 0 4px 6px rgba(0, 0, 0, 0.3); + --shadow-lg: 0 10px 20px rgba(0, 0, 0, 0.4); + --shadow-xl: 0 20px 40px rgba(0, 0, 0, 0.5); + + /* Typography */ + --font-size-xs: 12px; + --font-size-sm: 14px; + --font-size-base: 16px; + --font-size-lg: 18px; + --font-size-xl: 24px; + --font-size-2xl: 32px; + + /* Spacing */ + --spacing-xs: 4px; + --spacing-sm: 8px; + --spacing-md: 16px; + --spacing-lg: 24px; + --spacing-xl: 32px; + + /* Border Radius */ + --radius-sm: 4px; + --radius-md: 6px; + --radius-lg: 8px; + --radius-xl: 12px; + + color: var(--color-text-primary); + background-color: var(--color-bg-primary); +} + +a { + font-weight: 500; + color: #646cff; + text-decoration: inherit; +} +a:hover { + color: #535bf2; +} + +body { + margin: 0; + display: flex; + place-items: center; + min-width: 320px; + min-height: 100vh; +} + +h1 { + font-size: 3.2em; + line-height: 1.1; +} + +button { + border-radius: 8px; + border: 1px solid 
transparent; + padding: 0.6em 1.2em; + font-size: 1em; + font-weight: 500; + font-family: inherit; + background-color: #1a1a1a; + cursor: pointer; + transition: border-color 0.25s; +} +button:hover { + border-color: #646cff; +} +button:focus, +button:focus-visible { + outline: 4px auto -webkit-focus-ring-color; +} + +.card { + padding: 2em; +} + +#app { + max-width: 1280px; + margin: 0 auto; + padding: 2rem; + text-align: center; +} + +@media (prefers-color-scheme: light) { + :root { + /* Theme Colors - Light Mode */ + --color-text-primary: #213547; + --color-text-secondary: #666; + --color-text-muted: #999; + + --color-bg-primary: #f5f5f5; + --color-bg-secondary: #ffffff; + --color-bg-elevated: #ffffff; + --color-bg-card: #ffffff; + --color-bg-input: #ffffff; + + --color-border: #ddd; + --color-border-focus: #ccc; + + /* Status colors stay the same in light mode */ + /* But we adjust backgrounds for better contrast */ + --color-success-bg: #d4edda; + --color-success-border: #c3e6cb; + + --color-warning-bg: #fff3cd; + --color-warning-border: #ffeaa7; + + --color-error-bg: #f8d7da; + --color-error-border: #f5c6cb; + + --color-info-bg: #d1ecf1; + --color-info-border: #bee5eb; + + /* Shadows for light mode (lighter) */ + --shadow-sm: 0 1px 3px rgba(0, 0, 0, 0.1); + --shadow-md: 0 4px 6px rgba(0, 0, 0, 0.1); + --shadow-lg: 0 10px 20px rgba(0, 0, 0, 0.15); + --shadow-xl: 0 20px 40px rgba(0, 0, 0, 0.2); + + color: var(--color-text-primary); + background-color: var(--color-bg-primary); + } + + a:hover { + color: #747bff; + } + + button { + background-color: #f9f9f9; + } +} + +/* Mobile Responsive Typography and Spacing */ +@media (max-width: 480px) { + :root { + /* Reduce font sizes for mobile */ + --font-size-xs: 11px; + --font-size-sm: 13px; + --font-size-base: 14px; + --font-size-lg: 16px; + --font-size-xl: 20px; + --font-size-2xl: 24px; + + /* Reduce spacing for mobile */ + --spacing-xs: 4px; + --spacing-sm: 6px; + --spacing-md: 12px; + --spacing-lg: 16px; + 
--spacing-xl: 20px; + + /* Reduce shadows for mobile */ + --shadow-sm: 0 1px 2px rgba(0, 0, 0, 0.2); + --shadow-md: 0 2px 4px rgba(0, 0, 0, 0.2); + --shadow-lg: 0 4px 8px rgba(0, 0, 0, 0.3); + --shadow-xl: 0 8px 16px rgba(0, 0, 0, 0.4); + } + + h1 { + font-size: 2em; + } + + .card { + padding: 1em; + } + + #app { + padding: 1rem; + } +} + +@media (min-width: 481px) and (max-width: 768px) { + :root { + /* Slightly reduced sizes for tablets */ + --font-size-base: 15px; + --font-size-lg: 17px; + --font-size-xl: 22px; + --font-size-2xl: 28px; + + --spacing-md: 14px; + --spacing-lg: 20px; + --spacing-xl: 28px; + } + + h1 { + font-size: 2.5em; + } + + .card { + padding: 1.5em; + } + + #app { + padding: 1.5rem; + } +} diff --git a/frontend/src/theme.css b/frontend/src/theme.css new file mode 100644 index 0000000..5992156 --- /dev/null +++ b/frontend/src/theme.css @@ -0,0 +1,536 @@ +/** + * Central Theme System for Project Thoth + * + * This file contains all reusable, theme-aware, responsive CSS classes. + * Components should use these classes instead of custom styles where possible. 
+ */ + +/* ============================================ + LAYOUT UTILITIES - RESPONSIVE GRIDS + ============================================ */ + +/* Responsive Grid - Automatically adjusts columns based on screen size */ +.grid-responsive { + display: grid; + gap: var(--spacing-md); +} + +/* Mobile: 1 column, Tablet: 2 columns, Desktop: 3+ columns */ +.grid-auto { + display: grid; + gap: var(--spacing-md); + grid-template-columns: 1fr; /* Default to single column */ +} + +/* Stats grid - always fills available space */ +.grid-stats { + display: grid; + gap: var(--spacing-md); + grid-template-columns: 1fr; /* Default to single column */ +} + +/* Force specific column counts */ +.grid-1 { grid-template-columns: 1fr; } +.grid-2 { grid-template-columns: repeat(2, 1fr); } +.grid-3 { grid-template-columns: repeat(3, 1fr); } +.grid-4 { grid-template-columns: repeat(4, 1fr); } + +/* ============================================ + FLEXBOX UTILITIES - RESPONSIVE + ============================================ */ + +.flex { display: flex; } +.flex-col { display: flex; flex-direction: column; } +.flex-wrap { flex-wrap: wrap; } +.flex-center { + display: flex; + align-items: center; + justify-content: center; +} +.flex-between { + display: flex; + justify-content: space-between; + align-items: center; +} +.flex-start { + display: flex; + justify-content: flex-start; + align-items: center; +} +.flex-end { + display: flex; + justify-content: flex-end; + align-items: center; +} + +/* Stack on mobile, horizontal on desktop */ +.flex-responsive { + display: flex; + gap: var(--spacing-md); + flex-wrap: wrap; +} + +/* ============================================ + SPACING UTILITIES + ============================================ */ + +/* Gaps */ +.gap-xs { gap: var(--spacing-xs); } +.gap-sm { gap: var(--spacing-sm); } +.gap-md { gap: var(--spacing-md); } +.gap-lg { gap: var(--spacing-lg); } +.gap-xl { gap: var(--spacing-xl); } + +/* Padding */ +.p-0 { padding: 0; } +.p-xs { padding: 
var(--spacing-xs); } +.p-sm { padding: var(--spacing-sm); } +.p-md { padding: var(--spacing-md); } +.p-lg { padding: var(--spacing-lg); } +.p-xl { padding: var(--spacing-xl); } + +/* Margin */ +.m-0 { margin: 0; } +.m-xs { margin: var(--spacing-xs); } +.m-sm { margin: var(--spacing-sm); } +.m-md { margin: var(--spacing-md); } +.m-lg { margin: var(--spacing-lg); } +.m-xl { margin: var(--spacing-xl); } + +/* Margin/Padding specific sides */ +.mt-md { margin-top: var(--spacing-md); } +.mb-md { margin-bottom: var(--spacing-md); } +.ml-md { margin-left: var(--spacing-md); } +.mr-md { margin-right: var(--spacing-md); } + +.pt-md { padding-top: var(--spacing-md); } +.pb-md { padding-bottom: var(--spacing-md); } +.pl-md { padding-left: var(--spacing-md); } +.pr-md { padding-right: var(--spacing-md); } + +/* ============================================ + CARD COMPONENTS - THEME AWARE + ============================================ */ + +.card { + background: var(--color-bg-card); + border-radius: var(--radius-xl); + padding: var(--spacing-xl); + box-shadow: var(--shadow-md); + transition: box-shadow 0.2s ease; +} + +.card:hover { + box-shadow: var(--shadow-lg); +} + +.card-sm { + background: var(--color-bg-card); + border-radius: var(--radius-lg); + padding: var(--spacing-md); + box-shadow: var(--shadow-sm); +} + +.card-secondary { + background: var(--color-bg-secondary); + border-radius: var(--radius-lg); + padding: var(--spacing-lg); + box-shadow: var(--shadow-sm); +} + +/* Status border variants */ +.card-success { border-left: 4px solid var(--color-success); } +.card-warning { border-left: 4px solid var(--color-warning); } +.card-error { border-left: 4px solid var(--color-error); } +.card-info { border-left: 4px solid var(--color-info); } + +/* ============================================ + BUTTON COMPONENTS - THEME AWARE + ============================================ */ + +.btn { + padding: var(--spacing-sm) var(--spacing-md); + border: none; + border-radius: 
var(--radius-md); + font-size: var(--font-size-sm); + font-weight: 600; + cursor: pointer; + transition: all 0.2s ease; + white-space: nowrap; +} + +.btn:hover { + transform: translateY(-1px); +} + +.btn:active { + transform: translateY(0); +} + +.btn:disabled { + opacity: 0.5; + cursor: not-allowed; + transform: none; +} + +/* Button variants */ +.btn-primary { + background: var(--gradient-primary); + color: white; + border: none; +} + +.btn-success { + background: var(--color-success); + color: white; +} + +.btn-success:hover:not(:disabled) { + background: var(--color-success-dark); +} + +.btn-error { + background: var(--color-error); + color: white; +} + +.btn-error:hover:not(:disabled) { + background: var(--color-error-dark); +} + +.btn-info { + background: var(--color-info); + color: white; +} + +.btn-info:hover:not(:disabled) { + background: var(--color-info-dark); +} + +.btn-secondary { + background: var(--color-bg-secondary); + color: var(--color-text-primary); + border: 2px solid var(--color-border); +} + +.btn-secondary:hover:not(:disabled) { + background: var(--color-bg-primary); + border-color: var(--color-primary); +} + +.btn-secondary.active { + background: var(--gradient-primary); + color: white; + border-color: var(--color-primary); +} + +/* Button sizes */ +.btn-sm { + padding: var(--spacing-xs) var(--spacing-sm); + font-size: var(--font-size-xs); +} + +.btn-lg { + padding: var(--spacing-md) var(--spacing-xl); + font-size: var(--font-size-lg); +} + +/* ============================================ + FORM COMPONENTS - THEME AWARE + ============================================ */ + +.form-group { + margin-bottom: var(--spacing-md); +} + +.form-label { + display: block; + margin-bottom: var(--spacing-sm); + font-weight: 600; + color: var(--color-text-primary); + font-size: var(--font-size-sm); +} + +.form-input, +.form-select, +.form-textarea { + width: 100%; + padding: var(--spacing-sm) var(--spacing-md); + border: 1px solid var(--color-border); + 
border-radius: var(--radius-md); + background: var(--color-bg-input); + color: var(--color-text-primary); + font-size: var(--font-size-sm); + transition: border-color 0.2s ease, box-shadow 0.2s ease; +} + +.form-input:focus, +.form-select:focus, +.form-textarea:focus { + outline: none; + border-color: var(--color-primary); + box-shadow: 0 0 0 3px rgba(102, 126, 234, 0.1); +} + +.form-textarea { + resize: vertical; + min-height: 80px; + font-family: inherit; +} + +/* Form layouts */ +.form-row { + display: grid; + gap: var(--spacing-md); + grid-template-columns: 1fr; +} + +/* ============================================ + TEXT UTILITIES + ============================================ */ + +.text-xs { font-size: var(--font-size-xs); } +.text-sm { font-size: var(--font-size-sm); } +.text-base { font-size: var(--font-size-base); } +.text-lg { font-size: var(--font-size-lg); } +.text-xl { font-size: var(--font-size-xl); } +.text-2xl { font-size: var(--font-size-2xl); } + +.text-primary { color: var(--color-text-primary); } +.text-secondary { color: var(--color-text-secondary); } +.text-muted { color: var(--color-text-muted); } + +.text-success { color: var(--color-success); } +.text-warning { color: var(--color-warning); } +.text-error { color: var(--color-error); } +.text-info { color: var(--color-info); } + +.text-center { text-align: center; } +.text-left { text-align: left; } +.text-right { text-align: right; } + +.font-bold { font-weight: 700; } +.font-semibold { font-weight: 600; } +.font-normal { font-weight: 400; } + +/* ============================================ + RESPONSIVE UTILITIES + ============================================ */ + +/* Show/Hide based on screen size */ +.mobile-only { display: none; } +.desktop-only { display: block; } + +/* Width utilities */ +.w-full { width: 100%; } +.w-auto { width: auto; } + +/* Height utilities */ +.h-full { height: 100%; } +.h-auto { height: auto; } + +/* ============================================ + MOBILE 
BREAKPOINTS (โ‰ค480px) + ============================================ */ + +@media (max-width: 480px) { + /* Show/Hide */ + .mobile-only { display: block; } + .desktop-only { display: none; } + + /* Grids already default to 1fr, just ensure it stays that way */ + .grid-2, + .grid-3, + .grid-4 { + grid-template-columns: 1fr !important; + } + + /* Stack flex items vertically */ + .flex-responsive { + flex-direction: column; + } + + /* Buttons take full width */ + .btn-mobile-full { + width: 100%; + min-width: 100%; + } + + /* Reduce card padding on mobile */ + .card { + padding: var(--spacing-md); + } + + .card-sm { + padding: var(--spacing-sm); + } + + /* Allow text wrapping on mobile */ + .btn { + white-space: normal; + text-align: center; + } +} + +/* ============================================ + TABLET BREAKPOINTS (481px - 768px) + ============================================ */ + +@media (min-width: 481px) and (max-width: 768px) { + /* 2-column layouts on tablets */ + .grid-3, + .grid-4 { + grid-template-columns: repeat(2, 1fr); + } + + .grid-auto { + grid-template-columns: repeat(2, 1fr); + } + + .grid-stats { + grid-template-columns: repeat(2, 1fr); + } + + .form-row { + grid-template-columns: 1fr 1fr; + } +} + +/* ============================================ + DESKTOP BREAKPOINTS (769px - 1024px) + ============================================ */ + +@media (min-width: 769px) and (max-width: 1024px) { + .grid-auto { + grid-template-columns: repeat(3, 1fr); + } + + .grid-stats { + grid-template-columns: repeat(3, 1fr); + } + + .grid-4 { + grid-template-columns: repeat(3, 1fr); + } +} + +/* ============================================ + LARGE DESKTOP (โ‰ฅ1025px) + ============================================ */ + +@media (min-width: 1025px) { + .grid-auto { + grid-template-columns: repeat(auto-fill, minmax(300px, 1fr)); + } + + .grid-stats { + grid-template-columns: repeat(auto-fit, minmax(200px, 1fr)); + } + + .form-row { + grid-template-columns: 1fr 1fr; + } 
+} + +/* ============================================ + STATUS & STATE UTILITIES + ============================================ */ + +.status-badge { + display: inline-block; + padding: var(--spacing-xs) var(--spacing-sm); + border-radius: var(--radius-sm); + font-size: var(--font-size-xs); + font-weight: 600; +} + +.status-success { + background: var(--color-success-bg); + color: var(--color-success-dark); + border: 1px solid var(--color-success-border); +} + +.status-warning { + background: var(--color-warning-bg); + color: var(--color-warning-dark); + border: 1px solid var(--color-warning-border); +} + +.status-error { + background: var(--color-error-bg); + color: var(--color-error-dark); + border: 1px solid var(--color-error-border); +} + +.status-info { + background: var(--color-info-bg); + color: var(--color-info-dark); + border: 1px solid var(--color-info-border); +} + +/* ============================================ + ANIMATION UTILITIES + ============================================ */ + +.fade-in { + animation: fadeIn 0.3s ease-in; +} + +@keyframes fadeIn { + from { opacity: 0; } + to { opacity: 1; } +} + +.slide-up { + animation: slideUp 0.3s ease-out; +} + +@keyframes slideUp { + from { + opacity: 0; + transform: translateY(20px); + } + to { + opacity: 1; + transform: translateY(0); + } +} + +/* ============================================ + LOADING UTILITIES + ============================================ */ + +.spinner { + border: 3px solid var(--color-border); + border-top: 3px solid var(--color-primary); + border-radius: 50%; + width: 40px; + height: 40px; + animation: spin 1s linear infinite; + margin: 0 auto; +} + +.spinner-sm { + width: 20px; + height: 20px; + border-width: 2px; +} + +@keyframes spin { + 0% { transform: rotate(0deg); } + 100% { transform: rotate(360deg); } +} + +/* ============================================ + DIVIDER + ============================================ */ + +.divider { + height: 1px; + background: var(--color-border); 
+ margin: var(--spacing-lg) 0; +} + +.divider-md { + margin: var(--spacing-md) 0; +} diff --git a/frontend/tsconfig.app.json b/frontend/tsconfig.app.json new file mode 100644 index 0000000..8d16e42 --- /dev/null +++ b/frontend/tsconfig.app.json @@ -0,0 +1,16 @@ +{ + "extends": "@vue/tsconfig/tsconfig.dom.json", + "compilerOptions": { + "tsBuildInfoFile": "./node_modules/.tmp/tsconfig.app.tsbuildinfo", + "types": ["vite/client"], + + /* Linting */ + "strict": true, + "noUnusedLocals": true, + "noUnusedParameters": true, + "erasableSyntaxOnly": true, + "noFallthroughCasesInSwitch": true, + "noUncheckedSideEffectImports": true + }, + "include": ["src/**/*.ts", "src/**/*.tsx", "src/**/*.vue"] +} diff --git a/frontend/tsconfig.json b/frontend/tsconfig.json new file mode 100644 index 0000000..1ffef60 --- /dev/null +++ b/frontend/tsconfig.json @@ -0,0 +1,7 @@ +{ + "files": [], + "references": [ + { "path": "./tsconfig.app.json" }, + { "path": "./tsconfig.node.json" } + ] +} diff --git a/frontend/tsconfig.node.json b/frontend/tsconfig.node.json new file mode 100644 index 0000000..8a67f62 --- /dev/null +++ b/frontend/tsconfig.node.json @@ -0,0 +1,26 @@ +{ + "compilerOptions": { + "tsBuildInfoFile": "./node_modules/.tmp/tsconfig.node.tsbuildinfo", + "target": "ES2023", + "lib": ["ES2023"], + "module": "ESNext", + "types": ["node"], + "skipLibCheck": true, + + /* Bundler mode */ + "moduleResolution": "bundler", + "allowImportingTsExtensions": true, + "verbatimModuleSyntax": true, + "moduleDetection": "force", + "noEmit": true, + + /* Linting */ + "strict": true, + "noUnusedLocals": true, + "noUnusedParameters": true, + "erasableSyntaxOnly": true, + "noFallthroughCasesInSwitch": true, + "noUncheckedSideEffectImports": true + }, + "include": ["vite.config.ts"] +} diff --git a/frontend/vite.config.ts b/frontend/vite.config.ts new file mode 100644 index 0000000..7e4bca6 --- /dev/null +++ b/frontend/vite.config.ts @@ -0,0 +1,26 @@ +import { defineConfig } from 'vite' +import vue 
#!/usr/bin/env bash
# manage.sh — dev & cloud orchestration wrapper around docker compose for the
# kiwi stack. Mojibake in comments/strings (โ€” / โ†’) restored to — / →.
set -euo pipefail

SERVICE=kiwi
WEB_PORT=8511        # Vue SPA (nginx) — dev
API_PORT=8512        # FastAPI — dev
CLOUD_WEB_PORT=8515  # Vue SPA (nginx) — cloud
COMPOSE_FILE="compose.yml"
CLOUD_COMPOSE_FILE="compose.cloud.yml"
CLOUD_PROJECT="kiwi-cloud"

# Print usage and exit non-zero (also serves as the fallback for unknown cmds).
usage() {
  echo "Usage: $0 {start|stop|restart|status|logs|open|build|test"
  echo "          |cloud-start|cloud-stop|cloud-restart|cloud-status|cloud-logs|cloud-build}"
  echo ""
  echo "Dev:"
  echo "  start          Build (if needed) and start all services"
  echo "  stop           Stop and remove containers"
  echo "  restart        Stop then start"
  echo "  status         Show running containers"
  echo "  logs [svc]     Follow logs (api | web — defaults to all)"
  echo "  open           Open web UI in browser"
  echo "  build          Rebuild Docker images without cache"
  echo "  test           Run pytest test suite"
  echo ""
  echo "Cloud (menagerie.circuitforge.tech/kiwi):"
  echo "  cloud-start    Build cloud images and start kiwi-cloud project"
  echo "  cloud-stop     Stop cloud instance"
  echo "  cloud-restart  Stop then start cloud instance"
  echo "  cloud-status   Show cloud containers"
  echo "  cloud-logs     Follow cloud logs [api|web — defaults to all]"
  echo "  cloud-build    Rebuild cloud images without cache"
  exit 1
}

cmd="${1:-help}"
shift || true   # tolerate no-arg invocation under `set -e`

case "$cmd" in
  start)
    docker compose -f "$COMPOSE_FILE" up -d --build
    echo "Kiwi running → http://localhost:${WEB_PORT}"
    ;;
  stop)
    docker compose -f "$COMPOSE_FILE" down
    ;;
  restart)
    docker compose -f "$COMPOSE_FILE" down
    docker compose -f "$COMPOSE_FILE" up -d --build
    echo "Kiwi running → http://localhost:${WEB_PORT}"
    ;;
  status)
    docker compose -f "$COMPOSE_FILE" ps
    ;;
  logs)
    svc="${1:-}"
    # ${svc:+"$svc"} passes a *quoted* service name when one was given and
    # nothing at all when empty — SC2086-safe replacement for unquoted ${svc}.
    docker compose -f "$COMPOSE_FILE" logs -f ${svc:+"$svc"}
    ;;
  open)
    xdg-open "http://localhost:${WEB_PORT}" 2>/dev/null \
      || open "http://localhost:${WEB_PORT}" 2>/dev/null \
      || echo "Open http://localhost:${WEB_PORT} in your browser"
    ;;
  build)
    docker compose -f "$COMPOSE_FILE" build --no-cache
    ;;
  test)
    # TODO(review): conda env name 'job-seeker' looks inherited from another
    # project — confirm it matches the name declared in environment.yml.
    docker compose -f "$COMPOSE_FILE" run --rm api \
      conda run -n job-seeker pytest tests/ -v
    ;;

  cloud-start)
    docker compose -f "$CLOUD_COMPOSE_FILE" -p "$CLOUD_PROJECT" up -d --build
    echo "Kiwi cloud running → https://menagerie.circuitforge.tech/kiwi"
    ;;
  cloud-stop)
    docker compose -f "$CLOUD_COMPOSE_FILE" -p "$CLOUD_PROJECT" down
    ;;
  cloud-restart)
    docker compose -f "$CLOUD_COMPOSE_FILE" -p "$CLOUD_PROJECT" down
    docker compose -f "$CLOUD_COMPOSE_FILE" -p "$CLOUD_PROJECT" up -d --build
    echo "Kiwi cloud running → https://menagerie.circuitforge.tech/kiwi"
    ;;
  cloud-status)
    docker compose -f "$CLOUD_COMPOSE_FILE" -p "$CLOUD_PROJECT" ps
    ;;
  cloud-logs)
    svc="${1:-}"
    docker compose -f "$CLOUD_COMPOSE_FILE" -p "$CLOUD_PROJECT" logs -f ${svc:+"$svc"}
    ;;
  cloud-build)
    docker compose -f "$CLOUD_COMPOSE_FILE" -p "$CLOUD_PROJECT" build --no-cache
    ;;

  *)
    usage
    ;;
esac
"fastapi>=0.110", + "uvicorn[standard]>=0.27", + "python-multipart>=0.0.9", + "aiofiles>=23.0", + # Image processing + OCR + "opencv-python>=4.8", + "numpy>=1.25", + "pyzbar>=0.1.9", + # HTTP client + "httpx>=0.27", + # CircuitForge shared scaffold + "circuitforge-core", +] + +[tool.setuptools.packages.find] +where = ["."] +include = ["app*"] + +[tool.pytest.ini_options] +testpaths = ["tests"] +asyncio_mode = "auto"