feat: wire cloud session, Heimdall licensing, and split-store DB isolation

- api/cloud_session.py: new module — JWT validation (Directus HS256),
  Heimdall provision+tier-resolve, CloudUser+SessionFeatures dataclasses,
  compute_features() tier→feature-flag mapping, require_tier() dependency
  factory, get_session() FastAPI dependency (local-mode transparent passthrough)
- api/main.py: remove _DB_PATH singleton; all endpoints receive session via
  Depends(get_session); shared_store (sellers/comps) and user_store (listings/
  saved_searches) created per-request from session.shared_db / session.user_db;
  pages capped to features.max_pages; saved_searches limit enforced for free tier;
  /api/session endpoint exposes tier+features to frontend; _trigger_scraper_enrichment
  receives shared_db Path (background thread creates its own Store)
- app/platforms/ebay/adapter.py, scraper.py: rename store→shared_store parameter
  (adapters only touch sellers+comps, never listings — naming reflects this)
- app/trust/__init__.py: rename store→shared_store (TrustScorer reads
  sellers+comps from shared DB; listing staging fields come from caller)
- app/db/store.py: refresh_seller_categories gains listing_store param for
  split-DB mode (reads listings from user_store, writes categories to self)
- web/src/stores/session.ts: new Pinia store — bootstrap() fetches /api/session,
  exposes tier+features reactively; falls back to full-access local defaults
- web/src/App.vue: call session.bootstrap() on mount
- web/src/views/SearchView.vue: import session store; pages buttons disabled+greyed
  above features.max_pages with upgrade tooltip
- compose.cloud.yml: add CLOUD_MODE=true + CLOUD_DATA_ROOT env; fix volume mount
- docker/web/nginx.cloud.conf: forward X-CF-Session header from Caddy to API
- .env.example: document cloud env vars (CLOUD_MODE, DIRECTUS_JWT_SECRET, etc.)
This commit is contained in:
pyr0ball 2026-03-27 02:07:06 -07:00
parent a61166f48a
commit 9e20759dbe
12 changed files with 468 additions and 58 deletions

View file

@ -31,3 +31,18 @@ EBAY_WEBHOOK_VERIFY_SIGNATURES=true
# ── Database ───────────────────────────────────────────────────────────────────
SNIPE_DB=data/snipe.db
# ── Cloud mode (managed / menagerie instance only) ─────────────────────────────
# Leave unset for self-hosted / local use. When set, per-user DB isolation
# and Heimdall licensing are enabled. compose.cloud.yml sets CLOUD_MODE=true
# automatically — only set manually if running without Docker.
# CLOUD_MODE=true
# CLOUD_DATA_ROOT=/devl/snipe-cloud-data
# JWT secret from cf-directus (must match Directus SECRET env var exactly).
# DIRECTUS_JWT_SECRET=
# Heimdall license server — for tier resolution and free-key auto-provisioning.
# HEIMDALL_URL=https://license.circuitforge.tech
# HEIMDALL_ADMIN_TOKEN=

239
api/cloud_session.py Normal file
View file

@ -0,0 +1,239 @@
"""Cloud session resolution for Snipe FastAPI.
In local mode (CLOUD_MODE unset/false): all functions return a local CloudUser
with no auth checks, full tier access, and both DB paths pointing to SNIPE_DB.
In cloud mode (CLOUD_MODE=true): validates the cf_session JWT injected by Caddy
as X-CF-Session, resolves user_id, auto-provisions a free Heimdall license on
first visit, fetches the tier, and returns per-user DB paths.
FastAPI usage:
@app.get("/api/search")
def search(session: CloudUser = Depends(get_session)):
shared_store = Store(session.shared_db)
user_store = Store(session.user_db)
...
"""
from __future__ import annotations
import hashlib
import hmac
import logging
import os
import re
import time
from dataclasses import dataclass
from pathlib import Path
from typing import Optional
import requests
from fastapi import Depends, HTTPException, Request
log = logging.getLogger(__name__)
# ── Config ────────────────────────────────────────────────────────────────────
# All settings are read once from the environment at import time.
CLOUD_MODE: bool = os.environ.get("CLOUD_MODE", "").lower() in ("1", "true", "yes")
CLOUD_DATA_ROOT: Path = Path(os.environ.get("CLOUD_DATA_ROOT", "/devl/snipe-cloud-data"))
# Must match the Directus SECRET env var exactly — HS256 shared secret used
# to verify cf_session JWTs.
DIRECTUS_JWT_SECRET: str = os.environ.get("DIRECTUS_JWT_SECRET", "")
CF_SERVER_SECRET: str = os.environ.get("CF_SERVER_SECRET", "")
HEIMDALL_URL: str = os.environ.get("HEIMDALL_URL", "https://license.circuitforge.tech")
HEIMDALL_ADMIN_TOKEN: str = os.environ.get("HEIMDALL_ADMIN_TOKEN", "")
# Local-mode DB paths (ignored in cloud mode)
_LOCAL_SNIPE_DB: Path = Path(os.environ.get("SNIPE_DB", "data/snipe.db"))
# Tier cache: user_id → (tier, fetched_at) — fetched_at is a time.monotonic()
# timestamp, not a wall-clock epoch (see _fetch_cloud_tier).
_TIER_CACHE: dict[str, tuple[str, float]] = {}
_TIER_CACHE_TTL = 300  # 5 minutes
# Ordered lowest → highest; list position encodes tier ranking (require_tier).
TIERS = ["free", "paid", "premium", "ultra"]
# ── Domain ────────────────────────────────────────────────────────────────────
@dataclass(frozen=True)
class CloudUser:
    """Resolved identity and DB routing for a single request.

    Frozen: treated as an immutable per-request value object produced by
    get_session().
    """

    user_id: str      # Directus UUID, or "local" in local mode
    tier: str         # free | paid | premium | ultra | local
    shared_db: Path   # sellers, market_comps — shared across all users
    user_db: Path     # listings, saved_searches, trust_scores — per-user
@dataclass(frozen=True)
class SessionFeatures:
    """Tier-derived feature flags, serialized verbatim to the frontend."""

    saved_searches: bool
    saved_searches_limit: Optional[int]  # None = unlimited
    background_monitoring: bool
    max_pages: int    # hard server-side cap on search pages per request
    upc_search: bool
    photo_analysis: bool
    shared_scammer_db: bool
    shared_image_db: bool
def compute_features(tier: str) -> SessionFeatures:
    """Compute feature flags from a tier name.

    Evaluated server-side and exposed via /api/session so the frontend
    never hardcodes tier logic.

    tier: one of "free" | "paid" | "premium" | "ultra" | "local".
    """
    local = tier == "local"
    # "paid and above" — every current feature gate sits at this boundary.
    # (A previous premium_plus flag was computed but never used; removed.)
    paid_plus = local or tier in ("paid", "premium", "ultra")
    return SessionFeatures(
        saved_searches=True,  # all tiers get saved searches
        saved_searches_limit=None if paid_plus else 3,  # None = unlimited
        background_monitoring=paid_plus,
        max_pages=999 if local else (5 if paid_plus else 1),
        upc_search=paid_plus,
        photo_analysis=paid_plus,
        shared_scammer_db=paid_plus,
        shared_image_db=paid_plus,
    )
# ── JWT validation ────────────────────────────────────────────────────────────
def _extract_session_token(header_value: str) -> str:
"""Extract cf_session value from a Cookie or X-CF-Session header string."""
# X-CF-Session may be the raw JWT or the full cookie string
m = re.search(r'(?:^|;)\s*cf_session=([^;]+)', header_value)
return m.group(1).strip() if m else header_value.strip()
def validate_session_jwt(token: str) -> str:
    """Validate a cf_session JWT and return the Directus user_id (sub claim).

    Uses HMAC-SHA256 verification against DIRECTUS_JWT_SECRET (the same
    secret cf-directus uses to sign session tokens).

    Returns: the "sub" claim (Directus user UUID) on success.
    Raises: HTTPException(401) for an invalid/expired token,
            HTTPException(500) when the server is misconfigured.
    """
    import jwt as pyjwt  # local import: PyJWT is only needed in cloud mode

    # An empty secret would let any HS256 token signed with "" verify —
    # refuse outright rather than silently accept forgeable sessions.
    if not DIRECTUS_JWT_SECRET:
        log.error("DIRECTUS_JWT_SECRET is not set — cannot validate sessions")
        raise HTTPException(status_code=500, detail="Session validation unavailable")
    try:
        payload = pyjwt.decode(
            token,
            DIRECTUS_JWT_SECRET,
            algorithms=["HS256"],  # pin the algorithm — never trust the token header
            options={"require": ["sub", "exp"]},
        )
    except pyjwt.PyJWTError as exc:
        # Narrow catch: only genuine token problems become 401; programming
        # errors (e.g. missing dependency) surface as 500s instead of
        # masquerading as auth failures.
        log.debug("JWT validation failed: %s", exc)
        raise HTTPException(status_code=401, detail="Session invalid or expired") from None
    return payload["sub"]
# ── Heimdall integration ──────────────────────────────────────────────────────
def _ensure_provisioned(user_id: str) -> None:
    """Best-effort, idempotent free-license provisioning in Heimdall.

    No-op when HEIMDALL_ADMIN_TOKEN is unset. Failures are logged and
    swallowed so a flaky license server never blocks the request.
    """
    if not HEIMDALL_ADMIN_TOKEN:
        return
    body = {"directus_user_id": user_id, "product": "snipe", "tier": "free"}
    auth_headers = {"Authorization": f"Bearer {HEIMDALL_ADMIN_TOKEN}"}
    try:
        requests.post(
            f"{HEIMDALL_URL}/admin/provision",
            json=body,
            headers=auth_headers,
            timeout=5,
        )
    except Exception as exc:
        log.warning("Heimdall provision failed for user %s: %s", user_id, exc)
def _fetch_cloud_tier(user_id: str) -> str:
    """Resolve the user's tier from Heimdall with a 5-minute in-process cache.

    Fails open to "free": any network error, non-2xx response, malformed
    body, or unknown tier string degrades to the free tier rather than
    blocking the request.
    """
    now = time.monotonic()
    cached = _TIER_CACHE.get(user_id)
    if cached and (now - cached[1]) < _TIER_CACHE_TTL:
        return cached[0]
    if not HEIMDALL_ADMIN_TOKEN:
        # No admin credentials — can't ask Heimdall; don't poison the cache.
        return "free"
    try:
        resp = requests.post(
            f"{HEIMDALL_URL}/admin/cloud/resolve",
            json={"directus_user_id": user_id, "product": "snipe"},
            headers={"Authorization": f"Bearer {HEIMDALL_ADMIN_TOKEN}"},
            timeout=5,
        )
        tier = resp.json().get("tier", "free") if resp.ok else "free"
    except Exception as exc:
        log.warning("Heimdall tier resolve failed for user %s: %s", user_id, exc)
        tier = "free"
    if tier not in TIERS:
        # An unknown tier string would later 403 in require_tier ("Unknown
        # tier") — clamp to free instead of caching a poisoned value.
        log.warning("Heimdall returned unknown tier %r for user %s", tier, user_id)
        tier = "free"
    _TIER_CACHE[user_id] = (tier, now)
    return tier
# ── DB path helpers ───────────────────────────────────────────────────────────
def _shared_db_path() -> Path:
    """Return the cross-user shared DB path (sellers, market_comps),
    creating its parent directory if needed."""
    shared = CLOUD_DATA_ROOT / "shared" / "shared.db"
    shared.parent.mkdir(parents=True, exist_ok=True)
    return shared
def _user_db_path(user_id: str) -> Path:
    """Return the per-user DB path under CLOUD_DATA_ROOT, creating parents.

    user_id originates from the attacker-controllable JWT "sub" claim, so
    it is validated before being used as a path component — otherwise a
    crafted sub containing separators could escape CLOUD_DATA_ROOT.

    Raises HTTPException(400) for a malformed user id.
    """
    # Directus user ids are UUIDs; allow only a safe single path segment
    # (no '/', '\\', and it cannot start with '.', so '..' is impossible).
    if not re.fullmatch(r"[A-Za-z0-9][A-Za-z0-9._-]*", user_id):
        raise HTTPException(status_code=400, detail="Invalid user id")
    path = CLOUD_DATA_ROOT / user_id / "snipe" / "user.db"
    path.parent.mkdir(parents=True, exist_ok=True)
    return path
# ── FastAPI dependency ────────────────────────────────────────────────────────
def get_session(request: Request) -> CloudUser:
    """FastAPI dependency — resolve the current user for this request.

    Local mode: returns a fully-privileged "local" user pointing at SNIPE_DB.
    Cloud mode: validates the X-CF-Session JWT (or cf_session cookie),
    auto-provisions a Heimdall license, resolves the tier, and returns
    per-user DB paths.

    Raises HTTPException(401) in cloud mode when no valid session exists.
    """
    if not CLOUD_MODE:
        # Self-hosted / local: no auth, full access, both DBs are SNIPE_DB.
        return CloudUser(
            user_id="local",
            tier="local",
            shared_db=_LOCAL_SNIPE_DB,
            user_db=_LOCAL_SNIPE_DB,
        )
    # Caddy injects X-CF-Session; fall back to the raw Cookie header.
    header = request.headers.get("x-cf-session", "") or request.headers.get("cookie", "")
    if not header:
        raise HTTPException(status_code=401, detail="Not authenticated")
    token = _extract_session_token(header)
    if not token:
        raise HTTPException(status_code=401, detail="Not authenticated")
    uid = validate_session_jwt(token)
    _ensure_provisioned(uid)
    return CloudUser(
        user_id=uid,
        tier=_fetch_cloud_tier(uid),
        shared_db=_shared_db_path(),
        user_db=_user_db_path(uid),
    )
def require_tier(min_tier: str):
    """Dependency factory — raises 403 if the session tier is below min_tier.

    Usage: @app.post("/api/foo", dependencies=[Depends(require_tier("paid"))])

    Raises ValueError at registration time if min_tier itself is not in
    TIERS — fail fast on programmer error rather than at request time.
    """
    min_idx = TIERS.index(min_tier)

    def _check(session: CloudUser = Depends(get_session)) -> CloudUser:
        if session.tier == "local":
            return session  # local users always pass
        # Keep the try body minimal: only the index lookup can raise
        # ValueError, so the legitimate 403 below can never be confused
        # with (or chained to) an "Unknown tier" failure.
        try:
            tier_idx = TIERS.index(session.tier)
        except ValueError:
            raise HTTPException(status_code=403, detail="Unknown tier.") from None
        if tier_idx < min_idx:
            raise HTTPException(
                status_code=403,
                detail=f"This feature requires {min_tier} tier or above.",
            )
        return session

    return _check

View file

@ -8,7 +8,7 @@ import os
from concurrent.futures import ThreadPoolExecutor
from pathlib import Path
from fastapi import FastAPI, HTTPException
from fastapi import Depends, FastAPI, HTTPException
from pydantic import BaseModel
from fastapi.middleware.cors import CORSMiddleware
@ -21,14 +21,12 @@ from app.platforms.ebay.adapter import EbayAdapter
from app.platforms.ebay.auth import EbayTokenManager
from app.platforms.ebay.query_builder import expand_queries, parse_groups
from app.trust import TrustScorer
from api.cloud_session import CloudUser, compute_features, get_session
from api.ebay_webhook import router as ebay_webhook_router
load_env(Path(".env"))
log = logging.getLogger(__name__)
_DB_PATH = Path(os.environ.get("SNIPE_DB", "data/snipe.db"))
_DB_PATH.parent.mkdir(exist_ok=True)
def _ebay_creds() -> tuple[str, str, str]:
"""Return (client_id, client_secret, env) from env vars.
@ -61,7 +59,27 @@ def health():
return {"status": "ok"}
def _trigger_scraper_enrichment(listings: list, store: Store) -> None:
@app.get("/api/session")
def session_info(session: CloudUser = Depends(get_session)):
    """Return the current session tier and computed feature flags.

    Used by the Vue frontend to gate UI features (pages slider cap,
    saved search limits, shared DB badges, etc.) without hardcoding
    tier logic client-side.

    Response shape: {"user_id": str, "tier": str, "features": dict} —
    features mirrors the SessionFeatures dataclass field-for-field.
    """
    # Flags are recomputed per request so a tier change (Heimdall cache
    # refresh) is reflected on the next fetch without a restart.
    features = compute_features(session.tier)
    return {
        "user_id": session.user_id,
        "tier": session.tier,
        "features": dataclasses.asdict(features),
    }
def _trigger_scraper_enrichment(
listings: list,
shared_store: Store,
shared_db: Path,
) -> None:
"""Fire-and-forget background enrichment for missing seller signals.
Two enrichment passes run concurrently in the same daemon thread:
@ -72,6 +90,10 @@ def _trigger_scraper_enrichment(listings: list, store: Store) -> None:
future searches. Uses ScrapedEbayAdapter's Playwright stack regardless of
which adapter was used for the main search (Shopping API handles age for
the API adapter inline; BTF is the fallback for no-creds / scraper mode).
shared_store: used for pre-flight seller checks (same-thread reads).
shared_db: path passed to the background thread — it creates its own Store
(sqlite3 connections are not thread-safe).
"""
# Caps per search: limits Playwright sessions launched in the background so we
# don't hammer Kasada or spin up dozens of Xvfb instances after a large search.
@ -86,7 +108,7 @@ def _trigger_scraper_enrichment(listings: list, store: Store) -> None:
sid = listing.seller_platform_id
if not sid:
continue
seller = store.get_seller("ebay", sid)
seller = shared_store.get_seller("ebay", sid)
if not seller:
continue
if (seller.account_age_days is None
@ -108,7 +130,7 @@ def _trigger_scraper_enrichment(listings: list, store: Store) -> None:
def _run():
try:
enricher = ScrapedEbayAdapter(Store(_DB_PATH))
enricher = ScrapedEbayAdapter(Store(shared_db))
if needs_btf:
enricher.enrich_sellers_btf(needs_btf, max_workers=2)
log.info("BTF enrichment complete for %d sellers", len(needs_btf))
@ -128,28 +150,31 @@ def _parse_terms(raw: str) -> list[str]:
return [t.strip() for t in raw.split(",") if t.strip()]
def _make_adapter(store: Store, force: str = "auto"):
def _make_adapter(shared_store: Store, force: str = "auto"):
"""Return the appropriate adapter.
force: "auto" | "api" | "scraper"
auto → API if creds present, else scraper
api → Browse API (raises if no creds)
scraper → Playwright scraper regardless of creds
Adapters receive shared_store because they only read/write sellers and
market_comps never listings. Listings are returned and saved by the caller.
"""
client_id, client_secret, env = _ebay_creds()
has_creds = bool(client_id and client_secret)
if force == "scraper":
return ScrapedEbayAdapter(store)
return ScrapedEbayAdapter(shared_store)
if force == "api":
if not has_creds:
raise ValueError("adapter=api requested but no eBay API credentials configured")
return EbayAdapter(EbayTokenManager(client_id, client_secret, env), store, env=env)
return EbayAdapter(EbayTokenManager(client_id, client_secret, env), shared_store, env=env)
# auto
if has_creds:
return EbayAdapter(EbayTokenManager(client_id, client_secret, env), store, env=env)
return EbayAdapter(EbayTokenManager(client_id, client_secret, env), shared_store, env=env)
log.debug("No eBay API credentials — using scraper adapter (partial trust scores)")
return ScrapedEbayAdapter(store)
return ScrapedEbayAdapter(shared_store)
def _adapter_name(force: str = "auto") -> str:
@ -173,10 +198,15 @@ def search(
must_exclude: str = "", # comma-separated; forwarded to eBay -term + client-side
category_id: str = "", # eBay category ID — forwarded to Browse API / scraper _sacat
adapter: str = "auto", # "auto" | "api" | "scraper" — override adapter selection
session: CloudUser = Depends(get_session),
):
if not q.strip():
return {"listings": [], "trust_scores": {}, "sellers": {}, "market_price": None, "adapter_used": _adapter_name(adapter)}
# Cap pages to the tier's maximum — free cloud users get 1 page, local gets unlimited.
features = compute_features(session.tier)
pages = min(max(1, pages), features.max_pages)
must_exclude_terms = _parse_terms(must_exclude)
# In Groups mode, expand OR groups into multiple targeted eBay queries to
@ -190,20 +220,23 @@ def search(
base_filters = SearchFilters(
max_price=max_price if max_price > 0 else None,
min_price=min_price if min_price > 0 else None,
pages=max(1, pages),
pages=pages,
must_exclude=must_exclude_terms, # forwarded to eBay -term by the scraper
category_id=category_id.strip() or None,
)
adapter_used = _adapter_name(adapter)
shared_db = session.shared_db
user_db = session.user_db
# Each thread creates its own Store — sqlite3 check_same_thread=True.
def _run_search(ebay_query: str) -> list:
return _make_adapter(Store(_DB_PATH), adapter).search(ebay_query, base_filters)
return _make_adapter(Store(shared_db), adapter).search(ebay_query, base_filters)
def _run_comps() -> None:
try:
_make_adapter(Store(_DB_PATH), adapter).get_completed_sales(q, pages)
_make_adapter(Store(shared_db), adapter).get_completed_sales(q, pages)
except Exception:
log.warning("comps: unhandled exception for %r", q, exc_info=True)
@ -224,39 +257,44 @@ def search(
if listing.platform_listing_id not in seen_ids:
seen_ids.add(listing.platform_listing_id)
listings.append(listing)
comps_future.result() # side-effect: market comp written to DB
comps_future.result() # side-effect: market comp written to shared DB
except Exception as e:
log.warning("eBay scrape failed: %s", e)
raise HTTPException(status_code=502, detail=f"eBay search failed: {e}")
log.info("Multi-search: %d queries → %d unique listings", len(ebay_queries), len(listings))
# Main-thread store for all post-search reads/writes — fresh connection, same thread.
store = Store(_DB_PATH)
store.save_listings(listings)
# Main-thread stores — fresh connections, same thread.
# shared_store: sellers, market_comps (all users share this data)
# user_store: listings, saved_searches (per-user in cloud mode, same file in local mode)
shared_store = Store(shared_db)
user_store = Store(user_db)
user_store.save_listings(listings)
# Derive category_history from accumulated listing data — free for API adapter
# (category_name comes from Browse API response), no-op for scraper listings (category_name=None).
# Reads listings from user_store, writes seller categories to shared_store.
seller_ids = list({l.seller_platform_id for l in listings if l.seller_platform_id})
n_cat = store.refresh_seller_categories("ebay", seller_ids)
n_cat = shared_store.refresh_seller_categories("ebay", seller_ids, listing_store=user_store)
if n_cat:
log.info("Category history derived for %d sellers from listing data", n_cat)
# Re-fetch to hydrate staging fields (times_seen, first_seen_at, id, price_at_first_seen)
# that are only available from the DB after the upsert.
staged = store.get_listings_staged("ebay", [l.platform_listing_id for l in listings])
staged = user_store.get_listings_staged("ebay", [l.platform_listing_id for l in listings])
listings = [staged.get(l.platform_listing_id, l) for l in listings]
# BTF enrichment: scrape /itm/ pages for sellers missing account_age_days.
# Runs in the background so it doesn't delay the response; next search of
# the same sellers will have full scores.
_trigger_scraper_enrichment(listings, store)
_trigger_scraper_enrichment(listings, shared_store, shared_db)
scorer = TrustScorer(store)
scorer = TrustScorer(shared_store)
trust_scores_list = scorer.score_batch(listings, q)
query_hash = hashlib.md5(q.encode()).hexdigest()
comp = store.get_market_comp("ebay", query_hash)
comp = shared_store.get_market_comp("ebay", query_hash)
market_price = comp.median_price if comp else None
# Serialize — keyed by platform_listing_id for easy Vue lookup
@ -267,11 +305,11 @@ def search(
}
seller_map = {
listing.seller_platform_id: dataclasses.asdict(
store.get_seller("ebay", listing.seller_platform_id)
shared_store.get_seller("ebay", listing.seller_platform_id)
)
for listing in listings
if listing.seller_platform_id
and store.get_seller("ebay", listing.seller_platform_id)
and shared_store.get_seller("ebay", listing.seller_platform_id)
}
return {
@ -286,7 +324,12 @@ def search(
# ── On-demand enrichment ──────────────────────────────────────────────────────
@app.post("/api/enrich")
def enrich_seller(seller: str, listing_id: str, query: str = ""):
def enrich_seller(
seller: str,
listing_id: str,
query: str = "",
session: CloudUser = Depends(get_session),
):
"""Synchronous on-demand enrichment for a single seller + re-score.
Runs enrichment paths in parallel:
@ -298,38 +341,45 @@ def enrich_seller(seller: str, listing_id: str, query: str = ""):
Returns the updated trust_score and seller so the frontend can patch in-place.
"""
import threading
store = Store(_DB_PATH)
seller_obj = store.get_seller("ebay", seller)
shared_store = Store(session.shared_db)
user_store = Store(session.user_db)
shared_db = session.shared_db
seller_obj = shared_store.get_seller("ebay", seller)
if not seller_obj:
raise HTTPException(status_code=404, detail=f"Seller '{seller}' not found")
# Fast path: Shopping API for account age (inline, no Playwright)
try:
api_adapter = _make_adapter(store, "api")
api_adapter = _make_adapter(shared_store, "api")
if hasattr(api_adapter, "enrich_sellers_shopping_api"):
api_adapter.enrich_sellers_shopping_api([seller])
except Exception:
pass # no API creds — fall through to BTF
seller_obj = store.get_seller("ebay", seller)
seller_obj = shared_store.get_seller("ebay", seller)
needs_btf = seller_obj is not None and seller_obj.account_age_days is None
needs_categories = seller_obj is None or seller_obj.category_history_json in ("{}", "", None)
# Slow path: Playwright for remaining gaps (BTF + _ssn in parallel threads)
# Slow path: Playwright for remaining gaps (BTF + _ssn in parallel threads).
# Each thread creates its own Store — sqlite3 connections are not thread-safe.
if needs_btf or needs_categories:
scraper = ScrapedEbayAdapter(Store(_DB_PATH))
errors: list[Exception] = []
def _btf():
try:
scraper.enrich_sellers_btf({seller: listing_id}, max_workers=1)
ScrapedEbayAdapter(Store(shared_db)).enrich_sellers_btf(
{seller: listing_id}, max_workers=1
)
except Exception as e:
errors.append(e)
def _ssn():
try:
ScrapedEbayAdapter(Store(_DB_PATH)).enrich_sellers_categories([seller], max_workers=1)
ScrapedEbayAdapter(Store(shared_db)).enrich_sellers_categories(
[seller], max_workers=1
)
except Exception as e:
errors.append(e)
@ -347,16 +397,16 @@ def enrich_seller(seller: str, listing_id: str, query: str = ""):
log.warning("enrich_seller: %d scrape error(s): %s", len(errors), errors[0])
# Re-fetch listing with staging fields, re-score
staged = store.get_listings_staged("ebay", [listing_id])
staged = user_store.get_listings_staged("ebay", [listing_id])
listing = staged.get(listing_id)
if not listing:
raise HTTPException(status_code=404, detail=f"Listing '{listing_id}' not found")
scorer = TrustScorer(store)
scorer = TrustScorer(shared_store)
trust_list = scorer.score_batch([listing], query or listing.title)
trust = trust_list[0] if trust_list else None
seller_final = store.get_seller("ebay", seller)
seller_final = shared_store.get_seller("ebay", seller)
return {
"trust_score": dataclasses.asdict(trust) if trust else None,
"seller": dataclasses.asdict(seller_final) if seller_final else None,
@ -372,24 +422,39 @@ class SavedSearchCreate(BaseModel):
@app.get("/api/saved-searches")
def list_saved_searches():
return {"saved_searches": [dataclasses.asdict(s) for s in Store(_DB_PATH).list_saved_searches()]}
def list_saved_searches(session: CloudUser = Depends(get_session)):
user_store = Store(session.user_db)
return {"saved_searches": [dataclasses.asdict(s) for s in user_store.list_saved_searches()]}
@app.post("/api/saved-searches", status_code=201)
def create_saved_search(body: SavedSearchCreate):
created = Store(_DB_PATH).save_saved_search(
def create_saved_search(
body: SavedSearchCreate,
session: CloudUser = Depends(get_session),
):
user_store = Store(session.user_db)
features = compute_features(session.tier)
if features.saved_searches_limit is not None:
existing = user_store.list_saved_searches()
if len(existing) >= features.saved_searches_limit:
raise HTTPException(
status_code=403,
detail=f"Free tier allows up to {features.saved_searches_limit} saved searches. Upgrade to save more.",
)
created = user_store.save_saved_search(
SavedSearchModel(name=body.name, query=body.query, platform="ebay", filters_json=body.filters_json)
)
return dataclasses.asdict(created)
@app.delete("/api/saved-searches/{saved_id}", status_code=204)
def delete_saved_search(saved_id: int):
Store(_DB_PATH).delete_saved_search(saved_id)
def delete_saved_search(saved_id: int, session: CloudUser = Depends(get_session)):
Store(session.user_db).delete_saved_search(saved_id)
@app.patch("/api/saved-searches/{saved_id}/run")
def mark_saved_search_run(saved_id: int):
Store(_DB_PATH).update_saved_search_last_run(saved_id)
def mark_saved_search_run(saved_id: int, session: CloudUser = Depends(get_session)):
Store(session.user_db).update_saved_search_last_run(saved_id)
return {"ok": True}

View file

@ -59,14 +59,25 @@ class Store:
return None
return Seller(*row[:7], id=row[7], fetched_at=row[8])
def refresh_seller_categories(self, platform: str, seller_ids: list[str]) -> int:
def refresh_seller_categories(
self,
platform: str,
seller_ids: list[str],
listing_store: "Optional[Store]" = None,
) -> int:
"""Derive category_history_json for sellers that lack it by aggregating
their stored listings' category_name values.
listing_store: the Store instance that holds listings (may differ from
self in cloud split-DB mode where sellers live in shared.db and listings
live in user.db). Defaults to self when not provided (local mode).
Returns the count of sellers updated.
"""
from app.platforms.ebay.scraper import _classify_category_label # lazy to avoid circular
src = listing_store if listing_store is not None else self
if not seller_ids:
return 0
updated = 0
@ -74,7 +85,7 @@ class Store:
seller = self.get_seller(platform, sid)
if not seller or seller.category_history_json not in ("{}", "", None):
continue # already enriched
rows = self._conn.execute(
rows = src._conn.execute(
"SELECT category_name, COUNT(*) FROM listings "
"WHERE platform=? AND seller_platform_id=? AND category_name IS NOT NULL "
"GROUP BY category_name",

View file

@ -64,9 +64,9 @@ BROWSE_BASE = {
class EbayAdapter(PlatformAdapter):
def __init__(self, token_manager: EbayTokenManager, store: Store, env: str = "production"):
def __init__(self, token_manager: EbayTokenManager, shared_store: Store, env: str = "production"):
self._tokens = token_manager
self._store = store
self._store = shared_store
self._env = env
self._browse_base = BROWSE_BASE[env]

View file

@ -283,8 +283,8 @@ class ScrapedEbayAdapter(PlatformAdapter):
category_history) cause TrustScorer to set score_is_partial=True.
"""
def __init__(self, store: Store, delay: float = 1.0):
self._store = store
def __init__(self, shared_store: Store, delay: float = 1.0):
self._store = shared_store
self._delay = delay
def _fetch_url(self, url: str) -> str:

View file

@ -10,8 +10,8 @@ import math
class TrustScorer:
"""Orchestrates metadata + photo scoring for a batch of listings."""
def __init__(self, store: Store):
self._store = store
def __init__(self, shared_store: Store):
self._store = shared_store
self._meta = MetadataScorer()
self._photo = PhotoScorer()
self._agg = Aggregator()

View file

@ -12,9 +12,17 @@ services:
dockerfile: snipe/Dockerfile
restart: unless-stopped
env_file: .env
environment:
# Cloud mode — enables per-user DB isolation and Heimdall tier resolution.
# All values may be overridden by setting them in .env (env_file takes precedence
# over environment: only when the same key appears in both — Docker merges them,
# env_file wins for duplicates).
CLOUD_MODE: "true"
CLOUD_DATA_ROOT: /devl/snipe-cloud-data
# DIRECTUS_JWT_SECRET, HEIMDALL_URL, HEIMDALL_ADMIN_TOKEN — set in .env (never commit)
# No network_mode: host — isolated on snipe-cloud-net; nginx reaches it via 'api:8510'
volumes:
- /devl/snipe-cloud-data:/app/snipe/data
- /devl/snipe-cloud-data:/devl/snipe-cloud-data
networks:
- snipe-cloud-net

View file

@ -13,6 +13,9 @@ server {
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $http_x_forwarded_proto;
# Forward the session header injected by Caddy from the cf_session cookie.
# Caddy adds: header_up X-CF-Session {http.request.cookie.cf_session}
proxy_set_header X-CF-Session $http_x_cf_session;
}
# index.html — never cache; ensures clients always get the latest entry point

View file

@ -17,15 +17,18 @@ import { RouterView } from 'vue-router'
import { useMotion } from './composables/useMotion'
import { useSnipeMode } from './composables/useSnipeMode'
import { useKonamiCode } from './composables/useKonamiCode'
import { useSessionStore } from './stores/session'
import AppNav from './components/AppNav.vue'
const motion = useMotion()
const { activate, restore } = useSnipeMode()
const session = useSessionStore()
useKonamiCode(activate)
onMounted(() => {
restore() // re-apply snipe mode from localStorage on hard reload
session.bootstrap() // fetch tier + feature flags from API
})
</script>

53
web/src/stores/session.ts Normal file
View file

@ -0,0 +1,53 @@
import { defineStore } from 'pinia'
import { ref, computed } from 'vue'
// Mirrors api/cloud_session.py SessionFeatures dataclass
// Mirrors api/cloud_session.py SessionFeatures dataclass — field names and
// types must stay in sync with the server-side definition.
export interface SessionFeatures {
  saved_searches: boolean
  saved_searches_limit: number | null // null = unlimited
  background_monitoring: boolean
  max_pages: number // server-enforced cap; the UI only mirrors it
  upc_search: boolean
  photo_analysis: boolean
  shared_scammer_db: boolean
  shared_image_db: boolean
}
// Full-access defaults used in local / self-hosted mode and as the fallback
// whenever /api/session is unreachable or returns an error.
const LOCAL_FEATURES: SessionFeatures = {
  saved_searches: true,
  saved_searches_limit: null,
  background_monitoring: true,
  max_pages: 999,
  upc_search: true,
  photo_analysis: true,
  shared_scammer_db: true,
  shared_image_db: true,
}
export const useSessionStore = defineStore('session', () => {
  const userId = ref<string>('local')
  const tier = ref<string>('local')
  const features = ref<SessionFeatures>(LOCAL_FEATURES)
  const loaded = ref(false)

  const isCloud = computed(() => tier.value !== 'local')
  const isFree = computed(() => tier.value === 'free')
  const isPaid = computed(() => ['paid', 'premium', 'ultra', 'local'].includes(tier.value))

  /**
   * Fetch tier + feature flags from /api/session.
   * Falls back to full-access local defaults on any failure so a
   * self-hosted deploy (no cloud session) is never feature-gated.
   */
  async function bootstrap() {
    try {
      const res = await fetch('/api/session')
      if (!res.ok) return // local-mode with no session endpoint — keep defaults
      const data = await res.json()
      // Guard each field: a partial/malformed payload must never leave the
      // UI with undefined features (views read features.max_pages directly).
      userId.value = data.user_id ?? 'local'
      tier.value = data.tier ?? 'local'
      features.value = data.features ?? LOCAL_FEATURES
    } catch {
      // Network error or non-cloud deploy — keep local defaults
    } finally {
      loaded.value = true
    }
  }

  return { userId, tier, features, loaded, isCloud, isFree, isPaid, bootstrap }
})

View file

@ -107,8 +107,13 @@
:key="p"
type="button"
class="filter-pages-btn"
:class="{ 'filter-pages-btn--active': filters.pages === p }"
@click="filters.pages = p"
:class="{
'filter-pages-btn--active': filters.pages === p,
'filter-pages-btn--locked': p > session.features.max_pages,
}"
:disabled="p > session.features.max_pages"
:title="p > session.features.max_pages ? 'Upgrade to fetch more pages' : undefined"
@click="p <= session.features.max_pages && (filters.pages = p)"
>{{ p }}</button>
</div>
<p class="filter-pages-hint">{{ pagesHint }}</p>
@ -305,11 +310,13 @@ import { MagnifyingGlassIcon, ExclamationTriangleIcon, BookmarkIcon } from '@her
import { useSearchStore } from '../stores/search'
import type { Listing, TrustScore, SearchFilters, MustIncludeMode } from '../stores/search'
import { useSavedSearchesStore } from '../stores/savedSearches'
import { useSessionStore } from '../stores/session'
import ListingCard from '../components/ListingCard.vue'
const route = useRoute()
const store = useSearchStore()
const savedStore = useSavedSearchesStore()
const session = useSessionStore()
const queryInput = ref('')
// Save search UI state
@ -921,6 +928,12 @@ async function onSearch() {
color: var(--color-text-inverse);
}
.filter-pages-btn--locked,
.filter-pages-btn:disabled {
opacity: 0.35;
cursor: not-allowed;
}
.filter-pages-hint {
font-size: 0.6875rem;
color: var(--color-text-muted);