- api/cloud_session.py: new module — JWT validation (Directus HS256), Heimdall provision+tier-resolve, CloudUser+SessionFeatures dataclasses, compute_features() tier→feature-flag mapping, require_tier() dependency factory, get_session() FastAPI dependency (local-mode transparent passthrough) - api/main.py: remove _DB_PATH singleton; all endpoints receive session via Depends(get_session); shared_store (sellers/comps) and user_store (listings/saved_searches) created per-request from session.shared_db / session.user_db; pages capped to features.max_pages; saved_searches limit enforced for free tier; /api/session endpoint exposes tier+features to frontend; _trigger_scraper_enrichment receives shared_db Path (background thread creates its own Store) - app/platforms/ebay/adapter.py, scraper.py: rename store→shared_store parameter (adapters only touch sellers+comps, never listings — naming reflects this) - app/trust/__init__.py: rename store→shared_store (TrustScorer reads sellers+comps from shared DB; listing staging fields come from caller) - app/db/store.py: refresh_seller_categories gains listing_store param for split-DB mode (reads listings from user_store, writes categories to self) - web/src/stores/session.ts: new Pinia store — bootstrap() fetches /api/session, exposes tier+features reactively; falls back to full-access local defaults - web/src/App.vue: call session.bootstrap() on mount - web/src/views/SearchView.vue: import session store; pages buttons disabled+greyed above features.max_pages with upgrade tooltip - compose.cloud.yml: add CLOUD_MODE=true + CLOUD_DATA_ROOT env; fix volume mount - docker/web/nginx.cloud.conf: forward X-CF-Session header from Caddy to API - .env.example: document cloud env vars (CLOUD_MODE, DIRECTUS_JWT_SECRET, etc.)
60 lines
2.3 KiB
Python
60 lines
2.3 KiB
Python
from .metadata import MetadataScorer
|
|
from .photo import PhotoScorer
|
|
from .aggregator import Aggregator
|
|
from app.db.models import Seller, Listing, TrustScore
|
|
from app.db.store import Store
|
|
import hashlib
|
|
import math
|
|
|
|
|
|
class TrustScorer:
    """Combines metadata and photo signals into trust scores for a batch.

    Reads seller and market-comp records from the shared store; per-listing
    staging fields (times_seen, price_at_first_seen, ...) arrive on the
    Listing objects supplied by the caller.
    """

    def __init__(self, shared_store: Store):
        # Shared-DB handle: this scorer only ever reads sellers and market
        # comps from it, never listings.
        self._store = shared_store
        self._meta = MetadataScorer()
        self._photo = PhotoScorer()
        self._agg = Aggregator()

    @staticmethod
    def _coefficient_of_variation(prices: list[float]) -> float | None:
        """Return stddev/mean over *prices*, or None when it can't be computed.

        Needs at least two data points (otherwise variance is undefined) and
        a positive mean (guards the division).
        """
        if len(prices) < 2:
            return None
        mean = sum(prices) / len(prices)
        if mean <= 0:
            return None
        # Population variance (divide by N, matching the batch-statistic use).
        variance = sum((p - mean) ** 2 for p in prices) / len(prices)
        return math.sqrt(variance) / mean

    def score_batch(
        self,
        listings: list[Listing],
        query: str,
    ) -> list[TrustScore]:
        """Score every listing in *listings* against market data for *query*.

        Returns one TrustScore per listing, in input order.
        """
        # Market comps are keyed by an MD5 digest of the query text — a cache
        # key, not a security boundary.
        digest = hashlib.md5(query.encode()).hexdigest()
        market_comp = self._store.get_market_comp("ebay", digest)
        market_median = market_comp.median_price if market_comp else None

        # Batch-level price dispersion; unpriced (<= 0) listings are excluded.
        price_cv = self._coefficient_of_variation(
            [item.price for item in listings if item.price > 0]
        )

        # Duplicate-photo detection is a cross-listing check, so it runs on
        # the whole batch at once.
        dup_flags = self._photo.check_duplicates(
            [item.photo_urls for item in listings]
        )

        results = []
        for listing, is_dup in zip(listings, dup_flags):
            seller = self._store.get_seller("ebay", listing.seller_platform_id)
            if seller:
                signals = self._meta.score(seller, market_median, listing.price, price_cv)
            else:
                # Unknown seller: every metadata signal is explicitly absent.
                signals = dict.fromkeys(
                    ["account_age", "feedback_count", "feedback_ratio",
                     "price_vs_market", "category_history"])
            results.append(self._agg.aggregate(
                signals, is_dup, seller,
                listing_id=listing.id or 0,
                listing_title=listing.title,
                times_seen=listing.times_seen,
                first_seen_at=listing.first_seen_at,
                price=listing.price,
                price_at_first_seen=listing.price_at_first_seen,
            ))
        return results
|