snipe/app/trust/__init__.py
pyr0ball fb81422c54 feat: snipe beta backlog batch (tickets #22/#28/#30/#34/#35/#36/#37/#38)
Cloud/session:
- fix(_extract_session_token): return "" for non-JWT cookie strings (snipe_guest=uuid was
  triggering 401 → forced login redirect for all unauthenticated cloud visitors)
- fix(affiliate): exclude guest: and anonymous users from pref-store writes (#38)
- fix(market-comp): use enriched comp_query for market comp hash so write/read keys match (#30)

Frontend:
- feat(SearchView): unauthenticated landing strip with free-account CTA (#36)
- feat(SearchView): aria-pressed on filter toggles, aria-label on icon buttons, focus-visible
  rings on all interactive controls, live region for result count (#35)
- feat(SearchView): no-results empty-state hint text (#36)
- feat(SEO): og:image 1200x630, summary_large_image twitter card, canonical link (#37)
- feat(OG): generated og-image.png (dark tactical theme, feature pills) (#37)
- feat(settings): TrustSignalPref view wired to /settings route (#28)
- fix(router): /settings route added; unauthenticated access redirects to home (#34)

CI/CD:
- feat(ci): Forgejo Actions workflow (ruff + pytest + vue-tsc + vitest) (#22)
- feat(ci): mirror workflow (GitHub + Codeberg on push to main/tags) (#22)
- feat(ci): release workflow (Docker build+push + git-cliff changelog) (#22)
- chore: git-cliff config (.cliff.toml) for conventional commit changelog (#22)
- chore(pyproject): dev extras (pytest/ruff/httpx), ruff config with ignore list (#22)

Lint:
- fix: remove 11 unused imports across api/, app/, tests/ (ruff F401 clean)
2026-04-13 19:32:50 -07:00

65 lines
2.5 KiB
Python

import hashlib
import math
from app.db.models import Listing, TrustScore
from app.db.store import Store
from .aggregator import Aggregator
from .metadata import MetadataScorer
from .photo import PhotoScorer
class TrustScorer:
    """Orchestrates metadata + photo scoring for a batch of listings."""

    def __init__(self, shared_store: Store):
        self._store = shared_store
        self._meta = MetadataScorer()
        self._photo = PhotoScorer()
        self._agg = Aggregator()

    def score_batch(
        self,
        listings: list[Listing],
        query: str,
    ) -> list[TrustScore]:
        """Score each listing in *listings* against market data for *query*.

        Looks up the cached market comp keyed by the md5 of *query* (cache
        key only, not a security use), derives a batch-level price dispersion
        signal, flags duplicate photo sets, and delegates per-listing
        aggregation to the Aggregator. Returns one TrustScore per listing,
        in input order.
        """
        digest = hashlib.md5(query.encode()).hexdigest()
        market_comp = self._store.get_market_comp("ebay", digest)
        market_median = market_comp.median_price if market_comp else None

        # Coefficient of variation (population stddev / mean) over the
        # batch's positive prices. Left as None when variance is undefined,
        # i.e. fewer than two priced listings.
        priced = [item.price for item in listings if item.price > 0]
        price_cv: float | None = None
        if len(priced) > 1:
            mean = sum(priced) / len(priced)
            variance = sum((p - mean) ** 2 for p in priced) / len(priced)
            if mean > 0:
                price_cv = math.sqrt(variance) / mean

        dup_flags = self._photo.check_duplicates(
            [item.photo_urls for item in listings]
        )

        results: list[TrustScore] = []
        for item, duplicated in zip(listings, dup_flags):
            seller = self._store.get_seller("ebay", item.seller_platform_id)
            blocked = self._store.is_blocklisted("ebay", item.seller_platform_id)
            if seller:
                signal_scores = self._meta.score(
                    seller, market_median, item.price, price_cv
                )
            else:
                # No seller record: every metadata signal is unknown (None).
                signal_scores = dict.fromkeys(
                    ("account_age", "feedback_count", "feedback_ratio",
                     "price_vs_market", "category_history")
                )
            results.append(self._agg.aggregate(
                signal_scores, duplicated, seller,
                listing_id=item.id or 0,
                listing_title=item.title,
                listing_condition=item.condition,
                times_seen=item.times_seen,
                first_seen_at=item.first_seen_at,
                price=item.price,
                price_at_first_seen=item.price_at_first_seen,
                is_blocklisted=blocked,
            ))
        return results