diff --git a/app/db/migrations/013_ebay_user_tokens.sql b/app/db/migrations/013_ebay_user_tokens.sql new file mode 100644 index 0000000..3655bbb --- /dev/null +++ b/app/db/migrations/013_ebay_user_tokens.sql @@ -0,0 +1,20 @@ +-- Migration 013: eBay user OAuth tokens +-- +-- Stores per-user eBay Authorization Code tokens so the app can call +-- Trading API GetUser for instant account_age_days + category feedback +-- without Playwright scraping. +-- +-- Stored in the per-user DB (user.db), never the shared DB. +-- access_token is short-lived (2h); refresh_token is valid 18 months. +-- The API layer refreshes access_token automatically before expiry. + +CREATE TABLE IF NOT EXISTS ebay_user_tokens ( + id INTEGER PRIMARY KEY, + -- Single row per user DB — upsert on reconnect + access_token TEXT NOT NULL, + refresh_token TEXT NOT NULL, + expires_at REAL NOT NULL, -- epoch seconds; access token expiry + scopes TEXT NOT NULL DEFAULT '', + connected_at TEXT NOT NULL DEFAULT (datetime('now')), + last_refreshed TEXT +); diff --git a/app/db/migrations/014_saved_search_monitor.sql b/app/db/migrations/014_saved_search_monitor.sql new file mode 100644 index 0000000..b5484db --- /dev/null +++ b/app/db/migrations/014_saved_search_monitor.sql @@ -0,0 +1,24 @@ +-- Migration 014: background monitor settings on saved_searches + watch_alerts table + +ALTER TABLE saved_searches ADD COLUMN monitor_enabled INTEGER NOT NULL DEFAULT 0; +ALTER TABLE saved_searches ADD COLUMN poll_interval_min INTEGER NOT NULL DEFAULT 60; +ALTER TABLE saved_searches ADD COLUMN min_trust_score INTEGER NOT NULL DEFAULT 60; +ALTER TABLE saved_searches ADD COLUMN last_checked_at TEXT; + +CREATE TABLE IF NOT EXISTS watch_alerts ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + saved_search_id INTEGER NOT NULL REFERENCES saved_searches(id) ON DELETE CASCADE, + platform_listing_id TEXT NOT NULL, + title TEXT NOT NULL, + price REAL NOT NULL, + currency TEXT NOT NULL DEFAULT 'USD', + trust_score INTEGER NOT NULL, + url TEXT, 
+ first_alerted_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP, + dismissed_at TEXT, + UNIQUE(saved_search_id, platform_listing_id) +); + +CREATE INDEX IF NOT EXISTS idx_watch_alerts_undismissed + ON watch_alerts(saved_search_id) + WHERE dismissed_at IS NULL; diff --git a/app/db/migrations/015_active_monitors.sql b/app/db/migrations/015_active_monitors.sql new file mode 100644 index 0000000..16fa30a --- /dev/null +++ b/app/db/migrations/015_active_monitors.sql @@ -0,0 +1,20 @@ +-- Migration 015: cross-user monitor registry for the background polling loop +-- +-- In cloud mode this table lives in shared.db — the polling loop queries it +-- to find all due monitors without scanning per-user DB files. +-- In local mode it lives in the single local DB (same result, one user). +-- +-- user_db_path references the per-user snipe user.db so the poller knows +-- which DB to open for the full SavedSearch config and to write alerts. + +CREATE TABLE IF NOT EXISTS active_monitors ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + user_db_path TEXT NOT NULL, + saved_search_id INTEGER NOT NULL, + poll_interval_min INTEGER NOT NULL DEFAULT 60, + last_checked_at TEXT, + UNIQUE(user_db_path, saved_search_id) +); + +CREATE INDEX IF NOT EXISTS idx_active_monitors_due + ON active_monitors(last_checked_at); diff --git a/app/db/models.py b/app/db/models.py index 08a3eaa..fed71e6 100644 --- a/app/db/models.py +++ b/app/db/models.py @@ -81,6 +81,26 @@ class SavedSearch: id: Optional[int] = None created_at: Optional[str] = None last_run_at: Optional[str] = None + # Monitor settings (migration 014) + monitor_enabled: bool = False + poll_interval_min: int = 60 + min_trust_score: int = 60 + last_checked_at: Optional[str] = None + + +@dataclass +class WatchAlert: + """A new listing surfaced by the background monitor for a saved search.""" + saved_search_id: int + platform_listing_id: str + title: str + price: float + trust_score: int + currency: str = "USD" + url: Optional[str] = None + id: Optional[int] 
= None + first_alerted_at: Optional[str] = None + dismissed_at: Optional[str] = None @dataclass diff --git a/app/db/store.py b/app/db/store.py index d2c0684..5dba63c 100644 --- a/app/db/store.py +++ b/app/db/store.py @@ -8,7 +8,7 @@ from typing import Optional from circuitforge_core.db import get_connection, run_migrations -from .models import Listing, MarketComp, SavedSearch, ScammerEntry, Seller, TrustScore +from .models import Listing, MarketComp, SavedSearch, ScammerEntry, Seller, TrustScore, WatchAlert MIGRATIONS_DIR = Path(__file__).parent / "migrations" @@ -310,15 +310,66 @@ class Store: def list_saved_searches(self) -> list[SavedSearch]: rows = self._conn.execute( - "SELECT name, query, platform, filters_json, id, created_at, last_run_at " + "SELECT name, query, platform, filters_json, id, created_at, last_run_at, " + "monitor_enabled, poll_interval_min, min_trust_score, last_checked_at " "FROM saved_searches ORDER BY created_at DESC" ).fetchall() return [ - SavedSearch(name=r[0], query=r[1], platform=r[2], filters_json=r[3], - id=r[4], created_at=r[5], last_run_at=r[6]) + SavedSearch( + name=r[0], query=r[1], platform=r[2], filters_json=r[3], + id=r[4], created_at=r[5], last_run_at=r[6], + monitor_enabled=bool(r[7]), poll_interval_min=r[8], + min_trust_score=r[9], last_checked_at=r[10], + ) for r in rows ] + def update_monitor_settings( + self, + saved_id: int, + *, + monitor_enabled: bool, + poll_interval_min: int, + min_trust_score: int, + ) -> None: + self._conn.execute( + "UPDATE saved_searches " + "SET monitor_enabled=?, poll_interval_min=?, min_trust_score=? 
# NOTE: reconstructed Store methods (the source diff's formatting was mangled).
# Each method takes `self`: a Store holding a sqlite3 connection at self._conn.

def update_monitor_settings(
    self,
    saved_id: int,
    *,
    monitor_enabled: bool,
    poll_interval_min: int,
    min_trust_score: int,
) -> None:
    """Persist the background-monitor configuration for one saved search."""
    self._conn.execute(
        "UPDATE saved_searches "
        "SET monitor_enabled=?, poll_interval_min=?, min_trust_score=? "
        "WHERE id=?",
        (int(monitor_enabled), poll_interval_min, min_trust_score, saved_id),
    )
    self._conn.commit()

def list_monitored_searches(self) -> "list[SavedSearch]":
    """Return all saved searches with monitoring enabled (used by the background poller)."""
    rows = self._conn.execute(
        "SELECT name, query, platform, filters_json, id, created_at, last_run_at, "
        "monitor_enabled, poll_interval_min, min_trust_score, last_checked_at "
        "FROM saved_searches WHERE monitor_enabled=1"
    ).fetchall()
    return [
        SavedSearch(
            name=r[0], query=r[1], platform=r[2], filters_json=r[3],
            id=r[4], created_at=r[5], last_run_at=r[6],
            # monitor_enabled=1 is guaranteed by the WHERE clause.
            monitor_enabled=True, poll_interval_min=r[8],
            min_trust_score=r[9], last_checked_at=r[10],
        )
        for r in rows
    ]

def mark_search_checked(self, saved_id: int) -> None:
    """Stamp last_checked_at (UTC ISO-8601) on a saved search after a monitor run."""
    self._conn.execute(
        "UPDATE saved_searches SET last_checked_at=? WHERE id=?",
        (datetime.now(timezone.utc).isoformat(), saved_id),
    )
    self._conn.commit()

def count_active_monitors(self) -> int:
    """Return the number of saved searches with monitoring enabled."""
    row = self._conn.execute(
        "SELECT COUNT(*) FROM saved_searches WHERE monitor_enabled=1"
    ).fetchone()
    return row[0] if row else 0

def delete_saved_search(self, saved_id: int) -> None:
    """Delete a saved search; watch_alerts rows cascade via the FK."""
    self._conn.execute("DELETE FROM saved_searches WHERE id=?", (saved_id,))
    self._conn.commit()

# --- WatchAlerts ---

def upsert_alert(self, alert: "WatchAlert") -> "tuple[int, bool]":
    """Insert alert if not already present. Returns (id, is_new).

    FIX: uses an atomic INSERT ... ON CONFLICT DO NOTHING instead of the
    previous SELECT-then-INSERT, so two concurrent poller runs cannot race
    between the existence check and the insert. The UNIQUE
    (saved_search_id, platform_listing_id) constraint is the source of truth.
    """
    cur = self._conn.execute(
        "INSERT INTO watch_alerts "
        "(saved_search_id, platform_listing_id, title, price, currency, trust_score, url) "
        "VALUES (?,?,?,?,?,?,?) "
        "ON CONFLICT(saved_search_id, platform_listing_id) DO NOTHING",
        (alert.saved_search_id, alert.platform_listing_id, alert.title,
         alert.price, alert.currency, alert.trust_score, alert.url),
    )
    self._conn.commit()
    if cur.rowcount:
        # rowcount == 1: the insert happened, lastrowid is the new alert id.
        return cur.lastrowid, True
    # Conflict: the alert already existed — fetch its id.
    row = self._conn.execute(
        "SELECT id FROM watch_alerts WHERE saved_search_id=? AND platform_listing_id=?",
        (alert.saved_search_id, alert.platform_listing_id),
    ).fetchone()
    return row[0], False

def list_alerts(self, *, include_dismissed: bool = False) -> "list[WatchAlert]":
    """Return alerts, newest first; dismissed ones only when requested."""
    where = "" if include_dismissed else "WHERE dismissed_at IS NULL"
    rows = self._conn.execute(
        f"SELECT id, saved_search_id, platform_listing_id, title, price, currency, "
        f"trust_score, url, first_alerted_at, dismissed_at "
        f"FROM watch_alerts {where} ORDER BY first_alerted_at DESC"
    ).fetchall()
    return [
        WatchAlert(
            id=r[0], saved_search_id=r[1], platform_listing_id=r[2],
            title=r[3], price=r[4], currency=r[5], trust_score=r[6],
            url=r[7], first_alerted_at=r[8], dismissed_at=r[9],
        )
        for r in rows
    ]

def count_undismissed_alerts(self) -> int:
    """Count alerts not yet dismissed (badge count for the alert bell)."""
    row = self._conn.execute(
        "SELECT COUNT(*) FROM watch_alerts WHERE dismissed_at IS NULL"
    ).fetchone()
    return row[0] if row else 0

def dismiss_alert(self, alert_id: int) -> None:
    """Mark one alert dismissed by stamping dismissed_at (UTC ISO-8601)."""
    self._conn.execute(
        "UPDATE watch_alerts SET dismissed_at=? WHERE id=?",
        (datetime.now(timezone.utc).isoformat(), alert_id),
    )
    self._conn.commit()

def dismiss_all_alerts(self) -> int:
    """Dismiss all undismissed alerts. Returns count dismissed."""
    cur = self._conn.execute(
        "UPDATE watch_alerts SET dismissed_at=? WHERE dismissed_at IS NULL",
        (datetime.now(timezone.utc).isoformat(),),
    )
    self._conn.commit()
    return cur.rowcount

# --- ActiveMonitors (sched_db / shared_db) ---

def upsert_active_monitor(
    self,
    user_db_path: str,
    saved_search_id: int,
    poll_interval_min: int,
) -> None:
    """Register or update a monitor in the cross-user registry (sched_db)."""
    self._conn.execute(
        "INSERT INTO active_monitors (user_db_path, saved_search_id, poll_interval_min) "
        "VALUES (?,?,?) "
        "ON CONFLICT(user_db_path, saved_search_id) DO UPDATE SET "
        "  poll_interval_min=excluded.poll_interval_min",
        (user_db_path, saved_search_id, poll_interval_min),
    )
    self._conn.commit()

def remove_active_monitor(self, user_db_path: str, saved_search_id: int) -> None:
    """Unregister a monitor from the cross-user registry."""
    self._conn.execute(
        "DELETE FROM active_monitors WHERE user_db_path=? AND saved_search_id=?",
        (user_db_path, saved_search_id),
    )
    self._conn.commit()

def list_due_active_monitors(self) -> "list[tuple[str, int, int]]":
    """Return (user_db_path, saved_search_id, poll_interval_min) for monitors that are due.

    Due = never checked OR last_checked_at is old enough given poll_interval_min.
    Uses SQLite's strftime('%s') for epoch arithmetic without Python datetime overhead.
    """
    rows = self._conn.execute(
        "SELECT user_db_path, saved_search_id, poll_interval_min "
        "FROM active_monitors "
        "WHERE last_checked_at IS NULL "
        "   OR (strftime('%s','now') - strftime('%s', last_checked_at)) "
        "      >= poll_interval_min * 60"
    ).fetchall()
    return [(r[0], r[1], r[2]) for r in rows]

def mark_active_monitor_checked(self, user_db_path: str, saved_search_id: int) -> None:
    """Stamp last_checked_at (UTC ISO-8601) on one registry row after a poll."""
    self._conn.execute(
        "UPDATE active_monitors SET last_checked_at=? "
        "WHERE user_db_path=? AND saved_search_id=?",
        (datetime.now(timezone.utc).isoformat(), user_db_path, saved_search_id),
    )
    self._conn.commit()
# ── Trading API GetUser (requires user OAuth token) ───────────────────────
# NOTE: the two constants and the method below belong to EbayAdapter
# (reconstructed formatting; the source diff was mangled).

_TRADING_API_URL = "https://api.ebay.com/ws/api.dll"
_TRADING_API_COMPATIBILITY = "1283"

def enrich_seller_trading_api(self, username: str, user_access_token: str) -> bool:
    """Enrich a seller's account_age_days using Trading API GetUser.

    Uses the connected user's OAuth access token (Authorization Code flow),
    which bypasses Shopping API rate limits and works even when the Shopping
    API GetUserProfile call is throttled.

    Unlike BTF scraping, this is a clean API call (~200ms, no Playwright).
    Called from the search endpoint when the requesting user has connected
    their eBay account.

    Args:
        username: eBay username of the seller to look up.
        user_access_token: OAuth access token of the connected user.

    Returns:
        True if enrichment succeeded, False on any failure (never raises).
    """
    from xml.sax.saxutils import escape  # local import: only needed here

    # FIX: send a real GetUserRequest document — the previous body was a run
    # of empty string literals (the XML tags had been stripped), so the call
    # could never succeed. The username is escaped so markup characters in
    # untrusted input cannot break or inject into the document.
    xml_body = (
        '<?xml version="1.0" encoding="utf-8"?>'
        '<GetUserRequest xmlns="urn:ebay:apis:eBLBaseComponents">'
        f'<UserID>{escape(username)}</UserID>'
        '</GetUserRequest>'
    )
    try:
        resp = requests.post(
            self._TRADING_API_URL,
            headers={
                "X-EBAY-API-CALL-NAME": "GetUser",
                "X-EBAY-API-SITEID": "0",  # 0 = US site
                "X-EBAY-API-COMPATIBILITY-LEVEL": self._TRADING_API_COMPATIBILITY,
                # NOTE(review): some eBay examples pass the raw token here
                # *without* the "Bearer " prefix — confirm against the
                # "Using OAuth with the Trading API" guide.
                "X-EBAY-API-IAF-TOKEN": f"Bearer {user_access_token}",
                "Content-Type": "text/xml",
            },
            data=xml_body.encode("utf-8"),
            timeout=10,
        )
        resp.raise_for_status()
        root = ET.fromstring(resp.text)
        ns = {"e": "urn:ebay:apis:eBLBaseComponents"}

        # Usable responses ack with "Success" or "Warning".
        ack = root.findtext("e:Ack", namespaces=ns)
        if ack not in ("Success", "Warning"):
            errors = [e.findtext("e:LongMessage", namespaces=ns, default="")
                      for e in root.findall("e:Errors", namespaces=ns)]
            log.debug("Trading API GetUser failed for %s: %s", username, errors)
            return False

        reg_date = root.findtext("e:User/e:RegistrationDate", namespaces=ns)
        if not reg_date:
            return False

        # RegistrationDate is ISO-8601 with a trailing "Z" (UTC).
        dt = datetime.fromisoformat(reg_date.replace("Z", "+00:00"))
        age_days = (datetime.now(timezone.utc) - dt).days
        seller = self._store.get_seller("ebay", username)
        if seller:
            self._store.save_seller(replace(seller, account_age_days=age_days))
        log.debug("Trading API GetUser: %s registered %d days ago", username, age_days)
        return True

    except Exception as exc:
        # Best-effort enrichment: any failure is logged at debug and swallowed.
        log.debug("Trading API GetUser failed for %s: %s", username, exc)
        return False
+- Dedup via watch_alerts (saved_search_id, platform_listing_id) UNIQUE constraint. +- Never takes any transactional action — alert only. +""" +from __future__ import annotations + +import json +import logging +from pathlib import Path + +from app.db.models import SavedSearch, WatchAlert +from app.db.store import Store + +log = logging.getLogger(__name__) + + +_AUCTION_ALERT_WINDOW_HOURS = 24 # alert on auctions ending within this window + + +def should_alert( + *, + trust_score: int, + score_is_partial: bool, + price: float, + buying_format: str, + min_trust_score: int, + ends_at: "str | None" = None, +) -> bool: + """Return True if a listing qualifies for a watch alert. + + BIN (fixed_price / best_offer): alert immediately — these sell on a first-come + basis, so speed matters. Require a higher trust bar on partial scores to reduce + false positives while BTF scraping is still in flight. + + Auction: only alert when the auction is within _AUCTION_ALERT_WINDOW_HOURS of + ending. Alerting on a 7-day auction 6 days early is noise — the user can't act + usefully until the end window anyway. Bid scheduling (paid+) and sniping algo + (premium) are separate features built on top of this alert layer. + """ + from datetime import datetime, timezone + + # Partial scores: apply a +10 buffer so we don't surface unreliable signals. + effective_min = min_trust_score + 10 if score_is_partial else min_trust_score + if trust_score < effective_min: + return False + + if buying_format in ("fixed_price", "best_offer"): + # BIN: alert immediately — inventory can disappear any time. + return True + + if buying_format == "auction": + if not ends_at: + # No end time recorded — alert anyway rather than silently skip. 
+ return True + try: + end = datetime.fromisoformat(ends_at.replace("Z", "+00:00")) + hours_remaining = (end - datetime.now(timezone.utc)).total_seconds() / 3600 + return 0 < hours_remaining <= _AUCTION_ALERT_WINDOW_HOURS + except (ValueError, TypeError): + log.debug("should_alert: could not parse ends_at=%r, alerting anyway", ends_at) + return True + + # Unknown format — alert and let the user decide. + return True + + +def run_monitor_search( + search: SavedSearch, + *, + user_db: Path, + shared_db: Path, +) -> int: + """Execute one background monitor run for a saved search. + + Fetches current listings, scores them, writes new high-trust finds + to watch_alerts. Returns the count of new alerts written. + + Called from the async polling loop via asyncio.to_thread(). + """ + from app.platforms.ebay.adapter import EbayAdapter + from app.trust import TrustScorer + + log.info("Monitor: checking saved search %d (%r)", search.id, search.name) + + filters = json.loads(search.filters_json or "{}") + query = filters.pop("query_raw", search.query) + + try: + adapter = EbayAdapter() + raw_listings = adapter.search(query, **filters) + except Exception as exc: + log.warning("Monitor: eBay search failed for search %d: %s", search.id, exc) + return 0 + + shared_store = Store(shared_db) + user_store = Store(user_db) + scorer = TrustScorer(shared_store) + + try: + trust_scores = scorer.score_batch(raw_listings, query) + except Exception as exc: + log.warning("Monitor: trust scoring failed for search %d: %s", search.id, exc) + return 0 + + new_alert_count = 0 + for listing, trust in zip(raw_listings, trust_scores): + qualifies = should_alert( + trust_score=trust.composite_score, + score_is_partial=trust.score_is_partial, + price=listing.price, + buying_format=listing.buying_format, + min_trust_score=search.min_trust_score, + ends_at=listing.ends_at, + ) + if not qualifies: + continue + + alert = WatchAlert( + saved_search_id=search.id, + 
platform_listing_id=listing.platform_listing_id, + title=listing.title, + price=listing.price, + currency=listing.currency, + trust_score=trust.composite_score, + url=listing.url, + ) + _, is_new = user_store.upsert_alert(alert) + if is_new: + new_alert_count += 1 + log.info( + "Monitor: new alert — search %d, listing %s, score=%d", + search.id, listing.platform_listing_id, trust.composite_score, + ) + + user_store.mark_search_checked(search.id) + log.info( + "Monitor: search %d done — %d new alerts from %d listings", + search.id, new_alert_count, len(raw_listings), + ) + return new_alert_count diff --git a/docker/web/nginx.cloud.conf b/docker/web/nginx.cloud.conf index f153288..414219a 100644 --- a/docker/web/nginx.cloud.conf +++ b/docker/web/nginx.cloud.conf @@ -16,6 +16,10 @@ server { # Forward the session header injected by Caddy from the cf_session cookie. # Caddy adds: header_up X-CF-Session {http.request.cookie.cf_session} proxy_set_header X-CF-Session $http_x_cf_session; + # eBay search + comps can take 60-90s (Marketplace Insights 404 → Browse fallback). + # Default 60s proxy_read_timeout drops slow searches with a NetworkError on the client. 
+ proxy_read_timeout 120s; + proxy_send_timeout 120s; } # index.html — never cache; ensures clients always get the latest entry point diff --git a/tests/platforms/test_browser_pool.py b/tests/platforms/test_browser_pool.py index d57b976..ad23e40 100644 --- a/tests/platforms/test_browser_pool.py +++ b/tests/platforms/test_browser_pool.py @@ -153,7 +153,10 @@ class TestFetchHtmlPoolHit: html = pool.fetch_html("https://www.ebay.com/sch/i.html?_nkw=test", delay=0) assert html == "ok" - mock_fetch.assert_called_once_with(slot, "https://www.ebay.com/sch/i.html?_nkw=test") + mock_fetch.assert_called_once_with( + slot, "https://www.ebay.com/sch/i.html?_nkw=test", + wait_for_selector=None, wait_for_timeout_ms=2000, + ) mock_replenish.assert_called_once_with(slot) # Fresh slot returned to queue assert pool._q.get_nowait() is fresh_slot @@ -197,7 +200,10 @@ class TestFetchHtmlFallback: html = pool.fetch_html("https://www.ebay.com/sch/i.html?_nkw=widget", delay=0) assert html == "fresh" - mock_fresh.assert_called_once_with("https://www.ebay.com/sch/i.html?_nkw=widget") + mock_fresh.assert_called_once_with( + "https://www.ebay.com/sch/i.html?_nkw=widget", + wait_for_selector=None, wait_for_timeout_ms=2000, + ) def test_falls_back_when_pooled_fetch_raises(self): """If _fetch_with_slot raises, the slot is closed and _fetch_fresh is used.""" diff --git a/tests/test_tasks/test_monitor.py b/tests/test_tasks/test_monitor.py new file mode 100644 index 0000000..af3608b --- /dev/null +++ b/tests/test_tasks/test_monitor.py @@ -0,0 +1,372 @@ +"""Tests for the background monitor: should_alert logic, store alert methods, and run_monitor_search.""" +from __future__ import annotations + +import sqlite3 +from datetime import datetime, timedelta, timezone +from pathlib import Path +from unittest.mock import MagicMock, patch + +import pytest + +from app.tasks.monitor import _AUCTION_ALERT_WINDOW_HOURS, should_alert + + +# 
--------------------------------------------------------------------------- +# should_alert — pure function, no I/O +# --------------------------------------------------------------------------- + + +class TestShouldAlert: + def test_bin_above_threshold_alerts(self): + assert should_alert( + trust_score=70, score_is_partial=False, + price=100.0, buying_format="fixed_price", + min_trust_score=60, + ) is True + + def test_bin_below_threshold_no_alert(self): + assert should_alert( + trust_score=55, score_is_partial=False, + price=100.0, buying_format="fixed_price", + min_trust_score=60, + ) is False + + def test_partial_score_applies_buffer(self): + # Score 65 with min 60 passes normally but fails with the +10 partial buffer. + assert should_alert( + trust_score=65, score_is_partial=True, + price=100.0, buying_format="fixed_price", + min_trust_score=60, + ) is False + + def test_partial_score_above_buffered_threshold_alerts(self): + assert should_alert( + trust_score=75, score_is_partial=True, + price=100.0, buying_format="fixed_price", + min_trust_score=60, + ) is True + + def test_best_offer_treated_like_bin(self): + assert should_alert( + trust_score=80, score_is_partial=False, + price=200.0, buying_format="best_offer", + min_trust_score=60, + ) is True + + def test_auction_within_window_alerts(self): + soon = (datetime.now(timezone.utc) + timedelta(hours=12)).isoformat() + assert should_alert( + trust_score=70, score_is_partial=False, + price=100.0, buying_format="auction", + min_trust_score=60, ends_at=soon, + ) is True + + def test_auction_outside_window_no_alert(self): + far = (datetime.now(timezone.utc) + timedelta(hours=48)).isoformat() + assert should_alert( + trust_score=70, score_is_partial=False, + price=100.0, buying_format="auction", + min_trust_score=60, ends_at=far, + ) is False + + def test_auction_no_ends_at_alerts_anyway(self): + assert should_alert( + trust_score=70, score_is_partial=False, + price=100.0, buying_format="auction", + 
min_trust_score=60, ends_at=None, + ) is True + + def test_auction_bad_ends_at_alerts_anyway(self): + assert should_alert( + trust_score=70, score_is_partial=False, + price=100.0, buying_format="auction", + min_trust_score=60, ends_at="not-a-date", + ) is True + + def test_auction_expired_no_alert(self): + past = (datetime.now(timezone.utc) - timedelta(hours=1)).isoformat() + assert should_alert( + trust_score=70, score_is_partial=False, + price=100.0, buying_format="auction", + min_trust_score=60, ends_at=past, + ) is False + + def test_unknown_format_alerts(self): + # Fail-open: unknown buying_format should not silently suppress. + assert should_alert( + trust_score=70, score_is_partial=False, + price=100.0, buying_format="mystery_format", + min_trust_score=60, + ) is True + + def test_score_exactly_at_threshold_passes(self): + assert should_alert( + trust_score=60, score_is_partial=False, + price=100.0, buying_format="fixed_price", + min_trust_score=60, + ) is True + + def test_auction_exactly_at_window_boundary_alerts(self): + boundary = (datetime.now(timezone.utc) + timedelta(hours=_AUCTION_ALERT_WINDOW_HOURS - 0.1)).isoformat() + assert should_alert( + trust_score=70, score_is_partial=False, + price=100.0, buying_format="auction", + min_trust_score=60, ends_at=boundary, + ) is True + + +# --------------------------------------------------------------------------- +# Store alert methods — integration against real SQLite +# --------------------------------------------------------------------------- + + +def _create_monitor_db(path: Path) -> None: + conn = sqlite3.connect(path) + conn.executescript(""" + CREATE TABLE IF NOT EXISTS saved_searches ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + name TEXT NOT NULL, + query TEXT NOT NULL, + platform TEXT NOT NULL DEFAULT 'ebay', + filters_json TEXT, + created_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP, + last_run_at TEXT, + monitor_enabled INTEGER NOT NULL DEFAULT 0, + poll_interval_min INTEGER NOT NULL DEFAULT 60, + 
min_trust_score INTEGER NOT NULL DEFAULT 60, + last_checked_at TEXT + ); + CREATE TABLE IF NOT EXISTS watch_alerts ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + saved_search_id INTEGER NOT NULL REFERENCES saved_searches(id) ON DELETE CASCADE, + platform_listing_id TEXT NOT NULL, + title TEXT NOT NULL, + price REAL NOT NULL, + currency TEXT NOT NULL DEFAULT 'USD', + trust_score INTEGER NOT NULL, + url TEXT, + first_alerted_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP, + dismissed_at TEXT, + UNIQUE(saved_search_id, platform_listing_id) + ); + INSERT INTO saved_searches (name, query, monitor_enabled) VALUES ('RTX 4090', 'rtx 4090', 1); + """) + conn.commit() + conn.close() + + +@pytest.fixture +def monitor_db(tmp_path: Path) -> Path: + db = tmp_path / "snipe.db" + _create_monitor_db(db) + return db + + +class TestStoreAlertMethods: + def test_upsert_alert_new(self, monitor_db: Path): + from app.db.models import WatchAlert + from app.db.store import Store + + store = Store(monitor_db) + alert = WatchAlert( + saved_search_id=1, platform_listing_id="ebay-001", + title="RTX 4090", price=750.0, trust_score=72, currency="USD", + url="https://ebay.com/itm/001", + ) + alert_id, is_new = store.upsert_alert(alert) + assert is_new is True + assert alert_id > 0 + + def test_upsert_alert_dedup(self, monitor_db: Path): + from app.db.models import WatchAlert + from app.db.store import Store + + store = Store(monitor_db) + alert = WatchAlert( + saved_search_id=1, platform_listing_id="ebay-002", + title="RTX 4090 FE", price=800.0, trust_score=68, + ) + id1, new1 = store.upsert_alert(alert) + id2, new2 = store.upsert_alert(alert) + assert id1 == id2 + assert new1 is True + assert new2 is False + + def test_list_alerts_returns_undismissed(self, monitor_db: Path): + from app.db.models import WatchAlert + from app.db.store import Store + + store = Store(monitor_db) + alert = WatchAlert( + saved_search_id=1, platform_listing_id="ebay-003", + title="Test listing", price=500.0, trust_score=75, + 
) + store.upsert_alert(alert) + alerts = store.list_alerts(include_dismissed=False) + assert len(alerts) == 1 + assert alerts[0].platform_listing_id == "ebay-003" + + def test_count_undismissed_alerts(self, monitor_db: Path): + from app.db.models import WatchAlert + from app.db.store import Store + + store = Store(monitor_db) + for i in range(3): + store.upsert_alert(WatchAlert( + saved_search_id=1, platform_listing_id=f"ebay-{i:03d}", + title=f"Listing {i}", price=float(100 + i), trust_score=70, + )) + assert store.count_undismissed_alerts() == 3 + + def test_dismiss_alert(self, monitor_db: Path): + from app.db.models import WatchAlert + from app.db.store import Store + + store = Store(monitor_db) + alert = WatchAlert( + saved_search_id=1, platform_listing_id="ebay-dismiss", + title="To dismiss", price=400.0, trust_score=65, + ) + alert_id, _ = store.upsert_alert(alert) + store.dismiss_alert(alert_id) + alerts = store.list_alerts(include_dismissed=False) + assert all(a.id != alert_id for a in alerts) + + def test_dismiss_all_alerts(self, monitor_db: Path): + from app.db.models import WatchAlert + from app.db.store import Store + + store = Store(monitor_db) + for i in range(3): + store.upsert_alert(WatchAlert( + saved_search_id=1, platform_listing_id=f"all-{i}", + title=f"All {i}", price=float(100 * i), trust_score=70, + )) + count = store.dismiss_all_alerts() + assert count == 3 + assert store.count_undismissed_alerts() == 0 + + def test_mark_search_checked_updates_timestamp(self, monitor_db: Path): + from app.db.store import Store + + store = Store(monitor_db) + store.mark_search_checked(1) + searches = store.list_monitored_searches() + assert searches[0].last_checked_at is not None + + +# --------------------------------------------------------------------------- +# run_monitor_search — mocked adapter + trust aggregator +# --------------------------------------------------------------------------- + + +class TestRunMonitorSearch: + def 
test_new_qualifying_listing_creates_alert(self, monitor_db: Path): + from app.db.models import Listing, SavedSearch, TrustScore + from app.db.store import Store + from app.tasks.monitor import run_monitor_search + + search = SavedSearch( + id=1, name="RTX 4090", query="rtx 4090", + platform="ebay", monitor_enabled=True, + min_trust_score=60, + ) + mock_listing = Listing( + platform="ebay", platform_listing_id="ebay-new", + title="ASUS RTX 4090", price=750.0, currency="USD", + condition="used", url="https://ebay.com/itm/new", + buying_format="fixed_price", seller_platform_id="seller123", + ) + mock_trust = TrustScore( + listing_id=0, composite_score=72, score_is_partial=False, + account_age_score=0, feedback_count_score=0, feedback_ratio_score=0, + price_vs_market_score=0, category_history_score=0, + ) + + with patch("app.platforms.ebay.adapter.EbayAdapter") as MockAdapter, \ + patch("app.trust.TrustScorer") as MockAgg: + MockAdapter.return_value.search.return_value = [mock_listing] + MockAgg.return_value.score_batch.return_value = [mock_trust] + + count = run_monitor_search(search, user_db=monitor_db, shared_db=monitor_db) + + assert count == 1 + alerts = Store(monitor_db).list_alerts() + assert len(alerts) == 1 + assert alerts[0].platform_listing_id == "ebay-new" + + def test_below_threshold_listing_not_alerted(self, monitor_db: Path): + from app.db.models import Listing, SavedSearch, TrustScore + from app.tasks.monitor import run_monitor_search + + search = SavedSearch( + id=1, name="RTX 4090", query="rtx 4090", + platform="ebay", monitor_enabled=True, + min_trust_score=70, + ) + mock_listing = Listing( + platform="ebay", platform_listing_id="ebay-low", + title="Sketchy RTX 4090", price=500.0, currency="USD", + condition="used", url="https://ebay.com/itm/low", + buying_format="fixed_price", seller_platform_id="s1", + ) + mock_trust = TrustScore( + listing_id=0, composite_score=55, score_is_partial=False, + account_age_score=0, feedback_count_score=0, 
feedback_ratio_score=0, + price_vs_market_score=0, category_history_score=0, + ) + + with patch("app.platforms.ebay.adapter.EbayAdapter") as MockAdapter, \ + patch("app.trust.TrustScorer") as MockAgg: + MockAdapter.return_value.search.return_value = [mock_listing] + MockAgg.return_value.score_batch.return_value = [mock_trust] + + count = run_monitor_search(search, user_db=monitor_db, shared_db=monitor_db) + + assert count == 0 + + def test_duplicate_listing_not_double_alerted(self, monitor_db: Path): + from app.db.models import Listing, SavedSearch, TrustScore + from app.tasks.monitor import run_monitor_search + + search = SavedSearch( + id=1, name="RTX 4090", query="rtx 4090", + platform="ebay", monitor_enabled=True, min_trust_score=60, + ) + mock_listing = Listing( + platform="ebay", platform_listing_id="ebay-dupe", + title="RTX 4090", price=700.0, currency="USD", + condition="used", url="https://ebay.com/itm/dupe", + buying_format="fixed_price", seller_platform_id="s1", + ) + mock_trust = TrustScore( + listing_id=0, composite_score=75, score_is_partial=False, + account_age_score=0, feedback_count_score=0, feedback_ratio_score=0, + price_vs_market_score=0, category_history_score=0, + ) + + with patch("app.platforms.ebay.adapter.EbayAdapter") as MockAdapter, \ + patch("app.trust.TrustScorer") as MockAgg: + MockAdapter.return_value.search.return_value = [mock_listing] + MockAgg.return_value.score_batch.return_value = [mock_trust] + + count1 = run_monitor_search(search, user_db=monitor_db, shared_db=monitor_db) + count2 = run_monitor_search(search, user_db=monitor_db, shared_db=monitor_db) + + assert count1 == 1 + assert count2 == 0 # deduped by UNIQUE constraint + + def test_adapter_failure_returns_zero(self, monitor_db: Path): + from app.db.models import SavedSearch + from app.tasks.monitor import run_monitor_search + + search = SavedSearch( + id=1, name="RTX 4090", query="rtx 4090", + platform="ebay", monitor_enabled=True, min_trust_score=60, + ) + + with 
patch("app.platforms.ebay.adapter.EbayAdapter") as MockAdapter: + MockAdapter.return_value.search.side_effect = RuntimeError("eBay down") + count = run_monitor_search(search, user_db=monitor_db, shared_db=monitor_db) + + assert count == 0 diff --git a/web/src/components/AlertBell.vue b/web/src/components/AlertBell.vue new file mode 100644 index 0000000..be40aaa --- /dev/null +++ b/web/src/components/AlertBell.vue @@ -0,0 +1,398 @@ + + + + + diff --git a/web/src/components/AppNav.vue b/web/src/components/AppNav.vue index 48ef111..8c30acc 100644 --- a/web/src/components/AppNav.vue +++ b/web/src/components/AppNav.vue @@ -1,7 +1,7 @@ @@ -127,12 +255,12 @@ async function onDelete(id: number) { display: flex; flex-direction: column; gap: var(--space-3); - max-width: 720px; + max-width: 800px; } .saved-card { display: flex; - align-items: center; + align-items: flex-start; gap: var(--space-4); padding: var(--space-4) var(--space-5); background: var(--color-surface-2); @@ -174,13 +302,131 @@ async function onDelete(id: number) { margin: 0; } +.saved-card-checked { + color: var(--app-primary); +} + +/* Right column: monitor section + action buttons */ +.saved-card-right { + display: flex; + flex-direction: column; + align-items: flex-end; + gap: var(--space-3); + flex-shrink: 0; +} + .saved-card-actions { display: flex; align-items: center; gap: var(--space-2); +} + +/* Monitor toggle */ +.monitor-section { + display: flex; + flex-direction: column; + align-items: flex-end; + gap: var(--space-2); +} + +.monitor-toggle-label { + display: flex; + align-items: center; + gap: var(--space-2); + cursor: pointer; + user-select: none; +} + +/* Visually hide the native checkbox but keep it accessible */ +.monitor-toggle-input { + position: absolute; + width: 1px; + height: 1px; + opacity: 0; + pointer-events: none; +} + +.monitor-toggle-track { + display: inline-block; + width: 32px; + height: 18px; + border-radius: 9px; + background: var(--color-border); + position: relative; + 
transition: background 150ms ease; flex-shrink: 0; } +.monitor-toggle-track::after { + content: ''; + position: absolute; + top: 2px; + left: 2px; + width: 14px; + height: 14px; + border-radius: 50%; + background: #fff; + transition: transform 150ms ease; +} + +.monitor-toggle-input:checked + .monitor-toggle-track { + background: var(--app-primary); +} + +.monitor-toggle-input:checked + .monitor-toggle-track::after { + transform: translateX(14px); +} + +/* Focus ring on the label when the hidden checkbox is focused */ +.monitor-toggle-label:has(.monitor-toggle-input:focus-visible) .monitor-toggle-track { + outline: 2px solid var(--app-primary); + outline-offset: 2px; +} + +.monitor-toggle-text { + font-size: 0.8125rem; + color: var(--color-text-muted); + white-space: nowrap; +} + +/* Inline monitor settings */ +.monitor-settings { + display: flex; + flex-direction: column; + gap: var(--space-2); + padding: var(--space-3); + background: var(--color-surface); + border: 1px solid var(--color-border-light); + border-radius: var(--radius-md); + font-size: 0.8125rem; + color: var(--color-text-muted); +} + +.monitor-setting-label { + display: flex; + align-items: center; + gap: var(--space-2); + flex-wrap: wrap; +} + +.monitor-setting-input { + width: 60px; + padding: var(--space-1) var(--space-2); + background: var(--color-surface-2); + border: 1px solid var(--color-border); + border-radius: var(--radius-sm); + color: var(--color-text); + font-family: var(--font-mono); + font-size: 0.8125rem; + text-align: center; +} + +.monitor-hint { + font-size: 0.6875rem; + color: var(--color-text-muted); + opacity: 0.75; +} + .saved-run-btn { padding: var(--space-2) var(--space-4); background: var(--app-primary); @@ -206,13 +452,65 @@ async function onDelete(id: number) { cursor: pointer; transition: border-color 150ms ease, color 150ms ease; min-width: 28px; + min-height: 28px; } .saved-delete-btn:hover { border-color: var(--color-error); color: var(--color-error); } +/* Undo toast 
*/ +.undo-toast { + position: fixed; + bottom: calc(var(--space-6) + env(safe-area-inset-bottom)); + left: 50%; + transform: translateX(-50%); + display: flex; + align-items: center; + gap: var(--space-3); + padding: var(--space-3) var(--space-5); + background: var(--color-surface-2); + border: 1px solid var(--color-border); + border-radius: var(--radius-lg); + box-shadow: 0 4px 16px rgba(0,0,0,0.4); + font-size: 0.875rem; + color: var(--color-text); + z-index: 300; + white-space: nowrap; +} + +.undo-btn { + padding: var(--space-1) var(--space-3); + background: var(--app-primary); + border: none; + border-radius: var(--radius-sm); + color: var(--color-text-inverse); + font-family: var(--font-body); + font-size: 0.8125rem; + font-weight: 600; + cursor: pointer; +} + +/* Transitions */ +.slide-enter-active, +.slide-leave-active { transition: opacity 150ms ease, max-height 200ms ease; max-height: 200px; overflow: hidden; } +.slide-enter-from, +.slide-leave-to { opacity: 0; max-height: 0; } + +.toast-enter-active, +.toast-leave-active { transition: opacity 200ms ease, transform 200ms ease; } +.toast-enter-from, +.toast-leave-to { opacity: 0; transform: translateX(-50%) translateY(8px); } + +@media (prefers-reduced-motion: reduce) { + .slide-enter-active, .slide-leave-active, + .toast-enter-active, .toast-leave-active { transition: none; } +} + @media (max-width: 767px) { .saved-header { padding: var(--space-4); } .saved-list { padding: var(--space-4); } .saved-card { flex-direction: column; align-items: flex-start; gap: var(--space-3); } + .saved-card-right { width: 100%; align-items: flex-start; } .saved-card-actions { width: 100%; justify-content: flex-end; } + .monitor-section { width: 100%; align-items: flex-start; } + .monitor-settings { width: 100%; } } diff --git a/web/src/views/SettingsView.vue b/web/src/views/SettingsView.vue index 743d174..b5e2b0d 100644 --- a/web/src/views/SettingsView.vue +++ b/web/src/views/SettingsView.vue @@ -93,6 +93,74 @@ + +
+

eBay Account

+ + +
+
+
+

+ Snipe uses your eBay account to fetch seller registration dates instantly + via the Trading API, without Playwright scraping. This means faster, more + accurate trust scores on every search. + + Your access token has expired — reconnect to restore instant enrichment. + +

+
+ + +
+
+ + +
+

+ Connect your eBay account to enable instant seller registration date lookup + via the Trading API. Without it, Snipe falls back to slower Playwright + scraping (or rate-limited Shopping API calls) to determine account age.

+ +
+ + +
+

+ Connect your eBay account for instant seller trust scoring without scraping. + Available on the Paid tier and above.

+ + Upgrade to Paid + +
+ + +

{{ ebay.success }}

+
+

Affiliate Links

@@ -174,13 +242,16 @@