Merge pull request 'feat(tasks): add vision task scheduler for trust photo analysis' (#14) from feature/shared-task-scheduler into main

This commit is contained in:
pyr0ball 2026-04-03 21:41:11 -07:00
commit d5419d2b1b
30 changed files with 1841 additions and 105 deletions

View file

@ -159,6 +159,7 @@ docker compose -f compose.cloud.yml -p snipe-cloud build api # after Python cha
Online auctions are frustrating because:
- Winning requires being present at the exact closing moment — sometimes 2 AM
- Platforms vary wildly: some allow proxy bids, some don't; closing times extend on activity
- Scammers exploit auction urgency — new accounts, stolen photos, pressure to pay outside platform
- Price history is hidden — you don't know if an item is underpriced or a trap
- Sellers hide damage in descriptions rather than titles to avoid automated filters
- Shipping logistics for large / fragile antiques require coordination with the auction house

View file

@ -26,6 +26,7 @@ from dataclasses import dataclass
from pathlib import Path
from typing import Optional
import jwt as pyjwt
import requests
from fastapi import Depends, HTTPException, Request
@ -109,7 +110,6 @@ def validate_session_jwt(token: str) -> str:
Directus 11+ uses 'id' (not 'sub') for the user UUID in its JWT payload.
"""
try:
import jwt as pyjwt
payload = pyjwt.decode(
token,
DIRECTUS_JWT_SECRET,

View file

@ -6,15 +6,19 @@ import hashlib
import logging
import os
from concurrent.futures import ThreadPoolExecutor
from contextlib import asynccontextmanager
from pathlib import Path
from fastapi import Depends, FastAPI, HTTPException
import csv
import io
from fastapi import Depends, FastAPI, HTTPException, UploadFile, File
from fastapi.responses import StreamingResponse
from pydantic import BaseModel
from fastapi.middleware.cors import CORSMiddleware
from circuitforge_core.config import load_env
from app.db.store import Store
from app.db.models import SavedSearch as SavedSearchModel
from app.db.models import SavedSearch as SavedSearchModel, ScammerEntry
from app.platforms import SearchFilters
from app.platforms.ebay.scraper import ScrapedEbayAdapter
from app.platforms.ebay.adapter import EbayAdapter
@ -28,6 +32,22 @@ load_env(Path(".env"))
log = logging.getLogger(__name__)
@asynccontextmanager
async def _lifespan(app: FastAPI):
    """FastAPI lifespan hook: start the background task scheduler on startup
    and stop it cleanly on application shutdown.
    """
    # Start vision/LLM background task scheduler.
    # background_tasks queue lives in shared_db (cloud) or local_db (local)
    # so the scheduler has a single stable DB path across all cloud users.
    from app.tasks.scheduler import get_scheduler, reset_scheduler
    from api.cloud_session import CLOUD_MODE, _LOCAL_SNIPE_DB, _shared_db_path
    sched_db = _shared_db_path() if CLOUD_MODE else _LOCAL_SNIPE_DB
    get_scheduler(sched_db)  # creates the process-level scheduler singleton
    log.info("Snipe task scheduler started (db=%s)", sched_db)
    yield  # app serves requests while suspended here
    # Give in-flight tasks up to 10s to drain, then drop the singleton so a
    # fresh scheduler can be created on the next startup (tests / reload).
    get_scheduler(sched_db).shutdown(timeout=10.0)
    reset_scheduler()
    log.info("Snipe task scheduler stopped.")
def _ebay_creds() -> tuple[str, str, str]:
"""Return (client_id, client_secret, env) from env vars.
@ -43,7 +63,7 @@ def _ebay_creds() -> tuple[str, str, str]:
client_secret = (os.environ.get("EBAY_CERT_ID") or os.environ.get("EBAY_CLIENT_SECRET", "")).strip()
return client_id, client_secret, env
# Single FastAPI instance for the whole process; the lifespan hook above
# owns scheduler startup/shutdown. (The previous bare FastAPI(...) assignment
# on the line above was immediately shadowed by this one — dead code removed.)
app = FastAPI(title="Snipe API", version="0.1.0", lifespan=_lifespan)
app.include_router(ebay_webhook_router)
app.add_middleware(
@ -111,7 +131,7 @@ def _trigger_scraper_enrichment(
seller = shared_store.get_seller("ebay", sid)
if not seller:
continue
if (seller.account_age_days is None
if ((seller.account_age_days is None or seller.feedback_count == 0)
and sid not in needs_btf
and len(needs_btf) < _BTF_MAX_PER_SEARCH):
needs_btf[sid] = listing.platform_listing_id
@ -145,6 +165,55 @@ def _trigger_scraper_enrichment(
t.start()
def _enqueue_vision_tasks(
    listings: list,
    trust_scores_list: list,
    session: "CloudUser",
) -> None:
    """Enqueue trust_photo_analysis tasks for listings with photos.

    Runs fire-and-forget: tasks land in the scheduler queue and the response
    returns immediately. Results are written back to trust_scores.photo_analysis_json
    by the runner when the vision LLM completes.

    session.shared_db: where background_tasks lives (scheduler's DB).
    session.user_db: encoded in params so the runner writes to the right
        trust_scores table in cloud mode.
    """
    import json as _json
    from app.tasks.runner import insert_task
    from app.tasks.scheduler import get_scheduler
    from api.cloud_session import CLOUD_MODE, _shared_db_path, _LOCAL_SNIPE_DB

    sched_db = _shared_db_path() if CLOUD_MODE else _LOCAL_SNIPE_DB
    sched = get_scheduler(sched_db)
    enqueued = 0
    # zip keeps the two parallel lists in lockstep; the score object itself is
    # not needed here (rows were persisted by the caller), so it is ignored.
    # (Was bound to an unused name `ts` before.)
    for listing, _score in zip(listings, trust_scores_list):
        # No photo → nothing to analyze; no DB id → nothing to key the task on.
        if not listing.photo_urls or not listing.id:
            continue
        params = _json.dumps({
            "photo_url": listing.photo_urls[0],
            "listing_title": listing.title,
            "user_db": str(session.user_db),
        })
        # insert_task dedups on (task_type, job_id) while a task is in flight.
        task_id, is_new = insert_task(
            sched_db, "trust_photo_analysis", job_id=listing.id, params=params
        )
        if not is_new:
            continue
        if sched.enqueue(task_id, "trust_photo_analysis", listing.id, params):
            enqueued += 1
        else:
            log.warning(
                "Vision task queue full — dropped task for listing %s",
                listing.platform_listing_id,
            )
    if enqueued:
        log.info("Enqueued %d vision analysis task(s)", enqueued)
def _parse_terms(raw: str) -> list[str]:
"""Split a comma-separated keyword string into non-empty, stripped terms."""
return [t.strip() for t in raw.split(",") if t.strip()]
@ -293,6 +362,14 @@ def search(
scorer = TrustScorer(shared_store)
trust_scores_list = scorer.score_batch(listings, q)
# Persist trust scores so background vision tasks have a row to UPDATE.
user_store.save_trust_scores(trust_scores_list)
# Enqueue vision analysis for listings with photos — Paid tier and above.
features = compute_features(session.tier)
if features.photo_analysis:
_enqueue_vision_tasks(listings, trust_scores_list, session)
query_hash = hashlib.md5(q.encode()).hexdigest()
comp = shared_store.get_market_comp("ebay", query_hash)
market_price = comp.median_price if comp else None
@ -359,7 +436,9 @@ def enrich_seller(
pass # no API creds — fall through to BTF
seller_obj = shared_store.get_seller("ebay", seller)
needs_btf = seller_obj is not None and seller_obj.account_age_days is None
needs_btf = seller_obj is not None and (
seller_obj.account_age_days is None or seller_obj.feedback_count == 0
)
needs_categories = seller_obj is None or seller_obj.category_history_json in ("{}", "", None)
# Slow path: Playwright for remaining gaps (BTF + _ssn in parallel threads).
@ -458,3 +537,95 @@ def delete_saved_search(saved_id: int, session: CloudUser = Depends(get_session)
def mark_saved_search_run(saved_id: int, session: CloudUser = Depends(get_session)):
    """Record that the saved search was just executed (bumps last_run_at)."""
    Store(session.user_db).update_saved_search_last_run(saved_id)
    return {"ok": True}
# ── Scammer Blocklist ─────────────────────────────────────────────────────────
# Blocklist lives in shared_db: all users on a shared cloud instance see the
# same community blocklist. In local (single-user) mode shared_db == user_db.


class BlocklistAdd(BaseModel):
    """Request body for POST /api/blocklist."""

    platform: str = "ebay"
    platform_seller_id: str  # platform-native seller id (stable upsert key)
    username: str            # seller display name at time of blocking
    reason: str = ""         # free-text; empty string is stored as NULL
@app.get("/api/blocklist")
def list_blocklist(session: CloudUser = Depends(get_session)):
    """Return all shared-DB blocklist entries as plain dicts."""
    store = Store(session.shared_db)
    return {"entries": [dataclasses.asdict(e) for e in store.list_blocklist()]}
@app.post("/api/blocklist", status_code=201)
def add_to_blocklist(body: BlocklistAdd, session: CloudUser = Depends(get_session)):
    """Add (or upsert) a seller on the shared blocklist with source='manual'."""
    store = Store(session.shared_db)
    entry = store.add_to_blocklist(ScammerEntry(
        platform=body.platform,
        platform_seller_id=body.platform_seller_id,
        username=body.username,
        reason=body.reason or None,  # empty reason → NULL in the DB
        source="manual",
    ))
    # Returns the saved entry including the DB-assigned id / created_at.
    return dataclasses.asdict(entry)
@app.delete("/api/blocklist/{platform_seller_id}", status_code=204)
def remove_from_blocklist(platform_seller_id: str, session: CloudUser = Depends(get_session)):
    """Remove a seller from the shared blocklist (204 even when absent)."""
    Store(session.shared_db).remove_from_blocklist("ebay", platform_seller_id)
@app.get("/api/blocklist/export")
def export_blocklist(session: CloudUser = Depends(get_session)):
    """Download the blocklist as a CSV file.

    Builds the CSV in memory (blocklists are small) and streams it with a
    Content-Disposition header so browsers save it as a file.
    """
    entries = Store(session.shared_db).list_blocklist()
    buf = io.StringIO()
    writer = csv.writer(buf)
    writer.writerow(["platform", "platform_seller_id", "username", "reason", "source", "created_at"])
    for e in entries:
        writer.writerow([e.platform, e.platform_seller_id, e.username,
                         e.reason or "", e.source, e.created_at or ""])
    # StringIO.getvalue() returns the whole buffer regardless of stream
    # position, so the previous buf.seek(0) was dead code — removed.
    return StreamingResponse(
        iter([buf.getvalue()]),
        media_type="text/csv",
        headers={"Content-Disposition": "attachment; filename=snipe-blocklist.csv"},
    )
@app.post("/api/blocklist/import", status_code=201)
async def import_blocklist(
    file: UploadFile = File(...),
    session: CloudUser = Depends(get_session),
):
    """Import a CSV blocklist. Columns: platform_seller_id, username, reason (optional).

    Rows missing a required column are skipped and reported in 'errors';
    valid rows are upserted with source='csv_import'. Row numbers in error
    messages are 1-based counting the header line (hence start=2).
    """
    content = await file.read()
    try:
        text = content.decode("utf-8-sig")  # handle BOM from Excel exports
    except UnicodeDecodeError:
        raise HTTPException(status_code=400, detail="File must be UTF-8 encoded")
    store = Store(session.shared_db)
    imported = 0
    errors: list[str] = []
    reader = csv.DictReader(io.StringIO(text))
    # Accept both full-export format (has 'platform' col) and simple format (no 'platform' col).
    for i, row in enumerate(reader, start=2):
        seller_id = (row.get("platform_seller_id") or "").strip()
        username = (row.get("username") or "").strip()
        if not seller_id or not username:
            errors.append(f"Row {i}: missing platform_seller_id or username — skipped")
            continue
        platform = (row.get("platform") or "ebay").strip()
        reason = (row.get("reason") or "").strip() or None
        store.add_to_blocklist(ScammerEntry(
            platform=platform,
            platform_seller_id=seller_id,
            username=username,
            reason=reason,
            source="csv_import",
        ))
        imported += 1
    log.info("Blocklist import: %d added, %d errors", imported, len(errors))
    return {"imported": imported, "errors": errors}

View file

@ -0,0 +1,13 @@
-- Community scammer blocklist, shared across all users of an instance.
CREATE TABLE IF NOT EXISTS scammer_blocklist (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    platform TEXT NOT NULL,                -- e.g. 'ebay'
    platform_seller_id TEXT NOT NULL,      -- platform-native seller id
    username TEXT NOT NULL,
    reason TEXT,                           -- optional free-text note
    source TEXT NOT NULL DEFAULT 'manual', -- manual | csv_import | community
    created_at TEXT DEFAULT CURRENT_TIMESTAMP,
    UNIQUE(platform, platform_seller_id)   -- upsert key for add_to_blocklist()
);

-- NOTE(review): the UNIQUE constraint above already creates an implicit index
-- on (platform, platform_seller_id), so this explicit index is redundant
-- (harmless, but could be dropped).
CREATE INDEX IF NOT EXISTS idx_scammer_blocklist_lookup
    ON scammer_blocklist(platform, platform_seller_id);

View file

@ -0,0 +1,24 @@
-- 007_background_tasks.sql
-- Shared background task queue used by the LLM/vision task scheduler.
-- Schema mirrors the circuitforge-core standard.
-- Also adds UNIQUE constraint on trust_scores(listing_id) so save_trust_scores()
-- can use ON CONFLICT upsert semantics.

CREATE TABLE IF NOT EXISTS background_tasks (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    task_type TEXT NOT NULL,               -- e.g. 'trust_photo_analysis'
    job_id INTEGER NOT NULL DEFAULT 0,     -- domain id (listing id for vision tasks)
    status TEXT NOT NULL DEFAULT 'queued', -- queued | running | completed | failed
    params TEXT,                           -- JSON blob of task parameters
    error TEXT,                            -- failure message when status='failed'
    stage TEXT,                            -- optional progress marker
    created_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
    updated_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP
);

-- The runner dedups/polls by (status, task_type); index keeps that scan cheap.
CREATE INDEX IF NOT EXISTS idx_bg_tasks_status_type
    ON background_tasks (status, task_type);

-- Enable ON CONFLICT upsert in save_trust_scores() — idempotent on existing DBs.
CREATE UNIQUE INDEX IF NOT EXISTS idx_trust_scores_listing
    ON trust_scores (listing_id);

View file

@ -82,6 +82,18 @@ class SavedSearch:
last_run_at: Optional[str] = None
@dataclass
class ScammerEntry:
    """A seller manually or community-flagged as a known scammer."""

    platform: str                     # e.g. "ebay"
    platform_seller_id: str           # platform-native id; unique per platform
    username: str
    reason: Optional[str] = None      # free-text note; None when not given
    source: str = "manual"            # "manual" | "csv_import" | "community"
    id: Optional[int] = None          # DB-assigned; None before first insert
    created_at: Optional[str] = None  # DB-assigned timestamp
@dataclass
class PhotoHash:
"""Perceptual hash store for cross-search dedup (v0.2+). Schema scaffolded in v0.1."""

View file

@ -7,15 +7,18 @@ from typing import Optional
from circuitforge_core.db import get_connection, run_migrations
from .models import Listing, Seller, TrustScore, MarketComp, SavedSearch
from .models import Listing, Seller, TrustScore, MarketComp, SavedSearch, ScammerEntry
MIGRATIONS_DIR = Path(__file__).parent / "migrations"
class Store:
    """SQLite-backed persistence layer for Snipe listings/sellers/scores."""

    def __init__(self, db_path: Path):
        # Keep the path so worker threads can open their own Store/connection.
        self._db_path = db_path
        self._conn = get_connection(db_path)
        run_migrations(self._conn, MIGRATIONS_DIR)
        # WAL mode: allows concurrent readers + one writer without blocking
        self._conn.execute("PRAGMA journal_mode=WAL")
# --- Seller ---
@ -35,11 +38,26 @@ class Store:
self.save_sellers([seller])
def save_sellers(self, sellers: list[Seller]) -> None:
# COALESCE preserves enriched signals (account_age_days, category_history_json)
# that were filled by BTF / _ssn passes — never overwrite with NULL from a
# fresh search page that doesn't carry those signals.
self._conn.executemany(
"INSERT OR REPLACE INTO sellers "
"INSERT INTO sellers "
"(platform, platform_seller_id, username, account_age_days, "
"feedback_count, feedback_ratio, category_history_json) "
"VALUES (?,?,?,?,?,?,?)",
"VALUES (?,?,?,?,?,?,?) "
"ON CONFLICT(platform, platform_seller_id) DO UPDATE SET "
" username = excluded.username, "
" feedback_count = excluded.feedback_count, "
" feedback_ratio = excluded.feedback_ratio, "
" account_age_days = COALESCE(excluded.account_age_days, sellers.account_age_days), "
" category_history_json = COALESCE("
" CASE WHEN excluded.category_history_json IN ('{}', '', NULL) THEN NULL "
" ELSE excluded.category_history_json END, "
" CASE WHEN sellers.category_history_json IN ('{}', '', NULL) THEN NULL "
" ELSE sellers.category_history_json END, "
" '{}'"
" )",
[
(s.platform, s.platform_seller_id, s.username, s.account_age_days,
s.feedback_count, s.feedback_ratio, s.category_history_json)
@ -224,6 +242,43 @@ class Store:
price_at_first_seen=row[17],
)
    # --- TrustScore ---

    def save_trust_scores(self, scores: list[TrustScore]) -> None:
        """Upsert trust scores keyed by listing_id.

        photo_analysis_json is preserved on conflict so background vision
        results written by the task runner are never overwritten by a re-score.
        Requires idx_trust_scores_listing UNIQUE index (migration 007).
        Scores without a listing_id are skipped — there is nothing to key on.
        """
        self._conn.executemany(
            "INSERT INTO trust_scores "
            "(listing_id, composite_score, account_age_score, feedback_count_score, "
            "feedback_ratio_score, price_vs_market_score, category_history_score, "
            "photo_hash_duplicate, red_flags_json, score_is_partial) "
            "VALUES (?,?,?,?,?,?,?,?,?,?) "
            "ON CONFLICT(listing_id) DO UPDATE SET "
            "  composite_score = excluded.composite_score, "
            "  account_age_score = excluded.account_age_score, "
            "  feedback_count_score = excluded.feedback_count_score, "
            "  feedback_ratio_score = excluded.feedback_ratio_score, "
            "  price_vs_market_score = excluded.price_vs_market_score, "
            "  category_history_score= excluded.category_history_score, "
            "  photo_hash_duplicate = excluded.photo_hash_duplicate, "
            "  red_flags_json = excluded.red_flags_json, "
            "  score_is_partial = excluded.score_is_partial, "
            "  scored_at = CURRENT_TIMESTAMP",
            # photo_analysis_json intentionally omitted — runner owns that column
            [
                # booleans stored as 0/1 ints for SQLite
                (s.listing_id, s.composite_score, s.account_age_score,
                 s.feedback_count_score, s.feedback_ratio_score,
                 s.price_vs_market_score, s.category_history_score,
                 int(s.photo_hash_duplicate), s.red_flags_json, int(s.score_is_partial))
                for s in scores if s.listing_id
            ],
        )
        self._conn.commit()
# --- MarketComp ---
def save_market_comp(self, comp: MarketComp) -> None:
@ -274,6 +329,58 @@ class Store:
)
self._conn.commit()
    # --- ScammerBlocklist ---

    def add_to_blocklist(self, entry: ScammerEntry) -> ScammerEntry:
        """Upsert a seller into the blocklist. Returns the saved entry with id and created_at.

        On conflict, username and source are refreshed; reason is only
        overwritten when the incoming entry actually carries one (COALESCE).
        """
        self._conn.execute(
            "INSERT INTO scammer_blocklist "
            "(platform, platform_seller_id, username, reason, source) "
            "VALUES (?,?,?,?,?) "
            "ON CONFLICT(platform, platform_seller_id) DO UPDATE SET "
            "  username = excluded.username, "
            "  reason = COALESCE(excluded.reason, scammer_blocklist.reason), "
            "  source = excluded.source",
            (entry.platform, entry.platform_seller_id, entry.username,
             entry.reason, entry.source),
        )
        self._conn.commit()
        # Re-read to pick up the DB-assigned id / created_at — the upsert may
        # have hit an existing row whose id the caller does not know.
        row = self._conn.execute(
            "SELECT id, created_at FROM scammer_blocklist "
            "WHERE platform=? AND platform_seller_id=?",
            (entry.platform, entry.platform_seller_id),
        ).fetchone()
        from dataclasses import replace
        return replace(entry, id=row[0], created_at=row[1])
def remove_from_blocklist(self, platform: str, platform_seller_id: str) -> None:
self._conn.execute(
"DELETE FROM scammer_blocklist WHERE platform=? AND platform_seller_id=?",
(platform, platform_seller_id),
)
self._conn.commit()
def is_blocklisted(self, platform: str, platform_seller_id: str) -> bool:
row = self._conn.execute(
"SELECT 1 FROM scammer_blocklist WHERE platform=? AND platform_seller_id=? LIMIT 1",
(platform, platform_seller_id),
).fetchone()
return row is not None
def list_blocklist(self, platform: str = "ebay") -> list[ScammerEntry]:
rows = self._conn.execute(
"SELECT platform, platform_seller_id, username, reason, source, id, created_at "
"FROM scammer_blocklist WHERE platform=? ORDER BY created_at DESC",
(platform,),
).fetchall()
return [
ScammerEntry(
platform=r[0], platform_seller_id=r[1], username=r[2],
reason=r[3], source=r[4], id=r[5], created_at=r[6],
)
for r in rows
]
def get_market_comp(self, platform: str, query_hash: str) -> Optional[MarketComp]:
row = self._conn.execute(
"SELECT platform, query_hash, median_price, sample_count, expires_at, id, fetched_at "

View file

@ -32,6 +32,9 @@ EBAY_SEARCH_URL = "https://www.ebay.com/sch/i.html"
EBAY_ITEM_URL = "https://www.ebay.com/itm/"
_HTML_CACHE_TTL = 300 # seconds — 5 minutes
_JOINED_RE = re.compile(r"Joined\s+(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)\w*\s+(\d{4})", re.I)
# Matches "username (1,234) 99.1% positive feedback" on /itm/ listing pages.
# Capture groups: 1=raw_count ("1,234"), 2=ratio_pct ("99.1").
_ITEM_FEEDBACK_RE = re.compile(r'\((\d[\d,]*)\)\s*([\d.]+)%\s*positive', re.I)
_MONTH_MAP = {m: i+1 for i, m in enumerate(
["Jan","Feb","Mar","Apr","May","Jun","Jul","Aug","Sep","Oct","Nov","Dec"]
)}
@ -371,6 +374,23 @@ class ScrapedEbayAdapter(PlatformAdapter):
except ValueError:
return None
@staticmethod
def _parse_feedback_from_item(html: str) -> tuple[Optional[int], Optional[float]]:
"""Parse feedback count and ratio from a listing page seller card.
Matches 'username (1,234) 99.1% positive feedback'.
Returns (count, ratio) or (None, None) if not found.
"""
m = _ITEM_FEEDBACK_RE.search(html)
if not m:
return None, None
try:
count = int(m.group(1).replace(",", ""))
ratio = float(m.group(2)) / 100.0
return count, ratio
except ValueError:
return None, None
def enrich_sellers_btf(
self,
seller_to_listing: dict[str, str],
@ -387,19 +407,38 @@ class ScrapedEbayAdapter(PlatformAdapter):
Does not raise — failures per-seller are silently skipped so the main
search response is never blocked.
"""
db_path = self._store._db_path # capture for thread-local Store creation
def _enrich_one(item: tuple[str, str]) -> None:
seller_id, listing_id = item
try:
html = self._fetch_item_html(listing_id)
age_days = self._parse_joined_date(html)
if age_days is not None:
seller = self._store.get_seller("ebay", seller_id)
if seller:
fb_count, fb_ratio = self._parse_feedback_from_item(html)
log.debug(
"BTF enrich: seller=%s age_days=%s feedback=%s ratio=%s",
seller_id, age_days, fb_count, fb_ratio,
)
if age_days is None and fb_count is None:
return # nothing new to write
thread_store = Store(db_path)
seller = thread_store.get_seller("ebay", seller_id)
if not seller:
log.warning("BTF enrich: seller %s not found in DB", seller_id)
return
from dataclasses import replace
updated = replace(seller, account_age_days=age_days)
self._store.save_seller(updated)
except Exception:
pass # non-fatal: partial score is better than a crashed enrichment
updates: dict = {}
if age_days is not None:
updates["account_age_days"] = age_days
# Only overwrite feedback if the listing page found a real value —
# prefer a fresh count over a 0 that came from a failed search parse.
if fb_count is not None:
updates["feedback_count"] = fb_count
if fb_ratio is not None:
updates["feedback_ratio"] = fb_ratio
thread_store.save_seller(replace(seller, **updates))
except Exception as exc:
log.warning("BTF enrich failed for %s/%s: %s", seller_id, listing_id, exc)
with ThreadPoolExecutor(max_workers=max_workers) as ex:
list(ex.map(_enrich_one, seller_to_listing.items()))

0
app/tasks/__init__.py Normal file
View file

171
app/tasks/runner.py Normal file
View file

@ -0,0 +1,171 @@
# app/tasks/runner.py
"""Snipe background task runner.
Implements the run_task_fn interface expected by circuitforge_core.tasks.scheduler.
Current task types:
trust_photo_analysis — download primary photo, run vision LLM, write
result to trust_scores.photo_analysis_json (Paid tier).
Prompt note: The vision prompt is a functional first pass. Tune against real
eBay listings before GA — specifically stock-photo vs genuine-product distinction
and the damage vocabulary.
"""
from __future__ import annotations
import base64
import json
import logging
from pathlib import Path
import requests
from circuitforge_core.db import get_connection
from circuitforge_core.llm import LLMRouter
log = logging.getLogger(__name__)
LLM_TASK_TYPES: frozenset[str] = frozenset({"trust_photo_analysis"})
VRAM_BUDGETS: dict[str, float] = {
# moondream2 / vision-capable LLM — single image, short response
"trust_photo_analysis": 2.0,
}
_VISION_SYSTEM_PROMPT = (
"You are an expert at evaluating eBay listing photos for authenticity and condition. "
"Respond ONLY with a JSON object containing these exact keys:\n"
" is_stock_photo: bool — true if this looks like a manufacturer/marketing image\n"
" visible_damage: bool — true if scratches, dents, cracks, or defects are visible\n"
" authenticity_signal: string — one of 'genuine_product_photo', 'stock_photo', 'unclear'\n"
" confidence: string — one of 'high', 'medium', 'low'\n"
"No explanation outside the JSON object."
)
def insert_task(
    db_path: Path,
    task_type: str,
    job_id: int,
    *,
    params: str | None = None,
) -> tuple[int, bool]:
    """Insert a background task if no identical task is already in-flight.

    Dedup key is (task_type, job_id) over rows in 'queued'/'running' state,
    so re-running a search cannot pile up duplicate vision work.

    Uses get_connection() so WAL mode and timeout=30 apply same as all other
    Snipe DB access. Returns (task_id, is_new).
    """
    # Plain stdlib import instead of the previous __import__("sqlite3") hack.
    import sqlite3

    conn = get_connection(db_path)
    conn.row_factory = sqlite3.Row  # allow existing["id"] access below
    try:
        existing = conn.execute(
            "SELECT id FROM background_tasks "
            "WHERE task_type=? AND job_id=? AND status IN ('queued','running')",
            (task_type, job_id),
        ).fetchone()
        if existing:
            return existing["id"], False
        cursor = conn.execute(
            "INSERT INTO background_tasks (task_type, job_id, params) VALUES (?,?,?)",
            (task_type, job_id, params),
        )
        conn.commit()
        return cursor.lastrowid, True
    finally:
        conn.close()
def _update_task_status(
    db_path: Path, task_id: int, status: str, *, error: str = ""
) -> None:
    """Set a task row's status (and optional error text), bumping updated_at.

    NOTE(review): relies on the connection context manager committing on
    success — confirm get_connection() returns a plain sqlite3.Connection
    (whose `with` block commits but does not close the connection).
    """
    with get_connection(db_path) as conn:
        conn.execute(
            "UPDATE background_tasks "
            "SET status=?, error=?, updated_at=CURRENT_TIMESTAMP WHERE id=?",
            (status, error, task_id),
        )
def run_task(
    db_path: Path,
    task_id: int,
    task_type: str,
    job_id: int,
    params: str | None = None,
) -> None:
    """Execute one background task on behalf of the scheduler's batch worker.

    Marks the row 'running' first, then 'completed' on success or 'failed'
    (with the exception text) on any error. Never raises to the caller.
    """
    _update_task_status(db_path, task_id, "running")
    try:
        handler = {"trust_photo_analysis": _run_trust_photo_analysis}.get(task_type)
        if handler is None:
            raise ValueError(f"Unknown snipe task type: {task_type!r}")
        handler(db_path, job_id, params)
        _update_task_status(db_path, task_id, "completed")
    except Exception as exc:
        log.exception("Task %d (%s) failed: %s", task_id, task_type, exc)
        _update_task_status(db_path, task_id, "failed", error=str(exc))
def _run_trust_photo_analysis(
    db_path: Path,
    listing_id: int,
    params: str | None,
) -> None:
    """Download primary listing photo, run vision LLM, write to trust_scores.

    In cloud mode the result must be written to the per-user DB, which differs
    from db_path (the scheduler's shared task-queue DB). The enqueue call site
    encodes the correct write target as 'user_db' in params; in local mode it
    falls back to db_path so the single-DB layout keeps working.

    Raises on missing photo_url, HTTP failure, or DB error — run_task catches
    and records the error on the task row.
    """
    p = json.loads(params or "{}")
    photo_url = p.get("photo_url", "")
    listing_title = p.get("listing_title", "")
    # user_db: per-user DB in cloud mode; same as db_path in local mode.
    result_db = Path(p.get("user_db", str(db_path)))
    if not photo_url:
        raise ValueError("trust_photo_analysis: 'photo_url' is required in params")

    # Download and base64-encode the photo
    resp = requests.get(photo_url, timeout=10)
    resp.raise_for_status()
    image_b64 = base64.b64encode(resp.content).decode()

    # Build user prompt with optional title context
    user_prompt = "Evaluate this eBay listing photo."
    if listing_title:
        user_prompt = f"Evaluate this eBay listing photo for: {listing_title}"

    # Call LLMRouter with vision capability.
    # NOTE(review): assumes LLMRouter.complete accepts base64 images via the
    # `images` kwarg and returns plain text — confirm against circuitforge_core.llm.
    router = LLMRouter()
    raw = router.complete(
        user_prompt,
        system=_VISION_SYSTEM_PROMPT,
        images=[image_b64],
        max_tokens=128,
    )

    # Parse — be lenient: strip markdown fences if present
    try:
        cleaned = raw.strip().removeprefix("```json").removeprefix("```").removesuffix("```").strip()
        analysis = json.loads(cleaned)
    except json.JSONDecodeError:
        log.warning(
            "Vision LLM returned non-JSON for listing %d: %r", listing_id, raw[:200]
        )
        # Preserve the raw text so the failure is inspectable downstream.
        analysis = {"raw_response": raw, "parse_error": True}

    # Write into the per-user trust_scores row (UPDATE is a no-op if the row
    # no longer exists).
    with get_connection(result_db) as conn:
        conn.execute(
            "UPDATE trust_scores SET photo_analysis_json=? WHERE listing_id=?",
            (json.dumps(analysis), listing_id),
        )
    log.info(
        "Vision analysis for listing %d: stock=%s damage=%s confidence=%s",
        listing_id,
        analysis.get("is_stock_photo"),
        analysis.get("visible_damage"),
        analysis.get("confidence"),
    )

23
app/tasks/scheduler.py Normal file
View file

@ -0,0 +1,23 @@
# app/tasks/scheduler.py
"""Snipe LLM/vision task scheduler — thin shim over circuitforge_core.tasks.scheduler."""
from __future__ import annotations
from pathlib import Path
from circuitforge_core.tasks.scheduler import (
TaskScheduler,
get_scheduler as _base_get_scheduler,
reset_scheduler, # re-export for tests
)
from app.tasks.runner import LLM_TASK_TYPES, VRAM_BUDGETS, run_task
def get_scheduler(db_path: Path) -> TaskScheduler:
    """Return the process-level TaskScheduler singleton for Snipe.

    Thin wrapper that binds Snipe's run_task implementation, task-type set,
    and VRAM budgets onto the shared circuitforge scheduler factory.
    """
    return _base_get_scheduler(
        db_path=db_path,
        run_task_fn=run_task,
        task_types=LLM_TASK_TYPES,
        vram_budgets=VRAM_BUDGETS,
    )

View file

@ -1,22 +1,44 @@
"""Snipe feature gates. Delegates to circuitforge_core.tiers."""
"""Snipe feature gates. Delegates to circuitforge_core.tiers.
Tier ladder: free < paid < premium
Ultra is not used in Snipe — auto-bidding is the highest-impact feature and is Premium.
BYOK unlock analog: LOCAL_VISION_UNLOCKABLE — photo_analysis and serial_number_check
unlock when the user has a local vision model (moondream2 (MD2) or equivalent).
Intentionally ungated (free for all):
- metadata_trust_scoring — core value prop; wide adoption preferred
- hash_dedup — infrastructure, not a differentiator
- market_comps — useful enough to drive signups; not scarce
- scammer_db — community data is more valuable with wider reach
- saved_searches — retention feature; friction cost outweighs gate value
"""
from __future__ import annotations
from circuitforge_core.tiers import can_use as _core_can_use, TIERS # noqa: F401
# Feature key → minimum tier required.
FEATURES: dict[str, str] = {
# Free tier
"metadata_trust_scoring": "free",
"hash_dedup": "free",
# Paid tier
"photo_analysis": "paid",
"serial_number_check": "paid",
"ai_image_detection": "paid",
"reverse_image_search": "paid",
"saved_searches": "paid",
"background_monitoring": "paid",
"ebay_oauth": "paid", # full trust scores via eBay Trading API
"background_monitoring": "paid", # limited at Paid; see LIMITS below
# Premium tier
"auto_bidding": "premium",
}
# Photo analysis features unlock if user has local vision model (moondream2 (MD2) or similar).
# Per-feature usage limits by tier. None = unlimited.
# Call get_limit(feature, tier) at enforcement points (e.g. before creating a new monitor).
LIMITS: dict[tuple[str, str], int | None] = {
("background_monitoring", "paid"): 5,
("background_monitoring", "premium"): 25,
}
# Unlock photo_analysis and serial_number_check when user has a local vision model.
# Same policy as Peregrine's BYOK_UNLOCKABLE: user is providing the compute.
LOCAL_VISION_UNLOCKABLE: frozenset[str] = frozenset({
"photo_analysis",
"serial_number_check",
@ -32,3 +54,19 @@ def can_use(
if has_local_vision and feature in LOCAL_VISION_UNLOCKABLE:
return True
return _core_can_use(feature, tier, has_byok=has_byok, _features=FEATURES)
def get_limit(feature: str, tier: str) -> int | None:
    """Return the usage limit for a feature at the given tier.

    Returns None if the feature is unlimited at this tier, or if the feature
    has no entry in LIMITS (treated as unlimited).

    Call can_use() first — get_limit() does not check tier eligibility.

    Example:
        if can_use("background_monitoring", tier):
            limit = get_limit("background_monitoring", tier)
            if limit is not None and current_count >= limit:
                raise LimitExceeded(f"Paid tier allows {limit} active monitors. Upgrade to Premium for unlimited.")
    """
    return LIMITS.get((feature, tier))

View file

@ -41,6 +41,7 @@ class TrustScorer:
scores = []
for listing, is_dup in zip(listings, duplicates):
seller = self._store.get_seller("ebay", listing.seller_platform_id)
blocklisted = self._store.is_blocklisted("ebay", listing.seller_platform_id)
if seller:
signal_scores = self._meta.score(seller, market_median, listing.price, price_cv)
else:
@ -55,6 +56,7 @@ class TrustScorer:
first_seen_at=listing.first_seen_at,
price=listing.price,
price_at_first_seen=listing.price_at_first_seen,
is_blocklisted=blocklisted,
)
scores.append(trust)
return scores

View file

@ -76,6 +76,7 @@ class Aggregator:
first_seen_at: Optional[str] = None,
price: float = 0.0,
price_at_first_seen: Optional[float] = None,
is_blocklisted: bool = False,
) -> TrustScore:
is_partial = any(v is None for v in signal_scores.values())
clean = {k: (v if v is not None else 0) for k, v in signal_scores.items()}
@ -92,6 +93,23 @@ class Aggregator:
red_flags: list[str] = []
# Blocklist: force established_bad_actor and zero the score regardless of other signals.
if is_blocklisted:
red_flags.append("established_bad_actor")
composite = 0
return TrustScore(
listing_id=listing_id,
composite_score=composite,
account_age_score=clean["account_age"],
feedback_count_score=clean["feedback_count"],
feedback_ratio_score=clean["feedback_ratio"],
price_vs_market_score=clean["price_vs_market"],
category_history_score=clean["category_history"],
photo_hash_duplicate=photo_hash_duplicate,
red_flags_json=json.dumps(red_flags),
score_is_partial=is_partial,
)
# Hard filters
if seller and seller.account_age_days is not None and seller.account_age_days < HARD_FILTER_AGE_DAYS:
red_flags.append("new_account")
@ -100,6 +118,11 @@ class Aggregator:
and seller.feedback_count > HARD_FILTER_BAD_RATIO_MIN_COUNT
):
red_flags.append("established_bad_actor")
if seller and seller.feedback_count == 0:
red_flags.append("zero_feedback")
# Zero feedback is a deliberate signal, not missing data — cap composite score
# so a 0-feedback seller can never appear trustworthy on other signals alone.
composite = min(composite, 35)
# Soft flags
if seller and seller.account_age_days is not None and seller.account_age_days < 30:

View file

@ -17,10 +17,12 @@ dependencies = [
"beautifulsoup4>=4.12",
"lxml>=5.0",
"fastapi>=0.111",
"python-multipart>=0.0.9",
"uvicorn[standard]>=0.29",
"playwright>=1.44",
"playwright-stealth>=1.0",
"cryptography>=42.0",
"PyJWT>=2.8",
]
[tool.setuptools.packages.find]

View file

View file

@ -0,0 +1,158 @@
"""Tests for snipe background task runner."""
from __future__ import annotations
import json
import sqlite3
from pathlib import Path
from unittest.mock import MagicMock, patch
import pytest
from app.tasks.runner import (
LLM_TASK_TYPES,
VRAM_BUDGETS,
insert_task,
run_task,
)
@pytest.fixture
def tmp_db(tmp_path: Path) -> Path:
    """Throwaway SQLite DB with minimal background_tasks / trust_scores
    schemas and a single seeded trust_scores row (listing_id=1)."""
    db = tmp_path / "snipe.db"
    conn = sqlite3.connect(db)
    conn.executescript("""
        CREATE TABLE background_tasks (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            task_type TEXT NOT NULL,
            job_id INTEGER NOT NULL DEFAULT 0,
            status TEXT NOT NULL DEFAULT 'queued',
            params TEXT,
            error TEXT,
            stage TEXT,
            created_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
            updated_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP
        );
        CREATE TABLE trust_scores (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            listing_id INTEGER NOT NULL,
            composite_score INTEGER NOT NULL DEFAULT 0,
            photo_analysis_json TEXT,
            red_flags_json TEXT NOT NULL DEFAULT '[]',
            scored_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP
        );
        INSERT INTO trust_scores (listing_id, composite_score) VALUES (1, 72);
    """)
    conn.commit()
    conn.close()
    return db
def test_llm_task_types_defined():
    """The vision photo-analysis task must be registered as an LLM task."""
    task_type = "trust_photo_analysis"
    assert task_type in LLM_TASK_TYPES
def test_vram_budgets_defined():
    """Photo analysis needs a positive VRAM budget for the scheduler."""
    key = "trust_photo_analysis"
    assert key in VRAM_BUDGETS
    assert VRAM_BUDGETS[key] > 0
def test_insert_task_creates_row(tmp_db: Path):
    """A fresh insert reports is_new and persists a 'queued' row."""
    task_id, is_new = insert_task(tmp_db, "trust_photo_analysis", job_id=1)
    assert is_new is True
    connection = sqlite3.connect(tmp_db)
    try:
        (status,) = connection.execute(
            "SELECT status FROM background_tasks WHERE id=?", (task_id,)
        ).fetchone()
    finally:
        connection.close()
    assert status == "queued"
def test_insert_task_dedup(tmp_db: Path):
    """Re-inserting the same (task_type, job_id) reuses the existing row."""
    first_id, first_is_new = insert_task(tmp_db, "trust_photo_analysis", job_id=1)
    second_id, second_is_new = insert_task(tmp_db, "trust_photo_analysis", job_id=1)
    assert first_id == second_id
    assert first_is_new is True
    assert second_is_new is False
def test_run_task_photo_analysis_success(tmp_db: Path):
    """Vision analysis result is written to trust_scores.photo_analysis_json."""
    params = json.dumps({
        "listing_id": 1,
        "photo_url": "https://example.com/photo.jpg",
        "listing_title": "Used iPhone 13",
    })
    task_id, _ = insert_task(tmp_db, "trust_photo_analysis", job_id=1, params=params)
    vision_result = {
        "is_stock_photo": False,
        "visible_damage": False,
        "authenticity_signal": "genuine_product_photo",
        "confidence": "high",
    }
    with patch("app.tasks.runner.requests") as mock_requests, \
         patch("app.tasks.runner.LLMRouter") as mock_router_cls:
        # Fake a successful image download and a canned vision-model reply.
        mock_requests.get.return_value.content = b"fake_image_bytes"
        mock_requests.get.return_value.raise_for_status = lambda: None
        mock_router_cls.return_value.complete.return_value = json.dumps(vision_result)
        run_task(tmp_db, task_id, "trust_photo_analysis", 1, params)
    connection = sqlite3.connect(tmp_db)
    try:
        (analysis_json,) = connection.execute(
            "SELECT photo_analysis_json FROM trust_scores WHERE listing_id=1"
        ).fetchone()
        (task_status,) = connection.execute(
            "SELECT status FROM background_tasks WHERE id=?", (task_id,)
        ).fetchone()
    finally:
        connection.close()
    assert task_status == "completed"
    assert json.loads(analysis_json)["is_stock_photo"] is False
def test_run_task_photo_fetch_failure_marks_failed(tmp_db: Path):
    """If photo download fails, task is marked failed without crashing."""
    params = json.dumps({
        "listing_id": 1,
        "photo_url": "https://example.com/bad.jpg",
        "listing_title": "Laptop",
    })
    task_id, _ = insert_task(tmp_db, "trust_photo_analysis", job_id=1, params=params)
    with patch("app.tasks.runner.requests") as mock_requests:
        # Simulate a network-level failure on the image fetch.
        mock_requests.get.side_effect = ConnectionError("fetch failed")
        run_task(tmp_db, task_id, "trust_photo_analysis", 1, params)
    connection = sqlite3.connect(tmp_db)
    try:
        status, error = connection.execute(
            "SELECT status, error FROM background_tasks WHERE id=?", (task_id,)
        ).fetchone()
    finally:
        connection.close()
    assert status == "failed"
    assert "fetch failed" in error
def test_run_task_no_photo_url_marks_failed(tmp_db: Path):
    """Params without a photo_url are a validation failure, not a crash."""
    params = json.dumps({"listing_id": 1})
    task_id, _ = insert_task(tmp_db, "trust_photo_analysis", job_id=1, params=params)
    run_task(tmp_db, task_id, "trust_photo_analysis", 1, params)
    connection = sqlite3.connect(tmp_db)
    try:
        status, error = connection.execute(
            "SELECT status, error FROM background_tasks WHERE id=?", (task_id,)
        ).fetchone()
    finally:
        connection.close()
    assert status == "failed"
    assert "photo_url" in error
def test_run_task_unknown_type_marks_failed(tmp_db: Path):
    """An unrecognized task type marks the task failed instead of raising."""
    task_id, _ = insert_task(tmp_db, "trust_photo_analysis", job_id=1)
    run_task(tmp_db, task_id, "unknown_type", 1, None)
    connection = sqlite3.connect(tmp_db)
    try:
        (status,) = connection.execute(
            "SELECT status FROM background_tasks WHERE id=?", (task_id,)
        ).fetchone()
    finally:
        connection.close()
    assert status == "failed"

View file

@ -18,6 +18,7 @@ def test_byok_does_not_unlock_photo_analysis():
assert can_use("photo_analysis", tier="free", has_byok=True) is False
def test_saved_searches_require_paid():
assert can_use("saved_searches", tier="free") is False
def test_saved_searches_are_free():
# Ungated: retention feature — friction cost outweighs gate value (see tiers.py)
assert can_use("saved_searches", tier="free") is True
assert can_use("saved_searches", tier="paid") is True

View file

@ -18,17 +18,20 @@ import { useMotion } from './composables/useMotion'
import { useSnipeMode } from './composables/useSnipeMode'
import { useKonamiCode } from './composables/useKonamiCode'
import { useSessionStore } from './stores/session'
import { useBlocklistStore } from './stores/blocklist'
import AppNav from './components/AppNav.vue'
const motion = useMotion()
const { activate, restore } = useSnipeMode()
const session = useSessionStore()
const blocklistStore = useBlocklistStore()
useKonamiCode(activate)
onMounted(() => {
restore() // re-apply snipe mode from localStorage on hard reload
session.bootstrap() // fetch tier + feature flags from API
blocklistStore.fetchBlocklist() // pre-load so card block buttons reflect state immediately
})
</script>

View file

@ -4,7 +4,10 @@
Snipe Mode easter egg: activated by Konami code (cf-snipe-mode in localStorage).
*/
/* ── Snipe — dark tactical (default — always dark) ─ */
/* Snipe dark tactical (default)
Light variant is defined below via prefers-color-scheme.
Snipe Mode easter egg always overrides both.
*/
:root {
/* Brand — amber target reticle */
--app-primary: #f59e0b;
@ -71,6 +74,49 @@
--sidebar-width: 220px;
}
/* Light mode field notebook / tactical map
Warm cream surfaces with the same amber accent.
Snipe Mode data attribute overrides this via higher specificity.
*/
@media (prefers-color-scheme: light) {
:root:not([data-snipe-mode="active"]) {
/* Surfaces — warm cream, like a tactical field notebook */
--color-surface: #f8f5ee;
--color-surface-2: #f0ece3;
--color-surface-raised: #e8e3d8;
/* Borders — warm khaki */
--color-border: #c8bfae;
--color-border-light: #dbd3c4;
/* Text — warm near-black ink */
--color-text: #1c1a16;
--color-text-muted: #6b6357;
--color-text-inverse: #f8f5ee;
/* Brand — amber stays identical (works great on light too) */
--app-primary: #d97706; /* slightly deeper for contrast on light */
--app-primary-hover: #b45309;
--app-primary-light: rgba(217, 119, 6, 0.12);
/* Trust signals — same hues, adjusted for legibility on cream */
--trust-high: #16a34a;
--trust-mid: #b45309;
--trust-low: #dc2626;
/* Semantic */
--color-success: #16a34a;
--color-error: #dc2626;
--color-warning: #b45309;
--color-info: #2563eb;
/* Shadows — lighter, warm tint */
--shadow-sm: 0 1px 3px rgba(60, 45, 20, 0.12), 0 1px 2px rgba(60, 45, 20, 0.08);
--shadow-md: 0 4px 12px rgba(60, 45, 20, 0.15), 0 2px 4px rgba(60, 45, 20, 0.1);
--shadow-lg: 0 10px 30px rgba(60, 45, 20, 0.2), 0 4px 8px rgba(60, 45, 20, 0.1);
}
}
/* ── Snipe Mode easter egg theme ─────────────────── */
/* Activated by Konami code; stored as 'cf-snipe-mode' in localStorage */
/* Applied: document.documentElement.dataset.snipeMode = 'active' */

View file

@ -66,6 +66,7 @@ import {
MagnifyingGlassIcon,
BookmarkIcon,
Cog6ToothIcon,
ShieldExclamationIcon,
} from '@heroicons/vue/24/outline'
import { useSnipeMode } from '../composables/useSnipeMode'
@ -74,11 +75,13 @@ const { active: isSnipeMode, deactivate } = useSnipeMode()
const navLinks = computed(() => [
{ to: '/', icon: MagnifyingGlassIcon, label: 'Search' },
{ to: '/saved', icon: BookmarkIcon, label: 'Saved' },
{ to: '/blocklist', icon: ShieldExclamationIcon, label: 'Blocklist' },
])
const mobileLinks = [
{ to: '/', icon: MagnifyingGlassIcon, label: 'Search' },
{ to: '/saved', icon: BookmarkIcon, label: 'Saved' },
{ to: '/blocklist', icon: ShieldExclamationIcon, label: 'Block' },
{ to: '/settings', icon: Cog6ToothIcon, label: 'Settings' },
]
</script>

View file

@ -4,6 +4,7 @@
:class="{
'steal-card': isSteal,
'listing-card--auction': isAuction && hoursRemaining !== null && hoursRemaining > 1,
'listing-card--triple-red': tripleRed,
}"
>
<!-- Thumbnail -->
@ -69,6 +70,25 @@
</p>
</div>
<!-- Block seller inline form -->
<div v-if="blockingOpen" class="card__block-popover" @click.stop>
<p class="card__block-title">Block <strong>{{ seller?.username }}</strong>?</p>
<input
v-model="blockReason"
class="card__block-reason"
placeholder="Reason (optional)"
maxlength="200"
@keydown.enter="onBlock"
@keydown.esc="blockingOpen = false"
autofocus
/>
<div class="card__block-actions">
<button class="card__block-confirm" @click="onBlock">Block</button>
<button class="card__block-cancel" @click="blockingOpen = false; blockReason = ''; blockError = ''">Cancel</button>
</div>
<p v-if="blockError" class="card__block-error">{{ blockError }}</p>
</div>
<!-- Score + price column -->
<div class="card__score-col">
<!-- Trust score badge -->
@ -98,6 +118,14 @@
:disabled="enriching"
@click.stop="onEnrich"
>{{ enrichError ? '✗' : '↻' }}</button>
<!-- Block seller -->
<button
v-if="seller"
class="card__block-btn"
:class="{ 'card__block-btn--active': isBlocked }"
:title="isBlocked ? 'Seller is blocked' : 'Block this seller'"
@click.stop="isBlocked ? null : (blockingOpen = !blockingOpen)"
></button>
</div>
<!-- Price -->
@ -123,6 +151,7 @@
import { computed, ref } from 'vue'
import type { Listing, TrustScore, Seller } from '../stores/search'
import { useSearchStore } from '../stores/search'
import { useBlocklistStore } from '../stores/blocklist'
const props = defineProps<{
listing: Listing
@ -132,8 +161,32 @@ const props = defineProps<{
}>()
const store = useSearchStore()
const blocklist = useBlocklistStore()
const enriching = ref(false)
const enrichError = ref(false)
const blockingOpen = ref(false)
const blockReason = ref('')
const blockError = ref('')
const isBlocked = computed(() =>
blocklist.isBlocklisted(props.listing.seller_platform_id),
)
async function onBlock() {
if (!props.seller) return
blockError.value = ''
try {
await blocklist.addSeller(
props.listing.seller_platform_id,
props.seller.username,
blockReason.value.trim(),
)
blockingOpen.value = false
blockReason.value = ''
} catch {
blockError.value = 'Failed to block — try again'
}
}
async function onEnrich() {
if (enriching.value) return
@ -211,7 +264,11 @@ function flagLabel(flag: string): string {
suspicious_price: '✗ Suspicious price',
duplicate_photo: '✗ Duplicate photo',
established_bad_actor: '✗ Bad actor',
zero_feedback: '✗ No feedback',
marketing_photo: '✗ Marketing photo',
scratch_dent_mentioned:'⚠ Damage mentioned',
long_on_market: '⚠ Long on market',
significant_price_drop:'⚠ Price dropped',
}
return labels[flag] ?? `${flag}`
}
@ -250,6 +307,20 @@ const trustBadgeTitle = computed(() => {
return `${base} · pending: ${pendingSignalNames.value.join(', ')} (search again to update)`
})
// Triple Red easter egg: account flag + suspicious price + at least one more hard flag
const tripleRed = computed(() => {
const flags = new Set(redFlags.value)
const hasAccountFlag = flags.has('new_account') || flags.has('account_under_30_days')
const hasPriceFlag = flags.has('suspicious_price')
const hasThirdFlag = (
flags.has('duplicate_photo') ||
flags.has('established_bad_actor') ||
flags.has('zero_feedback') ||
flags.has('scratch_dent_mentioned')
)
return hasAccountFlag && hasPriceFlag && hasThirdFlag
})
const isSteal = computed(() => {
const s = props.trust?.composite_score
if (!s || s < 80) return false
@ -470,6 +541,91 @@ const formattedMarket = computed(() => {
to { transform: rotate(360deg); }
}
.card__block-btn {
margin-top: 2px;
background: none;
border: 1px solid transparent;
border-radius: var(--radius-sm);
color: var(--color-text-muted);
cursor: pointer;
font-size: 0.7rem;
line-height: 1;
opacity: 0;
padding: 1px 4px;
transition: opacity 150ms ease, color 150ms ease, border-color 150ms ease;
}
.listing-card:hover .card__block-btn { opacity: 0.5; }
.listing-card:hover .card__block-btn:hover { opacity: 1; color: var(--color-error); border-color: var(--color-error); }
.card__block-btn--active { opacity: 1 !important; color: var(--color-error); border-color: var(--color-error); cursor: default; }
/* Block popover */
.card__block-popover {
position: absolute;
right: var(--space-4);
top: var(--space-4);
background: var(--color-surface-raised);
border: 1px solid var(--color-border);
border-radius: var(--radius-lg);
padding: var(--space-3);
z-index: 10;
min-width: 220px;
box-shadow: var(--shadow-lg, 0 4px 16px rgba(0,0,0,0.35));
}
.card__block-title {
font-size: 0.8125rem;
margin: 0 0 var(--space-2);
color: var(--color-text);
}
.card__block-reason {
width: 100%;
padding: var(--space-1) var(--space-2);
background: var(--color-surface-2);
border: 1px solid var(--color-border);
border-radius: var(--radius-sm);
color: var(--color-text);
font-size: 0.8125rem;
box-sizing: border-box;
margin-bottom: var(--space-2);
}
.card__block-actions {
display: flex;
gap: var(--space-2);
}
.card__block-confirm {
flex: 1;
padding: var(--space-1) var(--space-2);
background: var(--color-error);
border: none;
border-radius: var(--radius-sm);
color: #fff;
font-size: 0.8125rem;
font-weight: 600;
cursor: pointer;
transition: opacity 120ms ease;
}
.card__block-confirm:hover { opacity: 0.85; }
.card__block-cancel {
padding: var(--space-1) var(--space-2);
background: none;
border: 1px solid var(--color-border);
border-radius: var(--radius-sm);
color: var(--color-text-muted);
font-size: 0.8125rem;
cursor: pointer;
}
.card__block-cancel:hover { border-color: var(--color-text-muted); }
.card__block-error {
font-size: 0.75rem;
color: var(--color-error);
margin: var(--space-1) 0 0;
}
.card__price-wrap {
display: flex;
flex-direction: column;
@ -498,11 +654,79 @@ const formattedMarket = computed(() => {
font-family: var(--font-mono);
}
/* ── Triple Red easter egg ──────────────────────────────────────────────── */
/* Fires when: (new_account | account_under_30d) + suspicious_price + hard flag */
.listing-card--triple-red {
animation: triple-red-glow 2s ease-in-out infinite;
}
.listing-card--triple-red::after {
content: '✗';
position: absolute;
right: var(--space-4);
bottom: var(--space-2);
font-size: 4rem;
font-weight: 900;
line-height: 1;
color: var(--color-error);
opacity: 0.15;
pointer-events: none;
z-index: 0;
animation: triple-red-glitch 2.4s steps(1) infinite;
transition: opacity 350ms ease;
user-select: none;
}
/* On hover: glow settles, ✗ fades away */
.listing-card--triple-red:hover {
animation: none;
border-color: var(--color-error);
box-shadow: 0 0 10px 2px rgba(248, 81, 73, 0.35);
}
.listing-card--triple-red:hover::after {
animation: none;
opacity: 0;
}
@keyframes triple-red-glow {
0%, 100% {
border-color: rgba(248, 81, 73, 0.5);
box-shadow: 0 0 5px 1px rgba(248, 81, 73, 0.2);
}
50% {
border-color: var(--color-error);
box-shadow: 0 0 14px 3px rgba(248, 81, 73, 0.45);
}
}
/* Glitch: mostly still, rapid jitter bursts every ~2.4s */
@keyframes triple-red-glitch {
0%, 80%, 100% { transform: translate(0, 0); opacity: 0.15; }
82% { transform: translate(-4px, 2px); opacity: 0.35; }
84% { transform: translate(3px, -2px); opacity: 0.1; }
86% { transform: translate(-3px, -3px); opacity: 0.4; }
88% { transform: translate(5px, 1px); opacity: 0.08; }
90% { transform: translate(-2px, 3px); opacity: 0.3; }
92% { transform: translate(0, 0); opacity: 0.15; }
}
/* Mobile: stack vertically */
@media (max-width: 600px) {
.listing-card {
grid-template-columns: 60px 1fr;
grid-template-columns: 68px 1fr;
grid-template-rows: auto auto;
padding: var(--space-3);
gap: var(--space-2);
}
.card__thumb {
width: 68px;
height: 68px;
}
.card__title {
font-size: 0.875rem;
}
.card__score-col {
@ -514,5 +738,26 @@ const formattedMarket = computed(() => {
padding-top: var(--space-2);
border-top: 1px solid var(--color-border);
}
/* Trust badge + dots: side by side instead of stacked */
.card__trust {
flex-direction: row;
gap: var(--space-2);
min-width: unset;
padding: var(--space-1) var(--space-3);
}
/* Price + market price: row layout */
.card__price-block {
flex-direction: row;
align-items: center;
gap: var(--space-3);
}
/* Enrich + block buttons: always visible on mobile (no hover) */
.card__enrich-btn,
.card__block-btn {
opacity: 0.6;
}
}
</style>

View file

@ -7,6 +7,7 @@ export const router = createRouter({
{ path: '/', component: SearchView },
{ path: '/listing/:id', component: () => import('../views/ListingView.vue') },
{ path: '/saved', component: () => import('../views/SavedSearchesView.vue') },
{ path: '/blocklist', component: () => import('../views/BlocklistView.vue') },
// Catch-all — FastAPI serves index.html for all unknown routes (SPA mode)
{ path: '/:pathMatch(.*)*', redirect: '/' },
],

109
web/src/stores/blocklist.ts Normal file
View file

@ -0,0 +1,109 @@
import { defineStore } from 'pinia'
import { ref } from 'vue'
import { apiFetch } from '../utils/api'
const apiBase = (import.meta.env.VITE_API_BASE as string) ?? ''
/**
 * One blocked seller as returned by the /api/blocklist endpoints.
 * `id` and `created_at` may be null — presumably for entries not yet
 * round-tripped through the server; confirm against the API schema.
 */
export interface BlocklistEntry {
  id: number | null
  platform: string
  // Platform-scoped seller identifier; used as the client-side lookup key.
  platform_seller_id: string
  username: string
  reason: string | null
  // Provenance: 'manual' | 'csv_import' | 'community' (labels in BlocklistView).
  source: string
  created_at: string | null
}
export const useBlocklistStore = defineStore('blocklist', () => {
  // Reactive list of blocked sellers, newest first.
  const entries = ref<BlocklistEntry[]>([])
  const loading = ref(false)
  const error = ref<string | null>(null)

  /** Load the full blocklist into `entries`. Errors land in `error`, never throw. */
  async function fetchBlocklist() {
    loading.value = true
    error.value = null
    try {
      const res = await apiFetch(`${apiBase}/api/blocklist`)
      if (!res.ok) throw new Error(`HTTP ${res.status}`)
      const data = await res.json()
      entries.value = data.entries
    } catch (e) {
      error.value = e instanceof Error ? e.message : 'Failed to load blocklist'
    } finally {
      loading.value = false
    }
  }

  /**
   * Block a seller and prepend the created entry to the local list.
   * @param platform Marketplace identifier. Defaults to 'ebay' so existing
   *   callers keep working; pass explicitly once more adapters exist.
   * @throws Error on any non-2xx response.
   */
  async function addSeller(
    platformSellerId: string,
    username: string,
    reason: string,
    platform: string = 'ebay',
  ): Promise<void> {
    const res = await apiFetch(`${apiBase}/api/blocklist`, {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({
        platform,
        platform_seller_id: platformSellerId,
        username,
        reason,
      }),
    })
    if (!res.ok) throw new Error(`HTTP ${res.status}`)
    const entry: BlocklistEntry = await res.json()
    // Prepend so the new entry appears at the top
    entries.value = [entry, ...entries.value.filter(
      e => e.platform_seller_id !== platformSellerId,
    )]
  }

  /** Unblock a seller and drop it from the local list. Throws on failure. */
  async function removeSeller(platformSellerId: string): Promise<void> {
    const res = await apiFetch(`${apiBase}/api/blocklist/${encodeURIComponent(platformSellerId)}`, {
      method: 'DELETE',
    })
    if (!res.ok) throw new Error(`HTTP ${res.status}`)
    entries.value = entries.value.filter(e => e.platform_seller_id !== platformSellerId)
  }

  /**
   * True if the seller id appears in the loaded blocklist.
   * NOTE(review): matches on seller id only, ignoring platform — confirm ids
   * cannot collide across platforms before adding non-eBay adapters.
   */
  function isBlocklisted(platformSellerId: string): boolean {
    return entries.value.some(e => e.platform_seller_id === platformSellerId)
  }

  /** Download the blocklist as a CSV file via a temporary object URL. */
  async function exportCsv(): Promise<void> {
    const res = await apiFetch(`${apiBase}/api/blocklist/export`)
    if (!res.ok) throw new Error(`HTTP ${res.status}`)
    const blob = await res.blob()
    const url = URL.createObjectURL(blob)
    const a = document.createElement('a')
    a.href = url
    a.download = 'snipe-blocklist.csv'
    a.click()
    URL.revokeObjectURL(url)
  }

  /**
   * Upload a CSV of sellers, then re-fetch so `entries` reflects all
   * imported rows. Returns the server's import summary.
   */
  async function importCsv(file: File): Promise<{ imported: number; errors: string[] }> {
    const formData = new FormData()
    formData.append('file', file)
    const res = await apiFetch(`${apiBase}/api/blocklist/import`, {
      method: 'POST',
      body: formData,
    })
    if (!res.ok) throw new Error(`HTTP ${res.status}`)
    const result = await res.json()
    // Refresh to pick up all imported entries
    await fetchBlocklist()
    return result
  }

  return {
    entries,
    loading,
    error,
    fetchBlocklist,
    addSeller,
    removeSeller,
    isBlocklisted,
    exportCsv,
    importCsv,
  }
})

View file

@ -1,6 +1,7 @@
import { defineStore } from 'pinia'
import { ref } from 'vue'
import type { SavedSearch, SearchFilters } from './search'
import { apiFetch } from '../utils/api'
export type { SavedSearch }
@ -15,7 +16,7 @@ export const useSavedSearchesStore = defineStore('savedSearches', () => {
loading.value = true
error.value = null
try {
const res = await fetch(`${apiBase}/api/saved-searches`)
const res = await apiFetch(`${apiBase}/api/saved-searches`)
if (!res.ok) throw new Error(`${res.status} ${res.statusText}`)
const data = await res.json() as { saved_searches: SavedSearch[] }
items.value = data.saved_searches
@ -29,7 +30,7 @@ export const useSavedSearchesStore = defineStore('savedSearches', () => {
async function create(name: string, query: string, filters: SearchFilters): Promise<SavedSearch> {
// Strip per-run fields before persisting
const { pages: _pages, ...persistable } = filters
const res = await fetch(`${apiBase}/api/saved-searches`, {
const res = await apiFetch(`${apiBase}/api/saved-searches`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ name, query, filters_json: JSON.stringify(persistable) }),

View file

@ -83,15 +83,43 @@ export interface SearchFilters {
adapter?: 'auto' | 'api' | 'scraper' // override adapter selection
}
// ── Session cache ─────────────────────────────────────────────────────────────
const CACHE_KEY = 'snipe:search-cache'

/** Persist the last search snapshot for the current browser session. */
function saveCache(data: {
  query: string
  results: Listing[]
  trustScores: Record<string, TrustScore>
  sellers: Record<string, Seller>
  marketPrice: number | null
  adapterUsed: 'api' | 'scraper' | null
}) {
  try {
    sessionStorage.setItem(CACHE_KEY, JSON.stringify(data))
  } catch {
    /* quota */
  }
}

/** Read the cached snapshot back, or null when absent/unparseable. */
function loadCache() {
  try {
    const raw = sessionStorage.getItem(CACHE_KEY)
    if (!raw) return null
    return JSON.parse(raw)
  } catch {
    return null
  }
}
// ── Store ────────────────────────────────────────────────────────────────────
export const useSearchStore = defineStore('search', () => {
const query = ref('')
const results = ref<Listing[]>([])
const trustScores = ref<Map<string, TrustScore>>(new Map()) // key: platform_listing_id
const sellers = ref<Map<string, Seller>>(new Map()) // key: platform_seller_id
const marketPrice = ref<number | null>(null)
const adapterUsed = ref<'api' | 'scraper' | null>(null)
const cached = loadCache()
const query = ref<string>(cached?.query ?? '')
const results = ref<Listing[]>(cached?.results ?? [])
const trustScores = ref<Map<string, TrustScore>>(
cached ? new Map(Object.entries(cached.trustScores ?? {})) : new Map()
)
const sellers = ref<Map<string, Seller>>(
cached ? new Map(Object.entries(cached.sellers ?? {})) : new Map()
)
const marketPrice = ref<number | null>(cached?.marketPrice ?? null)
const adapterUsed = ref<'api' | 'scraper' | null>(cached?.adapterUsed ?? null)
const loading = ref(false)
const error = ref<string | null>(null)
@ -143,6 +171,14 @@ export const useSearchStore = defineStore('search', () => {
sellers.value = new Map(Object.entries(data.sellers ?? {}))
marketPrice.value = data.market_price ?? null
adapterUsed.value = data.adapter_used ?? null
saveCache({
query: q,
results: results.value,
trustScores: data.trust_scores ?? {},
sellers: data.sellers ?? {},
marketPrice: marketPrice.value,
adapterUsed: adapterUsed.value,
})
} catch (e) {
if (e instanceof DOMException && e.name === 'AbortError') {
// User cancelled — clear loading but don't surface as an error

17
web/src/utils/api.ts Normal file
View file

@ -0,0 +1,17 @@
/**
 * Thin fetch wrapper that redirects to login on 401.
 * All stores should use this instead of raw fetch() for authenticated endpoints.
 */
const LOGIN_URL = 'https://circuitforge.tech/login'

export async function apiFetch(url: string, init?: RequestInit): Promise<Response> {
  const res = await fetch(url, init)
  if (res.status !== 401) return res
  // Session expired/missing: bounce to login, preserving the current page.
  const next = encodeURIComponent(window.location.href)
  window.location.href = `${LOGIN_URL}?next=${next}`
  // Return a never-resolving promise — navigation is in progress
  return new Promise<Response>(() => {})
}

View file

@ -0,0 +1,318 @@
<template>
<div class="blocklist-view">
<header class="blocklist-header">
<div class="blocklist-header__title-row">
<h1 class="blocklist-title">Scammer Blocklist</h1>
<span class="blocklist-count" v-if="!store.loading">
{{ store.entries.length }} {{ store.entries.length === 1 ? 'entry' : 'entries' }}
</span>
</div>
<p class="blocklist-desc">
Sellers on this list are force-scored to 0 and flagged as bad actors on every search.
Use the block button on any listing card to add sellers.
</p>
<div class="blocklist-actions">
<button class="bl-btn bl-btn--secondary" @click="onExport" :disabled="store.entries.length === 0">
Export CSV
</button>
<label class="bl-btn bl-btn--secondary bl-btn--upload">
Import CSV
<input type="file" accept=".csv,text/csv" class="sr-only" @change="onImport" />
</label>
</div>
<p v-if="importResult" class="import-result" :class="{ 'import-result--error': importResult.errors.length }">
Imported {{ importResult.imported }} sellers.
<span v-if="importResult.errors.length">
{{ importResult.errors.length }} row(s) skipped.
</span>
</p>
</header>
<div v-if="store.loading" class="blocklist-empty">Loading</div>
<div v-else-if="store.error" class="blocklist-empty blocklist-empty--error">
{{ store.error }}
</div>
<div v-else-if="store.entries.length === 0" class="blocklist-empty">
No blocked sellers yet. Use the button on any listing card to add one.
</div>
<table v-else class="bl-table">
<thead>
<tr>
<th>Seller</th>
<th>Reason</th>
<th>Source</th>
<th>Added</th>
<th aria-label="Remove"></th>
</tr>
</thead>
<tbody>
<tr
v-for="entry in store.entries"
:key="entry.platform_seller_id"
class="bl-table__row"
>
<td class="bl-table__seller">
<span class="bl-table__username">{{ entry.username }}</span>
<span class="bl-table__id">{{ entry.platform_seller_id }}</span>
</td>
<td class="bl-table__reason">{{ entry.reason || '—' }}</td>
<td class="bl-table__source">
<span class="bl-source-badge" :class="`bl-source-badge--${entry.source}`">
{{ sourceLabel(entry.source) }}
</span>
</td>
<td class="bl-table__date">{{ formatDate(entry.created_at) }}</td>
<td class="bl-table__remove">
<button
class="bl-remove-btn"
title="Remove from blocklist"
@click="onRemove(entry.platform_seller_id)"
></button>
</td>
</tr>
</tbody>
</table>
</div>
</template>
<script setup lang="ts">
import { onMounted, ref } from 'vue'
import { useBlocklistStore } from '../stores/blocklist'
const store = useBlocklistStore()
const importResult = ref<{ imported: number; errors: string[] } | null>(null)
onMounted(() => store.fetchBlocklist())
/** Remove one seller (table ✕ button); the row disappears reactively. */
async function onRemove(platformSellerId: string) {
  await store.removeSeller(platformSellerId)
}
/** Trigger a CSV download of the current blocklist. */
async function onExport() {
  await store.exportCsv()
}
/**
 * Handle the hidden file input: upload the chosen CSV and surface the
 * import summary. The input is always cleared so picking the same file
 * twice still fires `change`.
 */
async function onImport(event: Event) {
  const input = event.target as HTMLInputElement
  const chosen = input.files?.[0]
  if (!chosen) return
  importResult.value = null
  try {
    importResult.value = await store.importCsv(chosen)
  } catch {
    importResult.value = { imported: 0, errors: ['Upload failed — check file format'] }
  } finally {
    input.value = ''
  }
}
/** Human-readable label for a blocklist entry's provenance. */
function sourceLabel(source: string): string {
  switch (source) {
    case 'manual':
      return 'Manual'
    case 'csv_import':
      return 'CSV'
    case 'community':
      return 'Community'
    default:
      // Unknown sources render as-is rather than hiding data.
      return source
  }
}
/** Short locale date ('Apr 3, 2026' style) or an em dash when missing. */
function formatDate(iso: string | null): string {
  if (!iso) return '—'
  const opts = { month: 'short', day: 'numeric', year: 'numeric' } as const
  return new Date(iso).toLocaleDateString(undefined, opts)
}
</script>
<style scoped>
.blocklist-view {
max-width: 860px;
margin: 0 auto;
padding: var(--space-6) var(--space-4);
}
.blocklist-header {
margin-bottom: var(--space-6);
}
.blocklist-header__title-row {
display: flex;
align-items: baseline;
gap: var(--space-3);
margin-bottom: var(--space-2);
}
.blocklist-title {
font-size: 1.5rem;
font-weight: 700;
margin: 0;
}
.blocklist-count {
font-size: 0.875rem;
color: var(--color-text-muted);
}
.blocklist-desc {
font-size: 0.875rem;
color: var(--color-text-muted);
margin: 0 0 var(--space-4);
line-height: 1.5;
}
.blocklist-actions {
display: flex;
gap: var(--space-2);
flex-wrap: wrap;
}
.bl-btn {
padding: var(--space-2) var(--space-3);
border-radius: var(--radius-md);
font-size: 0.8125rem;
font-weight: 500;
cursor: pointer;
transition: background 150ms ease, opacity 150ms ease;
}
.bl-btn--secondary {
background: var(--color-surface-raised);
border: 1px solid var(--color-border);
color: var(--color-text);
}
.bl-btn--secondary:hover:not(:disabled) {
background: var(--color-surface-2);
border-color: var(--app-primary);
color: var(--app-primary);
}
.bl-btn--secondary:disabled {
opacity: 0.45;
cursor: default;
}
.bl-btn--upload {
display: inline-flex;
align-items: center;
cursor: pointer;
}
.import-result {
margin-top: var(--space-3);
font-size: 0.8125rem;
color: var(--color-success, var(--trust-high));
}
.import-result--error {
color: var(--color-warning);
}
.blocklist-empty {
text-align: center;
padding: var(--space-10) var(--space-4);
color: var(--color-text-muted);
font-size: 0.9375rem;
}
.blocklist-empty--error {
color: var(--color-error);
}
/* Table */
.bl-table {
width: 100%;
border-collapse: collapse;
font-size: 0.875rem;
}
.bl-table thead th {
text-align: left;
font-size: 0.75rem;
font-weight: 600;
text-transform: uppercase;
letter-spacing: 0.05em;
color: var(--color-text-muted);
border-bottom: 1px solid var(--color-border);
padding: var(--space-2) var(--space-3);
}
.bl-table__row {
border-bottom: 1px solid var(--color-border);
transition: background 120ms ease;
}
.bl-table__row:hover {
background: var(--color-surface-raised);
}
.bl-table__row td {
padding: var(--space-3);
vertical-align: middle;
}
.bl-table__seller {
display: flex;
flex-direction: column;
gap: 2px;
}
.bl-table__username {
font-weight: 600;
color: var(--color-text);
}
.bl-table__id {
font-size: 0.75rem;
color: var(--color-text-muted);
font-family: var(--font-mono);
}
.bl-table__reason {
color: var(--color-text-muted);
max-width: 280px;
}
.bl-table__date {
white-space: nowrap;
color: var(--color-text-muted);
}
.bl-source-badge {
padding: 2px var(--space-2);
border-radius: var(--radius-sm);
font-size: 0.7rem;
font-weight: 600;
text-transform: uppercase;
letter-spacing: 0.04em;
}
.bl-source-badge--manual { background: rgba(88, 166, 255, 0.15); color: var(--app-primary); }
.bl-source-badge--csv_import { background: rgba(164, 120, 255, 0.15); color: #a478ff; }
.bl-source-badge--community { background: rgba(63, 185, 80, 0.15); color: var(--trust-high); }
.bl-remove-btn {
background: none;
border: 1px solid transparent;
border-radius: var(--radius-sm);
color: var(--color-text-muted);
cursor: pointer;
font-size: 0.75rem;
padding: 2px var(--space-2);
transition: color 120ms ease, border-color 120ms ease;
}
.bl-remove-btn:hover {
color: var(--color-error);
border-color: var(--color-error);
}
/* Mobile */
@media (max-width: 600px) {
.bl-table thead th:nth-child(3),
.bl-table tbody td:nth-child(3) {
display: none;
}
}
</style>

View file

@ -70,7 +70,7 @@ function formatDate(iso: string | null): string {
async function onRun(item: SavedSearch) {
store.markRun(item.id)
const query: Record<string, string> = { q: item.query }
const query: Record<string, string> = { q: item.query, autorun: '1' }
if (item.filters_json && item.filters_json !== '{}') query.filters = item.filters_json
router.push({ path: '/', query })
}

View file

@ -3,6 +3,7 @@
<!-- Search bar -->
<header class="search-header">
<form class="search-form" @submit.prevent="onSearch" role="search">
<div class="search-form-row1">
<label for="cat-select" class="sr-only">Category</label>
<select
id="cat-select"
@ -29,6 +30,8 @@
autocomplete="off"
:disabled="store.loading"
/>
</div>
<div class="search-form-row2">
<button type="submit" class="search-btn" :disabled="store.loading || !queryInput.trim()">
<MagnifyingGlassIcon class="search-btn-icon" aria-hidden="true" />
<span>{{ store.loading ? 'Searching…' : 'Search' }}</span>
@ -50,6 +53,7 @@
>
<BookmarkIcon class="search-btn-icon" aria-hidden="true" />
</button>
</div>
</form>
<form v-if="showSaveForm" class="save-inline-form" @submit.prevent="onSave">
<input
@ -67,8 +71,25 @@
</header>
<div class="search-body">
<!-- Filter sidebar -->
<aside class="filter-sidebar" aria-label="Search filters">
<!-- Mobile filter toggle -->
<button
type="button"
class="filter-drawer-toggle"
:class="{ 'filter-drawer-toggle--active': showFilters }"
aria-controls="filter-sidebar"
:aria-expanded="showFilters"
@click="showFilters = !showFilters"
>
Filters<span v-if="activeFilterCount > 0" class="filter-badge">{{ activeFilterCount }}</span>
</button>
<!-- Filter sidebar / drawer -->
<aside
id="filter-sidebar"
class="filter-sidebar"
:class="{ 'filter-sidebar--open': showFilters }"
aria-label="Search filters"
>
<!-- eBay Search Parameters -->
<!-- These are sent to eBay. Changes require a new search to take effect. -->
@ -321,10 +342,28 @@ const queryInput = ref('')
// Save search UI state
const showSaveForm = ref(false) // inline "save this search" form visibility
const showFilters = ref(false) // mobile filter drawer open/closed
const saveName = ref('') // user-entered name for the saved search
const saveError = ref<string | null>(null) // last save failure message, if any
const saveSuccess = ref(false) // flips true after a successful save
// Number of filters currently set to a non-default value — shown as a
// badge on the mobile "Filters" drawer toggle.
const activeFilterCount = computed(() => {
  const active = [
    Boolean(filters.categoryId),
    filters.minPrice !== null && filters.minPrice > 0,
    filters.maxPrice !== null && filters.maxPrice > 0,
    filters.minTrust > 0,
    Boolean(filters.hideRedFlags),
    Boolean(filters.hidePartial),
    Boolean(filters.hideLongOnMarket),
    Boolean(filters.hidePriceDrop),
    Boolean(filters.mustInclude),
    Boolean(filters.mustExclude),
    filters.pages > 1,
  ]
  return active.filter(Boolean).length
})
async function onSave() {
if (!saveName.value.trim()) return
saveError.value = null
@ -344,7 +383,6 @@ onMounted(() => {
const q = route.query.q
if (typeof q === 'string' && q.trim()) {
queryInput.value = q.trim()
// Restore saved filters (e.g. category, price range, trust threshold)
const f = route.query.filters
if (typeof f === 'string') {
try {
@ -352,8 +390,14 @@ onMounted(() => {
Object.assign(filters, restored)
} catch { /* malformed — ignore */ }
}
if (route.query.autorun === '1') {
// Strip the autorun flag from the URL before searching
router.replace({ query: { ...route.query, autorun: undefined } })
onSearch()
}
// Otherwise: URL params just restore the form (e.g. on page refresh).
// Results are restored from sessionStorage by the search store.
}
})
// Filters
@ -563,6 +607,7 @@ const hiddenCount = computed(() => store.results.length - visibleListings.value.
// Run a search for the current query text (no-op when the input is blank).
async function onSearch() {
  const trimmedQuery = queryInput.value.trim()
  if (!trimmedQuery) return
  // Close the mobile filter drawer whenever a search is kicked off.
  showFilters.value = false
  await store.search(trimmedQuery, filters)
}
</script>
@ -584,12 +629,6 @@ async function onSearch() {
z-index: 10;
}
.search-form {
display: flex;
gap: var(--space-3);
max-width: 760px;
}
.search-category-select {
padding: var(--space-3) var(--space-3);
background: var(--color-surface-raised);
@ -1078,13 +1117,146 @@ async function onSearch() {
gap: var(--space-3);
}
/* Mobile: collapse filter sidebar */
@media (max-width: 767px) {
.filter-sidebar {
/* ── Search form rows (desktop: single flex row, mobile: two rows) ───── */
.search-form {
display: flex;
gap: var(--space-3);
max-width: 760px;
flex-wrap: wrap; /* rows fall through naturally on mobile */
}
/* Row 1: category select + query input; flexes to fill available width */
.search-form-row1 {
display: flex;
gap: var(--space-3);
flex: 1;
min-width: 0; /* let the text input shrink instead of overflowing */
}
/* Row 2: action buttons; keeps its natural width on desktop */
.search-form-row2 {
display: flex;
gap: var(--space-2);
flex-shrink: 0;
}
/* ── Mobile filter drawer toggle (hidden on desktop) ─────────────────── */
.filter-drawer-toggle {
display: none;
}
.search-header { padding: var(--space-4); }
.results-area { padding: var(--space-4); }
/* Count bubble on the mobile "Filters" toggle (active filter count) */
.filter-badge {
display: inline-flex;
align-items: center;
justify-content: center;
min-width: 16px;
height: 16px;
padding: 0 4px;
margin-left: var(--space-1);
background: var(--app-primary);
color: var(--color-text-inverse);
border-radius: var(--radius-full);
font-size: 0.625rem;
font-weight: 700;
line-height: 1;
}
/* ── Responsive breakpoints ──────────────────────────────────────────── */
@media (max-width: 767px) {
/* Search header: tighter padding on mobile */
.search-header {
padding: var(--space-3) var(--space-3) var(--space-3);
}
/* Form rows: row1 takes full width, row2 stretches buttons */
.search-form {
gap: var(--space-2);
}
.search-form-row1 {
width: 100%;
flex: unset;
}
.search-form-row2 {
width: 100%;
flex-shrink: unset;
}
.search-btn {
flex: 1; /* stretch search button to fill row */
}
/* Category select: don't let it crowd the input */
.search-category-select {
max-width: 110px;
font-size: 0.8125rem;
}
/* Filter drawer toggle: show on mobile */
.filter-drawer-toggle {
display: flex;
align-items: center;
gap: var(--space-2);
padding: var(--space-2) var(--space-3);
margin: var(--space-2) var(--space-3);
background: var(--color-surface-raised);
border: 1px solid var(--color-border);
border-radius: var(--radius-md);
color: var(--color-text-muted);
font-family: var(--font-body);
font-size: 0.875rem;
cursor: pointer;
width: calc(100% - var(--space-6));
transition: border-color 150ms ease, color 150ms ease;
align-self: flex-start;
}
/* Highlight the toggle while the drawer is open */
.filter-drawer-toggle--active {
border-color: var(--app-primary);
color: var(--app-primary);
}
/* Filter sidebar: hidden by default, slides down when open */
.filter-sidebar {
display: none;
width: 100%;
max-height: 65dvh;
overflow-y: auto;
border-right: none;
border-bottom: 1px solid var(--color-border);
padding: var(--space-4) var(--space-4) var(--space-6);
background: var(--color-surface-2);
animation: drawer-slide-down 180ms ease;
}
.filter-sidebar--open {
display: flex;
}
/* Search body: stack vertically (toggle → sidebar → results) */
.search-body {
flex-direction: column;
}
/* Results: full-width, slightly tighter padding */
.results-area {
padding: var(--space-4) var(--space-3);
overflow-y: unset; /* let the page scroll on mobile, not a sub-scroll container */
}
/* Toolbar: wrap if needed */
.results-toolbar {
flex-wrap: wrap;
gap: var(--space-2);
}
.toolbar-actions {
flex-wrap: wrap;
}
/* Save inline form: full width */
.save-inline-form {
flex-wrap: wrap;
}
.save-name-input {
width: 100%;
}
}
/* Entry animation for the mobile filter drawer */
@keyframes drawer-slide-down {
from { opacity: 0; transform: translateY(-8px); }
to { opacity: 1; transform: translateY(0); }
}
</style>