diff --git a/.cliff.toml b/.cliff.toml new file mode 100644 index 0000000..73c9869 --- /dev/null +++ b/.cliff.toml @@ -0,0 +1,28 @@ +[changelog] +header = "" +body = """ +{% for group, commits in commits | group_by(attribute="group") %} +### {{ group | upper_first }} +{% for commit in commits %} +- {{ commit.message | upper_first }} ([{{ commit.id | truncate(length=7, end="") }}](https://git.opensourcesolarpunk.com/Circuit-Forge/snipe/commit/{{ commit.id }})) +{%- endfor %} +{% endfor %} +""" +trim = true + +[git] +conventional_commits = true +filter_unconventional = true +split_commits = false +commit_parsers = [ + { message = "^feat", group = "Features" }, + { message = "^fix", group = "Bug Fixes" }, + { message = "^perf", group = "Performance" }, + { message = "^refactor", group = "Refactoring" }, + { message = "^docs", group = "Documentation" }, + { message = "^test", group = "Testing" }, + { message = "^chore", skip = true }, + { message = "^ci", skip = true }, +] +filter_commits = false +tag_pattern = "v[0-9].*" diff --git a/.forgejo/workflows/ci.yml b/.forgejo/workflows/ci.yml new file mode 100644 index 0000000..9f4e278 --- /dev/null +++ b/.forgejo/workflows/ci.yml @@ -0,0 +1,57 @@ +name: CI + +on: + push: + branches: [main, 'feature/**', 'fix/**'] + pull_request: + branches: [main] + +jobs: + python: + name: Python tests + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - uses: actions/setup-python@v5 + with: + python-version: '3.12' + cache: pip + + # circuitforge-core is a sibling on dev machines but a public GitHub + # mirror in CI — install from there to avoid path-dependency issues. + - name: Install circuitforge-core + run: pip install --no-cache-dir git+https://github.com/CircuitForgeLLC/circuitforge-core.git + + - name: Install snipe (dev extras) + run: pip install --no-cache-dir -e ".[dev]" + + - name: Lint + run: ruff check . 
+ + - name: Test + run: pytest tests/ -v --tb=short + + frontend: + name: Frontend typecheck + tests + runs-on: ubuntu-latest + defaults: + run: + working-directory: web + steps: + - uses: actions/checkout@v4 + + - uses: actions/setup-node@v4 + with: + node-version: '20' + cache: npm + cache-dependency-path: web/package-lock.json + + - name: Install dependencies + run: npm ci + + - name: Typecheck + build + run: npm run build + + - name: Unit tests + run: npm run test diff --git a/.forgejo/workflows/mirror.yml b/.forgejo/workflows/mirror.yml new file mode 100644 index 0000000..2459bf3 --- /dev/null +++ b/.forgejo/workflows/mirror.yml @@ -0,0 +1,30 @@ +name: Mirror + +on: + push: + branches: [main] + tags: ['v*'] + +jobs: + mirror: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - name: Mirror to GitHub + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_MIRROR_TOKEN }} + REPO: ${{ github.event.repository.name }} + run: | + git remote add github "https://x-access-token:${GITHUB_TOKEN}@github.com/CircuitForgeLLC/${REPO}.git" + git push github --mirror + + - name: Mirror to Codeberg + env: + CODEBERG_TOKEN: ${{ secrets.CODEBERG_MIRROR_TOKEN }} + REPO: ${{ github.event.repository.name }} + run: | + git remote add codeberg "https://CircuitForge:${CODEBERG_TOKEN}@codeberg.org/CircuitForge/${REPO}.git" + git push codeberg --mirror diff --git a/.forgejo/workflows/release.yml b/.forgejo/workflows/release.yml new file mode 100644 index 0000000..05d76fc --- /dev/null +++ b/.forgejo/workflows/release.yml @@ -0,0 +1,92 @@ +name: Release + +on: + push: + tags: ['v*'] + +env: + # Forgejo container registry (BSL product — not pushing to public GHCR) + # cf-agents#3: revisit public registry policy before enabling GHCR push + REGISTRY: git.opensourcesolarpunk.com + IMAGE_API: git.opensourcesolarpunk.com/circuit-forge/snipe-api + IMAGE_WEB: git.opensourcesolarpunk.com/circuit-forge/snipe-web + +jobs: + release: + runs-on: ubuntu-latest + steps: + - 
uses: actions/checkout@v4 + with: + fetch-depth: 0 + + # ── Changelog ──────────────────────────────────────────────────────────── + - name: Generate changelog + uses: orhun/git-cliff-action@v3 + id: cliff + with: + config: .cliff.toml + args: --latest --strip header + env: + OUTPUT: CHANGES.md + + # ── Docker ─────────────────────────────────────────────────────────────── + - name: Set up QEMU + uses: docker/setup-qemu-action@v3 + + - name: Set up Buildx + uses: docker/setup-buildx-action@v3 + + - name: Log in to Forgejo registry + uses: docker/login-action@v3 + with: + registry: ${{ env.REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.FORGEJO_RELEASE_TOKEN }} + + # API image — built with circuitforge-core sibling from GitHub mirror + - name: Checkout circuitforge-core + uses: actions/checkout@v4 + with: + repository: CircuitForgeLLC/circuitforge-core + path: circuitforge-core + + - name: Build and push API image + uses: docker/build-push-action@v6 + with: + context: . + file: Dockerfile + push: true + platforms: linux/amd64,linux/arm64 + tags: | + ${{ env.IMAGE_API }}:${{ github.ref_name }} + ${{ env.IMAGE_API }}:latest + cache-from: type=gha + cache-to: type=gha,mode=max + + - name: Build and push web image + uses: docker/build-push-action@v6 + with: + context: . 
+ file: docker/web/Dockerfile + push: true + platforms: linux/amd64,linux/arm64 + tags: | + ${{ env.IMAGE_WEB }}:${{ github.ref_name }} + ${{ env.IMAGE_WEB }}:latest + cache-from: type=gha + cache-to: type=gha,mode=max + + # ── Forgejo Release ─────────────────────────────────────────────────────── + - name: Create Forgejo release + env: + FORGEJO_TOKEN: ${{ secrets.FORGEJO_RELEASE_TOKEN }} + REPO: ${{ github.event.repository.name }} + TAG: ${{ github.ref_name }} + NOTES: ${{ steps.cliff.outputs.content }} + run: | + curl -sS -X POST \ + "https://git.opensourcesolarpunk.com/api/v1/repos/Circuit-Forge/${REPO}/releases" \ + -H "Authorization: token ${FORGEJO_TOKEN}" \ + -H "Content-Type: application/json" \ + -d "$(jq -n --arg tag "$TAG" --arg body "$NOTES" \ + '{tag_name: $tag, name: $tag, body: $body}')" diff --git a/api/cloud_session.py b/api/cloud_session.py index 467702f..ab97abc 100644 --- a/api/cloud_session.py +++ b/api/cloud_session.py @@ -16,8 +16,6 @@ FastAPI usage: """ from __future__ import annotations -import hashlib -import hmac import logging import os import re @@ -77,7 +75,6 @@ def compute_features(tier: str) -> SessionFeatures: """Compute feature flags from tier. Evaluated server-side; sent to frontend.""" local = tier == "local" paid_plus = local or tier in ("paid", "premium", "ultra") - premium_plus = local or tier in ("premium", "ultra") return SessionFeatures( saved_searches=True, # all tiers get saved searches @@ -94,10 +91,28 @@ def compute_features(tier: str) -> SessionFeatures: # ── JWT validation ──────────────────────────────────────────────────────────── def _extract_session_token(header_value: str) -> str: - """Extract cf_session value from a Cookie or X-CF-Session header string.""" - # X-CF-Session may be the raw JWT or the full cookie string + """Extract cf_session value from a Cookie or X-CF-Session header string. + + Returns the JWT token string, or "" if no valid session token is found. 
+ Cookie strings like "snipe_guest=abc123" (no cf_session key) return "" + so the caller falls through to the guest/anonymous path rather than + passing a non-JWT string to validate_session_jwt(). + """ m = re.search(r'(?:^|;)\s*cf_session=([^;]+)', header_value) - return m.group(1).strip() if m else header_value.strip() + if m: + return m.group(1).strip() + # Only treat as a raw JWT if it has exactly three base64url segments (header.payload.sig). + # Cookie strings like "snipe_guest=abc123" must NOT be forwarded to JWT validation. + stripped = header_value.strip() + if re.match(r'^[A-Za-z0-9\-_]+\.[A-Za-z0-9\-_]+\.[A-Za-z0-9\-_=]+$', stripped): + return stripped # bare JWT forwarded directly by Caddy + return "" # not a JWT and no cf_session cookie — treat as unauthenticated + + +def _extract_guest_token(cookie_header: str) -> str | None: + """Extract snipe_guest UUID from the Cookie header, if present.""" + m = re.search(r'(?:^|;)\s*snipe_guest=([^;]+)', cookie_header) + return m.group(1).strip() if m else None def validate_session_jwt(token: str) -> str: @@ -178,6 +193,18 @@ def _user_db_path(user_id: str) -> Path: return path +def _anon_db_path() -> Path: + """Shared pool DB for unauthenticated visitors. + + All anonymous searches write listing data here. Seller and market comp + data accumulates in shared_db as normal, growing the anti-scammer corpus + with every public search regardless of auth state. + """ + path = CLOUD_DATA_ROOT / "anonymous" / "snipe" / "user.db" + path.parent.mkdir(parents=True, exist_ok=True) + return path + + # ── FastAPI dependency ──────────────────────────────────────────────────────── def get_session(request: Request) -> CloudUser: @@ -186,6 +213,8 @@ def get_session(request: Request) -> CloudUser: Local mode: returns a fully-privileged "local" user pointing at SNIPE_DB. Cloud mode: validates X-CF-Session JWT, provisions Heimdall license, resolves tier, returns per-user DB paths. 
+ Unauthenticated cloud visitors: returns a free-tier anonymous user so + search and scoring work without an account. """ if not CLOUD_MODE: return CloudUser( @@ -195,16 +224,30 @@ def get_session(request: Request) -> CloudUser: user_db=_LOCAL_SNIPE_DB, ) - raw_header = ( - request.headers.get("x-cf-session", "") - or request.headers.get("cookie", "") - ) + cookie_header = request.headers.get("cookie", "") + raw_header = request.headers.get("x-cf-session", "") or cookie_header + if not raw_header: - raise HTTPException(status_code=401, detail="Not authenticated") + # No session at all — check for a guest UUID cookie set by /api/session + guest_uuid = _extract_guest_token(cookie_header) + user_id = f"guest:{guest_uuid}" if guest_uuid else "anonymous" + return CloudUser( + user_id=user_id, + tier="free", + shared_db=_shared_db_path(), + user_db=_anon_db_path(), + ) token = _extract_session_token(raw_header) if not token: - raise HTTPException(status_code=401, detail="Not authenticated") + guest_uuid = _extract_guest_token(cookie_header) + user_id = f"guest:{guest_uuid}" if guest_uuid else "anonymous" + return CloudUser( + user_id=user_id, + tier="free", + shared_db=_shared_db_path(), + user_db=_anon_db_path(), + ) user_id = validate_session_jwt(token) _ensure_provisioned(user_id) diff --git a/api/main.py b/api/main.py index e5fd187..819f18e 100644 --- a/api/main.py +++ b/api/main.py @@ -1,8 +1,11 @@ """Snipe FastAPI — search endpoint wired to ScrapedEbayAdapter + TrustScorer.""" from __future__ import annotations +import asyncio +import csv import dataclasses import hashlib +import io import json as _json import logging import os @@ -11,29 +14,27 @@ import uuid from concurrent.futures import ThreadPoolExecutor from contextlib import asynccontextmanager from pathlib import Path +from typing import Optional -import asyncio -import csv -import io - -from fastapi import Depends, FastAPI, HTTPException, Request, UploadFile, File -from fastapi.responses import 
StreamingResponse -from pydantic import BaseModel -from fastapi.middleware.cors import CORSMiddleware - -from circuitforge_core.config import load_env from circuitforge_core.affiliates import wrap_url as _wrap_affiliate_url from circuitforge_core.api import make_feedback_router as _make_feedback_router +from circuitforge_core.config import load_env +from fastapi import Depends, FastAPI, File, HTTPException, Request, Response, UploadFile +from fastapi.middleware.cors import CORSMiddleware +from fastapi.responses import StreamingResponse +from pydantic import BaseModel + +from api.cloud_session import CloudUser, compute_features, get_session +from api.ebay_webhook import router as ebay_webhook_router +from app.db.models import SavedSearch as SavedSearchModel +from app.db.models import ScammerEntry from app.db.store import Store -from app.db.models import SavedSearch as SavedSearchModel, ScammerEntry from app.platforms import SearchFilters -from app.platforms.ebay.scraper import ScrapedEbayAdapter from app.platforms.ebay.adapter import EbayAdapter from app.platforms.ebay.auth import EbayTokenManager from app.platforms.ebay.query_builder import expand_queries, parse_groups +from app.platforms.ebay.scraper import ScrapedEbayAdapter from app.trust import TrustScorer -from api.cloud_session import CloudUser, compute_features, get_session -from api.ebay_webhook import router as ebay_webhook_router load_env(Path(".env")) log = logging.getLogger(__name__) @@ -50,8 +51,8 @@ async def _lifespan(app: FastAPI): # Start vision/LLM background task scheduler. # background_tasks queue lives in shared_db (cloud) or local_db (local) # so the scheduler has a single stable DB path across all cloud users. 
+ from api.cloud_session import _LOCAL_SNIPE_DB, CLOUD_MODE, _shared_db_path from app.tasks.scheduler import get_scheduler, reset_scheduler - from api.cloud_session import CLOUD_MODE, _LOCAL_SNIPE_DB, _shared_db_path sched_db = _shared_db_path() if CLOUD_MODE else _LOCAL_SNIPE_DB get_scheduler(sched_db) log.info("Snipe task scheduler started (db=%s)", sched_db) @@ -100,13 +101,33 @@ def health(): @app.get("/api/session") -def session_info(session: CloudUser = Depends(get_session)): +def session_info(response: Response, session: CloudUser = Depends(get_session)): """Return the current session tier and computed feature flags. Used by the Vue frontend to gate UI features (pages slider cap, saved search limits, shared DB badges, etc.) without hardcoding tier logic client-side. + + For anonymous visitors: issues a snipe_guest UUID cookie (24h TTL) so + the user gets a stable identity for the session without requiring an account. """ + from api.cloud_session import CLOUD_MODE + if CLOUD_MODE and session.user_id == "anonymous": + guest_uuid = str(uuid.uuid4()) + response.set_cookie( + key="snipe_guest", + value=guest_uuid, + max_age=86400, + samesite="lax", + httponly=False, + path="/snipe", + ) + session = CloudUser( + user_id=f"guest:{guest_uuid}", + tier="free", + shared_db=session.shared_db, + user_db=session.user_db, + ) features = compute_features(session.tier) return { "user_id": session.user_id, @@ -245,9 +266,10 @@ def _enqueue_vision_tasks( trust_scores table in cloud mode. 
""" import json as _json + + from api.cloud_session import _LOCAL_SNIPE_DB, CLOUD_MODE, _shared_db_path from app.tasks.runner import insert_task from app.tasks.scheduler import get_scheduler - from api.cloud_session import CLOUD_MODE, _shared_db_path, _LOCAL_SNIPE_DB sched_db = _shared_db_path() if CLOUD_MODE else _LOCAL_SNIPE_DB sched = get_scheduler(sched_db) @@ -323,8 +345,8 @@ def _adapter_name(force: str = "auto") -> str: @app.get("/api/search") def search( q: str = "", - max_price: float = 0, - min_price: float = 0, + max_price: Optional[float] = None, + min_price: Optional[float] = None, pages: int = 1, must_include: str = "", # raw filter string; client-side always applied must_include_mode: str = "all", # "all" | "any" | "groups" — drives eBay expansion @@ -350,9 +372,22 @@ def search( else: ebay_queries = [q] + # Comp query: completed-sales lookup uses an enriched query so the market + # median reflects the same filtered universe the user is looking at. + # "all" mode → append must_include terms to eBay completed-sales query + # "groups" → use first expanded query (most specific variant) + # "any" / no filter → base query (can't enforce optional terms) + if must_include_mode == "groups" and len(ebay_queries) > 0: + comp_query = ebay_queries[0] + elif must_include_mode == "all" and must_include.strip(): + extra = " ".join(_parse_terms(must_include)) + comp_query = f"{q} {extra}".strip() + else: + comp_query = q + base_filters = SearchFilters( - max_price=max_price if max_price > 0 else None, - min_price=min_price if min_price > 0 else None, + max_price=max_price if max_price and max_price > 0 else None, + min_price=min_price if min_price and min_price > 0 else None, pages=pages, must_exclude=must_exclude_terms, # forwarded to eBay -term by the scraper category_id=category_id.strip() or None, @@ -369,9 +404,9 @@ def search( def _run_comps() -> None: try: - _make_adapter(Store(shared_db), adapter).get_completed_sales(q, pages) + 
_make_adapter(Store(shared_db), adapter).get_completed_sales(comp_query, pages) except Exception: - log.warning("comps: unhandled exception for %r", q, exc_info=True) + log.warning("comps: unhandled exception for %r", comp_query, exc_info=True) try: # Comps submitted first — guarantees an immediate worker slot even at max concurrency. @@ -426,7 +461,7 @@ def search( _update_queues[session_id] = _queue.SimpleQueue() _trigger_scraper_enrichment( listings, shared_store, shared_db, - user_db=user_db, query=q, session_id=session_id, + user_db=user_db, query=comp_query, session_id=session_id, ) scorer = TrustScorer(shared_store) @@ -440,7 +475,7 @@ def search( if features.photo_analysis: _enqueue_vision_tasks(listings, trust_scores_list, session) - query_hash = hashlib.md5(q.encode()).hexdigest() + query_hash = hashlib.md5(comp_query.encode()).hexdigest() comp = shared_store.get_market_comp("ebay", query_hash) market_price = comp.median_price if comp else None @@ -459,9 +494,22 @@ def search( and shared_store.get_seller("ebay", listing.seller_platform_id) } + # Build a preference reader for affiliate URL wrapping. + # Anonymous and guest users always use env-var mode: no opt-out or BYOK lookup. 
+ _is_unauthed = session.user_id == "anonymous" or session.user_id.startswith("guest:") + _pref_store = None if _is_unauthed else user_store + + def _get_pref(uid: Optional[str], path: str, default=None): + return _pref_store.get_user_preference(path, default=default) # type: ignore[union-attr] + def _serialize_listing(l: object) -> dict: d = dataclasses.asdict(l) - d["url"] = _wrap_affiliate_url(d["url"], retailer="ebay") + d["url"] = _wrap_affiliate_url( + d["url"], + retailer="ebay", + user_id=None if _is_unauthed else session.user_id, + get_preference=_get_pref if _pref_store is not None else None, + ) return d return { @@ -683,6 +731,19 @@ def mark_saved_search_run(saved_id: int, session: CloudUser = Depends(get_sessio return {"ok": True} +# ── Community Trust Signals ─────────────────────────────────────────────────── +# Signals live in shared_db so feedback aggregates across all users. + +class CommunitySignal(BaseModel): + seller_id: str + confirmed: bool # True = "score looks right", False = "score is wrong" + + +@app.post("/api/community/signal", status_code=204) +def community_signal(body: CommunitySignal, session: CloudUser = Depends(get_session)): + Store(session.shared_db).save_community_signal(body.seller_id, body.confirmed) + + # ── Scammer Blocklist ───────────────────────────────────────────────────────── # Blocklist lives in shared_db: all users on a shared cloud instance see the # same community blocklist. In local (single-user) mode shared_db == user_db. 
@@ -702,6 +763,11 @@ def list_blocklist(session: CloudUser = Depends(get_session)): @app.post("/api/blocklist", status_code=201) def add_to_blocklist(body: BlocklistAdd, session: CloudUser = Depends(get_session)): + if session.user_id in ("anonymous",) or session.user_id.startswith("guest:"): + raise HTTPException( + status_code=403, + detail="Sign in to report sellers to the community blocklist.", + ) store = Store(session.shared_db) entry = store.add_to_blocklist(ScammerEntry( platform=body.platform, @@ -742,6 +808,11 @@ async def import_blocklist( session: CloudUser = Depends(get_session), ): """Import a CSV blocklist. Columns: platform_seller_id, username, reason (optional).""" + if session.user_id == "anonymous" or session.user_id.startswith("guest:"): + raise HTTPException( + status_code=403, + detail="Sign in to import a blocklist.", + ) content = await file.read() try: text = content.decode("utf-8-sig") # handle BOM from Excel exports @@ -775,3 +846,49 @@ async def import_blocklist( return {"imported": imported, "errors": errors} +# ── User Preferences ────────────────────────────────────────────────────────── + +class PreferenceUpdate(BaseModel): + path: str # dot-separated, e.g. "affiliate.opt_out" or "affiliate.byok_ids.ebay" + value: Optional[object] # bool, str, or None to clear + + +@app.get("/api/preferences") +def get_preferences(session: CloudUser = Depends(get_session)) -> dict: + """Return all preferences for the authenticated user. + + Anonymous and guest users always receive an empty dict: a guest's user_db + is the shared anonymous pool DB, so per-user preferences must not be read + from (or stored in) it. + """ + if session.user_id == "anonymous" or session.user_id.startswith("guest:"): + return {} + store = Store(session.user_db) + return store.get_all_preferences() + + +@app.patch("/api/preferences") +def patch_preference( + body: PreferenceUpdate, + session: CloudUser = Depends(get_session), +) -> dict: + """Set a single preference at *path* to *value*. + + - ``affiliate.opt_out`` — available to all signed-in users. + - ``affiliate.byok_ids.ebay`` — Premium tier only. + + Returns the full updated preferences dict.
+ """ + if session.user_id == "anonymous": + raise HTTPException( + status_code=403, + detail="Sign in to save preferences.", + ) + if body.path.startswith("affiliate.byok_ids.") and session.tier not in ("premium", "ultra"): + raise HTTPException( + status_code=403, + detail="Custom affiliate IDs (BYOK) require a Premium subscription.", + ) + store = Store(session.user_db) + store.set_user_preference(body.path, body.value) + return store.get_all_preferences() + + diff --git a/app/db/migrations/008_community_signals.sql b/app/db/migrations/008_community_signals.sql new file mode 100644 index 0000000..65468b5 --- /dev/null +++ b/app/db/migrations/008_community_signals.sql @@ -0,0 +1,11 @@ +-- Community trust signals: user feedback on individual trust scores. +-- "This score looks right" (confirmed=1) / "This score is wrong" (confirmed=0). +-- Stored in shared_db so signals aggregate across all users. +CREATE TABLE IF NOT EXISTS community_signals ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + seller_id TEXT NOT NULL, + confirmed INTEGER NOT NULL CHECK (confirmed IN (0, 1)), + recorded_at TEXT NOT NULL DEFAULT (strftime('%Y-%m-%dT%H:%M:%SZ', 'now')) +); + +CREATE INDEX IF NOT EXISTS idx_community_signals_seller ON community_signals(seller_id); diff --git a/app/db/migrations/009_user_preferences.sql b/app/db/migrations/009_user_preferences.sql new file mode 100644 index 0000000..dd824d5 --- /dev/null +++ b/app/db/migrations/009_user_preferences.sql @@ -0,0 +1,9 @@ +-- Per-user preferences stored as a single JSON blob. +-- Lives in user_db (each user has their own DB file) — never in shared.db. +-- Single-row enforced by PRIMARY KEY CHECK (id = 1): acts as a singleton table. +-- Path reads/writes use cf-core preferences.paths (get_path / set_path). 
+CREATE TABLE IF NOT EXISTS user_preferences ( + id INTEGER PRIMARY KEY CHECK (id = 1), + prefs_json TEXT NOT NULL DEFAULT '{}', + updated_at TEXT NOT NULL DEFAULT (strftime('%Y-%m-%dT%H:%M:%SZ', 'now')) +); diff --git a/app/platforms/ebay/scraper.py b/app/platforms/ebay/scraper.py index 2635f08..1f42f7d 100644 --- a/app/platforms/ebay/scraper.py +++ b/app/platforms/ebay/scraper.py @@ -16,7 +16,7 @@ import json import logging import re import time -from concurrent.futures import ThreadPoolExecutor, as_completed +from concurrent.futures import ThreadPoolExecutor from datetime import datetime, timedelta, timezone from typing import Optional @@ -302,7 +302,8 @@ class ScrapedEbayAdapter(PlatformAdapter): time.sleep(self._delay) - import subprocess, os + import os + import subprocess display_num = next(_display_counter) display = f":{display_num}" xvfb = subprocess.Popen( @@ -313,8 +314,10 @@ class ScrapedEbayAdapter(PlatformAdapter): env["DISPLAY"] = display try: - from playwright.sync_api import sync_playwright # noqa: PLC0415 — lazy: only needed in Docker - from playwright_stealth import Stealth # noqa: PLC0415 + from playwright.sync_api import ( + sync_playwright, # noqa: PLC0415 — lazy: only needed in Docker + ) + from playwright_stealth import Stealth # noqa: PLC0415 with sync_playwright() as pw: browser = pw.chromium.launch( diff --git a/app/tasks/runner.py b/app/tasks/runner.py index beea57a..2b41b4c 100644 --- a/app/tasks/runner.py +++ b/app/tasks/runner.py @@ -19,7 +19,6 @@ import logging from pathlib import Path import requests - from circuitforge_core.db import get_connection from circuitforge_core.llm import LLMRouter diff --git a/app/tasks/scheduler.py b/app/tasks/scheduler.py index a45e0ae..74fabd4 100644 --- a/app/tasks/scheduler.py +++ b/app/tasks/scheduler.py @@ -5,9 +5,10 @@ from __future__ import annotations from pathlib import Path from circuitforge_core.tasks.scheduler import ( - TaskScheduler, + TaskScheduler, # re-export for tests +) +from 
circuitforge_core.tasks.scheduler import ( get_scheduler as _base_get_scheduler, + reset_scheduler, # re-export for tests and main._lifespan ) from app.tasks.runner import LLM_TASK_TYPES, VRAM_BUDGETS, run_task diff --git a/app/tiers.py b/app/tiers.py index b355466..d41eafd 100644 --- a/app/tiers.py +++ b/app/tiers.py @@ -14,7 +14,8 @@ Intentionally ungated (free for all): - saved_searches — retention feature; friction cost outweighs gate value """ from __future__ import annotations -from circuitforge_core.tiers import can_use as _core_can_use, TIERS # noqa: F401 + +from circuitforge_core.tiers import can_use as _core_can_use # noqa: F401 # Feature key → minimum tier required. FEATURES: dict[str, str] = { diff --git a/app/trust/__init__.py b/app/trust/__init__.py index 8caf0a1..8a8667c 100644 --- a/app/trust/__init__.py +++ b/app/trust/__init__.py @@ -1,11 +1,13 @@ -from .metadata import MetadataScorer -from .photo import PhotoScorer -from .aggregator import Aggregator -from app.db.models import Seller, Listing, TrustScore -from app.db.store import Store import hashlib import math +from app.db.models import Listing, TrustScore +from app.db.store import Store + +from .aggregator import Aggregator +from .metadata import MetadataScorer +from .photo import PhotoScorer + class TrustScorer: """Orchestrates metadata + photo scoring for a batch of listings.""" diff --git a/app/trust/aggregator.py b/app/trust/aggregator.py index 974c45f..5153b14 100644 --- a/app/trust/aggregator.py +++ b/app/trust/aggregator.py @@ -1,8 +1,10 @@ """Composite score and red flag extraction.""" from __future__ import annotations + import json -from datetime import datetime, timezone +from datetime import datetime from typing import Optional + from app.db.models import Seller, TrustScore HARD_FILTER_AGE_DAYS = 7 diff --git a/pyproject.toml b/pyproject.toml index 8bdf329..c8fe9f8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -25,9 +25,30 @@ dependencies = [ "PyJWT>=2.8", ] +[project.optional-dependencies]
+dev = [ + "pytest>=8.0", + "pytest-cov>=5.0", + "ruff>=0.4", + "httpx>=0.27", # FastAPI test client +] + [tool.setuptools.packages.find] where = ["."] include = ["app*", "api*"] [tool.pytest.ini_options] testpaths = ["tests"] + +[tool.ruff] +line-length = 100 +target-version = "py311" + +[tool.ruff.lint] +select = ["E", "F", "W", "I"] +ignore = [ + "E501", # line length — handled by formatter + "E402", # module-import-not-at-top — intentional for conditional/lazy imports + "E701", # multiple-statements-colon — `if x: return y` is accepted style + "E741", # ambiguous variable name — l/q used intentionally for listing/query +] diff --git a/tests/db/test_store.py b/tests/db/test_store.py index 26e60ac..dd37014 100644 --- a/tests/db/test_store.py +++ b/tests/db/test_store.py @@ -1,8 +1,9 @@ -import pytest from datetime import datetime, timedelta, timezone -from pathlib import Path + +import pytest + +from app.db.models import Listing, MarketComp, Seller from app.db.store import Store -from app.db.models import Listing, Seller, TrustScore, MarketComp @pytest.fixture diff --git a/tests/test_tasks/test_runner.py b/tests/test_tasks/test_runner.py index 0a44b7c..c9c13db 100644 --- a/tests/test_tasks/test_runner.py +++ b/tests/test_tasks/test_runner.py @@ -4,7 +4,7 @@ from __future__ import annotations import json import sqlite3 from pathlib import Path -from unittest.mock import MagicMock, patch +from unittest.mock import patch import pytest diff --git a/tests/test_tiers.py b/tests/test_tiers.py index 11dc250..18c0162 100644 --- a/tests/test_tiers.py +++ b/tests/test_tiers.py @@ -1,4 +1,4 @@ -from app.tiers import can_use, FEATURES, LOCAL_VISION_UNLOCKABLE +from app.tiers import can_use def test_metadata_scoring_is_free(): diff --git a/tests/ui/test_easter_eggs.py b/tests/ui/test_easter_eggs.py index 9551552..1774002 100644 --- a/tests/ui/test_easter_eggs.py +++ b/tests/ui/test_easter_eggs.py @@ -4,10 +4,8 @@ from __future__ import annotations import json from datetime 
import datetime, timedelta, timezone -import pytest - from app.db.models import Listing, TrustScore -from app.ui.components.easter_eggs import is_steal, auction_hours_remaining +from app.ui.components.easter_eggs import auction_hours_remaining, is_steal def _listing(**kwargs) -> Listing: diff --git a/web/index.html b/web/index.html index 68f94f7..0f7b55c 100644 --- a/web/index.html +++ b/web/index.html @@ -5,7 +5,23 @@ -