feat: add eBay adapter with Browse API, Seller API, and market comps

This commit is contained in:
pyr0ball 2026-03-25 12:54:30 -07:00
parent a8eb11dc46
commit 1672e215b2
4 changed files with 223 additions and 0 deletions

View file

@@ -0,0 +1,98 @@
"""eBay Browse API adapter."""
from __future__ import annotations
import hashlib
from datetime import datetime, timedelta, timezone
from typing import Optional
import requests
from app.db.models import Listing, Seller, MarketComp
from app.db.store import Store
from app.platforms import PlatformAdapter, SearchFilters
from app.platforms.ebay.auth import EbayTokenManager
from app.platforms.ebay.normaliser import normalise_listing, normalise_seller
BROWSE_BASE = {
"production": "https://api.ebay.com/buy/browse/v1",
"sandbox": "https://api.sandbox.ebay.com/buy/browse/v1",
}
# Note: seller lookup uses the Browse API with a seller filter, not a separate Seller API.
# The Commerce Identity /user endpoint returns the calling app's own identity (requires
# user OAuth, not app credentials). Seller metadata is extracted from Browse API inline
# seller fields. registrationDate is available in item detail responses via this path.
class EbayAdapter(PlatformAdapter):
    """Platform adapter backed by the eBay Browse API.

    Seller metadata comes from the inline ``seller`` fields on Browse
    search results (see module note above); there is no separate
    Seller API call.
    """

    # Seconds before an HTTP call is abandoned. Without a timeout a
    # wedged connection would block the caller indefinitely.
    _TIMEOUT = 10.0

    def __init__(self, token_manager: EbayTokenManager, store: Store, env: str = "production"):
        """
        Args:
            token_manager: supplies OAuth bearer tokens for API calls.
            store: persistence layer used for seller / market-comp caching.
            env: ``"production"`` or ``"sandbox"`` (selects the API host).

        Raises:
            KeyError: if ``env`` is not a known environment.
        """
        self._tokens = token_manager
        self._store = store
        self._browse_base = BROWSE_BASE[env]

    def _headers(self) -> dict:
        # Marketplace header is sent on every call so search and seller
        # lookups behave consistently (previously only the seller path set it).
        return {
            "Authorization": f"Bearer {self._tokens.get_token()}",
            "X-EBAY-C-MARKETPLACE-ID": "EBAY_US",
        }

    def search(self, query: str, filters: SearchFilters) -> list[Listing]:
        """Run a Browse keyword search and normalise the results.

        Raises:
            requests.HTTPError: on a non-2xx API response.
        """
        params: dict = {"q": query, "limit": 50}
        filter_parts = []
        if filters.max_price:
            # Open-ended lower bound: everything up to max_price, USD only.
            filter_parts.append(f"price:[..{filters.max_price}],priceCurrency:USD")
        if filters.condition:
            cond_map = {"new": "NEW", "used": "USED", "open box": "OPEN_BOX", "for parts": "FOR_PARTS_NOT_WORKING"}
            ebay_conds = [cond_map[c] for c in filters.condition if c in cond_map]
            if ebay_conds:
                filter_parts.append(f"conditions:{{{','.join(ebay_conds)}}}")
        if filter_parts:
            params["filter"] = ",".join(filter_parts)
        resp = requests.get(
            f"{self._browse_base}/item_summary/search",
            headers=self._headers(),
            params=params,
            timeout=self._TIMEOUT,
        )
        resp.raise_for_status()
        items = resp.json().get("itemSummaries", [])
        return [normalise_listing(item) for item in items]

    def get_seller(self, seller_platform_id: str) -> Optional[Seller]:
        """Return seller metadata, preferring the local cache.

        Falls back to a one-item Browse search filtered by seller and
        extracts the inline seller block. Returns None on any failure —
        callers treat a missing seller as a partial score.
        """
        cached = self._store.get_seller("ebay", seller_platform_id)
        if cached:
            return cached
        try:
            resp = requests.get(
                f"{self._browse_base}/item_summary/search",
                headers=self._headers(),
                params={"seller": seller_platform_id, "limit": 1},
                timeout=self._TIMEOUT,
            )
            resp.raise_for_status()
            items = resp.json().get("itemSummaries", [])
            if not items:
                return None
            seller = normalise_seller(items[0].get("seller", {}))
            self._store.save_seller(seller)
            return seller
        except Exception:
            # Deliberate best-effort: seller data is optional, so network,
            # parsing, or store errors all degrade to "unknown seller".
            return None  # Caller handles None gracefully (partial score)

    def get_completed_sales(self, query: str) -> list[Listing]:
        """Fetch fixed-price comparables for *query* and cache the median.

        Returns [] on a cache hit (comp data is read from the store
        directly) and on any API failure; otherwise returns the fetched
        listings after recording a 6-hour MarketComp entry.
        """
        # md5 here is only a compact cache key, not a security boundary.
        query_hash = hashlib.md5(query.encode()).hexdigest()
        cached = self._store.get_market_comp("ebay", query_hash)
        if cached:
            return []  # Comp data is used directly; return empty to signal cache hit
        params = {"q": query, "limit": 20, "filter": "buyingOptions:{FIXED_PRICE}"}
        try:
            resp = requests.get(
                f"{self._browse_base}/item_summary/search",
                headers=self._headers(),
                params=params,
                timeout=self._TIMEOUT,
            )
            resp.raise_for_status()
            items = resp.json().get("itemSummaries", [])
            listings = [normalise_listing(item) for item in items]
            if listings:
                prices = sorted(l.price for l in listings)
                median = prices[len(prices) // 2]
                comp = MarketComp(
                    platform="ebay",
                    query_hash=query_hash,
                    median_price=median,
                    sample_count=len(prices),
                    expires_at=(datetime.now(timezone.utc) + timedelta(hours=6)).isoformat(),
                )
                self._store.save_market_comp(comp)
            # BUG FIX: previously this fell through and returned None when
            # the search produced no listings; the signature promises a list.
            return listings
        except Exception:
            # Best-effort: comps are advisory, so failures yield no comps.
            return []

View file

@@ -0,0 +1,68 @@
"""Convert raw eBay API responses into Snipe domain objects."""
from __future__ import annotations
import json
from datetime import datetime, timezone
from app.db.models import Listing, Seller
def normalise_listing(raw: dict) -> Listing:
    """Convert one Browse API item summary into a Listing.

    Args:
        raw: item-summary dict from the Browse API; must contain "itemId".

    Raises:
        KeyError: if "itemId" is absent.
    """
    price_data = raw.get("price", {})
    # Primary image first, then additional images, de-duplicated and with
    # empty URLs dropped.
    photos = []
    if "image" in raw:
        photos.append(raw["image"].get("imageUrl", ""))
    for img in raw.get("additionalImages", []):
        url = img.get("imageUrl", "")
        if url and url not in photos:
            photos.append(url)
    photos = [p for p in photos if p]
    listing_age_days = 0
    created_raw = raw.get("itemCreationDate", "")
    if created_raw:
        try:
            # eBay timestamps end in "Z"; fromisoformat needs an explicit
            # offset (on Python < 3.11), hence the replace.
            created = datetime.fromisoformat(created_raw.replace("Z", "+00:00"))
            # max(0, ...) clamps clock skew so age never goes negative.
            # TypeError covers a timestamp without any offset (naive
            # datetime), which the aware subtraction would otherwise raise on.
            listing_age_days = max(0, (datetime.now(timezone.utc) - created).days)
        except (ValueError, TypeError):
            pass  # Unparseable date: leave age at 0 rather than drop the item
    seller = raw.get("seller", {})
    try:
        price = float(price_data.get("value", 0))
    except (TypeError, ValueError):
        price = 0.0  # Malformed price: degrade to 0 rather than crash
    return Listing(
        platform="ebay",
        platform_listing_id=raw["itemId"],
        title=raw.get("title", ""),
        price=price,
        currency=price_data.get("currency", "USD"),
        condition=raw.get("condition", "").lower(),
        seller_platform_id=seller.get("username", ""),
        url=raw.get("itemWebUrl", ""),
        photo_urls=photos,
        listing_age_days=listing_age_days,
    )
def normalise_seller(raw: dict) -> Seller:
    """Convert an inline Browse API seller block into a Seller.

    Args:
        raw: seller dict from an item summary; must contain "username".

    Raises:
        KeyError: if "username" is absent.
    """
    # feedbackPercentage is usually a string like "99.1", but coerce via
    # str() so a numeric value from the API doesn't crash .strip().
    feedback_pct = float(str(raw.get("feedbackPercentage", "0")).strip("%")) / 100.0
    account_age_days = 0
    reg_date_raw = raw.get("registrationDate", "")
    if reg_date_raw:
        try:
            reg_date = datetime.fromisoformat(reg_date_raw.replace("Z", "+00:00"))
            # max(0, ...) clamps clock skew; TypeError covers a naive
            # timestamp, which the aware subtraction would raise on.
            account_age_days = max(0, (datetime.now(timezone.utc) - reg_date).days)
        except (ValueError, TypeError):
            pass  # Unparseable date: leave age at 0
    # Per-category feedback counts; counts arrive as strings in the API.
    category_history = {}
    summary = raw.get("sellerFeedbackSummary", {})
    for entry in summary.get("feedbackByCategory", []):
        category_history[entry.get("categorySite", "")] = int(entry.get("count", 0))
    return Seller(
        platform="ebay",
        platform_seller_id=raw["username"],
        username=raw["username"],
        account_age_days=account_age_days,
        feedback_count=int(raw.get("feedbackScore", 0)),
        feedback_ratio=feedback_pct,
        category_history_json=json.dumps(category_history),
    )

View file

View file

@@ -0,0 +1,57 @@
import pytest
from app.platforms.ebay.normaliser import normalise_listing, normalise_seller
def test_normalise_listing_maps_fields():
    """Every core field of a raw item summary should survive normalisation."""
    payload = {
        "itemId": "v1|12345|0",
        "title": "RTX 4090 GPU",
        "price": {"value": "950.00", "currency": "USD"},
        "condition": "USED",
        "seller": {"username": "techguy", "feedbackScore": 300, "feedbackPercentage": "99.1"},
        "itemWebUrl": "https://ebay.com/itm/12345",
        "image": {"imageUrl": "https://i.ebayimg.com/1.jpg"},
        "additionalImages": [{"imageUrl": "https://i.ebayimg.com/2.jpg"}],
        "itemCreationDate": "2026-03-20T00:00:00.000Z",
    }
    result = normalise_listing(payload)
    expected_scalars = {
        "platform": "ebay",
        "platform_listing_id": "v1|12345|0",
        "title": "RTX 4090 GPU",
        "price": 950.0,
        "condition": "used",
        "seller_platform_id": "techguy",
    }
    for attr, want in expected_scalars.items():
        assert getattr(result, attr) == want
    for url in ("https://i.ebayimg.com/1.jpg", "https://i.ebayimg.com/2.jpg"):
        assert url in result.photo_urls
def test_normalise_listing_handles_missing_images():
    """A summary with no image fields should yield an empty photo list."""
    minimal_payload = {
        "itemId": "v1|999|0",
        "title": "GPU",
        "price": {"value": "100.00", "currency": "USD"},
        "condition": "NEW",
        "seller": {"username": "u"},
        "itemWebUrl": "https://ebay.com/itm/999",
    }
    assert normalise_listing(minimal_payload).photo_urls == []
def test_normalise_seller_maps_fields():
    """Seller identity, feedback stats, and account age should all map over."""
    payload = {
        "username": "techguy",
        "feedbackScore": 300,
        "feedbackPercentage": "99.1",
        "registrationDate": "2020-03-01T00:00:00.000Z",
        "sellerFeedbackSummary": {
            "feedbackByCategory": [
                {"transactionPercent": "95.0", "categorySite": "ELECTRONICS", "count": "50"}
            ]
        },
    }
    result = normalise_seller(payload)
    assert (result.username, result.feedback_count) == ("techguy", 300)
    assert result.feedback_ratio == pytest.approx(0.991, abs=0.001)
    assert result.account_age_days > 0