feat: POST /api/search/build endpoint with tier gate and category cache wiring

This commit is contained in:
pyr0ball 2026-04-14 11:46:15 -07:00
parent 93f989c821
commit cdc4e40775
2 changed files with 179 additions and 0 deletions

View file

@ -57,6 +57,15 @@ def _get_community_store() -> "SnipeCommunityStore | None":
return _community_store
# ── LLM Query Builder singletons (optional — requires LLM backend) ────────────
# Both are populated during _lifespan startup and remain None when the
# optional LLM stack (category-cache DB migration, LLM backend) fails to
# initialize — callers must handle the None case.
_category_cache = None
_query_translator = None
def _get_query_translator() -> "QueryTranslator | None":
    """Return the module-level QueryTranslator singleton.

    Returns None when no LLM backend was configured at startup; the endpoint
    layer turns that into a 503. Indirection exists so tests can patch this
    accessor instead of the module global.
    """
    return _query_translator
@asynccontextmanager
async def _lifespan(app: FastAPI):
global _community_store
@ -84,6 +93,34 @@ async def _lifespan(app: FastAPI):
else:
log.debug("COMMUNITY_DB_URL not set — community trust signals disabled.")
# LLM Query Builder — category cache + translator (best-effort, never blocks startup)
global _category_cache, _query_translator
try:
from app.platforms.ebay.categories import EbayCategoryCache
from app.llm.query_translator import QueryTranslator
from circuitforge_core.db import get_connection, run_migrations as _run_migrations
from pathlib import Path as _Path
_cat_conn = get_connection(sched_db) # use the same DB as the app
_run_migrations(_cat_conn, _Path("app/db/migrations"))
_category_cache = EbayCategoryCache(_cat_conn)
if _category_cache.is_stale():
_category_cache.refresh(token_manager=None) # bootstrap fallback
try:
from circuitforge_core.llm import LLMRouter
_llm_router = LLMRouter()
_query_translator = QueryTranslator(
category_cache=_category_cache,
llm_router=_llm_router,
)
log.info("LLM query builder ready.")
except Exception:
log.info("No LLM backend configured — query builder disabled.")
except Exception:
log.warning("LLM query builder init failed.", exc_info=True)
yield
get_scheduler(sched_db).shutdown(timeout=10.0)
@ -968,3 +1005,62 @@ def patch_preference(
return store.get_all_preferences()
# ── LLM Query Builder ─────────────────────────────────────────────────────────
class BuildQueryRequest(BaseModel):
    """Request body for POST /api/search/build."""

    # Free-form description of what the user wants to find,
    # e.g. "used RTX 3080 under $300 no mining".
    natural_language: str
@app.post("/api/search/build")
async def build_search_query(
body: BuildQueryRequest,
session: CloudUser = Depends(get_session),
) -> dict:
"""Translate a natural-language description into eBay search parameters.
Requires Paid tier or local mode. Returns a SearchParamsResponse JSON object
ready to pre-fill the search form.
"""
features = compute_features(session.tier)
if not features.llm_query_builder:
raise HTTPException(
status_code=402,
detail="LLM query builder requires Paid tier or above.",
)
translator = _get_query_translator()
if translator is None:
raise HTTPException(
status_code=503,
detail="No LLM backend configured. Set OLLAMA_HOST, ANTHROPIC_API_KEY, or OPENAI_API_KEY.",
)
from app.llm.query_translator import QueryTranslatorError
import asyncio
loop = asyncio.get_event_loop()
try:
result = await loop.run_in_executor(
None, translator.translate, body.natural_language.strip()
)
except QueryTranslatorError as exc:
raise HTTPException(
status_code=422,
detail={"message": str(exc), "raw": exc.raw},
)
except Exception as exc:
raise HTTPException(status_code=503, detail=f"LLM error: {exc}")
return {
"base_query": result.base_query,
"must_include_mode": result.must_include_mode,
"must_include": result.must_include,
"must_exclude": result.must_exclude,
"max_price": result.max_price,
"min_price": result.min_price,
"condition": result.condition,
"category_id": result.category_id,
"explanation": result.explanation,
}

View file

@ -0,0 +1,83 @@
"""Integration tests for POST /api/search/build."""
from __future__ import annotations
import json
from pathlib import Path
from unittest.mock import MagicMock, patch
import pytest
from fastapi.testclient import TestClient
@pytest.fixture
def client(tmp_path, monkeypatch):
    """TestClient with a fresh DB and mocked LLMRouter/category cache.

    Uses monkeypatch.setenv so the SNIPE_DB override is automatically undone
    after each test instead of leaking into the rest of the session (the
    original mutated os.environ directly and never restored it).
    """
    monkeypatch.setenv("SNIPE_DB", str(tmp_path / "snipe.db"))
    # Import app AFTER setting SNIPE_DB so the DB path is picked up.
    # NOTE(review): this only takes effect on the FIRST import of api.main in
    # the pytest session — Python caches modules, so subsequent tests reuse
    # whatever DB path was active at first load. Confirm whether that matters
    # for these tests or whether api.main needs a reload hook.
    from api.main import app
    return TestClient(app, raise_server_exceptions=False)
def _good_llm_response() -> str:
return json.dumps({
"base_query": "RTX 3080",
"must_include_mode": "groups",
"must_include": "rtx|geforce, 3080",
"must_exclude": "mining",
"max_price": 300.0,
"min_price": None,
"condition": ["used"],
"category_id": "27386",
"explanation": "Used RTX 3080 under $300.",
})
def test_build_endpoint_success(client):
    """A configured translator's result is returned verbatim as JSON."""
    from app.llm.query_translator import SearchParamsResponse

    fake_result = SearchParamsResponse(
        base_query="RTX 3080",
        must_include_mode="groups",
        must_include="rtx|geforce, 3080",
        must_exclude="mining",
        max_price=300.0,
        min_price=None,
        condition=["used"],
        category_id="27386",
        explanation="Used RTX 3080 under $300.",
    )
    translator = MagicMock()
    translator.translate.return_value = fake_result

    with patch("api.main._get_query_translator", return_value=translator):
        resp = client.post(
            "/api/search/build",
            json={"natural_language": "used RTX 3080 under $300 no mining"},
        )

    assert resp.status_code == 200
    payload = resp.json()
    assert payload["base_query"] == "RTX 3080"
    assert payload["explanation"] == "Used RTX 3080 under $300."
def test_build_endpoint_llm_unavailable(client):
    """503 when startup left no translator configured."""
    with patch("api.main._get_query_translator", return_value=None):
        resp = client.post("/api/search/build", json={"natural_language": "GPU"})
    assert resp.status_code == 503
def test_build_endpoint_bad_json(client):
    """Unparseable LLM output surfaces as a 422 carrying the raw model text."""
    from app.llm.query_translator import QueryTranslatorError

    broken = MagicMock()
    broken.translate.side_effect = QueryTranslatorError("unparseable", raw="garbage output")

    with patch("api.main._get_query_translator", return_value=broken):
        resp = client.post("/api/search/build", json={"natural_language": "GPU"})

    assert resp.status_code == 422
    assert "raw" in resp.json()["detail"]