feat: add queue-jobs and delete digest endpoints
parent 4873201242
commit 12ff809bd5
2 changed files with 141 additions and 0 deletions
dev-api.py  +60 −0
@@ -578,6 +578,66 @@ def extract_digest_links(digest_id: int):
    return {"links": _extract_links(row["body"] or "")}


# ── POST /api/digest-queue/{id}/queue-jobs ────────────────────────────────

class QueueJobsBody(BaseModel):
    urls: list[str]


@app.post("/api/digest-queue/{digest_id}/queue-jobs")
def queue_digest_jobs(digest_id: int, body: QueueJobsBody):
    if not body.urls:
        raise HTTPException(400, "urls must not be empty")
    db = _get_db()
    try:
        exists = db.execute(
            "SELECT 1 FROM digest_queue WHERE id = ?", (digest_id,)
        ).fetchone()
    finally:
        db.close()
    if not exists:
        raise HTTPException(404, "Digest entry not found")

    try:
        from scripts.db import insert_job
    except ImportError:
        raise HTTPException(500, "scripts.db not available")
    queued = 0
    skipped = 0
    for url in body.urls:
        if not url or not url.startswith(('http://', 'https://')):
            skipped += 1
            continue
        result = insert_job(DB_PATH, {
            'url': url,
            'title': '',
            'company': '',
            'source': 'digest',
            'date_found': datetime.utcnow().isoformat(),
        })
        if result:
            queued += 1
        else:
            skipped += 1
    return {"ok": True, "queued": queued, "skipped": skipped}

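Note: a minimal sketch of calling the new endpoint, assuming the dev API is
running locally (the host/port and the `requests` dependency are illustrative
assumptions, not part of this commit):

    import requests

    # Queue two URLs against digest entry 42; the ftp:// URL fails the
    # scheme check, so it should come back in the "skipped" count.
    resp = requests.post(
        "http://localhost:8000/api/digest-queue/42/queue-jobs",
        json={"urls": ["https://greenhouse.io/acme/jobs/456",
                       "ftp://bad.example.com"]},
    )
    resp.raise_for_status()
    print(resp.json())  # e.g. {"ok": True, "queued": 1, "skipped": 1}
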
# ── DELETE /api/digest-queue/{id} ────────────────────────────────────────

@app.delete("/api/digest-queue/{digest_id}")
def delete_digest_entry(digest_id: int):
    db = _get_db()
    try:
        result = db.execute("DELETE FROM digest_queue WHERE id = ?", (digest_id,))
        db.commit()
        rowcount = result.rowcount
    finally:
        db.close()
    if rowcount == 0:
        raise HTTPException(404, "Digest entry not found")
    return {"ok": True}

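Note: the matching delete call, under the same assumed local server; a second
identical request should return 404, as the tests below verify:

    import requests

    resp = requests.delete("http://localhost:8000/api/digest-queue/42")
    print(resp.status_code, resp.json())  # 200 {'ok': True} on first delete
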
# ── POST /api/jobs/{id}/move ───────────────────────────────────────────────────

STATUS_TIMESTAMP_COL = {

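Note: the queued/skipped accounting relies on scripts.db.insert_job returning
a falsy value when the URL is already present (the duplicate-skip test below
depends on this). A minimal sketch of such a helper, assuming a UNIQUE
constraint on jobs.url and a 'pending' default status; the actual schema is
not shown in this commit:

    import sqlite3

    def insert_job(db_path: str, job: dict) -> bool:
        """Insert a job row; return False if the URL already exists."""
        con = sqlite3.connect(db_path)
        try:
            cur = con.execute(
                "INSERT OR IGNORE INTO jobs "
                "(url, title, company, source, status, date_found) "
                "VALUES (?, ?, ?, ?, 'pending', ?)",
                (job["url"], job["title"], job["company"],
                 job["source"], job["date_found"]),
            )
            con.commit()
            return cur.rowcount > 0  # 0 rows changed → duplicate was ignored
        finally:
            con.close()
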
@@ -156,3 +156,84 @@ def test_digest_extract_links_filters_trackers(client, tmp_db):
def test_digest_extract_links_404(client):
    resp = client.post("/api/digest-queue/9999/extract-links")
    assert resp.status_code == 404


# ── POST /api/digest-queue/{id}/queue-jobs ──────────────────────────────────

def test_digest_queue_jobs(client, tmp_db):
    entry_id = _add_digest_entry(tmp_db)
    resp = client.post(
        f"/api/digest-queue/{entry_id}/queue-jobs",
        json={"urls": ["https://greenhouse.io/acme/jobs/456"]},
    )
    assert resp.status_code == 200
    data = resp.json()
    assert data["queued"] == 1
    assert data["skipped"] == 0

    con = sqlite3.connect(tmp_db)
    row = con.execute(
        "SELECT source, status FROM jobs WHERE url = 'https://greenhouse.io/acme/jobs/456'"
    ).fetchone()
    con.close()
    assert row is not None
    assert row[0] == "digest"
    assert row[1] == "pending"

def test_digest_queue_jobs_skips_duplicates(client, tmp_db):
|
||||
entry_id = _add_digest_entry(tmp_db)
|
||||
resp = client.post(
|
||||
f"/api/digest-queue/{entry_id}/queue-jobs",
|
||||
json={"urls": [
|
||||
"https://greenhouse.io/acme/jobs/789",
|
||||
"https://greenhouse.io/acme/jobs/789", # same URL twice in one call
|
||||
]},
|
||||
)
|
||||
assert resp.status_code == 200
|
||||
data = resp.json()
|
||||
assert data["queued"] == 1
|
||||
assert data["skipped"] == 1
|
||||
|
||||
con = sqlite3.connect(tmp_db)
|
||||
count = con.execute(
|
||||
"SELECT COUNT(*) FROM jobs WHERE url = 'https://greenhouse.io/acme/jobs/789'"
|
||||
).fetchone()[0]
|
||||
con.close()
|
||||
assert count == 1
|
||||
|
||||
|
||||
def test_digest_queue_jobs_skips_invalid_urls(client, tmp_db):
    entry_id = _add_digest_entry(tmp_db)
    resp = client.post(
        f"/api/digest-queue/{entry_id}/queue-jobs",
        json={"urls": ["", "ftp://bad.example.com", "https://valid.greenhouse.io/job/1"]},
    )
    assert resp.status_code == 200
    data = resp.json()
    assert data["queued"] == 1
    assert data["skipped"] == 2


def test_digest_queue_jobs_empty_urls(client, tmp_db):
    entry_id = _add_digest_entry(tmp_db)
    resp = client.post(f"/api/digest-queue/{entry_id}/queue-jobs", json={"urls": []})
    assert resp.status_code == 400


def test_digest_queue_jobs_404(client):
    resp = client.post("/api/digest-queue/9999/queue-jobs", json={"urls": ["https://example.com"]})
    assert resp.status_code == 404


# ── DELETE /api/digest-queue/{id} ───────────────────────────────────────────

def test_digest_delete(client, tmp_db):
    entry_id = _add_digest_entry(tmp_db)
    resp = client.delete(f"/api/digest-queue/{entry_id}")
    assert resp.status_code == 200
    assert resp.json()["ok"] is True

    # Second delete → 404
    resp2 = client.delete(f"/api/digest-queue/{entry_id}")
    assert resp2.status_code == 404