feat: feedback_api — collect_logs + collect_listings
This commit is contained in:
parent
faf65023b4
commit
6764ad4288
2 changed files with 91 additions and 0 deletions
|
|
@ -59,3 +59,26 @@ def collect_context(page: str) -> dict:
|
||||||
"os": platform.platform(),
|
"os": platform.platform(),
|
||||||
"timestamp": datetime.now(timezone.utc).isoformat().replace("+00:00", "Z"),
|
"timestamp": datetime.now(timezone.utc).isoformat().replace("+00:00", "Z"),
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def collect_logs(n: int = 100, log_path: Path | None = None) -> str:
    """Return the last *n* lines of the Streamlit log, with PII masked.

    Parameters:
        n: Number of trailing lines to return.  Values <= 0 now yield an
            empty (masked) log instead of the whole file.
        log_path: Explicit log file to read; defaults to
            ``_ROOT / ".streamlit.log"``.

    Returns:
        The masked tail of the log, or ``"(no log file found)"`` when the
        file does not exist.
    """
    path = log_path or (_ROOT / ".streamlit.log")
    if not path.exists():
        return "(no log file found)"
    if n <= 0:
        # Bug guard: lines[-0:] is lines[0:], i.e. the WHOLE file —
        # a caller asking for zero lines must get none, not everything.
        return mask_pii("")
    # utf-8 pinned so behavior doesn't depend on the locale encoding;
    # errors="replace" keeps partial/garbled log writes from raising.
    lines = path.read_text(encoding="utf-8", errors="replace").splitlines()
    return mask_pii("\n".join(lines[-n:]))
|
||||||
|
|
||||||
|
|
||||||
|
def collect_listings(db_path: Path | None = None, n: int = 5) -> list[dict]:
|
||||||
|
"""Return the n most-recent job listings — title, company, url only."""
|
||||||
|
import sqlite3
|
||||||
|
from scripts.db import DEFAULT_DB
|
||||||
|
path = db_path or DEFAULT_DB
|
||||||
|
conn = sqlite3.connect(path)
|
||||||
|
conn.row_factory = sqlite3.Row
|
||||||
|
rows = conn.execute(
|
||||||
|
"SELECT title, company, url FROM jobs ORDER BY id DESC LIMIT ?", (n,)
|
||||||
|
).fetchall()
|
||||||
|
conn.close()
|
||||||
|
return [{"title": r["title"], "company": r["company"], "url": r["url"]} for r in rows]
|
||||||
|
|
|
||||||
|
|
@ -51,3 +51,71 @@ def test_collect_context_timestamp_is_utc():
|
||||||
from scripts.feedback_api import collect_context
|
from scripts.feedback_api import collect_context
|
||||||
ctx = collect_context("X")
|
ctx = collect_context("X")
|
||||||
assert ctx["timestamp"].endswith("Z")
|
assert ctx["timestamp"].endswith("Z")
|
||||||
|
|
||||||
|
|
||||||
|
# ── collect_logs ──────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
def test_collect_logs_returns_string(tmp_path):
    """collect_logs hands back a plain string that includes the log tail."""
    from scripts.feedback_api import collect_logs

    log_file = tmp_path / ".streamlit.log"
    log_file.write_text("line1\nline2\nline3\n")

    tail = collect_logs(log_path=log_file, n=10)

    assert isinstance(tail, str)
    assert "line3" in tail
|
||||||
|
|
||||||
|
|
||||||
|
def test_collect_logs_tails_n_lines(tmp_path):
    """Only the final n lines of the log survive the tail."""
    from scripts.feedback_api import collect_logs

    log_file = tmp_path / ".streamlit.log"
    contents = "\n".join(f"line{i}" for i in range(200))
    log_file.write_text(contents)

    tail = collect_logs(log_path=log_file, n=10)

    assert "line199" in tail
    assert "line0" not in tail
|
||||||
|
|
||||||
|
|
||||||
|
def test_collect_logs_masks_pii(tmp_path):
    """Email addresses are redacted before the log tail is returned."""
    from scripts.feedback_api import collect_logs

    log_file = tmp_path / "test.log"
    log_file.write_text("user foo@bar.com connected\n")

    masked = collect_logs(log_path=log_file)

    assert "foo@bar.com" not in masked
    assert "[email redacted]" in masked
|
||||||
|
|
||||||
|
|
||||||
|
def test_collect_logs_missing_file(tmp_path):
    """A nonexistent log path yields a friendly placeholder, not an error."""
    from scripts.feedback_api import collect_logs

    placeholder = collect_logs(log_path=tmp_path / "nonexistent.log")

    assert "no log file" in placeholder.lower()
|
||||||
|
|
||||||
|
|
||||||
|
# ── collect_listings ──────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
def test_collect_listings_safe_fields_only(tmp_path):
    """Only title, company, url — no cover letters, notes, or emails."""
    from scripts.db import init_db, insert_job
    from scripts.feedback_api import collect_listings

    db_file = tmp_path / "test.db"
    init_db(db_file)
    job = {
        "title": "CSM", "company": "Acme", "url": "https://example.com/1",
        "source": "linkedin", "location": "Remote", "is_remote": True,
        "salary": "", "description": "great role", "date_found": "2026-03-01",
    }
    insert_job(db_file, job)

    results = collect_listings(db_path=db_file, n=5)

    assert len(results) == 1
    assert set(results[0].keys()) == {"title", "company", "url"}
    assert results[0]["title"] == "CSM"
|
||||||
|
|
||||||
|
|
||||||
|
def test_collect_listings_respects_n(tmp_path):
    """The n argument caps how many rows come back."""
    from scripts.db import init_db, insert_job
    from scripts.feedback_api import collect_listings

    db_file = tmp_path / "test.db"
    init_db(db_file)
    for idx in range(10):
        row = {
            "title": f"Job {idx}", "company": "Acme", "url": f"https://example.com/{idx}",
            "source": "linkedin", "location": "Remote", "is_remote": False,
            "salary": "", "description": "", "date_found": "2026-03-01",
        }
        insert_job(db_file, row)

    assert len(collect_listings(db_path=db_file, n=3)) == 3
|
||||||
|
|
|
||||||
Loading…
Reference in a new issue