fix(avocet): narrow cancel except clause, clear stale cancel flags on new run

- except clause in cancel_benchmark/cancel_finetune narrowed from Exception
  to _subprocess.TimeoutExpired (C1)
- _cancelled_jobs.discard() called after registering new proc to prevent
  a stale flag from a prior run masking errors (I2)
- local `import subprocess` removed from run_benchmark and
  run_finetune_endpoint; all Popen calls updated to _subprocess.Popen (I1)
- test patch targets updated from subprocess.Popen to app.api._subprocess.Popen;
  cancelled-event tests updated to set flag in proc.wait() side-effect so
  the discard-on-new-run logic is exercised correctly
This commit is contained in:
pyr0ball 2026-03-15 18:13:01 -07:00
parent 30f19711ec
commit 8e016d7fe6
2 changed files with 28 additions and 21 deletions

View file

@@ -345,8 +345,6 @@ def get_benchmark_results():
@app.get("/api/benchmark/run")
def run_benchmark(include_slow: bool = False):
"""Spawn the benchmark script and stream stdout as SSE progress events."""
import subprocess
python_bin = "/devl/miniconda3/envs/job-seeker-classifiers/bin/python"
script = str(_ROOT / "scripts" / "benchmark_classifier.py")
cmd = [python_bin, script, "--score", "--save"]
@@ -355,15 +353,16 @@ def run_benchmark(include_slow: bool = False):
def generate():
try:
proc = subprocess.Popen(
proc = _subprocess.Popen(
cmd,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
stdout=_subprocess.PIPE,
stderr=_subprocess.STDOUT,
text=True,
bufsize=1,
cwd=str(_ROOT),
)
_running_procs["benchmark"] = proc
_cancelled_jobs.discard("benchmark") # clear any stale flag from a prior run
try:
for line in proc.stdout:
line = line.rstrip()
@@ -421,8 +420,6 @@ def run_finetune_endpoint(
score: list[str] = Query(default=[]),
):
"""Spawn finetune_classifier.py and stream stdout as SSE progress events."""
import subprocess
python_bin = "/devl/miniconda3/envs/job-seeker-classifiers/bin/python"
script = str(_ROOT / "scripts" / "finetune_classifier.py")
cmd = [python_bin, script, "--model", model, "--epochs", str(epochs)]
@@ -446,16 +443,17 @@ def run_finetune_endpoint(
def generate():
yield f"data: {json.dumps({'type': 'progress', 'message': f'[api] Using {gpu_note} (most free VRAM)'})}\n\n"
try:
proc = subprocess.Popen(
proc = _subprocess.Popen(
cmd,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
stdout=_subprocess.PIPE,
stderr=_subprocess.STDOUT,
text=True,
bufsize=1,
cwd=str(_ROOT),
env=proc_env,
)
_running_procs["finetune"] = proc
_cancelled_jobs.discard("finetune") # clear any stale flag from a prior run
try:
for line in proc.stdout:
line = line.rstrip()
@@ -491,7 +489,7 @@ def cancel_benchmark():
proc.terminate()
try:
proc.wait(timeout=3)
except Exception:
except _subprocess.TimeoutExpired:
proc.kill()
return {"status": "cancelled"}
@@ -506,7 +504,7 @@ def cancel_finetune():
proc.terminate()
try:
proc.wait(timeout=3)
except Exception:
except _subprocess.TimeoutExpired:
proc.kill()
return {"status": "cancelled"}

View file

@@ -371,7 +371,7 @@ def test_finetune_run_streams_sse_events(client):
mock_proc.returncode = 0
mock_proc.wait = MagicMock()
with patch("subprocess.Popen", return_value=mock_proc):
with patch("app.api._subprocess.Popen",return_value=mock_proc):
r = client.get("/api/finetune/run?model=deberta-small&epochs=1")
assert r.status_code == 200
@@ -387,7 +387,7 @@ def test_finetune_run_emits_complete_on_success(client):
mock_proc.returncode = 0
mock_proc.wait = MagicMock()
with patch("subprocess.Popen", return_value=mock_proc):
with patch("app.api._subprocess.Popen",return_value=mock_proc):
r = client.get("/api/finetune/run?model=deberta-small&epochs=1")
assert '{"type": "complete"}' in r.text
@@ -402,7 +402,7 @@ def test_finetune_run_emits_error_on_nonzero_exit(client):
mock_proc.returncode = 1
mock_proc.wait = MagicMock()
with patch("subprocess.Popen", return_value=mock_proc):
with patch("app.api._subprocess.Popen",return_value=mock_proc):
r = client.get("/api/finetune/run?model=deberta-small&epochs=1")
assert '"type": "error"' in r.text
@@ -422,7 +422,7 @@ def test_finetune_run_passes_score_files_to_subprocess(client):
m.wait = MagicMock()
return m
with patch("subprocess.Popen", side_effect=mock_popen):
with patch("app.api._subprocess.Popen",side_effect=mock_popen):
client.get("/api/finetune/run?model=deberta-small&epochs=1&score=run1.jsonl&score=run2.jsonl")
assert "--score" in captured_cmd
@@ -516,13 +516,17 @@ def test_finetune_run_emits_cancelled_event(client):
mock_proc.stdout = iter([])
mock_proc.returncode = -15 # SIGTERM
def mock_popen(cmd, **kwargs):
# Simulate cancel being called after process starts
def mock_wait():
# Simulate cancel being called while the process is running (after discard clears stale flag)
api_module._cancelled_jobs.add("finetune")
mock_proc.wait = mock_wait
def mock_popen(cmd, **kwargs):
return mock_proc
try:
with patch("subprocess.Popen", side_effect=mock_popen):
with patch("app.api._subprocess.Popen",side_effect=mock_popen):
r = client.get("/api/finetune/run?model=deberta-small&epochs=1")
assert '{"type": "cancelled"}' in r.text
assert '"type": "error"' not in r.text
@@ -539,12 +543,17 @@ def test_benchmark_run_emits_cancelled_event(client):
mock_proc.stdout = iter([])
mock_proc.returncode = -15
def mock_popen(cmd, **kwargs):
def mock_wait():
# Simulate cancel being called while the process is running (after discard clears stale flag)
api_module._cancelled_jobs.add("benchmark")
mock_proc.wait = mock_wait
def mock_popen(cmd, **kwargs):
return mock_proc
try:
with patch("subprocess.Popen", side_effect=mock_popen):
with patch("app.api._subprocess.Popen",side_effect=mock_popen):
r = client.get("/api/benchmark/run")
assert '{"type": "cancelled"}' in r.text
assert '"type": "error"' not in r.text