feat: wire enhanced suggest_search_terms into Search tab (three-angle excludes)

- Remove old inline _suggest_search_terms (no blocklist/profile awareness)
- Replace with import shim delegating to scripts/suggest_helpers.py
- Call site now loads blocklist.yaml + user.yaml and passes them through
- Update button help text to reflect blocklist, mission values, career background
This commit is contained in:
pyr0ball 2026-03-05 15:08:07 -08:00
parent ce8d5a4ac0
commit dcf2c6af34

View file

@@ -36,47 +36,18 @@ def save_yaml(path: Path, data: dict) -> None:
path.write_text(yaml.dump(data, default_flow_style=False, allow_unicode=True)) path.write_text(yaml.dump(data, default_flow_style=False, allow_unicode=True))
def _suggest_search_terms(current_titles: list[str], resume_path: Path) -> dict: from scripts.suggest_helpers import (
"""Call LLM to suggest additional job titles and exclude keywords.""" suggest_search_terms as _suggest_search_terms_impl,
import json suggest_resume_keywords as _suggest_resume_keywords,
import re )
from scripts.llm_router import LLMRouter
resume_context = "" def _suggest_search_terms(current_titles, resume_path, blocklist=None, user_profile=None):
if resume_path.exists(): return _suggest_search_terms_impl(
resume = load_yaml(resume_path) current_titles,
lines = [] resume_path,
for exp in (resume.get("experience_details") or [])[:3]: blocklist or {},
pos = exp.get("position", "") user_profile or {},
co = exp.get("company", "") )
skills = ", ".join((exp.get("skills_acquired") or [])[:5])
lines.append(f"- {pos} at {co}: {skills}")
resume_context = "\n".join(lines)
titles_str = "\n".join(f"- {t}" for t in current_titles)
prompt = f"""You are helping a job seeker optimize their search criteria.
Their background (from resume):
{resume_context or "Customer success and technical account management leader"}
Current job titles being searched:
{titles_str}
Suggest:
1. 5-8 additional job titles they might be missing (alternative names, adjacent roles, senior variants)
2. 3-5 keywords to add to the exclusion filter (to screen out irrelevant postings)
Return ONLY valid JSON in this exact format:
{{"suggested_titles": ["Title 1", "Title 2"], "suggested_excludes": ["keyword 1", "keyword 2"]}}"""
result = LLMRouter().complete(prompt).strip()
m = re.search(r"\{.*\}", result, re.DOTALL)
if m:
try:
return json.loads(m.group())
except Exception:
pass
return {"suggested_titles": [], "suggested_excludes": []}
_show_finetune = bool(_profile and _profile.inference_profile in ("single-gpu", "dual-gpu")) _show_finetune = bool(_profile and _profile.inference_profile in ("single-gpu", "dual-gpu"))
@@ -328,7 +299,11 @@ with tab_search:
# Streamlit forbids writing to a widget's key after it renders on the same pass; # Streamlit forbids writing to a widget's key after it renders on the same pass;
# button handlers write to *_pending keys instead, consumed here on the next pass. # button handlers write to *_pending keys instead, consumed here on the next pass.
for _pend, _wkey in [("_sp_titles_pending", "_sp_titles_multi"), for _pend, _wkey in [("_sp_titles_pending", "_sp_titles_multi"),
("_sp_locs_pending", "_sp_locations_multi")]: ("_sp_locs_pending", "_sp_locations_multi"),
("_sp_new_title_pending", "_sp_new_title"),
("_sp_paste_titles_pending", "_sp_paste_titles"),
("_sp_new_loc_pending", "_sp_new_loc"),
("_sp_paste_locs_pending", "_sp_paste_locs")]:
if _pend in st.session_state: if _pend in st.session_state:
st.session_state[_wkey] = st.session_state.pop(_pend) st.session_state[_wkey] = st.session_state.pop(_pend)
@@ -339,7 +314,7 @@ with tab_search:
with _suggest_btn_col: with _suggest_btn_col:
st.write("") st.write("")
_run_suggest = st.button("✨ Suggest", key="sp_suggest_btn", _run_suggest = st.button("✨ Suggest", key="sp_suggest_btn",
help="Ask the LLM to suggest additional titles and exclude keywords based on your resume") help="Ask the LLM to suggest additional titles and smarter exclude keywords — using your blocklist, mission values, and career background.")
st.multiselect( st.multiselect(
"Job titles", "Job titles",
@@ -364,7 +339,7 @@ with tab_search:
if _t not in _sel: if _t not in _sel:
_sel.append(_t) _sel.append(_t)
st.session_state["_sp_titles_pending"] = _sel st.session_state["_sp_titles_pending"] = _sel
st.session_state["_sp_new_title"] = "" st.session_state["_sp_new_title_pending"] = ""
st.rerun() st.rerun()
with st.expander("📋 Paste a list of titles"): with st.expander("📋 Paste a list of titles"):
st.text_area("One title per line", key="_sp_paste_titles", height=80, label_visibility="collapsed", st.text_area("One title per line", key="_sp_paste_titles", height=80, label_visibility="collapsed",
@@ -380,14 +355,25 @@ with tab_search:
_sel.append(_t) _sel.append(_t)
st.session_state["_sp_title_options"] = _opts st.session_state["_sp_title_options"] = _opts
st.session_state["_sp_titles_pending"] = _sel st.session_state["_sp_titles_pending"] = _sel
st.session_state["_sp_paste_titles"] = "" st.session_state["_sp_paste_titles_pending"] = ""
st.rerun() st.rerun()
# ── LLM suggestions panel ──────────────────────────────────────────────── # ── LLM suggestions panel ────────────────────────────────────────────────
if _run_suggest: if _run_suggest:
_current_titles = list(st.session_state.get("_sp_titles_multi", [])) _current_titles = list(st.session_state.get("_sp_titles_multi", []))
_blocklist = load_yaml(BLOCKLIST_CFG)
_user_profile = load_yaml(USER_CFG)
with st.spinner("Asking LLM for suggestions…"): with st.spinner("Asking LLM for suggestions…"):
suggestions = _suggest_search_terms(_current_titles, RESUME_PATH) try:
suggestions = _suggest_search_terms(_current_titles, RESUME_PATH, _blocklist, _user_profile)
except RuntimeError as _e:
st.warning(
f"No LLM backend available: {_e}. "
"Check that Ollama is running and has GPU access, or enable a cloud backend in Settings → System → LLM.",
icon="⚠️",
)
suggestions = None
if suggestions is not None:
# Add suggested titles to options list (not auto-selected — user picks from dropdown) # Add suggested titles to options list (not auto-selected — user picks from dropdown)
_opts = list(st.session_state.get("_sp_title_options", [])) _opts = list(st.session_state.get("_sp_title_options", []))
for _t in suggestions.get("suggested_titles", []): for _t in suggestions.get("suggested_titles", []):
@@ -444,8 +430,8 @@ with tab_search:
st.session_state["_sp_loc_options"] = _opts st.session_state["_sp_loc_options"] = _opts
if _l not in _sel: if _l not in _sel:
_sel.append(_l) _sel.append(_l)
st.session_state["_sp_locations_multi"] = _sel st.session_state["_sp_locs_pending"] = _sel
st.session_state["_sp_new_loc"] = "" st.session_state["_sp_new_loc_pending"] = ""
st.rerun() st.rerun()
with st.expander("📋 Paste a list of locations"): with st.expander("📋 Paste a list of locations"):
st.text_area("One location per line", key="_sp_paste_locs", height=80, label_visibility="collapsed", st.text_area("One location per line", key="_sp_paste_locs", height=80, label_visibility="collapsed",
@@ -460,8 +446,8 @@ with tab_search:
if _l not in _sel: if _l not in _sel:
_sel.append(_l) _sel.append(_l)
st.session_state["_sp_loc_options"] = _opts st.session_state["_sp_loc_options"] = _opts
st.session_state["_sp_locations_multi"] = _sel st.session_state["_sp_locs_pending"] = _sel
st.session_state["_sp_paste_locs"] = "" st.session_state["_sp_paste_locs_pending"] = ""
st.rerun() st.rerun()
st.subheader("Exclude Keywords") st.subheader("Exclude Keywords")
@@ -1023,8 +1009,10 @@ with tab_system:
with st.expander("🔌 Services", expanded=True): with st.expander("🔌 Services", expanded=True):
import subprocess as _sp import subprocess as _sp
import shutil as _shutil import shutil as _shutil
import os as _os
TOKENS_CFG = CONFIG_DIR / "tokens.yaml" TOKENS_CFG = CONFIG_DIR / "tokens.yaml"
COMPOSE_DIR = str(Path(__file__).parent.parent.parent) COMPOSE_DIR = str(Path(__file__).parent.parent.parent)
_compose_env = {**_os.environ, "COMPOSE_PROJECT_NAME": "peregrine"}
_docker_available = bool(_shutil.which("docker")) _docker_available = bool(_shutil.which("docker"))
_sys_profile_name = _profile.inference_profile if _profile else "remote" _sys_profile_name = _profile.inference_profile if _profile else "remote"
SYS_SERVICES = [ SYS_SERVICES = [
@@ -1116,7 +1104,7 @@ with tab_system:
elif up: elif up:
if st.button("⏹ Stop", key=f"sys_svc_stop_{svc['port']}", use_container_width=True): if st.button("⏹ Stop", key=f"sys_svc_stop_{svc['port']}", use_container_width=True):
with st.spinner(f"Stopping {svc['name']}"): with st.spinner(f"Stopping {svc['name']}"):
r = _sp.run(svc["stop"], capture_output=True, text=True, cwd=svc["cwd"]) r = _sp.run(svc["stop"], capture_output=True, text=True, cwd=svc["cwd"], env=_compose_env)
st.success("Stopped.") if r.returncode == 0 else st.error(r.stderr or r.stdout) st.success("Stopped.") if r.returncode == 0 else st.error(r.stderr or r.stdout)
st.rerun() st.rerun()
else: else:
@@ -1127,7 +1115,7 @@ with tab_system:
_start_cmd.append(_sel) _start_cmd.append(_sel)
if st.button("▶ Start", key=f"sys_svc_start_{svc['port']}", use_container_width=True, type="primary"): if st.button("▶ Start", key=f"sys_svc_start_{svc['port']}", use_container_width=True, type="primary"):
with st.spinner(f"Starting {svc['name']}"): with st.spinner(f"Starting {svc['name']}"):
r = _sp.run(_start_cmd, capture_output=True, text=True, cwd=svc["cwd"]) r = _sp.run(_start_cmd, capture_output=True, text=True, cwd=svc["cwd"], env=_compose_env)
st.success("Started!") if r.returncode == 0 else st.error(r.stderr or r.stdout) st.success("Started!") if r.returncode == 0 else st.error(r.stderr or r.stdout)
st.rerun() st.rerun()