Compare commits

...

5 commits

Author SHA1 Message Date
542ff86a43 feat: show version tag in sidebar footer 2026-02-26 14:39:47 -08:00
2630c161db feat: multiselect tags for job titles & locations; remove duplicate Notion section; docker detection for services panel
- Job titles and locations: replaced text_area with st.multiselect + an add button + a paste-list expander
- Suggest now populates the titles dropdown (not auto-selected) — user picks what they want
- Suggested exclusions still use click-to-add chip buttons
- Removed duplicate Notion expander from System Settings (handled by Integrations tab)
- Services panel: show host terminal copy-paste command when docker CLI unavailable (app runs inside container)
2026-02-26 14:26:58 -08:00
cd8510f972 fix: add address field to Resume Profile — was hidden, triggering false FILL_IN banner 2026-02-26 14:03:55 -08:00
d9b8b9e376 fix: port drift on restart — down before preflight, read port from .env
Makefile restart target now runs compose down before preflight so ports
are free when preflight assigns them; previously preflight ran first while
the old container still held 8502, causing it to bump to 8503.

manage.sh start/restart/open now read STREAMLIT_PORT from .env instead
of re-running preflight after startup (which would see the live container
and bump the reported port again).
2026-02-26 13:57:12 -08:00
34761158e1 feat: resume upload in Settings + improved config hints
- Resume Profile tab: upload widget replaces error+stop when YAML missing;
  collapsed "Replace Resume" expander when profile exists; saves parsed
  data and raw text (for LLM context) in one step
- FILL_IN banner with clickable link to Setup wizard when incomplete fields detected
- Ollama not reachable hint references Services section below
- Fine-tune hint clarifies "My Profile tab above" with inference profile names
- vLLM no-models hint links to Fine-Tune tab
2026-02-26 13:53:01 -08:00
4 changed files with 192 additions and 71 deletions

View file

@ -47,8 +47,10 @@ start: preflight ## Preflight check then start Peregrine (PROFILE=remote|cpu|si
stop: ## Stop all Peregrine services
$(COMPOSE) down
restart: preflight ## Preflight check then restart all services
$(COMPOSE) down && $(COMPOSE) $(COMPOSE_FILES) --profile $(PROFILE) up -d
restart: ## Stop services, re-run preflight (ports now free), then start
$(COMPOSE) down
@$(PYTHON) scripts/preflight.py
$(COMPOSE) $(COMPOSE_FILES) --profile $(PROFILE) up -d
logs: ## Tail app logs
$(COMPOSE) logs -f app

View file

@ -8,6 +8,7 @@ Run: streamlit run app/app.py
bash scripts/manage-ui.sh start
"""
import logging
import subprocess
import sys
from pathlib import Path
@ -138,7 +139,20 @@ def _task_indicator():
detail = f" · {stage}" if stage else (f"{t.get('company')}" if t.get("company") else "")
st.caption(f"{icon} {label}{detail}")
@st.cache_resource
def _get_version() -> str:
    """Return the app's version string, or "dev" when it cannot be determined.

    Runs ``git describe --tags --always`` with the working directory set two
    levels above this file (presumably the repository root — confirm against
    the project layout). Cached for the process lifetime via
    ``st.cache_resource`` so git is invoked at most once per app run.
    Any failure (git not installed, not a git checkout, etc.) is swallowed
    and the literal ``"dev"`` is returned instead.
    """
    try:
        return subprocess.check_output(
            ["git", "describe", "--tags", "--always"],
            cwd=Path(__file__).parent.parent,
            text=True,
        ).strip()
    except Exception:
        return "dev"
with st.sidebar:
_task_indicator()
st.divider()
st.caption(f"Peregrine {_get_version()}")
pg.run()

View file

@ -310,58 +310,87 @@ with tab_search:
p = profiles[0] if profiles else {}
# Seed session state from config on first load (or when config changes after save)
_sp_hash = str(p.get("titles", [])) + str(p.get("exclude_keywords", []))
_sp_hash = str(p.get("titles", [])) + str(p.get("locations", [])) + str(p.get("exclude_keywords", []))
if st.session_state.get("_sp_hash") != _sp_hash:
st.session_state["_sp_titles"] = "\n".join(p.get("titles", []))
_saved_titles = list(p.get("titles", []))
st.session_state["_sp_title_options"] = _saved_titles.copy()
st.session_state["_sp_titles_multi"] = _saved_titles.copy()
_saved_locs = list(p.get("locations", []))
st.session_state["_sp_loc_options"] = _saved_locs.copy()
st.session_state["_sp_locations_multi"] = _saved_locs.copy()
st.session_state["_sp_excludes"] = "\n".join(p.get("exclude_keywords", []))
st.session_state["_sp_hash"] = _sp_hash
# ── Titles ────────────────────────────────────────────────────────────────
title_row, suggest_btn_col = st.columns([4, 1])
with title_row:
_title_row, _suggest_btn_col = st.columns([4, 1])
with _title_row:
st.subheader("Job Titles to Search")
with suggest_btn_col:
st.write("") # vertical align
with _suggest_btn_col:
st.write("")
_run_suggest = st.button("✨ Suggest", key="sp_suggest_btn",
help="Ask the LLM to suggest additional titles and exclude keywords based on your resume")
titles_text = st.text_area(
"One title per line",
key="_sp_titles",
height=150,
help="JobSpy will search for any of these titles across all configured boards.",
label_visibility="visible",
st.multiselect(
"Job titles",
options=st.session_state.get("_sp_title_options", p.get("titles", [])),
key="_sp_titles_multi",
help="Select from known titles. Suggestions from ✨ Suggest appear here — pick the ones you want.",
label_visibility="collapsed",
)
_add_t_col, _add_t_btn = st.columns([5, 1])
with _add_t_col:
st.text_input("Add a title", key="_sp_new_title", label_visibility="collapsed",
placeholder="Type a title and press ")
with _add_t_btn:
if st.button("", key="sp_add_title_btn", use_container_width=True, help="Add custom title"):
_t = st.session_state.get("_sp_new_title", "").strip()
if _t:
_opts = list(st.session_state.get("_sp_title_options", []))
_sel = list(st.session_state.get("_sp_titles_multi", []))
if _t not in _opts:
_opts.append(_t)
st.session_state["_sp_title_options"] = _opts
if _t not in _sel:
_sel.append(_t)
st.session_state["_sp_titles_multi"] = _sel
st.session_state["_sp_new_title"] = ""
st.rerun()
with st.expander("📋 Paste a list of titles"):
st.text_area("One title per line", key="_sp_paste_titles", height=80, label_visibility="collapsed",
placeholder="Paste one title per line…")
if st.button("Import", key="sp_import_titles"):
_new = [t.strip() for t in st.session_state.get("_sp_paste_titles", "").splitlines() if t.strip()]
_opts = list(st.session_state.get("_sp_title_options", []))
_sel = list(st.session_state.get("_sp_titles_multi", []))
for _t in _new:
if _t not in _opts:
_opts.append(_t)
if _t not in _sel:
_sel.append(_t)
st.session_state["_sp_title_options"] = _opts
st.session_state["_sp_titles_multi"] = _sel
st.session_state["_sp_paste_titles"] = ""
st.rerun()
# ── LLM suggestions panel ────────────────────────────────────────────────
if _run_suggest:
current = [t.strip() for t in titles_text.splitlines() if t.strip()]
_current_titles = list(st.session_state.get("_sp_titles_multi", []))
with st.spinner("Asking LLM for suggestions…"):
suggestions = _suggest_search_terms(current, RESUME_PATH)
suggestions = _suggest_search_terms(_current_titles, RESUME_PATH)
# Add suggested titles to options list (not auto-selected — user picks from dropdown)
_opts = list(st.session_state.get("_sp_title_options", []))
for _t in suggestions.get("suggested_titles", []):
if _t not in _opts:
_opts.append(_t)
st.session_state["_sp_title_options"] = _opts
st.session_state["_sp_suggestions"] = suggestions
st.rerun()
if st.session_state.get("_sp_suggestions"):
sugg = st.session_state["_sp_suggestions"]
s_titles = sugg.get("suggested_titles", [])
s_excl = sugg.get("suggested_excludes", [])
existing_titles = {t.lower() for t in titles_text.splitlines() if t.strip()}
existing_excl = {e.lower() for e in st.session_state.get("_sp_excludes", "").splitlines() if e.strip()}
if s_titles:
st.caption("**Suggested titles** — click to add:")
cols = st.columns(min(len(s_titles), 4))
for i, title in enumerate(s_titles):
with cols[i % 4]:
if title.lower() not in existing_titles:
if st.button(f"+ {title}", key=f"sp_add_title_{i}"):
st.session_state["_sp_titles"] = (
st.session_state.get("_sp_titles", "").rstrip("\n") + f"\n{title}"
)
st.rerun()
else:
st.caption(f"{title}")
if s_excl:
st.caption("**Suggested exclusions** — click to add:")
cols2 = st.columns(min(len(s_excl), 4))
@ -380,12 +409,49 @@ with tab_search:
st.session_state.pop("_sp_suggestions", None)
st.rerun()
# ── Locations ─────────────────────────────────────────────────────────────
st.subheader("Locations")
locations_text = st.text_area(
"One location per line",
value="\n".join(p.get("locations", [])),
height=100,
st.multiselect(
"Locations",
options=st.session_state.get("_sp_loc_options", p.get("locations", [])),
key="_sp_locations_multi",
help="Select from known locations or add your own below.",
label_visibility="collapsed",
)
_add_l_col, _add_l_btn = st.columns([5, 1])
with _add_l_col:
st.text_input("Add a location", key="_sp_new_loc", label_visibility="collapsed",
placeholder="Type a location and press ")
with _add_l_btn:
if st.button("", key="sp_add_loc_btn", use_container_width=True, help="Add custom location"):
_l = st.session_state.get("_sp_new_loc", "").strip()
if _l:
_opts = list(st.session_state.get("_sp_loc_options", []))
_sel = list(st.session_state.get("_sp_locations_multi", []))
if _l not in _opts:
_opts.append(_l)
st.session_state["_sp_loc_options"] = _opts
if _l not in _sel:
_sel.append(_l)
st.session_state["_sp_locations_multi"] = _sel
st.session_state["_sp_new_loc"] = ""
st.rerun()
with st.expander("📋 Paste a list of locations"):
st.text_area("One location per line", key="_sp_paste_locs", height=80, label_visibility="collapsed",
placeholder="Paste one location per line…")
if st.button("Import", key="sp_import_locs"):
_new = [l.strip() for l in st.session_state.get("_sp_paste_locs", "").splitlines() if l.strip()]
_opts = list(st.session_state.get("_sp_loc_options", []))
_sel = list(st.session_state.get("_sp_locations_multi", []))
for _l in _new:
if _l not in _opts:
_opts.append(_l)
if _l not in _sel:
_sel.append(_l)
st.session_state["_sp_loc_options"] = _opts
st.session_state["_sp_locations_multi"] = _sel
st.session_state["_sp_paste_locs"] = ""
st.rerun()
st.subheader("Exclude Keywords")
st.caption("Jobs whose **title or description** contain any of these words are silently dropped before entering the queue. Case-insensitive.")
@ -424,8 +490,8 @@ with tab_search:
if st.button("💾 Save search settings", type="primary"):
profiles[0] = {
**p,
"titles": [t.strip() for t in titles_text.splitlines() if t.strip()],
"locations": [loc.strip() for loc in locations_text.splitlines() if loc.strip()],
"titles": list(st.session_state.get("_sp_titles_multi", [])),
"locations": list(st.session_state.get("_sp_locations_multi", [])),
"boards": selected_boards,
"custom_boards": selected_custom,
"results_per_board": results_per,
@ -479,6 +545,45 @@ with tab_search:
st.success("Blocklist saved — takes effect on next discovery run.")
# ── Resume Profile tab ────────────────────────────────────────────────────────
def _upload_resume_widget(key_prefix: str) -> None:
    """Upload + parse + save a resume file. Overwrites config/plain_text_resume.yaml on success.

    Renders a file uploader and a "Parse & Save" button. On a successful
    parse, writes the structured data to ``RESUME_PATH`` as YAML and — when
    ``USER_CFG`` already exists — stores the raw extracted text (truncated
    to 8000 chars) under ``resume_raw_text`` for later LLM context, then
    reruns the script so dependent UI picks up the new profile.

    Args:
        key_prefix: Namespaces the Streamlit widget keys so this widget can
            appear more than once on the page (e.g. "rp_new", "rp_replace").
    """
    _uf = st.file_uploader(
        "Upload resume (PDF, DOCX, or ODT)",
        type=["pdf", "docx", "odt"],
        key=f"{key_prefix}_file",
    )
    if _uf and st.button("Parse & Save", type="primary", key=f"{key_prefix}_parse"):
        # Imported lazily so the parser's dependencies load only when used.
        from scripts.resume_parser import (
            extract_text_from_pdf, extract_text_from_docx,
            extract_text_from_odt, structure_resume,
        )
        _fb = _uf.read()
        # Dispatch on the lowercased file extension after the last dot.
        _ext = _uf.name.rsplit(".", 1)[-1].lower()
        if _ext == "pdf":
            _raw = extract_text_from_pdf(_fb)
        elif _ext == "odt":
            _raw = extract_text_from_odt(_fb)
        else:
            # DOCX and anything else falls through to the DOCX extractor.
            _raw = extract_text_from_docx(_fb)
        with st.spinner("Parsing resume…"):
            _parsed, _perr = structure_resume(_raw)
        # Require at least one substantive field before overwriting the
        # saved profile, so a failed parse cannot clobber good data.
        if _parsed and any(_parsed.get(k) for k in ("name", "experience", "skills")):
            RESUME_PATH.parent.mkdir(parents=True, exist_ok=True)
            RESUME_PATH.write_text(yaml.dump(_parsed, default_flow_style=False, allow_unicode=True))
            # Persist raw text to user.yaml for LLM context
            if USER_CFG.exists():
                _uy = yaml.safe_load(USER_CFG.read_text()) or {}
                _uy["resume_raw_text"] = _raw[:8000]
                save_yaml(USER_CFG, _uy)
            st.success("Resume parsed and saved!")
            st.rerun()
        else:
            st.warning(
                f"Parsing found limited data — try a different file format. "
                f"{('Error: ' + _perr) if _perr else ''}"
            )
with tab_resume:
st.caption(
f"Edit {_name}'s application profile. "
@ -486,11 +591,26 @@ with tab_resume:
)
if not RESUME_PATH.exists():
st.error(f"Resume YAML not found at `{RESUME_PATH}`. Copy or create `config/plain_text_resume.yaml`.")
st.info(
"No resume profile found yet. Upload your resume below to get started, "
"or re-run the [Setup wizard](/0_Setup) to build one step-by-step."
)
_upload_resume_widget("rp_new")
st.stop()
with st.expander("🔄 Replace Resume"):
st.caption("Re-upload to overwrite your saved profile. Parsed fields will replace the current data.")
_upload_resume_widget("rp_replace")
_data = yaml.safe_load(RESUME_PATH.read_text()) or {}
if "FILL_IN" in RESUME_PATH.read_text():
st.info(
"Some fields still need attention (marked ⚠️ below). "
"Re-upload your resume above to auto-fill them, or "
"re-run the [Setup wizard](/0_Setup) to fill them step-by-step."
)
def _field(label: str, value: str, key: str, help: str = "", password: bool = False) -> str:
needs_attention = str(value).startswith("FILL_IN") or value == ""
if needs_attention:
@ -516,6 +636,8 @@ with tab_resume:
_zip_code = _field("Zip Code", _info.get("zip_code", ""), "rp_zip")
_dob = _field("Date of Birth", _info.get("date_of_birth", ""), "rp_dob",
help="MM/DD/YYYY")
_address = _field("Street Address", _info.get("address", ""), "rp_address",
help="Used in job applications. Not shown on your resume.")
# ── Experience ────────────────────────────────────────────────────────────
with st.expander("💼 Work Experience"):
@ -600,7 +722,8 @@ with tab_resume:
_data["personal_information"] = {
**_data.get("personal_information", {}),
"name": _name, "surname": _surname, "email": _email, "phone": _phone,
"city": _city, "zip_code": _zip_code, "linkedin": _linkedin, "date_of_birth": _dob,
"city": _city, "zip_code": _zip_code, "address": _address,
"linkedin": _linkedin, "date_of_birth": _dob,
}
_data["experience_details"] = _updated_exp
_data["salary_expectations"] = {"salary_range_usd": _salary_range}
@ -806,7 +929,7 @@ with tab_system:
key=f"{llm_name}_model",
help="Lists models currently installed in Ollama.")
else:
st.caption("_Ollama not reachable — enter model name manually_")
st.caption("_Ollama not reachable — enter model name manually. Start it in the **Services** section below._")
llm_model = st.text_input("Model", value=llm_cur, key=f"{llm_name}_model")
else:
llm_model = st.text_input("Model", value=b.get("model", ""), key=f"{llm_name}_model")
@ -836,33 +959,13 @@ with tab_system:
st.session_state.pop("_llm_order_cfg_key", None)
st.success("LLM settings saved!")
# ── Notion ────────────────────────────────────────────────────────────────
with st.expander("📚 Notion"):
notion_cfg = load_yaml(NOTION_CFG) if NOTION_CFG.exists() else {}
n_token = st.text_input("Integration Token", value=notion_cfg.get("token", ""),
type="password", key="sys_notion_token",
help="notion.so/my-integrations → your integration → Internal Integration Token")
n_db_id = st.text_input("Database ID", value=notion_cfg.get("database_id", ""),
key="sys_notion_db",
help="The 32-character ID from your Notion database URL")
n_c1, n_c2 = st.columns(2)
if n_c1.button("💾 Save Notion", type="primary", key="sys_save_notion"):
save_yaml(NOTION_CFG, {**notion_cfg, "token": n_token, "database_id": n_db_id})
st.success("Notion settings saved!")
if n_c2.button("🔌 Test Notion", key="sys_test_notion"):
with st.spinner("Connecting…"):
try:
from notion_client import Client as _NC
_ndb = _NC(auth=n_token).databases.retrieve(n_db_id)
st.success(f"Connected to: **{_ndb['title'][0]['plain_text']}**")
except Exception as e:
st.error(f"Connection failed: {e}")
# ── Services ──────────────────────────────────────────────────────────────
with st.expander("🔌 Services", expanded=True):
import subprocess as _sp
import shutil as _shutil
TOKENS_CFG = CONFIG_DIR / "tokens.yaml"
COMPOSE_DIR = str(Path(__file__).parent.parent.parent)
_docker_available = bool(_shutil.which("docker"))
_sys_profile_name = _profile.inference_profile if _profile else "remote"
SYS_SERVICES = [
{
@ -944,10 +1047,12 @@ with tab_system:
index=_models.index(_loaded) if _loaded in _models else 0,
key=_mk)
else:
st.caption(f"_No models found in {svc['model_dir']}_")
st.caption(f"_No models found in `{svc['model_dir']}` — train one in the **🎯 Fine-Tune** tab above_")
with rc:
if svc.get("start") is None:
st.caption("_Manual start only_")
if svc.get("start") is None or not _docker_available:
_hint_cmd = " ".join(svc.get("start") or [])
st.caption(f"_Run from host terminal:_")
st.code(_hint_cmd, language=None)
elif up:
if st.button("⏹ Stop", key=f"sys_svc_stop_{svc['port']}", use_container_width=True):
with st.spinner(f"Stopping {svc['name']}"):
@ -1070,7 +1175,7 @@ with tab_finetune:
st.info(
f"Fine-tuning requires a GPU profile. "
f"Current profile: `{_profile.inference_profile if _profile else 'not configured'}`. "
"Change it in **My Profile** to enable this feature."
"Switch to the **👤 My Profile** tab above and change your inference profile to `single-gpu` or `dual-gpu`."
)
else:
st.subheader("Fine-Tune Your Cover Letter Model")

View file

@ -82,7 +82,7 @@ case "$CMD" in
start)
info "Starting Peregrine (PROFILE=${PROFILE})..."
make start PROFILE="$PROFILE"
PORT="$(python3 scripts/preflight.py --service streamlit 2>/dev/null || echo 8501)"
PORT="$(grep -m1 '^STREAMLIT_PORT=' .env 2>/dev/null | cut -d= -f2 || echo 8501)"
success "Peregrine is up → http://localhost:${PORT}"
;;
@ -95,7 +95,7 @@ case "$CMD" in
restart)
info "Restarting (PROFILE=${PROFILE})..."
make restart PROFILE="$PROFILE"
PORT="$(python3 scripts/preflight.py --service streamlit 2>/dev/null || echo 8501)"
PORT="$(grep -m1 '^STREAMLIT_PORT=' .env 2>/dev/null | cut -d= -f2 || echo 8501)"
success "Peregrine restarted → http://localhost:${PORT}"
;;
@ -148,7 +148,7 @@ case "$CMD" in
;;
open)
PORT="$(python3 scripts/preflight.py --service streamlit 2>/dev/null || echo 8501)"
PORT="$(grep -m1 '^STREAMLIT_PORT=' .env 2>/dev/null | cut -d= -f2 || echo 8501)"
URL="http://localhost:${PORT}"
info "Opening ${URL}"
if command -v xdg-open &>/dev/null; then