feat: extract hard-coded personal references from all app pages via UserProfile

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
pyr0ball 2026-02-24 19:00:47 -08:00
parent 5a4d2b6b17
commit b5b4f37bf7
5 changed files with 74 additions and 55 deletions

View file

@ -11,6 +11,12 @@ import streamlit as st
sys.path.insert(0, str(Path(__file__).parent.parent))
from scripts.user_profile import UserProfile
_USER_YAML = Path(__file__).parent.parent / "config" / "user.yaml"
_profile = UserProfile(_USER_YAML) if UserProfile.exists(_USER_YAML) else None
_name = _profile.name if _profile else "Job Seeker"
from scripts.db import DEFAULT_DB, init_db, get_job_counts, purge_jobs, purge_email_data, \
purge_non_remote, archive_jobs, kill_stuck_tasks, get_task_for_job, get_active_tasks, \
insert_job, get_existing_urls
@ -64,7 +70,7 @@ def _queue_url_imports(db_path: Path, urls: list) -> int:
return queued
st.title("🔍 Meghan's Job Search")
st.title(f"🔍 {_name}'s Job Search")
st.caption("Discover → Review → Sync to Notion")
st.divider()
@ -149,7 +155,7 @@ with mid:
.get_jobs_by_status(DEFAULT_DB, "pending")
if j.get("match_score") is None and j.get("description"))
st.subheader("Score Listings")
st.caption(f"Run TF-IDF match scoring against Meghan's resume. {unscored} pending job{'s' if unscored != 1 else ''} unscored.")
st.caption(f"Run TF-IDF match scoring against {_name}'s resume. {unscored} pending job{'s' if unscored != 1 else ''} unscored.")
if st.button("📊 Score All Unscored Jobs", use_container_width=True, type="primary",
disabled=unscored == 0):
with st.spinner("Scoring…"):

View file

@ -10,6 +10,12 @@ sys.path.insert(0, str(Path(__file__).parent.parent.parent))
import streamlit as st
import yaml
from scripts.user_profile import UserProfile
_USER_YAML = Path(__file__).parent.parent.parent / "config" / "user.yaml"
_profile = UserProfile(_USER_YAML) if UserProfile.exists(_USER_YAML) else None
_name = _profile.name if _profile else "Job Seeker"
st.title("⚙️ Settings")
CONFIG_DIR = Path(__file__).parent.parent.parent / "config"
@ -402,7 +408,6 @@ with tab_services:
import subprocess as _sp
TOKENS_CFG = CONFIG_DIR / "tokens.yaml"
PFP_DIR = Path("/Library/Documents/Post Fight Processing")
# Service definitions: (display_name, port, start_cmd, stop_cmd, notes)
SERVICES = [
@ -422,30 +427,14 @@ with tab_services:
"cwd": "/",
"note": "Local inference engine — systemd service",
},
{
"name": "Claude Code Wrapper",
"port": 3009,
"start": ["bash", str(PFP_DIR / "manage-services.sh"), "start"],
"stop": ["bash", str(PFP_DIR / "manage-services.sh"), "stop"],
"cwd": str(PFP_DIR),
"note": "OpenAI-compat proxy → Claude Code (port 3009)",
},
{
"name": "GitHub Copilot Wrapper",
"port": 3010,
"start": ["bash", str(PFP_DIR / "manage-copilot.sh"), "start"],
"stop": ["bash", str(PFP_DIR / "manage-copilot.sh"), "stop"],
"cwd": str(PFP_DIR),
"note": "OpenAI-compat proxy → GitHub Copilot (port 3010)",
},
{
"name": "vLLM Server",
"port": 8000,
"start": ["bash", str(Path(__file__).parent.parent.parent / "scripts/manage-vllm.sh"), "start"],
"stop": ["bash", str(Path(__file__).parent.parent.parent / "scripts/manage-vllm.sh"), "stop"],
"cwd": str(Path(__file__).parent.parent.parent),
"model_dir": "/Library/Assets/LLM/vllm/models",
"note": "Local vLLM inference — Ouro model family (port 8000, GPU 1)",
"model_dir": str(_profile.vllm_models_dir) if _profile else str(Path.home() / "models" / "vllm"),
"note": "Local vLLM inference (port 8000, GPU 1)",
},
{
"name": "Vision Service (moondream2)",
@ -457,11 +446,11 @@ with tab_services:
},
{
"name": "SearXNG (company scraper)",
"port": 8888,
"start": ["docker", "compose", "up", "-d"],
"stop": ["docker", "compose", "down"],
"cwd": str(Path("/Library/Development/scrapers/SearXNG")),
"note": "Privacy-respecting meta-search used for company research (port 8888)",
"port": _profile._svc["searxng_port"] if _profile else 8888,
"start": ["docker", "compose", "--profile", "searxng", "up", "-d", "searxng"],
"stop": ["docker", "compose", "stop", "searxng"],
"cwd": str(Path(__file__).parent.parent.parent),
"note": "Privacy-respecting meta-search for company research",
},
]
@ -583,7 +572,7 @@ with tab_services:
# ── Resume Profile tab ────────────────────────────────────────────────────────
with tab_resume:
st.caption(
"Edit Meghan's application profile. "
f"Edit {_name}'s application profile. "
"Bullets are used as paste-able shortcuts in the Apply Workspace."
)
@ -728,7 +717,7 @@ with tab_email:
EMAIL_EXAMPLE = CONFIG_DIR / "email.yaml.example"
st.caption(
"Connect Meghan's email via IMAP to automatically associate recruitment "
f"Connect {_name}'s email via IMAP to automatically associate recruitment "
"emails with job applications. Only emails that mention the company name "
"AND contain a recruitment keyword are ever imported — no personal emails "
"are touched."
@ -789,7 +778,7 @@ with tab_email:
with tab_skills:
st.subheader("🏷️ Skills & Keywords")
st.caption(
"These are matched against job descriptions to select Meghan's most relevant "
f"These are matched against job descriptions to select {_name}'s most relevant "
"experience and highlight keyword overlap in the research brief."
)

View file

@ -14,6 +14,12 @@ import streamlit as st
import streamlit.components.v1 as components
import yaml
from scripts.user_profile import UserProfile
_USER_YAML = Path(__file__).parent.parent.parent / "config" / "user.yaml"
_profile = UserProfile(_USER_YAML) if UserProfile.exists(_USER_YAML) else None
_name = _profile.name if _profile else "Job Seeker"
from scripts.db import (
DEFAULT_DB, init_db, get_jobs_by_status,
update_cover_letter, mark_applied, update_job_status,
@ -21,7 +27,7 @@ from scripts.db import (
)
from scripts.task_runner import submit_task
DOCS_DIR = Path("/Library/Documents/JobSearch")
DOCS_DIR = _profile.docs_dir if _profile else Path.home() / "Documents" / "JobSearch"
RESUME_YAML = Path(__file__).parent.parent.parent / "aihawk" / "data_folder" / "plain_text_resume.yaml"
st.title("🚀 Apply Workspace")
@ -70,13 +76,16 @@ def _make_cover_letter_pdf(job: dict, cover_letter: str, output_dir: Path) -> Pa
textColor=dark, leading=16, spaceAfter=12, alignment=TA_LEFT,
)
display_name = _profile.name.upper() if _profile else "YOUR NAME"
contact_line = " · ".join(filter(None, [
_profile.email if _profile else "",
_profile.phone if _profile else "",
_profile.linkedin if _profile else "",
]))
story = [
Paragraph("MEGHAN McCANN", name_style),
Paragraph(
"meghan.m.mccann@gmail.com · (510) 764-3155 · "
"linkedin.com/in/MeghanMcCann · hiremeghanmccann.site",
contact_style,
),
Paragraph(display_name, name_style),
Paragraph(contact_line, contact_style),
HRFlowable(width="100%", thickness=1, color=teal, spaceBefore=8, spaceAfter=0),
Paragraph(datetime.now().strftime("%B %d, %Y"), date_style),
]
@ -88,7 +97,7 @@ def _make_cover_letter_pdf(job: dict, cover_letter: str, output_dir: Path) -> Pa
story += [
Spacer(1, 6),
Paragraph("Warm regards,<br/><br/>Meghan McCann", body_style),
Paragraph(f"Warm regards,<br/><br/>{_profile.name if _profile else 'Your Name'}", body_style),
]
doc.build(story)
@ -96,7 +105,7 @@ def _make_cover_letter_pdf(job: dict, cover_letter: str, output_dir: Path) -> Pa
# ── Application Q&A helper ─────────────────────────────────────────────────────
def _answer_question(job: dict, question: str) -> str:
"""Call the LLM to answer an application question in Meghan's voice.
"""Call the LLM to answer an application question in the user's voice.
Uses research_fallback_order (claude_code → vllm → ollama_research)
rather than the default cover-letter order the fine-tuned cover letter
@ -106,21 +115,22 @@ def _answer_question(job: dict, question: str) -> str:
router = LLMRouter()
fallback = router.config.get("research_fallback_order") or router.config.get("fallback_order")
description_snippet = (job.get("description") or "")[:1200].strip()
prompt = f"""You are answering job application questions for Meghan McCann, a customer success leader.
_persona_summary = (
_profile.career_summary[:200] if _profile and _profile.career_summary
else "a professional with experience in their field"
)
prompt = f"""You are answering job application questions for {_name}.
Background:
- 6+ years in customer success, technical account management, and CS leadership
- Most recent role: led Americas Customer Success at UpGuard (cybersecurity SaaS), NPS consistently 95
- Also founder of M3 Consulting, a CS advisory practice for SaaS startups
- Based in SF Bay Area; open to remote/hybrid; pronouns: any
{_persona_summary}
Role she's applying to: {job.get("title", "")} at {job.get("company", "")}
Role they're applying to: {job.get("title", "")} at {job.get("company", "")}
{f"Job description excerpt:{chr(10)}{description_snippet}" if description_snippet else ""}
Application Question:
{question}
Answer in Meghan's voice — specific, warm, and confident. If the question specifies a word or character limit, respect it. Answer only the question with no preamble or sign-off."""
Answer in {_name}'s voice — specific, warm, and confident. If the question specifies a word or character limit, respect it. Answer only the question with no preamble or sign-off."""
return router.complete(prompt, fallback_order=fallback).strip()

View file

@ -22,6 +22,12 @@ sys.path.insert(0, str(Path(__file__).parent.parent.parent))
import streamlit as st
from scripts.user_profile import UserProfile
_USER_YAML = Path(__file__).parent.parent.parent / "config" / "user.yaml"
_profile = UserProfile(_USER_YAML) if UserProfile.exists(_USER_YAML) else None
_name = _profile.name if _profile else "Job Seeker"
from scripts.db import (
DEFAULT_DB, init_db,
get_interview_jobs, advance_to_stage, reject_at_stage,
@ -186,19 +192,21 @@ def _email_modal(job: dict) -> None:
with st.spinner("Drafting…"):
try:
from scripts.llm_router import complete
_persona = (
f"{_name} is a {_profile.career_summary[:120] if _profile and _profile.career_summary else 'professional'}"
)
draft = complete(
prompt=(
f"Draft a professional, warm reply to this email.\n\n"
f"From: {last.get('from_addr', '')}\n"
f"Subject: {last.get('subject', '')}\n\n"
f"{last.get('body', '')}\n\n"
f"Context: Meghan McCann is a Customer Success / "
f"Technical Account Manager applying for "
f"Context: {_persona} applying for "
f"{job.get('title')} at {job.get('company')}."
),
system=(
"You are Meghan McCann's professional email assistant. "
"Write concise, warm, and professional replies in her voice. "
f"You are {_name}'s professional email assistant. "
"Write concise, warm, and professional replies in their voice. "
"Keep it to 3–5 sentences unless more is needed."
),
)

View file

@ -13,6 +13,12 @@ sys.path.insert(0, str(Path(__file__).parent.parent.parent))
import streamlit as st
from scripts.user_profile import UserProfile
_USER_YAML = Path(__file__).parent.parent.parent / "config" / "user.yaml"
_profile = UserProfile(_USER_YAML) if UserProfile.exists(_USER_YAML) else None
_name = _profile.name if _profile else "Job Seeker"
from scripts.db import (
DEFAULT_DB, init_db,
get_interview_jobs, get_contacts, get_research,
@ -231,7 +237,7 @@ with col_prep:
system=(
f"You are a recruiter at {job.get('company')} conducting "
f"a phone screen for the {job.get('title')} role. "
f"Ask one question at a time. After Meghan answers, give "
f"Ask one question at a time. After {_name} answers, give "
f"brief feedback (1–2 sentences), then ask your next question. "
f"Be professional but warm."
),
@ -253,7 +259,7 @@ with col_prep:
"content": (
f"You are a recruiter at {job.get('company')} conducting "
f"a phone screen for the {job.get('title')} role. "
f"Ask one question at a time. After Meghan answers, give "
f"Ask one question at a time. After {_name} answers, give "
f"brief feedback (1–2 sentences), then ask your next question."
),
}
@ -265,7 +271,7 @@ with col_prep:
router = LLMRouter()
# Build prompt from history for single-turn backends
convo = "\n\n".join(
f"{'Interviewer' if m['role'] == 'assistant' else 'Meghan'}: {m['content']}"
f"{'Interviewer' if m['role'] == 'assistant' else _name}: {m['content']}"
for m in history
)
response = router.complete(
@ -331,12 +337,12 @@ with col_context:
f"From: {last.get('from_addr', '')}\n"
f"Subject: {last.get('subject', '')}\n\n"
f"{last.get('body', '')}\n\n"
f"Context: Meghan is a CS/TAM professional applying "
f"Context: {_name} is a professional applying "
f"for {job.get('title')} at {job.get('company')}."
),
system=(
"You are Meghan McCann's professional email assistant. "
"Write concise, warm, and professional replies in her voice."
f"You are {_name}'s professional email assistant. "
"Write concise, warm, and professional replies in their voice."
),
)
st.session_state[f"draft_{selected_id}"] = draft