diff --git a/app/Home.py b/app/Home.py
index c516250..4cc5f37 100644
--- a/app/Home.py
+++ b/app/Home.py
@@ -11,6 +11,12 @@ import streamlit as st
sys.path.insert(0, str(Path(__file__).parent.parent))
+from scripts.user_profile import UserProfile
+
+_USER_YAML = Path(__file__).parent.parent / "config" / "user.yaml"
+_profile = UserProfile(_USER_YAML) if UserProfile.exists(_USER_YAML) else None
+_name = _profile.name if _profile else "Job Seeker"
+
from scripts.db import DEFAULT_DB, init_db, get_job_counts, purge_jobs, purge_email_data, \
purge_non_remote, archive_jobs, kill_stuck_tasks, get_task_for_job, get_active_tasks, \
insert_job, get_existing_urls
@@ -64,7 +70,7 @@ def _queue_url_imports(db_path: Path, urls: list) -> int:
return queued
-st.title("π Alex's Job Search")
+st.title(f"π {_name}'s Job Search")
st.caption("Discover → Review → Sync to Notion")
st.divider()
@@ -149,7 +155,7 @@ with mid:
.get_jobs_by_status(DEFAULT_DB, "pending")
if j.get("match_score") is None and j.get("description"))
st.subheader("Score Listings")
- st.caption(f"Run TF-IDF match scoring against Alex's resume. {unscored} pending job{'s' if unscored != 1 else ''} unscored.")
+ st.caption(f"Run TF-IDF match scoring against {_name}'s resume. {unscored} pending job{'s' if unscored != 1 else ''} unscored.")
if st.button("π Score All Unscored Jobs", use_container_width=True, type="primary",
disabled=unscored == 0):
with st.spinner("Scoringβ¦"):
diff --git a/app/pages/2_Settings.py b/app/pages/2_Settings.py
index 9e37a04..16ebbc2 100644
--- a/app/pages/2_Settings.py
+++ b/app/pages/2_Settings.py
@@ -10,6 +10,12 @@ sys.path.insert(0, str(Path(__file__).parent.parent.parent))
import streamlit as st
import yaml
+from scripts.user_profile import UserProfile
+
+_USER_YAML = Path(__file__).parent.parent.parent / "config" / "user.yaml"
+_profile = UserProfile(_USER_YAML) if UserProfile.exists(_USER_YAML) else None
+_name = _profile.name if _profile else "Job Seeker"
+
st.title("βοΈ Settings")
CONFIG_DIR = Path(__file__).parent.parent.parent / "config"
@@ -402,7 +408,6 @@ with tab_services:
import subprocess as _sp
TOKENS_CFG = CONFIG_DIR / "tokens.yaml"
- PFP_DIR = Path("/Library/Documents/Post Fight Processing")
# Service definitions: (display_name, port, start_cmd, stop_cmd, notes)
SERVICES = [
@@ -422,30 +427,14 @@ with tab_services:
"cwd": "/",
"note": "Local inference engine β systemd service",
},
- {
- "name": "Claude Code Wrapper",
- "port": 3009,
- "start": ["bash", str(PFP_DIR / "manage-services.sh"), "start"],
- "stop": ["bash", str(PFP_DIR / "manage-services.sh"), "stop"],
- "cwd": str(PFP_DIR),
- "note": "OpenAI-compat proxy β Claude Code (port 3009)",
- },
- {
- "name": "GitHub Copilot Wrapper",
- "port": 3010,
- "start": ["bash", str(PFP_DIR / "manage-copilot.sh"), "start"],
- "stop": ["bash", str(PFP_DIR / "manage-copilot.sh"), "stop"],
- "cwd": str(PFP_DIR),
- "note": "OpenAI-compat proxy β GitHub Copilot (port 3010)",
- },
{
"name": "vLLM Server",
"port": 8000,
"start": ["bash", str(Path(__file__).parent.parent.parent / "scripts/manage-vllm.sh"), "start"],
"stop": ["bash", str(Path(__file__).parent.parent.parent / "scripts/manage-vllm.sh"), "stop"],
"cwd": str(Path(__file__).parent.parent.parent),
- "model_dir": "/Library/Assets/LLM/vllm/models",
- "note": "Local vLLM inference β Ouro model family (port 8000, GPU 1)",
+ "model_dir": str(_profile.vllm_models_dir) if _profile else str(Path.home() / "models" / "vllm"),
+ "note": "Local vLLM inference (port 8000, GPU 1)",
},
{
"name": "Vision Service (moondream2)",
@@ -457,11 +446,11 @@ with tab_services:
},
{
"name": "SearXNG (company scraper)",
- "port": 8888,
- "start": ["docker", "compose", "up", "-d"],
- "stop": ["docker", "compose", "down"],
- "cwd": str(Path("/Library/Development/scrapers/SearXNG")),
- "note": "Privacy-respecting meta-search used for company research (port 8888)",
+ "port": _profile._svc["searxng_port"] if _profile else 8888,
+ "start": ["docker", "compose", "--profile", "searxng", "up", "-d", "searxng"],
+ "stop": ["docker", "compose", "stop", "searxng"],
+ "cwd": str(Path(__file__).parent.parent.parent),
+ "note": "Privacy-respecting meta-search for company research",
},
]
@@ -583,7 +572,7 @@ with tab_services:
# ββ Resume Profile tab ββββββββββββββββββββββββββββββββββββββββββββββββββββββββ
with tab_resume:
st.caption(
- "Edit Alex's application profile. "
+ f"Edit {_name}'s application profile. "
"Bullets are used as paste-able shortcuts in the Apply Workspace."
)
@@ -728,7 +717,7 @@ with tab_email:
EMAIL_EXAMPLE = CONFIG_DIR / "email.yaml.example"
st.caption(
- "Connect Alex's email via IMAP to automatically associate recruitment "
+ f"Connect {_name}'s email via IMAP to automatically associate recruitment "
"emails with job applications. Only emails that mention the company name "
"AND contain a recruitment keyword are ever imported β no personal emails "
"are touched."
@@ -789,7 +778,7 @@ with tab_email:
with tab_skills:
st.subheader("π·οΈ Skills & Keywords")
st.caption(
- "These are matched against job descriptions to select Alex's most relevant "
+ f"These are matched against job descriptions to select {_name}'s most relevant "
"experience and highlight keyword overlap in the research brief."
)
diff --git a/app/pages/4_Apply.py b/app/pages/4_Apply.py
index 123f1f4..77cab3d 100644
--- a/app/pages/4_Apply.py
+++ b/app/pages/4_Apply.py
@@ -14,6 +14,12 @@ import streamlit as st
import streamlit.components.v1 as components
import yaml
+from scripts.user_profile import UserProfile
+
+_USER_YAML = Path(__file__).parent.parent.parent / "config" / "user.yaml"
+_profile = UserProfile(_USER_YAML) if UserProfile.exists(_USER_YAML) else None
+_name = _profile.name if _profile else "Job Seeker"
+
from scripts.db import (
DEFAULT_DB, init_db, get_jobs_by_status,
update_cover_letter, mark_applied, update_job_status,
@@ -21,7 +27,7 @@ from scripts.db import (
)
from scripts.task_runner import submit_task
-DOCS_DIR = Path("/Library/Documents/JobSearch")
+DOCS_DIR = _profile.docs_dir if _profile else Path.home() / "Documents" / "JobSearch"
RESUME_YAML = Path(__file__).parent.parent.parent / "aihawk" / "data_folder" / "plain_text_resume.yaml"
st.title("π Apply Workspace")
@@ -70,13 +76,16 @@ def _make_cover_letter_pdf(job: dict, cover_letter: str, output_dir: Path) -> Pa
textColor=dark, leading=16, spaceAfter=12, alignment=TA_LEFT,
)
+ display_name = _profile.name.upper() if _profile else "YOUR NAME"
+ contact_line = " Β· ".join(filter(None, [
+ _profile.email if _profile else "",
+ _profile.phone if _profile else "",
+ _profile.linkedin if _profile else "",
+ ]))
+
story = [
- Paragraph("ALEX RIVERA", name_style),
- Paragraph(
- "alex@example.com Β· (555) 867-5309 Β· "
- "linkedin.com/in/AlexMcCann Β· hirealexmccann.site",
- contact_style,
- ),
+ Paragraph(display_name, name_style),
+ Paragraph(contact_line, contact_style),
HRFlowable(width="100%", thickness=1, color=teal, spaceBefore=8, spaceAfter=0),
Paragraph(datetime.now().strftime("%B %d, %Y"), date_style),
]
@@ -88,7 +97,7 @@ def _make_cover_letter_pdf(job: dict, cover_letter: str, output_dir: Path) -> Pa
story += [
Spacer(1, 6),
-        Paragraph("Warm regards,<br/>Alex Rivera", body_style),
+        Paragraph(f"Warm regards,<br/>{_profile.name if _profile else 'Your Name'}", body_style),
]
doc.build(story)
@@ -96,7 +105,7 @@ def _make_cover_letter_pdf(job: dict, cover_letter: str, output_dir: Path) -> Pa
# ββ Application Q&A helper βββββββββββββββββββββββββββββββββββββββββββββββββββββ
def _answer_question(job: dict, question: str) -> str:
- """Call the LLM to answer an application question in Alex's voice.
+ """Call the LLM to answer an application question in the user's voice.
    Uses research_fallback_order (claude_code → vllm → ollama_research)
rather than the default cover-letter order β the fine-tuned cover letter
@@ -106,21 +115,22 @@ def _answer_question(job: dict, question: str) -> str:
router = LLMRouter()
fallback = router.config.get("research_fallback_order") or router.config.get("fallback_order")
description_snippet = (job.get("description") or "")[:1200].strip()
- prompt = f"""You are answering job application questions for Alex Rivera, a customer success leader.
+ _persona_summary = (
+ _profile.career_summary[:200] if _profile and _profile.career_summary
+ else "a professional with experience in their field"
+ )
+ prompt = f"""You are answering job application questions for {_name}.
Background:
-- 6+ years in customer success, technical account management, and CS leadership
-- Most recent role: led Americas Customer Success at UpGuard (cybersecurity SaaS), NPS consistently β₯95
-- Also founder of M3 Consulting, a CS advisory practice for SaaS startups
-- Based in SF Bay Area; open to remote/hybrid; pronouns: any
+{_persona_summary}
-Role she's applying to: {job.get("title", "")} at {job.get("company", "")}
+Role they're applying to: {job.get("title", "")} at {job.get("company", "")}
{f"Job description excerpt:{chr(10)}{description_snippet}" if description_snippet else ""}
Application Question:
{question}
-Answer in Alex's voice β specific, warm, and confident. If the question specifies a word or character limit, respect it. Answer only the question with no preamble or sign-off."""
+Answer in {_name}'s voice β specific, warm, and confident. If the question specifies a word or character limit, respect it. Answer only the question with no preamble or sign-off."""
return router.complete(prompt, fallback_order=fallback).strip()
diff --git a/app/pages/5_Interviews.py b/app/pages/5_Interviews.py
index 7d624e3..1ea743c 100644
--- a/app/pages/5_Interviews.py
+++ b/app/pages/5_Interviews.py
@@ -22,6 +22,12 @@ sys.path.insert(0, str(Path(__file__).parent.parent.parent))
import streamlit as st
+from scripts.user_profile import UserProfile
+
+_USER_YAML = Path(__file__).parent.parent.parent / "config" / "user.yaml"
+_profile = UserProfile(_USER_YAML) if UserProfile.exists(_USER_YAML) else None
+_name = _profile.name if _profile else "Job Seeker"
+
from scripts.db import (
DEFAULT_DB, init_db,
get_interview_jobs, advance_to_stage, reject_at_stage,
@@ -186,19 +192,21 @@ def _email_modal(job: dict) -> None:
with st.spinner("Draftingβ¦"):
try:
from scripts.llm_router import complete
+ _persona = (
+ f"{_name} is a {_profile.career_summary[:120] if _profile and _profile.career_summary else 'professional'}"
+ )
draft = complete(
prompt=(
f"Draft a professional, warm reply to this email.\n\n"
f"From: {last.get('from_addr', '')}\n"
f"Subject: {last.get('subject', '')}\n\n"
f"{last.get('body', '')}\n\n"
- f"Context: Alex Rivera is a Customer Success / "
- f"Technical Account Manager applying for "
+ f"Context: {_persona} applying for "
f"{job.get('title')} at {job.get('company')}."
),
system=(
- "You are Alex Rivera's professional email assistant. "
- "Write concise, warm, and professional replies in her voice. "
+ f"You are {_name}'s professional email assistant. "
+ "Write concise, warm, and professional replies in their voice. "
"Keep it to 3β5 sentences unless more is needed."
),
)
diff --git a/app/pages/6_Interview_Prep.py b/app/pages/6_Interview_Prep.py
index 533a111..4f4e0e2 100644
--- a/app/pages/6_Interview_Prep.py
+++ b/app/pages/6_Interview_Prep.py
@@ -13,6 +13,12 @@ sys.path.insert(0, str(Path(__file__).parent.parent.parent))
import streamlit as st
+from scripts.user_profile import UserProfile
+
+_USER_YAML = Path(__file__).parent.parent.parent / "config" / "user.yaml"
+_profile = UserProfile(_USER_YAML) if UserProfile.exists(_USER_YAML) else None
+_name = _profile.name if _profile else "Job Seeker"
+
from scripts.db import (
DEFAULT_DB, init_db,
get_interview_jobs, get_contacts, get_research,
@@ -231,7 +237,7 @@ with col_prep:
system=(
f"You are a recruiter at {job.get('company')} conducting "
f"a phone screen for the {job.get('title')} role. "
- f"Ask one question at a time. After Alex answers, give "
+ f"Ask one question at a time. After {_name} answers, give "
f"brief feedback (1β2 sentences), then ask your next question. "
f"Be professional but warm."
),
@@ -253,7 +259,7 @@ with col_prep:
"content": (
f"You are a recruiter at {job.get('company')} conducting "
f"a phone screen for the {job.get('title')} role. "
- f"Ask one question at a time. After Alex answers, give "
+ f"Ask one question at a time. After {_name} answers, give "
f"brief feedback (1β2 sentences), then ask your next question."
),
}
@@ -265,7 +271,7 @@ with col_prep:
router = LLMRouter()
# Build prompt from history for single-turn backends
convo = "\n\n".join(
- f"{'Interviewer' if m['role'] == 'assistant' else 'Alex'}: {m['content']}"
+ f"{'Interviewer' if m['role'] == 'assistant' else _name}: {m['content']}"
for m in history
)
response = router.complete(
@@ -331,12 +337,12 @@ with col_context:
f"From: {last.get('from_addr', '')}\n"
f"Subject: {last.get('subject', '')}\n\n"
f"{last.get('body', '')}\n\n"
- f"Context: Alex is a CS/TAM professional applying "
+ f"Context: {_name} is a professional applying "
f"for {job.get('title')} at {job.get('company')}."
),
system=(
- "You are Alex Rivera's professional email assistant. "
- "Write concise, warm, and professional replies in her voice."
+ f"You are {_name}'s professional email assistant. "
+ "Write concise, warm, and professional replies in their voice."
),
)
st.session_state[f"draft_{selected_id}"] = draft