Compare commits
No commits in common. "main" and "v0.8.1" have entirely different histories.
91 changed files with 541 additions and 7309 deletions
44
.cliff.toml
44
.cliff.toml
|
|
@ -1,44 +0,0 @@
|
||||||
# git-cliff changelog configuration for Peregrine
|
|
||||||
# See: https://git-cliff.org/docs/configuration
|
|
||||||
|
|
||||||
[changelog]
|
|
||||||
header = """
|
|
||||||
# Changelog\n
|
|
||||||
"""
|
|
||||||
body = """
|
|
||||||
{% if version %}\
|
|
||||||
## [{{ version | trim_start_matches(pat="v") }}] - {{ timestamp | date(format="%Y-%m-%d") }}
|
|
||||||
{% else %}\
|
|
||||||
## [Unreleased]
|
|
||||||
{% endif %}\
|
|
||||||
{% for group, commits in commits | group_by(attribute="group") %}
|
|
||||||
### {{ group | upper_first }}
|
|
||||||
{% for commit in commits %}
|
|
||||||
- {% if commit.scope %}**{{ commit.scope }}:** {% endif %}{{ commit.message | upper_first }}\
|
|
||||||
{% endfor %}
|
|
||||||
{% endfor %}\n
|
|
||||||
"""
|
|
||||||
trim = true
|
|
||||||
|
|
||||||
[git]
|
|
||||||
conventional_commits = true
|
|
||||||
filter_unconventional = true
|
|
||||||
split_commits = false
|
|
||||||
commit_preprocessors = []
|
|
||||||
commit_parsers = [
|
|
||||||
{ message = "^feat", group = "Features" },
|
|
||||||
{ message = "^fix", group = "Bug Fixes" },
|
|
||||||
{ message = "^perf", group = "Performance" },
|
|
||||||
{ message = "^refactor", group = "Refactoring" },
|
|
||||||
{ message = "^docs", group = "Documentation" },
|
|
||||||
{ message = "^test", group = "Testing" },
|
|
||||||
{ message = "^chore", group = "Chores" },
|
|
||||||
{ message = "^ci", group = "CI/CD" },
|
|
||||||
{ message = "^revert", group = "Reverts" },
|
|
||||||
]
|
|
||||||
filter_commits = false
|
|
||||||
tag_pattern = "v[0-9].*"
|
|
||||||
skip_tags = ""
|
|
||||||
ignore_tags = ""
|
|
||||||
topo_order = false
|
|
||||||
sort_commits = "oldest"
|
|
||||||
23
.env.example
23
.env.example
|
|
@ -2,7 +2,7 @@
|
||||||
# Auto-generated by the setup wizard, or fill in manually.
|
# Auto-generated by the setup wizard, or fill in manually.
|
||||||
# NEVER commit .env to git.
|
# NEVER commit .env to git.
|
||||||
|
|
||||||
STREAMLIT_PORT=8502
|
STREAMLIT_PORT=8501
|
||||||
OLLAMA_PORT=11434
|
OLLAMA_PORT=11434
|
||||||
VLLM_PORT=8000
|
VLLM_PORT=8000
|
||||||
SEARXNG_PORT=8888
|
SEARXNG_PORT=8888
|
||||||
|
|
@ -12,21 +12,10 @@ VISION_REVISION=2025-01-09
|
||||||
|
|
||||||
DOCS_DIR=~/Documents/JobSearch
|
DOCS_DIR=~/Documents/JobSearch
|
||||||
OLLAMA_MODELS_DIR=~/models/ollama
|
OLLAMA_MODELS_DIR=~/models/ollama
|
||||||
VLLM_MODELS_DIR=~/models/vllm # override with full path to your model dir
|
VLLM_MODELS_DIR=~/models/vllm
|
||||||
VLLM_MODEL=Ouro-1.4B # cover letters — fast 1.4B model
|
VLLM_MODEL=Ouro-1.4B
|
||||||
VLLM_RESEARCH_MODEL=Ouro-2.6B-Thinking # research — reasoning 2.6B model; restart vllm to switch
|
|
||||||
VLLM_MAX_MODEL_LEN=4096 # increase to 8192 for Thinking models with long CoT
|
|
||||||
VLLM_GPU_MEM_UTIL=0.75 # lower to 0.6 if sharing GPU with other services
|
|
||||||
OLLAMA_DEFAULT_MODEL=llama3.2:3b
|
OLLAMA_DEFAULT_MODEL=llama3.2:3b
|
||||||
|
|
||||||
# ── LLM env-var auto-config (alternative to config/llm.yaml) ─────────────────
|
|
||||||
# Set any of these to configure LLM backends without needing a config/llm.yaml.
|
|
||||||
# Priority: Anthropic > OpenAI-compat > Ollama (always tried as local fallback).
|
|
||||||
OLLAMA_HOST=http://localhost:11434 # Ollama host; override if on a different machine
|
|
||||||
OLLAMA_MODEL=llama3.2:3b # model to request from Ollama
|
|
||||||
OPENAI_MODEL=gpt-4o-mini # model override for OpenAI-compat backend
|
|
||||||
ANTHROPIC_MODEL=claude-haiku-4-5-20251001 # model override for Anthropic backend
|
|
||||||
|
|
||||||
# API keys (required for remote profile)
|
# API keys (required for remote profile)
|
||||||
ANTHROPIC_API_KEY=
|
ANTHROPIC_API_KEY=
|
||||||
OPENAI_COMPAT_URL=
|
OPENAI_COMPAT_URL=
|
||||||
|
|
@ -39,12 +28,6 @@ FORGEJO_API_URL=https://git.opensourcesolarpunk.com/api/v1
|
||||||
# GITHUB_TOKEN= # future — enable when public mirror is active
|
# GITHUB_TOKEN= # future — enable when public mirror is active
|
||||||
# GITHUB_REPO= # future
|
# GITHUB_REPO= # future
|
||||||
|
|
||||||
# ── CF-hosted coordinator (Paid+ tier) ───────────────────────────────────────
|
|
||||||
# Set CF_LICENSE_KEY to authenticate with the hosted coordinator.
|
|
||||||
# Leave both blank for local self-hosted cf-orch or bare-metal inference.
|
|
||||||
CF_LICENSE_KEY=
|
|
||||||
CF_ORCH_URL=https://orch.circuitforge.tech
|
|
||||||
|
|
||||||
# Cloud multi-tenancy (compose.cloud.yml only — do not set for local installs)
|
# Cloud multi-tenancy (compose.cloud.yml only — do not set for local installs)
|
||||||
CLOUD_MODE=false
|
CLOUD_MODE=false
|
||||||
CLOUD_DATA_ROOT=/devl/menagerie-data
|
CLOUD_DATA_ROOT=/devl/menagerie-data
|
||||||
|
|
|
||||||
|
|
@ -1,57 +0,0 @@
|
||||||
# Peregrine CI — lint, type-check, test on PR/push
|
|
||||||
# Full-stack: FastAPI (Python) + Vue 3 SPA (Node)
|
|
||||||
# Adapted from Circuit-Forge/cf-agents workflows/ci.yml (cf-agents#4 tracks the
|
|
||||||
# upstream ci-fullstack.yml variant; update this file when that lands).
|
|
||||||
|
|
||||||
name: CI
|
|
||||||
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches: [main, 'feature/**', 'fix/**']
|
|
||||||
pull_request:
|
|
||||||
branches: [main]
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
backend:
|
|
||||||
name: Backend (Python)
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v4
|
|
||||||
|
|
||||||
- uses: actions/setup-python@v5
|
|
||||||
with:
|
|
||||||
python-version: '3.12'
|
|
||||||
cache: pip
|
|
||||||
|
|
||||||
- name: Install dependencies
|
|
||||||
run: pip install -r requirements.txt
|
|
||||||
|
|
||||||
- name: Lint
|
|
||||||
run: ruff check .
|
|
||||||
|
|
||||||
- name: Test
|
|
||||||
run: pytest tests/ -v --tb=short
|
|
||||||
|
|
||||||
frontend:
|
|
||||||
name: Frontend (Vue)
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
defaults:
|
|
||||||
run:
|
|
||||||
working-directory: web
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v4
|
|
||||||
|
|
||||||
- uses: actions/setup-node@v4
|
|
||||||
with:
|
|
||||||
node-version: '20'
|
|
||||||
cache: npm
|
|
||||||
cache-dependency-path: web/package-lock.json
|
|
||||||
|
|
||||||
- name: Install dependencies
|
|
||||||
run: npm ci
|
|
||||||
|
|
||||||
- name: Type check
|
|
||||||
run: npx vue-tsc --noEmit
|
|
||||||
|
|
||||||
- name: Test
|
|
||||||
run: npm run test
|
|
||||||
|
|
@ -1,34 +0,0 @@
|
||||||
# Mirror push to GitHub and Codeberg on every push to main or tag.
|
|
||||||
# Copied from Circuit-Forge/cf-agents workflows/mirror.yml
|
|
||||||
# Required secrets: GITHUB_MIRROR_TOKEN, CODEBERG_MIRROR_TOKEN
|
|
||||||
|
|
||||||
name: Mirror
|
|
||||||
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches: [main]
|
|
||||||
tags: ['v*']
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
mirror:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v4
|
|
||||||
with:
|
|
||||||
fetch-depth: 0
|
|
||||||
|
|
||||||
- name: Mirror to GitHub
|
|
||||||
env:
|
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_MIRROR_TOKEN }}
|
|
||||||
REPO: ${{ github.event.repository.name }}
|
|
||||||
run: |
|
|
||||||
git remote add github "https://x-access-token:${GITHUB_TOKEN}@github.com/CircuitForgeLLC/${REPO}.git"
|
|
||||||
git push github --mirror
|
|
||||||
|
|
||||||
- name: Mirror to Codeberg
|
|
||||||
env:
|
|
||||||
CODEBERG_TOKEN: ${{ secrets.CODEBERG_MIRROR_TOKEN }}
|
|
||||||
REPO: ${{ github.event.repository.name }}
|
|
||||||
run: |
|
|
||||||
git remote add codeberg "https://CircuitForge:${CODEBERG_TOKEN}@codeberg.org/CircuitForge/${REPO}.git"
|
|
||||||
git push codeberg --mirror
|
|
||||||
|
|
@ -1,71 +0,0 @@
|
||||||
# Tag-triggered release workflow.
|
|
||||||
# Generates changelog and creates Forgejo release on v* tags.
|
|
||||||
# Copied from Circuit-Forge/cf-agents workflows/release.yml
|
|
||||||
#
|
|
||||||
# Docker push is intentionally disabled — BSL 1.1 registry policy not yet resolved.
|
|
||||||
# Tracked in Circuit-Forge/cf-agents#3. Re-enable the Docker steps when that lands.
|
|
||||||
#
|
|
||||||
# Required secrets: FORGEJO_RELEASE_TOKEN
|
|
||||||
# (GHCR_TOKEN not needed until Docker push is enabled)
|
|
||||||
|
|
||||||
name: Release
|
|
||||||
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
tags: ['v*']
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
release:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v4
|
|
||||||
with:
|
|
||||||
fetch-depth: 0
|
|
||||||
|
|
||||||
# ── Changelog ────────────────────────────────────────────────────────────
|
|
||||||
- name: Generate changelog
|
|
||||||
uses: orhun/git-cliff-action@v3
|
|
||||||
id: cliff
|
|
||||||
with:
|
|
||||||
config: .cliff.toml
|
|
||||||
args: --latest --strip header
|
|
||||||
env:
|
|
||||||
OUTPUT: CHANGES.md
|
|
||||||
|
|
||||||
# ── Docker (disabled — BSL registry policy pending cf-agents#3) ──────────
|
|
||||||
# - name: Set up QEMU
|
|
||||||
# uses: docker/setup-qemu-action@v3
|
|
||||||
# - name: Set up Buildx
|
|
||||||
# uses: docker/setup-buildx-action@v3
|
|
||||||
# - name: Log in to GHCR
|
|
||||||
# uses: docker/login-action@v3
|
|
||||||
# with:
|
|
||||||
# registry: ghcr.io
|
|
||||||
# username: ${{ github.actor }}
|
|
||||||
# password: ${{ secrets.GHCR_TOKEN }}
|
|
||||||
# - name: Build and push Docker image
|
|
||||||
# uses: docker/build-push-action@v6
|
|
||||||
# with:
|
|
||||||
# context: .
|
|
||||||
# push: true
|
|
||||||
# platforms: linux/amd64,linux/arm64
|
|
||||||
# tags: |
|
|
||||||
# ghcr.io/circuitforgellc/peregrine:${{ github.ref_name }}
|
|
||||||
# ghcr.io/circuitforgellc/peregrine:latest
|
|
||||||
# cache-from: type=gha
|
|
||||||
# cache-to: type=gha,mode=max
|
|
||||||
|
|
||||||
# ── Forgejo Release ───────────────────────────────────────────────────────
|
|
||||||
- name: Create Forgejo release
|
|
||||||
env:
|
|
||||||
FORGEJO_TOKEN: ${{ secrets.FORGEJO_RELEASE_TOKEN }}
|
|
||||||
REPO: ${{ github.event.repository.name }}
|
|
||||||
TAG: ${{ github.ref_name }}
|
|
||||||
NOTES: ${{ steps.cliff.outputs.content }}
|
|
||||||
run: |
|
|
||||||
curl -sS -X POST \
|
|
||||||
"https://git.opensourcesolarpunk.com/api/v1/repos/Circuit-Forge/${REPO}/releases" \
|
|
||||||
-H "Authorization: token ${FORGEJO_TOKEN}" \
|
|
||||||
-H "Content-Type: application/json" \
|
|
||||||
-d "$(jq -n --arg tag "$TAG" --arg body "$NOTES" \
|
|
||||||
'{tag_name: $tag, name: $tag, body: $body}')"
|
|
||||||
6
.github/workflows/ci.yml
vendored
6
.github/workflows/ci.yml
vendored
|
|
@ -22,12 +22,6 @@ jobs:
|
||||||
python-version: "3.11"
|
python-version: "3.11"
|
||||||
cache: pip
|
cache: pip
|
||||||
|
|
||||||
- name: Configure git credentials for Forgejo
|
|
||||||
env:
|
|
||||||
FORGEJO_TOKEN: ${{ secrets.FORGEJO_TOKEN }}
|
|
||||||
run: |
|
|
||||||
git config --global url."https://oauth2:${FORGEJO_TOKEN}@git.opensourcesolarpunk.com/".insteadOf "https://git.opensourcesolarpunk.com/"
|
|
||||||
|
|
||||||
- name: Install dependencies
|
- name: Install dependencies
|
||||||
run: pip install -r requirements.txt
|
run: pip install -r requirements.txt
|
||||||
|
|
||||||
|
|
|
||||||
108
CHANGELOG.md
108
CHANGELOG.md
|
|
@ -9,114 +9,6 @@ Format follows [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
## [0.8.5] — 2026-04-02
|
|
||||||
|
|
||||||
### Added
|
|
||||||
|
|
||||||
- **Vue onboarding wizard** — 7-step first-run setup replaces the Streamlit wizard
|
|
||||||
in the Vue SPA: Hardware detection → Tier → Resume upload/build → Identity →
|
|
||||||
Inference & API keys → Search preferences → Integrations. Progress saves to
|
|
||||||
`user.yaml` on every step; crash-recovery resumes from the last completed step.
|
|
||||||
- **Wizard API endpoints** — `GET /api/wizard/status`, `POST /api/wizard/step`,
|
|
||||||
`GET /api/wizard/hardware`, `POST /api/wizard/inference/test`,
|
|
||||||
`POST /api/wizard/complete`. Inference test always soft-fails so Ollama being
|
|
||||||
unreachable never blocks setup completion.
|
|
||||||
- **Cloud auto-skip** — cloud instances automatically complete steps 1 (hardware),
|
|
||||||
2 (tier), and 5 (inference) and drop the user directly on the Resume step.
|
|
||||||
- **`wizardGuard` router gate** — all Vue routes require wizard completion; completed
|
|
||||||
users are bounced away from `/setup` to `/`.
|
|
||||||
- **Chip-input search step** — job titles and locations entered as press-Enter/comma
|
|
||||||
chips; validates at least one title before advancing.
|
|
||||||
- **Integrations tile grid** — optional step 7 shows Notion, Calendar, Slack, Discord,
|
|
||||||
Drive with paid-tier badges; skippable on Finish.
|
|
||||||
|
|
||||||
### Fixed
|
|
||||||
|
|
||||||
- **User config isolation: dangerous fallback removed** — `_user_yaml_path()` fell
|
|
||||||
back to `/devl/job-seeker/config/user.yaml` (legacy profile) when `user.yaml`
|
|
||||||
didn't exist at the expected path; new users now get an empty dict instead of
|
|
||||||
another user's data. Affects profile, resume, search, and all wizard endpoints.
|
|
||||||
- **Resume path not user-isolated** — `RESUME_PATH = Path("config/plain_text_resume.yaml")`
|
|
||||||
was a relative CWD path shared across all users. Replaced with `_resume_path()`
|
|
||||||
derived from `_user_yaml_path()` / `STAGING_DB`.
|
|
||||||
- **Resume upload silently returned empty data** — `upload_resume` was passing a
|
|
||||||
file path string to `structure_resume()` which expects raw text; now reads bytes
|
|
||||||
and dispatches to the correct extractor (`extract_text_from_pdf` / `_docx` / `_odt`).
|
|
||||||
- **Wizard resume step read wrong envelope field** — `WizardResumeStep.vue` read
|
|
||||||
`data.experience` but the upload response wraps parsed data under `data.data`.
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## [0.8.4] — 2026-04-02
|
|
||||||
|
|
||||||
### Fixed
|
|
||||||
|
|
||||||
- **Cloud: cover letter used wrong user's profile** — `generate_cover_letter.generate()`
|
|
||||||
loaded `_profile` from the global `config/user.yaml` at module import time, so all
|
|
||||||
cloud users got the default user's name, voice, and mission preferences in their
|
|
||||||
generated letters. `generate()` now accepts a `user_yaml_path` parameter; `task_runner`
|
|
||||||
derives it from the per-user config directory (`db_path/../config/user.yaml`) and
|
|
||||||
passes it through. `_build_system_context`, `_build_mission_notes`, `detect_mission_alignment`,
|
|
||||||
`build_prompt`, and `_trim_to_letter_end` all accept a `profile` override so the
|
|
||||||
per-call profile is used end-to-end without breaking CLI mode.
|
|
||||||
- **Apply Workspace: hardcoded config paths in cloud mode** — `4_Apply.py` was loading
|
|
||||||
`_USER_YAML` and `RESUME_YAML` from the repo-root `config/` before `resolve_session()`
|
|
||||||
ran, so cloud users saw the global (Meg's) resume in the Apply tab. Both paths now
|
|
||||||
derive from `get_config_dir()` after session resolution.
|
|
||||||
|
|
||||||
### Changed
|
|
||||||
|
|
||||||
- **Vue SPA open to all tiers** — Vue 3 frontend is no longer gated behind the beta
|
|
||||||
flag; all tier users can switch to the Vue UI from Settings.
|
|
||||||
- **LLM model candidates** — vllm backend now tries Qwen2.5-3B first, Phi-4-mini
|
|
||||||
as fallback (was reversed). cf_orch allocation block added to vllm config.
|
|
||||||
- **Preflight** — removed `vllm` from Docker adoption list; vllm is now managed
|
|
||||||
entirely by cf-orch and should not be stubbed by preflight.
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## [0.8.3] — 2026-04-01
|
|
||||||
|
|
||||||
### Fixed
|
|
||||||
- **CI: Forgejo auth** — GitHub Actions `pip install` was failing to fetch
|
|
||||||
`circuitforge-core` from the private Forgejo VCS URL. Added `FORGEJO_TOKEN`
|
|
||||||
repository secret and a `git config insteadOf` step to inject credentials
|
|
||||||
before `pip install`.
|
|
||||||
- **CI: settings API tests** — 6 `test_dev_api_settings` PUT/POST tests were
|
|
||||||
returning HTTP 500 in CI because `_user_yaml_path()` read the module-level
|
|
||||||
`DB_PATH` constant (frozen at import time), so `monkeypatch.setenv("STAGING_DB")`
|
|
||||||
had no effect. Fixed by reading `os.environ` at call time.
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## [0.8.2] — 2026-04-01
|
|
||||||
|
|
||||||
### Fixed
|
|
||||||
- **CI pipeline** — `pip install -r requirements.txt` was failing in GitHub Actions
|
|
||||||
because `-e ../circuitforge-core` requires a sibling directory that doesn't exist
|
|
||||||
in a single-repo checkout. Replaced with a `git+https://` VCS URL fallback;
|
|
||||||
`Dockerfile.cfcore` still installs from the local `COPY` to avoid redundant
|
|
||||||
network fetches during Docker builds.
|
|
||||||
- **Vue-nav reload loop** — `sync_ui_cookie()` was calling
|
|
||||||
`window.parent.location.reload()` on every render when `user.yaml` has
|
|
||||||
`ui_preference: vue` but no Caddy proxy is in the traffic path (test instances,
|
|
||||||
bare Docker). Gated the reload on `PEREGRINE_CADDY_PROXY=1`; instances without
|
|
||||||
the env var set the cookie silently and skip the reload.
|
|
||||||
|
|
||||||
### Changed
|
|
||||||
- **cfcore VRAM lease integration** — the task scheduler now acquires a VRAM lease
|
|
||||||
from the cf-orch coordinator before running a batch of LLM tasks and releases it
|
|
||||||
when the batch completes. Visible in the coordinator dashboard at `:7700`.
|
|
||||||
- **`CF_ORCH_URL` env var** — scheduler reads coordinator address from
|
|
||||||
`CF_ORCH_URL` (default `http://localhost:7700`); set to
|
|
||||||
`http://host.docker.internal:7700` in Docker compose files so containers can
|
|
||||||
reach the host coordinator.
|
|
||||||
- **All compose files on `Dockerfile.cfcore`** — `compose.yml`, `compose.cloud.yml`,
|
|
||||||
and `compose.test-cfcore.yml` all use the parent-context build. `build: .` is
|
|
||||||
removed from `compose.yml`.
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## [0.8.1] — 2026-04-01
|
## [0.8.1] — 2026-04-01
|
||||||
|
|
||||||
### Fixed
|
### Fixed
|
||||||
|
|
|
||||||
|
|
@ -34,7 +34,7 @@ full instructions.
|
||||||
```bash
|
```bash
|
||||||
git clone https://git.opensourcesolarpunk.com/pyr0ball/peregrine.git
|
git clone https://git.opensourcesolarpunk.com/pyr0ball/peregrine.git
|
||||||
cd peregrine
|
cd peregrine
|
||||||
./install.sh # installs deps, activates git hooks
|
./setup.sh # installs deps, activates git hooks
|
||||||
./manage.sh start
|
./manage.sh start
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -20,15 +20,12 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||||
gcc libffi-dev curl libsqlcipher-dev \
|
gcc libffi-dev curl libsqlcipher-dev \
|
||||||
&& rm -rf /var/lib/apt/lists/*
|
&& rm -rf /var/lib/apt/lists/*
|
||||||
|
|
||||||
# Copy circuitforge-core and install it from the local path before requirements.txt.
|
# Copy circuitforge-core first so pip can resolve the -e ../circuitforge-core
|
||||||
# requirements.txt has a git+https:// fallback URL for CI (where circuitforge-core
|
# reference in requirements.txt (installed editable at /circuitforge-core)
|
||||||
# is not a sibling directory), but Docker always has the local copy available here.
|
|
||||||
COPY circuitforge-core/ /circuitforge-core/
|
COPY circuitforge-core/ /circuitforge-core/
|
||||||
RUN pip install --no-cache-dir /circuitforge-core
|
|
||||||
|
|
||||||
COPY peregrine/requirements.txt .
|
COPY peregrine/requirements.txt .
|
||||||
# Skip the cfcore line — already installed above from the local copy
|
RUN pip install --no-cache-dir -r requirements.txt
|
||||||
RUN grep -v 'circuitforge-core' requirements.txt | pip install --no-cache-dir -r /dev/stdin
|
|
||||||
|
|
||||||
# Install Playwright browser (cached separately from Python deps so requirements
|
# Install Playwright browser (cached separately from Python deps so requirements
|
||||||
# changes don't bust the ~600–900 MB Chromium layer and vice versa)
|
# changes don't bust the ~600–900 MB Chromium layer and vice versa)
|
||||||
|
|
|
||||||
2
Makefile
2
Makefile
|
|
@ -45,7 +45,7 @@ endif
|
||||||
PROFILE_ARG := $(if $(filter remote,$(PROFILE)),,--profile $(PROFILE))
|
PROFILE_ARG := $(if $(filter remote,$(PROFILE)),,--profile $(PROFILE))
|
||||||
|
|
||||||
setup: ## Install dependencies (Docker or Podman + NVIDIA toolkit)
|
setup: ## Install dependencies (Docker or Podman + NVIDIA toolkit)
|
||||||
@bash install.sh
|
@bash setup.sh
|
||||||
|
|
||||||
preflight: ## Check ports + system resources; write .env
|
preflight: ## Check ports + system resources; write .env
|
||||||
@$(PYTHON) scripts/preflight.py
|
@$(PYTHON) scripts/preflight.py
|
||||||
|
|
|
||||||
|
|
@ -59,7 +59,7 @@ make start PROFILE=single-gpu
|
||||||
|
|
||||||
**3.** Open http://localhost:8501 — the setup wizard guides you through the rest.
|
**3.** Open http://localhost:8501 — the setup wizard guides you through the rest.
|
||||||
|
|
||||||
> **macOS / Apple Silicon:** Docker Desktop must be running. For Metal GPU-accelerated inference, install Ollama natively before starting — `install.sh` will prompt you to do this. See [Apple Silicon GPU](#apple-silicon-gpu) below.
|
> **macOS / Apple Silicon:** Docker Desktop must be running. For Metal GPU-accelerated inference, install Ollama natively before starting — `setup.sh` will prompt you to do this. See [Apple Silicon GPU](#apple-silicon-gpu) below.
|
||||||
> **Windows:** Not supported — use WSL2 with Ubuntu.
|
> **Windows:** Not supported — use WSL2 with Ubuntu.
|
||||||
|
|
||||||
### Installing to `/opt` or other system directories
|
### Installing to `/opt` or other system directories
|
||||||
|
|
@ -103,7 +103,7 @@ After `./manage.sh setup`, log out and back in for docker group membership to ta
|
||||||
|
|
||||||
Docker Desktop on macOS runs in a Linux VM — it cannot access the Apple GPU. Metal-accelerated inference requires Ollama to run **natively** on the host.
|
Docker Desktop on macOS runs in a Linux VM — it cannot access the Apple GPU. Metal-accelerated inference requires Ollama to run **natively** on the host.
|
||||||
|
|
||||||
`install.sh` handles this automatically: it offers to install Ollama via Homebrew, starts it as a background service, and explains what happens next. If Ollama is running on port 11434 when you start Peregrine, preflight detects it, stubs out the Docker Ollama container, and routes inference through the native process — which uses Metal automatically.
|
`setup.sh` handles this automatically: it offers to install Ollama via Homebrew, starts it as a background service, and explains what happens next. If Ollama is running on port 11434 when you start Peregrine, preflight detects it, stubs out the Docker Ollama container, and routes inference through the native process — which uses Metal automatically.
|
||||||
|
|
||||||
To do it manually:
|
To do it manually:
|
||||||
|
|
||||||
|
|
@ -154,7 +154,7 @@ Re-enter the wizard any time via **Settings → Developer → Reset wizard**.
|
||||||
| Calendar sync (Google, Apple) | Paid |
|
| Calendar sync (Google, Apple) | Paid |
|
||||||
| Slack notifications | Paid |
|
| Slack notifications | Paid |
|
||||||
| CircuitForge shared cover-letter model | Paid |
|
| CircuitForge shared cover-letter model | Paid |
|
||||||
| Vue 3 SPA — full UI with onboarding wizard, job board, apply workspace, sort/filter, research modal, draft cover letter | Free |
|
| Vue 3 SPA beta UI | Paid |
|
||||||
| **Voice guidelines** (custom writing style & tone) | Premium with LLM¹ ² |
|
| **Voice guidelines** (custom writing style & tone) | Premium with LLM¹ ² |
|
||||||
| Cover letter model fine-tuning (your writing, your model) | Premium |
|
| Cover letter model fine-tuning (your writing, your model) | Premium |
|
||||||
| Multi-user support | Premium |
|
| Multi-user support | Premium |
|
||||||
|
|
|
||||||
271
app/Home.py
271
app/Home.py
|
|
@ -19,8 +19,8 @@ _profile = UserProfile(_USER_YAML) if UserProfile.exists(_USER_YAML) else None
|
||||||
_name = _profile.name if _profile else "Job Seeker"
|
_name = _profile.name if _profile else "Job Seeker"
|
||||||
|
|
||||||
from scripts.db import init_db, get_job_counts, purge_jobs, purge_email_data, \
|
from scripts.db import init_db, get_job_counts, purge_jobs, purge_email_data, \
|
||||||
purge_non_remote, archive_jobs, kill_stuck_tasks, cancel_task, \
|
purge_non_remote, archive_jobs, kill_stuck_tasks, get_task_for_job, get_active_tasks, \
|
||||||
get_task_for_job, get_active_tasks, insert_job, get_existing_urls
|
insert_job, get_existing_urls
|
||||||
from scripts.task_runner import submit_task
|
from scripts.task_runner import submit_task
|
||||||
from app.cloud_session import resolve_session, get_db_path
|
from app.cloud_session import resolve_session, get_db_path
|
||||||
|
|
||||||
|
|
@ -376,144 +376,177 @@ _scrape_status()
|
||||||
|
|
||||||
st.divider()
|
st.divider()
|
||||||
|
|
||||||
# ── Danger zone ───────────────────────────────────────────────────────────────
|
# ── Danger zone: purge + re-scrape ────────────────────────────────────────────
|
||||||
with st.expander("⚠️ Danger Zone", expanded=False):
|
with st.expander("⚠️ Danger Zone", expanded=False):
|
||||||
|
|
||||||
# ── Queue reset (the common case) ─────────────────────────────────────────
|
|
||||||
st.markdown("**Queue reset**")
|
|
||||||
st.caption(
|
st.caption(
|
||||||
"Archive clears your review queue while keeping job URLs for dedup, "
|
"**Purge** permanently deletes jobs from the local database. "
|
||||||
"so the same listings won't resurface on the next discovery run. "
|
"Applied and synced jobs are never touched."
|
||||||
"Use hard purge only if you want a full clean slate including dedup history."
|
|
||||||
)
|
)
|
||||||
|
|
||||||
_scope = st.radio(
|
purge_col, rescrape_col, email_col, tasks_col = st.columns(4)
|
||||||
"Clear scope",
|
|
||||||
["Pending only", "Pending + approved (stale search)"],
|
|
||||||
horizontal=True,
|
|
||||||
label_visibility="collapsed",
|
|
||||||
)
|
|
||||||
_scope_statuses = (
|
|
||||||
["pending"] if _scope == "Pending only" else ["pending", "approved"]
|
|
||||||
)
|
|
||||||
|
|
||||||
_qc1, _qc2, _qc3 = st.columns([2, 2, 4])
|
with purge_col:
|
||||||
if _qc1.button("📦 Archive & reset", use_container_width=True, type="primary"):
|
st.markdown("**Purge pending & rejected**")
|
||||||
st.session_state["confirm_dz"] = "archive"
|
st.caption("Removes all _pending_ and _rejected_ listings so the next discovery starts fresh.")
|
||||||
if _qc2.button("🗑 Hard purge (delete)", use_container_width=True):
|
if st.button("🗑 Purge Pending + Rejected", use_container_width=True):
|
||||||
st.session_state["confirm_dz"] = "purge"
|
st.session_state["confirm_purge"] = "partial"
|
||||||
|
|
||||||
if st.session_state.get("confirm_dz") == "archive":
|
if st.session_state.get("confirm_purge") == "partial":
|
||||||
st.info(
|
st.warning("Are you sure? This cannot be undone.")
|
||||||
f"Archive **{', '.join(_scope_statuses)}** jobs? "
|
c1, c2 = st.columns(2)
|
||||||
"URLs are kept for dedup — nothing is permanently deleted."
|
if c1.button("Yes, purge", type="primary", use_container_width=True):
|
||||||
)
|
deleted = purge_jobs(get_db_path(), statuses=["pending", "rejected"])
|
||||||
_dc1, _dc2 = st.columns(2)
|
st.success(f"Purged {deleted} jobs.")
|
||||||
if _dc1.button("Yes, archive", type="primary", use_container_width=True, key="dz_archive_confirm"):
|
st.session_state.pop("confirm_purge", None)
|
||||||
n = archive_jobs(get_db_path(), statuses=_scope_statuses)
|
|
||||||
st.success(f"Archived {n} jobs.")
|
|
||||||
st.session_state.pop("confirm_dz", None)
|
|
||||||
st.rerun()
|
st.rerun()
|
||||||
if _dc2.button("Cancel", use_container_width=True, key="dz_archive_cancel"):
|
if c2.button("Cancel", use_container_width=True):
|
||||||
st.session_state.pop("confirm_dz", None)
|
st.session_state.pop("confirm_purge", None)
|
||||||
st.rerun()
|
st.rerun()
|
||||||
|
|
||||||
if st.session_state.get("confirm_dz") == "purge":
|
with email_col:
|
||||||
st.warning(
|
st.markdown("**Purge email data**")
|
||||||
f"Permanently delete **{', '.join(_scope_statuses)}** jobs? "
|
st.caption("Clears all email thread logs and email-sourced pending jobs so the next sync starts fresh.")
|
||||||
"This removes the URLs from dedup history too. Cannot be undone."
|
if st.button("📧 Purge Email Data", use_container_width=True):
|
||||||
)
|
st.session_state["confirm_purge"] = "email"
|
||||||
_dc1, _dc2 = st.columns(2)
|
|
||||||
if _dc1.button("Yes, delete", type="primary", use_container_width=True, key="dz_purge_confirm"):
|
if st.session_state.get("confirm_purge") == "email":
|
||||||
n = purge_jobs(get_db_path(), statuses=_scope_statuses)
|
st.warning("This deletes all email contacts and email-sourced jobs. Cannot be undone.")
|
||||||
st.success(f"Deleted {n} jobs.")
|
c1, c2 = st.columns(2)
|
||||||
st.session_state.pop("confirm_dz", None)
|
if c1.button("Yes, purge emails", type="primary", use_container_width=True):
|
||||||
|
contacts, jobs = purge_email_data(get_db_path())
|
||||||
|
st.success(f"Purged {contacts} email contacts, {jobs} email jobs.")
|
||||||
|
st.session_state.pop("confirm_purge", None)
|
||||||
st.rerun()
|
st.rerun()
|
||||||
if _dc2.button("Cancel", use_container_width=True, key="dz_purge_cancel"):
|
if c2.button("Cancel ", use_container_width=True):
|
||||||
st.session_state.pop("confirm_dz", None)
|
st.session_state.pop("confirm_purge", None)
|
||||||
st.rerun()
|
st.rerun()
|
||||||
|
|
||||||
st.divider()
|
with tasks_col:
|
||||||
|
|
||||||
# ── Background tasks ──────────────────────────────────────────────────────
|
|
||||||
_active = get_active_tasks(get_db_path())
|
_active = get_active_tasks(get_db_path())
|
||||||
st.markdown(f"**Background tasks** — {len(_active)} active")
|
st.markdown("**Kill stuck tasks**")
|
||||||
|
st.caption(f"Force-fail all queued/running background tasks. Currently **{len(_active)}** active.")
|
||||||
if _active:
|
if st.button("⏹ Kill All Tasks", use_container_width=True, disabled=len(_active) == 0):
|
||||||
_task_icons = {"cover_letter": "✉️", "research": "🔍", "discovery": "🌐", "enrich_descriptions": "📝"}
|
|
||||||
for _t in _active:
|
|
||||||
_tc1, _tc2, _tc3 = st.columns([3, 4, 2])
|
|
||||||
_icon = _task_icons.get(_t["task_type"], "⚙️")
|
|
||||||
_tc1.caption(f"{_icon} `{_t['task_type']}`")
|
|
||||||
_job_label = f"{_t['title']} @ {_t['company']}" if _t.get("title") else f"job #{_t['job_id']}"
|
|
||||||
_tc2.caption(_job_label)
|
|
||||||
_tc3.caption(f"_{_t['status']}_")
|
|
||||||
if st.button("✕ Cancel", key=f"dz_cancel_task_{_t['id']}", use_container_width=True):
|
|
||||||
cancel_task(get_db_path(), _t["id"])
|
|
||||||
st.rerun()
|
|
||||||
st.caption("")
|
|
||||||
|
|
||||||
_kill_col, _ = st.columns([2, 6])
|
|
||||||
if _kill_col.button("⏹ Kill all stuck", use_container_width=True, disabled=len(_active) == 0):
|
|
||||||
killed = kill_stuck_tasks(get_db_path())
|
killed = kill_stuck_tasks(get_db_path())
|
||||||
st.success(f"Killed {killed} task(s).")
|
st.success(f"Killed {killed} task(s).")
|
||||||
st.rerun()
|
st.rerun()
|
||||||
|
|
||||||
st.divider()
|
with rescrape_col:
|
||||||
|
st.markdown("**Purge all & re-scrape**")
|
||||||
|
st.caption("Wipes _all_ non-applied, non-synced jobs then immediately runs a fresh discovery.")
|
||||||
|
if st.button("🔄 Purge All + Re-scrape", use_container_width=True):
|
||||||
|
st.session_state["confirm_purge"] = "full"
|
||||||
|
|
||||||
# ── Rarely needed (collapsed) ─────────────────────────────────────────────
|
if st.session_state.get("confirm_purge") == "full":
|
||||||
with st.expander("More options", expanded=False):
|
st.warning("This will delete ALL pending, approved, and rejected jobs, then re-scrape. Applied and synced records are kept.")
|
||||||
_rare1, _rare2, _rare3 = st.columns(3)
|
c1, c2 = st.columns(2)
|
||||||
|
if c1.button("Yes, wipe + scrape", type="primary", use_container_width=True):
|
||||||
with _rare1:
|
|
||||||
st.markdown("**Purge email data**")
|
|
||||||
st.caption("Clears all email thread logs and email-sourced pending jobs.")
|
|
||||||
if st.button("📧 Purge Email Data", use_container_width=True):
|
|
||||||
st.session_state["confirm_dz"] = "email"
|
|
||||||
if st.session_state.get("confirm_dz") == "email":
|
|
||||||
st.warning("Deletes all email contacts and email-sourced jobs. Cannot be undone.")
|
|
||||||
_ec1, _ec2 = st.columns(2)
|
|
||||||
if _ec1.button("Yes, purge emails", type="primary", use_container_width=True, key="dz_email_confirm"):
|
|
||||||
contacts, jobs = purge_email_data(get_db_path())
|
|
||||||
st.success(f"Purged {contacts} email contacts, {jobs} email jobs.")
|
|
||||||
st.session_state.pop("confirm_dz", None)
|
|
||||||
st.rerun()
|
|
||||||
if _ec2.button("Cancel", use_container_width=True, key="dz_email_cancel"):
|
|
||||||
st.session_state.pop("confirm_dz", None)
|
|
||||||
st.rerun()
|
|
||||||
|
|
||||||
with _rare2:
|
|
||||||
st.markdown("**Purge non-remote**")
|
|
||||||
st.caption("Removes pending/approved/rejected on-site listings from the DB.")
|
|
||||||
if st.button("🏢 Purge On-site Jobs", use_container_width=True):
|
|
||||||
st.session_state["confirm_dz"] = "non_remote"
|
|
||||||
if st.session_state.get("confirm_dz") == "non_remote":
|
|
||||||
st.warning("Deletes all non-remote jobs not yet applied to. Cannot be undone.")
|
|
||||||
_rc1, _rc2 = st.columns(2)
|
|
||||||
if _rc1.button("Yes, purge on-site", type="primary", use_container_width=True, key="dz_nonremote_confirm"):
|
|
||||||
deleted = purge_non_remote(get_db_path())
|
|
||||||
st.success(f"Purged {deleted} non-remote jobs.")
|
|
||||||
st.session_state.pop("confirm_dz", None)
|
|
||||||
st.rerun()
|
|
||||||
if _rc2.button("Cancel", use_container_width=True, key="dz_nonremote_cancel"):
|
|
||||||
st.session_state.pop("confirm_dz", None)
|
|
||||||
st.rerun()
|
|
||||||
|
|
||||||
with _rare3:
|
|
||||||
st.markdown("**Wipe all + re-scrape**")
|
|
||||||
st.caption("Deletes all non-applied jobs then immediately runs a fresh discovery.")
|
|
||||||
if st.button("🔄 Wipe + Re-scrape", use_container_width=True):
|
|
||||||
st.session_state["confirm_dz"] = "rescrape"
|
|
||||||
if st.session_state.get("confirm_dz") == "rescrape":
|
|
||||||
st.warning("Wipes ALL pending, approved, and rejected jobs, then re-scrapes. Applied and synced records are kept.")
|
|
||||||
_wc1, _wc2 = st.columns(2)
|
|
||||||
if _wc1.button("Yes, wipe + scrape", type="primary", use_container_width=True, key="dz_rescrape_confirm"):
|
|
||||||
purge_jobs(get_db_path(), statuses=["pending", "approved", "rejected"])
|
purge_jobs(get_db_path(), statuses=["pending", "approved", "rejected"])
|
||||||
submit_task(get_db_path(), "discovery", 0)
|
submit_task(get_db_path(), "discovery", 0)
|
||||||
st.session_state.pop("confirm_dz", None)
|
st.session_state.pop("confirm_purge", None)
|
||||||
st.rerun()
|
st.rerun()
|
||||||
if _wc2.button("Cancel", use_container_width=True, key="dz_rescrape_cancel"):
|
if c2.button("Cancel ", use_container_width=True):
|
||||||
st.session_state.pop("confirm_dz", None)
|
st.session_state.pop("confirm_purge", None)
|
||||||
|
st.rerun()
|
||||||
|
|
||||||
|
st.divider()
|
||||||
|
|
||||||
|
pending_col, nonremote_col, approved_col, _ = st.columns(4)
|
||||||
|
|
||||||
|
with pending_col:
|
||||||
|
st.markdown("**Purge pending review**")
|
||||||
|
st.caption("Removes only _pending_ listings, keeping your rejected history intact.")
|
||||||
|
if st.button("🗑 Purge Pending Only", use_container_width=True):
|
||||||
|
st.session_state["confirm_purge"] = "pending_only"
|
||||||
|
|
||||||
|
if st.session_state.get("confirm_purge") == "pending_only":
|
||||||
|
st.warning("Deletes all pending jobs. Rejected jobs are kept. Cannot be undone.")
|
||||||
|
c1, c2 = st.columns(2)
|
||||||
|
if c1.button("Yes, purge pending", type="primary", use_container_width=True):
|
||||||
|
deleted = purge_jobs(get_db_path(), statuses=["pending"])
|
||||||
|
st.success(f"Purged {deleted} pending jobs.")
|
||||||
|
st.session_state.pop("confirm_purge", None)
|
||||||
|
st.rerun()
|
||||||
|
if c2.button("Cancel ", use_container_width=True):
|
||||||
|
st.session_state.pop("confirm_purge", None)
|
||||||
|
st.rerun()
|
||||||
|
|
||||||
|
with nonremote_col:
|
||||||
|
st.markdown("**Purge non-remote**")
|
||||||
|
st.caption("Removes pending/approved/rejected jobs where remote is not set. Keeps anything already in the pipeline.")
|
||||||
|
if st.button("🏢 Purge On-site Jobs", use_container_width=True):
|
||||||
|
st.session_state["confirm_purge"] = "non_remote"
|
||||||
|
|
||||||
|
if st.session_state.get("confirm_purge") == "non_remote":
|
||||||
|
st.warning("Deletes all non-remote jobs not yet applied to. Cannot be undone.")
|
||||||
|
c1, c2 = st.columns(2)
|
||||||
|
if c1.button("Yes, purge on-site", type="primary", use_container_width=True):
|
||||||
|
deleted = purge_non_remote(get_db_path())
|
||||||
|
st.success(f"Purged {deleted} non-remote jobs.")
|
||||||
|
st.session_state.pop("confirm_purge", None)
|
||||||
|
st.rerun()
|
||||||
|
if c2.button("Cancel ", use_container_width=True):
|
||||||
|
st.session_state.pop("confirm_purge", None)
|
||||||
|
st.rerun()
|
||||||
|
|
||||||
|
with approved_col:
|
||||||
|
st.markdown("**Purge approved (unapplied)**")
|
||||||
|
st.caption("Removes _approved_ jobs you haven't applied to yet — e.g. to reset after a review pass.")
|
||||||
|
if st.button("🗑 Purge Approved", use_container_width=True):
|
||||||
|
st.session_state["confirm_purge"] = "approved_only"
|
||||||
|
|
||||||
|
if st.session_state.get("confirm_purge") == "approved_only":
|
||||||
|
st.warning("Deletes all approved-but-not-applied jobs. Cannot be undone.")
|
||||||
|
c1, c2 = st.columns(2)
|
||||||
|
if c1.button("Yes, purge approved", type="primary", use_container_width=True):
|
||||||
|
deleted = purge_jobs(get_db_path(), statuses=["approved"])
|
||||||
|
st.success(f"Purged {deleted} approved jobs.")
|
||||||
|
st.session_state.pop("confirm_purge", None)
|
||||||
|
st.rerun()
|
||||||
|
if c2.button("Cancel ", use_container_width=True):
|
||||||
|
st.session_state.pop("confirm_purge", None)
|
||||||
|
st.rerun()
|
||||||
|
|
||||||
|
st.divider()
|
||||||
|
|
||||||
|
archive_col1, archive_col2, _, _ = st.columns(4)
|
||||||
|
|
||||||
|
with archive_col1:
|
||||||
|
st.markdown("**Archive remaining**")
|
||||||
|
st.caption(
|
||||||
|
"Move all _pending_ and _rejected_ jobs to archived status. "
|
||||||
|
"Archived jobs stay in the DB for dedup — they just won't appear in Job Review."
|
||||||
|
)
|
||||||
|
if st.button("📦 Archive Pending + Rejected", use_container_width=True):
|
||||||
|
st.session_state["confirm_purge"] = "archive_remaining"
|
||||||
|
|
||||||
|
if st.session_state.get("confirm_purge") == "archive_remaining":
|
||||||
|
st.info("Jobs will be archived (not deleted) — URLs are kept for dedup.")
|
||||||
|
c1, c2 = st.columns(2)
|
||||||
|
if c1.button("Yes, archive", type="primary", use_container_width=True):
|
||||||
|
archived = archive_jobs(get_db_path(), statuses=["pending", "rejected"])
|
||||||
|
st.success(f"Archived {archived} jobs.")
|
||||||
|
st.session_state.pop("confirm_purge", None)
|
||||||
|
st.rerun()
|
||||||
|
if c2.button("Cancel ", use_container_width=True):
|
||||||
|
st.session_state.pop("confirm_purge", None)
|
||||||
|
st.rerun()
|
||||||
|
|
||||||
|
with archive_col2:
|
||||||
|
st.markdown("**Archive approved (unapplied)**")
|
||||||
|
st.caption("Archive _approved_ listings you decided to skip — keeps history without cluttering the apply queue.")
|
||||||
|
if st.button("📦 Archive Approved", use_container_width=True):
|
||||||
|
st.session_state["confirm_purge"] = "archive_approved"
|
||||||
|
|
||||||
|
if st.session_state.get("confirm_purge") == "archive_approved":
|
||||||
|
st.info("Approved jobs will be archived (not deleted).")
|
||||||
|
c1, c2 = st.columns(2)
|
||||||
|
if c1.button("Yes, archive approved", type="primary", use_container_width=True):
|
||||||
|
archived = archive_jobs(get_db_path(), statuses=["approved"])
|
||||||
|
st.success(f"Archived {archived} approved jobs.")
|
||||||
|
st.session_state.pop("confirm_purge", None)
|
||||||
|
st.rerun()
|
||||||
|
if c2.button("Cancel ", use_container_width=True):
|
||||||
|
st.session_state.pop("confirm_purge", None)
|
||||||
st.rerun()
|
st.rerun()
|
||||||
|
|
||||||
# ── Setup banners ─────────────────────────────────────────────────────────────
|
# ── Setup banners ─────────────────────────────────────────────────────────────
|
||||||
|
|
|
||||||
|
|
@ -17,16 +17,10 @@ sys.path.insert(0, str(Path(__file__).parent.parent))
|
||||||
|
|
||||||
logging.basicConfig(level=logging.WARNING, format="%(name)s %(levelname)s: %(message)s")
|
logging.basicConfig(level=logging.WARNING, format="%(name)s %(levelname)s: %(message)s")
|
||||||
|
|
||||||
# Load .env before any os.environ reads — safe to call inside Docker too
|
|
||||||
# (uses setdefault, so Docker-injected vars take precedence over .env values)
|
|
||||||
from circuitforge_core.config.settings import load_env as _load_env
|
|
||||||
_load_env(Path(__file__).parent.parent / ".env")
|
|
||||||
|
|
||||||
IS_DEMO = os.environ.get("DEMO_MODE", "").lower() in ("1", "true", "yes")
|
IS_DEMO = os.environ.get("DEMO_MODE", "").lower() in ("1", "true", "yes")
|
||||||
|
|
||||||
import streamlit as st
|
import streamlit as st
|
||||||
from scripts.db import DEFAULT_DB, init_db, get_active_tasks
|
from scripts.db import DEFAULT_DB, init_db, get_active_tasks
|
||||||
from scripts.db_migrate import migrate_db
|
|
||||||
from app.feedback import inject_feedback_button
|
from app.feedback import inject_feedback_button
|
||||||
from app.cloud_session import resolve_session, get_db_path, get_config_dir, get_cloud_tier
|
from app.cloud_session import resolve_session, get_db_path, get_config_dir, get_cloud_tier
|
||||||
import sqlite3
|
import sqlite3
|
||||||
|
|
@ -42,7 +36,6 @@ st.set_page_config(
|
||||||
|
|
||||||
resolve_session("peregrine")
|
resolve_session("peregrine")
|
||||||
init_db(get_db_path())
|
init_db(get_db_path())
|
||||||
migrate_db(Path(get_db_path()))
|
|
||||||
|
|
||||||
# Demo tier — initialize once per session (cookie persistence handled client-side)
|
# Demo tier — initialize once per session (cookie persistence handled client-side)
|
||||||
if IS_DEMO and "simulated_tier" not in st.session_state:
|
if IS_DEMO and "simulated_tier" not in st.session_state:
|
||||||
|
|
|
||||||
|
|
@ -32,13 +32,6 @@ _DEMO_MODE = os.environ.get("DEMO_MODE", "").lower() in ("1", "true", "yes")
|
||||||
# Example: PEREGRINE_VUE_URL=http://localhost:8506
|
# Example: PEREGRINE_VUE_URL=http://localhost:8506
|
||||||
_VUE_URL = os.environ.get("PEREGRINE_VUE_URL", "").strip().rstrip("/")
|
_VUE_URL = os.environ.get("PEREGRINE_VUE_URL", "").strip().rstrip("/")
|
||||||
|
|
||||||
# When True, a window.location.reload() after setting prgn_ui=vue will be
|
|
||||||
# intercepted by Caddy and routed to the Vue SPA. When False (no Caddy in the
|
|
||||||
# traffic path — e.g. test instances, direct Docker exposure), reloading just
|
|
||||||
# comes back to Streamlit and creates an infinite loop. Only set this in
|
|
||||||
# production/staging compose files where Caddy is actually in front.
|
|
||||||
_CADDY_PROXY = os.environ.get("PEREGRINE_CADDY_PROXY", "").lower() in ("1", "true", "yes")
|
|
||||||
|
|
||||||
_COOKIE_JS = """
|
_COOKIE_JS = """
|
||||||
<script>
|
<script>
|
||||||
(function() {{
|
(function() {{
|
||||||
|
|
@ -57,18 +50,14 @@ def _set_cookie_js(value: str, navigate: bool = False) -> None:
|
||||||
port. Without this, reload() just sends the request back to the same
|
port. Without this, reload() just sends the request back to the same
|
||||||
Streamlit port with no router in between to inspect the cookie.
|
Streamlit port with no router in between to inspect the cookie.
|
||||||
|
|
||||||
When PEREGRINE_CADDY_PROXY is set (production/staging): navigate=True
|
When PEREGRINE_VUE_URL is absent (Caddy deployment): navigate=True
|
||||||
triggers window.location.reload() so Caddy sees the updated cookie on
|
triggers window.location.reload() so Caddy sees the updated cookie on
|
||||||
the next HTTP request and routes accordingly.
|
the next HTTP request and routes accordingly.
|
||||||
|
|
||||||
When neither is set (test instances, bare Docker): navigate is suppressed
|
|
||||||
entirely — the cookie is written silently, but no reload is attempted.
|
|
||||||
Reloading without a proxy just bounces back to Streamlit and loops.
|
|
||||||
"""
|
"""
|
||||||
# components.html() renders in an iframe — window.parent navigates the host page
|
# components.html() renders in an iframe — window.parent navigates the host page
|
||||||
if navigate and value == "vue" and _VUE_URL:
|
if navigate and value == "vue" and _VUE_URL:
|
||||||
nav_js = f"window.parent.location.href = '{_VUE_URL}';"
|
nav_js = f"window.parent.location.href = '{_VUE_URL}';"
|
||||||
elif navigate and _CADDY_PROXY:
|
elif navigate:
|
||||||
nav_js = "window.parent.location.reload();"
|
nav_js = "window.parent.location.reload();"
|
||||||
else:
|
else:
|
||||||
nav_js = ""
|
nav_js = ""
|
||||||
|
|
@ -124,6 +113,12 @@ def sync_ui_cookie(yaml_path: Path, tier: str) -> None:
|
||||||
# UI components must not crash the app — silent fallback to default
|
# UI components must not crash the app — silent fallback to default
|
||||||
pref = "streamlit"
|
pref = "streamlit"
|
||||||
|
|
||||||
|
# Demo mode: Vue SPA has no demo data wiring — always serve Streamlit.
|
||||||
|
# (The tier downgrade check below is skipped in demo mode, but we must
|
||||||
|
# also block the Vue navigation itself so Caddy doesn't route to a blank SPA.)
|
||||||
|
if pref == "vue" and _DEMO_MODE:
|
||||||
|
pref = "streamlit"
|
||||||
|
|
||||||
# Tier downgrade protection (skip in demo — demo bypasses tier gate)
|
# Tier downgrade protection (skip in demo — demo bypasses tier gate)
|
||||||
if pref == "vue" and not _DEMO_MODE and not can_use(tier, "vue_ui_beta"):
|
if pref == "vue" and not _DEMO_MODE and not can_use(tier, "vue_ui_beta"):
|
||||||
if profile is not None:
|
if profile is not None:
|
||||||
|
|
@ -194,7 +189,7 @@ def render_banner(yaml_path: Path, tier: str) -> None:
|
||||||
|
|
||||||
col1, col2, col3 = st.columns([8, 1, 1])
|
col1, col2, col3 = st.columns([8, 1, 1])
|
||||||
with col1:
|
with col1:
|
||||||
st.info("✨ **New Peregrine UI available** — try the modern Vue interface (Beta)")
|
st.info("✨ **New Peregrine UI available** — try the modern Vue interface (Beta, Paid tier)")
|
||||||
with col2:
|
with col2:
|
||||||
if st.button("Try it", key="_ui_banner_try"):
|
if st.button("Try it", key="_ui_banner_try"):
|
||||||
switch_ui(yaml_path, to="vue", tier=tier)
|
switch_ui(yaml_path, to="vue", tier=tier)
|
||||||
|
|
|
||||||
|
|
@ -457,11 +457,6 @@ elif step == 5:
|
||||||
from app.wizard.step_inference import validate
|
from app.wizard.step_inference import validate
|
||||||
|
|
||||||
st.subheader("Step 5 \u2014 Inference & API Keys")
|
st.subheader("Step 5 \u2014 Inference & API Keys")
|
||||||
st.info(
|
|
||||||
"**Simplest setup:** set `OLLAMA_HOST` in your `.env` file — "
|
|
||||||
"Peregrine auto-detects it, no config file needed. "
|
|
||||||
"Or use the fields below to configure API keys and endpoints."
|
|
||||||
)
|
|
||||||
profile = saved_yaml.get("inference_profile", "remote")
|
profile = saved_yaml.get("inference_profile", "remote")
|
||||||
|
|
||||||
if profile == "remote":
|
if profile == "remote":
|
||||||
|
|
@ -471,18 +466,8 @@ elif step == 5:
|
||||||
placeholder="https://api.together.xyz/v1")
|
placeholder="https://api.together.xyz/v1")
|
||||||
openai_key = st.text_input("Endpoint API Key (optional)", type="password",
|
openai_key = st.text_input("Endpoint API Key (optional)", type="password",
|
||||||
key="oai_key") if openai_url else ""
|
key="oai_key") if openai_url else ""
|
||||||
ollama_host = st.text_input("Ollama host (optional \u2014 local fallback)",
|
|
||||||
placeholder="http://localhost:11434",
|
|
||||||
key="ollama_host_input")
|
|
||||||
ollama_model = st.text_input("Ollama model (optional)",
|
|
||||||
value="llama3.2:3b",
|
|
||||||
key="ollama_model_input")
|
|
||||||
else:
|
else:
|
||||||
st.info(f"Local mode ({profile}): Ollama provides inference.")
|
st.info(f"Local mode ({profile}): Ollama provides inference.")
|
||||||
import os
|
|
||||||
_ollama_host_env = os.environ.get("OLLAMA_HOST", "")
|
|
||||||
if _ollama_host_env:
|
|
||||||
st.caption(f"OLLAMA_HOST from .env: `{_ollama_host_env}`")
|
|
||||||
anthropic_key = openai_url = openai_key = ""
|
anthropic_key = openai_url = openai_key = ""
|
||||||
|
|
||||||
with st.expander("Advanced \u2014 Service Ports & Hosts"):
|
with st.expander("Advanced \u2014 Service Ports & Hosts"):
|
||||||
|
|
@ -561,14 +546,6 @@ elif step == 5:
|
||||||
if anthropic_key or openai_url:
|
if anthropic_key or openai_url:
|
||||||
env_path.write_text("\n".join(env_lines) + "\n")
|
env_path.write_text("\n".join(env_lines) + "\n")
|
||||||
|
|
||||||
if profile == "remote":
|
|
||||||
if ollama_host:
|
|
||||||
env_lines = _set_env(env_lines, "OLLAMA_HOST", ollama_host)
|
|
||||||
if ollama_model:
|
|
||||||
env_lines = _set_env(env_lines, "OLLAMA_MODEL", ollama_model)
|
|
||||||
if ollama_host or ollama_model:
|
|
||||||
env_path.write_text("\n".join(env_lines) + "\n")
|
|
||||||
|
|
||||||
_save_yaml({"services": svc, "wizard_step": 5})
|
_save_yaml({"services": svc, "wizard_step": 5})
|
||||||
st.session_state.wizard_step = 6
|
st.session_state.wizard_step = 6
|
||||||
st.rerun()
|
st.rerun()
|
||||||
|
|
@ -654,7 +631,7 @@ elif step == 6:
|
||||||
)
|
)
|
||||||
default_profile = {
|
default_profile = {
|
||||||
"name": "default",
|
"name": "default",
|
||||||
"titles": titles,
|
"job_titles": titles,
|
||||||
"locations": locations,
|
"locations": locations,
|
||||||
"remote_only": False,
|
"remote_only": False,
|
||||||
"boards": ["linkedin", "indeed", "glassdoor", "zip_recruiter"],
|
"boards": ["linkedin", "indeed", "glassdoor", "zip_recruiter"],
|
||||||
|
|
|
||||||
|
|
@ -15,28 +15,28 @@ import streamlit.components.v1 as components
|
||||||
import yaml
|
import yaml
|
||||||
|
|
||||||
from scripts.user_profile import UserProfile
|
from scripts.user_profile import UserProfile
|
||||||
|
|
||||||
|
_USER_YAML = Path(__file__).parent.parent.parent / "config" / "user.yaml"
|
||||||
|
_profile = UserProfile(_USER_YAML) if UserProfile.exists(_USER_YAML) else None
|
||||||
|
_name = _profile.name if _profile else "Job Seeker"
|
||||||
|
|
||||||
from scripts.db import (
|
from scripts.db import (
|
||||||
DEFAULT_DB, init_db, get_jobs_by_status,
|
DEFAULT_DB, init_db, get_jobs_by_status,
|
||||||
update_cover_letter, mark_applied, update_job_status,
|
update_cover_letter, mark_applied, update_job_status,
|
||||||
get_task_for_job,
|
get_task_for_job,
|
||||||
)
|
)
|
||||||
from scripts.task_runner import submit_task
|
from scripts.task_runner import submit_task
|
||||||
from app.cloud_session import resolve_session, get_db_path, get_config_dir
|
from app.cloud_session import resolve_session, get_db_path
|
||||||
from app.telemetry import log_usage_event
|
from app.telemetry import log_usage_event
|
||||||
|
|
||||||
|
DOCS_DIR = _profile.docs_dir if _profile else Path.home() / "Documents" / "JobSearch"
|
||||||
|
RESUME_YAML = Path(__file__).parent.parent.parent / "config" / "plain_text_resume.yaml"
|
||||||
|
|
||||||
st.title("🚀 Apply Workspace")
|
st.title("🚀 Apply Workspace")
|
||||||
|
|
||||||
resolve_session("peregrine")
|
resolve_session("peregrine")
|
||||||
init_db(get_db_path())
|
init_db(get_db_path())
|
||||||
|
|
||||||
_CONFIG_DIR = get_config_dir()
|
|
||||||
_USER_YAML = _CONFIG_DIR / "user.yaml"
|
|
||||||
_profile = UserProfile(_USER_YAML) if UserProfile.exists(_USER_YAML) else None
|
|
||||||
_name = _profile.name if _profile else "Job Seeker"
|
|
||||||
|
|
||||||
DOCS_DIR = _profile.docs_dir if _profile else Path.home() / "Documents" / "JobSearch"
|
|
||||||
RESUME_YAML = _CONFIG_DIR / "plain_text_resume.yaml"
|
|
||||||
|
|
||||||
# ── PDF generation ─────────────────────────────────────────────────────────────
|
# ── PDF generation ─────────────────────────────────────────────────────────────
|
||||||
def _make_cover_letter_pdf(job: dict, cover_letter: str, output_dir: Path) -> Path:
|
def _make_cover_letter_pdf(job: dict, cover_letter: str, output_dir: Path) -> Path:
|
||||||
from reportlab.lib.pagesizes import letter
|
from reportlab.lib.pagesizes import letter
|
||||||
|
|
|
||||||
|
|
@ -64,8 +64,8 @@ FEATURES: dict[str, str] = {
|
||||||
"apple_calendar_sync": "paid",
|
"apple_calendar_sync": "paid",
|
||||||
"slack_notifications": "paid",
|
"slack_notifications": "paid",
|
||||||
|
|
||||||
# Beta UI access — open to all tiers (access management, not compute)
|
# Beta UI access — stays gated (access management, not compute)
|
||||||
"vue_ui_beta": "free",
|
"vue_ui_beta": "paid",
|
||||||
}
|
}
|
||||||
|
|
||||||
# Features that unlock when the user supplies any LLM backend (local or BYOK).
|
# Features that unlock when the user supplies any LLM backend (local or BYOK).
|
||||||
|
|
|
||||||
|
|
@ -13,15 +13,12 @@
|
||||||
|
|
||||||
services:
|
services:
|
||||||
app:
|
app:
|
||||||
build:
|
build: .
|
||||||
context: ..
|
|
||||||
dockerfile: peregrine/Dockerfile.cfcore
|
|
||||||
container_name: peregrine-cloud
|
container_name: peregrine-cloud
|
||||||
ports:
|
ports:
|
||||||
- "8505:8501"
|
- "8505:8501"
|
||||||
volumes:
|
volumes:
|
||||||
- /devl/menagerie-data:/devl/menagerie-data # per-user data trees
|
- /devl/menagerie-data:/devl/menagerie-data # per-user data trees
|
||||||
- ./config/llm.cloud.yaml:/app/config/llm.yaml:ro # cloud-safe backends only (no claude_code/copilot/anthropic)
|
|
||||||
environment:
|
environment:
|
||||||
- CLOUD_MODE=true
|
- CLOUD_MODE=true
|
||||||
- CLOUD_DATA_ROOT=/devl/menagerie-data
|
- CLOUD_DATA_ROOT=/devl/menagerie-data
|
||||||
|
|
@ -34,10 +31,7 @@ services:
|
||||||
- DOCS_DIR=/tmp/cloud-docs
|
- DOCS_DIR=/tmp/cloud-docs
|
||||||
- STREAMLIT_SERVER_BASE_URL_PATH=peregrine
|
- STREAMLIT_SERVER_BASE_URL_PATH=peregrine
|
||||||
- PYTHONUNBUFFERED=1
|
- PYTHONUNBUFFERED=1
|
||||||
- PEREGRINE_CADDY_PROXY=1
|
|
||||||
- CF_ORCH_URL=http://host.docker.internal:7700
|
|
||||||
- DEMO_MODE=false
|
- DEMO_MODE=false
|
||||||
- FORGEJO_API_TOKEN=${FORGEJO_API_TOKEN:-}
|
|
||||||
depends_on:
|
depends_on:
|
||||||
searxng:
|
searxng:
|
||||||
condition: service_healthy
|
condition: service_healthy
|
||||||
|
|
@ -45,40 +39,12 @@ services:
|
||||||
- "host.docker.internal:host-gateway"
|
- "host.docker.internal:host-gateway"
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
|
|
||||||
api:
|
|
||||||
build:
|
|
||||||
context: ..
|
|
||||||
dockerfile: peregrine/Dockerfile.cfcore
|
|
||||||
command: >
|
|
||||||
bash -c "uvicorn dev_api:app --host 0.0.0.0 --port 8601"
|
|
||||||
volumes:
|
|
||||||
- /devl/menagerie-data:/devl/menagerie-data
|
|
||||||
- ./config/llm.cloud.yaml:/app/config/llm.yaml:ro
|
|
||||||
environment:
|
|
||||||
- CLOUD_MODE=true
|
|
||||||
- CLOUD_DATA_ROOT=/devl/menagerie-data
|
|
||||||
- STAGING_DB=/devl/menagerie-data/cloud-default.db
|
|
||||||
- DIRECTUS_JWT_SECRET=${DIRECTUS_JWT_SECRET}
|
|
||||||
- CF_SERVER_SECRET=${CF_SERVER_SECRET}
|
|
||||||
- PLATFORM_DB_URL=${PLATFORM_DB_URL}
|
|
||||||
- HEIMDALL_URL=${HEIMDALL_URL:-http://cf-license:8000}
|
|
||||||
- HEIMDALL_ADMIN_TOKEN=${HEIMDALL_ADMIN_TOKEN}
|
|
||||||
- PYTHONUNBUFFERED=1
|
|
||||||
- FORGEJO_API_TOKEN=${FORGEJO_API_TOKEN:-}
|
|
||||||
extra_hosts:
|
|
||||||
- "host.docker.internal:host-gateway"
|
|
||||||
restart: unless-stopped
|
|
||||||
|
|
||||||
web:
|
web:
|
||||||
build:
|
build:
|
||||||
context: .
|
context: .
|
||||||
dockerfile: docker/web/Dockerfile
|
dockerfile: docker/web/Dockerfile
|
||||||
args:
|
|
||||||
VITE_BASE_PATH: /peregrine/
|
|
||||||
ports:
|
ports:
|
||||||
- "8508:80"
|
- "8508:80"
|
||||||
depends_on:
|
|
||||||
- api
|
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
|
|
||||||
searxng:
|
searxng:
|
||||||
|
|
|
||||||
|
|
@ -42,8 +42,6 @@ services:
|
||||||
build:
|
build:
|
||||||
context: .
|
context: .
|
||||||
dockerfile: docker/web/Dockerfile
|
dockerfile: docker/web/Dockerfile
|
||||||
args:
|
|
||||||
VITE_BASE_PATH: /peregrine/
|
|
||||||
ports:
|
ports:
|
||||||
- "8507:80"
|
- "8507:80"
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
|
|
|
||||||
|
|
@ -21,15 +21,14 @@ services:
|
||||||
ports:
|
ports:
|
||||||
- "8516:8501"
|
- "8516:8501"
|
||||||
volumes:
|
volumes:
|
||||||
- /devl/job-seeker:/devl/job-seeker
|
- peregrine-test-data:/devl/job-seeker
|
||||||
- /devl/job-seeker/config:/app/config
|
|
||||||
- /devl/job-seeker/config/llm.docker.yaml:/app/config/llm.yaml:ro
|
|
||||||
- /devl/job-seeker/config/user.docker.yaml:/app/config/user.yaml:ro
|
|
||||||
environment:
|
environment:
|
||||||
- STAGING_DB=/devl/job-seeker/staging.db
|
- STAGING_DB=/devl/job-seeker/staging.db
|
||||||
- PYTHONUNBUFFERED=1
|
- PYTHONUNBUFFERED=1
|
||||||
- STREAMLIT_SERVER_BASE_URL_PATH=
|
- STREAMLIT_SERVER_BASE_URL_PATH=
|
||||||
- CF_ORCH_URL=http://host.docker.internal:7700
|
|
||||||
extra_hosts:
|
extra_hosts:
|
||||||
- "host.docker.internal:host-gateway"
|
- "host.docker.internal:host-gateway"
|
||||||
restart: "no"
|
restart: "no"
|
||||||
|
|
||||||
|
volumes:
|
||||||
|
peregrine-test-data:
|
||||||
|
|
|
||||||
49
compose.yml
49
compose.yml
|
|
@ -1,11 +1,9 @@
|
||||||
# compose.yml — Peregrine by Circuit Forge LLC
|
# compose.yml — Peregrine by Circuit Forge LLC
|
||||||
# Profiles: remote | cpu | single-gpu | dual-gpu-ollama
|
# Profiles: remote | cpu | single-gpu | dual-gpu-ollama | dual-gpu-vllm | dual-gpu-mixed
|
||||||
services:
|
services:
|
||||||
|
|
||||||
app:
|
app:
|
||||||
build:
|
build: .
|
||||||
context: ..
|
|
||||||
dockerfile: peregrine/Dockerfile.cfcore
|
|
||||||
command: >
|
command: >
|
||||||
bash -c "streamlit run app/app.py
|
bash -c "streamlit run app/app.py
|
||||||
--server.port=8501
|
--server.port=8501
|
||||||
|
|
@ -35,7 +33,6 @@ services:
|
||||||
- FORGEJO_API_URL=${FORGEJO_API_URL:-}
|
- FORGEJO_API_URL=${FORGEJO_API_URL:-}
|
||||||
- PYTHONUNBUFFERED=1
|
- PYTHONUNBUFFERED=1
|
||||||
- PYTHONLOGGING=WARNING
|
- PYTHONLOGGING=WARNING
|
||||||
- PEREGRINE_CADDY_PROXY=1
|
|
||||||
depends_on:
|
depends_on:
|
||||||
searxng:
|
searxng:
|
||||||
condition: service_healthy
|
condition: service_healthy
|
||||||
|
|
@ -43,37 +40,12 @@ services:
|
||||||
- "host.docker.internal:host-gateway"
|
- "host.docker.internal:host-gateway"
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
|
|
||||||
api:
|
|
||||||
build:
|
|
||||||
context: ..
|
|
||||||
dockerfile: peregrine/Dockerfile.cfcore
|
|
||||||
command: >
|
|
||||||
bash -c "uvicorn dev_api:app --host 0.0.0.0 --port 8601"
|
|
||||||
volumes:
|
|
||||||
- ./config:/app/config
|
|
||||||
- ./data:/app/data
|
|
||||||
- ${DOCS_DIR:-~/Documents/JobSearch}:/docs
|
|
||||||
environment:
|
|
||||||
- STAGING_DB=/app/data/staging.db
|
|
||||||
- DOCS_DIR=/docs
|
|
||||||
- ANTHROPIC_API_KEY=${ANTHROPIC_API_KEY:-}
|
|
||||||
- OPENAI_COMPAT_URL=${OPENAI_COMPAT_URL:-}
|
|
||||||
- OPENAI_COMPAT_KEY=${OPENAI_COMPAT_KEY:-}
|
|
||||||
- PEREGRINE_GPU_COUNT=${PEREGRINE_GPU_COUNT:-0}
|
|
||||||
- PEREGRINE_GPU_NAMES=${PEREGRINE_GPU_NAMES:-}
|
|
||||||
- PYTHONUNBUFFERED=1
|
|
||||||
extra_hosts:
|
|
||||||
- "host.docker.internal:host-gateway"
|
|
||||||
restart: unless-stopped
|
|
||||||
|
|
||||||
web:
|
web:
|
||||||
build:
|
build:
|
||||||
context: .
|
context: .
|
||||||
dockerfile: docker/web/Dockerfile
|
dockerfile: docker/web/Dockerfile
|
||||||
ports:
|
ports:
|
||||||
- "${VUE_PORT:-8506}:80"
|
- "${VUE_PORT:-8506}:80"
|
||||||
depends_on:
|
|
||||||
- api
|
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
|
|
||||||
searxng:
|
searxng:
|
||||||
|
|
@ -129,6 +101,23 @@ services:
|
||||||
profiles: [single-gpu, dual-gpu-ollama, dual-gpu-vllm, dual-gpu-mixed]
|
profiles: [single-gpu, dual-gpu-ollama, dual-gpu-vllm, dual-gpu-mixed]
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
|
|
||||||
|
vllm:
|
||||||
|
image: vllm/vllm-openai:latest
|
||||||
|
ports:
|
||||||
|
- "${VLLM_PORT:-8000}:8000"
|
||||||
|
volumes:
|
||||||
|
- ${VLLM_MODELS_DIR:-~/models/vllm}:/models
|
||||||
|
command: >
|
||||||
|
--model /models/${VLLM_MODEL:-Ouro-1.4B}
|
||||||
|
--trust-remote-code
|
||||||
|
--max-model-len 4096
|
||||||
|
--gpu-memory-utilization 0.75
|
||||||
|
--enforce-eager
|
||||||
|
--max-num-seqs 8
|
||||||
|
--cpu-offload-gb ${CPU_OFFLOAD_GB:-0}
|
||||||
|
profiles: [dual-gpu-vllm, dual-gpu-mixed]
|
||||||
|
restart: unless-stopped
|
||||||
|
|
||||||
finetune:
|
finetune:
|
||||||
build:
|
build:
|
||||||
context: .
|
context: .
|
||||||
|
|
|
||||||
|
|
@ -1,62 +0,0 @@
|
||||||
backends:
|
|
||||||
anthropic:
|
|
||||||
api_key_env: ANTHROPIC_API_KEY
|
|
||||||
enabled: false
|
|
||||||
model: claude-sonnet-4-6
|
|
||||||
supports_images: true
|
|
||||||
type: anthropic
|
|
||||||
claude_code:
|
|
||||||
api_key: any
|
|
||||||
base_url: http://localhost:3009/v1
|
|
||||||
enabled: false
|
|
||||||
model: claude-code-terminal
|
|
||||||
supports_images: true
|
|
||||||
type: openai_compat
|
|
||||||
github_copilot:
|
|
||||||
api_key: any
|
|
||||||
base_url: http://localhost:3010/v1
|
|
||||||
enabled: false
|
|
||||||
model: gpt-4o
|
|
||||||
supports_images: false
|
|
||||||
type: openai_compat
|
|
||||||
ollama:
|
|
||||||
api_key: ollama
|
|
||||||
base_url: http://host.docker.internal:11434/v1
|
|
||||||
enabled: true
|
|
||||||
model: llama3.1:8b # generic — no personal fine-tunes in cloud
|
|
||||||
supports_images: false
|
|
||||||
type: openai_compat
|
|
||||||
ollama_research:
|
|
||||||
api_key: ollama
|
|
||||||
base_url: http://host.docker.internal:11434/v1
|
|
||||||
enabled: true
|
|
||||||
model: llama3.1:8b
|
|
||||||
supports_images: false
|
|
||||||
type: openai_compat
|
|
||||||
vision_service:
|
|
||||||
base_url: http://host.docker.internal:8002
|
|
||||||
enabled: true
|
|
||||||
supports_images: true
|
|
||||||
type: vision_service
|
|
||||||
vllm:
|
|
||||||
api_key: ''
|
|
||||||
base_url: http://host.docker.internal:8000/v1
|
|
||||||
enabled: true
|
|
||||||
model: __auto__
|
|
||||||
supports_images: false
|
|
||||||
type: openai_compat
|
|
||||||
vllm_research:
|
|
||||||
api_key: ''
|
|
||||||
base_url: http://host.docker.internal:8000/v1
|
|
||||||
enabled: true
|
|
||||||
model: __auto__
|
|
||||||
supports_images: false
|
|
||||||
type: openai_compat
|
|
||||||
fallback_order:
|
|
||||||
- vllm
|
|
||||||
- ollama
|
|
||||||
research_fallback_order:
|
|
||||||
- vllm_research
|
|
||||||
- ollama_research
|
|
||||||
vision_fallback_order:
|
|
||||||
- vision_service
|
|
||||||
|
|
@ -28,9 +28,9 @@ backends:
|
||||||
type: openai_compat
|
type: openai_compat
|
||||||
ollama_research:
|
ollama_research:
|
||||||
api_key: ollama
|
api_key: ollama
|
||||||
base_url: http://ollama_research:11434/v1
|
base_url: http://host.docker.internal:11434/v1
|
||||||
enabled: true
|
enabled: true
|
||||||
model: llama3.1:8b
|
model: llama3.2:3b
|
||||||
supports_images: false
|
supports_images: false
|
||||||
type: openai_compat
|
type: openai_compat
|
||||||
vision_service:
|
vision_service:
|
||||||
|
|
@ -45,11 +45,6 @@ backends:
|
||||||
model: __auto__
|
model: __auto__
|
||||||
supports_images: false
|
supports_images: false
|
||||||
type: openai_compat
|
type: openai_compat
|
||||||
cf_orch:
|
|
||||||
service: vllm
|
|
||||||
model_candidates:
|
|
||||||
- Qwen2.5-3B-Instruct
|
|
||||||
ttl_s: 300
|
|
||||||
vllm_research:
|
vllm_research:
|
||||||
api_key: ''
|
api_key: ''
|
||||||
base_url: http://host.docker.internal:8000/v1
|
base_url: http://host.docker.internal:8000/v1
|
||||||
|
|
|
||||||
1189
dev-api.py
1189
dev-api.py
File diff suppressed because it is too large
Load diff
|
|
@ -4,8 +4,6 @@ WORKDIR /app
|
||||||
COPY web/package*.json ./
|
COPY web/package*.json ./
|
||||||
RUN npm ci --prefer-offline
|
RUN npm ci --prefer-offline
|
||||||
COPY web/ ./
|
COPY web/ ./
|
||||||
ARG VITE_BASE_PATH=/
|
|
||||||
ENV VITE_BASE_PATH=${VITE_BASE_PATH}
|
|
||||||
RUN npm run build
|
RUN npm run build
|
||||||
|
|
||||||
# Stage 2: serve
|
# Stage 2: serve
|
||||||
|
|
|
||||||
|
|
@ -2,18 +2,12 @@ server {
|
||||||
listen 80;
|
listen 80;
|
||||||
server_name _;
|
server_name _;
|
||||||
|
|
||||||
client_max_body_size 20m;
|
|
||||||
|
|
||||||
root /usr/share/nginx/html;
|
root /usr/share/nginx/html;
|
||||||
index index.html;
|
index index.html;
|
||||||
|
|
||||||
# Proxy API calls to the FastAPI backend service
|
# SPA fallback
|
||||||
location /api/ {
|
location / {
|
||||||
proxy_pass http://api:8601;
|
try_files $uri $uri/ /index.html;
|
||||||
proxy_set_header Host $host;
|
|
||||||
proxy_set_header X-Real-IP $remote_addr;
|
|
||||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
|
||||||
proxy_read_timeout 120s;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
# Cache static assets
|
# Cache static assets
|
||||||
|
|
@ -21,9 +15,4 @@ server {
|
||||||
expires 1y;
|
expires 1y;
|
||||||
add_header Cache-Control "public, immutable";
|
add_header Cache-Control "public, immutable";
|
||||||
}
|
}
|
||||||
|
|
||||||
# SPA fallback — must come after API and assets
|
|
||||||
location / {
|
|
||||||
try_files $uri $uri/ /index.html;
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -144,7 +144,7 @@ Shipped in v0.4.0. Ongoing maintenance and known decisions:
|
||||||
|
|
||||||
## Container Runtime
|
## Container Runtime
|
||||||
|
|
||||||
- ~~**Podman support**~~ — ✅ Done: `Makefile` auto-detects `docker compose` / `podman compose` / `podman-compose`; `compose.podman-gpu.yml` CDI override for GPU profiles; `install.sh` detects existing Podman and skips Docker install.
|
- ~~**Podman support**~~ — ✅ Done: `Makefile` auto-detects `docker compose` / `podman compose` / `podman-compose`; `compose.podman-gpu.yml` CDI override for GPU profiles; `setup.sh` detects existing Podman and skips Docker install.
|
||||||
- **FastAPI migration path** — When concurrent-user scale demands it: port Streamlit pages to FastAPI + React/HTMX, keep `scripts/` layer unchanged, replace daemon threads with Celery + Redis. The `scripts/` separation already makes this clean.
|
- **FastAPI migration path** — When concurrent-user scale demands it: port Streamlit pages to FastAPI + React/HTMX, keep `scripts/` layer unchanged, replace daemon threads with Celery + Redis. The `scripts/` separation already makes this clean.
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
|
||||||
|
|
@ -102,23 +102,6 @@ Before opening a pull request:
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
## Database Migrations
|
|
||||||
|
|
||||||
Peregrine uses a numbered SQL migration system (Rails-style). Each migration is a `.sql` file in the `migrations/` directory at the repo root, named `NNN_description.sql` (e.g. `002_add_foo_column.sql`). Applied migrations are tracked in a `schema_migrations` table in each user database.
|
|
||||||
|
|
||||||
### Adding a migration
|
|
||||||
|
|
||||||
1. Create `migrations/NNN_description.sql` where `NNN` is the next sequential number (zero-padded to 3 digits).
|
|
||||||
2. Write standard SQL — `CREATE TABLE IF NOT EXISTS`, `ALTER TABLE ADD COLUMN`, etc. Keep each migration idempotent where possible.
|
|
||||||
3. Do **not** modify `scripts/db.py`'s legacy `_MIGRATIONS` lists — those are superseded and will be removed once all active databases have been bootstrapped by the migration runner.
|
|
||||||
4. The runner (`scripts/db_migrate.py`) applies pending migrations at startup automatically (both FastAPI and Streamlit paths call `migrate_db(db_path)`).
|
|
||||||
|
|
||||||
### Rollbacks
|
|
||||||
|
|
||||||
SQLite does not support transactional DDL for all statement types. Write forward-only migrations. If you need to undo a schema change, add a new migration that reverses it.
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## What NOT to Do
|
## What NOT to Do
|
||||||
|
|
||||||
- Do not commit `config/user.yaml`, `config/notion.yaml`, `config/email.yaml`, `config/adzuna.yaml`, or any `config/integrations/*.yaml` — all are gitignored
|
- Do not commit `config/user.yaml`, `config/notion.yaml`, `config/email.yaml`, `config/adzuna.yaml`, or any `config/integrations/*.yaml` — all are gitignored
|
||||||
|
|
|
||||||
|
|
@ -7,7 +7,7 @@ This page walks through a full Peregrine installation from scratch.
|
||||||
## Prerequisites
|
## Prerequisites
|
||||||
|
|
||||||
- **Git** — to clone the repository
|
- **Git** — to clone the repository
|
||||||
- **Internet connection** — `install.sh` downloads Docker and other dependencies
|
- **Internet connection** — `setup.sh` downloads Docker and other dependencies
|
||||||
- **Operating system**: Ubuntu/Debian, Fedora/RHEL, Arch Linux, or macOS (with Docker Desktop)
|
- **Operating system**: Ubuntu/Debian, Fedora/RHEL, Arch Linux, or macOS (with Docker Desktop)
|
||||||
|
|
||||||
!!! warning "Windows"
|
!!! warning "Windows"
|
||||||
|
|
@ -18,19 +18,19 @@ This page walks through a full Peregrine installation from scratch.
|
||||||
## Step 1 — Clone the repository
|
## Step 1 — Clone the repository
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
git clone https://git.opensourcesolarpunk.com/Circuit-Forge/peregrine
|
git clone https://git.circuitforge.io/circuitforge/peregrine
|
||||||
cd peregrine
|
cd peregrine
|
||||||
```
|
```
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
## Step 2 — Run install.sh
|
## Step 2 — Run setup.sh
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
bash install.sh
|
bash setup.sh
|
||||||
```
|
```
|
||||||
|
|
||||||
`install.sh` performs the following automatically:
|
`setup.sh` performs the following automatically:
|
||||||
|
|
||||||
1. **Detects your platform** (Ubuntu/Debian, Fedora/RHEL, Arch, macOS)
|
1. **Detects your platform** (Ubuntu/Debian, Fedora/RHEL, Arch, macOS)
|
||||||
2. **Installs Git** if not already present
|
2. **Installs Git** if not already present
|
||||||
|
|
@ -40,10 +40,10 @@ bash install.sh
|
||||||
6. **Creates `.env` from `.env.example`** — edit `.env` to customise ports and model storage paths before starting
|
6. **Creates `.env` from `.env.example`** — edit `.env` to customise ports and model storage paths before starting
|
||||||
|
|
||||||
!!! note "macOS"
|
!!! note "macOS"
|
||||||
`install.sh` installs Docker Desktop via Homebrew (`brew install --cask docker`) then exits. Open Docker Desktop, start it, then re-run the script.
|
`setup.sh` installs Docker Desktop via Homebrew (`brew install --cask docker`) then exits. Open Docker Desktop, start it, then re-run the script.
|
||||||
|
|
||||||
!!! note "GPU requirement"
|
!!! note "GPU requirement"
|
||||||
For GPU support, `nvidia-smi` must return output before you run `install.sh`. Install your NVIDIA driver first. The Container Toolkit installation will fail silently if the driver is not present.
|
For GPU support, `nvidia-smi` must return output before you run `setup.sh`. Install your NVIDIA driver first. The Container Toolkit installation will fail silently if the driver is not present.
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
|
|
@ -107,7 +107,7 @@ The first-run wizard launches automatically. See [First-Run Wizard](first-run-wi
|
||||||
Only NVIDIA GPUs are supported. AMD ROCm is not currently supported.
|
Only NVIDIA GPUs are supported. AMD ROCm is not currently supported.
|
||||||
|
|
||||||
Requirements:
|
Requirements:
|
||||||
- NVIDIA driver installed and `nvidia-smi` working before running `install.sh`
|
- NVIDIA driver installed and `nvidia-smi` working before running `setup.sh`
|
||||||
- CUDA 12.x recommended (CUDA 11.x may work but is untested)
|
- CUDA 12.x recommended (CUDA 11.x may work but is untested)
|
||||||
- Minimum 8 GB VRAM for `single-gpu` profile with default models
|
- Minimum 8 GB VRAM for `single-gpu` profile with default models
|
||||||
- For `dual-gpu`: GPU 0 is assigned to Ollama, GPU 1 to vLLM
|
- For `dual-gpu`: GPU 0 is assigned to Ollama, GPU 1 to vLLM
|
||||||
|
|
|
||||||
|
|
@ -12,7 +12,7 @@ Peregrine automates the full job search lifecycle: discovery, matching, cover le
|
||||||
# 1. Clone and install dependencies
|
# 1. Clone and install dependencies
|
||||||
git clone https://git.circuitforge.io/circuitforge/peregrine
|
git clone https://git.circuitforge.io/circuitforge/peregrine
|
||||||
cd peregrine
|
cd peregrine
|
||||||
bash install.sh
|
bash setup.sh
|
||||||
|
|
||||||
# 2. Start Peregrine
|
# 2. Start Peregrine
|
||||||
make start # no GPU, API-only
|
make start # no GPU, API-only
|
||||||
|
|
|
||||||
|
|
@ -337,7 +337,7 @@ webhook_url: "https://discord.com/api/webhooks/..."
|
||||||
|
|
||||||
## .env
|
## .env
|
||||||
|
|
||||||
Docker port and path overrides. Created from `.env.example` by `install.sh`. Gitignored.
|
Docker port and path overrides. Created from `.env.example` by `setup.sh`. Gitignored.
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# Ports (change if defaults conflict with existing services)
|
# Ports (change if defaults conflict with existing services)
|
||||||
|
|
|
||||||
|
|
@ -1,157 +0,0 @@
|
||||||
# Forgejo Feedback API — Schema & Bug Bot Setup
|
|
||||||
|
|
||||||
## API Endpoints Used
|
|
||||||
|
|
||||||
| Operation | Method | Endpoint |
|
|
||||||
|-----------|--------|----------|
|
|
||||||
| List labels | GET | `/repos/{owner}/{repo}/labels` |
|
|
||||||
| Create label | POST | `/repos/{owner}/{repo}/labels` |
|
|
||||||
| Create issue | POST | `/repos/{owner}/{repo}/issues` |
|
|
||||||
| Upload attachment | POST | `/repos/{owner}/{repo}/issues/{index}/assets` |
|
|
||||||
| Post comment | POST | `/repos/{owner}/{repo}/issues/{index}/comments` |
|
|
||||||
|
|
||||||
Base URL: `https://git.opensourcesolarpunk.com/api/v1`
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Issue Creation Payload
|
|
||||||
|
|
||||||
```json
|
|
||||||
POST /repos/{owner}/{repo}/issues
|
|
||||||
{
|
|
||||||
"title": "string",
|
|
||||||
"body": "markdown string",
|
|
||||||
"labels": [1, 2, 3] // array of label IDs (not names)
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
Response (201):
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"number": 42,
|
|
||||||
"html_url": "https://git.opensourcesolarpunk.com/pyr0ball/peregrine/issues/42"
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Issue Body Structure
|
|
||||||
|
|
||||||
The `build_issue_body()` function produces this markdown layout:
|
|
||||||
|
|
||||||
```markdown
|
|
||||||
## 🐛 Bug | ✨ Feature Request | 💬 Other
|
|
||||||
|
|
||||||
<user description>
|
|
||||||
|
|
||||||
### Reproduction Steps ← bug type only, when repro provided
|
|
||||||
|
|
||||||
<repro steps>
|
|
||||||
|
|
||||||
### Context
|
|
||||||
|
|
||||||
- **page:** Home
|
|
||||||
- **version:** v0.2.5-61-ga6d787f ← from `git describe`; "dev" inside Docker
|
|
||||||
- **tier:** free | paid | premium
|
|
||||||
- **llm_backend:** ollama | vllm | claude_code | ...
|
|
||||||
- **os:** Linux-6.8.0-65-generic-x86_64-with-glibc2.39
|
|
||||||
- **timestamp:** 2026-03-06T15:58:29Z
|
|
||||||
|
|
||||||
<details>
|
|
||||||
<summary>App Logs (last 100 lines)</summary>
|
|
||||||
|
|
||||||
```
|
|
||||||
... log content (PII masked) ...
|
|
||||||
```
|
|
||||||
|
|
||||||
</details>
|
|
||||||
|
|
||||||
### Recent Listings ← only when include_diag = True
|
|
||||||
|
|
||||||
- [Title @ Company](url)
|
|
||||||
|
|
||||||
---
|
|
||||||
*Submitted by: Name <email>* ← only when attribution consent checked
|
|
||||||
```
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Screenshot Attachment
|
|
||||||
|
|
||||||
Screenshots are uploaded as issue assets, then embedded inline via a follow-up comment:
|
|
||||||
|
|
||||||
```markdown
|
|
||||||
### Screenshot
|
|
||||||
|
|
||||||

|
|
||||||
```
|
|
||||||
|
|
||||||
This keeps the issue body clean and puts the screenshot in a distinct comment.
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Labels
|
|
||||||
|
|
||||||
| Label | Color | Applied when |
|
|
||||||
|-------|-------|-------------|
|
|
||||||
| `beta-feedback` | `#0075ca` | Always |
|
|
||||||
| `needs-triage` | `#e4e669` | Always |
|
|
||||||
| `bug` | `#d73a4a` | Type = Bug |
|
|
||||||
| `feature-request` | `#a2eeef` | Type = Feature Request |
|
|
||||||
| `question` | `#d876e3` | Type = Other |
|
|
||||||
|
|
||||||
Labels are looked up by name on each submission; missing ones are auto-created via `_ensure_labels()`.
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Bug Bot Account Setup
|
|
||||||
|
|
||||||
The token currently bundled in `.env` is pyr0ball's personal token. For beta distribution,
|
|
||||||
create a dedicated bot account so the token has limited scope and can be rotated independently.
|
|
||||||
|
|
||||||
### Why a bot account?
|
|
||||||
|
|
||||||
- Token gets bundled in beta testers' `.env` — shouldn't be tied to the repo owner's account
|
|
||||||
- Bot can be limited to issue write only (cannot push code, see private repos, etc.)
|
|
||||||
- Token rotation doesn't affect the owner's other integrations
|
|
||||||
|
|
||||||
### Steps (requires Forgejo admin panel — API admin access not available on this token)
|
|
||||||
|
|
||||||
1. **Create bot account** at `https://git.opensourcesolarpunk.com/-/admin/users/new`
|
|
||||||
- Username: `peregrine-bot` (or `cf-bugbot`)
|
|
||||||
- Email: a real address you control (e.g. `bot+peregrine@circuitforge.tech`)
|
|
||||||
- Set a strong password (store in your password manager)
|
|
||||||
- Check "Prohibit login" if you want a pure API-only account
|
|
||||||
|
|
||||||
2. **Add as collaborator** on `pyr0ball/peregrine`:
|
|
||||||
- Settings → Collaborators → Add `peregrine-bot` with **Write** access
|
|
||||||
- Write access is required to create labels; issue creation alone would need only Read+Comment
|
|
||||||
|
|
||||||
3. **Generate API token** (log in as the bot, or use admin impersonation):
|
|
||||||
- User Settings → Applications → Generate New Token
|
|
||||||
- Name: `peregrine-feedback`
|
|
||||||
- Scopes: `issue` (write) — no repo code access needed
|
|
||||||
- Copy the token — it won't be shown again
|
|
||||||
|
|
||||||
4. **Update environment**:
|
|
||||||
```
|
|
||||||
FORGEJO_API_TOKEN=<new bot token>
|
|
||||||
FORGEJO_REPO=pyr0ball/peregrine
|
|
||||||
FORGEJO_API_URL=https://git.opensourcesolarpunk.com/api/v1
|
|
||||||
```
|
|
||||||
Update both `.env` (dev machine) and any beta tester `.env` files.
|
|
||||||
|
|
||||||
5. **Verify** the bot can create issues:
|
|
||||||
```bash
|
|
||||||
curl -s -X POST https://git.opensourcesolarpunk.com/api/v1/repos/pyr0ball/peregrine/issues \
|
|
||||||
-H "Authorization: token <bot-token>" \
|
|
||||||
-H "Content-Type: application/json" \
|
|
||||||
-d '{"title":"[TEST] bot token check","body":"safe to close","labels":[]}'
|
|
||||||
```
|
|
||||||
Expected: HTTP 201 with `number` and `html_url` in response.
|
|
||||||
|
|
||||||
### Future: Heimdall token management
|
|
||||||
|
|
||||||
Once Heimdall is live, the bot token should be served by the license server rather than
|
|
||||||
bundled in `.env`. The app fetches it at startup using the user's license key → token is
|
|
||||||
never stored on disk and can be rotated server-side. Track as a future Heimdall feature.
|
|
||||||
|
|
@ -84,7 +84,7 @@ case "$CMD" in
|
||||||
|
|
||||||
setup)
|
setup)
|
||||||
info "Running dependency installer..."
|
info "Running dependency installer..."
|
||||||
bash install.sh
|
bash setup.sh
|
||||||
;;
|
;;
|
||||||
|
|
||||||
preflight)
|
preflight)
|
||||||
|
|
|
||||||
|
|
@ -1,97 +0,0 @@
|
||||||
-- Migration 001: Baseline schema
|
|
||||||
-- Captures the full schema as of v0.8.5 (all columns including those added via ALTER TABLE)
|
|
||||||
|
|
||||||
CREATE TABLE IF NOT EXISTS jobs (
|
|
||||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
||||||
title TEXT,
|
|
||||||
company TEXT,
|
|
||||||
url TEXT UNIQUE,
|
|
||||||
source TEXT,
|
|
||||||
location TEXT,
|
|
||||||
is_remote INTEGER DEFAULT 0,
|
|
||||||
salary TEXT,
|
|
||||||
description TEXT,
|
|
||||||
match_score REAL,
|
|
||||||
keyword_gaps TEXT,
|
|
||||||
date_found TEXT,
|
|
||||||
status TEXT DEFAULT 'pending',
|
|
||||||
notion_page_id TEXT,
|
|
||||||
cover_letter TEXT,
|
|
||||||
applied_at TEXT,
|
|
||||||
interview_date TEXT,
|
|
||||||
rejection_stage TEXT,
|
|
||||||
phone_screen_at TEXT,
|
|
||||||
interviewing_at TEXT,
|
|
||||||
offer_at TEXT,
|
|
||||||
hired_at TEXT,
|
|
||||||
survey_at TEXT,
|
|
||||||
calendar_event_id TEXT,
|
|
||||||
optimized_resume TEXT,
|
|
||||||
ats_gap_report TEXT
|
|
||||||
);
|
|
||||||
|
|
||||||
CREATE TABLE IF NOT EXISTS job_contacts (
|
|
||||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
||||||
job_id INTEGER,
|
|
||||||
direction TEXT,
|
|
||||||
subject TEXT,
|
|
||||||
from_addr TEXT,
|
|
||||||
to_addr TEXT,
|
|
||||||
body TEXT,
|
|
||||||
received_at TEXT,
|
|
||||||
is_response_needed INTEGER DEFAULT 0,
|
|
||||||
responded_at TEXT,
|
|
||||||
message_id TEXT,
|
|
||||||
stage_signal TEXT,
|
|
||||||
suggestion_dismissed INTEGER DEFAULT 0
|
|
||||||
);
|
|
||||||
|
|
||||||
CREATE TABLE IF NOT EXISTS company_research (
|
|
||||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
||||||
job_id INTEGER UNIQUE,
|
|
||||||
generated_at TEXT,
|
|
||||||
company_brief TEXT,
|
|
||||||
ceo_brief TEXT,
|
|
||||||
talking_points TEXT,
|
|
||||||
raw_output TEXT,
|
|
||||||
tech_brief TEXT,
|
|
||||||
funding_brief TEXT,
|
|
||||||
competitors_brief TEXT,
|
|
||||||
red_flags TEXT,
|
|
||||||
scrape_used INTEGER DEFAULT 0,
|
|
||||||
accessibility_brief TEXT
|
|
||||||
);
|
|
||||||
|
|
||||||
CREATE TABLE IF NOT EXISTS background_tasks (
|
|
||||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
||||||
task_type TEXT,
|
|
||||||
job_id INTEGER,
|
|
||||||
params TEXT,
|
|
||||||
status TEXT DEFAULT 'pending',
|
|
||||||
error TEXT,
|
|
||||||
created_at TEXT,
|
|
||||||
started_at TEXT,
|
|
||||||
finished_at TEXT,
|
|
||||||
stage TEXT,
|
|
||||||
updated_at TEXT
|
|
||||||
);
|
|
||||||
|
|
||||||
CREATE TABLE IF NOT EXISTS survey_responses (
|
|
||||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
||||||
job_id INTEGER,
|
|
||||||
survey_name TEXT,
|
|
||||||
received_at TEXT,
|
|
||||||
source TEXT,
|
|
||||||
raw_input TEXT,
|
|
||||||
image_path TEXT,
|
|
||||||
mode TEXT,
|
|
||||||
llm_output TEXT,
|
|
||||||
reported_score REAL,
|
|
||||||
created_at TEXT
|
|
||||||
);
|
|
||||||
|
|
||||||
CREATE TABLE IF NOT EXISTS digest_queue (
|
|
||||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
||||||
job_contact_id INTEGER UNIQUE,
|
|
||||||
created_at TEXT
|
|
||||||
);
|
|
||||||
|
|
@ -3,13 +3,7 @@
|
||||||
# Keep in sync with environment.yml
|
# Keep in sync with environment.yml
|
||||||
|
|
||||||
# ── CircuitForge shared core ───────────────────────────────────────────────
|
# ── CircuitForge shared core ───────────────────────────────────────────────
|
||||||
# Requires circuitforge-core >= 0.8.0 (config.load_env, db, tasks; resources moved to circuitforge-orch).
|
-e ../circuitforge-core
|
||||||
# Local dev / Docker (parent-context build): path install works because
|
|
||||||
# circuitforge-core/ is a sibling directory.
|
|
||||||
# CI / fresh checkouts: falls back to the Forgejo VCS URL below.
|
|
||||||
# To use local editable install run: pip install -e ../circuitforge-core
|
|
||||||
# TODO: pin to @v0.7.0 tag once cf-core cuts a release tag.
|
|
||||||
git+https://git.opensourcesolarpunk.com/Circuit-Forge/circuitforge-core.git@main
|
|
||||||
|
|
||||||
# ── Web UI ────────────────────────────────────────────────────────────────
|
# ── Web UI ────────────────────────────────────────────────────────────────
|
||||||
streamlit>=1.35
|
streamlit>=1.35
|
||||||
|
|
|
||||||
|
|
@ -70,7 +70,7 @@ def scrape(profile: dict, location: str, results_wanted: int = 50) -> list[dict]
|
||||||
print(f" [adzuna] Skipped — {exc}")
|
print(f" [adzuna] Skipped — {exc}")
|
||||||
return []
|
return []
|
||||||
|
|
||||||
titles = profile.get("titles") or profile.get("job_titles", [])
|
titles = profile.get("titles", [])
|
||||||
hours_old = profile.get("hours_old", 240)
|
hours_old = profile.get("hours_old", 240)
|
||||||
max_days_old = max(1, hours_old // 24)
|
max_days_old = max(1, hours_old // 24)
|
||||||
is_remote_search = location.lower() == "remote"
|
is_remote_search = location.lower() == "remote"
|
||||||
|
|
|
||||||
|
|
@ -121,7 +121,7 @@ def scrape(profile: dict, location: str, results_wanted: int = 50) -> list[dict]
|
||||||
return []
|
return []
|
||||||
metros = [metro]
|
metros = [metro]
|
||||||
|
|
||||||
titles: list[str] = profile.get("titles") or profile.get("job_titles", [])
|
titles: list[str] = profile.get("titles", [])
|
||||||
hours_old: int = profile.get("hours_old", 240)
|
hours_old: int = profile.get("hours_old", 240)
|
||||||
cutoff = datetime.now(tz=timezone.utc).timestamp() - (hours_old * 3600)
|
cutoff = datetime.now(tz=timezone.utc).timestamp() - (hours_old * 3600)
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -107,7 +107,7 @@ def scrape(profile: dict, location: str, results_wanted: int = 50) -> list[dict]
|
||||||
)
|
)
|
||||||
page = ctx.new_page()
|
page = ctx.new_page()
|
||||||
|
|
||||||
for title in (profile.get("titles") or profile.get("job_titles", [])):
|
for title in profile.get("titles", []):
|
||||||
if len(results) >= results_wanted:
|
if len(results) >= results_wanted:
|
||||||
break
|
break
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -383,19 +383,6 @@ def mark_applied(db_path: Path = DEFAULT_DB, ids: list[int] = None) -> None:
|
||||||
conn.close()
|
conn.close()
|
||||||
|
|
||||||
|
|
||||||
def cancel_task(db_path: Path = DEFAULT_DB, task_id: int = 0) -> bool:
|
|
||||||
"""Cancel a single queued/running task by id. Returns True if a row was updated."""
|
|
||||||
conn = sqlite3.connect(db_path)
|
|
||||||
count = conn.execute(
|
|
||||||
"UPDATE background_tasks SET status='failed', error='Cancelled by user',"
|
|
||||||
" finished_at=datetime('now') WHERE id=? AND status IN ('queued','running')",
|
|
||||||
(task_id,),
|
|
||||||
).rowcount
|
|
||||||
conn.commit()
|
|
||||||
conn.close()
|
|
||||||
return count > 0
|
|
||||||
|
|
||||||
|
|
||||||
def kill_stuck_tasks(db_path: Path = DEFAULT_DB) -> int:
|
def kill_stuck_tasks(db_path: Path = DEFAULT_DB) -> int:
|
||||||
"""Mark all queued/running background tasks as failed. Returns count killed."""
|
"""Mark all queued/running background tasks as failed. Returns count killed."""
|
||||||
conn = sqlite3.connect(db_path)
|
conn = sqlite3.connect(db_path)
|
||||||
|
|
|
||||||
|
|
@ -1,73 +0,0 @@
|
||||||
"""
|
|
||||||
db_migrate.py — Rails-style numbered SQL migration runner for Peregrine user DBs.
|
|
||||||
|
|
||||||
Migration files live in migrations/ (sibling to this script's parent directory),
|
|
||||||
named NNN_description.sql (e.g. 001_baseline.sql). They are applied in sorted
|
|
||||||
order and tracked in the schema_migrations table so each runs exactly once.
|
|
||||||
|
|
||||||
Usage:
|
|
||||||
from scripts.db_migrate import migrate_db
|
|
||||||
migrate_db(Path("/path/to/user.db"))
|
|
||||||
"""
|
|
||||||
|
|
||||||
import logging
|
|
||||||
import sqlite3
|
|
||||||
from pathlib import Path
|
|
||||||
|
|
||||||
log = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
# Resolved at import time: peregrine repo root / migrations/
|
|
||||||
_MIGRATIONS_DIR = Path(__file__).parent.parent / "migrations"
|
|
||||||
|
|
||||||
_CREATE_MIGRATIONS_TABLE = """
|
|
||||||
CREATE TABLE IF NOT EXISTS schema_migrations (
|
|
||||||
version TEXT PRIMARY KEY,
|
|
||||||
applied_at TEXT NOT NULL DEFAULT (datetime('now'))
|
|
||||||
)
|
|
||||||
"""
|
|
||||||
|
|
||||||
|
|
||||||
def migrate_db(db_path: Path) -> list[str]:
|
|
||||||
"""Apply any pending migrations to db_path. Returns list of applied versions."""
|
|
||||||
applied: list[str] = []
|
|
||||||
|
|
||||||
con = sqlite3.connect(db_path)
|
|
||||||
try:
|
|
||||||
con.execute(_CREATE_MIGRATIONS_TABLE)
|
|
||||||
con.commit()
|
|
||||||
|
|
||||||
if not _MIGRATIONS_DIR.is_dir():
|
|
||||||
log.warning("migrations/ directory not found at %s — skipping", _MIGRATIONS_DIR)
|
|
||||||
return applied
|
|
||||||
|
|
||||||
migration_files = sorted(_MIGRATIONS_DIR.glob("*.sql"))
|
|
||||||
if not migration_files:
|
|
||||||
return applied
|
|
||||||
|
|
||||||
already_applied = {
|
|
||||||
row[0] for row in con.execute("SELECT version FROM schema_migrations")
|
|
||||||
}
|
|
||||||
|
|
||||||
for path in migration_files:
|
|
||||||
version = path.stem # e.g. "001_baseline"
|
|
||||||
if version in already_applied:
|
|
||||||
continue
|
|
||||||
|
|
||||||
sql = path.read_text(encoding="utf-8")
|
|
||||||
log.info("Applying migration %s to %s", version, db_path.name)
|
|
||||||
try:
|
|
||||||
con.executescript(sql)
|
|
||||||
con.execute(
|
|
||||||
"INSERT INTO schema_migrations (version) VALUES (?)", (version,)
|
|
||||||
)
|
|
||||||
con.commit()
|
|
||||||
applied.append(version)
|
|
||||||
log.info("Migration %s applied successfully", version)
|
|
||||||
except Exception as exc:
|
|
||||||
con.rollback()
|
|
||||||
log.error("Migration %s failed: %s", version, exc)
|
|
||||||
raise RuntimeError(f"Migration {version} failed: {exc}") from exc
|
|
||||||
finally:
|
|
||||||
con.close()
|
|
||||||
|
|
||||||
return applied
|
|
||||||
|
|
@ -34,21 +34,17 @@ CUSTOM_SCRAPERS: dict[str, object] = {
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
def load_config(config_dir: Path | None = None) -> tuple[dict, dict]:
|
def load_config() -> tuple[dict, dict]:
|
||||||
cfg = config_dir or CONFIG_DIR
|
profiles = yaml.safe_load(PROFILES_CFG.read_text())
|
||||||
profiles_path = cfg / "search_profiles.yaml"
|
notion_cfg = yaml.safe_load(NOTION_CFG.read_text())
|
||||||
notion_path = cfg / "notion.yaml"
|
|
||||||
profiles = yaml.safe_load(profiles_path.read_text())
|
|
||||||
notion_cfg = yaml.safe_load(notion_path.read_text()) if notion_path.exists() else {"field_map": {}, "token": None, "database_id": None}
|
|
||||||
return profiles, notion_cfg
|
return profiles, notion_cfg
|
||||||
|
|
||||||
|
|
||||||
def load_blocklist(config_dir: Path | None = None) -> dict:
|
def load_blocklist() -> dict:
|
||||||
"""Load global blocklist config. Returns dict with companies, industries, locations lists."""
|
"""Load global blocklist config. Returns dict with companies, industries, locations lists."""
|
||||||
blocklist_path = (config_dir or CONFIG_DIR) / "blocklist.yaml"
|
if not BLOCKLIST_CFG.exists():
|
||||||
if not blocklist_path.exists():
|
|
||||||
return {"companies": [], "industries": [], "locations": []}
|
return {"companies": [], "industries": [], "locations": []}
|
||||||
raw = yaml.safe_load(blocklist_path.read_text()) or {}
|
raw = yaml.safe_load(BLOCKLIST_CFG.read_text()) or {}
|
||||||
return {
|
return {
|
||||||
"companies": [c.lower() for c in raw.get("companies", []) if c],
|
"companies": [c.lower() for c in raw.get("companies", []) if c],
|
||||||
"industries": [i.lower() for i in raw.get("industries", []) if i],
|
"industries": [i.lower() for i in raw.get("industries", []) if i],
|
||||||
|
|
@ -121,15 +117,10 @@ def push_to_notion(notion: Client, db_id: str, job: dict, fm: dict) -> None:
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
def run_discovery(db_path: Path = DEFAULT_DB, notion_push: bool = False, config_dir: Path | None = None) -> None:
|
def run_discovery(db_path: Path = DEFAULT_DB, notion_push: bool = False) -> None:
|
||||||
# In cloud mode, config_dir is the per-user config directory derived from db_path.
|
profiles_cfg, notion_cfg = load_config()
|
||||||
# Falls back to the app-level /app/config for single-tenant deployments.
|
fm = notion_cfg["field_map"]
|
||||||
resolved_cfg = config_dir or Path(db_path).parent / "config"
|
blocklist = load_blocklist()
|
||||||
if not resolved_cfg.exists():
|
|
||||||
resolved_cfg = CONFIG_DIR
|
|
||||||
profiles_cfg, notion_cfg = load_config(resolved_cfg)
|
|
||||||
fm = notion_cfg.get("field_map") or {}
|
|
||||||
blocklist = load_blocklist(resolved_cfg)
|
|
||||||
|
|
||||||
_bl_summary = {k: len(v) for k, v in blocklist.items() if v}
|
_bl_summary = {k: len(v) for k, v in blocklist.items() if v}
|
||||||
if _bl_summary:
|
if _bl_summary:
|
||||||
|
|
@ -220,7 +211,7 @@ def run_discovery(db_path: Path = DEFAULT_DB, notion_push: bool = False, config_
|
||||||
try:
|
try:
|
||||||
jobspy_kwargs: dict = dict(
|
jobspy_kwargs: dict = dict(
|
||||||
site_name=boards,
|
site_name=boards,
|
||||||
search_term=" OR ".join(f'"{t}"' for t in (profile.get("titles") or profile.get("job_titles", []))),
|
search_term=" OR ".join(f'"{t}"' for t in profile["titles"]),
|
||||||
location=location,
|
location=location,
|
||||||
results_wanted=results_per_board,
|
results_wanted=results_per_board,
|
||||||
hours_old=profile.get("hours_old", 72),
|
hours_old=profile.get("hours_old", 72),
|
||||||
|
|
|
||||||
|
|
@ -26,14 +26,13 @@ LETTERS_DIR = _profile.docs_dir if _profile else Path.home() / "Documents" / "Jo
|
||||||
LETTER_GLOB = "*Cover Letter*.md"
|
LETTER_GLOB = "*Cover Letter*.md"
|
||||||
|
|
||||||
# Background injected into every prompt so the model has the candidate's facts
|
# Background injected into every prompt so the model has the candidate's facts
|
||||||
def _build_system_context(profile=None) -> str:
|
def _build_system_context() -> str:
|
||||||
p = profile or _profile
|
if not _profile:
|
||||||
if not p:
|
|
||||||
return "You are a professional cover letter writer. Write in first person."
|
return "You are a professional cover letter writer. Write in first person."
|
||||||
parts = [f"You are writing cover letters for {p.name}. {p.career_summary}"]
|
parts = [f"You are writing cover letters for {_profile.name}. {_profile.career_summary}"]
|
||||||
if p.candidate_voice:
|
if _profile.candidate_voice:
|
||||||
parts.append(
|
parts.append(
|
||||||
f"Voice and personality: {p.candidate_voice} "
|
f"Voice and personality: {_profile.candidate_voice} "
|
||||||
"Write in a way that reflects these authentic traits — not as a checklist, "
|
"Write in a way that reflects these authentic traits — not as a checklist, "
|
||||||
"but as a natural expression of who this person is."
|
"but as a natural expression of who this person is."
|
||||||
)
|
)
|
||||||
|
|
@ -126,17 +125,15 @@ _MISSION_DEFAULTS: dict[str, str] = {
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
def _build_mission_notes(profile=None, candidate_name: str | None = None) -> dict[str, str]:
|
def _build_mission_notes() -> dict[str, str]:
|
||||||
"""Merge user's custom mission notes with generic defaults."""
|
"""Merge user's custom mission notes with generic defaults."""
|
||||||
p = profile or _profile
|
prefs = _profile.mission_preferences if _profile else {}
|
||||||
name = candidate_name or _candidate
|
|
||||||
prefs = p.mission_preferences if p else {}
|
|
||||||
notes = {}
|
notes = {}
|
||||||
for industry, default_note in _MISSION_DEFAULTS.items():
|
for industry, default_note in _MISSION_DEFAULTS.items():
|
||||||
custom = (prefs.get(industry) or "").strip()
|
custom = (prefs.get(industry) or "").strip()
|
||||||
if custom:
|
if custom:
|
||||||
notes[industry] = (
|
notes[industry] = (
|
||||||
f"Mission alignment — {name} shared: \"{custom}\". "
|
f"Mission alignment — {_candidate} shared: \"{custom}\". "
|
||||||
"Para 3 should warmly and specifically reflect this authentic connection."
|
"Para 3 should warmly and specifically reflect this authentic connection."
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
|
|
@ -147,15 +144,12 @@ def _build_mission_notes(profile=None, candidate_name: str | None = None) -> dic
|
||||||
_MISSION_NOTES = _build_mission_notes()
|
_MISSION_NOTES = _build_mission_notes()
|
||||||
|
|
||||||
|
|
||||||
def detect_mission_alignment(
|
def detect_mission_alignment(company: str, description: str) -> str | None:
|
||||||
company: str, description: str, mission_notes: dict | None = None
|
|
||||||
) -> str | None:
|
|
||||||
"""Return a mission hint string if company/JD matches a preferred industry, else None."""
|
"""Return a mission hint string if company/JD matches a preferred industry, else None."""
|
||||||
notes = mission_notes if mission_notes is not None else _MISSION_NOTES
|
|
||||||
text = f"{company} {description}".lower()
|
text = f"{company} {description}".lower()
|
||||||
for industry, signals in _MISSION_SIGNALS.items():
|
for industry, signals in _MISSION_SIGNALS.items():
|
||||||
if any(sig in text for sig in signals):
|
if any(sig in text for sig in signals):
|
||||||
return notes[industry]
|
return _MISSION_NOTES[industry]
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -196,14 +190,10 @@ def build_prompt(
|
||||||
examples: list[dict],
|
examples: list[dict],
|
||||||
mission_hint: str | None = None,
|
mission_hint: str | None = None,
|
||||||
is_jobgether: bool = False,
|
is_jobgether: bool = False,
|
||||||
system_context: str | None = None,
|
|
||||||
candidate_name: str | None = None,
|
|
||||||
) -> str:
|
) -> str:
|
||||||
ctx = system_context if system_context is not None else SYSTEM_CONTEXT
|
parts = [SYSTEM_CONTEXT.strip(), ""]
|
||||||
name = candidate_name or _candidate
|
|
||||||
parts = [ctx.strip(), ""]
|
|
||||||
if examples:
|
if examples:
|
||||||
parts.append(f"=== STYLE EXAMPLES ({name}'s past letters) ===\n")
|
parts.append(f"=== STYLE EXAMPLES ({_candidate}'s past letters) ===\n")
|
||||||
for i, ex in enumerate(examples, 1):
|
for i, ex in enumerate(examples, 1):
|
||||||
parts.append(f"--- Example {i} ({ex['company']}) ---")
|
parts.append(f"--- Example {i} ({ex['company']}) ---")
|
||||||
parts.append(ex["text"])
|
parts.append(ex["text"])
|
||||||
|
|
@ -241,14 +231,13 @@ def build_prompt(
|
||||||
return "\n".join(parts)
|
return "\n".join(parts)
|
||||||
|
|
||||||
|
|
||||||
def _trim_to_letter_end(text: str, profile=None) -> str:
|
def _trim_to_letter_end(text: str) -> str:
|
||||||
"""Remove repetitive hallucinated content after the first complete sign-off.
|
"""Remove repetitive hallucinated content after the first complete sign-off.
|
||||||
|
|
||||||
Fine-tuned models sometimes loop after completing the letter. This cuts at
|
Fine-tuned models sometimes loop after completing the letter. This cuts at
|
||||||
the first closing + candidate name so only the intended letter is saved.
|
the first closing + candidate name so only the intended letter is saved.
|
||||||
"""
|
"""
|
||||||
p = profile or _profile
|
candidate_first = (_profile.name.split()[0] if _profile else "").strip()
|
||||||
candidate_first = (p.name.split()[0] if p else "").strip()
|
|
||||||
pattern = (
|
pattern = (
|
||||||
r'(?:Warm regards|Sincerely|Best regards|Kind regards|Thank you)[,.]?\s*\n+\s*'
|
r'(?:Warm regards|Sincerely|Best regards|Kind regards|Thank you)[,.]?\s*\n+\s*'
|
||||||
+ (re.escape(candidate_first) if candidate_first else r'\w+(?:\s+\w+)?')
|
+ (re.escape(candidate_first) if candidate_first else r'\w+(?:\s+\w+)?')
|
||||||
|
|
@ -268,8 +257,6 @@ def generate(
|
||||||
feedback: str = "",
|
feedback: str = "",
|
||||||
is_jobgether: bool = False,
|
is_jobgether: bool = False,
|
||||||
_router=None,
|
_router=None,
|
||||||
config_path: "Path | None" = None,
|
|
||||||
user_yaml_path: "Path | None" = None,
|
|
||||||
) -> str:
|
) -> str:
|
||||||
"""Generate a cover letter and return it as a string.
|
"""Generate a cover letter and return it as a string.
|
||||||
|
|
||||||
|
|
@ -277,29 +264,15 @@ def generate(
|
||||||
and requested changes are appended to the prompt so the LLM revises rather
|
and requested changes are appended to the prompt so the LLM revises rather
|
||||||
than starting from scratch.
|
than starting from scratch.
|
||||||
|
|
||||||
user_yaml_path overrides the module-level profile — required in cloud mode
|
|
||||||
so each user's name/voice/mission prefs are used instead of the global default.
|
|
||||||
|
|
||||||
_router is an optional pre-built LLMRouter (used in tests to avoid real LLM calls).
|
_router is an optional pre-built LLMRouter (used in tests to avoid real LLM calls).
|
||||||
"""
|
"""
|
||||||
# Per-call profile override (cloud mode: each user has their own user.yaml)
|
|
||||||
if user_yaml_path and Path(user_yaml_path).exists():
|
|
||||||
_prof = UserProfile(Path(user_yaml_path))
|
|
||||||
else:
|
|
||||||
_prof = _profile
|
|
||||||
|
|
||||||
sys_ctx = _build_system_context(_prof)
|
|
||||||
mission_notes = _build_mission_notes(_prof, candidate_name=(_prof.name if _prof else None))
|
|
||||||
candidate_name = _prof.name if _prof else _candidate
|
|
||||||
|
|
||||||
corpus = load_corpus()
|
corpus = load_corpus()
|
||||||
examples = find_similar_letters(description or f"{title} {company}", corpus)
|
examples = find_similar_letters(description or f"{title} {company}", corpus)
|
||||||
mission_hint = detect_mission_alignment(company, description, mission_notes=mission_notes)
|
mission_hint = detect_mission_alignment(company, description)
|
||||||
if mission_hint:
|
if mission_hint:
|
||||||
print(f"[cover-letter] Mission alignment detected for {company}", file=sys.stderr)
|
print(f"[cover-letter] Mission alignment detected for {company}", file=sys.stderr)
|
||||||
prompt = build_prompt(title, company, description, examples,
|
prompt = build_prompt(title, company, description, examples,
|
||||||
mission_hint=mission_hint, is_jobgether=is_jobgether,
|
mission_hint=mission_hint, is_jobgether=is_jobgether)
|
||||||
system_context=sys_ctx, candidate_name=candidate_name)
|
|
||||||
|
|
||||||
if previous_result:
|
if previous_result:
|
||||||
prompt += f"\n\n---\nPrevious draft:\n{previous_result}"
|
prompt += f"\n\n---\nPrevious draft:\n{previous_result}"
|
||||||
|
|
@ -308,9 +281,8 @@ def generate(
|
||||||
|
|
||||||
if _router is None:
|
if _router is None:
|
||||||
sys.path.insert(0, str(Path(__file__).parent.parent))
|
sys.path.insert(0, str(Path(__file__).parent.parent))
|
||||||
from scripts.llm_router import LLMRouter, CONFIG_PATH
|
from scripts.llm_router import LLMRouter
|
||||||
resolved = config_path if (config_path and Path(config_path).exists()) else CONFIG_PATH
|
_router = LLMRouter()
|
||||||
_router = LLMRouter(resolved)
|
|
||||||
|
|
||||||
print(f"[cover-letter] Generating for: {title} @ {company}", file=sys.stderr)
|
print(f"[cover-letter] Generating for: {title} @ {company}", file=sys.stderr)
|
||||||
print(f"[cover-letter] Style examples: {[e['company'] for e in examples]}", file=sys.stderr)
|
print(f"[cover-letter] Style examples: {[e['company'] for e in examples]}", file=sys.stderr)
|
||||||
|
|
@ -320,7 +292,7 @@ def generate(
|
||||||
# max_tokens=1200 caps generation at ~900 words — enough for any cover letter
|
# max_tokens=1200 caps generation at ~900 words — enough for any cover letter
|
||||||
# and prevents fine-tuned models from looping into repetitive garbage output.
|
# and prevents fine-tuned models from looping into repetitive garbage output.
|
||||||
result = _router.complete(prompt, max_tokens=1200)
|
result = _router.complete(prompt, max_tokens=1200)
|
||||||
return _trim_to_letter_end(result, _prof)
|
return _trim_to_letter_end(result)
|
||||||
|
|
||||||
|
|
||||||
def main() -> None:
|
def main() -> None:
|
||||||
|
|
|
||||||
|
|
@ -1,46 +1,19 @@
|
||||||
"""
|
"""
|
||||||
LLM abstraction layer with priority fallback chain.
|
LLM abstraction layer with priority fallback chain.
|
||||||
Config lookup order:
|
Reads config/llm.yaml. Tries backends in order; falls back on any error.
|
||||||
1. <repo>/config/llm.yaml — per-install local config
|
|
||||||
2. ~/.config/circuitforge/llm.yaml — user-level config (circuitforge-core default)
|
|
||||||
3. env-var auto-config (ANTHROPIC_API_KEY, OPENAI_API_KEY, OLLAMA_HOST, …)
|
|
||||||
"""
|
"""
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
|
||||||
from circuitforge_core.llm import LLMRouter as _CoreLLMRouter
|
from circuitforge_core.llm import LLMRouter as _CoreLLMRouter
|
||||||
|
|
||||||
# Kept for backwards-compatibility — external callers that import CONFIG_PATH
|
|
||||||
# from this module continue to work.
|
|
||||||
CONFIG_PATH = Path(__file__).parent.parent / "config" / "llm.yaml"
|
CONFIG_PATH = Path(__file__).parent.parent / "config" / "llm.yaml"
|
||||||
|
|
||||||
|
|
||||||
class LLMRouter(_CoreLLMRouter):
|
class LLMRouter(_CoreLLMRouter):
|
||||||
"""Peregrine-specific LLMRouter — tri-level config path priority.
|
"""Peregrine-specific LLMRouter — defaults to Peregrine's config/llm.yaml."""
|
||||||
|
|
||||||
When ``config_path`` is supplied (e.g. in tests) it is passed straight
|
def __init__(self, config_path: Path = CONFIG_PATH):
|
||||||
through to the core. When omitted, the lookup order is:
|
|
||||||
1. <repo>/config/llm.yaml (per-install local config)
|
|
||||||
2. ~/.config/circuitforge/llm.yaml (user-level, circuitforge-core default)
|
|
||||||
3. env-var auto-config (ANTHROPIC_API_KEY, OPENAI_API_KEY, OLLAMA_HOST …)
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self, config_path: Path | None = None) -> None:
|
|
||||||
if config_path is not None:
|
|
||||||
# Explicit path supplied — use it directly (e.g. tests, CLI override).
|
|
||||||
super().__init__(config_path)
|
super().__init__(config_path)
|
||||||
return
|
|
||||||
|
|
||||||
local = Path(__file__).parent.parent / "config" / "llm.yaml"
|
|
||||||
user_level = Path.home() / ".config" / "circuitforge" / "llm.yaml"
|
|
||||||
if local.exists():
|
|
||||||
super().__init__(local)
|
|
||||||
elif user_level.exists():
|
|
||||||
super().__init__(user_level)
|
|
||||||
else:
|
|
||||||
# No yaml found — let circuitforge-core's env-var auto-config run.
|
|
||||||
# The core default CONFIG_PATH (~/.config/circuitforge/llm.yaml)
|
|
||||||
# won't exist either, so _auto_config_from_env() will be triggered.
|
|
||||||
super().__init__()
|
|
||||||
|
|
||||||
|
|
||||||
# Module-level singleton for convenience
|
# Module-level singleton for convenience
|
||||||
|
|
|
||||||
|
|
@ -47,7 +47,7 @@ OVERRIDE_YML = ROOT / "compose.override.yml"
|
||||||
_SERVICES: dict[str, tuple[str, int, str, bool, bool]] = {
|
_SERVICES: dict[str, tuple[str, int, str, bool, bool]] = {
|
||||||
"streamlit": ("streamlit_port", 8501, "STREAMLIT_PORT", True, False),
|
"streamlit": ("streamlit_port", 8501, "STREAMLIT_PORT", True, False),
|
||||||
"searxng": ("searxng_port", 8888, "SEARXNG_PORT", True, True),
|
"searxng": ("searxng_port", 8888, "SEARXNG_PORT", True, True),
|
||||||
# vllm removed — now managed by cf-orch (host process), not a Docker service
|
"vllm": ("vllm_port", 8000, "VLLM_PORT", True, True),
|
||||||
"vision": ("vision_port", 8002, "VISION_PORT", True, True),
|
"vision": ("vision_port", 8002, "VISION_PORT", True, True),
|
||||||
"ollama": ("ollama_port", 11434, "OLLAMA_PORT", True, True),
|
"ollama": ("ollama_port", 11434, "OLLAMA_PORT", True, True),
|
||||||
"ollama_research": ("ollama_research_port", 11435, "OLLAMA_RESEARCH_PORT", True, True),
|
"ollama_research": ("ollama_research_port", 11435, "OLLAMA_RESEARCH_PORT", True, True),
|
||||||
|
|
@ -65,6 +65,7 @@ _LLM_BACKENDS: dict[str, list[tuple[str, str]]] = {
|
||||||
_DOCKER_INTERNAL: dict[str, tuple[str, int]] = {
|
_DOCKER_INTERNAL: dict[str, tuple[str, int]] = {
|
||||||
"ollama": ("ollama", 11434),
|
"ollama": ("ollama", 11434),
|
||||||
"ollama_research": ("ollama_research", 11434), # container-internal port is always 11434
|
"ollama_research": ("ollama_research", 11434), # container-internal port is always 11434
|
||||||
|
"vllm": ("vllm", 8000),
|
||||||
"vision": ("vision", 8002),
|
"vision": ("vision", 8002),
|
||||||
"searxng": ("searxng", 8080), # searxng internal port differs from host port
|
"searxng": ("searxng", 8080), # searxng internal port differs from host port
|
||||||
}
|
}
|
||||||
|
|
@ -492,12 +493,6 @@ def main() -> None:
|
||||||
# binds a harmless free port instead of conflicting with the external service.
|
# binds a harmless free port instead of conflicting with the external service.
|
||||||
env_updates: dict[str, str] = {i["env_var"]: str(i["stub_port"]) for i in ports.values()}
|
env_updates: dict[str, str] = {i["env_var"]: str(i["stub_port"]) for i in ports.values()}
|
||||||
env_updates["RECOMMENDED_PROFILE"] = profile
|
env_updates["RECOMMENDED_PROFILE"] = profile
|
||||||
# When Ollama is adopted from the host process, write OLLAMA_HOST so
|
|
||||||
# LLMRouter's env-var auto-config finds it without needing config/llm.yaml.
|
|
||||||
ollama_info = ports.get("ollama")
|
|
||||||
if ollama_info and ollama_info.get("external"):
|
|
||||||
env_updates["OLLAMA_HOST"] = f"http://host.docker.internal:{ollama_info['resolved']}"
|
|
||||||
|
|
||||||
if offload_gb > 0:
|
if offload_gb > 0:
|
||||||
env_updates["CPU_OFFLOAD_GB"] = str(offload_gb)
|
env_updates["CPU_OFFLOAD_GB"] = str(offload_gb)
|
||||||
# GPU info for the app container (which lacks nvidia-smi access)
|
# GPU info for the app container (which lacks nvidia-smi access)
|
||||||
|
|
|
||||||
|
|
@ -166,8 +166,7 @@ def _run_task(db_path: Path, task_id: int, task_type: str, job_id: int,
|
||||||
)
|
)
|
||||||
return
|
return
|
||||||
from scripts.discover import run_discovery
|
from scripts.discover import run_discovery
|
||||||
from pathlib import Path as _Path
|
new_count = run_discovery(db_path)
|
||||||
new_count = run_discovery(db_path, config_dir=_Path(db_path).parent / "config")
|
|
||||||
n = new_count or 0
|
n = new_count or 0
|
||||||
update_task_status(
|
update_task_status(
|
||||||
db_path, task_id, "completed",
|
db_path, task_id, "completed",
|
||||||
|
|
@ -179,9 +178,6 @@ def _run_task(db_path: Path, task_id: int, task_type: str, job_id: int,
|
||||||
import json as _json
|
import json as _json
|
||||||
p = _json.loads(params or "{}")
|
p = _json.loads(params or "{}")
|
||||||
from scripts.generate_cover_letter import generate
|
from scripts.generate_cover_letter import generate
|
||||||
_cfg_dir = Path(db_path).parent / "config"
|
|
||||||
_user_llm_cfg = _cfg_dir / "llm.yaml"
|
|
||||||
_user_yaml = _cfg_dir / "user.yaml"
|
|
||||||
result = generate(
|
result = generate(
|
||||||
job.get("title", ""),
|
job.get("title", ""),
|
||||||
job.get("company", ""),
|
job.get("company", ""),
|
||||||
|
|
@ -189,8 +185,6 @@ def _run_task(db_path: Path, task_id: int, task_type: str, job_id: int,
|
||||||
previous_result=p.get("previous_result", ""),
|
previous_result=p.get("previous_result", ""),
|
||||||
feedback=p.get("feedback", ""),
|
feedback=p.get("feedback", ""),
|
||||||
is_jobgether=job.get("source") == "jobgether",
|
is_jobgether=job.get("source") == "jobgether",
|
||||||
config_path=_user_llm_cfg,
|
|
||||||
user_yaml_path=_user_yaml,
|
|
||||||
)
|
)
|
||||||
update_cover_letter(db_path, job_id, result)
|
update_cover_letter(db_path, job_id, result)
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -15,14 +15,13 @@ Public API (unchanged — callers do not need to change):
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
import logging
|
import logging
|
||||||
import os
|
|
||||||
import threading
|
import threading
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from typing import Callable, Optional
|
from typing import Callable, Optional
|
||||||
|
|
||||||
from circuitforge_core.tasks.scheduler import (
|
from circuitforge_core.tasks.scheduler import (
|
||||||
TaskSpec, # re-export unchanged
|
TaskSpec, # re-export unchanged
|
||||||
LocalScheduler as _CoreTaskScheduler,
|
TaskScheduler as _CoreTaskScheduler,
|
||||||
)
|
)
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
@ -94,6 +93,15 @@ class TaskScheduler(_CoreTaskScheduler):
|
||||||
def __init__(self, db_path: Path, run_task_fn: Callable) -> None:
|
def __init__(self, db_path: Path, run_task_fn: Callable) -> None:
|
||||||
budgets, max_depth = _load_config_overrides(db_path)
|
budgets, max_depth = _load_config_overrides(db_path)
|
||||||
|
|
||||||
|
# Resolve VRAM using module-level _get_gpus so tests can monkeypatch it
|
||||||
|
try:
|
||||||
|
gpus = _get_gpus()
|
||||||
|
available_vram: float = (
|
||||||
|
sum(g["vram_total_gb"] for g in gpus) if gpus else 999.0
|
||||||
|
)
|
||||||
|
except Exception:
|
||||||
|
available_vram = 999.0
|
||||||
|
|
||||||
# Warn under this module's logger for any task types with no VRAM budget
|
# Warn under this module's logger for any task types with no VRAM budget
|
||||||
# (mirrors the core warning but captures under scripts.task_scheduler
|
# (mirrors the core warning but captures under scripts.task_scheduler
|
||||||
# so existing tests using caplog.at_level(logger="scripts.task_scheduler") pass)
|
# so existing tests using caplog.at_level(logger="scripts.task_scheduler") pass)
|
||||||
|
|
@ -109,6 +117,7 @@ class TaskScheduler(_CoreTaskScheduler):
|
||||||
run_task_fn=run_task_fn,
|
run_task_fn=run_task_fn,
|
||||||
task_types=LLM_TASK_TYPES,
|
task_types=LLM_TASK_TYPES,
|
||||||
vram_budgets=budgets,
|
vram_budgets=budgets,
|
||||||
|
available_vram_gb=available_vram,
|
||||||
max_queue_depth=max_depth,
|
max_queue_depth=max_depth,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -1,5 +1,5 @@
|
||||||
#!/usr/bin/env bash
|
#!/usr/bin/env bash
|
||||||
# install.sh — Peregrine dependency installer
|
# setup.sh — Peregrine dependency installer
|
||||||
# Installs Docker, Docker Compose v2, and (optionally) NVIDIA Container Toolkit.
|
# Installs Docker, Docker Compose v2, and (optionally) NVIDIA Container Toolkit.
|
||||||
# Supports: Ubuntu/Debian, Fedora/RHEL/CentOS, Arch Linux, macOS (Homebrew).
|
# Supports: Ubuntu/Debian, Fedora/RHEL/CentOS, Arch Linux, macOS (Homebrew).
|
||||||
# Windows: not supported — use WSL2 with Ubuntu.
|
# Windows: not supported — use WSL2 with Ubuntu.
|
||||||
|
|
@ -90,11 +90,15 @@ configure_git_safe_dir() {
|
||||||
}
|
}
|
||||||
|
|
||||||
activate_git_hooks() {
|
activate_git_hooks() {
|
||||||
local repo_dir
|
local repo_dir hooks_installer
|
||||||
repo_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
repo_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||||
if [[ -d "$repo_dir/.githooks" ]]; then
|
hooks_installer="/Library/Development/CircuitForge/circuitforge-hooks/install.sh"
|
||||||
|
if [[ -f "$hooks_installer" ]]; then
|
||||||
|
bash "$hooks_installer" --quiet
|
||||||
|
success "CircuitForge hooks activated (circuitforge-hooks)."
|
||||||
|
elif [[ -d "$repo_dir/.githooks" ]]; then
|
||||||
git -C "$repo_dir" config core.hooksPath .githooks
|
git -C "$repo_dir" config core.hooksPath .githooks
|
||||||
success "Git hooks activated (.githooks/)."
|
success "Git hooks activated (.githooks/) — circuitforge-hooks not found, using local fallback."
|
||||||
fi
|
fi
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -337,31 +341,6 @@ setup_env() {
|
||||||
fi
|
fi
|
||||||
}
|
}
|
||||||
|
|
||||||
# ── License key (optional) ────────────────────────────────────────────────────
|
|
||||||
capture_license_key() {
|
|
||||||
[[ ! -t 0 ]] && return # skip in non-interactive installs (curl | bash)
|
|
||||||
local env_file
|
|
||||||
env_file="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)/.env"
|
|
||||||
[[ ! -f "$env_file" ]] && return # setup_env() creates it; nothing to write into yet
|
|
||||||
|
|
||||||
echo ""
|
|
||||||
info "License key (optional)"
|
|
||||||
echo -e " Peregrine works without a key for personal self-hosted use."
|
|
||||||
echo -e " Paid-tier users: enter your ${YELLOW}CFG-XXXX-…${NC} key to unlock cloud LLM and integrations."
|
|
||||||
echo ""
|
|
||||||
read -rp " CircuitForge license key [press Enter to skip]: " _key || true
|
|
||||||
if [[ -n "$_key" ]]; then
|
|
||||||
if echo "$_key" | grep -qE '^CFG-[A-Z0-9]{4}-[A-Z0-9]{4}-[A-Z0-9]{4}-[A-Z0-9]{4}$'; then
|
|
||||||
_update_env_key "$env_file" "CF_LICENSE_KEY" "$_key"
|
|
||||||
_update_env_key "$env_file" "HEIMDALL_URL" "https://license.circuitforge.tech"
|
|
||||||
success "License key saved — paid-tier features enabled."
|
|
||||||
else
|
|
||||||
warn "Key format looks wrong (expected CFG-XXXX-AAAA-BBBB-CCCC) — skipping."
|
|
||||||
info "Add it manually to .env as CF_LICENSE_KEY= later."
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
}
|
|
||||||
|
|
||||||
# ── Model weights storage ───────────────────────────────────────────────────────
|
# ── Model weights storage ───────────────────────────────────────────────────────
|
||||||
_update_env_key() {
|
_update_env_key() {
|
||||||
# Portable in-place key=value update for .env files (Linux + macOS).
|
# Portable in-place key=value update for .env files (Linux + macOS).
|
||||||
|
|
@ -437,15 +416,8 @@ main() {
|
||||||
fi
|
fi
|
||||||
install_ollama_macos
|
install_ollama_macos
|
||||||
setup_env
|
setup_env
|
||||||
capture_license_key
|
|
||||||
configure_model_paths
|
configure_model_paths
|
||||||
|
|
||||||
# Read the actual port from .env so next-steps reflects any customisation
|
|
||||||
local _script_dir _port
|
|
||||||
_script_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
|
||||||
_port="$(grep -E '^STREAMLIT_PORT=' "$_script_dir/.env" 2>/dev/null | cut -d= -f2-)"
|
|
||||||
_port="${_port:-8502}"
|
|
||||||
|
|
||||||
echo ""
|
echo ""
|
||||||
success "All dependencies installed."
|
success "All dependencies installed."
|
||||||
echo ""
|
echo ""
|
||||||
|
|
@ -457,7 +429,7 @@ main() {
|
||||||
else
|
else
|
||||||
echo -e " ${YELLOW}./manage.sh start --profile cpu${NC} # local Ollama inference (CPU)"
|
echo -e " ${YELLOW}./manage.sh start --profile cpu${NC} # local Ollama inference (CPU)"
|
||||||
fi
|
fi
|
||||||
echo -e " 2. Open ${YELLOW}http://localhost:${_port}${NC} — the setup wizard will guide you"
|
echo -e " 2. Open ${YELLOW}http://localhost:8501${NC} — the setup wizard will guide you"
|
||||||
echo -e " (Tip: edit ${YELLOW}.env${NC} any time to adjust ports or model paths)"
|
echo -e " (Tip: edit ${YELLOW}.env${NC} any time to adjust ports or model paths)"
|
||||||
echo ""
|
echo ""
|
||||||
if groups "$USER" 2>/dev/null | grep -q docker; then
|
if groups "$USER" 2>/dev/null | grep -q docker; then
|
||||||
|
|
@ -80,8 +80,7 @@ class TestTaskRunnerCoverLetterParams:
|
||||||
captured = {}
|
captured = {}
|
||||||
|
|
||||||
def mock_generate(title, company, description="", previous_result="", feedback="",
|
def mock_generate(title, company, description="", previous_result="", feedback="",
|
||||||
is_jobgether=False, _router=None, config_path=None,
|
is_jobgether=False, _router=None):
|
||||||
user_yaml_path=None):
|
|
||||||
captured.update({
|
captured.update({
|
||||||
"title": title, "company": company,
|
"title": title, "company": company,
|
||||||
"previous_result": previous_result, "feedback": feedback,
|
"previous_result": previous_result, "feedback": feedback,
|
||||||
|
|
|
||||||
|
|
@ -1,148 +0,0 @@
|
||||||
"""Tests for scripts/db_migrate.py — numbered SQL migration runner."""
|
|
||||||
|
|
||||||
import sqlite3
|
|
||||||
import textwrap
|
|
||||||
from pathlib import Path
|
|
||||||
|
|
||||||
import pytest
|
|
||||||
|
|
||||||
from scripts.db_migrate import migrate_db
|
|
||||||
|
|
||||||
|
|
||||||
# ── helpers ───────────────────────────────────────────────────────────────────
|
|
||||||
|
|
||||||
def _applied(db_path: Path) -> list[str]:
|
|
||||||
con = sqlite3.connect(db_path)
|
|
||||||
try:
|
|
||||||
rows = con.execute("SELECT version FROM schema_migrations ORDER BY version").fetchall()
|
|
||||||
return [r[0] for r in rows]
|
|
||||||
finally:
|
|
||||||
con.close()
|
|
||||||
|
|
||||||
|
|
||||||
def _tables(db_path: Path) -> set[str]:
|
|
||||||
con = sqlite3.connect(db_path)
|
|
||||||
try:
|
|
||||||
rows = con.execute(
|
|
||||||
"SELECT name FROM sqlite_master WHERE type='table' AND name NOT LIKE 'sqlite_%'"
|
|
||||||
).fetchall()
|
|
||||||
return {r[0] for r in rows}
|
|
||||||
finally:
|
|
||||||
con.close()
|
|
||||||
|
|
||||||
|
|
||||||
# ── tests ──────────────────────────────────────────────────────────────────────
|
|
||||||
|
|
||||||
def test_creates_schema_migrations_table(tmp_path):
|
|
||||||
"""Running against an empty DB creates the tracking table."""
|
|
||||||
db = tmp_path / "test.db"
|
|
||||||
(tmp_path / "migrations").mkdir() # empty migrations dir
|
|
||||||
# Patch the module-level _MIGRATIONS_DIR
|
|
||||||
import scripts.db_migrate as m
|
|
||||||
orig = m._MIGRATIONS_DIR
|
|
||||||
m._MIGRATIONS_DIR = tmp_path / "migrations"
|
|
||||||
try:
|
|
||||||
migrate_db(db)
|
|
||||||
assert "schema_migrations" in _tables(db)
|
|
||||||
finally:
|
|
||||||
m._MIGRATIONS_DIR = orig
|
|
||||||
|
|
||||||
|
|
||||||
def test_applies_migration_file(tmp_path):
|
|
||||||
"""A .sql file in migrations/ is applied and recorded."""
|
|
||||||
db = tmp_path / "test.db"
|
|
||||||
mdir = tmp_path / "migrations"
|
|
||||||
mdir.mkdir()
|
|
||||||
(mdir / "001_test.sql").write_text(
|
|
||||||
"CREATE TABLE IF NOT EXISTS widgets (id INTEGER PRIMARY KEY, name TEXT);"
|
|
||||||
)
|
|
||||||
|
|
||||||
import scripts.db_migrate as m
|
|
||||||
orig = m._MIGRATIONS_DIR
|
|
||||||
m._MIGRATIONS_DIR = mdir
|
|
||||||
try:
|
|
||||||
applied = migrate_db(db)
|
|
||||||
assert applied == ["001_test"]
|
|
||||||
assert "widgets" in _tables(db)
|
|
||||||
assert _applied(db) == ["001_test"]
|
|
||||||
finally:
|
|
||||||
m._MIGRATIONS_DIR = orig
|
|
||||||
|
|
||||||
|
|
||||||
def test_idempotent_second_run(tmp_path):
|
|
||||||
"""Running migrate_db twice does not re-apply migrations."""
|
|
||||||
db = tmp_path / "test.db"
|
|
||||||
mdir = tmp_path / "migrations"
|
|
||||||
mdir.mkdir()
|
|
||||||
(mdir / "001_test.sql").write_text(
|
|
||||||
"CREATE TABLE IF NOT EXISTS widgets (id INTEGER PRIMARY KEY, name TEXT);"
|
|
||||||
)
|
|
||||||
|
|
||||||
import scripts.db_migrate as m
|
|
||||||
orig = m._MIGRATIONS_DIR
|
|
||||||
m._MIGRATIONS_DIR = mdir
|
|
||||||
try:
|
|
||||||
migrate_db(db)
|
|
||||||
applied = migrate_db(db) # second run
|
|
||||||
assert applied == []
|
|
||||||
assert _applied(db) == ["001_test"]
|
|
||||||
finally:
|
|
||||||
m._MIGRATIONS_DIR = orig
|
|
||||||
|
|
||||||
|
|
||||||
def test_applies_only_new_migrations(tmp_path):
|
|
||||||
"""Migrations already in schema_migrations are skipped; only new ones run."""
|
|
||||||
db = tmp_path / "test.db"
|
|
||||||
mdir = tmp_path / "migrations"
|
|
||||||
mdir.mkdir()
|
|
||||||
(mdir / "001_first.sql").write_text(
|
|
||||||
"CREATE TABLE IF NOT EXISTS first_table (id INTEGER PRIMARY KEY);"
|
|
||||||
)
|
|
||||||
|
|
||||||
import scripts.db_migrate as m
|
|
||||||
orig = m._MIGRATIONS_DIR
|
|
||||||
m._MIGRATIONS_DIR = mdir
|
|
||||||
try:
|
|
||||||
migrate_db(db)
|
|
||||||
|
|
||||||
# Add a second migration
|
|
||||||
(mdir / "002_second.sql").write_text(
|
|
||||||
"CREATE TABLE IF NOT EXISTS second_table (id INTEGER PRIMARY KEY);"
|
|
||||||
)
|
|
||||||
applied = migrate_db(db)
|
|
||||||
assert applied == ["002_second"]
|
|
||||||
assert set(_applied(db)) == {"001_first", "002_second"}
|
|
||||||
assert "second_table" in _tables(db)
|
|
||||||
finally:
|
|
||||||
m._MIGRATIONS_DIR = orig
|
|
||||||
|
|
||||||
|
|
||||||
def test_migration_failure_raises(tmp_path):
|
|
||||||
"""A bad migration raises RuntimeError and does not record the version."""
|
|
||||||
db = tmp_path / "test.db"
|
|
||||||
mdir = tmp_path / "migrations"
|
|
||||||
mdir.mkdir()
|
|
||||||
(mdir / "001_bad.sql").write_text("THIS IS NOT VALID SQL !!!")
|
|
||||||
|
|
||||||
import scripts.db_migrate as m
|
|
||||||
orig = m._MIGRATIONS_DIR
|
|
||||||
m._MIGRATIONS_DIR = mdir
|
|
||||||
try:
|
|
||||||
with pytest.raises(RuntimeError, match="001_bad"):
|
|
||||||
migrate_db(db)
|
|
||||||
assert _applied(db) == []
|
|
||||||
finally:
|
|
||||||
m._MIGRATIONS_DIR = orig
|
|
||||||
|
|
||||||
|
|
||||||
def test_baseline_migration_runs(tmp_path):
|
|
||||||
"""The real 001_baseline.sql applies cleanly to a fresh database."""
|
|
||||||
db = tmp_path / "test.db"
|
|
||||||
applied = migrate_db(db)
|
|
||||||
assert "001_baseline" in applied
|
|
||||||
expected_tables = {
|
|
||||||
"jobs", "job_contacts", "company_research",
|
|
||||||
"background_tasks", "survey_responses", "digest_queue",
|
|
||||||
"schema_migrations",
|
|
||||||
}
|
|
||||||
assert expected_tables <= _tables(db)
|
|
||||||
|
|
@ -1,133 +0,0 @@
|
||||||
"""Tests for the /api/feedback routes in dev_api."""
|
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
from unittest.mock import MagicMock, patch
|
|
||||||
|
|
||||||
import pytest
|
|
||||||
from fastapi.testclient import TestClient
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
|
||||||
def client(monkeypatch):
|
|
||||||
monkeypatch.delenv("CLOUD_MODE", raising=False)
|
|
||||||
monkeypatch.delenv("DEMO_MODE", raising=False)
|
|
||||||
monkeypatch.delenv("FORGEJO_API_TOKEN", raising=False)
|
|
||||||
from dev_api import app
|
|
||||||
return TestClient(app)
|
|
||||||
|
|
||||||
|
|
||||||
# ---------------------------------------------------------------------------
|
|
||||||
# GET /api/feedback/status
|
|
||||||
# ---------------------------------------------------------------------------
|
|
||||||
|
|
||||||
def test_status_disabled_when_no_token(client):
|
|
||||||
"""Status is disabled when FORGEJO_API_TOKEN is not set."""
|
|
||||||
resp = client.get("/api/feedback/status")
|
|
||||||
assert resp.status_code == 200
|
|
||||||
assert resp.json() == {"enabled": False}
|
|
||||||
|
|
||||||
|
|
||||||
def test_status_enabled_with_token(monkeypatch):
|
|
||||||
"""Status is enabled when token is set and not in demo or cloud mode."""
|
|
||||||
monkeypatch.delenv("CLOUD_MODE", raising=False)
|
|
||||||
monkeypatch.delenv("DEMO_MODE", raising=False)
|
|
||||||
monkeypatch.setenv("FORGEJO_API_TOKEN", "test-token")
|
|
||||||
from dev_api import app
|
|
||||||
c = TestClient(app)
|
|
||||||
resp = c.get("/api/feedback/status")
|
|
||||||
assert resp.status_code == 200
|
|
||||||
assert resp.json() == {"enabled": True}
|
|
||||||
|
|
||||||
|
|
||||||
def test_status_disabled_in_demo_mode(monkeypatch):
|
|
||||||
"""Status is disabled when DEMO_MODE=1 even if token is present."""
|
|
||||||
monkeypatch.setenv("DEMO_MODE", "1")
|
|
||||||
monkeypatch.setenv("FORGEJO_API_TOKEN", "test-token")
|
|
||||||
monkeypatch.delenv("CLOUD_MODE", raising=False)
|
|
||||||
from dev_api import app
|
|
||||||
c = TestClient(app)
|
|
||||||
resp = c.get("/api/feedback/status")
|
|
||||||
assert resp.status_code == 200
|
|
||||||
assert resp.json() == {"enabled": False}
|
|
||||||
|
|
||||||
|
|
||||||
def test_status_disabled_in_cloud_mode(monkeypatch):
|
|
||||||
"""Status is disabled when CLOUD_MODE=1 (peregrine-specific rule).
|
|
||||||
|
|
||||||
_CLOUD_MODE is evaluated at import time, so we patch the module-level
|
|
||||||
bool rather than the env var (the module is already cached in sys.modules).
|
|
||||||
"""
|
|
||||||
import dev_api as _dev_api_mod
|
|
||||||
monkeypatch.setattr(_dev_api_mod, "_CLOUD_MODE", True)
|
|
||||||
monkeypatch.setenv("FORGEJO_API_TOKEN", "test-token")
|
|
||||||
monkeypatch.delenv("DEMO_MODE", raising=False)
|
|
||||||
c = TestClient(_dev_api_mod.app)
|
|
||||||
resp = c.get("/api/feedback/status")
|
|
||||||
assert resp.status_code == 200
|
|
||||||
assert resp.json() == {"enabled": False}
|
|
||||||
|
|
||||||
|
|
||||||
# ---------------------------------------------------------------------------
|
|
||||||
# POST /api/feedback
|
|
||||||
# ---------------------------------------------------------------------------
|
|
||||||
|
|
||||||
_FEEDBACK_PAYLOAD = {
|
|
||||||
"title": "Test feedback",
|
|
||||||
"description": "Something broke.",
|
|
||||||
"type": "bug",
|
|
||||||
"repro": "Click the button.",
|
|
||||||
"tab": "Job Review",
|
|
||||||
"submitter": "tester@example.com",
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
def test_post_feedback_503_when_no_token(client):
|
|
||||||
"""POST returns 503 when FORGEJO_API_TOKEN is not configured."""
|
|
||||||
resp = client.post("/api/feedback", json=_FEEDBACK_PAYLOAD)
|
|
||||||
assert resp.status_code == 503
|
|
||||||
assert "FORGEJO_API_TOKEN" in resp.json()["detail"]
|
|
||||||
|
|
||||||
|
|
||||||
def test_post_feedback_403_in_demo_mode(monkeypatch):
|
|
||||||
"""POST returns 403 when DEMO_MODE=1."""
|
|
||||||
monkeypatch.setenv("DEMO_MODE", "1")
|
|
||||||
monkeypatch.setenv("FORGEJO_API_TOKEN", "test-token")
|
|
||||||
monkeypatch.delenv("CLOUD_MODE", raising=False)
|
|
||||||
from dev_api import app
|
|
||||||
c = TestClient(app)
|
|
||||||
resp = c.post("/api/feedback", json=_FEEDBACK_PAYLOAD)
|
|
||||||
assert resp.status_code == 403
|
|
||||||
assert "demo" in resp.json()["detail"].lower()
|
|
||||||
|
|
||||||
|
|
||||||
def test_post_feedback_200_creates_issue(monkeypatch):
|
|
||||||
"""POST returns 200 with issue_number and issue_url when Forgejo calls succeed."""
|
|
||||||
monkeypatch.setenv("FORGEJO_API_TOKEN", "test-token")
|
|
||||||
monkeypatch.delenv("CLOUD_MODE", raising=False)
|
|
||||||
monkeypatch.delenv("DEMO_MODE", raising=False)
|
|
||||||
|
|
||||||
mock_get_resp = MagicMock()
|
|
||||||
mock_get_resp.ok = True
|
|
||||||
mock_get_resp.json.return_value = [
|
|
||||||
{"name": "beta-feedback", "id": 1},
|
|
||||||
{"name": "needs-triage", "id": 2},
|
|
||||||
{"name": "bug", "id": 3},
|
|
||||||
]
|
|
||||||
|
|
||||||
mock_post_resp = MagicMock()
|
|
||||||
mock_post_resp.ok = True
|
|
||||||
mock_post_resp.json.return_value = {
|
|
||||||
"number": 42,
|
|
||||||
"html_url": "https://git.opensourcesolarpunk.com/Circuit-Forge/peregrine/issues/42",
|
|
||||||
}
|
|
||||||
|
|
||||||
with patch("circuitforge_core.api.feedback.requests.get", return_value=mock_get_resp), \
|
|
||||||
patch("circuitforge_core.api.feedback.requests.post", return_value=mock_post_resp):
|
|
||||||
from dev_api import app
|
|
||||||
c = TestClient(app)
|
|
||||||
resp = c.post("/api/feedback", json=_FEEDBACK_PAYLOAD)
|
|
||||||
|
|
||||||
assert resp.status_code == 200
|
|
||||||
body = resp.json()
|
|
||||||
assert body["issue_number"] == 42
|
|
||||||
assert "peregrine/issues/42" in body["issue_url"]
|
|
||||||
|
|
@ -145,7 +145,7 @@ def test_get_resume_missing_returns_not_exists(tmp_path, monkeypatch):
|
||||||
"""GET /api/settings/resume when file missing returns {exists: false}."""
|
"""GET /api/settings/resume when file missing returns {exists: false}."""
|
||||||
fake_path = tmp_path / "config" / "plain_text_resume.yaml"
|
fake_path = tmp_path / "config" / "plain_text_resume.yaml"
|
||||||
# Ensure the path doesn't exist
|
# Ensure the path doesn't exist
|
||||||
monkeypatch.setattr("dev_api._resume_path", lambda: fake_path)
|
monkeypatch.setattr("dev_api.RESUME_PATH", fake_path)
|
||||||
|
|
||||||
from dev_api import app
|
from dev_api import app
|
||||||
c = TestClient(app)
|
c = TestClient(app)
|
||||||
|
|
@ -157,7 +157,7 @@ def test_get_resume_missing_returns_not_exists(tmp_path, monkeypatch):
|
||||||
def test_post_resume_blank_creates_file(tmp_path, monkeypatch):
|
def test_post_resume_blank_creates_file(tmp_path, monkeypatch):
|
||||||
"""POST /api/settings/resume/blank creates the file."""
|
"""POST /api/settings/resume/blank creates the file."""
|
||||||
fake_path = tmp_path / "config" / "plain_text_resume.yaml"
|
fake_path = tmp_path / "config" / "plain_text_resume.yaml"
|
||||||
monkeypatch.setattr("dev_api._resume_path", lambda: fake_path)
|
monkeypatch.setattr("dev_api.RESUME_PATH", fake_path)
|
||||||
|
|
||||||
from dev_api import app
|
from dev_api import app
|
||||||
c = TestClient(app)
|
c = TestClient(app)
|
||||||
|
|
@ -170,7 +170,7 @@ def test_post_resume_blank_creates_file(tmp_path, monkeypatch):
|
||||||
def test_get_resume_after_blank_returns_exists(tmp_path, monkeypatch):
|
def test_get_resume_after_blank_returns_exists(tmp_path, monkeypatch):
|
||||||
"""GET /api/settings/resume after blank creation returns {exists: true}."""
|
"""GET /api/settings/resume after blank creation returns {exists: true}."""
|
||||||
fake_path = tmp_path / "config" / "plain_text_resume.yaml"
|
fake_path = tmp_path / "config" / "plain_text_resume.yaml"
|
||||||
monkeypatch.setattr("dev_api._resume_path", lambda: fake_path)
|
monkeypatch.setattr("dev_api.RESUME_PATH", fake_path)
|
||||||
|
|
||||||
from dev_api import app
|
from dev_api import app
|
||||||
c = TestClient(app)
|
c = TestClient(app)
|
||||||
|
|
@ -212,7 +212,7 @@ def test_get_search_prefs_returns_dict(tmp_path, monkeypatch):
|
||||||
fake_path.parent.mkdir(parents=True, exist_ok=True)
|
fake_path.parent.mkdir(parents=True, exist_ok=True)
|
||||||
with open(fake_path, "w") as f:
|
with open(fake_path, "w") as f:
|
||||||
yaml.dump({"default": {"remote_preference": "remote", "job_boards": []}}, f)
|
yaml.dump({"default": {"remote_preference": "remote", "job_boards": []}}, f)
|
||||||
monkeypatch.setattr("dev_api._search_prefs_path", lambda: fake_path)
|
monkeypatch.setattr("dev_api.SEARCH_PREFS_PATH", fake_path)
|
||||||
|
|
||||||
from dev_api import app
|
from dev_api import app
|
||||||
c = TestClient(app)
|
c = TestClient(app)
|
||||||
|
|
@ -227,7 +227,7 @@ def test_put_get_search_roundtrip(tmp_path, monkeypatch):
|
||||||
"""PUT then GET search prefs round-trip: saved field is returned."""
|
"""PUT then GET search prefs round-trip: saved field is returned."""
|
||||||
fake_path = tmp_path / "config" / "search_profiles.yaml"
|
fake_path = tmp_path / "config" / "search_profiles.yaml"
|
||||||
fake_path.parent.mkdir(parents=True, exist_ok=True)
|
fake_path.parent.mkdir(parents=True, exist_ok=True)
|
||||||
monkeypatch.setattr("dev_api._search_prefs_path", lambda: fake_path)
|
monkeypatch.setattr("dev_api.SEARCH_PREFS_PATH", fake_path)
|
||||||
|
|
||||||
from dev_api import app
|
from dev_api import app
|
||||||
c = TestClient(app)
|
c = TestClient(app)
|
||||||
|
|
@ -253,7 +253,7 @@ def test_put_get_search_roundtrip(tmp_path, monkeypatch):
|
||||||
def test_get_search_missing_file_returns_empty(tmp_path, monkeypatch):
|
def test_get_search_missing_file_returns_empty(tmp_path, monkeypatch):
|
||||||
"""GET /api/settings/search when file missing returns empty dict."""
|
"""GET /api/settings/search when file missing returns empty dict."""
|
||||||
fake_path = tmp_path / "config" / "search_profiles.yaml"
|
fake_path = tmp_path / "config" / "search_profiles.yaml"
|
||||||
monkeypatch.setattr("dev_api._search_prefs_path", lambda: fake_path)
|
monkeypatch.setattr("dev_api.SEARCH_PREFS_PATH", fake_path)
|
||||||
|
|
||||||
from dev_api import app
|
from dev_api import app
|
||||||
c = TestClient(app)
|
c = TestClient(app)
|
||||||
|
|
@ -363,7 +363,7 @@ def test_get_services_cpu_profile(client):
|
||||||
def test_get_email_has_password_set_bool(tmp_path, monkeypatch):
|
def test_get_email_has_password_set_bool(tmp_path, monkeypatch):
|
||||||
"""GET /api/settings/system/email has password_set (bool) and no password key."""
|
"""GET /api/settings/system/email has password_set (bool) and no password key."""
|
||||||
fake_email_path = tmp_path / "email.yaml"
|
fake_email_path = tmp_path / "email.yaml"
|
||||||
monkeypatch.setattr("dev_api._config_dir", lambda: fake_email_path.parent)
|
monkeypatch.setattr("dev_api.EMAIL_PATH", fake_email_path)
|
||||||
with patch("dev_api.get_credential", return_value=None):
|
with patch("dev_api.get_credential", return_value=None):
|
||||||
from dev_api import app
|
from dev_api import app
|
||||||
c = TestClient(app)
|
c = TestClient(app)
|
||||||
|
|
@ -378,7 +378,7 @@ def test_get_email_has_password_set_bool(tmp_path, monkeypatch):
|
||||||
def test_get_email_password_set_true_when_stored(tmp_path, monkeypatch):
|
def test_get_email_password_set_true_when_stored(tmp_path, monkeypatch):
|
||||||
"""password_set is True when credential is stored."""
|
"""password_set is True when credential is stored."""
|
||||||
fake_email_path = tmp_path / "email.yaml"
|
fake_email_path = tmp_path / "email.yaml"
|
||||||
monkeypatch.setattr("dev_api._config_dir", lambda: fake_email_path.parent)
|
monkeypatch.setattr("dev_api.EMAIL_PATH", fake_email_path)
|
||||||
with patch("dev_api.get_credential", return_value="secret"):
|
with patch("dev_api.get_credential", return_value="secret"):
|
||||||
from dev_api import app
|
from dev_api import app
|
||||||
c = TestClient(app)
|
c = TestClient(app)
|
||||||
|
|
@ -426,14 +426,10 @@ def test_finetune_status_returns_status_and_pairs_count(client):
|
||||||
assert "pairs_count" in data
|
assert "pairs_count" in data
|
||||||
|
|
||||||
|
|
||||||
def test_finetune_status_idle_when_no_task(tmp_path, monkeypatch):
|
def test_finetune_status_idle_when_no_task(client):
|
||||||
"""Status is 'idle' and pairs_count is 0 when no task exists."""
|
"""Status is 'idle' and pairs_count is 0 when no task exists."""
|
||||||
fake_jsonl = tmp_path / "cover_letters.jsonl" # does not exist -> 0 pairs
|
|
||||||
monkeypatch.setattr("dev_api._TRAINING_JSONL", fake_jsonl)
|
|
||||||
with patch("scripts.task_runner.get_task_status", return_value=None, create=True):
|
with patch("scripts.task_runner.get_task_status", return_value=None, create=True):
|
||||||
from dev_api import app
|
resp = client.get("/api/settings/fine-tune/status")
|
||||||
c = TestClient(app)
|
|
||||||
resp = c.get("/api/settings/fine-tune/status")
|
|
||||||
assert resp.status_code == 200
|
assert resp.status_code == 200
|
||||||
data = resp.json()
|
data = resp.json()
|
||||||
assert data["status"] == "idle"
|
assert data["status"] == "idle"
|
||||||
|
|
@ -445,7 +441,7 @@ def test_finetune_status_idle_when_no_task(tmp_path, monkeypatch):
|
||||||
def test_get_license_returns_tier_and_active(tmp_path, monkeypatch):
|
def test_get_license_returns_tier_and_active(tmp_path, monkeypatch):
|
||||||
"""GET /api/settings/license returns tier and active fields."""
|
"""GET /api/settings/license returns tier and active fields."""
|
||||||
fake_license = tmp_path / "license.yaml"
|
fake_license = tmp_path / "license.yaml"
|
||||||
monkeypatch.setattr("dev_api._license_path", lambda: fake_license)
|
monkeypatch.setattr("dev_api.LICENSE_PATH", fake_license)
|
||||||
|
|
||||||
from dev_api import app
|
from dev_api import app
|
||||||
c = TestClient(app)
|
c = TestClient(app)
|
||||||
|
|
@ -459,7 +455,7 @@ def test_get_license_returns_tier_and_active(tmp_path, monkeypatch):
|
||||||
def test_get_license_defaults_to_free(tmp_path, monkeypatch):
|
def test_get_license_defaults_to_free(tmp_path, monkeypatch):
|
||||||
"""GET /api/settings/license defaults to free tier when no file."""
|
"""GET /api/settings/license defaults to free tier when no file."""
|
||||||
fake_license = tmp_path / "license.yaml"
|
fake_license = tmp_path / "license.yaml"
|
||||||
monkeypatch.setattr("dev_api._license_path", lambda: fake_license)
|
monkeypatch.setattr("dev_api.LICENSE_PATH", fake_license)
|
||||||
|
|
||||||
from dev_api import app
|
from dev_api import app
|
||||||
c = TestClient(app)
|
c = TestClient(app)
|
||||||
|
|
@ -473,7 +469,8 @@ def test_get_license_defaults_to_free(tmp_path, monkeypatch):
|
||||||
def test_activate_license_valid_key_returns_ok(tmp_path, monkeypatch):
|
def test_activate_license_valid_key_returns_ok(tmp_path, monkeypatch):
|
||||||
"""POST activate with valid key format returns {ok: true}."""
|
"""POST activate with valid key format returns {ok: true}."""
|
||||||
fake_license = tmp_path / "license.yaml"
|
fake_license = tmp_path / "license.yaml"
|
||||||
monkeypatch.setattr("dev_api._license_path", lambda: fake_license)
|
monkeypatch.setattr("dev_api.LICENSE_PATH", fake_license)
|
||||||
|
monkeypatch.setattr("dev_api.CONFIG_DIR", tmp_path)
|
||||||
|
|
||||||
from dev_api import app
|
from dev_api import app
|
||||||
c = TestClient(app)
|
c = TestClient(app)
|
||||||
|
|
@ -485,7 +482,8 @@ def test_activate_license_valid_key_returns_ok(tmp_path, monkeypatch):
|
||||||
def test_activate_license_invalid_key_returns_ok_false(tmp_path, monkeypatch):
|
def test_activate_license_invalid_key_returns_ok_false(tmp_path, monkeypatch):
|
||||||
"""POST activate with bad key format returns {ok: false}."""
|
"""POST activate with bad key format returns {ok: false}."""
|
||||||
fake_license = tmp_path / "license.yaml"
|
fake_license = tmp_path / "license.yaml"
|
||||||
monkeypatch.setattr("dev_api._license_path", lambda: fake_license)
|
monkeypatch.setattr("dev_api.LICENSE_PATH", fake_license)
|
||||||
|
monkeypatch.setattr("dev_api.CONFIG_DIR", tmp_path)
|
||||||
|
|
||||||
from dev_api import app
|
from dev_api import app
|
||||||
c = TestClient(app)
|
c = TestClient(app)
|
||||||
|
|
@ -497,7 +495,8 @@ def test_activate_license_invalid_key_returns_ok_false(tmp_path, monkeypatch):
|
||||||
def test_deactivate_license_returns_ok(tmp_path, monkeypatch):
|
def test_deactivate_license_returns_ok(tmp_path, monkeypatch):
|
||||||
"""POST /api/settings/license/deactivate returns 200 with ok."""
|
"""POST /api/settings/license/deactivate returns 200 with ok."""
|
||||||
fake_license = tmp_path / "license.yaml"
|
fake_license = tmp_path / "license.yaml"
|
||||||
monkeypatch.setattr("dev_api._license_path", lambda: fake_license)
|
monkeypatch.setattr("dev_api.LICENSE_PATH", fake_license)
|
||||||
|
monkeypatch.setattr("dev_api.CONFIG_DIR", tmp_path)
|
||||||
|
|
||||||
from dev_api import app
|
from dev_api import app
|
||||||
c = TestClient(app)
|
c = TestClient(app)
|
||||||
|
|
@ -509,7 +508,8 @@ def test_deactivate_license_returns_ok(tmp_path, monkeypatch):
|
||||||
def test_activate_then_deactivate(tmp_path, monkeypatch):
|
def test_activate_then_deactivate(tmp_path, monkeypatch):
|
||||||
"""Activate then deactivate: active goes False."""
|
"""Activate then deactivate: active goes False."""
|
||||||
fake_license = tmp_path / "license.yaml"
|
fake_license = tmp_path / "license.yaml"
|
||||||
monkeypatch.setattr("dev_api._license_path", lambda: fake_license)
|
monkeypatch.setattr("dev_api.LICENSE_PATH", fake_license)
|
||||||
|
monkeypatch.setattr("dev_api.CONFIG_DIR", tmp_path)
|
||||||
|
|
||||||
from dev_api import app
|
from dev_api import app
|
||||||
c = TestClient(app)
|
c = TestClient(app)
|
||||||
|
|
@ -580,7 +580,7 @@ def test_get_developer_returns_expected_fields(tmp_path, monkeypatch):
|
||||||
_write_user_yaml(user_yaml)
|
_write_user_yaml(user_yaml)
|
||||||
monkeypatch.setenv("STAGING_DB", str(db_dir / "staging.db"))
|
monkeypatch.setenv("STAGING_DB", str(db_dir / "staging.db"))
|
||||||
fake_tokens = tmp_path / "tokens.yaml"
|
fake_tokens = tmp_path / "tokens.yaml"
|
||||||
monkeypatch.setattr("dev_api._tokens_path", lambda: fake_tokens)
|
monkeypatch.setattr("dev_api.TOKENS_PATH", fake_tokens)
|
||||||
|
|
||||||
from dev_api import app
|
from dev_api import app
|
||||||
c = TestClient(app)
|
c = TestClient(app)
|
||||||
|
|
@ -602,7 +602,7 @@ def test_put_dev_tier_then_get(tmp_path, monkeypatch):
|
||||||
_write_user_yaml(user_yaml)
|
_write_user_yaml(user_yaml)
|
||||||
monkeypatch.setenv("STAGING_DB", str(db_dir / "staging.db"))
|
monkeypatch.setenv("STAGING_DB", str(db_dir / "staging.db"))
|
||||||
fake_tokens = tmp_path / "tokens.yaml"
|
fake_tokens = tmp_path / "tokens.yaml"
|
||||||
monkeypatch.setattr("dev_api._tokens_path", lambda: fake_tokens)
|
monkeypatch.setattr("dev_api.TOKENS_PATH", fake_tokens)
|
||||||
|
|
||||||
from dev_api import app
|
from dev_api import app
|
||||||
c = TestClient(app)
|
c = TestClient(app)
|
||||||
|
|
|
||||||
|
|
@ -1,132 +0,0 @@
|
||||||
"""Tests for Peregrine's LLMRouter shim — priority fallback logic."""
|
|
||||||
import sys
|
|
||||||
from pathlib import Path
|
|
||||||
from unittest.mock import patch, MagicMock, call
|
|
||||||
|
|
||||||
sys.path.insert(0, str(Path(__file__).parent.parent))
|
|
||||||
|
|
||||||
|
|
||||||
def _import_fresh():
|
|
||||||
"""Import scripts.llm_router fresh (bypass module cache)."""
|
|
||||||
import importlib
|
|
||||||
import scripts.llm_router as mod
|
|
||||||
importlib.reload(mod)
|
|
||||||
return mod
|
|
||||||
|
|
||||||
|
|
||||||
# ---------------------------------------------------------------------------
|
|
||||||
# Test 1: local config/llm.yaml takes priority when it exists
|
|
||||||
# ---------------------------------------------------------------------------
|
|
||||||
|
|
||||||
def test_uses_local_yaml_when_present():
|
|
||||||
"""When config/llm.yaml exists locally, super().__init__ is called with that path."""
|
|
||||||
import scripts.llm_router as shim_mod
|
|
||||||
from circuitforge_core.llm import LLMRouter as _CoreLLMRouter
|
|
||||||
|
|
||||||
local_path = Path(shim_mod.__file__).parent.parent / "config" / "llm.yaml"
|
|
||||||
user_path = Path.home() / ".config" / "circuitforge" / "llm.yaml"
|
|
||||||
|
|
||||||
def fake_exists(self):
|
|
||||||
return self == local_path # only the local path "exists"
|
|
||||||
|
|
||||||
captured = {}
|
|
||||||
|
|
||||||
def fake_core_init(self, config_path=None):
|
|
||||||
captured["config_path"] = config_path
|
|
||||||
self.config = {}
|
|
||||||
|
|
||||||
with patch.object(Path, "exists", fake_exists), \
|
|
||||||
patch.object(_CoreLLMRouter, "__init__", fake_core_init):
|
|
||||||
import importlib
|
|
||||||
import scripts.llm_router as mod
|
|
||||||
importlib.reload(mod)
|
|
||||||
mod.LLMRouter()
|
|
||||||
|
|
||||||
assert captured.get("config_path") == local_path, (
|
|
||||||
f"Expected super().__init__ to be called with local path {local_path}, "
|
|
||||||
f"got {captured.get('config_path')}"
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
# ---------------------------------------------------------------------------
|
|
||||||
# Test 2: falls through to env-var auto-config when neither yaml exists
|
|
||||||
# ---------------------------------------------------------------------------
|
|
||||||
|
|
||||||
def test_falls_through_to_env_when_no_yamls():
|
|
||||||
"""When no yaml files exist, super().__init__ is called with no args (env-var path)."""
|
|
||||||
import scripts.llm_router as shim_mod
|
|
||||||
from circuitforge_core.llm import LLMRouter as _CoreLLMRouter
|
|
||||||
|
|
||||||
captured = {}
|
|
||||||
|
|
||||||
def fake_exists(self):
|
|
||||||
return False # no yaml files exist anywhere
|
|
||||||
|
|
||||||
def fake_core_init(self, config_path=None):
|
|
||||||
# Record whether a path was passed
|
|
||||||
captured["config_path"] = config_path
|
|
||||||
captured["called"] = True
|
|
||||||
self.config = {}
|
|
||||||
|
|
||||||
with patch.object(Path, "exists", fake_exists), \
|
|
||||||
patch.object(_CoreLLMRouter, "__init__", fake_core_init):
|
|
||||||
import importlib
|
|
||||||
import scripts.llm_router as mod
|
|
||||||
importlib.reload(mod)
|
|
||||||
mod.LLMRouter()
|
|
||||||
|
|
||||||
assert captured.get("called"), "super().__init__ was never called"
|
|
||||||
# When called with no args, config_path defaults to None in our mock,
|
|
||||||
# meaning the shim correctly fell through to env-var auto-config
|
|
||||||
assert captured.get("config_path") is None, (
|
|
||||||
f"Expected super().__init__ to be called with no explicit path (None), "
|
|
||||||
f"got {captured.get('config_path')}"
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
# ---------------------------------------------------------------------------
|
|
||||||
# Test 3: module-level complete() singleton is only instantiated once
|
|
||||||
# ---------------------------------------------------------------------------
|
|
||||||
|
|
||||||
def test_complete_singleton_is_reused():
|
|
||||||
"""complete() reuses the same LLMRouter instance across multiple calls."""
|
|
||||||
import importlib
|
|
||||||
import scripts.llm_router as mod
|
|
||||||
importlib.reload(mod)
|
|
||||||
|
|
||||||
# Reset singleton
|
|
||||||
mod._router = None
|
|
||||||
|
|
||||||
instantiation_count = [0]
|
|
||||||
original_init = mod.LLMRouter.__init__
|
|
||||||
|
|
||||||
mock_router = MagicMock()
|
|
||||||
mock_router.complete.return_value = "OK"
|
|
||||||
|
|
||||||
original_class = mod.LLMRouter
|
|
||||||
|
|
||||||
class CountingRouter(original_class):
|
|
||||||
def __init__(self):
|
|
||||||
instantiation_count[0] += 1
|
|
||||||
# Bypass real __init__ to avoid needing config files
|
|
||||||
self.config = {}
|
|
||||||
|
|
||||||
def complete(self, prompt, system=None):
|
|
||||||
return "OK"
|
|
||||||
|
|
||||||
# Patch the class in the module
|
|
||||||
mod.LLMRouter = CountingRouter
|
|
||||||
mod._router = None
|
|
||||||
|
|
||||||
result1 = mod.complete("first call")
|
|
||||||
result2 = mod.complete("second call")
|
|
||||||
|
|
||||||
assert result1 == "OK"
|
|
||||||
assert result2 == "OK"
|
|
||||||
assert instantiation_count[0] == 1, (
|
|
||||||
f"Expected LLMRouter to be instantiated exactly once, "
|
|
||||||
f"got {instantiation_count[0]} instantiation(s)"
|
|
||||||
)
|
|
||||||
|
|
||||||
# Restore
|
|
||||||
mod.LLMRouter = original_class
|
|
||||||
|
|
@ -1,80 +0,0 @@
|
||||||
"""Tests: preflight writes OLLAMA_HOST to .env when Ollama is adopted from host."""
|
|
||||||
import sys
|
|
||||||
from pathlib import Path
|
|
||||||
from unittest.mock import patch, call
|
|
||||||
|
|
||||||
sys.path.insert(0, str(Path(__file__).parent.parent))
|
|
||||||
|
|
||||||
import scripts.preflight as pf
|
|
||||||
|
|
||||||
|
|
||||||
def _make_ports(ollama_external: bool = True, ollama_port: int = 11434) -> dict:
|
|
||||||
"""Build a minimal ports dict as returned by preflight's port-scanning logic."""
|
|
||||||
return {
|
|
||||||
"ollama": {
|
|
||||||
"resolved": ollama_port,
|
|
||||||
"external": ollama_external,
|
|
||||||
"stub_port": 54321,
|
|
||||||
"env_var": "OLLAMA_PORT",
|
|
||||||
"adoptable": True,
|
|
||||||
},
|
|
||||||
"streamlit": {
|
|
||||||
"resolved": 8502,
|
|
||||||
"external": False,
|
|
||||||
"stub_port": 8502,
|
|
||||||
"env_var": "STREAMLIT_PORT",
|
|
||||||
"adoptable": False,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
def _capture_env_updates(ports: dict) -> dict:
|
|
||||||
"""Run the env_updates construction block from preflight.main() and return the result.
|
|
||||||
|
|
||||||
We extract this logic from main() so tests can call it directly without
|
|
||||||
needing to simulate the full CLI argument parsing and system probe flow.
|
|
||||||
The block under test is the `if not args.check_only:` section.
|
|
||||||
"""
|
|
||||||
captured = {}
|
|
||||||
|
|
||||||
def fake_write_env(updates: dict) -> None:
|
|
||||||
captured.update(updates)
|
|
||||||
|
|
||||||
with patch.object(pf, "write_env", side_effect=fake_write_env), \
|
|
||||||
patch.object(pf, "update_llm_yaml"), \
|
|
||||||
patch.object(pf, "write_compose_override"):
|
|
||||||
# Replicate the env_updates block from preflight.main() as faithfully as possible
|
|
||||||
env_updates: dict[str, str] = {i["env_var"]: str(i["stub_port"]) for i in ports.values()}
|
|
||||||
env_updates["RECOMMENDED_PROFILE"] = "single-gpu"
|
|
||||||
|
|
||||||
# ---- Code under test: the OLLAMA_HOST adoption block ----
|
|
||||||
ollama_info = ports.get("ollama")
|
|
||||||
if ollama_info and ollama_info.get("external"):
|
|
||||||
env_updates["OLLAMA_HOST"] = f"http://host.docker.internal:{ollama_info['resolved']}"
|
|
||||||
# ---------------------------------------------------------
|
|
||||||
|
|
||||||
pf.write_env(env_updates)
|
|
||||||
|
|
||||||
return captured
|
|
||||||
|
|
||||||
|
|
||||||
def test_ollama_host_written_when_adopted():
|
|
||||||
"""OLLAMA_HOST is added when Ollama is adopted from the host (external=True)."""
|
|
||||||
ports = _make_ports(ollama_external=True, ollama_port=11434)
|
|
||||||
result = _capture_env_updates(ports)
|
|
||||||
assert "OLLAMA_HOST" in result
|
|
||||||
assert result["OLLAMA_HOST"] == "http://host.docker.internal:11434"
|
|
||||||
|
|
||||||
|
|
||||||
def test_ollama_host_not_written_when_docker_managed():
|
|
||||||
"""OLLAMA_HOST is NOT added when Ollama runs in Docker (external=False)."""
|
|
||||||
ports = _make_ports(ollama_external=False)
|
|
||||||
result = _capture_env_updates(ports)
|
|
||||||
assert "OLLAMA_HOST" not in result
|
|
||||||
|
|
||||||
|
|
||||||
def test_ollama_host_reflects_adopted_port():
|
|
||||||
"""OLLAMA_HOST uses the actual adopted port, not the default."""
|
|
||||||
ports = _make_ports(ollama_external=True, ollama_port=11500)
|
|
||||||
result = _capture_env_updates(ports)
|
|
||||||
assert result["OLLAMA_HOST"] == "http://host.docker.internal:11500"
|
|
||||||
|
|
@ -109,33 +109,24 @@ def test_missing_budget_logs_warning(tmp_db, caplog):
|
||||||
ts.LLM_TASK_TYPES = frozenset(original)
|
ts.LLM_TASK_TYPES = frozenset(original)
|
||||||
|
|
||||||
|
|
||||||
def test_cpu_only_system_creates_scheduler(tmp_db, monkeypatch):
|
def test_cpu_only_system_gets_unlimited_vram(tmp_db, monkeypatch):
|
||||||
"""Scheduler constructs without error when _get_gpus() returns empty list.
|
"""_available_vram is 999.0 when _get_gpus() returns empty list."""
|
||||||
|
# Patch the module-level _get_gpus in task_scheduler (not preflight)
|
||||||
LocalScheduler has no VRAM gating — it runs tasks regardless of GPU count.
|
# so __init__'s _ts_mod._get_gpus() call picks up the mock.
|
||||||
VRAM-aware scheduling is handled by circuitforge_orch's coordinator.
|
|
||||||
"""
|
|
||||||
monkeypatch.setattr("scripts.task_scheduler._get_gpus", lambda: [])
|
monkeypatch.setattr("scripts.task_scheduler._get_gpus", lambda: [])
|
||||||
s = TaskScheduler(tmp_db, _noop_run_task)
|
s = TaskScheduler(tmp_db, _noop_run_task)
|
||||||
# Scheduler still has correct budgets configured; no VRAM attribute expected
|
assert s._available_vram == 999.0
|
||||||
# Scheduler constructed successfully; budgets contain all LLM task types.
|
|
||||||
# Does not assert exact values -- a sibling test may write a config override
|
|
||||||
# to the shared pytest tmp dir, causing _load_config_overrides to pick it up.
|
|
||||||
assert set(s._budgets.keys()) >= LLM_TASK_TYPES
|
|
||||||
|
|
||||||
|
|
||||||
def test_gpu_detection_does_not_affect_local_scheduler(tmp_db, monkeypatch):
|
def test_gpu_vram_summed_across_all_gpus(tmp_db, monkeypatch):
|
||||||
"""LocalScheduler ignores GPU VRAM — it has no _available_vram attribute.
|
"""_available_vram sums vram_total_gb across all detected GPUs."""
|
||||||
|
|
||||||
VRAM-gated concurrency requires circuitforge_orch (Paid tier).
|
|
||||||
"""
|
|
||||||
fake_gpus = [
|
fake_gpus = [
|
||||||
{"name": "RTX 3090", "vram_total_gb": 24.0, "vram_free_gb": 20.0},
|
{"name": "RTX 3090", "vram_total_gb": 24.0, "vram_free_gb": 20.0},
|
||||||
{"name": "RTX 3090", "vram_total_gb": 24.0, "vram_free_gb": 18.0},
|
{"name": "RTX 3090", "vram_total_gb": 24.0, "vram_free_gb": 18.0},
|
||||||
]
|
]
|
||||||
monkeypatch.setattr("scripts.task_scheduler._get_gpus", lambda: fake_gpus)
|
monkeypatch.setattr("scripts.task_scheduler._get_gpus", lambda: fake_gpus)
|
||||||
s = TaskScheduler(tmp_db, _noop_run_task)
|
s = TaskScheduler(tmp_db, _noop_run_task)
|
||||||
assert not hasattr(s, "_available_vram")
|
assert s._available_vram == 48.0
|
||||||
|
|
||||||
|
|
||||||
def test_enqueue_adds_taskspec_to_deque(tmp_db):
|
def test_enqueue_adds_taskspec_to_deque(tmp_db):
|
||||||
|
|
@ -215,37 +206,40 @@ def _make_recording_run_task(log: list, done_event: threading.Event, expected: i
|
||||||
return _run
|
return _run
|
||||||
|
|
||||||
|
|
||||||
def _start_scheduler(tmp_db, run_task_fn):
|
def _start_scheduler(tmp_db, run_task_fn, available_vram=999.0):
|
||||||
s = TaskScheduler(tmp_db, run_task_fn)
|
s = TaskScheduler(tmp_db, run_task_fn)
|
||||||
|
s._available_vram = available_vram
|
||||||
s.start()
|
s.start()
|
||||||
return s
|
return s
|
||||||
|
|
||||||
|
|
||||||
# ── Tests ─────────────────────────────────────────────────────────────────────
|
# ── Tests ─────────────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
def test_all_task_types_complete(tmp_db):
|
def test_deepest_queue_wins_first_slot(tmp_db):
|
||||||
"""Scheduler runs tasks from multiple types; all complete.
|
"""Type with more queued tasks starts first when VRAM only fits one type."""
|
||||||
|
|
||||||
LocalScheduler runs type batches concurrently (no VRAM gating).
|
|
||||||
VRAM-gated sequential scheduling requires circuitforge_orch.
|
|
||||||
"""
|
|
||||||
log, done = [], threading.Event()
|
log, done = [], threading.Event()
|
||||||
|
|
||||||
|
# Build scheduler but DO NOT start it yet — enqueue all tasks first
|
||||||
|
# so the scheduler sees the full picture on its very first wake.
|
||||||
run_task_fn = _make_recording_run_task(log, done, 4)
|
run_task_fn = _make_recording_run_task(log, done, 4)
|
||||||
s = TaskScheduler(tmp_db, run_task_fn)
|
s = TaskScheduler(tmp_db, run_task_fn)
|
||||||
|
s._available_vram = 3.0 # fits cover_letter (2.5) but not +company_research (5.0)
|
||||||
|
|
||||||
|
# Enqueue cover_letter (3 tasks) and company_research (1 task) before start.
|
||||||
|
# cover_letter has the deeper queue and must win the first batch slot.
|
||||||
for i in range(3):
|
for i in range(3):
|
||||||
s.enqueue(i + 1, "cover_letter", i + 1, None)
|
s.enqueue(i + 1, "cover_letter", i + 1, None)
|
||||||
s.enqueue(4, "company_research", 4, None)
|
s.enqueue(4, "company_research", 4, None)
|
||||||
|
|
||||||
s.start()
|
s.start() # scheduler now sees all tasks atomically on its first iteration
|
||||||
assert done.wait(timeout=5.0), "timed out — not all 4 tasks completed"
|
assert done.wait(timeout=5.0), "timed out — not all 4 tasks completed"
|
||||||
s.shutdown()
|
s.shutdown()
|
||||||
|
|
||||||
assert len(log) == 4
|
assert len(log) == 4
|
||||||
cl = [t for _, t in log if t == "cover_letter"]
|
cl = [i for i, (_, t) in enumerate(log) if t == "cover_letter"]
|
||||||
cr = [t for _, t in log if t == "company_research"]
|
cr = [i for i, (_, t) in enumerate(log) if t == "company_research"]
|
||||||
assert len(cl) == 3 and len(cr) == 1
|
assert len(cl) == 3 and len(cr) == 1
|
||||||
|
assert max(cl) < min(cr), "All cover_letter tasks must finish before company_research starts"
|
||||||
|
|
||||||
|
|
||||||
def test_fifo_within_type(tmp_db):
|
def test_fifo_within_type(tmp_db):
|
||||||
|
|
@ -262,8 +256,8 @@ def test_fifo_within_type(tmp_db):
|
||||||
assert [task_id for task_id, _ in log] == [10, 20, 30]
|
assert [task_id for task_id, _ in log] == [10, 20, 30]
|
||||||
|
|
||||||
|
|
||||||
def test_concurrent_batches_different_types(tmp_db):
|
def test_concurrent_batches_when_vram_allows(tmp_db):
|
||||||
"""Two type batches run concurrently (LocalScheduler has no VRAM gating)."""
|
"""Two type batches start simultaneously when VRAM fits both."""
|
||||||
started = {"cover_letter": threading.Event(), "company_research": threading.Event()}
|
started = {"cover_letter": threading.Event(), "company_research": threading.Event()}
|
||||||
all_done = threading.Event()
|
all_done = threading.Event()
|
||||||
log = []
|
log = []
|
||||||
|
|
@ -274,7 +268,8 @@ def test_concurrent_batches_different_types(tmp_db):
|
||||||
if len(log) >= 2:
|
if len(log) >= 2:
|
||||||
all_done.set()
|
all_done.set()
|
||||||
|
|
||||||
s = _start_scheduler(tmp_db, run_task)
|
# VRAM=10.0 fits both cover_letter (2.5) and company_research (5.0) simultaneously
|
||||||
|
s = _start_scheduler(tmp_db, run_task, available_vram=10.0)
|
||||||
s.enqueue(1, "cover_letter", 1, None)
|
s.enqueue(1, "cover_letter", 1, None)
|
||||||
s.enqueue(2, "company_research", 2, None)
|
s.enqueue(2, "company_research", 2, None)
|
||||||
|
|
||||||
|
|
@ -312,15 +307,8 @@ def test_new_tasks_picked_up_mid_batch(tmp_db):
|
||||||
assert log == [1, 2]
|
assert log == [1, 2]
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.filterwarnings("ignore::pytest.PytestUnhandledThreadExceptionWarning")
|
def test_worker_crash_releases_vram(tmp_db):
|
||||||
def test_worker_crash_does_not_stall_scheduler(tmp_db):
|
"""If _run_task raises, _reserved_vram returns to 0 and scheduler continues."""
|
||||||
"""If _run_task raises, the scheduler continues processing the next task.
|
|
||||||
|
|
||||||
The batch_worker intentionally lets the RuntimeError propagate to the thread
|
|
||||||
boundary (so LocalScheduler can detect crash vs. normal exit). This produces
|
|
||||||
a PytestUnhandledThreadExceptionWarning -- suppressed here because it is the
|
|
||||||
expected behavior under test.
|
|
||||||
"""
|
|
||||||
log, done = [], threading.Event()
|
log, done = [], threading.Event()
|
||||||
|
|
||||||
def run_task(db_path, task_id, task_type, job_id, params):
|
def run_task(db_path, task_id, task_type, job_id, params):
|
||||||
|
|
@ -329,15 +317,16 @@ def test_worker_crash_does_not_stall_scheduler(tmp_db):
|
||||||
log.append(task_id)
|
log.append(task_id)
|
||||||
done.set()
|
done.set()
|
||||||
|
|
||||||
s = _start_scheduler(tmp_db, run_task)
|
s = _start_scheduler(tmp_db, run_task, available_vram=3.0)
|
||||||
s.enqueue(1, "cover_letter", 1, None)
|
s.enqueue(1, "cover_letter", 1, None)
|
||||||
s.enqueue(2, "cover_letter", 2, None)
|
s.enqueue(2, "cover_letter", 2, None)
|
||||||
|
|
||||||
assert done.wait(timeout=5.0), "timed out — task 2 never completed after task 1 crash"
|
assert done.wait(timeout=5.0), "timed out — task 2 never completed after task 1 crash"
|
||||||
s.shutdown()
|
s.shutdown()
|
||||||
|
|
||||||
# Second task still ran despite first crashing
|
# Second task still ran, VRAM was released
|
||||||
assert 2 in log
|
assert 2 in log
|
||||||
|
assert s._reserved_vram == 0.0
|
||||||
|
|
||||||
|
|
||||||
def test_get_scheduler_returns_singleton(tmp_db):
|
def test_get_scheduler_returns_singleton(tmp_db):
|
||||||
|
|
|
||||||
|
|
@ -66,12 +66,8 @@ def test_sync_cookie_prgn_switch_param_overrides_yaml(profile_yaml, monkeypatch)
|
||||||
assert any("prgn_ui=streamlit" in s for s in injected)
|
assert any("prgn_ui=streamlit" in s for s in injected)
|
||||||
|
|
||||||
|
|
||||||
def test_sync_cookie_free_tier_keeps_vue(profile_yaml, monkeypatch):
|
def test_sync_cookie_downgrades_tier_resets_to_streamlit(profile_yaml, monkeypatch):
|
||||||
"""Free-tier user with vue preference keeps vue (vue_ui_beta is free tier).
|
"""Free-tier user with vue preference gets reset to streamlit."""
|
||||||
|
|
||||||
Previously this test verified a downgrade to streamlit. Vue SPA was opened
|
|
||||||
to free tier in issue #20 — the downgrade path no longer triggers.
|
|
||||||
"""
|
|
||||||
import yaml as _yaml
|
import yaml as _yaml
|
||||||
profile_yaml.write_text(_yaml.dump({"name": "T", "ui_preference": "vue"}))
|
profile_yaml.write_text(_yaml.dump({"name": "T", "ui_preference": "vue"}))
|
||||||
|
|
||||||
|
|
@ -84,8 +80,8 @@ def test_sync_cookie_free_tier_keeps_vue(profile_yaml, monkeypatch):
|
||||||
sync_ui_cookie(profile_yaml, tier="free")
|
sync_ui_cookie(profile_yaml, tier="free")
|
||||||
|
|
||||||
saved = _yaml.safe_load(profile_yaml.read_text())
|
saved = _yaml.safe_load(profile_yaml.read_text())
|
||||||
assert saved["ui_preference"] == "vue"
|
assert saved["ui_preference"] == "streamlit"
|
||||||
assert any("prgn_ui=vue" in s for s in injected)
|
assert any("prgn_ui=streamlit" in s for s in injected)
|
||||||
|
|
||||||
|
|
||||||
def test_switch_ui_writes_yaml_and_calls_sync(profile_yaml, monkeypatch):
|
def test_switch_ui_writes_yaml_and_calls_sync(profile_yaml, monkeypatch):
|
||||||
|
|
|
||||||
|
|
@ -1,368 +0,0 @@
|
||||||
"""Tests for wizard API endpoints (GET/POST /api/wizard/*)."""
|
|
||||||
import os
|
|
||||||
import sys
|
|
||||||
import yaml
|
|
||||||
import pytest
|
|
||||||
from pathlib import Path
|
|
||||||
from unittest.mock import patch, MagicMock
|
|
||||||
from fastapi.testclient import TestClient
|
|
||||||
|
|
||||||
# ── Path bootstrap ────────────────────────────────────────────────────────────
|
|
||||||
_REPO = Path(__file__).parent.parent
|
|
||||||
if str(_REPO) not in sys.path:
|
|
||||||
sys.path.insert(0, str(_REPO))
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture(scope="module")
|
|
||||||
def client():
|
|
||||||
from dev_api import app
|
|
||||||
return TestClient(app)
|
|
||||||
|
|
||||||
|
|
||||||
# ── Helpers ───────────────────────────────────────────────────────────────────
|
|
||||||
|
|
||||||
def _write_user_yaml(path: Path, data: dict | None = None) -> None:
|
|
||||||
path.parent.mkdir(parents=True, exist_ok=True)
|
|
||||||
payload = data if data is not None else {}
|
|
||||||
path.write_text(yaml.dump(payload, allow_unicode=True, default_flow_style=False))
|
|
||||||
|
|
||||||
|
|
||||||
def _read_user_yaml(path: Path) -> dict:
|
|
||||||
if not path.exists():
|
|
||||||
return {}
|
|
||||||
return yaml.safe_load(path.read_text()) or {}
|
|
||||||
|
|
||||||
|
|
||||||
# ── GET /api/config/app — wizardComplete + isDemo ─────────────────────────────
|
|
||||||
|
|
||||||
class TestAppConfigWizardFields:
|
|
||||||
def test_wizard_complete_false_when_missing(self, client, tmp_path):
|
|
||||||
yaml_path = tmp_path / "config" / "user.yaml"
|
|
||||||
# user.yaml does not exist yet
|
|
||||||
with patch("dev_api._user_yaml_path", return_value=str(yaml_path)):
|
|
||||||
r = client.get("/api/config/app")
|
|
||||||
assert r.status_code == 200
|
|
||||||
assert r.json()["wizardComplete"] is False
|
|
||||||
|
|
||||||
def test_wizard_complete_true_when_set(self, client, tmp_path):
|
|
||||||
yaml_path = tmp_path / "config" / "user.yaml"
|
|
||||||
_write_user_yaml(yaml_path, {"wizard_complete": True})
|
|
||||||
with patch("dev_api._user_yaml_path", return_value=str(yaml_path)):
|
|
||||||
r = client.get("/api/config/app")
|
|
||||||
assert r.json()["wizardComplete"] is True
|
|
||||||
|
|
||||||
def test_is_demo_false_by_default(self, client, tmp_path):
|
|
||||||
yaml_path = tmp_path / "config" / "user.yaml"
|
|
||||||
_write_user_yaml(yaml_path, {"wizard_complete": True})
|
|
||||||
with patch("dev_api._user_yaml_path", return_value=str(yaml_path)):
|
|
||||||
with patch.dict(os.environ, {"DEMO_MODE": ""}, clear=False):
|
|
||||||
r = client.get("/api/config/app")
|
|
||||||
assert r.json()["isDemo"] is False
|
|
||||||
|
|
||||||
def test_is_demo_true_when_env_set(self, client, tmp_path):
|
|
||||||
yaml_path = tmp_path / "config" / "user.yaml"
|
|
||||||
_write_user_yaml(yaml_path, {"wizard_complete": True})
|
|
||||||
with patch("dev_api._user_yaml_path", return_value=str(yaml_path)):
|
|
||||||
with patch.dict(os.environ, {"DEMO_MODE": "true"}, clear=False):
|
|
||||||
r = client.get("/api/config/app")
|
|
||||||
assert r.json()["isDemo"] is True
|
|
||||||
|
|
||||||
|
|
||||||
# ── GET /api/wizard/status ────────────────────────────────────────────────────
|
|
||||||
|
|
||||||
class TestWizardStatus:
|
|
||||||
def test_returns_not_complete_when_no_yaml(self, client, tmp_path):
|
|
||||||
yaml_path = tmp_path / "config" / "user.yaml"
|
|
||||||
with patch("dev_api._wizard_yaml_path", return_value=str(yaml_path)):
|
|
||||||
r = client.get("/api/wizard/status")
|
|
||||||
assert r.status_code == 200
|
|
||||||
body = r.json()
|
|
||||||
assert body["wizard_complete"] is False
|
|
||||||
assert body["wizard_step"] == 0
|
|
||||||
|
|
||||||
def test_returns_saved_step(self, client, tmp_path):
|
|
||||||
yaml_path = tmp_path / "config" / "user.yaml"
|
|
||||||
_write_user_yaml(yaml_path, {"wizard_step": 3, "name": "Alex"})
|
|
||||||
with patch("dev_api._wizard_yaml_path", return_value=str(yaml_path)):
|
|
||||||
r = client.get("/api/wizard/status")
|
|
||||||
body = r.json()
|
|
||||||
assert body["wizard_step"] == 3
|
|
||||||
assert body["saved_data"]["name"] == "Alex"
|
|
||||||
|
|
||||||
def test_returns_complete_true(self, client, tmp_path):
|
|
||||||
yaml_path = tmp_path / "config" / "user.yaml"
|
|
||||||
_write_user_yaml(yaml_path, {"wizard_complete": True})
|
|
||||||
with patch("dev_api._wizard_yaml_path", return_value=str(yaml_path)):
|
|
||||||
r = client.get("/api/wizard/status")
|
|
||||||
assert r.json()["wizard_complete"] is True
|
|
||||||
|
|
||||||
|
|
||||||
# ── GET /api/wizard/hardware ──────────────────────────────────────────────────
|
|
||||||
|
|
||||||
class TestWizardHardware:
|
|
||||||
def test_returns_profiles_list(self, client):
|
|
||||||
r = client.get("/api/wizard/hardware")
|
|
||||||
assert r.status_code == 200
|
|
||||||
body = r.json()
|
|
||||||
assert set(body["profiles"]) == {"remote", "cpu", "single-gpu", "dual-gpu"}
|
|
||||||
assert "gpus" in body
|
|
||||||
assert "suggested_profile" in body
|
|
||||||
|
|
||||||
def test_gpu_from_env_var(self, client):
|
|
||||||
with patch.dict(os.environ, {"PEREGRINE_GPU_NAMES": "RTX 4090,RTX 3080"}, clear=False):
|
|
||||||
r = client.get("/api/wizard/hardware")
|
|
||||||
body = r.json()
|
|
||||||
assert body["gpus"] == ["RTX 4090", "RTX 3080"]
|
|
||||||
assert body["suggested_profile"] == "dual-gpu"
|
|
||||||
|
|
||||||
def test_single_gpu_suggests_single(self, client):
|
|
||||||
with patch.dict(os.environ, {"PEREGRINE_GPU_NAMES": "RTX 4090"}, clear=False):
|
|
||||||
with patch.dict(os.environ, {"RECOMMENDED_PROFILE": ""}, clear=False):
|
|
||||||
r = client.get("/api/wizard/hardware")
|
|
||||||
assert r.json()["suggested_profile"] == "single-gpu"
|
|
||||||
|
|
||||||
def test_no_gpus_suggests_remote(self, client):
|
|
||||||
with patch.dict(os.environ, {"PEREGRINE_GPU_NAMES": ""}, clear=False):
|
|
||||||
with patch.dict(os.environ, {"RECOMMENDED_PROFILE": ""}, clear=False):
|
|
||||||
with patch("subprocess.check_output", side_effect=FileNotFoundError):
|
|
||||||
r = client.get("/api/wizard/hardware")
|
|
||||||
assert r.json()["suggested_profile"] == "remote"
|
|
||||||
assert r.json()["gpus"] == []
|
|
||||||
|
|
||||||
def test_recommended_profile_env_takes_priority(self, client):
|
|
||||||
with patch.dict(os.environ,
|
|
||||||
{"PEREGRINE_GPU_NAMES": "RTX 4090", "RECOMMENDED_PROFILE": "cpu"},
|
|
||||||
clear=False):
|
|
||||||
r = client.get("/api/wizard/hardware")
|
|
||||||
assert r.json()["suggested_profile"] == "cpu"
|
|
||||||
|
|
||||||
|
|
||||||
# ── POST /api/wizard/step ─────────────────────────────────────────────────────
|
|
||||||
|
|
||||||
class TestWizardStep:
|
|
||||||
def test_step1_saves_inference_profile(self, client, tmp_path):
|
|
||||||
yaml_path = tmp_path / "config" / "user.yaml"
|
|
||||||
_write_user_yaml(yaml_path, {})
|
|
||||||
with patch("dev_api._wizard_yaml_path", return_value=str(yaml_path)):
|
|
||||||
r = client.post("/api/wizard/step",
|
|
||||||
json={"step": 1, "data": {"inference_profile": "single-gpu"}})
|
|
||||||
assert r.status_code == 200
|
|
||||||
saved = _read_user_yaml(yaml_path)
|
|
||||||
assert saved["inference_profile"] == "single-gpu"
|
|
||||||
assert saved["wizard_step"] == 1
|
|
||||||
|
|
||||||
def test_step1_rejects_unknown_profile(self, client, tmp_path):
|
|
||||||
yaml_path = tmp_path / "config" / "user.yaml"
|
|
||||||
_write_user_yaml(yaml_path, {})
|
|
||||||
with patch("dev_api._wizard_yaml_path", return_value=str(yaml_path)):
|
|
||||||
r = client.post("/api/wizard/step",
|
|
||||||
json={"step": 1, "data": {"inference_profile": "turbo-gpu"}})
|
|
||||||
assert r.status_code == 400
|
|
||||||
|
|
||||||
def test_step2_saves_tier(self, client, tmp_path):
|
|
||||||
yaml_path = tmp_path / "config" / "user.yaml"
|
|
||||||
_write_user_yaml(yaml_path, {})
|
|
||||||
with patch("dev_api._wizard_yaml_path", return_value=str(yaml_path)):
|
|
||||||
r = client.post("/api/wizard/step",
|
|
||||||
json={"step": 2, "data": {"tier": "paid"}})
|
|
||||||
assert r.status_code == 200
|
|
||||||
assert _read_user_yaml(yaml_path)["tier"] == "paid"
|
|
||||||
|
|
||||||
def test_step2_rejects_unknown_tier(self, client, tmp_path):
|
|
||||||
yaml_path = tmp_path / "config" / "user.yaml"
|
|
||||||
_write_user_yaml(yaml_path, {})
|
|
||||||
with patch("dev_api._wizard_yaml_path", return_value=str(yaml_path)):
|
|
||||||
r = client.post("/api/wizard/step",
|
|
||||||
json={"step": 2, "data": {"tier": "enterprise"}})
|
|
||||||
assert r.status_code == 400
|
|
||||||
|
|
||||||
def test_step3_writes_resume_yaml(self, client, tmp_path):
|
|
||||||
yaml_path = tmp_path / "config" / "user.yaml"
|
|
||||||
_write_user_yaml(yaml_path, {})
|
|
||||||
resume = {"experience": [{"title": "Engineer", "company": "Acme"}]}
|
|
||||||
with patch("dev_api._wizard_yaml_path", return_value=str(yaml_path)):
|
|
||||||
r = client.post("/api/wizard/step",
|
|
||||||
json={"step": 3, "data": {"resume": resume}})
|
|
||||||
assert r.status_code == 200
|
|
||||||
resume_path = yaml_path.parent / "plain_text_resume.yaml"
|
|
||||||
assert resume_path.exists()
|
|
||||||
saved_resume = yaml.safe_load(resume_path.read_text())
|
|
||||||
assert saved_resume["experience"][0]["title"] == "Engineer"
|
|
||||||
|
|
||||||
def test_step4_saves_identity_fields(self, client, tmp_path):
|
|
||||||
yaml_path = tmp_path / "config" / "user.yaml"
|
|
||||||
_write_user_yaml(yaml_path, {})
|
|
||||||
identity = {
|
|
||||||
"name": "Alex Rivera",
|
|
||||||
"email": "alex@example.com",
|
|
||||||
"phone": "555-1234",
|
|
||||||
"linkedin": "https://linkedin.com/in/alex",
|
|
||||||
"career_summary": "Experienced engineer.",
|
|
||||||
}
|
|
||||||
with patch("dev_api._wizard_yaml_path", return_value=str(yaml_path)):
|
|
||||||
r = client.post("/api/wizard/step", json={"step": 4, "data": identity})
|
|
||||||
assert r.status_code == 200
|
|
||||||
saved = _read_user_yaml(yaml_path)
|
|
||||||
assert saved["name"] == "Alex Rivera"
|
|
||||||
assert saved["career_summary"] == "Experienced engineer."
|
|
||||||
assert saved["wizard_step"] == 4
|
|
||||||
|
|
||||||
def test_step5_writes_env_keys(self, client, tmp_path):
|
|
||||||
yaml_path = tmp_path / "config" / "user.yaml"
|
|
||||||
env_path = tmp_path / ".env"
|
|
||||||
env_path.write_text("SOME_KEY=existing\n")
|
|
||||||
_write_user_yaml(yaml_path, {})
|
|
||||||
# Patch both _wizard_yaml_path and the Path resolution inside wizard_save_step
|
|
||||||
with patch("dev_api._wizard_yaml_path", return_value=str(yaml_path)):
|
|
||||||
with patch("dev_api.Path") as mock_path_cls:
|
|
||||||
# Only intercept the .env path construction; let other Path() calls pass through
|
|
||||||
real_path = Path
|
|
||||||
def path_side_effect(*args):
|
|
||||||
result = real_path(*args)
|
|
||||||
return result
|
|
||||||
mock_path_cls.side_effect = path_side_effect
|
|
||||||
|
|
||||||
# Direct approach: monkeypatch the env path
|
|
||||||
import dev_api as _dev_api
|
|
||||||
original_fn = _dev_api.wizard_save_step
|
|
||||||
|
|
||||||
# Simpler: just test via the real endpoint, verify env not written if no key given
|
|
||||||
r = client.post("/api/wizard/step",
|
|
||||||
json={"step": 5, "data": {"services": {"ollama_host": "localhost"}}})
|
|
||||||
assert r.status_code == 200
|
|
||||||
|
|
||||||
def test_step6_writes_search_profiles(self, client, tmp_path):
|
|
||||||
yaml_path = tmp_path / "config" / "user.yaml"
|
|
||||||
search_path = tmp_path / "config" / "search_profiles.yaml"
|
|
||||||
_write_user_yaml(yaml_path, {})
|
|
||||||
with patch("dev_api._wizard_yaml_path", return_value=str(yaml_path)):
|
|
||||||
with patch("dev_api._search_prefs_path", return_value=search_path):
|
|
||||||
r = client.post("/api/wizard/step",
|
|
||||||
json={"step": 6, "data": {
|
|
||||||
"titles": ["Software Engineer", "Backend Developer"],
|
|
||||||
"locations": ["Remote", "Austin, TX"],
|
|
||||||
}})
|
|
||||||
assert r.status_code == 200
|
|
||||||
assert search_path.exists()
|
|
||||||
prefs = yaml.safe_load(search_path.read_text())
|
|
||||||
assert prefs["default"]["job_titles"] == ["Software Engineer", "Backend Developer"]
|
|
||||||
assert "Remote" in prefs["default"]["location"]
|
|
||||||
|
|
||||||
def test_step7_only_advances_counter(self, client, tmp_path):
|
|
||||||
yaml_path = tmp_path / "config" / "user.yaml"
|
|
||||||
_write_user_yaml(yaml_path, {})
|
|
||||||
with patch("dev_api._wizard_yaml_path", return_value=str(yaml_path)):
|
|
||||||
r = client.post("/api/wizard/step", json={"step": 7, "data": {}})
|
|
||||||
assert r.status_code == 200
|
|
||||||
assert _read_user_yaml(yaml_path)["wizard_step"] == 7
|
|
||||||
|
|
||||||
def test_invalid_step_number(self, client, tmp_path):
|
|
||||||
yaml_path = tmp_path / "config" / "user.yaml"
|
|
||||||
_write_user_yaml(yaml_path, {})
|
|
||||||
with patch("dev_api._wizard_yaml_path", return_value=str(yaml_path)):
|
|
||||||
r = client.post("/api/wizard/step", json={"step": 99, "data": {}})
|
|
||||||
assert r.status_code == 400
|
|
||||||
|
|
||||||
def test_crash_recovery_round_trip(self, client, tmp_path):
|
|
||||||
"""Save steps 1-4 sequentially, then verify status reflects step 4."""
|
|
||||||
yaml_path = tmp_path / "config" / "user.yaml"
|
|
||||||
_write_user_yaml(yaml_path, {})
|
|
||||||
steps = [
|
|
||||||
(1, {"inference_profile": "cpu"}),
|
|
||||||
(2, {"tier": "free"}),
|
|
||||||
(4, {"name": "Alex", "email": "a@b.com", "career_summary": "Eng."}),
|
|
||||||
]
|
|
||||||
with patch("dev_api._wizard_yaml_path", return_value=str(yaml_path)):
|
|
||||||
for step, data in steps:
|
|
||||||
r = client.post("/api/wizard/step", json={"step": step, "data": data})
|
|
||||||
assert r.status_code == 200
|
|
||||||
|
|
||||||
r = client.get("/api/wizard/status")
|
|
||||||
|
|
||||||
body = r.json()
|
|
||||||
assert body["wizard_step"] == 4
|
|
||||||
assert body["saved_data"]["name"] == "Alex"
|
|
||||||
assert body["saved_data"]["inference_profile"] == "cpu"
|
|
||||||
|
|
||||||
|
|
||||||
# ── POST /api/wizard/inference/test ──────────────────────────────────────────
|
|
||||||
|
|
||||||
class TestWizardInferenceTest:
|
|
||||||
def test_local_profile_ollama_running(self, client):
|
|
||||||
mock_resp = MagicMock()
|
|
||||||
mock_resp.status_code = 200
|
|
||||||
with patch("dev_api.requests.get", return_value=mock_resp):
|
|
||||||
r = client.post("/api/wizard/inference/test",
|
|
||||||
json={"profile": "cpu", "ollama_host": "localhost",
|
|
||||||
"ollama_port": 11434})
|
|
||||||
assert r.status_code == 200
|
|
||||||
body = r.json()
|
|
||||||
assert body["ok"] is True
|
|
||||||
assert "Ollama" in body["message"]
|
|
||||||
|
|
||||||
def test_local_profile_ollama_down_soft_fail(self, client):
|
|
||||||
import requests as _req
|
|
||||||
with patch("dev_api.requests.get", side_effect=_req.exceptions.ConnectionError):
|
|
||||||
r = client.post("/api/wizard/inference/test",
|
|
||||||
json={"profile": "single-gpu"})
|
|
||||||
assert r.status_code == 200
|
|
||||||
body = r.json()
|
|
||||||
assert body["ok"] is False
|
|
||||||
assert "configure" in body["message"].lower()
|
|
||||||
|
|
||||||
def test_remote_profile_llm_responding(self, client):
|
|
||||||
# LLMRouter is imported inside wizard_test_inference — patch the source module
|
|
||||||
with patch("scripts.llm_router.LLMRouter") as mock_cls:
|
|
||||||
mock_cls.return_value.complete.return_value = "OK"
|
|
||||||
r = client.post("/api/wizard/inference/test",
|
|
||||||
json={"profile": "remote", "anthropic_key": "sk-ant-test"})
|
|
||||||
assert r.status_code == 200
|
|
||||||
assert r.json()["ok"] is True
|
|
||||||
|
|
||||||
def test_remote_profile_llm_error(self, client):
|
|
||||||
with patch("scripts.llm_router.LLMRouter") as mock_cls:
|
|
||||||
mock_cls.return_value.complete.side_effect = RuntimeError("no key")
|
|
||||||
r = client.post("/api/wizard/inference/test",
|
|
||||||
json={"profile": "remote"})
|
|
||||||
assert r.status_code == 200
|
|
||||||
body = r.json()
|
|
||||||
assert body["ok"] is False
|
|
||||||
assert "failed" in body["message"].lower()
|
|
||||||
|
|
||||||
|
|
||||||
# ── POST /api/wizard/complete ─────────────────────────────────────────────────
|
|
||||||
|
|
||||||
class TestWizardComplete:
|
|
||||||
def test_sets_wizard_complete_true(self, client, tmp_path):
|
|
||||||
yaml_path = tmp_path / "config" / "user.yaml"
|
|
||||||
_write_user_yaml(yaml_path, {"wizard_step": 6, "name": "Alex"})
|
|
||||||
# apply_service_urls is a local import inside wizard_complete — patch source module
|
|
||||||
with patch("dev_api._wizard_yaml_path", return_value=str(yaml_path)):
|
|
||||||
with patch("scripts.generate_llm_config.apply_service_urls",
|
|
||||||
side_effect=Exception("no llm.yaml")):
|
|
||||||
r = client.post("/api/wizard/complete")
|
|
||||||
assert r.status_code == 200
|
|
||||||
assert r.json()["ok"] is True
|
|
||||||
saved = _read_user_yaml(yaml_path)
|
|
||||||
assert saved["wizard_complete"] is True
|
|
||||||
assert "wizard_step" not in saved
|
|
||||||
assert saved["name"] == "Alex" # other fields preserved
|
|
||||||
|
|
||||||
def test_complete_removes_wizard_step(self, client, tmp_path):
|
|
||||||
yaml_path = tmp_path / "config" / "user.yaml"
|
|
||||||
_write_user_yaml(yaml_path, {"wizard_step": 7, "tier": "paid"})
|
|
||||||
with patch("dev_api._wizard_yaml_path", return_value=str(yaml_path)):
|
|
||||||
with patch("scripts.generate_llm_config.apply_service_urls", return_value=None):
|
|
||||||
client.post("/api/wizard/complete")
|
|
||||||
saved = _read_user_yaml(yaml_path)
|
|
||||||
assert "wizard_step" not in saved
|
|
||||||
assert saved["tier"] == "paid"
|
|
||||||
|
|
||||||
def test_complete_tolerates_missing_llm_yaml(self, client, tmp_path):
|
|
||||||
yaml_path = tmp_path / "config" / "user.yaml"
|
|
||||||
_write_user_yaml(yaml_path, {})
|
|
||||||
with patch("dev_api._wizard_yaml_path", return_value=str(yaml_path)):
|
|
||||||
# llm.yaml doesn't exist → apply_service_urls is never called, no error
|
|
||||||
r = client.post("/api/wizard/complete")
|
|
||||||
assert r.status_code == 200
|
|
||||||
assert r.json()["ok"] is True
|
|
||||||
|
|
@ -121,8 +121,7 @@ def test_byok_false_preserves_original_gating():
|
||||||
# ── Vue UI Beta & Demo Tier tests ──────────────────────────────────────────────
|
# ── Vue UI Beta & Demo Tier tests ──────────────────────────────────────────────
|
||||||
|
|
||||||
def test_vue_ui_beta_free_tier():
|
def test_vue_ui_beta_free_tier():
|
||||||
# Vue SPA is open to all tiers (issue #20 — beta restriction removed)
|
assert can_use("free", "vue_ui_beta") is False
|
||||||
assert can_use("free", "vue_ui_beta") is True
|
|
||||||
|
|
||||||
|
|
||||||
def test_vue_ui_beta_paid_tier():
|
def test_vue_ui_beta_paid_tier():
|
||||||
|
|
|
||||||
|
|
@ -1,55 +1,32 @@
|
||||||
<template>
|
<template>
|
||||||
<!-- Root uses .app-root class, NOT id="app" — index.html owns #app.
|
<!-- Root uses .app-root class, NOT id="app" — index.html owns #app.
|
||||||
Nested #app elements cause ambiguous CSS specificity. Gotcha #1. -->
|
Nested #app elements cause ambiguous CSS specificity. Gotcha #1. -->
|
||||||
<div class="app-root" :class="{ 'rich-motion': motion.rich.value, 'app-root--wizard': isWizard }">
|
<div class="app-root" :class="{ 'rich-motion': motion.rich.value }">
|
||||||
<AppNav v-if="!isWizard" />
|
<AppNav />
|
||||||
<main class="app-main" :class="{ 'app-main--wizard': isWizard }" id="main-content" tabindex="-1">
|
<main class="app-main" id="main-content" tabindex="-1">
|
||||||
<!-- Skip to main content link (screen reader / keyboard nav) -->
|
<!-- Skip to main content link (screen reader / keyboard nav) -->
|
||||||
<a href="#main-content" class="skip-link">Skip to main content</a>
|
<a href="#main-content" class="skip-link">Skip to main content</a>
|
||||||
|
|
||||||
<!-- Demo mode banner — sticky top bar, visible on all pages -->
|
|
||||||
<div v-if="config.isDemo" class="demo-banner" role="status" aria-live="polite">
|
|
||||||
👁 Demo mode — changes are not saved and AI features are disabled.
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<RouterView />
|
<RouterView />
|
||||||
|
|
||||||
<!-- Global toast — rendered at App level so any component can trigger it -->
|
|
||||||
<Transition name="global-toast">
|
|
||||||
<div v-if="toast.message.value" class="global-toast" role="status" aria-live="polite">
|
|
||||||
{{ toast.message.value }}
|
|
||||||
</div>
|
|
||||||
</Transition>
|
|
||||||
</main>
|
</main>
|
||||||
</div>
|
</div>
|
||||||
</template>
|
</template>
|
||||||
|
|
||||||
<script setup lang="ts">
|
<script setup lang="ts">
|
||||||
import { computed, onMounted } from 'vue'
|
import { onMounted } from 'vue'
|
||||||
import { RouterView, useRoute } from 'vue-router'
|
import { RouterView } from 'vue-router'
|
||||||
import { useMotion } from './composables/useMotion'
|
import { useMotion } from './composables/useMotion'
|
||||||
import { useHackerMode, useKonamiCode } from './composables/useEasterEgg'
|
import { useHackerMode, useKonamiCode } from './composables/useEasterEgg'
|
||||||
import { useTheme } from './composables/useTheme'
|
|
||||||
import { useToast } from './composables/useToast'
|
|
||||||
import AppNav from './components/AppNav.vue'
|
import AppNav from './components/AppNav.vue'
|
||||||
import { useAppConfigStore } from './stores/appConfig'
|
|
||||||
import { useDigestStore } from './stores/digest'
|
import { useDigestStore } from './stores/digest'
|
||||||
|
|
||||||
const motion = useMotion()
|
const motion = useMotion()
|
||||||
const route = useRoute()
|
|
||||||
const { toggle, restore } = useHackerMode()
|
const { toggle, restore } = useHackerMode()
|
||||||
const { initTheme } = useTheme()
|
|
||||||
const toast = useToast()
|
|
||||||
const config = useAppConfigStore()
|
|
||||||
const digestStore = useDigestStore()
|
const digestStore = useDigestStore()
|
||||||
|
|
||||||
const isWizard = computed(() => route.path.startsWith('/setup'))
|
|
||||||
|
|
||||||
useKonamiCode(toggle)
|
useKonamiCode(toggle)
|
||||||
|
|
||||||
onMounted(() => {
|
onMounted(() => {
|
||||||
initTheme() // apply persisted theme (hacker mode takes priority inside initTheme)
|
restore() // re-apply hacker mode from localStorage on hard reload
|
||||||
restore() // kept for hacker mode re-entry on hard reload (initTheme handles it, belt+suspenders)
|
|
||||||
digestStore.fetchAll() // populate badge immediately, before user visits Digest tab
|
digestStore.fetchAll() // populate badge immediately, before user visits Digest tab
|
||||||
})
|
})
|
||||||
</script>
|
</script>
|
||||||
|
|
@ -117,61 +94,4 @@ body {
|
||||||
padding-bottom: calc(56px + env(safe-area-inset-bottom));
|
padding-bottom: calc(56px + env(safe-area-inset-bottom));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/* Wizard: full-bleed, no sidebar offset, no tab-bar clearance */
|
|
||||||
.app-root--wizard {
|
|
||||||
display: block;
|
|
||||||
}
|
|
||||||
|
|
||||||
.app-main--wizard {
|
|
||||||
margin-left: 0;
|
|
||||||
padding-bottom: 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Demo mode banner — sticky top bar */
|
|
||||||
.demo-banner {
|
|
||||||
position: sticky;
|
|
||||||
top: 0;
|
|
||||||
z-index: 200;
|
|
||||||
background: var(--color-warning);
|
|
||||||
color: #1a1a1a; /* forced dark — warning bg is always light enough */
|
|
||||||
text-align: center;
|
|
||||||
font-size: 0.85rem;
|
|
||||||
font-weight: 600;
|
|
||||||
padding: 6px var(--space-4, 16px);
|
|
||||||
letter-spacing: 0.01em;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Global toast — bottom-center, above tab bar */
|
|
||||||
.global-toast {
|
|
||||||
position: fixed;
|
|
||||||
bottom: calc(72px + env(safe-area-inset-bottom));
|
|
||||||
left: 50%;
|
|
||||||
transform: translateX(-50%);
|
|
||||||
background: var(--color-surface-raised, #2a3650);
|
|
||||||
color: var(--color-text, #eaeff8);
|
|
||||||
padding: 10px 20px;
|
|
||||||
border-radius: var(--radius-md, 8px);
|
|
||||||
font-size: 0.9rem;
|
|
||||||
font-weight: 500;
|
|
||||||
box-shadow: 0 4px 16px rgba(0,0,0,0.25);
|
|
||||||
white-space: nowrap;
|
|
||||||
z-index: 9000;
|
|
||||||
pointer-events: none;
|
|
||||||
}
|
|
||||||
|
|
||||||
.global-toast-enter-active, .global-toast-leave-active {
|
|
||||||
transition: opacity 220ms ease, transform 220ms ease;
|
|
||||||
}
|
|
||||||
.global-toast-enter-from, .global-toast-leave-to {
|
|
||||||
opacity: 0;
|
|
||||||
transform: translateX(-50%) translateY(8px);
|
|
||||||
}
|
|
||||||
|
|
||||||
@media (min-width: 1024px) {
|
|
||||||
.global-toast {
|
|
||||||
bottom: calc(24px + env(safe-area-inset-bottom));
|
|
||||||
left: calc(50% + var(--sidebar-width, 220px) / 2);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
</style>
|
</style>
|
||||||
|
|
|
||||||
|
|
@ -73,11 +73,11 @@
|
||||||
}
|
}
|
||||||
|
|
||||||
/* ── Accessible Solarpunk — dark (system dark mode) ─
|
/* ── Accessible Solarpunk — dark (system dark mode) ─
|
||||||
Activates when OS/browser is in dark mode AND no
|
Activates when OS/browser is in dark mode.
|
||||||
explicit theme is selected. Explicit [data-theme="*"]
|
Uses :not([data-theme="hacker"]) so the Konami easter
|
||||||
always wins over the system preference. */
|
egg always wins over the system preference. */
|
||||||
@media (prefers-color-scheme: dark) {
|
@media (prefers-color-scheme: dark) {
|
||||||
:root:not([data-theme]) {
|
:root:not([data-theme="hacker"]) {
|
||||||
/* Brand — lighter greens readable on dark surfaces */
|
/* Brand — lighter greens readable on dark surfaces */
|
||||||
--color-primary: #6ab870;
|
--color-primary: #6ab870;
|
||||||
--color-primary-hover: #7ecb84;
|
--color-primary-hover: #7ecb84;
|
||||||
|
|
@ -161,153 +161,6 @@
|
||||||
--color-accent-glow-lg: rgba(0, 255, 65, 0.6);
|
--color-accent-glow-lg: rgba(0, 255, 65, 0.6);
|
||||||
}
|
}
|
||||||
|
|
||||||
/* ── Explicit light — forces light even on dark-OS ─ */
|
|
||||||
[data-theme="light"] {
|
|
||||||
--color-primary: #2d5a27;
|
|
||||||
--color-primary-hover: #234820;
|
|
||||||
--color-primary-light: #e8f2e7;
|
|
||||||
--color-surface: #eaeff8;
|
|
||||||
--color-surface-alt: #dde4f0;
|
|
||||||
--color-surface-raised: #f5f7fc;
|
|
||||||
--color-border: #a8b8d0;
|
|
||||||
--color-border-light: #ccd5e6;
|
|
||||||
--color-text: #1a2338;
|
|
||||||
--color-text-muted: #4a5c7a;
|
|
||||||
--color-text-inverse: #eaeff8;
|
|
||||||
--color-accent: #c4732a;
|
|
||||||
--color-accent-hover: #a85c1f;
|
|
||||||
--color-accent-light: #fdf0e4;
|
|
||||||
--color-success: #3a7a32;
|
|
||||||
--color-error: #c0392b;
|
|
||||||
--color-warning: #d4891a;
|
|
||||||
--color-info: #1e6091;
|
|
||||||
--shadow-sm: 0 1px 3px rgba(26, 35, 56, 0.08), 0 1px 2px rgba(26, 35, 56, 0.04);
|
|
||||||
--shadow-md: 0 4px 12px rgba(26, 35, 56, 0.1), 0 2px 4px rgba(26, 35, 56, 0.06);
|
|
||||||
--shadow-lg: 0 10px 30px rgba(26, 35, 56, 0.12), 0 4px 8px rgba(26, 35, 56, 0.06);
|
|
||||||
}
|
|
||||||
|
|
||||||
/* ── Explicit dark — forces dark even on light-OS ── */
|
|
||||||
[data-theme="dark"] {
|
|
||||||
--color-primary: #6ab870;
|
|
||||||
--color-primary-hover: #7ecb84;
|
|
||||||
--color-primary-light: #162616;
|
|
||||||
--color-surface: #16202e;
|
|
||||||
--color-surface-alt: #1e2a3a;
|
|
||||||
--color-surface-raised: #263547;
|
|
||||||
--color-border: #2d4060;
|
|
||||||
--color-border-light: #233352;
|
|
||||||
--color-text: #e4eaf5;
|
|
||||||
--color-text-muted: #8da0bc;
|
|
||||||
--color-text-inverse: #16202e;
|
|
||||||
--color-accent: #e8a84a;
|
|
||||||
--color-accent-hover: #f5bc60;
|
|
||||||
--color-accent-light: #2d1e0a;
|
|
||||||
--color-success: #5eb85e;
|
|
||||||
--color-error: #e05252;
|
|
||||||
--color-warning: #e8a84a;
|
|
||||||
--color-info: #4da6e8;
|
|
||||||
--shadow-sm: 0 1px 3px rgba(0, 0, 0, 0.3), 0 1px 2px rgba(0, 0, 0, 0.2);
|
|
||||||
--shadow-md: 0 4px 12px rgba(0, 0, 0, 0.35), 0 2px 4px rgba(0, 0, 0, 0.2);
|
|
||||||
--shadow-lg: 0 10px 30px rgba(0, 0, 0, 0.4), 0 4px 8px rgba(0, 0, 0, 0.2);
|
|
||||||
}
|
|
||||||
|
|
||||||
/* ── Solarized Dark ──────────────────────────────── */
|
|
||||||
/* Ethan Schoonover's Solarized palette (dark variant) */
|
|
||||||
[data-theme="solarized-dark"] {
|
|
||||||
--color-primary: #2aa198; /* cyan — used as primary brand color */
|
|
||||||
--color-primary-hover: #35b8ad;
|
|
||||||
--color-primary-light: #002b36;
|
|
||||||
|
|
||||||
--color-surface: #002b36; /* base03 */
|
|
||||||
--color-surface-alt: #073642; /* base02 */
|
|
||||||
--color-surface-raised: #0d4352;
|
|
||||||
|
|
||||||
--color-border: #073642;
|
|
||||||
--color-border-light: #0a4a5a;
|
|
||||||
|
|
||||||
--color-text: #839496; /* base0 */
|
|
||||||
--color-text-muted: #657b83; /* base00 */
|
|
||||||
--color-text-inverse: #002b36;
|
|
||||||
|
|
||||||
--color-accent: #b58900; /* yellow */
|
|
||||||
--color-accent-hover: #cb9f10;
|
|
||||||
--color-accent-light: #1a1300;
|
|
||||||
|
|
||||||
--color-success: #859900; /* green */
|
|
||||||
--color-error: #dc322f; /* red */
|
|
||||||
--color-warning: #b58900; /* yellow */
|
|
||||||
--color-info: #268bd2; /* blue */
|
|
||||||
|
|
||||||
--shadow-sm: 0 1px 3px rgba(0, 0, 0, 0.4), 0 1px 2px rgba(0, 0, 0, 0.3);
|
|
||||||
--shadow-md: 0 4px 12px rgba(0, 0, 0, 0.45), 0 2px 4px rgba(0, 0, 0, 0.3);
|
|
||||||
--shadow-lg: 0 10px 30px rgba(0, 0, 0, 0.5), 0 4px 8px rgba(0, 0, 0, 0.3);
|
|
||||||
}
|
|
||||||
|
|
||||||
/* ── Solarized Light ─────────────────────────────── */
|
|
||||||
[data-theme="solarized-light"] {
|
|
||||||
--color-primary: #2aa198; /* cyan */
|
|
||||||
--color-primary-hover: #1e8a82;
|
|
||||||
--color-primary-light: #eee8d5;
|
|
||||||
|
|
||||||
--color-surface: #fdf6e3; /* base3 */
|
|
||||||
--color-surface-alt: #eee8d5; /* base2 */
|
|
||||||
--color-surface-raised: #fffdf7;
|
|
||||||
|
|
||||||
--color-border: #d3c9b0;
|
|
||||||
--color-border-light: #e4dacc;
|
|
||||||
|
|
||||||
--color-text: #657b83; /* base00 */
|
|
||||||
--color-text-muted: #839496; /* base0 */
|
|
||||||
--color-text-inverse: #fdf6e3;
|
|
||||||
|
|
||||||
--color-accent: #b58900; /* yellow */
|
|
||||||
--color-accent-hover: #9a7300;
|
|
||||||
--color-accent-light: #fdf0c0;
|
|
||||||
|
|
||||||
--color-success: #859900; /* green */
|
|
||||||
--color-error: #dc322f; /* red */
|
|
||||||
--color-warning: #b58900; /* yellow */
|
|
||||||
--color-info: #268bd2; /* blue */
|
|
||||||
|
|
||||||
--shadow-sm: 0 1px 3px rgba(101, 123, 131, 0.12), 0 1px 2px rgba(101, 123, 131, 0.08);
|
|
||||||
--shadow-md: 0 4px 12px rgba(101, 123, 131, 0.15), 0 2px 4px rgba(101, 123, 131, 0.08);
|
|
||||||
--shadow-lg: 0 10px 30px rgba(101, 123, 131, 0.18), 0 4px 8px rgba(101, 123, 131, 0.08);
|
|
||||||
}
|
|
||||||
|
|
||||||
/* ── Colorblind-safe (deuteranopia/protanopia) ────── */
|
|
||||||
/* Avoids red/green confusion. Uses blue+orange as the
|
|
||||||
primary pair; cyan+magenta as semantic differentiators.
|
|
||||||
Based on Wong (2011) 8-color colorblind-safe palette. */
|
|
||||||
[data-theme="colorblind"] {
|
|
||||||
--color-primary: #0072B2; /* blue — safe primary */
|
|
||||||
--color-primary-hover: #005a8e;
|
|
||||||
--color-primary-light: #e0f0fa;
|
|
||||||
|
|
||||||
--color-surface: #f4f6fb;
|
|
||||||
--color-surface-alt: #e6eaf4;
|
|
||||||
--color-surface-raised: #fafbfe;
|
|
||||||
|
|
||||||
--color-border: #b0bcd8;
|
|
||||||
--color-border-light: #cdd5e8;
|
|
||||||
|
|
||||||
--color-text: #1a2338;
|
|
||||||
--color-text-muted: #4a5c7a;
|
|
||||||
--color-text-inverse: #f4f6fb;
|
|
||||||
|
|
||||||
--color-accent: #E69F00; /* orange — safe secondary */
|
|
||||||
--color-accent-hover: #c98900;
|
|
||||||
--color-accent-light: #fdf4dc;
|
|
||||||
|
|
||||||
--color-success: #009E73; /* teal-green — distinct from red/green confusion zone */
|
|
||||||
--color-error: #CC0066; /* magenta-red — distinguishable from green */
|
|
||||||
--color-warning: #E69F00; /* orange */
|
|
||||||
--color-info: #56B4E9; /* sky blue */
|
|
||||||
|
|
||||||
--shadow-sm: 0 1px 3px rgba(26, 35, 56, 0.08), 0 1px 2px rgba(26, 35, 56, 0.04);
|
|
||||||
--shadow-md: 0 4px 12px rgba(26, 35, 56, 0.1), 0 2px 4px rgba(26, 35, 56, 0.06);
|
|
||||||
--shadow-lg: 0 10px 30px rgba(26, 35, 56, 0.12), 0 4px 8px rgba(26, 35, 56, 0.06);
|
|
||||||
}
|
|
||||||
|
|
||||||
/* ── Base resets ─────────────────────────────────── */
|
/* ── Base resets ─────────────────────────────────── */
|
||||||
*, *::before, *::after { box-sizing: border-box; }
|
*, *::before, *::after { box-sizing: border-box; }
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -34,31 +34,12 @@
|
||||||
</button>
|
</button>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<!-- Theme picker -->
|
|
||||||
<div class="sidebar__theme" v-if="!isHackerMode">
|
|
||||||
<label class="sidebar__theme-label" for="theme-select">Theme</label>
|
|
||||||
<select
|
|
||||||
id="theme-select"
|
|
||||||
class="sidebar__theme-select"
|
|
||||||
:value="currentTheme"
|
|
||||||
@change="setTheme(($event.target as HTMLSelectElement).value as Theme)"
|
|
||||||
aria-label="Select theme"
|
|
||||||
>
|
|
||||||
<option v-for="opt in THEME_OPTIONS" :key="opt.value" :value="opt.value">
|
|
||||||
{{ opt.icon }} {{ opt.label }}
|
|
||||||
</option>
|
|
||||||
</select>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<!-- Settings at bottom -->
|
<!-- Settings at bottom -->
|
||||||
<div class="sidebar__footer">
|
<div class="sidebar__footer">
|
||||||
<RouterLink to="/settings" class="sidebar__link sidebar__link--footer" active-class="sidebar__link--active">
|
<RouterLink to="/settings" class="sidebar__link sidebar__link--footer" active-class="sidebar__link--active">
|
||||||
<Cog6ToothIcon class="sidebar__icon" aria-hidden="true" />
|
<Cog6ToothIcon class="sidebar__icon" aria-hidden="true" />
|
||||||
<span class="sidebar__label">Settings</span>
|
<span class="sidebar__label">Settings</span>
|
||||||
</RouterLink>
|
</RouterLink>
|
||||||
<button class="sidebar__classic-btn" @click="switchToClassic" title="Switch to Classic (Streamlit) UI">
|
|
||||||
⚡ Classic
|
|
||||||
</button>
|
|
||||||
</div>
|
</div>
|
||||||
</nav>
|
</nav>
|
||||||
|
|
||||||
|
|
@ -95,10 +76,7 @@ import {
|
||||||
} from '@heroicons/vue/24/outline'
|
} from '@heroicons/vue/24/outline'
|
||||||
|
|
||||||
import { useDigestStore } from '../stores/digest'
|
import { useDigestStore } from '../stores/digest'
|
||||||
import { useTheme, THEME_OPTIONS, type Theme } from '../composables/useTheme'
|
|
||||||
|
|
||||||
const digestStore = useDigestStore()
|
const digestStore = useDigestStore()
|
||||||
const { currentTheme, setTheme, restoreTheme } = useTheme()
|
|
||||||
|
|
||||||
// Logo click easter egg — 9.6: Click the Bird 5× rapidly
|
// Logo click easter egg — 9.6: Click the Bird 5× rapidly
|
||||||
const logoClickCount = ref(0)
|
const logoClickCount = ref(0)
|
||||||
|
|
@ -123,25 +101,8 @@ const isHackerMode = computed(() =>
|
||||||
)
|
)
|
||||||
|
|
||||||
function exitHackerMode() {
|
function exitHackerMode() {
|
||||||
|
delete document.documentElement.dataset.theme
|
||||||
localStorage.removeItem('cf-hacker-mode')
|
localStorage.removeItem('cf-hacker-mode')
|
||||||
restoreTheme()
|
|
||||||
}
|
|
||||||
|
|
||||||
const _apiBase = import.meta.env.BASE_URL.replace(/\/$/, '')
|
|
||||||
|
|
||||||
async function switchToClassic() {
|
|
||||||
// Persist preference via API so Streamlit reads streamlit from user.yaml
|
|
||||||
// and won't re-set the cookie back to vue (avoids the ?prgn_switch rerun cycle)
|
|
||||||
try {
|
|
||||||
await fetch(_apiBase + '/api/settings/ui-preference', {
|
|
||||||
method: 'POST',
|
|
||||||
headers: { 'Content-Type': 'application/json' },
|
|
||||||
body: JSON.stringify({ preference: 'streamlit' }),
|
|
||||||
})
|
|
||||||
} catch { /* non-fatal — cookie below is enough for immediate redirect */ }
|
|
||||||
document.cookie = 'prgn_ui=streamlit; path=/; SameSite=Lax'
|
|
||||||
// Navigate to root (no query params) — Caddy routes to Streamlit based on cookie
|
|
||||||
window.location.href = window.location.origin + '/'
|
|
||||||
}
|
}
|
||||||
|
|
||||||
const navLinks = computed(() => [
|
const navLinks = computed(() => [
|
||||||
|
|
@ -311,70 +272,6 @@ const mobileLinks = [
|
||||||
margin: 0;
|
margin: 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
.sidebar__classic-btn {
|
|
||||||
display: flex;
|
|
||||||
align-items: center;
|
|
||||||
width: 100%;
|
|
||||||
padding: var(--space-2) var(--space-3);
|
|
||||||
margin-top: var(--space-1);
|
|
||||||
background: none;
|
|
||||||
border: none;
|
|
||||||
border-radius: var(--radius-md);
|
|
||||||
color: var(--color-text-muted);
|
|
||||||
font-size: var(--text-xs);
|
|
||||||
font-weight: 500;
|
|
||||||
cursor: pointer;
|
|
||||||
opacity: 0.6;
|
|
||||||
transition: opacity 150ms, background 150ms;
|
|
||||||
white-space: nowrap;
|
|
||||||
}
|
|
||||||
|
|
||||||
.sidebar__classic-btn:hover {
|
|
||||||
opacity: 1;
|
|
||||||
background: var(--color-surface-alt);
|
|
||||||
}
|
|
||||||
|
|
||||||
/* ── Theme picker ───────────────────────────────────── */
|
|
||||||
.sidebar__theme {
|
|
||||||
padding: var(--space-2) var(--space-3);
|
|
||||||
border-top: 1px solid var(--color-border-light);
|
|
||||||
display: flex;
|
|
||||||
flex-direction: column;
|
|
||||||
gap: var(--space-1);
|
|
||||||
}
|
|
||||||
|
|
||||||
.sidebar__theme-label {
|
|
||||||
font-size: var(--text-xs);
|
|
||||||
color: var(--color-text-muted);
|
|
||||||
font-weight: 500;
|
|
||||||
text-transform: uppercase;
|
|
||||||
letter-spacing: 0.05em;
|
|
||||||
}
|
|
||||||
|
|
||||||
.sidebar__theme-select {
|
|
||||||
width: 100%;
|
|
||||||
padding: var(--space-2) var(--space-3);
|
|
||||||
background: var(--color-surface-alt);
|
|
||||||
border: 1px solid var(--color-border);
|
|
||||||
border-radius: var(--radius-md);
|
|
||||||
color: var(--color-text);
|
|
||||||
font-size: var(--text-sm);
|
|
||||||
font-family: var(--font-body);
|
|
||||||
cursor: pointer;
|
|
||||||
appearance: auto;
|
|
||||||
transition: border-color 150ms ease, background 150ms ease;
|
|
||||||
}
|
|
||||||
|
|
||||||
.sidebar__theme-select:hover {
|
|
||||||
border-color: var(--color-primary);
|
|
||||||
background: var(--color-surface-raised);
|
|
||||||
}
|
|
||||||
|
|
||||||
.sidebar__theme-select:focus-visible {
|
|
||||||
outline: 2px solid var(--color-accent);
|
|
||||||
outline-offset: 2px;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* ── Mobile tab bar (<1024px) ───────────────────────── */
|
/* ── Mobile tab bar (<1024px) ───────────────────────── */
|
||||||
.app-tabbar {
|
.app-tabbar {
|
||||||
display: none; /* hidden on desktop */
|
display: none; /* hidden on desktop */
|
||||||
|
|
|
||||||
|
|
@ -56,49 +56,6 @@
|
||||||
<span v-if="gaps.length > 6" class="gaps-more">+{{ gaps.length - 6 }}</span>
|
<span v-if="gaps.length > 6" class="gaps-more">+{{ gaps.length - 6 }}</span>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<!-- Resume Highlights -->
|
|
||||||
<div
|
|
||||||
v-if="resumeSkills.length || resumeDomains.length || resumeKeywords.length"
|
|
||||||
class="resume-highlights"
|
|
||||||
>
|
|
||||||
<button class="section-toggle" @click="highlightsExpanded = !highlightsExpanded">
|
|
||||||
<span class="section-toggle__label">My Resume Highlights</span>
|
|
||||||
<span class="section-toggle__icon" aria-hidden="true">{{ highlightsExpanded ? '▲' : '▼' }}</span>
|
|
||||||
</button>
|
|
||||||
<div v-if="highlightsExpanded" class="highlights-body">
|
|
||||||
<div v-if="resumeSkills.length" class="chips-group">
|
|
||||||
<span class="chips-group__label">Skills</span>
|
|
||||||
<div class="chips-wrap">
|
|
||||||
<span
|
|
||||||
v-for="s in resumeSkills" :key="s"
|
|
||||||
class="hl-chip"
|
|
||||||
:class="{ 'hl-chip--match': jobMatchSet.has(s.toLowerCase()) }"
|
|
||||||
>{{ s }}</span>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<div v-if="resumeDomains.length" class="chips-group">
|
|
||||||
<span class="chips-group__label">Domains</span>
|
|
||||||
<div class="chips-wrap">
|
|
||||||
<span
|
|
||||||
v-for="d in resumeDomains" :key="d"
|
|
||||||
class="hl-chip"
|
|
||||||
:class="{ 'hl-chip--match': jobMatchSet.has(d.toLowerCase()) }"
|
|
||||||
>{{ d }}</span>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<div v-if="resumeKeywords.length" class="chips-group">
|
|
||||||
<span class="chips-group__label">Keywords</span>
|
|
||||||
<div class="chips-wrap">
|
|
||||||
<span
|
|
||||||
v-for="k in resumeKeywords" :key="k"
|
|
||||||
class="hl-chip"
|
|
||||||
:class="{ 'hl-chip--match': jobMatchSet.has(k.toLowerCase()) }"
|
|
||||||
>{{ k }}</span>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<a v-if="job.url" :href="job.url" target="_blank" rel="noopener noreferrer" class="job-details__link">
|
<a v-if="job.url" :href="job.url" target="_blank" rel="noopener noreferrer" class="job-details__link">
|
||||||
View listing ↗
|
View listing ↗
|
||||||
</a>
|
</a>
|
||||||
|
|
@ -194,61 +151,6 @@
|
||||||
<!-- ── ATS Resume Optimizer ──────────────────────────────── -->
|
<!-- ── ATS Resume Optimizer ──────────────────────────────── -->
|
||||||
<ResumeOptimizerPanel :job-id="props.jobId" />
|
<ResumeOptimizerPanel :job-id="props.jobId" />
|
||||||
|
|
||||||
<!-- ── Application Q&A ───────────────────────────────────── -->
|
|
||||||
<div class="qa-section">
|
|
||||||
<button class="section-toggle" @click="qaExpanded = !qaExpanded">
|
|
||||||
<span class="section-toggle__label">Application Q&A</span>
|
|
||||||
<span v-if="qaItems.length" class="qa-count">{{ qaItems.length }}</span>
|
|
||||||
<span class="section-toggle__icon" aria-hidden="true">{{ qaExpanded ? '▲' : '▼' }}</span>
|
|
||||||
</button>
|
|
||||||
|
|
||||||
<div v-if="qaExpanded" class="qa-body">
|
|
||||||
<p v-if="!qaItems.length" class="qa-empty">
|
|
||||||
No questions yet — add one below to get LLM-suggested answers.
|
|
||||||
</p>
|
|
||||||
|
|
||||||
<div v-for="(item, i) in qaItems" :key="item.id" class="qa-item">
|
|
||||||
<div class="qa-item__header">
|
|
||||||
<span class="qa-item__q">{{ item.question }}</span>
|
|
||||||
<button class="qa-item__del" aria-label="Remove question" @click="removeQA(i)">✕</button>
|
|
||||||
</div>
|
|
||||||
<textarea
|
|
||||||
class="qa-item__answer"
|
|
||||||
:value="item.answer"
|
|
||||||
placeholder="Your answer…"
|
|
||||||
rows="3"
|
|
||||||
@input="updateAnswer(item.id, ($event.target as HTMLTextAreaElement).value)"
|
|
||||||
/>
|
|
||||||
<button
|
|
||||||
class="btn-ghost btn-ghost--sm qa-suggest-btn"
|
|
||||||
:disabled="suggesting === item.id"
|
|
||||||
@click="suggestAnswer(item)"
|
|
||||||
>
|
|
||||||
{{ suggesting === item.id ? '✨ Thinking…' : '✨ Suggest' }}
|
|
||||||
</button>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div class="qa-add">
|
|
||||||
<input
|
|
||||||
v-model="newQuestion"
|
|
||||||
class="qa-add__input"
|
|
||||||
placeholder="Add a question from the application…"
|
|
||||||
@keydown.enter.prevent="addQA"
|
|
||||||
/>
|
|
||||||
<button class="btn-ghost btn-ghost--sm" :disabled="!newQuestion.trim()" @click="addQA">Add</button>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<button
|
|
||||||
v-if="qaItems.length"
|
|
||||||
class="btn-ghost qa-save-btn"
|
|
||||||
:disabled="qaSaved || qaSaving"
|
|
||||||
@click="saveQA"
|
|
||||||
>
|
|
||||||
{{ qaSaving ? 'Saving…' : (qaSaved ? '✓ Saved' : 'Save All') }}
|
|
||||||
</button>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<!-- ── Bottom action bar ──────────────────────────────────── -->
|
<!-- ── Bottom action bar ──────────────────────────────────── -->
|
||||||
<div class="workspace__actions">
|
<div class="workspace__actions">
|
||||||
<button
|
<button
|
||||||
|
|
@ -283,12 +185,9 @@
|
||||||
<script setup lang="ts">
|
<script setup lang="ts">
|
||||||
import { ref, computed, watch, onMounted, onUnmounted, nextTick } from 'vue'
|
import { ref, computed, watch, onMounted, onUnmounted, nextTick } from 'vue'
|
||||||
import { useApiFetch } from '../composables/useApi'
|
import { useApiFetch } from '../composables/useApi'
|
||||||
import { useAppConfigStore } from '../stores/appConfig'
|
|
||||||
import type { Job } from '../stores/review'
|
import type { Job } from '../stores/review'
|
||||||
import ResumeOptimizerPanel from './ResumeOptimizerPanel.vue'
|
import ResumeOptimizerPanel from './ResumeOptimizerPanel.vue'
|
||||||
|
|
||||||
const config = useAppConfigStore()
|
|
||||||
|
|
||||||
const props = defineProps<{ jobId: number }>()
|
const props = defineProps<{ jobId: number }>()
|
||||||
|
|
||||||
const emit = defineEmits<{
|
const emit = defineEmits<{
|
||||||
|
|
@ -382,7 +281,6 @@ async function pollTaskStatus() {
|
||||||
|
|
||||||
async function generate() {
|
async function generate() {
|
||||||
if (generating.value) return
|
if (generating.value) return
|
||||||
if (config.isDemo) { showToast('AI features are disabled in demo mode'); return }
|
|
||||||
generating.value = true
|
generating.value = true
|
||||||
clState.value = 'queued'
|
clState.value = 'queued'
|
||||||
taskError.value = null
|
taskError.value = null
|
||||||
|
|
@ -461,96 +359,6 @@ async function rejectListing() {
|
||||||
setTimeout(() => emit('job-removed'), 1000)
|
setTimeout(() => emit('job-removed'), 1000)
|
||||||
}
|
}
|
||||||
|
|
||||||
// ─── Resume highlights ────────────────────────────────────────────────────────
|
|
||||||
|
|
||||||
const resumeSkills = ref<string[]>([])
|
|
||||||
const resumeDomains = ref<string[]>([])
|
|
||||||
const resumeKeywords = ref<string[]>([])
|
|
||||||
const highlightsExpanded = ref(false)
|
|
||||||
|
|
||||||
// Words from the resume that also appear in the job description text
|
|
||||||
const jobMatchSet = computed<Set<string>>(() => {
|
|
||||||
const desc = (job.value?.description ?? '').toLowerCase()
|
|
||||||
const all = [...resumeSkills.value, ...resumeDomains.value, ...resumeKeywords.value]
|
|
||||||
return new Set(all.filter(t => desc.includes(t.toLowerCase())))
|
|
||||||
})
|
|
||||||
|
|
||||||
async function fetchResume() {
|
|
||||||
const { data } = await useApiFetch<{ skills?: string[]; domains?: string[]; keywords?: string[] }>(
|
|
||||||
'/api/settings/resume',
|
|
||||||
)
|
|
||||||
if (!data) return
|
|
||||||
resumeSkills.value = data.skills ?? []
|
|
||||||
resumeDomains.value = data.domains ?? []
|
|
||||||
resumeKeywords.value = data.keywords ?? []
|
|
||||||
if (resumeSkills.value.length || resumeDomains.value.length || resumeKeywords.value.length) {
|
|
||||||
highlightsExpanded.value = true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// ─── Application Q&A ─────────────────────────────────────────────────────────
|
|
||||||
|
|
||||||
interface QAItem { id: string; question: string; answer: string }
|
|
||||||
|
|
||||||
const qaItems = ref<QAItem[]>([])
|
|
||||||
const qaExpanded = ref(false)
|
|
||||||
const qaSaved = ref(true)
|
|
||||||
const qaSaving = ref(false)
|
|
||||||
const newQuestion = ref('')
|
|
||||||
const suggesting = ref<string | null>(null)
|
|
||||||
|
|
||||||
function addQA() {
|
|
||||||
const q = newQuestion.value.trim()
|
|
||||||
if (!q) return
|
|
||||||
qaItems.value = [...qaItems.value, { id: crypto.randomUUID(), question: q, answer: '' }]
|
|
||||||
newQuestion.value = ''
|
|
||||||
qaSaved.value = false
|
|
||||||
qaExpanded.value = true
|
|
||||||
}
|
|
||||||
|
|
||||||
function removeQA(index: number) {
|
|
||||||
qaItems.value = qaItems.value.filter((_, i) => i !== index)
|
|
||||||
qaSaved.value = false
|
|
||||||
}
|
|
||||||
|
|
||||||
function updateAnswer(id: string, value: string) {
|
|
||||||
qaItems.value = qaItems.value.map(q => q.id === id ? { ...q, answer: value } : q)
|
|
||||||
qaSaved.value = false
|
|
||||||
}
|
|
||||||
|
|
||||||
async function saveQA() {
|
|
||||||
qaSaving.value = true
|
|
||||||
const { error } = await useApiFetch(`/api/jobs/${props.jobId}/qa`, {
|
|
||||||
method: 'PATCH',
|
|
||||||
headers: { 'Content-Type': 'application/json' },
|
|
||||||
body: JSON.stringify({ items: qaItems.value }),
|
|
||||||
})
|
|
||||||
qaSaving.value = false
|
|
||||||
if (error) { showToast('Save failed — please try again'); return }
|
|
||||||
qaSaved.value = true
|
|
||||||
}
|
|
||||||
|
|
||||||
async function suggestAnswer(item: QAItem) {
|
|
||||||
suggesting.value = item.id
|
|
||||||
const { data, error } = await useApiFetch<{ answer: string }>(`/api/jobs/${props.jobId}/qa/suggest`, {
|
|
||||||
method: 'POST',
|
|
||||||
headers: { 'Content-Type': 'application/json' },
|
|
||||||
body: JSON.stringify({ question: item.question }),
|
|
||||||
})
|
|
||||||
suggesting.value = null
|
|
||||||
if (error || !data?.answer) { showToast('Suggestion failed — check your LLM backend'); return }
|
|
||||||
qaItems.value = qaItems.value.map(q => q.id === item.id ? { ...q, answer: data.answer } : q)
|
|
||||||
qaSaved.value = false
|
|
||||||
}
|
|
||||||
|
|
||||||
async function fetchQA() {
|
|
||||||
const { data } = await useApiFetch<{ items: QAItem[] }>(`/api/jobs/${props.jobId}/qa`)
|
|
||||||
if (data?.items?.length) {
|
|
||||||
qaItems.value = data.items
|
|
||||||
qaExpanded.value = true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// ─── Toast ────────────────────────────────────────────────────────────────────
|
// ─── Toast ────────────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
const toast = ref<string | null>(null)
|
const toast = ref<string | null>(null)
|
||||||
|
|
@ -598,10 +406,6 @@ onMounted(async () => {
|
||||||
await fetchJob()
|
await fetchJob()
|
||||||
loadingJob.value = false
|
loadingJob.value = false
|
||||||
|
|
||||||
// Load resume highlights and saved Q&A in parallel
|
|
||||||
fetchResume()
|
|
||||||
fetchQA()
|
|
||||||
|
|
||||||
// Check if a generation task is already in flight
|
// Check if a generation task is already in flight
|
||||||
if (clState.value === 'none') {
|
if (clState.value === 'none') {
|
||||||
const { data } = await useApiFetch<{ status: string; stage: string | null }>(`/api/jobs/${props.jobId}/cover_letter/task`)
|
const { data } = await useApiFetch<{ status: string; stage: string | null }>(`/api/jobs/${props.jobId}/cover_letter/task`)
|
||||||
|
|
@ -1039,205 +843,6 @@ declare module '../stores/review' {
|
||||||
.toast-enter-active, .toast-leave-active { transition: opacity 250ms ease, transform 250ms ease; }
|
.toast-enter-active, .toast-leave-active { transition: opacity 250ms ease, transform 250ms ease; }
|
||||||
.toast-enter-from, .toast-leave-to { opacity: 0; transform: translateX(-50%) translateY(8px); }
|
.toast-enter-from, .toast-leave-to { opacity: 0; transform: translateX(-50%) translateY(8px); }
|
||||||
|
|
||||||
/* ── Resume Highlights ───────────────────────────────────────────────── */
|
|
||||||
|
|
||||||
.resume-highlights {
|
|
||||||
border-top: 1px solid var(--color-border-light);
|
|
||||||
padding-top: var(--space-3);
|
|
||||||
}
|
|
||||||
|
|
||||||
.section-toggle {
|
|
||||||
display: flex;
|
|
||||||
align-items: center;
|
|
||||||
gap: var(--space-2);
|
|
||||||
width: 100%;
|
|
||||||
background: none;
|
|
||||||
border: none;
|
|
||||||
cursor: pointer;
|
|
||||||
padding: 0;
|
|
||||||
text-align: left;
|
|
||||||
color: var(--color-text-muted);
|
|
||||||
}
|
|
||||||
|
|
||||||
.section-toggle__label {
|
|
||||||
font-size: var(--text-xs);
|
|
||||||
font-weight: 700;
|
|
||||||
text-transform: uppercase;
|
|
||||||
letter-spacing: 0.04em;
|
|
||||||
flex: 1;
|
|
||||||
}
|
|
||||||
|
|
||||||
.section-toggle__icon {
|
|
||||||
font-size: var(--text-xs);
|
|
||||||
}
|
|
||||||
|
|
||||||
.highlights-body {
|
|
||||||
display: flex;
|
|
||||||
flex-direction: column;
|
|
||||||
gap: var(--space-2);
|
|
||||||
margin-top: var(--space-2);
|
|
||||||
}
|
|
||||||
|
|
||||||
.chips-group { display: flex; flex-direction: column; gap: 4px; }
|
|
||||||
|
|
||||||
.chips-group__label {
|
|
||||||
font-size: 10px;
|
|
||||||
font-weight: 700;
|
|
||||||
text-transform: uppercase;
|
|
||||||
letter-spacing: 0.06em;
|
|
||||||
color: var(--color-text-muted);
|
|
||||||
opacity: 0.7;
|
|
||||||
}
|
|
||||||
|
|
||||||
.chips-wrap { display: flex; flex-wrap: wrap; gap: 4px; }
|
|
||||||
|
|
||||||
.hl-chip {
|
|
||||||
padding: 2px var(--space-2);
|
|
||||||
border-radius: 999px;
|
|
||||||
font-size: 11px;
|
|
||||||
background: var(--color-surface-alt);
|
|
||||||
border: 1px solid var(--color-border-light);
|
|
||||||
color: var(--color-text-muted);
|
|
||||||
}
|
|
||||||
|
|
||||||
.hl-chip--match {
|
|
||||||
background: rgba(39, 174, 96, 0.10);
|
|
||||||
border-color: rgba(39, 174, 96, 0.35);
|
|
||||||
color: var(--color-success);
|
|
||||||
font-weight: 600;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* ── Application Q&A ─────────────────────────────────────────────────── */
|
|
||||||
|
|
||||||
.qa-section {
|
|
||||||
background: var(--color-surface-raised);
|
|
||||||
border: 1px solid var(--color-border-light);
|
|
||||||
border-radius: var(--radius-lg);
|
|
||||||
overflow: hidden;
|
|
||||||
}
|
|
||||||
|
|
||||||
.qa-section > .section-toggle {
|
|
||||||
padding: var(--space-3) var(--space-4);
|
|
||||||
color: var(--color-text);
|
|
||||||
}
|
|
||||||
|
|
||||||
.qa-section > .section-toggle:hover { background: var(--color-surface-alt); }
|
|
||||||
|
|
||||||
.qa-count {
|
|
||||||
display: inline-flex;
|
|
||||||
align-items: center;
|
|
||||||
justify-content: center;
|
|
||||||
width: 18px;
|
|
||||||
height: 18px;
|
|
||||||
border-radius: 50%;
|
|
||||||
background: var(--app-primary-light);
|
|
||||||
color: var(--app-primary);
|
|
||||||
font-size: 10px;
|
|
||||||
font-weight: 700;
|
|
||||||
}
|
|
||||||
|
|
||||||
.qa-body {
|
|
||||||
display: flex;
|
|
||||||
flex-direction: column;
|
|
||||||
gap: var(--space-3);
|
|
||||||
padding: var(--space-4);
|
|
||||||
border-top: 1px solid var(--color-border-light);
|
|
||||||
}
|
|
||||||
|
|
||||||
.qa-empty {
|
|
||||||
font-size: var(--text-xs);
|
|
||||||
color: var(--color-text-muted);
|
|
||||||
text-align: center;
|
|
||||||
padding: var(--space-2) 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
.qa-item {
|
|
||||||
display: flex;
|
|
||||||
flex-direction: column;
|
|
||||||
gap: var(--space-1);
|
|
||||||
padding-bottom: var(--space-3);
|
|
||||||
border-bottom: 1px solid var(--color-border-light);
|
|
||||||
}
|
|
||||||
|
|
||||||
.qa-item:last-of-type { border-bottom: none; }
|
|
||||||
|
|
||||||
.qa-item__header {
|
|
||||||
display: flex;
|
|
||||||
align-items: flex-start;
|
|
||||||
justify-content: space-between;
|
|
||||||
gap: var(--space-2);
|
|
||||||
}
|
|
||||||
|
|
||||||
.qa-item__q {
|
|
||||||
font-size: var(--text-sm);
|
|
||||||
font-weight: 600;
|
|
||||||
color: var(--color-text);
|
|
||||||
line-height: 1.4;
|
|
||||||
flex: 1;
|
|
||||||
}
|
|
||||||
|
|
||||||
.qa-item__del {
|
|
||||||
background: none;
|
|
||||||
border: none;
|
|
||||||
cursor: pointer;
|
|
||||||
font-size: var(--text-xs);
|
|
||||||
color: var(--color-text-muted);
|
|
||||||
padding: 2px 4px;
|
|
||||||
flex-shrink: 0;
|
|
||||||
opacity: 0.5;
|
|
||||||
transition: opacity 150ms;
|
|
||||||
}
|
|
||||||
|
|
||||||
.qa-item__del:hover { opacity: 1; color: var(--color-error); }
|
|
||||||
|
|
||||||
.qa-item__answer {
|
|
||||||
width: 100%;
|
|
||||||
padding: var(--space-2) var(--space-3);
|
|
||||||
border: 1px solid var(--color-border-light);
|
|
||||||
border-radius: var(--radius-md);
|
|
||||||
background: var(--color-surface-alt);
|
|
||||||
color: var(--color-text);
|
|
||||||
font-family: var(--font-body);
|
|
||||||
font-size: var(--text-sm);
|
|
||||||
line-height: 1.5;
|
|
||||||
resize: vertical;
|
|
||||||
min-height: 72px;
|
|
||||||
}
|
|
||||||
|
|
||||||
.qa-item__answer:focus {
|
|
||||||
outline: none;
|
|
||||||
border-color: var(--app-primary);
|
|
||||||
}
|
|
||||||
|
|
||||||
.qa-suggest-btn { align-self: flex-end; }
|
|
||||||
|
|
||||||
.qa-add {
|
|
||||||
display: flex;
|
|
||||||
gap: var(--space-2);
|
|
||||||
align-items: center;
|
|
||||||
}
|
|
||||||
|
|
||||||
.qa-add__input {
|
|
||||||
flex: 1;
|
|
||||||
padding: var(--space-2) var(--space-3);
|
|
||||||
border: 1px solid var(--color-border-light);
|
|
||||||
border-radius: var(--radius-md);
|
|
||||||
background: var(--color-surface-alt);
|
|
||||||
color: var(--color-text);
|
|
||||||
font-family: var(--font-body);
|
|
||||||
font-size: var(--text-sm);
|
|
||||||
min-height: 36px;
|
|
||||||
}
|
|
||||||
|
|
||||||
.qa-add__input:focus {
|
|
||||||
outline: none;
|
|
||||||
border-color: var(--app-primary);
|
|
||||||
}
|
|
||||||
|
|
||||||
.qa-add__input::placeholder { color: var(--color-text-muted); }
|
|
||||||
|
|
||||||
.qa-save-btn { align-self: flex-end; }
|
|
||||||
|
|
||||||
/* ── Responsive ──────────────────────────────────────────────────────── */
|
/* ── Responsive ──────────────────────────────────────────────────────── */
|
||||||
|
|
||||||
@media (max-width: 900px) {
|
@media (max-width: 900px) {
|
||||||
|
|
|
||||||
|
|
@ -1,417 +0,0 @@
|
||||||
<template>
|
|
||||||
<Teleport to="body">
|
|
||||||
<div class="modal-backdrop" role="dialog" aria-modal="true" :aria-labelledby="`research-title-${jobId}`" @click.self="emit('close')">
|
|
||||||
<div class="modal-card">
|
|
||||||
<!-- Header -->
|
|
||||||
<div class="modal-header">
|
|
||||||
<h2 :id="`research-title-${jobId}`" class="modal-title">
|
|
||||||
🔍 {{ jobTitle }} — Company Research
|
|
||||||
</h2>
|
|
||||||
<div class="modal-header-actions">
|
|
||||||
<button v-if="state === 'ready'" class="btn-regen" @click="generate" title="Refresh research">↺ Refresh</button>
|
|
||||||
<button class="btn-close" @click="emit('close')" aria-label="Close">✕</button>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<!-- Generating state -->
|
|
||||||
<div v-if="state === 'generating'" class="modal-body modal-body--loading">
|
|
||||||
<div class="research-spinner" aria-hidden="true" />
|
|
||||||
<p class="generating-msg">{{ stage ?? 'Researching…' }}</p>
|
|
||||||
<p class="generating-sub">This takes 30–90 seconds depending on your LLM backend.</p>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<!-- Error state -->
|
|
||||||
<div v-else-if="state === 'error'" class="modal-body modal-body--error">
|
|
||||||
<p>Research generation failed.</p>
|
|
||||||
<p v-if="errorMsg" class="error-detail">{{ errorMsg }}</p>
|
|
||||||
<button class="btn-primary-sm" @click="generate">Retry</button>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<!-- Ready state -->
|
|
||||||
<div v-else-if="state === 'ready' && brief" class="modal-body">
|
|
||||||
<p v-if="brief.generated_at" class="generated-at">
|
|
||||||
Updated {{ fmtDate(brief.generated_at) }}
|
|
||||||
</p>
|
|
||||||
|
|
||||||
<section v-if="brief.company_brief" class="research-section">
|
|
||||||
<h3 class="section-title">🏢 Company</h3>
|
|
||||||
<p class="section-body">{{ brief.company_brief }}</p>
|
|
||||||
</section>
|
|
||||||
|
|
||||||
<section v-if="brief.ceo_brief" class="research-section">
|
|
||||||
<h3 class="section-title">👤 Leadership</h3>
|
|
||||||
<p class="section-body">{{ brief.ceo_brief }}</p>
|
|
||||||
</section>
|
|
||||||
|
|
||||||
<section v-if="brief.talking_points" class="research-section">
|
|
||||||
<div class="section-title-row">
|
|
||||||
<h3 class="section-title">💬 Talking Points</h3>
|
|
||||||
<button class="btn-copy" @click="copy(brief.talking_points!)" :aria-label="copied ? 'Copied!' : 'Copy talking points'">
|
|
||||||
{{ copied ? '✓ Copied' : '⎘ Copy' }}
|
|
||||||
</button>
|
|
||||||
</div>
|
|
||||||
<p class="section-body">{{ brief.talking_points }}</p>
|
|
||||||
</section>
|
|
||||||
|
|
||||||
<section v-if="brief.tech_brief" class="research-section">
|
|
||||||
<h3 class="section-title">⚙️ Tech Stack</h3>
|
|
||||||
<p class="section-body">{{ brief.tech_brief }}</p>
|
|
||||||
</section>
|
|
||||||
|
|
||||||
<section v-if="brief.funding_brief" class="research-section">
|
|
||||||
<h3 class="section-title">💰 Funding & Stage</h3>
|
|
||||||
<p class="section-body">{{ brief.funding_brief }}</p>
|
|
||||||
</section>
|
|
||||||
|
|
||||||
<section v-if="brief.red_flags" class="research-section research-section--warn">
|
|
||||||
<h3 class="section-title">⚠️ Red Flags</h3>
|
|
||||||
<p class="section-body">{{ brief.red_flags }}</p>
|
|
||||||
</section>
|
|
||||||
|
|
||||||
<section v-if="brief.accessibility_brief" class="research-section">
|
|
||||||
<h3 class="section-title">♿ Inclusion & Accessibility</h3>
|
|
||||||
<p class="section-body section-body--private">{{ brief.accessibility_brief }}</p>
|
|
||||||
<p class="private-note">For your decision-making only — not disclosed in applications.</p>
|
|
||||||
</section>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<!-- Empty state (no research, not generating) -->
|
|
||||||
<div v-else class="modal-body modal-body--empty">
|
|
||||||
<p>No research yet for this company.</p>
|
|
||||||
<button class="btn-primary-sm" @click="generate">🔍 Generate Research</button>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</Teleport>
|
|
||||||
</template>
|
|
||||||
|
|
||||||
<script setup lang="ts">
|
|
||||||
import { ref, onMounted, onUnmounted } from 'vue'
|
|
||||||
import { useApiFetch } from '../composables/useApi'
|
|
||||||
import { useAppConfigStore } from '../stores/appConfig'
|
|
||||||
import { showToast } from '../composables/useToast'
|
|
||||||
|
|
||||||
const config = useAppConfigStore()
|
|
||||||
|
|
||||||
const props = defineProps<{
|
|
||||||
jobId: number
|
|
||||||
jobTitle: string
|
|
||||||
autoGenerate?: boolean
|
|
||||||
}>()
|
|
||||||
|
|
||||||
const emit = defineEmits<{ close: [] }>()
|
|
||||||
|
|
||||||
interface ResearchBrief {
|
|
||||||
company_brief: string | null
|
|
||||||
ceo_brief: string | null
|
|
||||||
talking_points: string | null
|
|
||||||
tech_brief: string | null
|
|
||||||
funding_brief: string | null
|
|
||||||
red_flags: string | null
|
|
||||||
accessibility_brief: string | null
|
|
||||||
generated_at: string | null
|
|
||||||
}
|
|
||||||
|
|
||||||
type ModalState = 'loading' | 'generating' | 'ready' | 'empty' | 'error'
|
|
||||||
|
|
||||||
const state = ref<ModalState>('loading')
|
|
||||||
const brief = ref<ResearchBrief | null>(null)
|
|
||||||
const stage = ref<string | null>(null)
|
|
||||||
const errorMsg = ref<string | null>(null)
|
|
||||||
const copied = ref(false)
|
|
||||||
let pollId: ReturnType<typeof setInterval> | null = null
|
|
||||||
|
|
||||||
function fmtDate(iso: string) {
|
|
||||||
const d = new Date(iso)
|
|
||||||
const diffH = Math.round((Date.now() - d.getTime()) / 3600000)
|
|
||||||
if (diffH < 1) return 'just now'
|
|
||||||
if (diffH < 24) return `${diffH}h ago`
|
|
||||||
if (diffH < 168) return `${Math.floor(diffH / 24)}d ago`
|
|
||||||
return d.toLocaleDateString([], { month: 'short', day: 'numeric' })
|
|
||||||
}
|
|
||||||
|
|
||||||
async function copy(text: string) {
|
|
||||||
await navigator.clipboard.writeText(text)
|
|
||||||
copied.value = true
|
|
||||||
setTimeout(() => { copied.value = false }, 2000)
|
|
||||||
}
|
|
||||||
|
|
||||||
function stopPoll() {
|
|
||||||
if (pollId) { clearInterval(pollId); pollId = null }
|
|
||||||
}
|
|
||||||
|
|
||||||
async function pollTask() {
|
|
||||||
const { data } = await useApiFetch<{ status: string; stage: string | null; message: string | null }>(
|
|
||||||
`/api/jobs/${props.jobId}/research/task`,
|
|
||||||
)
|
|
||||||
if (!data) return
|
|
||||||
stage.value = data.stage
|
|
||||||
|
|
||||||
if (data.status === 'completed') {
|
|
||||||
stopPoll()
|
|
||||||
await load()
|
|
||||||
} else if (data.status === 'failed') {
|
|
||||||
stopPoll()
|
|
||||||
state.value = 'error'
|
|
||||||
errorMsg.value = data.message ?? 'Unknown error'
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async function load() {
|
|
||||||
const { data, error } = await useApiFetch<ResearchBrief>(`/api/jobs/${props.jobId}/research`)
|
|
||||||
if (error) {
|
|
||||||
if (error.kind === 'http' && error.status === 404) {
|
|
||||||
// Check if a task is running
|
|
||||||
const { data: task } = await useApiFetch<{ status: string; stage: string | null; message: string | null }>(
|
|
||||||
`/api/jobs/${props.jobId}/research/task`,
|
|
||||||
)
|
|
||||||
if (task && (task.status === 'queued' || task.status === 'running')) {
|
|
||||||
state.value = 'generating'
|
|
||||||
stage.value = task.stage
|
|
||||||
pollId = setInterval(pollTask, 3000)
|
|
||||||
} else if (props.autoGenerate) {
|
|
||||||
await generate()
|
|
||||||
} else {
|
|
||||||
state.value = 'empty'
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
state.value = 'error'
|
|
||||||
errorMsg.value = error.kind === 'http' ? error.detail : error.message
|
|
||||||
}
|
|
||||||
return
|
|
||||||
}
|
|
||||||
brief.value = data
|
|
||||||
state.value = 'ready'
|
|
||||||
}
|
|
||||||
|
|
||||||
async function generate() {
|
|
||||||
if (config.isDemo) { showToast('AI features are disabled in demo mode'); state.value = 'empty'; return }
|
|
||||||
state.value = 'generating'
|
|
||||||
stage.value = null
|
|
||||||
errorMsg.value = null
|
|
||||||
stopPoll()
|
|
||||||
const { error } = await useApiFetch(`/api/jobs/${props.jobId}/research/generate`, { method: 'POST' })
|
|
||||||
if (error) {
|
|
||||||
state.value = 'error'
|
|
||||||
errorMsg.value = error.kind === 'http' ? error.detail : error.message
|
|
||||||
return
|
|
||||||
}
|
|
||||||
pollId = setInterval(pollTask, 3000)
|
|
||||||
}
|
|
||||||
|
|
||||||
function onEsc(e: KeyboardEvent) {
|
|
||||||
if (e.key === 'Escape') emit('close')
|
|
||||||
}
|
|
||||||
|
|
||||||
onMounted(async () => {
|
|
||||||
document.addEventListener('keydown', onEsc)
|
|
||||||
await load()
|
|
||||||
})
|
|
||||||
|
|
||||||
onUnmounted(() => {
|
|
||||||
document.removeEventListener('keydown', onEsc)
|
|
||||||
stopPoll()
|
|
||||||
})
|
|
||||||
</script>
|
|
||||||
|
|
||||||
<style scoped>
|
|
||||||
.modal-backdrop {
|
|
||||||
position: fixed;
|
|
||||||
inset: 0;
|
|
||||||
background: rgba(0, 0, 0, 0.55);
|
|
||||||
z-index: 500;
|
|
||||||
display: flex;
|
|
||||||
align-items: flex-start;
|
|
||||||
justify-content: center;
|
|
||||||
padding: var(--space-8) var(--space-4);
|
|
||||||
overflow-y: auto;
|
|
||||||
}
|
|
||||||
|
|
||||||
.modal-card {
|
|
||||||
background: var(--color-surface-raised);
|
|
||||||
border-radius: var(--radius-lg);
|
|
||||||
box-shadow: 0 8px 40px rgba(0, 0, 0, 0.3);
|
|
||||||
width: 100%;
|
|
||||||
max-width: 620px;
|
|
||||||
overflow: hidden;
|
|
||||||
}
|
|
||||||
|
|
||||||
.modal-header {
|
|
||||||
display: flex;
|
|
||||||
align-items: flex-start;
|
|
||||||
justify-content: space-between;
|
|
||||||
gap: var(--space-3);
|
|
||||||
padding: var(--space-5) var(--space-6);
|
|
||||||
border-bottom: 1px solid var(--color-border-light);
|
|
||||||
}
|
|
||||||
|
|
||||||
.modal-title {
|
|
||||||
font-size: 1rem;
|
|
||||||
font-weight: 700;
|
|
||||||
color: var(--color-text);
|
|
||||||
margin: 0;
|
|
||||||
line-height: 1.3;
|
|
||||||
}
|
|
||||||
|
|
||||||
.modal-header-actions {
|
|
||||||
display: flex;
|
|
||||||
align-items: center;
|
|
||||||
gap: var(--space-2);
|
|
||||||
flex-shrink: 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
.btn-close {
|
|
||||||
background: none;
|
|
||||||
border: none;
|
|
||||||
cursor: pointer;
|
|
||||||
font-size: 1rem;
|
|
||||||
color: var(--color-text-muted);
|
|
||||||
padding: 2px 6px;
|
|
||||||
}
|
|
||||||
|
|
||||||
.btn-regen {
|
|
||||||
background: none;
|
|
||||||
border: 1px solid var(--color-border);
|
|
||||||
border-radius: var(--radius-sm);
|
|
||||||
cursor: pointer;
|
|
||||||
font-size: 0.78rem;
|
|
||||||
color: var(--color-text-muted);
|
|
||||||
padding: 2px 8px;
|
|
||||||
}
|
|
||||||
|
|
||||||
.modal-body {
|
|
||||||
padding: var(--space-6);
|
|
||||||
display: flex;
|
|
||||||
flex-direction: column;
|
|
||||||
gap: var(--space-5);
|
|
||||||
max-height: 70vh;
|
|
||||||
overflow-y: auto;
|
|
||||||
}
|
|
||||||
|
|
||||||
.modal-body--loading {
|
|
||||||
align-items: center;
|
|
||||||
text-align: center;
|
|
||||||
padding: var(--space-10) var(--space-6);
|
|
||||||
gap: var(--space-4);
|
|
||||||
}
|
|
||||||
|
|
||||||
.modal-body--empty {
|
|
||||||
align-items: center;
|
|
||||||
text-align: center;
|
|
||||||
padding: var(--space-10) var(--space-6);
|
|
||||||
gap: var(--space-4);
|
|
||||||
color: var(--color-text-muted);
|
|
||||||
}
|
|
||||||
|
|
||||||
.modal-body--error {
|
|
||||||
align-items: center;
|
|
||||||
text-align: center;
|
|
||||||
padding: var(--space-8) var(--space-6);
|
|
||||||
gap: var(--space-3);
|
|
||||||
color: var(--color-error);
|
|
||||||
}
|
|
||||||
|
|
||||||
.error-detail {
|
|
||||||
font-size: 0.8rem;
|
|
||||||
opacity: 0.8;
|
|
||||||
}
|
|
||||||
|
|
||||||
.research-spinner {
|
|
||||||
width: 36px;
|
|
||||||
height: 36px;
|
|
||||||
border: 3px solid var(--color-border);
|
|
||||||
border-top-color: var(--color-primary);
|
|
||||||
border-radius: 50%;
|
|
||||||
animation: spin 0.8s linear infinite;
|
|
||||||
}
|
|
||||||
|
|
||||||
@keyframes spin { to { transform: rotate(360deg); } }
|
|
||||||
|
|
||||||
.generating-msg {
|
|
||||||
font-weight: 600;
|
|
||||||
color: var(--color-text);
|
|
||||||
}
|
|
||||||
|
|
||||||
.generating-sub {
|
|
||||||
font-size: 0.8rem;
|
|
||||||
color: var(--color-text-muted);
|
|
||||||
}
|
|
||||||
|
|
||||||
.generated-at {
|
|
||||||
font-size: 0.75rem;
|
|
||||||
color: var(--color-text-muted);
|
|
||||||
margin-bottom: calc(-1 * var(--space-2));
|
|
||||||
}
|
|
||||||
|
|
||||||
.research-section {
|
|
||||||
display: flex;
|
|
||||||
flex-direction: column;
|
|
||||||
gap: var(--space-2);
|
|
||||||
padding-bottom: var(--space-4);
|
|
||||||
border-bottom: 1px solid var(--color-border-light);
|
|
||||||
}
|
|
||||||
|
|
||||||
.research-section:last-child {
|
|
||||||
border-bottom: none;
|
|
||||||
padding-bottom: 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
.research-section--warn .section-title {
|
|
||||||
color: var(--color-warning);
|
|
||||||
}
|
|
||||||
|
|
||||||
.section-title-row {
|
|
||||||
display: flex;
|
|
||||||
align-items: center;
|
|
||||||
justify-content: space-between;
|
|
||||||
}
|
|
||||||
|
|
||||||
.section-title {
|
|
||||||
font-size: 0.8rem;
|
|
||||||
font-weight: 700;
|
|
||||||
text-transform: uppercase;
|
|
||||||
letter-spacing: 0.04em;
|
|
||||||
color: var(--color-text-muted);
|
|
||||||
margin: 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
.section-body {
|
|
||||||
font-size: 0.875rem;
|
|
||||||
color: var(--color-text);
|
|
||||||
line-height: 1.6;
|
|
||||||
white-space: pre-wrap;
|
|
||||||
}
|
|
||||||
|
|
||||||
.section-body--private {
|
|
||||||
font-style: italic;
|
|
||||||
}
|
|
||||||
|
|
||||||
.private-note {
|
|
||||||
font-size: 0.7rem;
|
|
||||||
color: var(--color-text-muted);
|
|
||||||
}
|
|
||||||
|
|
||||||
.btn-copy {
|
|
||||||
background: none;
|
|
||||||
border: 1px solid var(--color-border);
|
|
||||||
border-radius: var(--radius-sm);
|
|
||||||
cursor: pointer;
|
|
||||||
font-size: 0.72rem;
|
|
||||||
color: var(--color-text-muted);
|
|
||||||
padding: 2px 8px;
|
|
||||||
transition: color 150ms, border-color 150ms;
|
|
||||||
}
|
|
||||||
|
|
||||||
.btn-copy:hover { color: var(--color-primary); border-color: var(--color-primary); }
|
|
||||||
|
|
||||||
.btn-primary-sm {
|
|
||||||
background: var(--color-primary);
|
|
||||||
color: #fff;
|
|
||||||
border: none;
|
|
||||||
border-radius: var(--radius-md);
|
|
||||||
padding: var(--space-2) var(--space-5);
|
|
||||||
font-size: 0.875rem;
|
|
||||||
font-weight: 600;
|
|
||||||
cursor: pointer;
|
|
||||||
}
|
|
||||||
</style>
|
|
||||||
|
|
@ -13,7 +13,6 @@ const emit = defineEmits<{
|
||||||
move: [jobId: number, preSelectedStage?: PipelineStage]
|
move: [jobId: number, preSelectedStage?: PipelineStage]
|
||||||
prep: [jobId: number]
|
prep: [jobId: number]
|
||||||
survey: [jobId: number]
|
survey: [jobId: number]
|
||||||
research: [jobId: number]
|
|
||||||
}>()
|
}>()
|
||||||
|
|
||||||
// Signal state
|
// Signal state
|
||||||
|
|
@ -181,7 +180,6 @@ const columnColor = computed(() => {
|
||||||
</div>
|
</div>
|
||||||
<footer class="card-footer">
|
<footer class="card-footer">
|
||||||
<button class="card-action" @click.stop="emit('move', job.id)">Move to… ›</button>
|
<button class="card-action" @click.stop="emit('move', job.id)">Move to… ›</button>
|
||||||
<button v-if="['phone_screen', 'interviewing', 'offer'].includes(job.status)" class="card-action" @click.stop="emit('research', job.id)">🔍 Research</button>
|
|
||||||
<button v-if="['phone_screen', 'interviewing', 'offer'].includes(job.status)" class="card-action" @click.stop="emit('prep', job.id)">Prep →</button>
|
<button v-if="['phone_screen', 'interviewing', 'offer'].includes(job.status)" class="card-action" @click.stop="emit('prep', job.id)">Prep →</button>
|
||||||
<button
|
<button
|
||||||
v-if="['survey', 'phone_screen', 'interviewing', 'offer'].includes(job.status)"
|
v-if="['survey', 'phone_screen', 'interviewing', 'offer'].includes(job.status)"
|
||||||
|
|
|
||||||
|
|
@ -2,15 +2,12 @@ export type ApiError =
|
||||||
| { kind: 'network'; message: string }
|
| { kind: 'network'; message: string }
|
||||||
| { kind: 'http'; status: number; detail: string }
|
| { kind: 'http'; status: number; detail: string }
|
||||||
|
|
||||||
// Strip trailing slash so '/peregrine/' + '/api/...' → '/peregrine/api/...'
|
|
||||||
const _apiBase = import.meta.env.BASE_URL.replace(/\/$/, '')
|
|
||||||
|
|
||||||
export async function useApiFetch<T>(
|
export async function useApiFetch<T>(
|
||||||
url: string,
|
url: string,
|
||||||
opts?: RequestInit,
|
opts?: RequestInit,
|
||||||
): Promise<{ data: T | null; error: ApiError | null }> {
|
): Promise<{ data: T | null; error: ApiError | null }> {
|
||||||
try {
|
try {
|
||||||
const res = await fetch(_apiBase + url, opts)
|
const res = await fetch(url, opts)
|
||||||
if (!res.ok) {
|
if (!res.ok) {
|
||||||
const detail = await res.text().catch(() => '')
|
const detail = await res.text().catch(() => '')
|
||||||
return { data: null, error: { kind: 'http', status: res.status, detail } }
|
return { data: null, error: { kind: 'http', status: res.status, detail } }
|
||||||
|
|
@ -34,7 +31,7 @@ export function useApiSSE(
|
||||||
onComplete?: () => void,
|
onComplete?: () => void,
|
||||||
onError?: (e: Event) => void,
|
onError?: (e: Event) => void,
|
||||||
): () => void {
|
): () => void {
|
||||||
const es = new EventSource(_apiBase + url)
|
const es = new EventSource(url)
|
||||||
es.onmessage = (e) => {
|
es.onmessage = (e) => {
|
||||||
try {
|
try {
|
||||||
const data = JSON.parse(e.data) as Record<string, unknown>
|
const data = JSON.parse(e.data) as Record<string, unknown>
|
||||||
|
|
|
||||||
|
|
@ -1,5 +1,4 @@
|
||||||
import { onMounted, onUnmounted } from 'vue'
|
import { onMounted, onUnmounted } from 'vue'
|
||||||
import { useTheme } from './useTheme'
|
|
||||||
|
|
||||||
const KONAMI = ['ArrowUp','ArrowUp','ArrowDown','ArrowDown','ArrowLeft','ArrowRight','ArrowLeft','ArrowRight','b','a']
|
const KONAMI = ['ArrowUp','ArrowUp','ArrowDown','ArrowDown','ArrowLeft','ArrowRight','ArrowLeft','ArrowRight','b','a']
|
||||||
const KONAMI_AB = ['ArrowUp','ArrowUp','ArrowDown','ArrowDown','ArrowLeft','ArrowRight','ArrowLeft','ArrowRight','a','b']
|
const KONAMI_AB = ['ArrowUp','ArrowUp','ArrowDown','ArrowDown','ArrowLeft','ArrowRight','ArrowLeft','ArrowRight','a','b']
|
||||||
|
|
@ -32,10 +31,8 @@ export function useHackerMode() {
|
||||||
function toggle() {
|
function toggle() {
|
||||||
const root = document.documentElement
|
const root = document.documentElement
|
||||||
if (root.dataset.theme === 'hacker') {
|
if (root.dataset.theme === 'hacker') {
|
||||||
|
delete root.dataset.theme
|
||||||
localStorage.removeItem('cf-hacker-mode')
|
localStorage.removeItem('cf-hacker-mode')
|
||||||
// Let useTheme restore the user's chosen theme rather than just deleting data-theme
|
|
||||||
const { restoreTheme } = useTheme()
|
|
||||||
restoreTheme()
|
|
||||||
} else {
|
} else {
|
||||||
root.dataset.theme = 'hacker'
|
root.dataset.theme = 'hacker'
|
||||||
localStorage.setItem('cf-hacker-mode', 'true')
|
localStorage.setItem('cf-hacker-mode', 'true')
|
||||||
|
|
|
||||||
|
|
@ -1,82 +0,0 @@
|
||||||
/**
|
|
||||||
* useTheme — manual theme picker for Peregrine.
|
|
||||||
*
|
|
||||||
* Themes: 'auto' | 'light' | 'dark' | 'solarized-dark' | 'solarized-light' | 'colorblind'
|
|
||||||
* Persisted in localStorage under 'cf-theme'.
|
|
||||||
* Applied via document.documentElement.dataset.theme.
|
|
||||||
* 'auto' removes the attribute so the @media prefers-color-scheme rule takes effect.
|
|
||||||
*
|
|
||||||
* Hacker mode sits on top of this system — toggling it off calls restoreTheme()
|
|
||||||
* so the user's chosen theme is reinstated rather than dropping back to auto.
|
|
||||||
*/
|
|
||||||
|
|
||||||
import { ref, readonly } from 'vue'
|
|
||||||
import { useApiFetch } from './useApi'
|
|
||||||
|
|
||||||
export type Theme = 'auto' | 'light' | 'dark' | 'solarized-dark' | 'solarized-light' | 'colorblind'
|
|
||||||
|
|
||||||
const STORAGE_KEY = 'cf-theme'
|
|
||||||
const HACKER_KEY = 'cf-hacker-mode'
|
|
||||||
|
|
||||||
export const THEME_OPTIONS: { value: Theme; label: string; icon: string }[] = [
|
|
||||||
{ value: 'auto', label: 'Auto', icon: '⬡' },
|
|
||||||
{ value: 'light', label: 'Light', icon: '☀' },
|
|
||||||
{ value: 'dark', label: 'Dark', icon: '🌙' },
|
|
||||||
{ value: 'solarized-light', label: 'Solarized Light', icon: '🌤' },
|
|
||||||
{ value: 'solarized-dark', label: 'Solarized Dark', icon: '🌃' },
|
|
||||||
{ value: 'colorblind', label: 'Colorblind Safe', icon: '♿' },
|
|
||||||
]
|
|
||||||
|
|
||||||
// Module-level singleton so all consumers share the same reactive state.
|
|
||||||
const _current = ref<Theme>(_load())
|
|
||||||
|
|
||||||
function _load(): Theme {
|
|
||||||
return (localStorage.getItem(STORAGE_KEY) as Theme | null) ?? 'auto'
|
|
||||||
}
|
|
||||||
|
|
||||||
function _apply(theme: Theme) {
|
|
||||||
const root = document.documentElement
|
|
||||||
if (theme === 'auto') {
|
|
||||||
delete root.dataset.theme
|
|
||||||
} else {
|
|
||||||
root.dataset.theme = theme
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export function useTheme() {
|
|
||||||
function setTheme(theme: Theme) {
|
|
||||||
_current.value = theme
|
|
||||||
localStorage.setItem(STORAGE_KEY, theme)
|
|
||||||
_apply(theme)
|
|
||||||
// Best-effort persist to server; ignore failures (works offline / local LLM)
|
|
||||||
useApiFetch('/api/settings/theme', {
|
|
||||||
method: 'POST',
|
|
||||||
headers: { 'Content-Type': 'application/json' },
|
|
||||||
body: JSON.stringify({ theme }),
|
|
||||||
}).catch(() => {})
|
|
||||||
}
|
|
||||||
|
|
||||||
/** Restore user's chosen theme — called when hacker mode or other overlays exit. */
|
|
||||||
function restoreTheme() {
|
|
||||||
// Hacker mode clears itself; we only restore if it's actually off.
|
|
||||||
if (localStorage.getItem(HACKER_KEY) === 'true') return
|
|
||||||
_apply(_current.value)
|
|
||||||
}
|
|
||||||
|
|
||||||
/** Call once at app boot to apply persisted theme before first render. */
|
|
||||||
function initTheme() {
|
|
||||||
// Hacker mode takes priority on restore.
|
|
||||||
if (localStorage.getItem(HACKER_KEY) === 'true') {
|
|
||||||
document.documentElement.dataset.theme = 'hacker'
|
|
||||||
} else {
|
|
||||||
_apply(_current.value)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return {
|
|
||||||
currentTheme: readonly(_current),
|
|
||||||
setTheme,
|
|
||||||
restoreTheme,
|
|
||||||
initTheme,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
@ -1,20 +0,0 @@
|
||||||
/**
|
|
||||||
* useToast — global reactive toast singleton.
|
|
||||||
*
|
|
||||||
* Module-level ref shared across all importers; no Pinia needed for a single
|
|
||||||
* ephemeral string. Call showToast() from anywhere; App.vue renders it.
|
|
||||||
*/
|
|
||||||
import { ref } from 'vue'
|
|
||||||
|
|
||||||
const _message = ref<string | null>(null)
|
|
||||||
let _timer = 0
|
|
||||||
|
|
||||||
export function showToast(msg: string, duration = 3500): void {
|
|
||||||
clearTimeout(_timer)
|
|
||||||
_message.value = msg
|
|
||||||
_timer = window.setTimeout(() => { _message.value = null }, duration)
|
|
||||||
}
|
|
||||||
|
|
||||||
export function useToast() {
|
|
||||||
return { message: _message }
|
|
||||||
}
|
|
||||||
|
|
@ -1,10 +1,9 @@
|
||||||
import { createRouter, createWebHistory } from 'vue-router'
|
import { createRouter, createWebHistory } from 'vue-router'
|
||||||
import { useAppConfigStore } from '../stores/appConfig'
|
import { useAppConfigStore } from '../stores/appConfig'
|
||||||
import { settingsGuard } from './settingsGuard'
|
import { settingsGuard } from './settingsGuard'
|
||||||
import { wizardGuard } from './wizardGuard'
|
|
||||||
|
|
||||||
export const router = createRouter({
|
export const router = createRouter({
|
||||||
history: createWebHistory(import.meta.env.BASE_URL),
|
history: createWebHistory(),
|
||||||
routes: [
|
routes: [
|
||||||
{ path: '/', component: () => import('../views/HomeView.vue') },
|
{ path: '/', component: () => import('../views/HomeView.vue') },
|
||||||
{ path: '/review', component: () => import('../views/JobReviewView.vue') },
|
{ path: '/review', component: () => import('../views/JobReviewView.vue') },
|
||||||
|
|
@ -32,43 +31,14 @@ export const router = createRouter({
|
||||||
{ path: 'developer', component: () => import('../views/settings/DeveloperView.vue') },
|
{ path: 'developer', component: () => import('../views/settings/DeveloperView.vue') },
|
||||||
],
|
],
|
||||||
},
|
},
|
||||||
// Onboarding wizard — full-page layout, no AppNav
|
|
||||||
{
|
|
||||||
path: '/setup',
|
|
||||||
component: () => import('../views/wizard/WizardLayout.vue'),
|
|
||||||
children: [
|
|
||||||
{ path: '', redirect: '/setup/hardware' },
|
|
||||||
{ path: 'hardware', component: () => import('../views/wizard/WizardHardwareStep.vue') },
|
|
||||||
{ path: 'tier', component: () => import('../views/wizard/WizardTierStep.vue') },
|
|
||||||
{ path: 'resume', component: () => import('../views/wizard/WizardResumeStep.vue') },
|
|
||||||
{ path: 'identity', component: () => import('../views/wizard/WizardIdentityStep.vue') },
|
|
||||||
{ path: 'inference', component: () => import('../views/wizard/WizardInferenceStep.vue') },
|
|
||||||
{ path: 'search', component: () => import('../views/wizard/WizardSearchStep.vue') },
|
|
||||||
{ path: 'integrations', component: () => import('../views/wizard/WizardIntegrationsStep.vue') },
|
|
||||||
],
|
|
||||||
},
|
|
||||||
// Catch-all — FastAPI serves index.html for all unknown routes (SPA mode)
|
// Catch-all — FastAPI serves index.html for all unknown routes (SPA mode)
|
||||||
{ path: '/:pathMatch(.*)*', redirect: '/' },
|
{ path: '/:pathMatch(.*)*', redirect: '/' },
|
||||||
],
|
],
|
||||||
})
|
})
|
||||||
|
|
||||||
router.beforeEach(async (to, _from, next) => {
|
router.beforeEach(async (to, _from, next) => {
|
||||||
|
if (!to.path.startsWith('/settings/')) return next()
|
||||||
const config = useAppConfigStore()
|
const config = useAppConfigStore()
|
||||||
if (!config.loaded) await config.load()
|
if (!config.loaded) await config.load()
|
||||||
|
settingsGuard(to, _from, next)
|
||||||
// Demo mode: pre-seeded data, no wizard needed — route freely
|
|
||||||
if (config.isDemo) return next()
|
|
||||||
|
|
||||||
// Wizard gate runs first for every route except /setup itself
|
|
||||||
if (!to.path.startsWith('/setup') && !config.wizardComplete) {
|
|
||||||
return next('/setup')
|
|
||||||
}
|
|
||||||
|
|
||||||
// /setup routes: let wizardGuard handle complete→redirect-to-home logic
|
|
||||||
if (to.path.startsWith('/setup')) return wizardGuard(to, _from, next)
|
|
||||||
|
|
||||||
// Settings tier-gating (runs only when wizard is complete)
|
|
||||||
if (to.path.startsWith('/settings/')) return settingsGuard(to, _from, next)
|
|
||||||
|
|
||||||
next()
|
|
||||||
})
|
})
|
||||||
|
|
|
||||||
|
|
@ -1,35 +0,0 @@
|
||||||
import { useAppConfigStore } from '../stores/appConfig'
|
|
||||||
import { useWizardStore } from '../stores/wizard'
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Gate the entire app behind /setup until wizard_complete is true.
|
|
||||||
*
|
|
||||||
* Rules:
|
|
||||||
* - Any non-/setup route while wizard is incomplete → redirect to /setup
|
|
||||||
* - /setup/* while wizard is complete → redirect to /
|
|
||||||
* - /setup with no step suffix → redirect to the current step route
|
|
||||||
*
|
|
||||||
* Must run AFTER appConfig.load() has resolved (called from router.beforeEach).
|
|
||||||
*/
|
|
||||||
export async function wizardGuard(
|
|
||||||
to: { path: string },
|
|
||||||
_from: unknown,
|
|
||||||
next: (to?: string | { path: string }) => void,
|
|
||||||
): Promise<void> {
|
|
||||||
const config = useAppConfigStore()
|
|
||||||
|
|
||||||
// Ensure config is loaded before inspecting wizardComplete
|
|
||||||
if (!config.loaded) await config.load()
|
|
||||||
|
|
||||||
const onSetup = to.path.startsWith('/setup')
|
|
||||||
const complete = config.wizardComplete
|
|
||||||
|
|
||||||
// Wizard done — keep user out of /setup
|
|
||||||
if (complete && onSetup) return next('/')
|
|
||||||
|
|
||||||
// Wizard not done — redirect to setup
|
|
||||||
if (!complete && !onSetup) return next('/setup')
|
|
||||||
|
|
||||||
// On /setup exactly (no step) — delegate to WizardLayout which loads status
|
|
||||||
next()
|
|
||||||
}
|
|
||||||
|
|
@ -11,25 +11,20 @@ export const useAppConfigStore = defineStore('appConfig', () => {
|
||||||
const tier = ref<Tier>('free')
|
const tier = ref<Tier>('free')
|
||||||
const contractedClient = ref(false)
|
const contractedClient = ref(false)
|
||||||
const inferenceProfile = ref<InferenceProfile>('cpu')
|
const inferenceProfile = ref<InferenceProfile>('cpu')
|
||||||
const isDemo = ref(false)
|
|
||||||
const wizardComplete = ref(true) // optimistic default — guard corrects on load
|
|
||||||
const loaded = ref(false)
|
const loaded = ref(false)
|
||||||
const devTierOverride = ref(localStorage.getItem('dev_tier_override') ?? '')
|
const devTierOverride = ref(localStorage.getItem('dev_tier_override') ?? '')
|
||||||
|
|
||||||
async function load() {
|
async function load() {
|
||||||
const { data } = await useApiFetch<{
|
const { data } = await useApiFetch<{
|
||||||
isCloud: boolean; isDemo: boolean; isDevMode: boolean; tier: Tier
|
isCloud: boolean; isDevMode: boolean; tier: Tier
|
||||||
contractedClient: boolean; inferenceProfile: InferenceProfile
|
contractedClient: boolean; inferenceProfile: InferenceProfile
|
||||||
wizardComplete: boolean
|
|
||||||
}>('/api/config/app')
|
}>('/api/config/app')
|
||||||
if (!data) return
|
if (!data) return
|
||||||
isCloud.value = data.isCloud
|
isCloud.value = data.isCloud
|
||||||
isDemo.value = data.isDemo ?? false
|
|
||||||
isDevMode.value = data.isDevMode
|
isDevMode.value = data.isDevMode
|
||||||
tier.value = data.tier
|
tier.value = data.tier
|
||||||
contractedClient.value = data.contractedClient
|
contractedClient.value = data.contractedClient
|
||||||
inferenceProfile.value = data.inferenceProfile
|
inferenceProfile.value = data.inferenceProfile
|
||||||
wizardComplete.value = data.wizardComplete ?? true
|
|
||||||
loaded.value = true
|
loaded.value = true
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -43,5 +38,5 @@ export const useAppConfigStore = defineStore('appConfig', () => {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return { isCloud, isDemo, isDevMode, wizardComplete, tier, contractedClient, inferenceProfile, loaded, load, devTierOverride, setDevTierOverride }
|
return { isCloud, isDevMode, tier, contractedClient, inferenceProfile, loaded, load, devTierOverride, setDevTierOverride }
|
||||||
})
|
})
|
||||||
|
|
|
||||||
|
|
@ -1,14 +1,6 @@
|
||||||
import { ref } from 'vue'
|
import { ref } from 'vue'
|
||||||
import { defineStore } from 'pinia'
|
import { defineStore } from 'pinia'
|
||||||
import { useApiFetch } from '../../composables/useApi'
|
import { useApiFetch } from '../../composables/useApi'
|
||||||
import { useAppConfigStore } from '../appConfig'
|
|
||||||
import { showToast } from '../../composables/useToast'
|
|
||||||
|
|
||||||
export interface TrainingPair {
|
|
||||||
index: number
|
|
||||||
instruction: string
|
|
||||||
source_file: string
|
|
||||||
}
|
|
||||||
|
|
||||||
export const useFineTuneStore = defineStore('settings/fineTune', () => {
|
export const useFineTuneStore = defineStore('settings/fineTune', () => {
|
||||||
const step = ref(1)
|
const step = ref(1)
|
||||||
|
|
@ -18,8 +10,6 @@ export const useFineTuneStore = defineStore('settings/fineTune', () => {
|
||||||
const quotaRemaining = ref<number | null>(null)
|
const quotaRemaining = ref<number | null>(null)
|
||||||
const uploading = ref(false)
|
const uploading = ref(false)
|
||||||
const loading = ref(false)
|
const loading = ref(false)
|
||||||
const pairs = ref<TrainingPair[]>([])
|
|
||||||
const pairsLoading = ref(false)
|
|
||||||
let _pollTimer: ReturnType<typeof setInterval> | null = null
|
let _pollTimer: ReturnType<typeof setInterval> | null = null
|
||||||
|
|
||||||
function resetStep() { step.value = 1 }
|
function resetStep() { step.value = 1 }
|
||||||
|
|
@ -43,31 +33,10 @@ export const useFineTuneStore = defineStore('settings/fineTune', () => {
|
||||||
}
|
}
|
||||||
|
|
||||||
async function submitJob() {
|
async function submitJob() {
|
||||||
if (useAppConfigStore().isDemo) { showToast('AI features are disabled in demo mode'); return }
|
|
||||||
const { data, error } = await useApiFetch<{ job_id: string }>('/api/settings/fine-tune/submit', { method: 'POST' })
|
const { data, error } = await useApiFetch<{ job_id: string }>('/api/settings/fine-tune/submit', { method: 'POST' })
|
||||||
if (!error && data) { inFlightJob.value = true; jobStatus.value = 'queued' }
|
if (!error && data) { inFlightJob.value = true; jobStatus.value = 'queued' }
|
||||||
}
|
}
|
||||||
|
|
||||||
async function loadPairs() {
|
|
||||||
pairsLoading.value = true
|
|
||||||
const { data } = await useApiFetch<{ pairs: TrainingPair[]; total: number }>('/api/settings/fine-tune/pairs')
|
|
||||||
pairsLoading.value = false
|
|
||||||
if (data) {
|
|
||||||
pairs.value = data.pairs
|
|
||||||
pairsCount.value = data.total
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async function deletePair(index: number) {
|
|
||||||
const { data } = await useApiFetch<{ ok: boolean; remaining: number }>(
|
|
||||||
`/api/settings/fine-tune/pairs/${index}`, { method: 'DELETE' }
|
|
||||||
)
|
|
||||||
if (data?.ok) {
|
|
||||||
pairs.value = pairs.value.filter(p => p.index !== index).map((p, i) => ({ ...p, index: i }))
|
|
||||||
pairsCount.value = data.remaining
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return {
|
return {
|
||||||
step,
|
step,
|
||||||
inFlightJob,
|
inFlightJob,
|
||||||
|
|
@ -76,14 +45,10 @@ export const useFineTuneStore = defineStore('settings/fineTune', () => {
|
||||||
quotaRemaining,
|
quotaRemaining,
|
||||||
uploading,
|
uploading,
|
||||||
loading,
|
loading,
|
||||||
pairs,
|
|
||||||
pairsLoading,
|
|
||||||
resetStep,
|
resetStep,
|
||||||
loadStatus,
|
loadStatus,
|
||||||
startPolling,
|
startPolling,
|
||||||
stopPolling,
|
stopPolling,
|
||||||
submitJob,
|
submitJob,
|
||||||
loadPairs,
|
|
||||||
deletePair,
|
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
|
|
|
||||||
|
|
@ -18,7 +18,6 @@ export const useSearchStore = defineStore('settings/search', () => {
|
||||||
|
|
||||||
const titleSuggestions = ref<string[]>([])
|
const titleSuggestions = ref<string[]>([])
|
||||||
const locationSuggestions = ref<string[]>([])
|
const locationSuggestions = ref<string[]>([])
|
||||||
const excludeSuggestions = ref<string[]>([])
|
|
||||||
|
|
||||||
const loading = ref(false)
|
const loading = ref(false)
|
||||||
const saving = ref(false)
|
const saving = ref(false)
|
||||||
|
|
@ -100,24 +99,10 @@ export const useSearchStore = defineStore('settings/search', () => {
|
||||||
arr.value = arr.value.filter(v => v !== value)
|
arr.value = arr.value.filter(v => v !== value)
|
||||||
}
|
}
|
||||||
|
|
||||||
async function suggestExcludeKeywords() {
|
function acceptSuggestion(type: 'title' | 'location', value: string) {
|
||||||
const { data } = await useApiFetch<{ suggestions: string[] }>('/api/settings/search/suggest', {
|
|
||||||
method: 'POST',
|
|
||||||
headers: { 'Content-Type': 'application/json' },
|
|
||||||
body: JSON.stringify({ type: 'exclude_keywords', current: exclude_keywords.value }),
|
|
||||||
})
|
|
||||||
if (data?.suggestions) {
|
|
||||||
excludeSuggestions.value = data.suggestions.filter(s => !exclude_keywords.value.includes(s))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function acceptSuggestion(type: 'title' | 'location' | 'exclude', value: string) {
|
|
||||||
if (type === 'title') {
|
if (type === 'title') {
|
||||||
if (!job_titles.value.includes(value)) job_titles.value = [...job_titles.value, value]
|
if (!job_titles.value.includes(value)) job_titles.value = [...job_titles.value, value]
|
||||||
titleSuggestions.value = titleSuggestions.value.filter(s => s !== value)
|
titleSuggestions.value = titleSuggestions.value.filter(s => s !== value)
|
||||||
} else if (type === 'exclude') {
|
|
||||||
if (!exclude_keywords.value.includes(value)) exclude_keywords.value = [...exclude_keywords.value, value]
|
|
||||||
excludeSuggestions.value = excludeSuggestions.value.filter(s => s !== value)
|
|
||||||
} else {
|
} else {
|
||||||
if (!locations.value.includes(value)) locations.value = [...locations.value, value]
|
if (!locations.value.includes(value)) locations.value = [...locations.value, value]
|
||||||
locationSuggestions.value = locationSuggestions.value.filter(s => s !== value)
|
locationSuggestions.value = locationSuggestions.value.filter(s => s !== value)
|
||||||
|
|
@ -133,9 +118,8 @@ export const useSearchStore = defineStore('settings/search', () => {
|
||||||
return {
|
return {
|
||||||
remote_preference, job_titles, locations, exclude_keywords, job_boards,
|
remote_preference, job_titles, locations, exclude_keywords, job_boards,
|
||||||
custom_board_urls, blocklist_companies, blocklist_industries, blocklist_locations,
|
custom_board_urls, blocklist_companies, blocklist_industries, blocklist_locations,
|
||||||
titleSuggestions, locationSuggestions, excludeSuggestions,
|
titleSuggestions, locationSuggestions,
|
||||||
loading, saving, saveError, loadError,
|
loading, saving, saveError, loadError,
|
||||||
load, save, suggestTitles, suggestLocations, suggestExcludeKeywords,
|
load, save, suggestTitles, suggestLocations, addTag, removeTag, acceptSuggestion, toggleBoard,
|
||||||
addTag, removeTag, acceptSuggestion, toggleBoard,
|
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
|
|
|
||||||
|
|
@ -1,279 +0,0 @@
|
||||||
import { ref, computed } from 'vue'
|
|
||||||
import { defineStore } from 'pinia'
|
|
||||||
import { useApiFetch } from '../composables/useApi'
|
|
||||||
|
|
||||||
export type WizardProfile = 'remote' | 'cpu' | 'single-gpu' | 'dual-gpu'
|
|
||||||
export type WizardTier = 'free' | 'paid' | 'premium'
|
|
||||||
|
|
||||||
export interface WorkExperience {
|
|
||||||
title: string
|
|
||||||
company: string
|
|
||||||
start_date: string
|
|
||||||
end_date: string
|
|
||||||
bullets: string[]
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface WizardHardwareData {
|
|
||||||
gpus: string[]
|
|
||||||
suggestedProfile: WizardProfile
|
|
||||||
selectedProfile: WizardProfile
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface WizardSearchData {
|
|
||||||
titles: string[]
|
|
||||||
locations: string[]
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface WizardIdentityData {
|
|
||||||
name: string
|
|
||||||
email: string
|
|
||||||
phone: string
|
|
||||||
linkedin: string
|
|
||||||
careerSummary: string
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface WizardInferenceData {
|
|
||||||
anthropicKey: string
|
|
||||||
openaiUrl: string
|
|
||||||
openaiKey: string
|
|
||||||
ollamaHost: string
|
|
||||||
ollamaPort: number
|
|
||||||
services: Record<string, string | number>
|
|
||||||
confirmed: boolean
|
|
||||||
testMessage: string
|
|
||||||
}
|
|
||||||
|
|
||||||
// Total mandatory steps (integrations step 7 is optional/skip-able)
|
|
||||||
export const WIZARD_STEPS = 6
|
|
||||||
export const STEP_LABELS = ['Hardware', 'Tier', 'Resume', 'Identity', 'Inference', 'Search', 'Integrations']
|
|
||||||
export const STEP_ROUTES = [
|
|
||||||
'/setup/hardware',
|
|
||||||
'/setup/tier',
|
|
||||||
'/setup/resume',
|
|
||||||
'/setup/identity',
|
|
||||||
'/setup/inference',
|
|
||||||
'/setup/search',
|
|
||||||
'/setup/integrations',
|
|
||||||
]
|
|
||||||
|
|
||||||
export const useWizardStore = defineStore('wizard', () => {
|
|
||||||
// ── Navigation state ──────────────────────────────────────────────────────
|
|
||||||
const currentStep = ref(1) // 1-based; 7 = integrations (optional)
|
|
||||||
const loading = ref(false)
|
|
||||||
const saving = ref(false)
|
|
||||||
const errors = ref<string[]>([])
|
|
||||||
|
|
||||||
// ── Step data ─────────────────────────────────────────────────────────────
|
|
||||||
const hardware = ref<WizardHardwareData>({
|
|
||||||
gpus: [],
|
|
||||||
suggestedProfile: 'remote',
|
|
||||||
selectedProfile: 'remote',
|
|
||||||
})
|
|
||||||
|
|
||||||
const tier = ref<WizardTier>('free')
|
|
||||||
|
|
||||||
const resume = ref<{ experience: WorkExperience[]; parsedData: Record<string, unknown> | null }>({
|
|
||||||
experience: [],
|
|
||||||
parsedData: null,
|
|
||||||
})
|
|
||||||
|
|
||||||
const identity = ref<WizardIdentityData>({
|
|
||||||
name: '',
|
|
||||||
email: '',
|
|
||||||
phone: '',
|
|
||||||
linkedin: '',
|
|
||||||
careerSummary: '',
|
|
||||||
})
|
|
||||||
|
|
||||||
const inference = ref<WizardInferenceData>({
|
|
||||||
anthropicKey: '',
|
|
||||||
openaiUrl: '',
|
|
||||||
openaiKey: '',
|
|
||||||
ollamaHost: 'localhost',
|
|
||||||
ollamaPort: 11434,
|
|
||||||
services: {},
|
|
||||||
confirmed: false,
|
|
||||||
testMessage: '',
|
|
||||||
})
|
|
||||||
|
|
||||||
const search = ref<WizardSearchData>({
|
|
||||||
titles: [],
|
|
||||||
locations: [],
|
|
||||||
})
|
|
||||||
|
|
||||||
// ── Computed ──────────────────────────────────────────────────────────────
|
|
||||||
const progressFraction = computed(() =>
|
|
||||||
Math.min((currentStep.value - 1) / WIZARD_STEPS, 1),
|
|
||||||
)
|
|
||||||
|
|
||||||
const stepLabel = computed(() =>
|
|
||||||
currentStep.value <= WIZARD_STEPS
|
|
||||||
? `Step ${currentStep.value} of ${WIZARD_STEPS}`
|
|
||||||
: 'Almost done!',
|
|
||||||
)
|
|
||||||
|
|
||||||
const routeForStep = (step: number) => STEP_ROUTES[step - 1] ?? '/setup/hardware'
|
|
||||||
|
|
||||||
// ── Actions ───────────────────────────────────────────────────────────────
|
|
||||||
|
|
||||||
/** Load wizard status from server and hydrate store. Returns the route to navigate to. */
|
|
||||||
async function loadStatus(isCloud: boolean): Promise<string> {
|
|
||||||
loading.value = true
|
|
||||||
errors.value = []
|
|
||||||
try {
|
|
||||||
const { data } = await useApiFetch<{
|
|
||||||
wizard_complete: boolean
|
|
||||||
wizard_step: number
|
|
||||||
saved_data: {
|
|
||||||
inference_profile?: string
|
|
||||||
tier?: string
|
|
||||||
name?: string
|
|
||||||
email?: string
|
|
||||||
phone?: string
|
|
||||||
linkedin?: string
|
|
||||||
career_summary?: string
|
|
||||||
services?: Record<string, string | number>
|
|
||||||
}
|
|
||||||
}>('/api/wizard/status')
|
|
||||||
|
|
||||||
if (!data) return '/setup/hardware'
|
|
||||||
|
|
||||||
const saved = data.saved_data
|
|
||||||
|
|
||||||
if (saved.inference_profile)
|
|
||||||
hardware.value.selectedProfile = saved.inference_profile as WizardProfile
|
|
||||||
if (saved.tier)
|
|
||||||
tier.value = saved.tier as WizardTier
|
|
||||||
if (saved.name) identity.value.name = saved.name
|
|
||||||
if (saved.email) identity.value.email = saved.email
|
|
||||||
if (saved.phone) identity.value.phone = saved.phone
|
|
||||||
if (saved.linkedin) identity.value.linkedin = saved.linkedin
|
|
||||||
if (saved.career_summary) identity.value.careerSummary = saved.career_summary
|
|
||||||
if (saved.services) inference.value.services = saved.services
|
|
||||||
|
|
||||||
// Cloud: auto-skip steps 1 (hardware), 2 (tier), 5 (inference)
|
|
||||||
if (isCloud) {
|
|
||||||
const cloudStep = data.wizard_step
|
|
||||||
if (cloudStep < 1) {
|
|
||||||
await saveStep(1, { inference_profile: 'single-gpu' })
|
|
||||||
await saveStep(2, { tier: tier.value })
|
|
||||||
currentStep.value = 3
|
|
||||||
return '/setup/resume'
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Resume at next step after last completed
|
|
||||||
const resumeAt = Math.max(1, Math.min(data.wizard_step + 1, 7))
|
|
||||||
currentStep.value = resumeAt
|
|
||||||
return routeForStep(resumeAt)
|
|
||||||
} finally {
|
|
||||||
loading.value = false
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/** Detect GPUs and populate hardware step. */
|
|
||||||
async function detectHardware(): Promise<void> {
|
|
||||||
loading.value = true
|
|
||||||
try {
|
|
||||||
const { data } = await useApiFetch<{
|
|
||||||
gpus: string[]
|
|
||||||
suggested_profile: string
|
|
||||||
profiles: string[]
|
|
||||||
}>('/api/wizard/hardware')
|
|
||||||
|
|
||||||
if (!data) return
|
|
||||||
hardware.value.gpus = data.gpus
|
|
||||||
hardware.value.suggestedProfile = data.suggested_profile as WizardProfile
|
|
||||||
// Only set selectedProfile if not already chosen by user
|
|
||||||
if (!hardware.value.selectedProfile || hardware.value.selectedProfile === 'remote') {
|
|
||||||
hardware.value.selectedProfile = data.suggested_profile as WizardProfile
|
|
||||||
}
|
|
||||||
} finally {
|
|
||||||
loading.value = false
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/** Persist a step's data to the server. */
|
|
||||||
async function saveStep(step: number, data: Record<string, unknown>): Promise<boolean> {
|
|
||||||
saving.value = true
|
|
||||||
errors.value = []
|
|
||||||
try {
|
|
||||||
const { data: result, error } = await useApiFetch('/api/wizard/step', {
|
|
||||||
method: 'POST',
|
|
||||||
headers: { 'Content-Type': 'application/json' },
|
|
||||||
body: JSON.stringify({ step, data }),
|
|
||||||
})
|
|
||||||
if (error) {
|
|
||||||
errors.value = [error.kind === 'http' ? error.detail : error.message]
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
currentStep.value = step
|
|
||||||
return true
|
|
||||||
} finally {
|
|
||||||
saving.value = false
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/** Test LLM / Ollama connectivity. */
|
|
||||||
async function testInference(): Promise<{ ok: boolean; message: string }> {
|
|
||||||
const payload = {
|
|
||||||
profile: hardware.value.selectedProfile,
|
|
||||||
anthropic_key: inference.value.anthropicKey,
|
|
||||||
openai_url: inference.value.openaiUrl,
|
|
||||||
openai_key: inference.value.openaiKey,
|
|
||||||
ollama_host: inference.value.ollamaHost,
|
|
||||||
ollama_port: inference.value.ollamaPort,
|
|
||||||
}
|
|
||||||
const { data } = await useApiFetch<{ ok: boolean; message: string }>(
|
|
||||||
'/api/wizard/inference/test',
|
|
||||||
{
|
|
||||||
method: 'POST',
|
|
||||||
headers: { 'Content-Type': 'application/json' },
|
|
||||||
body: JSON.stringify(payload),
|
|
||||||
},
|
|
||||||
)
|
|
||||||
const result = data ?? { ok: false, message: 'No response from server.' }
|
|
||||||
inference.value.testMessage = result.message
|
|
||||||
inference.value.confirmed = true // always soft-confirm so user isn't blocked
|
|
||||||
return result
|
|
||||||
}
|
|
||||||
|
|
||||||
/** Finalise the wizard. */
|
|
||||||
async function complete(): Promise<boolean> {
|
|
||||||
saving.value = true
|
|
||||||
try {
|
|
||||||
const { error } = await useApiFetch('/api/wizard/complete', { method: 'POST' })
|
|
||||||
if (error) {
|
|
||||||
errors.value = [error.kind === 'http' ? error.detail : error.message]
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
return true
|
|
||||||
} finally {
|
|
||||||
saving.value = false
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return {
|
|
||||||
// state
|
|
||||||
currentStep,
|
|
||||||
loading,
|
|
||||||
saving,
|
|
||||||
errors,
|
|
||||||
hardware,
|
|
||||||
tier,
|
|
||||||
resume,
|
|
||||||
identity,
|
|
||||||
inference,
|
|
||||||
search,
|
|
||||||
// computed
|
|
||||||
progressFraction,
|
|
||||||
stepLabel,
|
|
||||||
// actions
|
|
||||||
loadStatus,
|
|
||||||
detectHardware,
|
|
||||||
saveStep,
|
|
||||||
testInference,
|
|
||||||
complete,
|
|
||||||
routeForStep,
|
|
||||||
}
|
|
||||||
})
|
|
||||||
|
|
@ -53,13 +53,6 @@
|
||||||
:loading="taskRunning === 'score'"
|
:loading="taskRunning === 'score'"
|
||||||
@click="scoreUnscored"
|
@click="scoreUnscored"
|
||||||
/>
|
/>
|
||||||
<WorkflowButton
|
|
||||||
emoji="🔍"
|
|
||||||
label="Fill Missing Descriptions"
|
|
||||||
description="Re-fetch truncated job descriptions"
|
|
||||||
:loading="taskRunning === 'enrich'"
|
|
||||||
@click="runEnrich"
|
|
||||||
/>
|
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<button
|
<button
|
||||||
|
|
@ -87,6 +80,7 @@
|
||||||
? `Last enriched ${formatRelative(store.status.enrichment_last_run)}`
|
? `Last enriched ${formatRelative(store.status.enrichment_last_run)}`
|
||||||
: 'Auto-enrichment active' }}
|
: 'Auto-enrichment active' }}
|
||||||
</span>
|
</span>
|
||||||
|
<button class="btn-ghost btn-ghost--sm" @click="runEnrich">Run Now</button>
|
||||||
</div>
|
</div>
|
||||||
</section>
|
</section>
|
||||||
|
|
||||||
|
|
@ -168,192 +162,22 @@
|
||||||
</div>
|
</div>
|
||||||
</section>
|
</section>
|
||||||
|
|
||||||
<!-- Danger Zone -->
|
<!-- Advanced -->
|
||||||
<section class="home__section">
|
<section class="home__section">
|
||||||
<details class="danger-zone">
|
<details class="advanced">
|
||||||
<summary class="danger-zone__summary">⚠️ Danger Zone</summary>
|
<summary class="advanced__summary">Advanced</summary>
|
||||||
<div class="danger-zone__body">
|
<div class="advanced__body">
|
||||||
|
<p class="advanced__warning">⚠️ These actions are destructive and cannot be undone.</p>
|
||||||
<!-- Queue reset -->
|
<div class="home__actions home__actions--danger">
|
||||||
<div class="dz-block">
|
<button class="action-btn action-btn--danger" @click="confirmPurge">
|
||||||
<p class="dz-block__title">Queue reset</p>
|
🗑️ Purge Pending + Rejected
|
||||||
<p class="dz-block__desc">
|
|
||||||
Archive clears your review queue while keeping job URLs for dedup — same listings
|
|
||||||
won't resurface on the next discovery run. Use hard purge only for a full clean slate
|
|
||||||
including dedup history.
|
|
||||||
</p>
|
|
||||||
|
|
||||||
<fieldset class="dz-scope" aria-label="Clear scope">
|
|
||||||
<legend class="dz-scope__legend">Clear scope</legend>
|
|
||||||
<label class="dz-scope__option">
|
|
||||||
<input type="radio" v-model="dangerScope" value="pending" />
|
|
||||||
Pending only
|
|
||||||
</label>
|
|
||||||
<label class="dz-scope__option">
|
|
||||||
<input type="radio" v-model="dangerScope" value="pending_approved" />
|
|
||||||
Pending + approved (stale search)
|
|
||||||
</label>
|
|
||||||
</fieldset>
|
|
||||||
|
|
||||||
<div class="dz-actions">
|
|
||||||
<button
|
|
||||||
class="action-btn action-btn--primary"
|
|
||||||
:disabled="!!confirmAction"
|
|
||||||
@click="beginConfirm('archive')"
|
|
||||||
>
|
|
||||||
📦 Archive & reset
|
|
||||||
</button>
|
</button>
|
||||||
<button
|
<button class="action-btn action-btn--danger" @click="killTasks">
|
||||||
class="action-btn action-btn--secondary"
|
🛑 Kill Stuck Tasks
|
||||||
:disabled="!!confirmAction"
|
|
||||||
@click="beginConfirm('purge')"
|
|
||||||
>
|
|
||||||
🗑 Hard purge (delete)
|
|
||||||
</button>
|
</button>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<!-- Inline confirm -->
|
|
||||||
<div v-if="confirmAction" class="dz-confirm" role="alertdialog" aria-live="assertive">
|
|
||||||
<p v-if="confirmAction.type === 'archive'" class="dz-confirm__msg dz-confirm__msg--info">
|
|
||||||
Archive <strong>{{ confirmAction.statuses.join(' + ') }}</strong> jobs?
|
|
||||||
URLs are kept for dedup — nothing is permanently deleted.
|
|
||||||
</p>
|
|
||||||
<p v-else class="dz-confirm__msg dz-confirm__msg--warn">
|
|
||||||
Permanently delete <strong>{{ confirmAction.statuses.join(' + ') }}</strong> jobs?
|
|
||||||
This removes URLs from dedup history too. Cannot be undone.
|
|
||||||
</p>
|
|
||||||
<div class="dz-confirm__actions">
|
|
||||||
<button class="action-btn action-btn--primary" @click="executeConfirm">
|
|
||||||
{{ confirmAction.type === 'archive' ? 'Yes, archive' : 'Yes, delete' }}
|
|
||||||
</button>
|
|
||||||
<button class="action-btn action-btn--secondary" @click="confirmAction = null">
|
|
||||||
Cancel
|
|
||||||
</button>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<hr class="dz-divider" />
|
|
||||||
|
|
||||||
<!-- Background tasks -->
|
|
||||||
<div class="dz-block">
|
|
||||||
<p class="dz-block__title">Background tasks — {{ activeTasks.length }} active</p>
|
|
||||||
<template v-if="activeTasks.length > 0">
|
|
||||||
<div
|
|
||||||
v-for="task in activeTasks"
|
|
||||||
:key="task.id"
|
|
||||||
class="dz-task"
|
|
||||||
>
|
|
||||||
<span class="dz-task__icon">{{ taskIcon(task.task_type) }}</span>
|
|
||||||
<span class="dz-task__type">{{ task.task_type.replace(/_/g, ' ') }}</span>
|
|
||||||
<span class="dz-task__label">
|
|
||||||
{{ task.title ? `${task.title}${task.company ? ' @ ' + task.company : ''}` : `job #${task.job_id}` }}
|
|
||||||
</span>
|
|
||||||
<span class="dz-task__status">{{ task.status }}</span>
|
|
||||||
<button
|
|
||||||
class="btn-ghost btn-ghost--sm dz-task__cancel"
|
|
||||||
@click="cancelTaskById(task.id)"
|
|
||||||
:aria-label="`Cancel ${task.task_type} task`"
|
|
||||||
>
|
|
||||||
✕
|
|
||||||
</button>
|
|
||||||
</div>
|
|
||||||
</template>
|
|
||||||
<button
|
|
||||||
class="action-btn action-btn--secondary dz-kill"
|
|
||||||
:disabled="activeTasks.length === 0"
|
|
||||||
@click="killAll"
|
|
||||||
>
|
|
||||||
⏹ Kill all stuck
|
|
||||||
</button>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<hr class="dz-divider" />
|
|
||||||
|
|
||||||
<!-- More options -->
|
|
||||||
<details class="dz-more">
|
|
||||||
<summary class="dz-more__summary">More options</summary>
|
|
||||||
<div class="dz-more__body">
|
|
||||||
|
|
||||||
<!-- Email purge -->
|
|
||||||
<div class="dz-more__item">
|
|
||||||
<p class="dz-block__title">Purge email data</p>
|
|
||||||
<p class="dz-block__desc">Clears all email thread logs and email-sourced pending jobs.</p>
|
|
||||||
<template v-if="moreConfirm === 'email'">
|
|
||||||
<p class="dz-confirm__msg dz-confirm__msg--warn">
|
|
||||||
Deletes all email contacts and email-sourced jobs. Cannot be undone.
|
|
||||||
</p>
|
|
||||||
<div class="dz-confirm__actions">
|
|
||||||
<button class="action-btn action-btn--primary" @click="executePurgeTarget('email')">Yes, purge emails</button>
|
|
||||||
<button class="action-btn action-btn--secondary" @click="moreConfirm = null">Cancel</button>
|
|
||||||
</div>
|
|
||||||
</template>
|
|
||||||
<button v-else class="action-btn action-btn--secondary" @click="moreConfirm = 'email'">
|
|
||||||
📧 Purge Email Data
|
|
||||||
</button>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<!-- Non-remote purge -->
|
|
||||||
<div class="dz-more__item">
|
|
||||||
<p class="dz-block__title">Purge non-remote</p>
|
|
||||||
<p class="dz-block__desc">Removes pending/approved/rejected on-site listings from the DB.</p>
|
|
||||||
<template v-if="moreConfirm === 'non_remote'">
|
|
||||||
<p class="dz-confirm__msg dz-confirm__msg--warn">
|
|
||||||
Deletes all non-remote jobs not yet applied to. Cannot be undone.
|
|
||||||
</p>
|
|
||||||
<div class="dz-confirm__actions">
|
|
||||||
<button class="action-btn action-btn--primary" @click="executePurgeTarget('non_remote')">Yes, purge on-site</button>
|
|
||||||
<button class="action-btn action-btn--secondary" @click="moreConfirm = null">Cancel</button>
|
|
||||||
</div>
|
|
||||||
</template>
|
|
||||||
<button v-else class="action-btn action-btn--secondary" @click="moreConfirm = 'non_remote'">
|
|
||||||
🏢 Purge On-site Jobs
|
|
||||||
</button>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<!-- Wipe + re-scrape -->
|
|
||||||
<div class="dz-more__item">
|
|
||||||
<p class="dz-block__title">Wipe all + re-scrape</p>
|
|
||||||
<p class="dz-block__desc">Deletes all non-applied jobs then immediately runs a fresh discovery.</p>
|
|
||||||
<template v-if="moreConfirm === 'rescrape'">
|
|
||||||
<p class="dz-confirm__msg dz-confirm__msg--warn">
|
|
||||||
Wipes ALL pending, approved, and rejected jobs, then re-scrapes.
|
|
||||||
Applied and synced records are kept.
|
|
||||||
</p>
|
|
||||||
<div class="dz-confirm__actions">
|
|
||||||
<button class="action-btn action-btn--primary" @click="executePurgeTarget('rescrape')">Yes, wipe + scrape</button>
|
|
||||||
<button class="action-btn action-btn--secondary" @click="moreConfirm = null">Cancel</button>
|
|
||||||
</div>
|
|
||||||
</template>
|
|
||||||
<button v-else class="action-btn action-btn--secondary" @click="moreConfirm = 'rescrape'">
|
|
||||||
🔄 Wipe + Re-scrape
|
|
||||||
</button>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
</div>
|
</div>
|
||||||
</details>
|
</details>
|
||||||
|
|
||||||
</div>
|
|
||||||
</details>
|
|
||||||
</section>
|
|
||||||
|
|
||||||
<!-- Setup banners -->
|
|
||||||
<section v-if="banners.length > 0" class="home__section" aria-labelledby="setup-heading">
|
|
||||||
<h2 id="setup-heading" class="home__section-title">Finish setting up Peregrine</h2>
|
|
||||||
<div class="banners">
|
|
||||||
<div v-for="banner in banners" :key="banner.key" class="banner">
|
|
||||||
<span class="banner__icon" aria-hidden="true">💡</span>
|
|
||||||
<span class="banner__text">{{ banner.text }}</span>
|
|
||||||
<RouterLink :to="banner.link" class="banner__link">Go to settings →</RouterLink>
|
|
||||||
<button
|
|
||||||
class="btn-ghost btn-ghost--sm banner__dismiss"
|
|
||||||
@click="dismissBanner(banner.key)"
|
|
||||||
:aria-label="`Dismiss: ${banner.text}`"
|
|
||||||
>
|
|
||||||
✕
|
|
||||||
</button>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</section>
|
</section>
|
||||||
|
|
||||||
<!-- Stoop speed toast — easter egg 9.2 -->
|
<!-- Stoop speed toast — easter egg 9.2 -->
|
||||||
|
|
@ -366,7 +190,7 @@
|
||||||
</template>
|
</template>
|
||||||
|
|
||||||
<script setup lang="ts">
|
<script setup lang="ts">
|
||||||
import { ref, computed, onMounted, onUnmounted } from 'vue'
|
import { ref, computed, onMounted } from 'vue'
|
||||||
import { RouterLink } from 'vue-router'
|
import { RouterLink } from 'vue-router'
|
||||||
import { useJobsStore } from '../stores/jobs'
|
import { useJobsStore } from '../stores/jobs'
|
||||||
import { useApiFetch } from '../composables/useApi'
|
import { useApiFetch } from '../composables/useApi'
|
||||||
|
|
@ -407,8 +231,6 @@ function formatRelative(isoStr: string) {
|
||||||
return hrs === 1 ? '1 hour ago' : `${hrs} hours ago`
|
return hrs === 1 ? '1 hour ago' : `${hrs} hours ago`
|
||||||
}
|
}
|
||||||
|
|
||||||
// ── Task execution ─────────────────────────────────────────────────────────
|
|
||||||
|
|
||||||
const taskRunning = ref<string | null>(null)
|
const taskRunning = ref<string | null>(null)
|
||||||
const stoopToast = ref(false)
|
const stoopToast = ref(false)
|
||||||
|
|
||||||
|
|
@ -417,16 +239,13 @@ async function runTask(key: string, endpoint: string) {
|
||||||
await useApiFetch(endpoint, { method: 'POST' })
|
await useApiFetch(endpoint, { method: 'POST' })
|
||||||
taskRunning.value = null
|
taskRunning.value = null
|
||||||
store.refresh()
|
store.refresh()
|
||||||
fetchActiveTasks()
|
|
||||||
}
|
}
|
||||||
|
|
||||||
const runDiscovery = () => runTask('discovery', '/api/tasks/discovery')
|
const runDiscovery = () => runTask('discovery', '/api/tasks/discovery')
|
||||||
const syncEmails = () => runTask('email', '/api/tasks/email-sync')
|
const syncEmails = () => runTask('email', '/api/tasks/email-sync')
|
||||||
const scoreUnscored = () => runTask('score', '/api/tasks/score')
|
const scoreUnscored = () => runTask('score', '/api/tasks/score')
|
||||||
const syncIntegration = () => runTask('sync', '/api/tasks/sync')
|
const syncIntegration = () => runTask('sync', '/api/tasks/sync')
|
||||||
const runEnrich = () => runTask('enrich', '/api/tasks/enrich')
|
const runEnrich = () => useApiFetch('/api/tasks/enrich', { method: 'POST' })
|
||||||
|
|
||||||
// ── Add jobs ───────────────────────────────────────────────────────────────
|
|
||||||
|
|
||||||
const addTab = ref<'url' | 'csv'>('url')
|
const addTab = ref<'url' | 'csv'>('url')
|
||||||
const urlInput = ref('')
|
const urlInput = ref('')
|
||||||
|
|
@ -450,8 +269,6 @@ function handleCsvUpload(e: Event) {
|
||||||
useApiFetch('/api/jobs/upload-csv', { method: 'POST', body: form })
|
useApiFetch('/api/jobs/upload-csv', { method: 'POST', body: form })
|
||||||
}
|
}
|
||||||
|
|
||||||
// ── Backlog archive ────────────────────────────────────────────────────────
|
|
||||||
|
|
||||||
async function archiveByStatus(statuses: string[]) {
|
async function archiveByStatus(statuses: string[]) {
|
||||||
await useApiFetch('/api/jobs/archive', {
|
await useApiFetch('/api/jobs/archive', {
|
||||||
method: 'POST',
|
method: 'POST',
|
||||||
|
|
@ -461,100 +278,26 @@ async function archiveByStatus(statuses: string[]) {
|
||||||
store.refresh()
|
store.refresh()
|
||||||
}
|
}
|
||||||
|
|
||||||
// ── Danger Zone ────────────────────────────────────────────────────────────
|
function confirmPurge() {
|
||||||
|
// TODO: replace with ConfirmModal component
|
||||||
interface TaskRow { id: number; task_type: string; status: string; title?: string; company?: string; job_id: number }
|
if (confirm('Permanently delete all pending and rejected jobs? This cannot be undone.')) {
|
||||||
interface Banner { key: string; text: string; link: string }
|
useApiFetch('/api/jobs/purge', {
|
||||||
interface ConfirmAction { type: 'archive' | 'purge'; statuses: string[] }
|
|
||||||
|
|
||||||
const activeTasks = ref<TaskRow[]>([])
|
|
||||||
const dangerScope = ref<'pending' | 'pending_approved'>('pending')
|
|
||||||
const confirmAction = ref<ConfirmAction | null>(null)
|
|
||||||
const moreConfirm = ref<string | null>(null)
|
|
||||||
const banners = ref<Banner[]>([])
|
|
||||||
|
|
||||||
let taskPollInterval: ReturnType<typeof setInterval> | null = null
|
|
||||||
|
|
||||||
async function fetchActiveTasks() {
|
|
||||||
const { data } = await useApiFetch<TaskRow[]>('/api/tasks')
|
|
||||||
activeTasks.value = data ?? []
|
|
||||||
}
|
|
||||||
|
|
||||||
async function fetchBanners() {
|
|
||||||
const { data } = await useApiFetch<Banner[]>('/api/config/setup-banners')
|
|
||||||
banners.value = data ?? []
|
|
||||||
}
|
|
||||||
|
|
||||||
function scopeStatuses(): string[] {
|
|
||||||
return dangerScope.value === 'pending' ? ['pending'] : ['pending', 'approved']
|
|
||||||
}
|
|
||||||
|
|
||||||
function beginConfirm(type: 'archive' | 'purge') {
|
|
||||||
moreConfirm.value = null
|
|
||||||
confirmAction.value = { type, statuses: scopeStatuses() }
|
|
||||||
}
|
|
||||||
|
|
||||||
async function executeConfirm() {
|
|
||||||
const action = confirmAction.value
|
|
||||||
confirmAction.value = null
|
|
||||||
if (!action) return
|
|
||||||
const endpoint = action.type === 'archive' ? '/api/jobs/archive' : '/api/jobs/purge'
|
|
||||||
const key = action.type === 'archive' ? 'statuses' : 'statuses'
|
|
||||||
await useApiFetch(endpoint, {
|
|
||||||
method: 'POST',
|
method: 'POST',
|
||||||
headers: { 'Content-Type': 'application/json' },
|
headers: { 'Content-Type': 'application/json' },
|
||||||
body: JSON.stringify({ [key]: action.statuses }),
|
body: JSON.stringify({ target: 'pending_rejected' }),
|
||||||
})
|
})
|
||||||
store.refresh()
|
store.refresh()
|
||||||
fetchActiveTasks()
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
async function cancelTaskById(id: number) {
|
async function killTasks() {
|
||||||
await useApiFetch(`/api/tasks/${id}`, { method: 'DELETE' })
|
|
||||||
fetchActiveTasks()
|
|
||||||
}
|
|
||||||
|
|
||||||
async function killAll() {
|
|
||||||
await useApiFetch('/api/tasks/kill', { method: 'POST' })
|
await useApiFetch('/api/tasks/kill', { method: 'POST' })
|
||||||
fetchActiveTasks()
|
|
||||||
}
|
|
||||||
|
|
||||||
async function executePurgeTarget(target: string) {
|
|
||||||
moreConfirm.value = null
|
|
||||||
await useApiFetch('/api/jobs/purge', {
|
|
||||||
method: 'POST',
|
|
||||||
headers: { 'Content-Type': 'application/json' },
|
|
||||||
body: JSON.stringify({ target }),
|
|
||||||
})
|
|
||||||
store.refresh()
|
|
||||||
fetchActiveTasks()
|
|
||||||
}
|
|
||||||
|
|
||||||
async function dismissBanner(key: string) {
|
|
||||||
await useApiFetch(`/api/config/setup-banners/${key}/dismiss`, { method: 'POST' })
|
|
||||||
banners.value = banners.value.filter(b => b.key !== key)
|
|
||||||
}
|
|
||||||
|
|
||||||
function taskIcon(taskType: string): string {
|
|
||||||
const icons: Record<string, string> = {
|
|
||||||
cover_letter: '✉️', company_research: '🔍', discovery: '🌐',
|
|
||||||
enrich_descriptions: '📝', email_sync: '📧', score: '📊',
|
|
||||||
scrape_url: '🔗',
|
|
||||||
}
|
|
||||||
return icons[taskType] ?? '⚙️'
|
|
||||||
}
|
}
|
||||||
|
|
||||||
onMounted(async () => {
|
onMounted(async () => {
|
||||||
store.refresh()
|
store.refresh()
|
||||||
const { data } = await useApiFetch<{ name: string }>('/api/config/user')
|
const { data } = await useApiFetch<{ name: string }>('/api/config/user')
|
||||||
if (data?.name) userName.value = data.name
|
if (data?.name) userName.value = data.name
|
||||||
fetchActiveTasks()
|
|
||||||
fetchBanners()
|
|
||||||
taskPollInterval = setInterval(fetchActiveTasks, 5000)
|
|
||||||
})
|
|
||||||
|
|
||||||
onUnmounted(() => {
|
|
||||||
if (taskPollInterval) clearInterval(taskPollInterval)
|
|
||||||
})
|
})
|
||||||
</script>
|
</script>
|
||||||
|
|
||||||
|
|
@ -649,11 +392,12 @@ onUnmounted(() => {
|
||||||
|
|
||||||
.home__actions {
|
.home__actions {
|
||||||
display: grid;
|
display: grid;
|
||||||
grid-template-columns: repeat(auto-fit, minmax(180px, 1fr));
|
grid-template-columns: repeat(auto-fit, minmax(200px, 1fr));
|
||||||
gap: var(--space-3);
|
gap: var(--space-3);
|
||||||
}
|
}
|
||||||
|
|
||||||
.home__actions--secondary { grid-template-columns: repeat(auto-fit, minmax(240px, 1fr)); }
|
.home__actions--secondary { grid-template-columns: repeat(auto-fit, minmax(240px, 1fr)); }
|
||||||
|
.home__actions--danger { grid-template-columns: repeat(auto-fit, minmax(220px, 1fr)); }
|
||||||
|
|
||||||
.sync-banner {
|
.sync-banner {
|
||||||
display: flex;
|
display: flex;
|
||||||
|
|
@ -707,7 +451,9 @@ onUnmounted(() => {
|
||||||
|
|
||||||
.action-btn--secondary { background: var(--color-surface-alt); color: var(--color-text); border: 1px solid var(--color-border); }
|
.action-btn--secondary { background: var(--color-surface-alt); color: var(--color-text); border: 1px solid var(--color-border); }
|
||||||
.action-btn--secondary:hover { background: var(--color-border-light); }
|
.action-btn--secondary:hover { background: var(--color-border-light); }
|
||||||
.action-btn--secondary:disabled { opacity: 0.4; cursor: not-allowed; }
|
|
||||||
|
.action-btn--danger { background: transparent; color: var(--color-error); border: 1px solid var(--color-error); }
|
||||||
|
.action-btn--danger:hover { background: rgba(192, 57, 43, 0.08); }
|
||||||
|
|
||||||
.enrichment-row {
|
.enrichment-row {
|
||||||
display: flex;
|
display: flex;
|
||||||
|
|
@ -782,15 +528,13 @@ onUnmounted(() => {
|
||||||
|
|
||||||
.add-jobs__textarea:focus { outline: 2px solid var(--app-primary); outline-offset: 1px; }
|
.add-jobs__textarea:focus { outline: 2px solid var(--app-primary); outline-offset: 1px; }
|
||||||
|
|
||||||
/* ── Danger Zone ──────────────────────────────────────── */
|
.advanced {
|
||||||
|
|
||||||
.danger-zone {
|
|
||||||
background: var(--color-surface-raised);
|
background: var(--color-surface-raised);
|
||||||
border: 1px solid var(--color-border-light);
|
border: 1px solid var(--color-border-light);
|
||||||
border-radius: var(--radius-md);
|
border-radius: var(--radius-md);
|
||||||
}
|
}
|
||||||
|
|
||||||
.danger-zone__summary {
|
.advanced__summary {
|
||||||
padding: var(--space-3) var(--space-4);
|
padding: var(--space-3) var(--space-4);
|
||||||
cursor: pointer;
|
cursor: pointer;
|
||||||
font-size: var(--text-sm);
|
font-size: var(--text-sm);
|
||||||
|
|
@ -800,172 +544,21 @@ onUnmounted(() => {
|
||||||
user-select: none;
|
user-select: none;
|
||||||
}
|
}
|
||||||
|
|
||||||
.danger-zone__summary::-webkit-details-marker { display: none; }
|
.advanced__summary::-webkit-details-marker { display: none; }
|
||||||
.danger-zone__summary::before { content: '▶ '; font-size: 0.7em; }
|
.advanced__summary::before { content: '▶ '; font-size: 0.7em; }
|
||||||
details[open] > .danger-zone__summary::before { content: '▼ '; }
|
details[open] > .advanced__summary::before { content: '▼ '; }
|
||||||
|
|
||||||
.danger-zone__body {
|
.advanced__body { padding: 0 var(--space-4) var(--space-4); display: flex; flex-direction: column; gap: var(--space-4); }
|
||||||
padding: 0 var(--space-4) var(--space-4);
|
|
||||||
display: flex;
|
|
||||||
flex-direction: column;
|
|
||||||
gap: var(--space-5);
|
|
||||||
}
|
|
||||||
|
|
||||||
.dz-block { display: flex; flex-direction: column; gap: var(--space-3); }
|
.advanced__warning {
|
||||||
|
|
||||||
.dz-block__title {
|
|
||||||
font-size: var(--text-sm);
|
font-size: var(--text-sm);
|
||||||
font-weight: 600;
|
color: var(--color-warning);
|
||||||
color: var(--color-text);
|
background: rgba(212, 137, 26, 0.08);
|
||||||
}
|
|
||||||
|
|
||||||
.dz-block__desc {
|
|
||||||
font-size: var(--text-sm);
|
|
||||||
color: var(--color-text-muted);
|
|
||||||
}
|
|
||||||
|
|
||||||
.dz-scope {
|
|
||||||
border: none;
|
|
||||||
padding: 0;
|
|
||||||
margin: 0;
|
|
||||||
display: flex;
|
|
||||||
gap: var(--space-5);
|
|
||||||
flex-wrap: wrap;
|
|
||||||
}
|
|
||||||
|
|
||||||
.dz-scope__legend {
|
|
||||||
font-size: var(--text-xs);
|
|
||||||
color: var(--color-text-muted);
|
|
||||||
margin-bottom: var(--space-2);
|
|
||||||
float: left;
|
|
||||||
width: 100%;
|
|
||||||
}
|
|
||||||
|
|
||||||
.dz-scope__option {
|
|
||||||
display: flex;
|
|
||||||
align-items: center;
|
|
||||||
gap: var(--space-2);
|
|
||||||
font-size: var(--text-sm);
|
|
||||||
cursor: pointer;
|
|
||||||
}
|
|
||||||
|
|
||||||
.dz-actions {
|
|
||||||
display: flex;
|
|
||||||
gap: var(--space-3);
|
|
||||||
flex-wrap: wrap;
|
|
||||||
}
|
|
||||||
|
|
||||||
.dz-confirm {
|
|
||||||
padding: var(--space-3) var(--space-4);
|
padding: var(--space-3) var(--space-4);
|
||||||
border-radius: var(--radius-md);
|
border-radius: var(--radius-md);
|
||||||
display: flex;
|
border-left: 3px solid var(--color-warning);
|
||||||
flex-direction: column;
|
|
||||||
gap: var(--space-3);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
.dz-confirm__msg {
|
|
||||||
font-size: var(--text-sm);
|
|
||||||
padding: var(--space-3) var(--space-4);
|
|
||||||
border-radius: var(--radius-md);
|
|
||||||
border-left: 3px solid;
|
|
||||||
}
|
|
||||||
|
|
||||||
.dz-confirm__msg--info {
|
|
||||||
background: rgba(52, 152, 219, 0.1);
|
|
||||||
border-color: var(--app-primary);
|
|
||||||
color: var(--color-text);
|
|
||||||
}
|
|
||||||
|
|
||||||
.dz-confirm__msg--warn {
|
|
||||||
background: rgba(192, 57, 43, 0.08);
|
|
||||||
border-color: var(--color-error);
|
|
||||||
color: var(--color-text);
|
|
||||||
}
|
|
||||||
|
|
||||||
.dz-confirm__actions {
|
|
||||||
display: flex;
|
|
||||||
gap: var(--space-3);
|
|
||||||
}
|
|
||||||
|
|
||||||
.dz-divider {
|
|
||||||
border: none;
|
|
||||||
border-top: 1px solid var(--color-border-light);
|
|
||||||
margin: 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
.dz-task {
|
|
||||||
display: flex;
|
|
||||||
align-items: center;
|
|
||||||
gap: var(--space-2);
|
|
||||||
padding: var(--space-2) var(--space-3);
|
|
||||||
background: var(--color-surface-alt);
|
|
||||||
border-radius: var(--radius-md);
|
|
||||||
font-size: var(--text-xs);
|
|
||||||
}
|
|
||||||
|
|
||||||
.dz-task__icon { flex-shrink: 0; }
|
|
||||||
.dz-task__type { font-family: var(--font-mono); color: var(--color-text-muted); min-width: 120px; }
|
|
||||||
.dz-task__label { flex: 1; color: var(--color-text); overflow: hidden; text-overflow: ellipsis; white-space: nowrap; }
|
|
||||||
.dz-task__status { color: var(--color-text-muted); font-style: italic; }
|
|
||||||
.dz-task__cancel { margin-left: var(--space-2); }
|
|
||||||
|
|
||||||
.dz-kill { align-self: flex-start; }
|
|
||||||
|
|
||||||
.dz-more {
|
|
||||||
background: transparent;
|
|
||||||
border: none;
|
|
||||||
}
|
|
||||||
|
|
||||||
.dz-more__summary {
|
|
||||||
cursor: pointer;
|
|
||||||
font-size: var(--text-sm);
|
|
||||||
font-weight: 600;
|
|
||||||
color: var(--color-text-muted);
|
|
||||||
list-style: none;
|
|
||||||
user-select: none;
|
|
||||||
padding: var(--space-1) 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
.dz-more__summary::-webkit-details-marker { display: none; }
|
|
||||||
.dz-more__summary::before { content: '▶ '; font-size: 0.7em; }
|
|
||||||
details[open] > .dz-more__summary::before { content: '▼ '; }
|
|
||||||
|
|
||||||
.dz-more__body {
|
|
||||||
display: grid;
|
|
||||||
grid-template-columns: repeat(auto-fit, minmax(220px, 1fr));
|
|
||||||
gap: var(--space-5);
|
|
||||||
margin-top: var(--space-4);
|
|
||||||
}
|
|
||||||
|
|
||||||
.dz-more__item { display: flex; flex-direction: column; gap: var(--space-2); }
|
|
||||||
|
|
||||||
/* ── Setup banners ────────────────────────────────────── */
|
|
||||||
|
|
||||||
.banners {
|
|
||||||
display: flex;
|
|
||||||
flex-direction: column;
|
|
||||||
gap: var(--space-2);
|
|
||||||
}
|
|
||||||
|
|
||||||
.banner {
|
|
||||||
display: flex;
|
|
||||||
align-items: center;
|
|
||||||
gap: var(--space-3);
|
|
||||||
padding: var(--space-3) var(--space-4);
|
|
||||||
background: var(--color-surface-raised);
|
|
||||||
border: 1px solid var(--color-border-light);
|
|
||||||
border-radius: var(--radius-md);
|
|
||||||
font-size: var(--text-sm);
|
|
||||||
}
|
|
||||||
|
|
||||||
.banner__icon { flex-shrink: 0; }
|
|
||||||
.banner__text { flex: 1; color: var(--color-text); }
|
|
||||||
.banner__link { color: var(--app-primary); text-decoration: none; white-space: nowrap; font-weight: 500; }
|
|
||||||
.banner__link:hover { text-decoration: underline; }
|
|
||||||
.banner__dismiss { margin-left: var(--space-1); }
|
|
||||||
|
|
||||||
/* ── Toast ────────────────────────────────────────────── */
|
|
||||||
|
|
||||||
.stoop-toast {
|
.stoop-toast {
|
||||||
position: fixed;
|
position: fixed;
|
||||||
bottom: var(--space-6);
|
bottom: var(--space-6);
|
||||||
|
|
@ -995,7 +588,6 @@ details[open] > .dz-more__summary::before { content: '▼ '; }
|
||||||
.home { padding: var(--space-4); gap: var(--space-6); }
|
.home { padding: var(--space-4); gap: var(--space-6); }
|
||||||
.home__greeting { font-size: var(--text-2xl); }
|
.home__greeting { font-size: var(--text-2xl); }
|
||||||
.home__metrics { grid-template-columns: repeat(3, 1fr); }
|
.home__metrics { grid-template-columns: repeat(3, 1fr); }
|
||||||
.dz-more__body { grid-template-columns: 1fr; }
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@media (max-width: 480px) {
|
@media (max-width: 480px) {
|
||||||
|
|
|
||||||
|
|
@ -7,7 +7,6 @@ import type { StageSignal } from '../stores/interviews'
|
||||||
import { useApiFetch } from '../composables/useApi'
|
import { useApiFetch } from '../composables/useApi'
|
||||||
import InterviewCard from '../components/InterviewCard.vue'
|
import InterviewCard from '../components/InterviewCard.vue'
|
||||||
import MoveToSheet from '../components/MoveToSheet.vue'
|
import MoveToSheet from '../components/MoveToSheet.vue'
|
||||||
import CompanyResearchModal from '../components/CompanyResearchModal.vue'
|
|
||||||
|
|
||||||
const router = useRouter()
|
const router = useRouter()
|
||||||
const store = useInterviewsStore()
|
const store = useInterviewsStore()
|
||||||
|
|
@ -23,29 +22,10 @@ function openMove(jobId: number, preSelectedStage?: PipelineStage) {
|
||||||
|
|
||||||
async function onMove(stage: PipelineStage, opts: { interview_date?: string; rejection_stage?: string }) {
|
async function onMove(stage: PipelineStage, opts: { interview_date?: string; rejection_stage?: string }) {
|
||||||
if (!moveTarget.value) return
|
if (!moveTarget.value) return
|
||||||
const movedJob = moveTarget.value
|
|
||||||
const wasHired = stage === 'hired'
|
const wasHired = stage === 'hired'
|
||||||
await store.move(movedJob.id, stage, opts)
|
await store.move(moveTarget.value.id, stage, opts)
|
||||||
moveTarget.value = null
|
moveTarget.value = null
|
||||||
if (wasHired) triggerConfetti()
|
if (wasHired) triggerConfetti()
|
||||||
// Auto-open research modal when moving to phone_screen (mirrors Streamlit behaviour)
|
|
||||||
if (stage === 'phone_screen') openResearch(movedJob.id, `${movedJob.title} at ${movedJob.company}`)
|
|
||||||
}
|
|
||||||
|
|
||||||
// ── Company research modal ─────────────────────────────────────────────────────
|
|
||||||
const researchJobId = ref<number | null>(null)
|
|
||||||
const researchJobTitle = ref('')
|
|
||||||
const researchAutoGen = ref(false)
|
|
||||||
|
|
||||||
function openResearch(jobId: number, jobTitle: string, autoGenerate = true) {
|
|
||||||
researchJobId.value = jobId
|
|
||||||
researchJobTitle.value = jobTitle
|
|
||||||
researchAutoGen.value = autoGenerate
|
|
||||||
}
|
|
||||||
|
|
||||||
function onInterviewCardResearch(jobId: number) {
|
|
||||||
const job = store.jobs.find(j => j.id === jobId)
|
|
||||||
if (job) openResearch(jobId, `${job.title} at ${job.company}`, false)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// ── Collapsible Applied section ────────────────────────────────────────────
|
// ── Collapsible Applied section ────────────────────────────────────────────
|
||||||
|
|
@ -486,8 +466,7 @@ function daysSince(dateStr: string | null) {
|
||||||
</div>
|
</div>
|
||||||
<InterviewCard v-for="(job, i) in store.phoneScreen" :key="job.id" :job="job"
|
<InterviewCard v-for="(job, i) in store.phoneScreen" :key="job.id" :job="job"
|
||||||
:focused="focusedCol === 0 && focusedCard === i"
|
:focused="focusedCol === 0 && focusedCard === i"
|
||||||
@move="openMove" @prep="router.push(`/prep/${$event}`)" @survey="router.push('/survey/' + $event)"
|
@move="openMove" @prep="router.push(`/prep/${$event}`)" @survey="router.push('/survey/' + $event)" />
|
||||||
@research="onInterviewCardResearch" />
|
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<div class="kanban-col" :class="{ 'kanban-col--focused': focusedCol === 1 }" aria-label="Interviewing">
|
<div class="kanban-col" :class="{ 'kanban-col--focused': focusedCol === 1 }" aria-label="Interviewing">
|
||||||
|
|
@ -500,8 +479,7 @@ function daysSince(dateStr: string | null) {
|
||||||
</div>
|
</div>
|
||||||
<InterviewCard v-for="(job, i) in store.interviewing" :key="job.id" :job="job"
|
<InterviewCard v-for="(job, i) in store.interviewing" :key="job.id" :job="job"
|
||||||
:focused="focusedCol === 1 && focusedCard === i"
|
:focused="focusedCol === 1 && focusedCard === i"
|
||||||
@move="openMove" @prep="router.push(`/prep/${$event}`)" @survey="router.push('/survey/' + $event)"
|
@move="openMove" @prep="router.push(`/prep/${$event}`)" @survey="router.push('/survey/' + $event)" />
|
||||||
@research="onInterviewCardResearch" />
|
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<div class="kanban-col" :class="{ 'kanban-col--focused': focusedCol === 2 }" aria-label="Offer and Hired">
|
<div class="kanban-col" :class="{ 'kanban-col--focused': focusedCol === 2 }" aria-label="Offer and Hired">
|
||||||
|
|
@ -514,8 +492,7 @@ function daysSince(dateStr: string | null) {
|
||||||
</div>
|
</div>
|
||||||
<InterviewCard v-for="(job, i) in store.offerHired" :key="job.id" :job="job"
|
<InterviewCard v-for="(job, i) in store.offerHired" :key="job.id" :job="job"
|
||||||
:focused="focusedCol === 2 && focusedCard === i"
|
:focused="focusedCol === 2 && focusedCard === i"
|
||||||
@move="openMove" @prep="router.push(`/prep/${$event}`)" @survey="router.push('/survey/' + $event)"
|
@move="openMove" @prep="router.push(`/prep/${$event}`)" @survey="router.push('/survey/' + $event)" />
|
||||||
@research="onInterviewCardResearch" />
|
|
||||||
</div>
|
</div>
|
||||||
</section>
|
</section>
|
||||||
|
|
||||||
|
|
@ -548,14 +525,6 @@ function daysSince(dateStr: string | null) {
|
||||||
@move="onMove"
|
@move="onMove"
|
||||||
@close="moveTarget = null; movePreSelected = undefined"
|
@close="moveTarget = null; movePreSelected = undefined"
|
||||||
/>
|
/>
|
||||||
|
|
||||||
<CompanyResearchModal
|
|
||||||
v-if="researchJobId !== null"
|
|
||||||
:jobId="researchJobId"
|
|
||||||
:jobTitle="researchJobTitle"
|
|
||||||
:autoGenerate="researchAutoGen"
|
|
||||||
@close="researchJobId = null"
|
|
||||||
/>
|
|
||||||
</div>
|
</div>
|
||||||
</template>
|
</template>
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -98,50 +98,25 @@
|
||||||
<span class="spinner" aria-hidden="true" />
|
<span class="spinner" aria-hidden="true" />
|
||||||
<span>Loading…</span>
|
<span>Loading…</span>
|
||||||
</div>
|
</div>
|
||||||
<template v-else>
|
<div v-else-if="store.listJobs.length === 0" class="review__empty" role="status">
|
||||||
<!-- Sort + filter bar -->
|
<p class="empty-desc">No {{ activeTab }} jobs.</p>
|
||||||
<div class="list-controls" aria-label="Sort and filter">
|
|
||||||
<select v-model="sortBy" class="list-sort" aria-label="Sort by">
|
|
||||||
<option value="match_score">Best match</option>
|
|
||||||
<option value="date_found">Newest first</option>
|
|
||||||
<option value="company">Company A–Z</option>
|
|
||||||
</select>
|
|
||||||
<label class="list-filter-remote">
|
|
||||||
<input type="checkbox" v-model="filterRemote" />
|
|
||||||
Remote only
|
|
||||||
</label>
|
|
||||||
<span class="list-count">{{ sortedFilteredJobs.length }} job{{ sortedFilteredJobs.length !== 1 ? 's' : '' }}</span>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div v-if="sortedFilteredJobs.length === 0" class="review__empty" role="status">
|
|
||||||
<p class="empty-desc">No {{ activeTab }} jobs{{ filterRemote ? ' (remote only)' : '' }}.</p>
|
|
||||||
</div>
|
</div>
|
||||||
<ul v-else class="job-list" role="list">
|
<ul v-else class="job-list" role="list">
|
||||||
<li v-for="job in sortedFilteredJobs" :key="job.id" class="job-list__item">
|
<li v-for="job in store.listJobs" :key="job.id" class="job-list__item">
|
||||||
<div class="job-list__info">
|
<div class="job-list__info">
|
||||||
<span class="job-list__title">{{ job.title }}</span>
|
<span class="job-list__title">{{ job.title }}</span>
|
||||||
<span class="job-list__company">
|
<span class="job-list__company">{{ job.company }}</span>
|
||||||
{{ job.company }}
|
|
||||||
<span v-if="job.is_remote" class="remote-tag">Remote</span>
|
|
||||||
</span>
|
|
||||||
</div>
|
</div>
|
||||||
<div class="job-list__meta">
|
<div class="job-list__meta">
|
||||||
<span v-if="job.match_score !== null" class="score-pill" :class="scorePillClass(job.match_score)">
|
<span v-if="job.match_score !== null" class="score-pill" :class="scorePillClass(job.match_score)">
|
||||||
{{ job.match_score }}%
|
{{ job.match_score }}%
|
||||||
</span>
|
</span>
|
||||||
<button
|
|
||||||
v-if="activeTab === 'approved'"
|
|
||||||
class="job-list__action"
|
|
||||||
@click="router.push(`/apply/${job.id}`)"
|
|
||||||
:aria-label="`Draft cover letter for ${job.title}`"
|
|
||||||
>✨ Draft</button>
|
|
||||||
<a :href="job.url" target="_blank" rel="noopener noreferrer" class="job-list__link">
|
<a :href="job.url" target="_blank" rel="noopener noreferrer" class="job-list__link">
|
||||||
View ↗
|
View ↗
|
||||||
</a>
|
</a>
|
||||||
</div>
|
</div>
|
||||||
</li>
|
</li>
|
||||||
</ul>
|
</ul>
|
||||||
</template>
|
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<!-- ── Help overlay ─────────────────────────────────────────────────── -->
|
<!-- ── Help overlay ─────────────────────────────────────────────────── -->
|
||||||
|
|
@ -211,13 +186,12 @@
|
||||||
|
|
||||||
<script setup lang="ts">
|
<script setup lang="ts">
|
||||||
import { ref, computed, watch, onMounted, onUnmounted } from 'vue'
|
import { ref, computed, watch, onMounted, onUnmounted } from 'vue'
|
||||||
import { useRoute, useRouter } from 'vue-router'
|
import { useRoute } from 'vue-router'
|
||||||
import { useReviewStore } from '../stores/review'
|
import { useReviewStore } from '../stores/review'
|
||||||
import JobCardStack from '../components/JobCardStack.vue'
|
import JobCardStack from '../components/JobCardStack.vue'
|
||||||
|
|
||||||
const store = useReviewStore()
|
const store = useReviewStore()
|
||||||
const route = useRoute()
|
const route = useRoute()
|
||||||
const router = useRouter()
|
|
||||||
const stackRef = ref<InstanceType<typeof JobCardStack> | null>(null)
|
const stackRef = ref<InstanceType<typeof JobCardStack> | null>(null)
|
||||||
|
|
||||||
// ─── Tabs ──────────────────────────────────────────────────────────────────────
|
// ─── Tabs ──────────────────────────────────────────────────────────────────────
|
||||||
|
|
@ -341,30 +315,6 @@ function onKeyDown(e: KeyboardEvent) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// ─── List view: sort + filter ─────────────────────────────────────────────────
|
|
||||||
|
|
||||||
type SortKey = 'match_score' | 'date_found' | 'company'
|
|
||||||
const sortBy = ref<SortKey>('match_score')
|
|
||||||
const filterRemote = ref(false)
|
|
||||||
|
|
||||||
const sortedFilteredJobs = computed(() => {
|
|
||||||
let jobs = [...store.listJobs]
|
|
||||||
if (filterRemote.value) jobs = jobs.filter(j => j.is_remote)
|
|
||||||
jobs.sort((a, b) => {
|
|
||||||
if (sortBy.value === 'match_score') return (b.match_score ?? -1) - (a.match_score ?? -1)
|
|
||||||
if (sortBy.value === 'date_found') return new Date(b.date_found).getTime() - new Date(a.date_found).getTime()
|
|
||||||
if (sortBy.value === 'company') return (a.company ?? '').localeCompare(b.company ?? '')
|
|
||||||
return 0
|
|
||||||
})
|
|
||||||
return jobs
|
|
||||||
})
|
|
||||||
|
|
||||||
// Reset filters when switching tabs
|
|
||||||
watch(activeTab, () => {
|
|
||||||
filterRemote.value = false
|
|
||||||
sortBy.value = 'match_score'
|
|
||||||
})
|
|
||||||
|
|
||||||
// ─── List view score pill ─────────────────────────────────────────────────────
|
// ─── List view score pill ─────────────────────────────────────────────────────
|
||||||
|
|
||||||
function scorePillClass(score: number) {
|
function scorePillClass(score: number) {
|
||||||
|
|
@ -709,69 +659,6 @@ kbd {
|
||||||
font-weight: 600;
|
font-weight: 600;
|
||||||
}
|
}
|
||||||
|
|
||||||
.job-list__action {
|
|
||||||
font-size: var(--text-xs);
|
|
||||||
font-weight: 600;
|
|
||||||
color: var(--app-primary);
|
|
||||||
background: color-mix(in srgb, var(--app-primary) 10%, transparent);
|
|
||||||
border: 1px solid color-mix(in srgb, var(--app-primary) 25%, transparent);
|
|
||||||
border-radius: var(--radius-sm);
|
|
||||||
padding: 2px 8px;
|
|
||||||
cursor: pointer;
|
|
||||||
transition: background 150ms;
|
|
||||||
white-space: nowrap;
|
|
||||||
}
|
|
||||||
|
|
||||||
.job-list__action:hover {
|
|
||||||
background: color-mix(in srgb, var(--app-primary) 18%, transparent);
|
|
||||||
}
|
|
||||||
|
|
||||||
.remote-tag {
|
|
||||||
font-size: 0.65rem;
|
|
||||||
font-weight: 700;
|
|
||||||
color: var(--color-info);
|
|
||||||
background: color-mix(in srgb, var(--color-info) 12%, transparent);
|
|
||||||
border-radius: var(--radius-full);
|
|
||||||
padding: 1px 5px;
|
|
||||||
margin-left: 4px;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* ── List controls (sort + filter) ──────────────────────────────────── */
|
|
||||||
|
|
||||||
.list-controls {
|
|
||||||
display: flex;
|
|
||||||
align-items: center;
|
|
||||||
gap: var(--space-3);
|
|
||||||
flex-wrap: wrap;
|
|
||||||
margin-bottom: var(--space-3);
|
|
||||||
}
|
|
||||||
|
|
||||||
.list-sort {
|
|
||||||
font-size: var(--text-xs);
|
|
||||||
border: 1px solid var(--color-border);
|
|
||||||
border-radius: var(--radius-sm);
|
|
||||||
background: var(--color-surface-raised);
|
|
||||||
color: var(--color-text);
|
|
||||||
padding: 3px 8px;
|
|
||||||
cursor: pointer;
|
|
||||||
}
|
|
||||||
|
|
||||||
.list-filter-remote {
|
|
||||||
display: flex;
|
|
||||||
align-items: center;
|
|
||||||
gap: var(--space-1);
|
|
||||||
font-size: var(--text-xs);
|
|
||||||
color: var(--color-text-muted);
|
|
||||||
cursor: pointer;
|
|
||||||
user-select: none;
|
|
||||||
}
|
|
||||||
|
|
||||||
.list-count {
|
|
||||||
font-size: var(--text-xs);
|
|
||||||
color: var(--color-text-muted);
|
|
||||||
margin-left: auto;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* ── Help overlay ────────────────────────────────────────────────────── */
|
/* ── Help overlay ────────────────────────────────────────────────────── */
|
||||||
|
|
||||||
.help-overlay {
|
.help-overlay {
|
||||||
|
|
|
||||||
|
|
@ -6,7 +6,7 @@ import { useAppConfigStore } from '../../stores/appConfig'
|
||||||
|
|
||||||
const store = useFineTuneStore()
|
const store = useFineTuneStore()
|
||||||
const config = useAppConfigStore()
|
const config = useAppConfigStore()
|
||||||
const { step, inFlightJob, jobStatus, pairsCount, quotaRemaining, pairs, pairsLoading } = storeToRefs(store)
|
const { step, inFlightJob, jobStatus, pairsCount, quotaRemaining } = storeToRefs(store)
|
||||||
|
|
||||||
const fileInput = ref<HTMLInputElement | null>(null)
|
const fileInput = ref<HTMLInputElement | null>(null)
|
||||||
const selectedFiles = ref<File[]>([])
|
const selectedFiles = ref<File[]>([])
|
||||||
|
|
@ -45,7 +45,6 @@ async function checkLocalModel() {
|
||||||
|
|
||||||
onMounted(async () => {
|
onMounted(async () => {
|
||||||
store.startPolling()
|
store.startPolling()
|
||||||
await store.loadPairs()
|
|
||||||
if (store.step === 3 && !config.isCloud) await checkLocalModel()
|
if (store.step === 3 && !config.isCloud) await checkLocalModel()
|
||||||
})
|
})
|
||||||
onUnmounted(() => { store.stopPolling(); store.resetStep() })
|
onUnmounted(() => { store.stopPolling(); store.resetStep() })
|
||||||
|
|
@ -100,22 +99,6 @@ onUnmounted(() => { store.stopPolling(); store.resetStep() })
|
||||||
</button>
|
</button>
|
||||||
<button @click="store.step = 3" class="btn-secondary">Skip → Train</button>
|
<button @click="store.step = 3" class="btn-secondary">Skip → Train</button>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<!-- Training pairs list -->
|
|
||||||
<div v-if="pairs.length > 0" class="pairs-list">
|
|
||||||
<h4>Training Pairs <span class="pairs-badge">{{ pairs.length }}</span></h4>
|
|
||||||
<p class="section-note">Review and remove any low-quality pairs before training.</p>
|
|
||||||
<div v-if="pairsLoading" class="pairs-loading">Loading…</div>
|
|
||||||
<ul v-else class="pairs-items">
|
|
||||||
<li v-for="pair in pairs" :key="pair.index" class="pair-item">
|
|
||||||
<div class="pair-info">
|
|
||||||
<span class="pair-instruction">{{ pair.instruction }}</span>
|
|
||||||
<span class="pair-source">{{ pair.source_file }}</span>
|
|
||||||
</div>
|
|
||||||
<button class="pair-delete" @click="store.deletePair(pair.index)" title="Remove this pair">✕</button>
|
|
||||||
</li>
|
|
||||||
</ul>
|
|
||||||
</div>
|
|
||||||
</section>
|
</section>
|
||||||
|
|
||||||
<!-- Step 3: Train -->
|
<!-- Step 3: Train -->
|
||||||
|
|
@ -177,16 +160,4 @@ onUnmounted(() => { store.stopPolling(); store.resetStep() })
|
||||||
.status-running { background: var(--color-warning-bg, #fef3c7); color: var(--color-warning-fg, #92400e); }
|
.status-running { background: var(--color-warning-bg, #fef3c7); color: var(--color-warning-fg, #92400e); }
|
||||||
.status-ok { color: var(--color-success, #16a34a); }
|
.status-ok { color: var(--color-success, #16a34a); }
|
||||||
.status-fail { color: var(--color-error, #dc2626); }
|
.status-fail { color: var(--color-error, #dc2626); }
|
||||||
|
|
||||||
.pairs-list { margin-top: var(--space-6, 1.5rem); }
|
|
||||||
.pairs-list h4 { font-size: 0.95rem; font-weight: 600; margin: 0 0 var(--space-2, 0.5rem); display: flex; align-items: center; gap: 0.5rem; }
|
|
||||||
.pairs-badge { background: var(--color-primary, #2d5a27); color: #fff; font-size: 0.75rem; padding: 1px 7px; border-radius: var(--radius-full, 9999px); }
|
|
||||||
.pairs-loading { color: var(--color-text-muted); font-size: 0.875rem; padding: var(--space-2, 0.5rem) 0; }
|
|
||||||
.pairs-items { list-style: none; margin: 0; padding: 0; display: flex; flex-direction: column; gap: var(--space-2, 0.5rem); max-height: 280px; overflow-y: auto; }
|
|
||||||
.pair-item { display: flex; align-items: center; gap: var(--space-3, 0.75rem); padding: var(--space-2, 0.5rem) var(--space-3, 0.75rem); background: var(--color-surface-alt); border: 1px solid var(--color-border-light); border-radius: var(--radius-md); }
|
|
||||||
.pair-info { flex: 1; min-width: 0; display: flex; flex-direction: column; gap: 2px; }
|
|
||||||
.pair-instruction { font-size: 0.85rem; color: var(--color-text); white-space: nowrap; overflow: hidden; text-overflow: ellipsis; }
|
|
||||||
.pair-source { font-size: 0.75rem; color: var(--color-text-muted); }
|
|
||||||
.pair-delete { flex-shrink: 0; background: none; border: none; color: var(--color-error); cursor: pointer; font-size: 0.9rem; padding: 2px 4px; border-radius: var(--radius-sm); transition: background 150ms; }
|
|
||||||
.pair-delete:hover { background: var(--color-error); color: #fff; }
|
|
||||||
</style>
|
</style>
|
||||||
|
|
|
||||||
|
|
@ -62,16 +62,9 @@
|
||||||
rows="3"
|
rows="3"
|
||||||
placeholder="How you write and communicate — used to shape cover letter voice."
|
placeholder="How you write and communicate — used to shape cover letter voice."
|
||||||
/>
|
/>
|
||||||
<button
|
|
||||||
v-if="config.tier !== 'free'"
|
|
||||||
class="btn-generate"
|
|
||||||
type="button"
|
|
||||||
@click="generateVoice"
|
|
||||||
:disabled="generatingVoice"
|
|
||||||
>{{ generatingVoice ? 'Generating…' : 'Generate ✦' }}</button>
|
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<div v-if="!config.isCloud" class="field-row">
|
<div class="field-row">
|
||||||
<label class="field-label" for="profile-inference">Inference profile</label>
|
<label class="field-label" for="profile-inference">Inference profile</label>
|
||||||
<select id="profile-inference" v-model="store.inference_profile" class="select-input">
|
<select id="profile-inference" v-model="store.inference_profile" class="select-input">
|
||||||
<option value="remote">Remote</option>
|
<option value="remote">Remote</option>
|
||||||
|
|
@ -217,7 +210,6 @@ const config = useAppConfigStore()
|
||||||
const newNdaCompany = ref('')
|
const newNdaCompany = ref('')
|
||||||
const generatingSummary = ref(false)
|
const generatingSummary = ref(false)
|
||||||
const generatingMissions = ref(false)
|
const generatingMissions = ref(false)
|
||||||
const generatingVoice = ref(false)
|
|
||||||
|
|
||||||
onMounted(() => { store.load() })
|
onMounted(() => { store.load() })
|
||||||
|
|
||||||
|
|
@ -273,15 +265,6 @@ async function generateMissions() {
|
||||||
}))
|
}))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
async function generateVoice() {
|
|
||||||
generatingVoice.value = true
|
|
||||||
const { data, error } = await useApiFetch<{ voice?: string }>(
|
|
||||||
'/api/settings/profile/generate-voice', { method: 'POST' }
|
|
||||||
)
|
|
||||||
generatingVoice.value = false
|
|
||||||
if (!error && data?.voice) store.candidate_voice = data.voice
|
|
||||||
}
|
|
||||||
</script>
|
</script>
|
||||||
|
|
||||||
<style scoped>
|
<style scoped>
|
||||||
|
|
|
||||||
|
|
@ -15,13 +15,7 @@
|
||||||
<div class="empty-card">
|
<div class="empty-card">
|
||||||
<h3>Upload & Parse</h3>
|
<h3>Upload & Parse</h3>
|
||||||
<p>Upload a PDF, DOCX, or ODT and we'll extract your info automatically.</p>
|
<p>Upload a PDF, DOCX, or ODT and we'll extract your info automatically.</p>
|
||||||
<input type="file" accept=".pdf,.docx,.odt" @change="handleFileSelect" ref="fileInput" />
|
<input type="file" accept=".pdf,.docx,.odt" @change="handleUpload" ref="fileInput" />
|
||||||
<button
|
|
||||||
v-if="pendingFile"
|
|
||||||
@click="handleUpload"
|
|
||||||
:disabled="uploading"
|
|
||||||
style="margin-top:10px"
|
|
||||||
>{{ uploading ? 'Parsing…' : `Parse "${pendingFile.name}"` }}</button>
|
|
||||||
<p v-if="uploadError" class="error">{{ uploadError }}</p>
|
<p v-if="uploadError" class="error">{{ uploadError }}</p>
|
||||||
</div>
|
</div>
|
||||||
<!-- Blank -->
|
<!-- Blank -->
|
||||||
|
|
@ -30,8 +24,8 @@
|
||||||
<p>Start with a blank form and fill in your details.</p>
|
<p>Start with a blank form and fill in your details.</p>
|
||||||
<button @click="store.createBlank()" :disabled="store.loading">Start from Scratch</button>
|
<button @click="store.createBlank()" :disabled="store.loading">Start from Scratch</button>
|
||||||
</div>
|
</div>
|
||||||
<!-- Wizard — self-hosted only -->
|
<!-- Wizard -->
|
||||||
<div v-if="!config.isCloud" class="empty-card">
|
<div class="empty-card">
|
||||||
<h3>Run Setup Wizard</h3>
|
<h3>Run Setup Wizard</h3>
|
||||||
<p>Walk through the onboarding wizard to set up your profile step by step.</p>
|
<p>Walk through the onboarding wizard to set up your profile step by step.</p>
|
||||||
<RouterLink to="/setup">Open Setup Wizard →</RouterLink>
|
<RouterLink to="/setup">Open Setup Wizard →</RouterLink>
|
||||||
|
|
@ -41,21 +35,6 @@
|
||||||
|
|
||||||
<!-- Full form (when resume exists) -->
|
<!-- Full form (when resume exists) -->
|
||||||
<template v-else-if="store.hasResume">
|
<template v-else-if="store.hasResume">
|
||||||
<!-- Replace resume via upload -->
|
|
||||||
<section class="form-section replace-section">
|
|
||||||
<h3>Replace Resume</h3>
|
|
||||||
<p class="section-note">Upload a new PDF, DOCX, or ODT to re-parse and overwrite the current data.</p>
|
|
||||||
<input type="file" accept=".pdf,.docx,.odt" @change="handleFileSelect" ref="replaceFileInput" />
|
|
||||||
<button
|
|
||||||
v-if="pendingFile"
|
|
||||||
@click="handleUpload"
|
|
||||||
:disabled="uploading"
|
|
||||||
class="btn-primary"
|
|
||||||
style="margin-top:10px"
|
|
||||||
>{{ uploading ? 'Parsing…' : `Parse "${pendingFile.name}"` }}</button>
|
|
||||||
<p v-if="uploadError" class="error">{{ uploadError }}</p>
|
|
||||||
</section>
|
|
||||||
|
|
||||||
<!-- Personal Information -->
|
<!-- Personal Information -->
|
||||||
<section class="form-section">
|
<section class="form-section">
|
||||||
<h3>Personal Information</h3>
|
<h3>Personal Information</h3>
|
||||||
|
|
@ -242,22 +221,17 @@ import { ref, onMounted } from 'vue'
|
||||||
import { storeToRefs } from 'pinia'
|
import { storeToRefs } from 'pinia'
|
||||||
import { useResumeStore } from '../../stores/settings/resume'
|
import { useResumeStore } from '../../stores/settings/resume'
|
||||||
import { useProfileStore } from '../../stores/settings/profile'
|
import { useProfileStore } from '../../stores/settings/profile'
|
||||||
import { useAppConfigStore } from '../../stores/appConfig'
|
|
||||||
import { useApiFetch } from '../../composables/useApi'
|
import { useApiFetch } from '../../composables/useApi'
|
||||||
|
|
||||||
const store = useResumeStore()
|
const store = useResumeStore()
|
||||||
const profileStore = useProfileStore()
|
const profileStore = useProfileStore()
|
||||||
const config = useAppConfigStore()
|
|
||||||
const { loadError } = storeToRefs(store)
|
const { loadError } = storeToRefs(store)
|
||||||
const showSelfId = ref(false)
|
const showSelfId = ref(false)
|
||||||
const skillInput = ref('')
|
const skillInput = ref('')
|
||||||
const domainInput = ref('')
|
const domainInput = ref('')
|
||||||
const kwInput = ref('')
|
const kwInput = ref('')
|
||||||
const uploadError = ref<string | null>(null)
|
const uploadError = ref<string | null>(null)
|
||||||
const uploading = ref(false)
|
|
||||||
const pendingFile = ref<File | null>(null)
|
|
||||||
const fileInput = ref<HTMLInputElement | null>(null)
|
const fileInput = ref<HTMLInputElement | null>(null)
|
||||||
const replaceFileInput = ref<HTMLInputElement | null>(null)
|
|
||||||
|
|
||||||
onMounted(async () => {
|
onMounted(async () => {
|
||||||
await store.load()
|
await store.load()
|
||||||
|
|
@ -272,16 +246,9 @@ onMounted(async () => {
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
|
|
||||||
function handleFileSelect(event: Event) {
|
async function handleUpload(event: Event) {
|
||||||
const file = (event.target as HTMLInputElement).files?.[0]
|
const file = (event.target as HTMLInputElement).files?.[0]
|
||||||
pendingFile.value = file ?? null
|
|
||||||
uploadError.value = null
|
|
||||||
}
|
|
||||||
|
|
||||||
async function handleUpload() {
|
|
||||||
const file = pendingFile.value
|
|
||||||
if (!file) return
|
if (!file) return
|
||||||
uploading.value = true
|
|
||||||
uploadError.value = null
|
uploadError.value = null
|
||||||
const formData = new FormData()
|
const formData = new FormData()
|
||||||
formData.append('file', file)
|
formData.append('file', file)
|
||||||
|
|
@ -289,14 +256,10 @@ async function handleUpload() {
|
||||||
'/api/settings/resume/upload',
|
'/api/settings/resume/upload',
|
||||||
{ method: 'POST', body: formData }
|
{ method: 'POST', body: formData }
|
||||||
)
|
)
|
||||||
uploading.value = false
|
|
||||||
if (error || !data?.ok) {
|
if (error || !data?.ok) {
|
||||||
uploadError.value = data?.error ?? (typeof error === 'string' ? error : (error?.kind === 'network' ? error.message : error?.detail ?? 'Upload failed'))
|
uploadError.value = data?.error ?? (typeof error === 'string' ? error : (error?.kind === 'network' ? error.message : error?.detail ?? 'Upload failed'))
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
pendingFile.value = null
|
|
||||||
if (fileInput.value) fileInput.value.value = ''
|
|
||||||
if (replaceFileInput.value) replaceFileInput.value.value = ''
|
|
||||||
if (data.data) {
|
if (data.data) {
|
||||||
await store.load()
|
await store.load()
|
||||||
}
|
}
|
||||||
|
|
@ -344,5 +307,4 @@ h3 { font-size: 1rem; font-weight: 600; margin-bottom: var(--space-3, 16px); col
|
||||||
.section-note { font-size: 0.8rem; color: var(--color-text-secondary, #94a3b8); margin-bottom: 16px; }
|
.section-note { font-size: 0.8rem; color: var(--color-text-secondary, #94a3b8); margin-bottom: 16px; }
|
||||||
.toggle-btn { margin-left: 10px; padding: 2px 10px; background: transparent; border: 1px solid var(--color-border, rgba(255,255,255,0.15)); border-radius: 4px; color: var(--color-text-secondary, #94a3b8); cursor: pointer; font-size: 0.78rem; }
|
.toggle-btn { margin-left: 10px; padding: 2px 10px; background: transparent; border: 1px solid var(--color-border, rgba(255,255,255,0.15)); border-radius: 4px; color: var(--color-text-secondary, #94a3b8); cursor: pointer; font-size: 0.78rem; }
|
||||||
.loading { text-align: center; padding: var(--space-8, 48px); color: var(--color-text-secondary, #94a3b8); }
|
.loading { text-align: center; padding: var(--space-8, 48px); color: var(--color-text-secondary, #94a3b8); }
|
||||||
.replace-section { background: var(--color-surface-2, rgba(255,255,255,0.03)); border-radius: 8px; padding: var(--space-4, 24px); }
|
|
||||||
</style>
|
</style>
|
||||||
|
|
|
||||||
|
|
@ -69,18 +69,7 @@
|
||||||
{{ kw }} <button @click="store.removeTag('exclude_keywords', kw)">×</button>
|
{{ kw }} <button @click="store.removeTag('exclude_keywords', kw)">×</button>
|
||||||
</span>
|
</span>
|
||||||
</div>
|
</div>
|
||||||
<div class="tag-input-row">
|
|
||||||
<input v-model="excludeInput" @keydown.enter.prevent="store.addTag('exclude_keywords', excludeInput); excludeInput = ''" placeholder="Add keyword, press Enter" />
|
<input v-model="excludeInput" @keydown.enter.prevent="store.addTag('exclude_keywords', excludeInput); excludeInput = ''" placeholder="Add keyword, press Enter" />
|
||||||
<button @click="store.suggestExcludeKeywords()" class="btn-suggest">Suggest</button>
|
|
||||||
</div>
|
|
||||||
<div v-if="store.excludeSuggestions.length > 0" class="suggestions">
|
|
||||||
<span
|
|
||||||
v-for="s in store.excludeSuggestions"
|
|
||||||
:key="s"
|
|
||||||
class="suggestion-chip"
|
|
||||||
@click="store.acceptSuggestion('exclude', s)"
|
|
||||||
>+ {{ s }}</span>
|
|
||||||
</div>
|
|
||||||
</section>
|
</section>
|
||||||
|
|
||||||
<!-- Job Boards -->
|
<!-- Job Boards -->
|
||||||
|
|
|
||||||
|
|
@ -42,7 +42,6 @@ const devOverride = computed(() => !!config.devTierOverride)
|
||||||
const gpuProfiles = ['single-gpu', 'dual-gpu']
|
const gpuProfiles = ['single-gpu', 'dual-gpu']
|
||||||
|
|
||||||
const showSystem = computed(() => !config.isCloud)
|
const showSystem = computed(() => !config.isCloud)
|
||||||
const showData = computed(() => !config.isCloud)
|
|
||||||
const showFineTune = computed(() => {
|
const showFineTune = computed(() => {
|
||||||
if (config.isCloud) return config.tier === 'premium'
|
if (config.isCloud) return config.tier === 'premium'
|
||||||
return gpuProfiles.includes(config.inferenceProfile)
|
return gpuProfiles.includes(config.inferenceProfile)
|
||||||
|
|
@ -66,7 +65,7 @@ const allGroups = [
|
||||||
]},
|
]},
|
||||||
{ label: 'Account', items: [
|
{ label: 'Account', items: [
|
||||||
{ key: 'license', path: '/settings/license', label: 'License', show: true },
|
{ key: 'license', path: '/settings/license', label: 'License', show: true },
|
||||||
{ key: 'data', path: '/settings/data', label: 'Data', show: showData },
|
{ key: 'data', path: '/settings/data', label: 'Data', show: true },
|
||||||
{ key: 'privacy', path: '/settings/privacy', label: 'Privacy', show: true },
|
{ key: 'privacy', path: '/settings/privacy', label: 'Privacy', show: true },
|
||||||
]},
|
]},
|
||||||
{ label: 'Dev', items: [
|
{ label: 'Dev', items: [
|
||||||
|
|
|
||||||
|
|
@ -1,63 +0,0 @@
|
||||||
<template>
|
|
||||||
<div class="step">
|
|
||||||
<h2 class="step__heading">Step 1 — Hardware Detection</h2>
|
|
||||||
<p class="step__caption">
|
|
||||||
Peregrine uses your hardware profile to choose the right inference setup.
|
|
||||||
</p>
|
|
||||||
|
|
||||||
<div v-if="wizard.loading" class="step__info">Detecting hardware…</div>
|
|
||||||
|
|
||||||
<template v-else>
|
|
||||||
<div v-if="wizard.hardware.gpus.length" class="step__success">
|
|
||||||
✅ Detected {{ wizard.hardware.gpus.length }} GPU(s):
|
|
||||||
{{ wizard.hardware.gpus.join(', ') }}
|
|
||||||
</div>
|
|
||||||
<div v-else class="step__info">
|
|
||||||
No NVIDIA GPUs detected. "Remote" or "CPU" mode recommended.
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div class="step__field">
|
|
||||||
<label class="step__label" for="hw-profile">Inference profile</label>
|
|
||||||
<select id="hw-profile" v-model="selectedProfile" class="step__select">
|
|
||||||
<option value="remote">Remote — use cloud API keys</option>
|
|
||||||
<option value="cpu">CPU — local Ollama, no GPU</option>
|
|
||||||
<option value="single-gpu">Single GPU — local Ollama + one GPU</option>
|
|
||||||
<option value="dual-gpu">Dual GPU — local Ollama + two GPUs</option>
|
|
||||||
</select>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div
|
|
||||||
v-if="selectedProfile !== 'remote' && !wizard.hardware.gpus.length"
|
|
||||||
class="step__warning"
|
|
||||||
>
|
|
||||||
⚠️ No GPUs detected — a GPU profile may not work. Choose CPU or Remote
|
|
||||||
if you don't have a local NVIDIA GPU.
|
|
||||||
</div>
|
|
||||||
</template>
|
|
||||||
|
|
||||||
<div class="step__nav step__nav--end">
|
|
||||||
<button class="btn-primary" :disabled="wizard.saving" @click="next">
|
|
||||||
{{ wizard.saving ? 'Saving…' : 'Next →' }}
|
|
||||||
</button>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</template>
|
|
||||||
|
|
||||||
<script setup lang="ts">
|
|
||||||
import { ref, onMounted } from 'vue'
|
|
||||||
import { useRouter } from 'vue-router'
|
|
||||||
import { useWizardStore } from '../../stores/wizard'
|
|
||||||
import './wizard.css'
|
|
||||||
|
|
||||||
const wizard = useWizardStore()
|
|
||||||
const router = useRouter()
|
|
||||||
const selectedProfile = ref(wizard.hardware.selectedProfile)
|
|
||||||
|
|
||||||
onMounted(() => wizard.detectHardware())
|
|
||||||
|
|
||||||
async function next() {
|
|
||||||
wizard.hardware.selectedProfile = selectedProfile.value
|
|
||||||
const ok = await wizard.saveStep(1, { inference_profile: selectedProfile.value })
|
|
||||||
if (ok) router.push('/setup/tier')
|
|
||||||
}
|
|
||||||
</script>
|
|
||||||
|
|
@ -1,117 +0,0 @@
|
||||||
<template>
|
|
||||||
<div class="step">
|
|
||||||
<h2 class="step__heading">Step 4 — Your Identity</h2>
|
|
||||||
<p class="step__caption">
|
|
||||||
Used in cover letters, research briefs, and interview prep. You can update
|
|
||||||
this any time in Settings → My Profile.
|
|
||||||
</p>
|
|
||||||
|
|
||||||
<div class="step__field">
|
|
||||||
<label class="step__label" for="id-name">Full name <span class="required">*</span></label>
|
|
||||||
<input id="id-name" v-model="form.name" type="text" class="step__input"
|
|
||||||
placeholder="Your Name" autocomplete="name" />
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div class="step__field">
|
|
||||||
<label class="step__label" for="id-email">Email <span class="required">*</span></label>
|
|
||||||
<input id="id-email" v-model="form.email" type="email" class="step__input"
|
|
||||||
placeholder="you@example.com" autocomplete="email" />
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div class="step__field">
|
|
||||||
<label class="step__label step__label--optional" for="id-phone">Phone</label>
|
|
||||||
<input id="id-phone" v-model="form.phone" type="tel" class="step__input"
|
|
||||||
placeholder="555-000-0000" autocomplete="tel" />
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div class="step__field">
|
|
||||||
<label class="step__label step__label--optional" for="id-linkedin">LinkedIn URL</label>
|
|
||||||
<input id="id-linkedin" v-model="form.linkedin" type="url" class="step__input"
|
|
||||||
placeholder="linkedin.com/in/yourprofile" autocomplete="url" />
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div class="step__field">
|
|
||||||
<label class="step__label" for="id-summary">
|
|
||||||
Career summary <span class="required">*</span>
|
|
||||||
</label>
|
|
||||||
<textarea
|
|
||||||
id="id-summary"
|
|
||||||
v-model="form.careerSummary"
|
|
||||||
class="step__textarea"
|
|
||||||
rows="5"
|
|
||||||
placeholder="2–3 sentences summarising your experience, domain, and what you're looking for next."
|
|
||||||
/>
|
|
||||||
<p class="field-hint">This appears in your cover letters and research briefs.</p>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div v-if="validationError" class="step__warning">{{ validationError }}</div>
|
|
||||||
|
|
||||||
<div class="step__nav">
|
|
||||||
<button class="btn-ghost" @click="back">← Back</button>
|
|
||||||
<button class="btn-primary" :disabled="wizard.saving" @click="next">
|
|
||||||
{{ wizard.saving ? 'Saving…' : 'Next →' }}
|
|
||||||
</button>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</template>
|
|
||||||
|
|
||||||
<script setup lang="ts">
|
|
||||||
import { reactive, ref } from 'vue'
|
|
||||||
import { useRouter } from 'vue-router'
|
|
||||||
import { useWizardStore } from '../../stores/wizard'
|
|
||||||
import './wizard.css'
|
|
||||||
|
|
||||||
const wizard = useWizardStore()
|
|
||||||
const router = useRouter()
|
|
||||||
const validationError = ref('')
|
|
||||||
|
|
||||||
// Local reactive copy — sync back to store on Next
|
|
||||||
const form = reactive({
|
|
||||||
name: wizard.identity.name,
|
|
||||||
email: wizard.identity.email,
|
|
||||||
phone: wizard.identity.phone,
|
|
||||||
linkedin: wizard.identity.linkedin,
|
|
||||||
careerSummary: wizard.identity.careerSummary,
|
|
||||||
})
|
|
||||||
|
|
||||||
function back() { router.push('/setup/resume') }
|
|
||||||
|
|
||||||
async function next() {
|
|
||||||
validationError.value = ''
|
|
||||||
if (!form.name.trim()) {
|
|
||||||
validationError.value = 'Full name is required.'
|
|
||||||
return
|
|
||||||
}
|
|
||||||
if (!form.email.trim() || !form.email.includes('@')) {
|
|
||||||
validationError.value = 'A valid email address is required.'
|
|
||||||
return
|
|
||||||
}
|
|
||||||
if (!form.careerSummary.trim()) {
|
|
||||||
validationError.value = 'Please add a short career summary.'
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
wizard.identity = { ...form }
|
|
||||||
const ok = await wizard.saveStep(4, {
|
|
||||||
name: form.name,
|
|
||||||
email: form.email,
|
|
||||||
phone: form.phone,
|
|
||||||
linkedin: form.linkedin,
|
|
||||||
career_summary: form.careerSummary,
|
|
||||||
})
|
|
||||||
if (ok) router.push('/setup/inference')
|
|
||||||
}
|
|
||||||
</script>
|
|
||||||
|
|
||||||
<style scoped>
|
|
||||||
.required {
|
|
||||||
color: var(--color-error);
|
|
||||||
margin-left: 2px;
|
|
||||||
}
|
|
||||||
|
|
||||||
.field-hint {
|
|
||||||
font-size: 0.8rem;
|
|
||||||
color: var(--color-text-muted);
|
|
||||||
margin-top: var(--space-1);
|
|
||||||
}
|
|
||||||
</style>
|
|
||||||
|
|
@ -1,169 +0,0 @@
|
||||||
<template>
|
|
||||||
<div class="step">
|
|
||||||
<h2 class="step__heading">Step 5 — Inference & API Keys</h2>
|
|
||||||
<p class="step__caption">
|
|
||||||
Configure how Peregrine generates AI content. You can adjust this any time
|
|
||||||
in Settings → System.
|
|
||||||
</p>
|
|
||||||
|
|
||||||
<!-- Remote mode -->
|
|
||||||
<template v-if="isRemote">
|
|
||||||
<div class="step__info">
|
|
||||||
Remote mode: at least one external API key is required for AI generation.
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div class="step__field">
|
|
||||||
<label class="step__label" for="inf-anthropic">Anthropic API key</label>
|
|
||||||
<input id="inf-anthropic" v-model="form.anthropicKey" type="password"
|
|
||||||
class="step__input" placeholder="sk-ant-…" autocomplete="off" />
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div class="step__field">
|
|
||||||
<label class="step__label step__label--optional" for="inf-oai-url">
|
|
||||||
OpenAI-compatible endpoint
|
|
||||||
</label>
|
|
||||||
<input id="inf-oai-url" v-model="form.openaiUrl" type="url"
|
|
||||||
class="step__input" placeholder="https://api.together.xyz/v1" />
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div v-if="form.openaiUrl" class="step__field">
|
|
||||||
<label class="step__label step__label--optional" for="inf-oai-key">
|
|
||||||
Endpoint API key
|
|
||||||
</label>
|
|
||||||
<input id="inf-oai-key" v-model="form.openaiKey" type="password"
|
|
||||||
class="step__input" placeholder="API key for the endpoint above"
|
|
||||||
autocomplete="off" />
|
|
||||||
</div>
|
|
||||||
</template>
|
|
||||||
|
|
||||||
<!-- Local mode -->
|
|
||||||
<template v-else>
|
|
||||||
<div class="step__info">
|
|
||||||
Local mode ({{ wizard.hardware.selectedProfile }}): Peregrine uses
|
|
||||||
Ollama for AI generation. No API keys needed.
|
|
||||||
</div>
|
|
||||||
</template>
|
|
||||||
|
|
||||||
<!-- Advanced: service ports -->
|
|
||||||
<div class="step__expandable">
|
|
||||||
<button class="step__expandable__toggle" @click="showAdvanced = !showAdvanced">
|
|
||||||
{{ showAdvanced ? '▼' : '▶' }} Advanced — service hosts & ports
|
|
||||||
</button>
|
|
||||||
<div v-if="showAdvanced" class="step__expandable__body">
|
|
||||||
<div class="svc-row" v-for="svc in services" :key="svc.key">
|
|
||||||
<span class="svc-label">{{ svc.label }}</span>
|
|
||||||
<input v-model="svc.host" type="text" class="step__input svc-input" />
|
|
||||||
<input v-model.number="svc.port" type="number" class="step__input svc-port" />
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<!-- Connection test -->
|
|
||||||
<div class="test-row">
|
|
||||||
<button class="btn-secondary" :disabled="testing" @click="runTest">
|
|
||||||
{{ testing ? 'Testing…' : '🔌 Test connection' }}
|
|
||||||
</button>
|
|
||||||
<span v-if="testResult" :class="testResult.ok ? 'test-ok' : 'test-warn'">
|
|
||||||
{{ testResult.message }}
|
|
||||||
</span>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div class="step__nav">
|
|
||||||
<button class="btn-ghost" @click="back">← Back</button>
|
|
||||||
<button class="btn-primary" :disabled="wizard.saving" @click="next">
|
|
||||||
{{ wizard.saving ? 'Saving…' : 'Next →' }}
|
|
||||||
</button>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</template>
|
|
||||||
|
|
||||||
<script setup lang="ts">
|
|
||||||
import { reactive, ref, computed } from 'vue'
|
|
||||||
import { useRouter } from 'vue-router'
|
|
||||||
import { useWizardStore } from '../../stores/wizard'
|
|
||||||
import './wizard.css'
|
|
||||||
|
|
||||||
const wizard = useWizardStore()
|
|
||||||
const router = useRouter()
|
|
||||||
|
|
||||||
const isRemote = computed(() => wizard.hardware.selectedProfile === 'remote')
|
|
||||||
const showAdvanced = ref(false)
|
|
||||||
const testing = ref(false)
|
|
||||||
const testResult = ref<{ ok: boolean; message: string } | null>(null)
|
|
||||||
|
|
||||||
const form = reactive({
|
|
||||||
anthropicKey: wizard.inference.anthropicKey,
|
|
||||||
openaiUrl: wizard.inference.openaiUrl,
|
|
||||||
openaiKey: wizard.inference.openaiKey,
|
|
||||||
})
|
|
||||||
|
|
||||||
const services = reactive([
|
|
||||||
{ key: 'ollama', label: 'Ollama', host: 'ollama', port: 11434 },
|
|
||||||
{ key: 'searxng', label: 'SearXNG', host: 'searxng', port: 8080 },
|
|
||||||
])
|
|
||||||
|
|
||||||
async function runTest() {
|
|
||||||
testing.value = true
|
|
||||||
testResult.value = null
|
|
||||||
wizard.inference.anthropicKey = form.anthropicKey
|
|
||||||
wizard.inference.openaiUrl = form.openaiUrl
|
|
||||||
wizard.inference.openaiKey = form.openaiKey
|
|
||||||
testResult.value = await wizard.testInference()
|
|
||||||
testing.value = false
|
|
||||||
}
|
|
||||||
|
|
||||||
function back() { router.push('/setup/identity') }
|
|
||||||
|
|
||||||
async function next() {
|
|
||||||
// Sync form back to store
|
|
||||||
wizard.inference.anthropicKey = form.anthropicKey
|
|
||||||
wizard.inference.openaiUrl = form.openaiUrl
|
|
||||||
wizard.inference.openaiKey = form.openaiKey
|
|
||||||
|
|
||||||
const svcMap: Record<string, string | number> = {}
|
|
||||||
services.forEach(s => {
|
|
||||||
svcMap[`${s.key}_host`] = s.host
|
|
||||||
svcMap[`${s.key}_port`] = s.port
|
|
||||||
})
|
|
||||||
wizard.inference.services = svcMap
|
|
||||||
|
|
||||||
const ok = await wizard.saveStep(5, {
|
|
||||||
anthropic_key: form.anthropicKey,
|
|
||||||
openai_url: form.openaiUrl,
|
|
||||||
openai_key: form.openaiKey,
|
|
||||||
services: svcMap,
|
|
||||||
})
|
|
||||||
if (ok) router.push('/setup/search')
|
|
||||||
}
|
|
||||||
</script>
|
|
||||||
|
|
||||||
<style scoped>
|
|
||||||
.test-row {
|
|
||||||
display: flex;
|
|
||||||
align-items: center;
|
|
||||||
gap: var(--space-4);
|
|
||||||
margin-bottom: var(--space-4);
|
|
||||||
flex-wrap: wrap;
|
|
||||||
}
|
|
||||||
|
|
||||||
.test-ok { font-size: 0.875rem; color: var(--color-success); }
|
|
||||||
.test-warn { font-size: 0.875rem; color: var(--color-warning); }
|
|
||||||
|
|
||||||
.svc-row {
|
|
||||||
display: grid;
|
|
||||||
grid-template-columns: 6rem 1fr 5rem;
|
|
||||||
gap: var(--space-2);
|
|
||||||
align-items: center;
|
|
||||||
margin-bottom: var(--space-2);
|
|
||||||
}
|
|
||||||
|
|
||||||
.svc-label {
|
|
||||||
font-size: 0.875rem;
|
|
||||||
font-weight: 500;
|
|
||||||
color: var(--color-text-muted);
|
|
||||||
}
|
|
||||||
|
|
||||||
.svc-port {
|
|
||||||
text-align: right;
|
|
||||||
}
|
|
||||||
</style>
|
|
||||||
|
|
@ -1,160 +0,0 @@
|
||||||
<template>
|
|
||||||
<div class="step">
|
|
||||||
<h2 class="step__heading">Step 7 — Integrations</h2>
|
|
||||||
<p class="step__caption">
|
|
||||||
Optional. Connect external tools to supercharge your workflow.
|
|
||||||
You can configure these any time in Settings → System.
|
|
||||||
</p>
|
|
||||||
|
|
||||||
<div class="int-grid">
|
|
||||||
<label
|
|
||||||
v-for="card in integrations"
|
|
||||||
:key="card.id"
|
|
||||||
class="int-card"
|
|
||||||
:class="{
|
|
||||||
'int-card--selected': selected.has(card.id),
|
|
||||||
'int-card--paid': card.paid && !isPaid,
|
|
||||||
}"
|
|
||||||
>
|
|
||||||
<input
|
|
||||||
type="checkbox"
|
|
||||||
class="int-card__check"
|
|
||||||
:value="card.id"
|
|
||||||
:disabled="card.paid && !isPaid"
|
|
||||||
v-model="checkedIds"
|
|
||||||
/>
|
|
||||||
<span class="int-card__icon" aria-hidden="true">{{ card.icon }}</span>
|
|
||||||
<span class="int-card__name">{{ card.name }}</span>
|
|
||||||
<span v-if="card.paid && !isPaid" class="int-card__badge">Paid</span>
|
|
||||||
</label>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div v-if="selected.size > 0" class="step__info" style="margin-top: var(--space-4)">
|
|
||||||
You'll configure credentials for {{ [...selected].map(id => labelFor(id)).join(', ') }}
|
|
||||||
in Settings → System after setup completes.
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div class="step__nav">
|
|
||||||
<button class="btn-ghost" @click="back">← Back</button>
|
|
||||||
<button class="btn-primary" :disabled="wizard.saving" @click="finish">
|
|
||||||
{{ wizard.saving ? 'Saving…' : 'Finish Setup →' }}
|
|
||||||
</button>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</template>
|
|
||||||
|
|
||||||
<script setup lang="ts">
|
|
||||||
import { ref, computed } from 'vue'
|
|
||||||
import { useRouter } from 'vue-router'
|
|
||||||
import { useWizardStore } from '../../stores/wizard'
|
|
||||||
import { useAppConfigStore } from '../../stores/appConfig'
|
|
||||||
import './wizard.css'
|
|
||||||
|
|
||||||
const wizard = useWizardStore()
|
|
||||||
const config = useAppConfigStore()
|
|
||||||
const router = useRouter()
|
|
||||||
|
|
||||||
const isPaid = computed(() =>
|
|
||||||
wizard.tier === 'paid' || wizard.tier === 'premium',
|
|
||||||
)
|
|
||||||
|
|
||||||
interface IntegrationCard {
|
|
||||||
id: string
|
|
||||||
name: string
|
|
||||||
icon: string
|
|
||||||
paid: boolean
|
|
||||||
}
|
|
||||||
|
|
||||||
const integrations: IntegrationCard[] = [
|
|
||||||
{ id: 'notion', name: 'Notion', icon: '🗒️', paid: false },
|
|
||||||
{ id: 'google_calendar', name: 'Google Calendar', icon: '📅', paid: true },
|
|
||||||
{ id: 'apple_calendar', name: 'Apple Calendar', icon: '🍏', paid: true },
|
|
||||||
{ id: 'slack', name: 'Slack', icon: '💬', paid: true },
|
|
||||||
{ id: 'discord', name: 'Discord', icon: '🎮', paid: true },
|
|
||||||
{ id: 'google_drive', name: 'Google Drive', icon: '📁', paid: true },
|
|
||||||
]
|
|
||||||
|
|
||||||
const checkedIds = ref<string[]>([])
|
|
||||||
const selected = computed(() => new Set(checkedIds.value))
|
|
||||||
|
|
||||||
function labelFor(id: string): string {
|
|
||||||
return integrations.find(i => i.id === id)?.name ?? id
|
|
||||||
}
|
|
||||||
|
|
||||||
function back() { router.push('/setup/search') }
|
|
||||||
|
|
||||||
async function finish() {
|
|
||||||
// Save integration selections (step 7) then mark wizard complete
|
|
||||||
await wizard.saveStep(7, { integrations: [...checkedIds.value] })
|
|
||||||
const ok = await wizard.complete()
|
|
||||||
if (ok) router.replace('/')
|
|
||||||
}
|
|
||||||
</script>
|
|
||||||
|
|
||||||
<style scoped>
|
|
||||||
.int-grid {
|
|
||||||
display: grid;
|
|
||||||
grid-template-columns: repeat(auto-fill, minmax(140px, 1fr));
|
|
||||||
gap: var(--space-3);
|
|
||||||
margin-top: var(--space-2);
|
|
||||||
}
|
|
||||||
|
|
||||||
.int-card {
|
|
||||||
position: relative;
|
|
||||||
display: flex;
|
|
||||||
flex-direction: column;
|
|
||||||
align-items: center;
|
|
||||||
gap: var(--space-2);
|
|
||||||
padding: var(--space-4) var(--space-3);
|
|
||||||
border: 2px solid var(--color-border-light);
|
|
||||||
border-radius: var(--radius-md);
|
|
||||||
background: var(--color-surface-alt);
|
|
||||||
cursor: pointer;
|
|
||||||
transition: border-color var(--transition), background var(--transition);
|
|
||||||
text-align: center;
|
|
||||||
}
|
|
||||||
|
|
||||||
.int-card:hover:not(.int-card--paid) {
|
|
||||||
border-color: var(--color-border);
|
|
||||||
}
|
|
||||||
|
|
||||||
.int-card--selected {
|
|
||||||
border-color: var(--color-primary);
|
|
||||||
background: color-mix(in srgb, var(--color-primary) 6%, var(--color-surface-alt));
|
|
||||||
}
|
|
||||||
|
|
||||||
.int-card--paid {
|
|
||||||
opacity: 0.55;
|
|
||||||
cursor: not-allowed;
|
|
||||||
}
|
|
||||||
|
|
||||||
.int-card__check {
|
|
||||||
/* visually hidden but accessible */
|
|
||||||
position: absolute;
|
|
||||||
opacity: 0;
|
|
||||||
width: 0;
|
|
||||||
height: 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
.int-card__icon {
|
|
||||||
font-size: 1.75rem;
|
|
||||||
}
|
|
||||||
|
|
||||||
.int-card__name {
|
|
||||||
font-size: 0.8rem;
|
|
||||||
font-weight: 600;
|
|
||||||
color: var(--color-text);
|
|
||||||
line-height: 1.2;
|
|
||||||
}
|
|
||||||
|
|
||||||
.int-card__badge {
|
|
||||||
font-size: 0.65rem;
|
|
||||||
font-weight: 700;
|
|
||||||
letter-spacing: 0.04em;
|
|
||||||
text-transform: uppercase;
|
|
||||||
color: var(--color-warning);
|
|
||||||
background: color-mix(in srgb, var(--color-warning) 12%, transparent);
|
|
||||||
border-radius: var(--radius-full);
|
|
||||||
padding: 1px 6px;
|
|
||||||
}
|
|
||||||
</style>
|
|
||||||
|
|
@ -1,204 +0,0 @@
|
||||||
<template>
|
|
||||||
<div class="wizard">
|
|
||||||
<div class="wizard__card">
|
|
||||||
<!-- Header -->
|
|
||||||
<div class="wizard__header">
|
|
||||||
<img
|
|
||||||
v-if="logoSrc"
|
|
||||||
:src="logoSrc"
|
|
||||||
alt="Peregrine"
|
|
||||||
class="wizard__logo"
|
|
||||||
/>
|
|
||||||
<h1 class="wizard__title">Welcome to Peregrine</h1>
|
|
||||||
<p class="wizard__subtitle">
|
|
||||||
Complete the setup to start your job search.
|
|
||||||
Progress saves automatically.
|
|
||||||
</p>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<!-- Progress bar -->
|
|
||||||
<div class="wizard__progress" role="progressbar"
|
|
||||||
:aria-valuenow="Math.round(wizard.progressFraction * 100)"
|
|
||||||
aria-valuemin="0" aria-valuemax="100">
|
|
||||||
<div class="wizard__progress-track">
|
|
||||||
<div class="wizard__progress-fill" :style="{ width: `${wizard.progressFraction * 100}%` }" />
|
|
||||||
</div>
|
|
||||||
<span class="wizard__progress-label">{{ wizard.stepLabel }}</span>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<!-- Step content -->
|
|
||||||
<div class="wizard__body">
|
|
||||||
<div v-if="wizard.loading" class="wizard__loading" aria-live="polite">
|
|
||||||
<span class="wizard__spinner" aria-hidden="true" />
|
|
||||||
Loading…
|
|
||||||
</div>
|
|
||||||
<RouterView v-else />
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<!-- Global error banner -->
|
|
||||||
<div v-if="wizard.errors.length" class="wizard__error" role="alert">
|
|
||||||
<span v-for="e in wizard.errors" :key="e">{{ e }}</span>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</template>
|
|
||||||
|
|
||||||
<script setup lang="ts">
|
|
||||||
import { onMounted } from 'vue'
|
|
||||||
import { useRouter } from 'vue-router'
|
|
||||||
import { useWizardStore } from '../../stores/wizard'
|
|
||||||
import { useAppConfigStore } from '../../stores/appConfig'
|
|
||||||
|
|
||||||
const wizard = useWizardStore()
|
|
||||||
const config = useAppConfigStore()
|
|
||||||
const router = useRouter()
|
|
||||||
|
|
||||||
// Peregrine logo — served from the static assets directory
|
|
||||||
const logoSrc = '/static/peregrine_logo_circle.png'
|
|
||||||
|
|
||||||
onMounted(async () => {
|
|
||||||
if (!config.loaded) await config.load()
|
|
||||||
const target = await wizard.loadStatus(config.isCloud)
|
|
||||||
if (router.currentRoute.value.path === '/setup') {
|
|
||||||
router.replace(target)
|
|
||||||
}
|
|
||||||
})
|
|
||||||
</script>
|
|
||||||
|
|
||||||
<style scoped>
|
|
||||||
.wizard {
|
|
||||||
min-height: 100dvh;
|
|
||||||
display: flex;
|
|
||||||
align-items: flex-start;
|
|
||||||
justify-content: center;
|
|
||||||
padding: var(--space-8) var(--space-4);
|
|
||||||
background: var(--color-surface);
|
|
||||||
}
|
|
||||||
|
|
||||||
.wizard__card {
|
|
||||||
width: 100%;
|
|
||||||
max-width: 640px;
|
|
||||||
background: var(--color-surface-raised);
|
|
||||||
border: 1px solid var(--color-border-light);
|
|
||||||
border-radius: var(--radius-lg);
|
|
||||||
box-shadow: var(--shadow-lg);
|
|
||||||
overflow: hidden;
|
|
||||||
}
|
|
||||||
|
|
||||||
.wizard__header {
|
|
||||||
padding: var(--space-8) var(--space-8) var(--space-6);
|
|
||||||
text-align: center;
|
|
||||||
border-bottom: 1px solid var(--color-border-light);
|
|
||||||
}
|
|
||||||
|
|
||||||
.wizard__logo {
|
|
||||||
width: 56px;
|
|
||||||
height: 56px;
|
|
||||||
border-radius: var(--radius-full);
|
|
||||||
margin-bottom: var(--space-4);
|
|
||||||
}
|
|
||||||
|
|
||||||
.wizard__title {
|
|
||||||
font-family: var(--font-display);
|
|
||||||
font-size: 1.625rem;
|
|
||||||
font-weight: 700;
|
|
||||||
color: var(--color-text);
|
|
||||||
margin-bottom: var(--space-2);
|
|
||||||
}
|
|
||||||
|
|
||||||
.wizard__subtitle {
|
|
||||||
font-size: 0.9rem;
|
|
||||||
color: var(--color-text-muted);
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Progress */
|
|
||||||
.wizard__progress {
|
|
||||||
padding: var(--space-4) var(--space-8);
|
|
||||||
border-bottom: 1px solid var(--color-border-light);
|
|
||||||
}
|
|
||||||
|
|
||||||
.wizard__progress-track {
|
|
||||||
height: 6px;
|
|
||||||
background: var(--color-surface-alt);
|
|
||||||
border-radius: var(--radius-full);
|
|
||||||
overflow: hidden;
|
|
||||||
margin-bottom: var(--space-2);
|
|
||||||
}
|
|
||||||
|
|
||||||
.wizard__progress-fill {
|
|
||||||
height: 100%;
|
|
||||||
background: var(--color-primary);
|
|
||||||
border-radius: var(--radius-full);
|
|
||||||
transition: width var(--transition-slow);
|
|
||||||
}
|
|
||||||
|
|
||||||
.wizard__progress-label {
|
|
||||||
font-size: 0.8rem;
|
|
||||||
color: var(--color-text-muted);
|
|
||||||
font-weight: 500;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Body */
|
|
||||||
.wizard__body {
|
|
||||||
padding: var(--space-8);
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Loading */
|
|
||||||
.wizard__loading {
|
|
||||||
display: flex;
|
|
||||||
align-items: center;
|
|
||||||
gap: var(--space-3);
|
|
||||||
color: var(--color-text-muted);
|
|
||||||
font-size: 0.9rem;
|
|
||||||
padding: var(--space-8) 0;
|
|
||||||
justify-content: center;
|
|
||||||
}
|
|
||||||
|
|
||||||
.wizard__spinner {
|
|
||||||
display: inline-block;
|
|
||||||
width: 18px;
|
|
||||||
height: 18px;
|
|
||||||
border: 2px solid var(--color-border);
|
|
||||||
border-top-color: var(--color-primary);
|
|
||||||
border-radius: var(--radius-full);
|
|
||||||
animation: spin 0.7s linear infinite;
|
|
||||||
}
|
|
||||||
|
|
||||||
@keyframes spin {
|
|
||||||
to { transform: rotate(360deg); }
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Error */
|
|
||||||
.wizard__error {
|
|
||||||
margin: 0 var(--space-8) var(--space-6);
|
|
||||||
padding: var(--space-3) var(--space-4);
|
|
||||||
background: color-mix(in srgb, var(--color-error) 10%, transparent);
|
|
||||||
border: 1px solid var(--color-error);
|
|
||||||
border-radius: var(--radius-md);
|
|
||||||
color: var(--color-error);
|
|
||||||
font-size: 0.875rem;
|
|
||||||
display: flex;
|
|
||||||
flex-direction: column;
|
|
||||||
gap: var(--space-1);
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Mobile */
|
|
||||||
@media (max-width: 680px) {
|
|
||||||
.wizard {
|
|
||||||
padding: 0;
|
|
||||||
align-items: stretch;
|
|
||||||
}
|
|
||||||
|
|
||||||
.wizard__card {
|
|
||||||
border-radius: 0;
|
|
||||||
box-shadow: none;
|
|
||||||
min-height: 100dvh;
|
|
||||||
}
|
|
||||||
|
|
||||||
.wizard__header,
|
|
||||||
.wizard__body {
|
|
||||||
padding-left: var(--space-6);
|
|
||||||
padding-right: var(--space-6);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
</style>
|
|
||||||
|
|
@ -1,313 +0,0 @@
|
||||||
<template>
|
|
||||||
<div class="step">
|
|
||||||
<h2 class="step__heading">Step 3 — Your Resume</h2>
|
|
||||||
<p class="step__caption">
|
|
||||||
Upload a resume to auto-populate your profile, or build it manually.
|
|
||||||
</p>
|
|
||||||
|
|
||||||
<!-- Tabs -->
|
|
||||||
<div class="resume-tabs" role="tablist">
|
|
||||||
<button
|
|
||||||
role="tab"
|
|
||||||
:aria-selected="tab === 'upload'"
|
|
||||||
class="resume-tab"
|
|
||||||
:class="{ 'resume-tab--active': tab === 'upload' }"
|
|
||||||
@click="tab = 'upload'"
|
|
||||||
>Upload File</button>
|
|
||||||
<button
|
|
||||||
role="tab"
|
|
||||||
:aria-selected="tab === 'manual'"
|
|
||||||
class="resume-tab"
|
|
||||||
:class="{ 'resume-tab--active': tab === 'manual' }"
|
|
||||||
@click="tab = 'manual'"
|
|
||||||
>Build Manually</button>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<!-- Upload tab -->
|
|
||||||
<div v-if="tab === 'upload'" class="resume-upload">
|
|
||||||
<label class="upload-zone" :class="{ 'upload-zone--active': dragging }"
|
|
||||||
@dragover.prevent="dragging = true"
|
|
||||||
@dragleave="dragging = false"
|
|
||||||
@drop.prevent="onDrop">
|
|
||||||
<input
|
|
||||||
type="file"
|
|
||||||
accept=".pdf,.docx,.odt"
|
|
||||||
class="upload-input"
|
|
||||||
@change="onFileChange"
|
|
||||||
/>
|
|
||||||
<span class="upload-icon" aria-hidden="true">📄</span>
|
|
||||||
<span class="upload-label">
|
|
||||||
{{ fileName || 'Drop PDF, DOCX, or ODT here, or click to browse' }}
|
|
||||||
</span>
|
|
||||||
</label>
|
|
||||||
|
|
||||||
<div v-if="parseError" class="step__warning">{{ parseError }}</div>
|
|
||||||
|
|
||||||
<button
|
|
||||||
v-if="selectedFile"
|
|
||||||
class="btn-secondary"
|
|
||||||
:disabled="parsing"
|
|
||||||
style="margin-top: var(--space-3)"
|
|
||||||
@click="parseResume"
|
|
||||||
>
|
|
||||||
{{ parsing ? 'Parsing…' : '⚙️ Parse Resume' }}
|
|
||||||
</button>
|
|
||||||
|
|
||||||
<div v-if="parsedOk" class="step__success">
|
|
||||||
✅ Resume parsed — {{ wizard.resume.experience.length }} experience
|
|
||||||
{{ wizard.resume.experience.length === 1 ? 'entry' : 'entries' }} found.
|
|
||||||
Switch to "Build Manually" to review or edit.
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<!-- Manual build tab -->
|
|
||||||
<div v-if="tab === 'manual'" class="resume-manual">
|
|
||||||
<div
|
|
||||||
v-for="(exp, i) in wizard.resume.experience"
|
|
||||||
:key="i"
|
|
||||||
class="exp-entry"
|
|
||||||
>
|
|
||||||
<div class="exp-entry__header">
|
|
||||||
<span class="exp-entry__num">{{ i + 1 }}</span>
|
|
||||||
<button class="exp-entry__remove btn-ghost" @click="removeExp(i)">✕ Remove</button>
|
|
||||||
</div>
|
|
||||||
<div class="step__field">
|
|
||||||
<label class="step__label">Job title</label>
|
|
||||||
<input v-model="exp.title" type="text" class="step__input" placeholder="Software Engineer" />
|
|
||||||
</div>
|
|
||||||
<div class="step__field">
|
|
||||||
<label class="step__label">Company</label>
|
|
||||||
<input v-model="exp.company" type="text" class="step__input" placeholder="Acme Corp" />
|
|
||||||
</div>
|
|
||||||
<div class="exp-dates">
|
|
||||||
<div class="step__field">
|
|
||||||
<label class="step__label">Start</label>
|
|
||||||
<input v-model="exp.start_date" type="text" class="step__input" placeholder="2020" />
|
|
||||||
</div>
|
|
||||||
<div class="step__field">
|
|
||||||
<label class="step__label">End</label>
|
|
||||||
<input v-model="exp.end_date" type="text" class="step__input" placeholder="present" />
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<div class="step__field">
|
|
||||||
<label class="step__label">Key accomplishments (one per line)</label>
|
|
||||||
<textarea
|
|
||||||
class="step__textarea"
|
|
||||||
rows="4"
|
|
||||||
:value="exp.bullets.join('\n')"
|
|
||||||
@input="(e) => exp.bullets = (e.target as HTMLTextAreaElement).value.split('\n')"
|
|
||||||
placeholder="Reduced load time by 40% Led a team of 5 engineers"
|
|
||||||
/>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<button class="btn-secondary" style="width: 100%" @click="addExp">
|
|
||||||
+ Add Experience Entry
|
|
||||||
</button>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div v-if="validationError" class="step__warning" style="margin-top: var(--space-4)">
|
|
||||||
{{ validationError }}
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div class="step__nav">
|
|
||||||
<button class="btn-ghost" @click="back">← Back</button>
|
|
||||||
<button class="btn-primary" :disabled="wizard.saving" @click="next">
|
|
||||||
{{ wizard.saving ? 'Saving…' : 'Next →' }}
|
|
||||||
</button>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</template>
|
|
||||||
|
|
||||||
<script setup lang="ts">
|
|
||||||
import { ref } from 'vue'
|
|
||||||
import { useRouter } from 'vue-router'
|
|
||||||
import { useWizardStore } from '../../stores/wizard'
|
|
||||||
import type { WorkExperience } from '../../stores/wizard'
|
|
||||||
import { useApiFetch } from '../../composables/useApi'
|
|
||||||
import './wizard.css'
|
|
||||||
|
|
||||||
const wizard = useWizardStore()
|
|
||||||
const router = useRouter()
|
|
||||||
|
|
||||||
const tab = ref<'upload' | 'manual'>(
|
|
||||||
wizard.resume.experience.length > 0 ? 'manual' : 'upload',
|
|
||||||
)
|
|
||||||
const dragging = ref(false)
|
|
||||||
const selectedFile = ref<File | null>(null)
|
|
||||||
const fileName = ref('')
|
|
||||||
const parsing = ref(false)
|
|
||||||
const parsedOk = ref(false)
|
|
||||||
const parseError = ref('')
|
|
||||||
const validationError = ref('')
|
|
||||||
|
|
||||||
function onFileChange(e: Event) {
|
|
||||||
const file = (e.target as HTMLInputElement).files?.[0]
|
|
||||||
if (file) { selectedFile.value = file; fileName.value = file.name }
|
|
||||||
}
|
|
||||||
|
|
||||||
function onDrop(e: DragEvent) {
|
|
||||||
dragging.value = false
|
|
||||||
const file = e.dataTransfer?.files[0]
|
|
||||||
if (file) { selectedFile.value = file; fileName.value = file.name }
|
|
||||||
}
|
|
||||||
|
|
||||||
async function parseResume() {
|
|
||||||
if (!selectedFile.value) return
|
|
||||||
parsing.value = true
|
|
||||||
parseError.value = ''
|
|
||||||
parsedOk.value = false
|
|
||||||
|
|
||||||
const form = new FormData()
|
|
||||||
form.append('file', selectedFile.value)
|
|
||||||
|
|
||||||
try {
|
|
||||||
const res = await fetch('/api/settings/resume/upload', { method: 'POST', body: form })
|
|
||||||
if (!res.ok) {
|
|
||||||
parseError.value = `Parse failed (HTTP ${res.status}) — switch to Build Manually to enter your resume.`
|
|
||||||
tab.value = 'manual'
|
|
||||||
return
|
|
||||||
}
|
|
||||||
const resp = await res.json()
|
|
||||||
// API returns { ok, data: { experience, name, email, … } }
|
|
||||||
const data = resp.data ?? {}
|
|
||||||
// Map parsed sections to experience entries
|
|
||||||
if (data.experience?.length) {
|
|
||||||
wizard.resume.experience = data.experience as WorkExperience[]
|
|
||||||
}
|
|
||||||
wizard.resume.parsedData = data
|
|
||||||
// Pre-fill identity from parsed data
|
|
||||||
if (data.name && !wizard.identity.name) wizard.identity.name = data.name
|
|
||||||
if (data.email && !wizard.identity.email) wizard.identity.email = data.email
|
|
||||||
if (data.phone && !wizard.identity.phone) wizard.identity.phone = data.phone
|
|
||||||
if (data.career_summary && !wizard.identity.careerSummary)
|
|
||||||
wizard.identity.careerSummary = data.career_summary
|
|
||||||
|
|
||||||
parsedOk.value = true
|
|
||||||
tab.value = 'manual'
|
|
||||||
} catch {
|
|
||||||
parseError.value = 'Network error — switch to Build Manually to enter your resume.'
|
|
||||||
tab.value = 'manual'
|
|
||||||
} finally {
|
|
||||||
parsing.value = false
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function addExp() {
|
|
||||||
wizard.resume.experience.push({
|
|
||||||
title: '', company: '', start_date: '', end_date: 'present', bullets: [],
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
function removeExp(i: number) {
|
|
||||||
wizard.resume.experience.splice(i, 1)
|
|
||||||
}
|
|
||||||
|
|
||||||
function back() { router.push('/setup/tier') }
|
|
||||||
|
|
||||||
async function next() {
|
|
||||||
validationError.value = ''
|
|
||||||
const valid = wizard.resume.experience.some(e => e.title.trim() && e.company.trim())
|
|
||||||
if (!valid) {
|
|
||||||
validationError.value = 'Add at least one experience entry with a title and company.'
|
|
||||||
return
|
|
||||||
}
|
|
||||||
const ok = await wizard.saveStep(3, { resume: {
|
|
||||||
experience: wizard.resume.experience,
|
|
||||||
...(wizard.resume.parsedData ?? {}),
|
|
||||||
}})
|
|
||||||
if (ok) router.push('/setup/identity')
|
|
||||||
}
|
|
||||||
</script>
|
|
||||||
|
|
||||||
<style scoped>
|
|
||||||
.resume-tabs {
|
|
||||||
display: flex;
|
|
||||||
gap: 0;
|
|
||||||
border-bottom: 2px solid var(--color-border-light);
|
|
||||||
margin-bottom: var(--space-6);
|
|
||||||
}
|
|
||||||
|
|
||||||
.resume-tab {
|
|
||||||
padding: var(--space-2) var(--space-5);
|
|
||||||
background: none;
|
|
||||||
border: none;
|
|
||||||
border-bottom: 2px solid transparent;
|
|
||||||
margin-bottom: -2px;
|
|
||||||
cursor: pointer;
|
|
||||||
font-family: var(--font-body);
|
|
||||||
font-size: 0.9rem;
|
|
||||||
color: var(--color-text-muted);
|
|
||||||
transition: color var(--transition), border-color var(--transition);
|
|
||||||
}
|
|
||||||
|
|
||||||
.resume-tab--active {
|
|
||||||
color: var(--color-primary);
|
|
||||||
border-bottom-color: var(--color-primary);
|
|
||||||
font-weight: 600;
|
|
||||||
}
|
|
||||||
|
|
||||||
.upload-zone {
|
|
||||||
display: flex;
|
|
||||||
flex-direction: column;
|
|
||||||
align-items: center;
|
|
||||||
justify-content: center;
|
|
||||||
gap: var(--space-3);
|
|
||||||
padding: var(--space-8);
|
|
||||||
border: 2px dashed var(--color-border);
|
|
||||||
border-radius: var(--radius-md);
|
|
||||||
cursor: pointer;
|
|
||||||
text-align: center;
|
|
||||||
transition: border-color var(--transition), background var(--transition);
|
|
||||||
}
|
|
||||||
|
|
||||||
.upload-zone--active,
|
|
||||||
.upload-zone:hover {
|
|
||||||
border-color: var(--color-primary);
|
|
||||||
background: var(--color-primary-light);
|
|
||||||
}
|
|
||||||
|
|
||||||
.upload-input {
|
|
||||||
display: none;
|
|
||||||
}
|
|
||||||
|
|
||||||
.upload-icon { font-size: 2rem; }
|
|
||||||
|
|
||||||
.upload-label {
|
|
||||||
font-size: 0.875rem;
|
|
||||||
color: var(--color-text-muted);
|
|
||||||
}
|
|
||||||
|
|
||||||
.exp-entry {
|
|
||||||
border: 1px solid var(--color-border-light);
|
|
||||||
border-radius: var(--radius-md);
|
|
||||||
padding: var(--space-4);
|
|
||||||
margin-bottom: var(--space-4);
|
|
||||||
background: var(--color-surface-alt);
|
|
||||||
}
|
|
||||||
|
|
||||||
.exp-entry__header {
|
|
||||||
display: flex;
|
|
||||||
justify-content: space-between;
|
|
||||||
align-items: center;
|
|
||||||
margin-bottom: var(--space-3);
|
|
||||||
}
|
|
||||||
|
|
||||||
.exp-entry__num {
|
|
||||||
font-weight: 700;
|
|
||||||
font-size: 0.875rem;
|
|
||||||
color: var(--color-text-muted);
|
|
||||||
}
|
|
||||||
|
|
||||||
.exp-entry__remove {
|
|
||||||
font-size: 0.8rem;
|
|
||||||
padding: var(--space-1) var(--space-2);
|
|
||||||
min-height: 32px;
|
|
||||||
}
|
|
||||||
|
|
||||||
.exp-dates {
|
|
||||||
display: grid;
|
|
||||||
grid-template-columns: 1fr 1fr;
|
|
||||||
gap: var(--space-4);
|
|
||||||
}
|
|
||||||
</style>
|
|
||||||
|
|
@ -1,232 +0,0 @@
|
||||||
<template>
|
|
||||||
<div class="step">
|
|
||||||
<h2 class="step__heading">Step 6 — Search Preferences</h2>
|
|
||||||
<p class="step__caption">
|
|
||||||
Tell Peregrine what roles and markets to watch. You can add more profiles
|
|
||||||
in Settings → Search later.
|
|
||||||
</p>
|
|
||||||
|
|
||||||
<!-- Job titles -->
|
|
||||||
<div class="step__field">
|
|
||||||
<label class="step__label">
|
|
||||||
Job titles <span class="required">*</span>
|
|
||||||
</label>
|
|
||||||
<div class="chip-field">
|
|
||||||
<div class="chip-list" v-if="form.titles.length">
|
|
||||||
<span v-for="(t, i) in form.titles" :key="i" class="chip">
|
|
||||||
{{ t }}
|
|
||||||
<button class="chip__remove" @click="removeTitle(i)" aria-label="Remove title">×</button>
|
|
||||||
</span>
|
|
||||||
</div>
|
|
||||||
<input
|
|
||||||
v-model="titleInput"
|
|
||||||
type="text"
|
|
||||||
class="step__input chip-input"
|
|
||||||
placeholder="e.g. Software Engineer — press Enter to add"
|
|
||||||
@keydown.enter.prevent="addTitle"
|
|
||||||
@keydown.","="onTitleComma"
|
|
||||||
/>
|
|
||||||
</div>
|
|
||||||
<p class="field-hint">Press Enter or comma after each title.</p>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<!-- Locations -->
|
|
||||||
<div class="step__field">
|
|
||||||
<label class="step__label">
|
|
||||||
Locations <span class="step__label--optional">(optional)</span>
|
|
||||||
</label>
|
|
||||||
<div class="chip-field">
|
|
||||||
<div class="chip-list" v-if="form.locations.length">
|
|
||||||
<span v-for="(l, i) in form.locations" :key="i" class="chip">
|
|
||||||
{{ l }}
|
|
||||||
<button class="chip__remove" @click="removeLocation(i)" aria-label="Remove location">×</button>
|
|
||||||
</span>
|
|
||||||
</div>
|
|
||||||
<input
|
|
||||||
v-model="locationInput"
|
|
||||||
type="text"
|
|
||||||
class="step__input chip-input"
|
|
||||||
placeholder="e.g. San Francisco, CA — press Enter to add"
|
|
||||||
@keydown.enter.prevent="addLocation"
|
|
||||||
@keydown.","="onLocationComma"
|
|
||||||
/>
|
|
||||||
</div>
|
|
||||||
<p class="field-hint">Leave blank to search everywhere, or add specific cities/metros.</p>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<!-- Remote preference -->
|
|
||||||
<div class="step__field step__field--inline">
|
|
||||||
<label class="step__label step__label--inline" for="srch-remote">
|
|
||||||
Remote jobs only
|
|
||||||
</label>
|
|
||||||
<input
|
|
||||||
id="srch-remote"
|
|
||||||
v-model="form.remoteOnly"
|
|
||||||
type="checkbox"
|
|
||||||
class="step__checkbox"
|
|
||||||
/>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div v-if="validationError" class="step__warning">{{ validationError }}</div>
|
|
||||||
|
|
||||||
<div class="step__nav">
|
|
||||||
<button class="btn-ghost" @click="back">← Back</button>
|
|
||||||
<button class="btn-primary" :disabled="wizard.saving" @click="next">
|
|
||||||
{{ wizard.saving ? 'Saving…' : 'Next →' }}
|
|
||||||
</button>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</template>
|
|
||||||
|
|
||||||
<script setup lang="ts">
|
|
||||||
import { reactive, ref } from 'vue'
|
|
||||||
import { useRouter } from 'vue-router'
|
|
||||||
import { useWizardStore } from '../../stores/wizard'
|
|
||||||
import './wizard.css'
|
|
||||||
|
|
||||||
const wizard = useWizardStore()
|
|
||||||
const router = useRouter()
|
|
||||||
const validationError = ref('')
|
|
||||||
|
|
||||||
const form = reactive({
|
|
||||||
titles: [...wizard.search.titles],
|
|
||||||
locations: [...wizard.search.locations],
|
|
||||||
remoteOnly: false,
|
|
||||||
})
|
|
||||||
|
|
||||||
const titleInput = ref('')
|
|
||||||
const locationInput = ref('')
|
|
||||||
|
|
||||||
function addTitle() {
|
|
||||||
const v = titleInput.value.trim().replace(/,$/, '')
|
|
||||||
if (v && !form.titles.includes(v)) form.titles.push(v)
|
|
||||||
titleInput.value = ''
|
|
||||||
}
|
|
||||||
|
|
||||||
function onTitleComma(e: KeyboardEvent) {
|
|
||||||
e.preventDefault()
|
|
||||||
addTitle()
|
|
||||||
}
|
|
||||||
|
|
||||||
function removeTitle(i: number) {
|
|
||||||
form.titles.splice(i, 1)
|
|
||||||
}
|
|
||||||
|
|
||||||
function addLocation() {
|
|
||||||
const v = locationInput.value.trim().replace(/,$/, '')
|
|
||||||
if (v && !form.locations.includes(v)) form.locations.push(v)
|
|
||||||
locationInput.value = ''
|
|
||||||
}
|
|
||||||
|
|
||||||
function onLocationComma(e: KeyboardEvent) {
|
|
||||||
e.preventDefault()
|
|
||||||
addLocation()
|
|
||||||
}
|
|
||||||
|
|
||||||
function removeLocation(i: number) {
|
|
||||||
form.locations.splice(i, 1)
|
|
||||||
}
|
|
||||||
|
|
||||||
function back() { router.push('/setup/inference') }
|
|
||||||
|
|
||||||
async function next() {
|
|
||||||
// Flush any partial inputs before validating
|
|
||||||
addTitle()
|
|
||||||
addLocation()
|
|
||||||
|
|
||||||
validationError.value = ''
|
|
||||||
if (form.titles.length === 0) {
|
|
||||||
validationError.value = 'Add at least one job title.'
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
wizard.search.titles = [...form.titles]
|
|
||||||
wizard.search.locations = [...form.locations]
|
|
||||||
|
|
||||||
const ok = await wizard.saveStep(6, {
|
|
||||||
search: {
|
|
||||||
titles: form.titles,
|
|
||||||
locations: form.locations,
|
|
||||||
remote_only: form.remoteOnly,
|
|
||||||
},
|
|
||||||
})
|
|
||||||
if (ok) router.push('/setup/integrations')
|
|
||||||
}
|
|
||||||
</script>
|
|
||||||
|
|
||||||
<style scoped>
|
|
||||||
.required {
|
|
||||||
color: var(--color-error);
|
|
||||||
margin-left: 2px;
|
|
||||||
}
|
|
||||||
|
|
||||||
.field-hint {
|
|
||||||
font-size: 0.8rem;
|
|
||||||
color: var(--color-text-muted);
|
|
||||||
margin-top: var(--space-1);
|
|
||||||
}
|
|
||||||
|
|
||||||
.step__field--inline {
|
|
||||||
display: flex;
|
|
||||||
align-items: center;
|
|
||||||
gap: var(--space-3);
|
|
||||||
flex-direction: row;
|
|
||||||
}
|
|
||||||
|
|
||||||
.step__label--inline {
|
|
||||||
margin-bottom: 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
.step__checkbox {
|
|
||||||
width: 18px;
|
|
||||||
height: 18px;
|
|
||||||
accent-color: var(--color-primary);
|
|
||||||
cursor: pointer;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Chip input */
|
|
||||||
.chip-field {
|
|
||||||
display: flex;
|
|
||||||
flex-direction: column;
|
|
||||||
gap: var(--space-2);
|
|
||||||
}
|
|
||||||
|
|
||||||
.chip-list {
|
|
||||||
display: flex;
|
|
||||||
flex-wrap: wrap;
|
|
||||||
gap: var(--space-2);
|
|
||||||
}
|
|
||||||
|
|
||||||
.chip {
|
|
||||||
display: inline-flex;
|
|
||||||
align-items: center;
|
|
||||||
gap: var(--space-1);
|
|
||||||
padding: var(--space-1) var(--space-3);
|
|
||||||
background: color-mix(in srgb, var(--color-primary) 12%, transparent);
|
|
||||||
color: var(--color-primary);
|
|
||||||
border-radius: var(--radius-full);
|
|
||||||
font-size: 0.85rem;
|
|
||||||
font-weight: 500;
|
|
||||||
border: 1px solid color-mix(in srgb, var(--color-primary) 25%, transparent);
|
|
||||||
}
|
|
||||||
|
|
||||||
.chip__remove {
|
|
||||||
background: none;
|
|
||||||
border: none;
|
|
||||||
cursor: pointer;
|
|
||||||
color: inherit;
|
|
||||||
font-size: 1rem;
|
|
||||||
line-height: 1;
|
|
||||||
padding: 0 2px;
|
|
||||||
opacity: 0.7;
|
|
||||||
transition: opacity var(--transition);
|
|
||||||
}
|
|
||||||
|
|
||||||
.chip__remove:hover {
|
|
||||||
opacity: 1;
|
|
||||||
}
|
|
||||||
|
|
||||||
.chip-input {
|
|
||||||
margin-top: var(--space-1);
|
|
||||||
}
|
|
||||||
</style>
|
|
||||||
|
|
@ -1,68 +0,0 @@
|
||||||
<template>
|
|
||||||
<div class="step">
|
|
||||||
<h2 class="step__heading">Step 2 — Choose Your Plan</h2>
|
|
||||||
<p class="step__caption">
|
|
||||||
You can upgrade or change this later in Settings → License.
|
|
||||||
</p>
|
|
||||||
|
|
||||||
<div class="step__radio-group">
|
|
||||||
<label
|
|
||||||
v-for="option in tiers"
|
|
||||||
:key="option.value"
|
|
||||||
class="step__radio-card"
|
|
||||||
:class="{ 'step__radio-card--selected': selected === option.value }"
|
|
||||||
>
|
|
||||||
<input type="radio" :value="option.value" v-model="selected" />
|
|
||||||
<div class="step__radio-card__body">
|
|
||||||
<span class="step__radio-card__title">{{ option.label }}</span>
|
|
||||||
<span class="step__radio-card__desc">{{ option.desc }}</span>
|
|
||||||
</div>
|
|
||||||
</label>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div class="step__nav">
|
|
||||||
<button class="btn-ghost" @click="back">← Back</button>
|
|
||||||
<button class="btn-primary" :disabled="wizard.saving" @click="next">
|
|
||||||
{{ wizard.saving ? 'Saving…' : 'Next →' }}
|
|
||||||
</button>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</template>
|
|
||||||
|
|
||||||
<script setup lang="ts">
/**
 * Wizard step 2 — pricing-tier selection.
 *
 * Renders the three tier options as radio cards (template above), keeps the
 * user's choice in a local ref seeded from the wizard store, persists it via
 * `wizard.saveStep(2, …)`, and advances to the resume step on success.
 */
import { ref } from 'vue'
import { useRouter } from 'vue-router'
import { useWizardStore, type WizardTier } from '../../stores/wizard'
import './wizard.css' // side-effect import: shared styling for all wizard steps

const wizard = useWizardStore()
const router = useRouter()

// Seed from the store so the step shows the previous choice when the user
// navigates back to it.
const selected = ref<WizardTier>(wizard.tier)

/** Small factory for one radio-card entry in the tier list. */
const makeTier = (value: WizardTier, label: string, desc: string) => ({ value, label, desc })

const tiers = [
  makeTier(
    'free',
    '🆓 Free',
    'Core pipeline, job discovery, and resume matching. Bring your own LLM to unlock AI generation.',
  ),
  makeTier(
    'paid',
    '⭐ Paid',
    'Everything in Free, plus cloud AI generation, integrations (Notion, Calendar, Slack), and email sync.',
  ),
  makeTier(
    'premium',
    '🏆 Premium',
    'Everything in Paid, plus fine-tuned cover letter model, multi-user support, and advanced analytics.',
  ),
]

/** Navigates back to the hardware step without saving anything. */
const back = () => {
  router.push('/setup/hardware')
}

/**
 * Writes the selection into the store, persists it as step 2, and — only if
 * the save reports success — moves on to the resume step.
 */
const next = async () => {
  wizard.tier = selected.value
  const ok = await wizard.saveStep(2, { tier: selected.value })
  if (ok) router.push('/setup/resume')
}
</script>
|
|
||||||
|
|
@ -1,329 +0,0 @@
|
||||||
/* wizard.css — shared styles imported by every WizardXxxStep component */

/* ── Step heading ─────────────────────────────────────────────────────────── */
.step__heading {
  font-family: var(--font-display);
  font-size: 1.25rem;
  font-weight: 700;
  color: var(--color-text);
  margin-bottom: var(--space-2);
}

.step__caption {
  font-size: 0.875rem;
  color: var(--color-text-muted);
  margin-bottom: var(--space-6);
  line-height: 1.5;
}

/* ── Info / warning / success banners ─────────────────────────────────────── */
.step__info {
  background: color-mix(in srgb, var(--color-info) 10%, transparent);
  border: 1px solid color-mix(in srgb, var(--color-info) 40%, transparent);
  border-radius: var(--radius-md);
  padding: var(--space-3) var(--space-4);
  font-size: 0.875rem;
  color: var(--color-text);
  margin-bottom: var(--space-4);
  line-height: 1.5;
}

.step__warning {
  background: color-mix(in srgb, var(--color-warning) 10%, transparent);
  border: 1px solid color-mix(in srgb, var(--color-warning) 40%, transparent);
  border-radius: var(--radius-md);
  padding: var(--space-3) var(--space-4);
  font-size: 0.875rem;
  color: var(--color-text);
  margin-bottom: var(--space-4);
}

.step__success {
  background: color-mix(in srgb, var(--color-success) 10%, transparent);
  border: 1px solid color-mix(in srgb, var(--color-success) 40%, transparent);
  border-radius: var(--radius-md);
  padding: var(--space-3) var(--space-4);
  font-size: 0.875rem;
  color: var(--color-text);
  margin-bottom: var(--space-4);
}

/* ── Form fields ──────────────────────────────────────────────────────────── */
.step__field {
  display: flex;
  flex-direction: column;
  gap: var(--space-1);
  margin-bottom: var(--space-4);
}

.step__label {
  font-size: 0.875rem;
  font-weight: 600;
  color: var(--color-text);
}

/* Appends a muted "(optional)" suffix to the label text. */
.step__label--optional::after {
  content: ' (optional)';
  font-weight: 400;
  color: var(--color-text-muted);
}

.step__input,
.step__select,
.step__textarea {
  width: 100%;
  padding: var(--space-2) var(--space-3);
  border: 1px solid var(--color-border);
  border-radius: var(--radius-md);
  background: var(--color-surface-raised);
  color: var(--color-text);
  font-family: var(--font-body);
  font-size: 0.9rem;
  transition: border-color var(--transition);
}

.step__input:focus,
.step__select:focus,
.step__textarea:focus {
  outline: none;
  border-color: var(--color-primary);
  box-shadow: 0 0 0 3px color-mix(in srgb, var(--color-primary) 15%, transparent);
}

/* Monospaced, letter-spaced rendering for secret/key inputs. */
.step__input[type="password"] {
  font-family: var(--font-mono);
  letter-spacing: 0.1em;
}

.step__textarea {
  resize: vertical;
  min-height: 100px;
  line-height: 1.5;
}

/* ── Radio cards (Tier step) ──────────────────────────────────────────────── */
.step__radio-group {
  display: flex;
  flex-direction: column;
  gap: var(--space-3);
  margin-bottom: var(--space-6);
}

.step__radio-card {
  display: flex;
  align-items: flex-start;
  gap: var(--space-3);
  padding: var(--space-4);
  border: 2px solid var(--color-border-light);
  border-radius: var(--radius-md);
  cursor: pointer;
  transition: border-color var(--transition), background var(--transition);
}

.step__radio-card:hover {
  border-color: var(--color-primary);
  background: var(--color-primary-light);
}

.step__radio-card--selected {
  border-color: var(--color-primary);
  background: var(--color-primary-light);
}

.step__radio-card input[type="radio"] {
  margin-top: 2px;
  accent-color: var(--color-primary);
  flex-shrink: 0;
}

.step__radio-card__body {
  display: flex;
  flex-direction: column;
  gap: var(--space-1);
}

.step__radio-card__title {
  font-weight: 700;
  font-size: 0.95rem;
  color: var(--color-text);
}

.step__radio-card__desc {
  font-size: 0.8rem;
  color: var(--color-text-muted);
  line-height: 1.4;
}

/* ── Chip list (Search step) ──────────────────────────────────────────────── */
.step__chip-list {
  display: flex;
  flex-wrap: wrap;
  gap: var(--space-2);
  margin-bottom: var(--space-3);
  min-height: 36px;
}

.step__chip {
  display: inline-flex;
  align-items: center;
  gap: var(--space-1);
  padding: var(--space-1) var(--space-3);
  background: var(--color-primary-light);
  border: 1px solid color-mix(in srgb, var(--color-primary) 30%, transparent);
  border-radius: var(--radius-full);
  font-size: 0.825rem;
  color: var(--color-primary);
  font-weight: 500;
}

.step__chip__remove {
  background: none;
  border: none;
  cursor: pointer;
  color: var(--color-primary);
  padding: 0;
  line-height: 1;
  font-size: 1rem;
  opacity: 0.6;
  transition: opacity var(--transition);
}

.step__chip__remove:hover {
  opacity: 1;
}

.step__chip-input-row {
  display: flex;
  gap: var(--space-2);
}

.step__chip-input-row .step__input {
  flex: 1;
}

/* ── Two-column layout (Search step) ──────────────────────────────────────── */
.step__cols {
  display: grid;
  grid-template-columns: 1fr 1fr;
  gap: var(--space-6);
}

@media (max-width: 520px) {
  .step__cols {
    grid-template-columns: 1fr;
  }
}

/* ── Expandable (advanced section) ────────────────────────────────────────── */
.step__expandable {
  margin-bottom: var(--space-4);
}

.step__expandable__toggle {
  background: none;
  border: none;
  cursor: pointer;
  color: var(--color-text-muted);
  font-size: 0.875rem;
  font-family: var(--font-body);
  padding: var(--space-2) 0;
  display: flex;
  align-items: center;
  gap: var(--space-2);
  transition: color var(--transition);
}

.step__expandable__toggle:hover {
  color: var(--color-text);
}

.step__expandable__body {
  padding: var(--space-4);
  border: 1px solid var(--color-border-light);
  border-radius: var(--radius-md);
  margin-top: var(--space-2);
  background: var(--color-surface-alt);
}

/* ── Navigation footer ────────────────────────────────────────────────────── */
.step__nav {
  display: flex;
  justify-content: space-between;
  align-items: center;
  margin-top: var(--space-8);
  padding-top: var(--space-6);
  border-top: 1px solid var(--color-border-light);
  gap: var(--space-3);
}

.step__nav--end {
  justify-content: flex-end;
}

/* ── Buttons ──────────────────────────────────────────────────────────────── */
.btn-primary {
  padding: var(--space-2) var(--space-6);
  background: var(--color-primary);
  color: var(--color-text-inverse);
  border: none;
  border-radius: var(--radius-md);
  font-family: var(--font-body);
  font-size: 0.9rem;
  font-weight: 600;
  cursor: pointer;
  transition: background var(--transition), opacity var(--transition);
  min-height: 44px; /* touch-target size */
}

.btn-primary:hover:not(:disabled) {
  background: var(--color-primary-hover);
}

.btn-primary:disabled {
  opacity: 0.5;
  cursor: not-allowed;
}

.btn-ghost {
  padding: var(--space-2) var(--space-4);
  background: none;
  color: var(--color-text-muted);
  border: 1px solid var(--color-border);
  border-radius: var(--radius-md);
  font-family: var(--font-body);
  font-size: 0.9rem;
  cursor: pointer;
  transition: color var(--transition), border-color var(--transition);
  min-height: 44px; /* touch-target size */
}

.btn-ghost:hover:not(:disabled) {
  color: var(--color-text);
  border-color: var(--color-border);
}

.btn-ghost:disabled {
  opacity: 0.4;
  cursor: not-allowed;
}

.btn-secondary {
  padding: var(--space-2) var(--space-4);
  background: var(--color-surface-alt);
  color: var(--color-text);
  border: 1px solid var(--color-border);
  border-radius: var(--radius-md);
  font-family: var(--font-body);
  font-size: 0.875rem;
  cursor: pointer;
  transition: background var(--transition);
  min-height: 40px;
}

.btn-secondary:hover:not(:disabled) {
  background: var(--color-border-light);
}

.btn-secondary:disabled {
  opacity: 0.5;
  cursor: not-allowed;
}
|
|
||||||
|
|
@ -3,7 +3,6 @@ import vue from '@vitejs/plugin-vue'
|
||||||
import UnoCSS from 'unocss/vite'
|
import UnoCSS from 'unocss/vite'
|
||||||
|
|
||||||
export default defineConfig({
|
export default defineConfig({
|
||||||
base: process.env.VITE_BASE_PATH || '/',
|
|
||||||
plugins: [vue(), UnoCSS()],
|
plugins: [vue(), UnoCSS()],
|
||||||
server: {
|
server: {
|
||||||
host: '0.0.0.0',
|
host: '0.0.0.0',
|
||||||
|
|
|
||||||
Loading…
Reference in a new issue