Some checks failed
CI / test (pull_request) Failing after 1m16s
- Lower vue_ui_beta gate to "free" so all licensed users can access the new UI without a paid subscription - Remove "Paid tier" wording from the Try New UI banner - Fix Vue SPA navigation in cloud/demo deployments: add VITE_BASE_PATH build arg so Vite sets the correct subpath base, and pass import.meta.env.BASE_URL to createWebHistory() so router links emit /peregrine/... paths that Caddy can match - Fix feedback button missing on cloud instance by passing FORGEJO_API_TOKEN through compose.cloud.yml - Remove vLLM container from compose.yml (vLLM dropped from stack; cf-research service in cfcore covers the use case) - Fix cloud config path in Apply page (use get_config_dir() so per-user cloud data roots resolve correctly for user.yaml and resume YAML) - Refactor generate_cover_letter._build_system_context and _build_mission_notes to accept explicit profile arg (enables per-user cover letter generation in cloud multi-tenant mode) - Add API proxy block to nginx.conf (Vue web container can now call /api/ directly without Vite dev proxy) - Update .env.example: remove vLLM vars, add research model + tuning vars for external vLLM deployments - Update llm.yaml: switch vllm base_url to host.docker.internal (vLLM now runs outside Docker stack) Closes #63 (feedback button) Related: #8 (Vue SPA), #50–#62 (parity milestone)
181 lines
6.6 KiB
Python
181 lines
6.6 KiB
Python
"""
|
|
Tier definitions and feature gates for Peregrine.
|
|
|
|
Tiers: free < paid < premium < ultra (ultra reserved; no Peregrine features use it yet)
|
|
FEATURES maps feature key → minimum tier required.
|
|
Features not in FEATURES are available to all tiers (free).
|
|
|
|
BYOK policy
|
|
-----------
|
|
Features in BYOK_UNLOCKABLE are gated only because CircuitForge would otherwise
|
|
be providing the LLM compute. When a user has any configured LLM backend (local
|
|
ollama/vllm or their own API key), those features unlock regardless of tier.
|
|
Pass has_byok=has_configured_llm() to can_use() at call sites.
|
|
|
|
Features that stay gated even with BYOK:
|
|
- Integrations (Notion sync, calendars, etc.) — infrastructure we run
|
|
- llm_keywords_blocklist — orchestration pipeline over background keyword data
|
|
- email_classifier — training pipeline, not a single LLM call
|
|
- shared_cover_writer_model — our fine-tuned model weights
|
|
- model_fine_tuning — GPU infrastructure
|
|
- multi_user — account infrastructure
|
|
"""
|
|
from __future__ import annotations
|
|
|
|
import os as _os
|
|
from pathlib import Path
|
|
|
|
from circuitforge_core.tiers import (
|
|
can_use as _core_can_use,
|
|
TIERS,
|
|
tier_label as _core_tier_label,
|
|
)
|
|
|
|
# Minimum tier required per feature key.  A key absent from this mapping is
# treated as free — available to every tier (see can_use()).
FEATURES: dict[str, str] = {
    # Wizard LLM generation — pure LLM calls, so BYOK-unlockable.
    "llm_career_summary": "paid",
    "llm_expand_bullets": "paid",
    "llm_suggest_skills": "paid",
    "llm_voice_guidelines": "premium",
    "llm_job_titles": "paid",
    "llm_mission_notes": "paid",

    # Background keyword pipeline — orchestration over background data,
    # not a single LLM call, so it stays gated even with BYOK.
    "llm_keywords_blocklist": "paid",

    # App features — pure LLM calls over job/profile data, BYOK-unlockable.
    "company_research": "paid",
    "interview_prep": "paid",
    "survey_assistant": "paid",

    # Orchestration / infrastructure — stays gated regardless of BYOK.
    "email_classifier": "paid",
    "model_fine_tuning": "premium",
    "shared_cover_writer_model": "paid",
    "multi_user": "premium",

    # Integrations — infrastructure CircuitForge operates, stays gated.
    "notion_sync": "paid",
    "google_sheets_sync": "paid",
    "airtable_sync": "paid",
    "google_calendar_sync": "paid",
    "apple_calendar_sync": "paid",
    "slack_notifications": "paid",

    # Beta UI — deliberately free: this gate manages access, not compute.
    "vue_ui_beta": "free",
}
|
|
|
|
# Features that unlock whenever the user supplies any LLM backend — a local
# one (ollama/vllm) or their own API key.  Each entry is a pure LLM call:
# the only reason it sits behind a tier is that CircuitForge would otherwise
# be paying for the compute.
BYOK_UNLOCKABLE: frozenset[str] = frozenset((
    "llm_career_summary",
    "llm_expand_bullets",
    "llm_suggest_skills",
    "llm_voice_guidelines",
    "llm_job_titles",
    "llm_mission_notes",
    "company_research",
    "interview_prep",
    "survey_assistant",
))
|
|
|
|
# Demo mode flag — read from environment at module load time.
|
|
# Allows demo toolbar to override tier without accessing st.session_state (thread-safe).
|
|
# _DEMO_MODE is immutable after import for the process lifetime.
|
|
# DEMO_MODE must be set in the environment before the process starts (e.g., via
|
|
# Docker Compose environment:). Runtime toggling is not supported.
|
|
_DEMO_MODE = _os.environ.get("DEMO_MODE", "").lower() in ("1", "true", "yes")
|
|
|
|
# Free integrations (not in FEATURES):
|
|
# google_drive_sync, dropbox_sync, onedrive_sync, mega_sync,
|
|
# nextcloud_sync, discord_notifications, home_assistant
|
|
|
|
_LLM_CFG = Path(__file__).parent.parent.parent / "config" / "llm.yaml"
|
|
|
|
|
|
def has_configured_llm(config_path: Path | None = None) -> bool:
    """Return True if at least one non-vision LLM backend is enabled in llm.yaml.

    Local backends (ollama, vllm) count — the policy is "you're providing the
    compute", whether that's your own hardware or your own API key.

    config_path: override for tests; defaults to the repo's config/llm.yaml.

    Returns False on any failure (missing file, malformed YAML, unexpected
    schema): feature gating degrades to "locked" rather than crashing callers.
    """
    import yaml

    path = config_path or _LLM_CFG
    try:
        # Explicit encoding: the platform default is not guaranteed to be
        # UTF-8 (PEP 597), and llm.yaml may contain non-ASCII comments.
        with open(path, encoding="utf-8") as f:
            cfg = yaml.safe_load(f) or {}
        # "enabled" defaults to True when omitted; vision services are not
        # text LLM backends and never satisfy the BYOK policy.
        return any(
            b.get("enabled", True) and b.get("type") != "vision_service"
            for b in cfg.get("backends", {}).values()
        )
    except Exception:
        # Deliberate best-effort: any read/parse/schema problem means "no LLM".
        return False
|
|
|
|
|
|
def can_use(
    tier: str,
    feature: str,
    has_byok: bool = False,
    *,
    demo_tier: str | None = None,
) -> bool:
    """Return True if the given tier has access to the feature.

    has_byok: pass has_configured_llm() to unlock BYOK_UNLOCKABLE features
              for users who supply their own LLM backend regardless of tier.

    demo_tier: when set AND _DEMO_MODE is True, substitutes for `tier`.
               Read from st.session_state by the *caller*, not here — keeps
               this function thread-safe for background tasks and tests.

    Returns True for unknown features (not gated).
    Returns False for unknown/invalid tier strings.
    """
    # BYOK short-circuit: a user supplying their own LLM backend bypasses the
    # tier gate for the pure-LLM-call features, whatever their tier is.
    if has_byok and feature in BYOK_UNLOCKABLE:
        return True
    # Demo override only applies when the process was started in demo mode.
    active_tier = tier
    if _DEMO_MODE and demo_tier is not None:
        active_tier = demo_tier
    # Delegate the tier comparison to core, using Peregrine's own gate table
    # (core's default feature table does not know Peregrine's features).
    return _core_can_use(feature, active_tier, _features=FEATURES)
|
|
|
|
|
|
def tier_label(feature: str, has_byok: bool = False) -> str:
    """Return a display label for a locked feature, or '' if free/unlocked."""
    # A configured BYOK backend unlocks the pure-LLM features → no lock badge.
    if has_byok and feature in BYOK_UNLOCKABLE:
        return ""
    required = _core_tier_label(feature, _features=FEATURES)
    if not required or required == "free":
        return ""
    if required == "paid":
        return "🔒 Paid"
    return "⭐ Premium"
|
|
|
|
|
|
def effective_tier(
    profile=None,
    license_path=None,
    public_key_path=None,
) -> str:
    """Return the effective tier for this installation.

    Priority:
      1. profile.dev_tier_override (developer mode override)
      2. License JWT verification (offline RS256 check)
      3. "free" (fallback)

    license_path and public_key_path default to production paths when None.
    Pass explicit paths in tests to avoid touching real files.
    """
    # Developer override wins unconditionally; the truthy check means an
    # empty/None override falls through to the license check.
    if profile and getattr(profile, "dev_tier_override", None):
        return profile.dev_tier_override

    # Imported lazily so the override path works without license tooling.
    from scripts.license import effective_tier as _license_tier

    # Only forward paths the caller supplied so the license helper keeps its
    # own production defaults for anything left as None.
    kwargs = {}
    if license_path is not None:
        kwargs["license_path"] = Path(license_path)
    if public_key_path is not None:
        kwargs["public_key_path"] = Path(public_key_path)
    return _license_tier(**kwargs)
|