feat: BYOK unlocks LLM features regardless of tier

BYOK policy: if a user supplies any LLM backend (local ollama/vllm or
their own API key), they get full access to AI generation features.
Charging for the UI around a service they already pay for is bad UX.

app/wizard/tiers.py:
  - BYOK_UNLOCKABLE frozenset: pure LLM-call features that unlock with
    any configured backend (llm_career_summary, company_research,
    interview_prep, survey_assistant, voice guidelines, etc.)
  - has_configured_llm(): checks llm.yaml for any enabled non-vision
    backend; local + external API keys both count
  - can_use(tier, feature, has_byok=False): BYOK_UNLOCKABLE features
    return True when has_byok=True regardless of tier
  - tier_label(feature, has_byok=False): suppresses lock icon for
    BYOK_UNLOCKABLE features when BYOK is active

Still gated (require CF infrastructure, not just an LLM call):
  llm_keywords_blocklist, email_classifier, model_fine_tuning,
  shared_cover_writer_model, multi_user, all integrations

app/pages/2_Settings.py:
  - Compute _byok = has_configured_llm() once at page load
  - Pass has_byok=_byok to can_use() for _gen_panel_active
  - Update caption to mention BYOK as an alternative to paid tier

app/pages/0_Setup.py:
  - Wizard generation widget passes has_byok=has_configured_llm()
    to can_use() and tier_label()

tests/test_wizard_tiers.py:
  - 6 new BYOK-specific tests covering unlock, non-unlock, and
    label suppression cases
This commit is contained in:
pyr0ball 2026-03-02 11:34:36 -08:00
parent 0a728fddbc
commit ebb82b7ca7
4 changed files with 127 additions and 16 deletions

View file

@ -105,10 +105,11 @@ def _generation_widget(section: str, label: str, tier: str,
Call this inside a step to add LLM generation support. Call this inside a step to add LLM generation support.
The caller decides whether to auto-populate a field with the result. The caller decides whether to auto-populate a field with the result.
""" """
from app.wizard.tiers import can_use, tier_label as tl from app.wizard.tiers import can_use, tier_label as tl, has_configured_llm
if not can_use(tier, feature_key): _has_byok = has_configured_llm()
st.caption(f"{tl(feature_key)} {label}") if not can_use(tier, feature_key, has_byok=_has_byok):
st.caption(f"{tl(feature_key, has_byok=_has_byok)} {label}")
return None return None
col_btn, col_fb = st.columns([2, 5]) col_btn, col_fb = st.columns([2, 5])

View file

@ -98,11 +98,12 @@ _all_tabs = st.tabs(_tab_names)
tab_profile, tab_resume, tab_search, tab_system, tab_finetune, tab_license, tab_data = _all_tabs[:7] tab_profile, tab_resume, tab_search, tab_system, tab_finetune, tab_license, tab_data = _all_tabs[:7]
# ── Inline LLM generate buttons ─────────────────────────────────────────────── # ── Inline LLM generate buttons ───────────────────────────────────────────────
# Paid-tier feature: ✨ Generate buttons sit directly below each injectable field. # Unlocked when user has a configured LLM backend (BYOK) OR a paid tier.
# Writes into session state keyed to the widget's `key=` param, then reruns. # Writes into session state keyed to the widget's `key=` param, then reruns.
from app.wizard.tiers import can_use as _cu from app.wizard.tiers import can_use as _cu, has_configured_llm as _has_llm
_byok = _has_llm()
_gen_panel_active = bool(_profile) and _cu( _gen_panel_active = bool(_profile) and _cu(
_profile.effective_tier if _profile else "free", "llm_career_summary" _profile.effective_tier if _profile else "free", "llm_career_summary", has_byok=_byok
) )
# Seed session state for LLM-injectable text fields on first load # Seed session state for LLM-injectable text fields on first load
@ -251,7 +252,7 @@ with tab_profile:
st.rerun() st.rerun()
if not _can_generate: if not _can_generate:
st.caption("✨ AI generation requires a paid tier.") st.caption("✨ AI generation requires a paid tier or a configured LLM backend (BYOK).")
_mission_updated = { _mission_updated = {
r["key"]: r["value"] r["key"]: r["value"]

View file

@ -4,33 +4,54 @@ Tier definitions and feature gates for Peregrine.
Tiers: free < paid < premium Tiers: free < paid < premium
FEATURES maps feature key → minimum tier required. FEATURES maps feature key → minimum tier required.
Features not in FEATURES are available to all tiers (free). Features not in FEATURES are available to all tiers (free).
BYOK policy
-----------
Features in BYOK_UNLOCKABLE are gated only because CircuitForge would otherwise
be providing the LLM compute. When a user has any configured LLM backend (local
ollama/vllm or their own API key), those features unlock regardless of tier.
Pass has_byok=has_configured_llm() to can_use() at call sites.
Features that stay gated even with BYOK:
- Integrations (Notion sync, calendars, etc.) — infrastructure we run
- llm_keywords_blocklist — orchestration pipeline over background keyword data
- email_classifier — training pipeline, not a single LLM call
- shared_cover_writer_model — our fine-tuned model weights
- model_fine_tuning — GPU infrastructure
- multi_user — account infrastructure
""" """
from __future__ import annotations from __future__ import annotations
from pathlib import Path
TIERS = ["free", "paid", "premium"] TIERS = ["free", "paid", "premium"]
# Maps feature key → minimum tier string required. # Maps feature key → minimum tier string required.
# Features absent from this dict are free (available to all). # Features absent from this dict are free (available to all).
FEATURES: dict[str, str] = { FEATURES: dict[str, str] = {
# Wizard LLM generation # Wizard LLM generation — BYOK-unlockable (pure LLM calls)
"llm_career_summary": "paid", "llm_career_summary": "paid",
"llm_expand_bullets": "paid", "llm_expand_bullets": "paid",
"llm_suggest_skills": "paid", "llm_suggest_skills": "paid",
"llm_voice_guidelines": "premium", "llm_voice_guidelines": "premium",
"llm_job_titles": "paid", "llm_job_titles": "paid",
"llm_keywords_blocklist": "paid",
"llm_mission_notes": "paid", "llm_mission_notes": "paid",
# App features # Orchestration — stays gated (background data pipeline, not just an LLM call)
"llm_keywords_blocklist": "paid",
# App features — BYOK-unlockable (pure LLM calls over job/profile data)
"company_research": "paid", "company_research": "paid",
"interview_prep": "paid", "interview_prep": "paid",
"email_classifier": "paid",
"survey_assistant": "paid", "survey_assistant": "paid",
# Orchestration / infrastructure — stays gated
"email_classifier": "paid",
"model_fine_tuning": "premium", "model_fine_tuning": "premium",
"shared_cover_writer_model": "paid", "shared_cover_writer_model": "paid",
"multi_user": "premium", "multi_user": "premium",
# Integrations (paid) # Integrations — stays gated (infrastructure CircuitForge operates)
"notion_sync": "paid", "notion_sync": "paid",
"google_sheets_sync": "paid", "google_sheets_sync": "paid",
"airtable_sync": "paid", "airtable_sync": "paid",
@ -39,28 +60,71 @@ FEATURES: dict[str, str] = {
"slack_notifications": "paid", "slack_notifications": "paid",
} }
# Features that unlock when the user supplies any LLM backend (local or BYOK).
# These are pure LLM-call features — the only reason they're behind a tier is
# because CircuitForge would otherwise be providing the compute.
# Consumed by can_use() and tier_label() via their has_byok parameter. Listing
# a feature absent from FEATURES here would be redundant: ungated features are
# already available to every tier.
BYOK_UNLOCKABLE: frozenset[str] = frozenset({
    "llm_career_summary",
    "llm_expand_bullets",
    "llm_suggest_skills",
    "llm_voice_guidelines",
    "llm_job_titles",
    "llm_mission_notes",
    "company_research",
    "interview_prep",
    "survey_assistant",
})
# Free integrations (not in FEATURES): # Free integrations (not in FEATURES):
# google_drive_sync, dropbox_sync, onedrive_sync, mega_sync, # google_drive_sync, dropbox_sync, onedrive_sync, mega_sync,
# nextcloud_sync, discord_notifications, home_assistant # nextcloud_sync, discord_notifications, home_assistant
_LLM_CFG = Path(__file__).parent.parent.parent / "config" / "llm.yaml"
def can_use(tier: str, feature: str) -> bool:
def has_configured_llm(config_path: Path | None = None) -> bool:
    """Return True if at least one non-vision LLM backend is enabled in llm.yaml.

    Local backends (ollama, vllm) count — the policy is "you're providing the
    compute", whether that's your own hardware or your own API key.

    config_path: optional override for the config location (used by tests);
        defaults to the repo-level config/llm.yaml.

    Any failure (missing file, malformed YAML, PyYAML not installed) is
    treated as "no backend configured" and returns False — this is a
    deliberate best-effort probe, never a hard error.
    """
    path = config_path or _LLM_CFG
    try:
        # Imported lazily inside the try so a missing PyYAML degrades to
        # False rather than raising ImportError at call sites.
        import yaml

        with open(path) as f:
            cfg = yaml.safe_load(f) or {}
        # `backends:` may be present-but-null in YAML; normalize to {}.
        backends = cfg.get("backends") or {}
        # Skip malformed (non-dict) entries so one bad stanza doesn't hide
        # other, validly configured backends.
        return any(
            b.get("enabled", True) and b.get("type") != "vision_service"
            for b in backends.values()
            if isinstance(b, dict)
        )
    except Exception:
        return False
def can_use(tier: str, feature: str, has_byok: bool = False) -> bool:
    """Return True if the given tier has access to the feature.

    has_byok: pass has_configured_llm() to unlock BYOK_UNLOCKABLE features
    for users who supply their own LLM backend regardless of tier.

    Returns True for unknown features (not gated).
    Returns False for unknown/invalid tier strings.
    """
    required = FEATURES.get(feature)
    if required is None:
        return True  # not gated — available to all
    # BYOK policy: user-supplied LLM compute unlocks pure LLM-call features.
    if has_byok and feature in BYOK_UNLOCKABLE:
        return True
    try:
        return TIERS.index(tier) >= TIERS.index(required)
    except ValueError:
        return False  # invalid tier string
def tier_label(feature: str) -> str: def tier_label(feature: str, has_byok: bool = False) -> str:
"""Return a display label for a locked feature, or '' if free/unknown.""" """Return a display label for a locked feature, or '' if free/unlocked."""
if has_byok and feature in BYOK_UNLOCKABLE:
return ""
required = FEATURES.get(feature) required = FEATURES.get(feature)
if required is None: if required is None:
return "" return ""

View file

@ -2,7 +2,7 @@ import sys
from pathlib import Path from pathlib import Path
sys.path.insert(0, str(Path(__file__).parent.parent)) sys.path.insert(0, str(Path(__file__).parent.parent))
from app.wizard.tiers import can_use, tier_label, TIERS, FEATURES from app.wizard.tiers import can_use, tier_label, TIERS, FEATURES, BYOK_UNLOCKABLE
def test_tiers_list(): def test_tiers_list():
@ -67,3 +67,48 @@ def test_free_integrations_are_accessible():
def test_paid_integrations_gated(): def test_paid_integrations_gated():
assert can_use("free", "notion_sync") is False assert can_use("free", "notion_sync") is False
assert can_use("paid", "notion_sync") is True assert can_use("paid", "notion_sync") is True
# ── BYOK tests ────────────────────────────────────────────────────────────────
def test_byok_unlocks_llm_features_for_free_tier():
    """Free tier + BYOK grants access to every BYOK_UNLOCKABLE feature."""
    for feat in sorted(BYOK_UNLOCKABLE):
        assert can_use("free", feat, has_byok=True) is True, (
            f"{feat} should be accessible with BYOK on free tier"
        )
def test_byok_does_not_unlock_integrations():
    """Integrations rely on CF-operated infrastructure; BYOK never unlocks them."""
    gated_integrations = ("notion_sync", "google_sheets_sync", "slack_notifications")
    for feat in gated_integrations:
        assert can_use("free", feat, has_byok=True) is False, (
            f"{feat} should stay gated even with BYOK"
        )
def test_byok_does_not_unlock_orchestration_features():
    """Pipeline-backed features (not single LLM calls) stay tier-gated under BYOK."""
    orchestration = ("llm_keywords_blocklist", "email_classifier", "model_fine_tuning")
    for feat in orchestration:
        assert can_use("free", feat, has_byok=True) is False, (
            f"{feat} should stay gated even with BYOK"
        )
def test_tier_label_hidden_when_byok_unlocks():
    """Active BYOK suppresses the lock label on every BYOK_UNLOCKABLE feature."""
    for feat in sorted(BYOK_UNLOCKABLE):
        assert tier_label(feat, has_byok=True) == "", (
            f"{feat} should show no lock label when BYOK is active"
        )
def test_tier_label_still_shows_for_non_unlockable_with_byok():
    """Non-unlockable features keep a non-empty lock label even with BYOK."""
    for feat in ("notion_sync", "email_classifier"):
        assert tier_label(feat, has_byok=True) != ""
def test_byok_false_preserves_original_gating():
    """Passing has_byok=False (the default) leaves pre-BYOK gating untouched."""
    for tier, expected in (("free", False), ("paid", True)):
        assert can_use(tier, "company_research", has_byok=False) is expected