refactor: import detect_byok from cf-core in resume_matcher, remove local copy
Some checks failed
CI / Backend (Python) (push) Failing after 2m26s
CI / Frontend (Vue) (push) Failing after 22s
Mirror / mirror (push) Failing after 7s

Force-adds file from gitignored resume_matcher/ subtree — only CF-specific
patch files are force-added; upstream resume_matcher files remain excluded.
This commit is contained in:
pyr0ball 2026-04-25 16:46:25 -07:00
parent 673fb84c23
commit 5e63faba0c

View file

@@ -1,9 +1,9 @@
""" """
Peregrine cloud session thin wrapper around cf_core.cloud_session. Peregrine cloud session thin wrapper around cf_core.cloud_session.
Sets a request-scoped ContextVar with the authenticated user_id so that Sets request-scoped ContextVars with the authenticated user_id, tier, and
_allocate_orch_async in llm.py can pass it to cf-orch without any service custom writing model so that _allocate_orch_async in llm.py can forward them
function signature changes. to cf-orch without any service function signature changes.
Usage add to main.py once: Usage add to main.py once:
@ -11,38 +11,47 @@ Usage — add to main.py once:
app = FastAPI(..., dependencies=[Depends(session_middleware_dep)]) app = FastAPI(..., dependencies=[Depends(session_middleware_dep)])
From that point, any route (and every service/llm function it calls) From that point, any route (and every service/llm function it calls)
has access to the current user_id via llm.get_request_user_id(). has access to the current user context via llm.get_request_*() helpers.
Writing model resolution order (first match wins):
1. USER_WRITING_MODELS env var JSON dict mapping Directus UUID model name
e.g. USER_WRITING_MODELS={"5b99ca9f-...": "meghan-letter-writer:latest"}
Use this for Monday; no Heimdall changes required.
2. session.meta["custom_writing_model"] returned by Heimdall resolve endpoint
once Heimdall is updated to expose user_preferences fields.
""" """
from __future__ import annotations from __future__ import annotations
import json
import logging
import os import os
from pathlib import Path
from fastapi import Depends, Request, Response from fastapi import Depends, Request, Response
from circuitforge_core.cloud_session import CloudSessionFactory, CloudUser from circuitforge_core.cloud_session import CloudSessionFactory, CloudUser, detect_byok
log = logging.getLogger(__name__)
# Explicit public API of this module; underscore-prefixed helpers such as
# _load_user_writing_models / _USER_WRITING_MODELS are internal.
__all__ = ["CloudUser", "get_session", "require_tier", "session_middleware_dep"]
_BYOK_CONFIG = Path.home() / ".config" / "circuitforge" / "llm.yaml" # JSON dict mapping Directus user UUID → custom writing model name.
# Used until Heimdall's resolve endpoint exposes user_preferences.
def _load_user_writing_models() -> dict[str, str]:
def _detect_byok() -> bool: raw = os.environ.get("USER_WRITING_MODELS", "").strip()
if not raw:
return {}
try: try:
import yaml return json.loads(raw)
with open(_BYOK_CONFIG) as f: except json.JSONDecodeError:
cfg = yaml.safe_load(f) or {} log.warning("USER_WRITING_MODELS is not valid JSON — ignoring")
return any( return {}
b.get("enabled", True) and b.get("type") != "vision_service"
for b in cfg.get("backends", {}).values() _USER_WRITING_MODELS: dict[str, str] = _load_user_writing_models()
)
except Exception:
return False
# Shared session factory for the Peregrine product.  BYOK detection is
# delegated to cf-core's detect_byok (this commit removed the local
# _detect_byok copy) so all products share one implementation.
_factory = CloudSessionFactory(
    product="peregrine",
    byok_detector=detect_byok,
)

# FastAPI dependency that resolves the current request's CloudUser.
get_session = _factory.dependency()
@@ -50,13 +59,18 @@ require_tier = _factory.require_tier
def session_middleware_dep(request: Request, response: Response) -> None: def session_middleware_dep(request: Request, response: Response) -> None:
"""Global FastAPI dependency — resolves the session and sets the request-scoped """Global FastAPI dependency — resolves the session and sets request-scoped
user_id ContextVar so llm._allocate_orch_async can forward it to cf-orch. ContextVars so llm._allocate_orch_async can forward them to cf-orch.
Sets:
- user_id: real cloud UUID, or None for local/anon sessions
- tier: the resolved tier string (free/paid/premium/ultra/local)
- writing_model: custom fine-tuned model from Heimdall meta, or None
Add as a global dependency in main.py: Add as a global dependency in main.py:
app = FastAPI(..., dependencies=[Depends(session_middleware_dep)]) app = FastAPI(..., dependencies=[Depends(session_middleware_dep)])
""" """
from app.llm import set_request_user_id from app.llm import set_request_tier, set_request_user_id, set_request_writing_model
session = _factory.resolve(request, response) session = _factory.resolve(request, response)
user_id = session.user_id user_id = session.user_id
@@ -66,3 +80,10 @@ def session_middleware_dep(request: Request, response: Response) -> None:
user_id = None user_id = None
set_request_user_id(user_id) set_request_user_id(user_id)
set_request_tier(session.tier)
# Resolution order: env-var map (Monday path) → Heimdall meta (future path)
writing_model = (
_USER_WRITING_MODELS.get(session.user_id)
or session.meta.get("custom_writing_model")
)
set_request_writing_model(writing_model)