feat: byok_guard — cloud backend detection with full test coverage
This commit is contained in:
parent
4d6cbce91e
commit
9c4250f48e
2 changed files with 152 additions and 0 deletions
56
scripts/byok_guard.py
Normal file
56
scripts/byok_guard.py
Normal file
|
|
@ -0,0 +1,56 @@
|
|||
"""
|
||||
BYOK cloud backend detection.
|
||||
|
||||
Determines whether LLM backends in llm.yaml send data to third-party cloud
|
||||
providers. Used by Settings (activation warning) and app.py (sidebar indicator).
|
||||
|
||||
No Streamlit dependency — pure Python so it's unit-testable and reusable.
|
||||
"""
|
||||
|
||||
# Substrings that mark a base_url as pointing at the local machine
# (loopback names/addresses plus the unspecified address 0.0.0.0).
# NOTE(review): is_cloud_backend matches these as raw substrings of the
# whole URL, so a host like "localhost.example.com" would also match —
# confirm that is acceptable for the privacy guard.
LOCAL_URL_MARKERS = ("localhost", "127.0.0.1", "0.0.0.0")
|
||||
|
||||
|
||||
def is_cloud_backend(name: str, cfg: dict) -> bool:
    """Return True if this backend sends prompts to a third-party cloud provider.

    Classification rules (applied in order):

    1. ``local: true`` in cfg -> always local (user override)
    2. ``vision_service`` type -> always local
    3. ``anthropic`` or ``claude_code`` type -> always cloud
    4. ``openai_compat`` with a loopback/unspecified-host base_url -> local
    5. ``openai_compat`` with any other base_url -> cloud
    6. anything else -> local (unknown types assumed safe)

    Args:
        name: backend name from llm.yaml. Currently unused; kept so the
            signature matches the (name, cfg) pairs iterated by
            ``cloud_backends`` and allows future per-name rules.
        cfg: backend config mapping (keys used: ``type``, ``base_url``,
            ``local``).

    Returns:
        True when prompts would leave the machine for a cloud provider.
    """
    # Local import keeps this module free of top-level dependencies.
    from urllib.parse import urlsplit

    # Rule 1: explicit user override always wins (e.g. Ollama on a NAS).
    if cfg.get("local", False):
        return False

    btype = cfg.get("type", "")

    # Rule 2: the vision service is an in-process/local component.
    if btype == "vision_service":
        return False

    # Rule 3: Anthropic-backed types always talk to Anthropic's cloud.
    if btype in ("anthropic", "claude_code"):
        return True

    if btype == "openai_compat":
        url = cfg.get("base_url", "")
        # Compare the *parsed hostname* exactly instead of substring-matching
        # the raw URL: substring checks misclassify "https://localhost.evil.com"
        # and host "10.0.0.0" (contains "0.0.0.0") as local. urlsplit needs a
        # scheme or leading "//" to populate the netloc, so normalize
        # scheme-less values like "localhost:11434" first.
        host = urlsplit(url if "://" in url else "//" + url).hostname or ""
        # All of 127.0.0.0/8 is loopback, hence the "127." prefix test;
        # "::1" is the IPv6 loopback, "0.0.0.0" the unspecified address.
        is_local_host = host in ("localhost", "0.0.0.0", "::1") or host.startswith("127.")
        # Rules 4/5: empty/unparseable host is treated as cloud (fail safe).
        return not is_local_host

    # Rule 6: unknown backend types are assumed local.
    return False
|
||||
|
||||
|
||||
def cloud_backends(llm_cfg: dict) -> list[str]:
    """Return names of enabled cloud backends from a parsed llm.yaml dict.

    Args:
        llm_cfg: parsed contents of config/llm.yaml

    Returns:
        List of backend names that are enabled and classified as cloud.
        Empty list means fully local configuration.
    """
    offenders: list[str] = []
    for backend_name, backend_cfg in llm_cfg.get("backends", {}).items():
        # Disabled backends never send anything, so skip them outright.
        if not backend_cfg.get("enabled", True):
            continue
        if is_cloud_backend(backend_name, backend_cfg):
            offenders.append(backend_name)
    return offenders
|
||||
96
tests/test_byok_guard.py
Normal file
96
tests/test_byok_guard.py
Normal file
|
|
@ -0,0 +1,96 @@
|
|||
"""Tests for BYOK cloud backend detection."""
|
||||
import pytest
|
||||
from scripts.byok_guard import is_cloud_backend, cloud_backends
|
||||
|
||||
|
||||
class TestIsCloudBackend:
    """Classification rules of is_cloud_backend, one test per rule branch."""

    def test_anthropic_type_is_always_cloud(self):
        cfg = {"type": "anthropic", "enabled": True}
        assert is_cloud_backend("anthropic", cfg) is True

    def test_claude_code_type_is_cloud(self):
        cfg = {"type": "claude_code", "enabled": True}
        assert is_cloud_backend("claude_code", cfg) is True

    def test_vision_service_is_always_local(self):
        cfg = {"type": "vision_service"}
        assert is_cloud_backend("vision", cfg) is False

    def test_openai_compat_localhost_is_local(self):
        result = is_cloud_backend(
            "ollama",
            {"type": "openai_compat", "base_url": "http://localhost:11434/v1"},
        )
        assert result is False

    def test_openai_compat_127_is_local(self):
        result = is_cloud_backend(
            "vllm",
            {"type": "openai_compat", "base_url": "http://127.0.0.1:8000/v1"},
        )
        assert result is False

    def test_openai_compat_0000_is_local(self):
        result = is_cloud_backend(
            "vllm",
            {"type": "openai_compat", "base_url": "http://0.0.0.0:8000/v1"},
        )
        assert result is False

    def test_openai_compat_remote_url_is_cloud(self):
        result = is_cloud_backend(
            "openai",
            {"type": "openai_compat", "base_url": "https://api.openai.com/v1"},
        )
        assert result is True

    def test_openai_compat_together_is_cloud(self):
        result = is_cloud_backend(
            "together",
            {"type": "openai_compat", "base_url": "https://api.together.xyz/v1"},
        )
        assert result is True

    def test_local_override_suppresses_cloud_detection(self):
        # A non-loopback LAN address is still local when the user says so.
        cfg = {
            "type": "openai_compat",
            "base_url": "http://192.168.1.100:11434/v1",
            "local": True,
        }
        assert is_cloud_backend("nas_ollama", cfg) is False

    def test_local_override_on_anthropic_suppresses_detection(self):
        # The override outranks even the always-cloud anthropic type.
        assert is_cloud_backend("anthropic", {"type": "anthropic", "local": True}) is False

    def test_unknown_type_without_url_is_local(self):
        # Unknown backend types fall through to the assumed-safe default.
        assert is_cloud_backend("mystery", {"type": "unknown_type"}) is False
|
||||
|
||||
|
||||
class TestCloudBackends:
    """cloud_backends over whole llm.yaml-shaped dicts: filtering + enabled flag."""

    def test_empty_config_returns_empty(self):
        # No "backends" key at all -> nothing to report.
        assert cloud_backends({}) == []

    def test_fully_local_config_returns_empty(self):
        ollama = {
            "type": "openai_compat",
            "base_url": "http://localhost:11434/v1",
            "enabled": True,
        }
        vision = {"type": "vision_service", "enabled": True}
        cfg = {"backends": {"ollama": ollama, "vision": vision}}
        assert cloud_backends(cfg) == []

    def test_cloud_backend_returned(self):
        cfg = {"backends": {"anthropic": {"type": "anthropic", "enabled": True}}}
        assert cloud_backends(cfg) == ["anthropic"]

    def test_disabled_cloud_backend_excluded(self):
        # Disabled backends never send data, so they must not be flagged.
        cfg = {"backends": {"anthropic": {"type": "anthropic", "enabled": False}}}
        assert cloud_backends(cfg) == []

    def test_mix_returns_only_enabled_cloud(self):
        backends = {
            "ollama": {
                "type": "openai_compat",
                "base_url": "http://localhost:11434/v1",
                "enabled": True,
            },
            "anthropic": {"type": "anthropic", "enabled": True},
            "openai": {
                "type": "openai_compat",
                "base_url": "https://api.openai.com/v1",
                "enabled": False,
            },
        }
        assert cloud_backends({"backends": backends}) == ["anthropic"]

    def test_multiple_cloud_backends_all_returned(self):
        backends = {
            "anthropic": {"type": "anthropic", "enabled": True},
            "openai": {
                "type": "openai_compat",
                "base_url": "https://api.openai.com/v1",
                "enabled": True,
            },
        }
        # Order-insensitive comparison: only membership matters here.
        assert set(cloud_backends({"backends": backends})) == {"anthropic", "openai"}
|
||||
Loading…
Reference in a new issue