feat: services tab uses docker compose commands and SSL-aware health checks

Replace hardcoded systemd/shell-script service commands with docker compose
profile-aware commands. Add inference_profile-based filtering (a hidden flag
removes Ollama on the remote profile, and vLLM on any profile other than
dual-gpu). Replace the TCP socket
health check with HTTP-based _port_open() that accepts host/ssl/verify params
for remote/TLS-terminated service support.
This commit is contained in:
pyr0ball 2026-02-24 19:34:44 -08:00
parent 3d8ec6d9a9
commit b6ee6a3924

View file

@@ -498,67 +498,75 @@ with tab_notion:
# ── Services tab ───────────────────────────────────────────────────────────────
with tab_services:
    import subprocess as _sp

    TOKENS_CFG = CONFIG_DIR / "tokens.yaml"

    # Repo root — docker compose is always invoked from here so it picks
    # up the project's compose file and profiles.
    COMPOSE_DIR = str(Path(__file__).parent.parent.parent)
    # Active inference profile; "remote" is the conservative fallback when
    # no profile object is available.
    _profile_name = _profile.inference_profile if _profile else "remote"

    # Service definitions: (display_name, port, start_cmd, stop_cmd, notes)
    # Entries carrying "hidden": True are filtered out below based on the
    # active profile.
    SERVICES = [
        {
            "name": "Streamlit UI",
            "port": _profile._svc["streamlit_port"] if _profile else 8501,
            "start": ["docker", "compose", "--profile", _profile_name, "up", "-d", "app"],
            "stop": ["docker", "compose", "stop", "app"],
            "cwd": COMPOSE_DIR,
            "note": "Peregrine web interface",
        },
        {
            "name": "Ollama (local LLM)",
            "port": _profile._svc["ollama_port"] if _profile else 11434,
            "start": ["docker", "compose", "--profile", _profile_name, "up", "-d", "ollama"],
            "stop": ["docker", "compose", "stop", "ollama"],
            "cwd": COMPOSE_DIR,
            "note": f"Local inference engine — profile: {_profile_name}",
            # Ollama runs locally; on the remote profile inference lives
            # elsewhere, so hide the entry.
            "hidden": _profile_name == "remote",
        },
        {
            "name": "vLLM Server",
            "port": _profile._svc["vllm_port"] if _profile else 8000,
            "start": ["docker", "compose", "--profile", _profile_name, "up", "-d", "vllm"],
            "stop": ["docker", "compose", "stop", "vllm"],
            "cwd": COMPOSE_DIR,
            "model_dir": str(_profile.vllm_models_dir) if _profile else str(Path.home() / "models" / "vllm"),
            "note": "vLLM inference — dual-gpu profile only",
            # vLLM needs its own GPU; only the dual-gpu profile provides one.
            "hidden": _profile_name != "dual-gpu",
        },
        {
            "name": "SearXNG (company scraper)",
            "port": _profile._svc["searxng_port"] if _profile else 8888,
            "start": ["docker", "compose", "up", "-d", "searxng"],
            "stop": ["docker", "compose", "stop", "searxng"],
            "cwd": COMPOSE_DIR,
            "note": "Privacy-respecting meta-search for company research",
        },
    ]
    # Filter hidden services based on active profile
    SERVICES = [s for s in SERVICES if not s.get("hidden")]
def _port_open(port: int) -> bool: def _port_open(port: int, host: str = "127.0.0.1",
ssl: bool = False, verify: bool = True) -> bool:
try: try:
with socket.create_connection(("127.0.0.1", port), timeout=1): import requests as _r
scheme = "https" if ssl else "http"
_r.get(f"{scheme}://{host}:{port}/", timeout=1, verify=verify)
return True return True
except OSError: except Exception:
return False return False
st.caption("Monitor and control the LLM backend services. Status is checked live on each page load.") st.caption("Monitor and control the LLM backend services. Status is checked live on each page load.")
for svc in SERVICES: for svc in SERVICES:
up = _port_open(svc["port"]) _svc_host = "127.0.0.1"
_svc_ssl = False
_svc_verify = True
if _profile:
_svc_host = _profile._svc.get(f"{svc['name'].split()[0].lower()}_host", "127.0.0.1")
_svc_ssl = _profile._svc.get(f"{svc['name'].split()[0].lower()}_ssl", False)
_svc_verify = _profile._svc.get(f"{svc['name'].split()[0].lower()}_ssl_verify", True)
up = _port_open(svc["port"], host=_svc_host, ssl=_svc_ssl, verify=_svc_verify)
badge = "🟢 Running" if up else "🔴 Stopped" badge = "🟢 Running" if up else "🔴 Stopped"
header = f"**{svc['name']}** — {badge}" header = f"**{svc['name']}** — {badge}"