feat: auto-generate llm.yaml base_url values from user profile services config
This commit is contained in:
parent
5232a26520
commit
1d4b0e734d
3 changed files with 75 additions and 1 deletions
|
|
@ -364,12 +364,21 @@ with tab_llm:
|
|||
for n in new_order
|
||||
))
|
||||
|
||||
if st.button("💾 Save LLM settings", type="primary"):
|
||||
col_save_llm, col_sync_llm = st.columns(2)
|
||||
if col_save_llm.button("💾 Save LLM settings", type="primary"):
|
||||
save_yaml(LLM_CFG, {**cfg, "backends": updated_backends, "fallback_order": new_order})
|
||||
st.session_state.pop("_llm_order", None)
|
||||
st.session_state.pop("_llm_order_cfg_key", None)
|
||||
st.success("LLM settings saved!")
|
||||
|
||||
if col_sync_llm.button("🔄 Sync URLs from Profile", help="Regenerate backend base_url values from your service host/port settings in user.yaml"):
|
||||
if _profile is not None:
|
||||
from scripts.generate_llm_config import apply_service_urls as _apply_urls
|
||||
_apply_urls(_profile, LLM_CFG)
|
||||
st.success("Profile saved and service URLs updated.")
|
||||
else:
|
||||
st.warning("No user profile found — configure it in the My Profile tab first.")
|
||||
|
||||
# ── Notion tab ────────────────────────────────────────────────────────────────
|
||||
with tab_notion:
|
||||
cfg = load_yaml(NOTION_CFG) if NOTION_CFG.exists() else {}
|
||||
|
|
|
|||
18
scripts/generate_llm_config.py
Normal file
18
scripts/generate_llm_config.py
Normal file
|
|
@ -0,0 +1,18 @@
|
|||
"""Update config/llm.yaml base_url values from the user profile's services block."""
|
||||
from pathlib import Path
|
||||
import yaml
|
||||
from scripts.user_profile import UserProfile
|
||||
|
||||
|
||||
def apply_service_urls(profile: UserProfile, llm_yaml_path: Path) -> None:
    """Rewrite ``base_url`` for ollama, ollama_research, and vllm backends in llm.yaml.

    Only backends that already exist in the config file are updated; URL keys
    produced by the profile that have no matching backend are ignored, so the
    profile cannot introduce backends the config does not declare.

    Args:
        profile: Loaded user profile; must expose ``generate_llm_urls()``
            returning a mapping of backend name -> base URL.
        llm_yaml_path: Path to ``config/llm.yaml``. A missing file is a no-op.
    """
    if not llm_yaml_path.exists():
        # Missing config is treated as a deliberate no-op (see tests).
        return
    cfg = yaml.safe_load(llm_yaml_path.read_text()) or {}
    backends = cfg.get("backends", {})
    for backend_name, url in profile.generate_llm_urls().items():
        backend = backends.get(backend_name)
        # Guard against malformed entries (e.g. a bare string) so one bad
        # backend doesn't abort the whole rewrite with a TypeError.
        if isinstance(backend, dict):
            backend["base_url"] = url
    cfg["backends"] = backends
    # safe_dump mirrors safe_load above (no arbitrary Python tags);
    # sort_keys=False keeps the user's existing key order stable on disk.
    llm_yaml_path.write_text(
        yaml.safe_dump(cfg, default_flow_style=False, allow_unicode=True, sort_keys=False)
    )
||||
47
tests/test_llm_config_generation.py
Normal file
47
tests/test_llm_config_generation.py
Normal file
|
|
@ -0,0 +1,47 @@
|
|||
from pathlib import Path
|
||||
import yaml
|
||||
from scripts.user_profile import UserProfile
|
||||
from scripts.generate_llm_config import apply_service_urls
|
||||
|
||||
|
||||
def test_urls_applied_to_llm_yaml(tmp_path):
    """End-to-end: service host/port settings in user.yaml land in llm.yaml."""
    services = {
        "ollama_host": "myserver", "ollama_port": 11434, "ollama_ssl": False,
        "ollama_ssl_verify": True,
        "vllm_host": "localhost", "vllm_port": 8000, "vllm_ssl": False,
        "vllm_ssl_verify": True,
        "searxng_host": "localhost", "searxng_port": 8888,
        "searxng_ssl": False, "searxng_ssl_verify": True,
    }
    profile_path = tmp_path / "user.yaml"
    profile_path.write_text(yaml.dump({"name": "Test", "services": services}))

    stale_backends = {
        "ollama": {"base_url": "http://old:11434/v1", "type": "openai_compat"},
        "vllm": {"base_url": "http://old:8000/v1", "type": "openai_compat"},
    }
    config_path = tmp_path / "llm.yaml"
    config_path.write_text(yaml.dump({"backends": stale_backends}))

    apply_service_urls(UserProfile(profile_path), config_path)

    updated = yaml.safe_load(config_path.read_text())["backends"]
    assert updated["ollama"]["base_url"] == "http://myserver:11434/v1"
    assert updated["vllm"]["base_url"] == "http://localhost:8000/v1"
|
||||
|
||||
|
||||
def test_missing_llm_yaml_is_noop(tmp_path):
    """A nonexistent llm.yaml path must be silently skipped, not raise."""
    profile_yaml = tmp_path / "user.yaml"
    profile_yaml.write_text(yaml.dump({
        "name": "Test",
        "services": {
            "ollama_host": "localhost", "ollama_port": 11434,
            "ollama_ssl": False, "ollama_ssl_verify": True,
            "vllm_host": "localhost", "vllm_port": 8000,
            "vllm_ssl": False, "vllm_ssl_verify": True,
            "searxng_host": "localhost", "searxng_port": 8888,
            "searxng_ssl": False, "searxng_ssl_verify": True,
        },
    }))
    # The target file is never created; the call must return without error.
    apply_service_urls(UserProfile(profile_yaml), tmp_path / "nonexistent.yaml")
|
||||
Loading…
Reference in a new issue