preflight.py now detects when a managed service (ollama, vllm, vision, searxng) is already running on its configured port and adopts it rather than reassigning or conflicting: - Generates compose.override.yml disabling Docker containers for adopted services (profiles: [_external_] — a profile never passed via --profile) - Rewrites config/llm.yaml base_url entries to host.docker.internal:<port> so the app container can reach host-side services through Docker's host-gateway mapping - compose.yml: adds extra_hosts host.docker.internal:host-gateway to the app service (required on Linux; no-op on macOS Docker Desktop) - .gitignore: excludes compose.override.yml (auto-generated, host-specific) Only streamlit is non-adoptable and continues to reassign on conflict.
63 lines · 1.3 KiB · YAML
# LLM backend configuration (config/llm.yaml).
# NOTE: base_url entries pointing at host.docker.internal:<port> are rewritten
# by preflight.py when it adopts an already-running host-side service; the app
# container reaches them via Docker's host-gateway mapping (extra_hosts in
# compose.yml). Do not hand-edit those hosts back to localhost.
backends:
  # Hosted Anthropic API — disabled by default; key comes from the environment.
  anthropic:
    api_key_env: ANTHROPIC_API_KEY
    enabled: false
    model: claude-sonnet-4-6
    supports_images: true
    type: anthropic
  # Local claude-code terminal bridge exposing an OpenAI-compatible endpoint.
  claude_code:
    api_key: any
    base_url: http://localhost:3009/v1
    enabled: false
    model: claude-code-terminal
    supports_images: true
    type: openai_compat
  # GitHub Copilot proxy, OpenAI-compatible.
  github_copilot:
    api_key: any
    base_url: http://localhost:3010/v1
    enabled: false
    model: gpt-4o
    supports_images: false
    type: openai_compat
  # Ollama on the host (adopted by preflight.py when already running on 11434).
  ollama:
    api_key: ollama
    base_url: http://host.docker.internal:11434/v1
    enabled: true
    model: llama3.2:3b
    supports_images: false
    type: openai_compat
  # Same Ollama instance, separate entry for the research fallback chain.
  ollama_research:
    api_key: ollama
    base_url: http://host.docker.internal:11434/v1
    enabled: true
    model: llama3.2:3b
    supports_images: false
    type: openai_compat
  # Dedicated vision service on the host (port 8002); no model/api_key fields.
  vision_service:
    base_url: http://host.docker.internal:8002
    enabled: true
    supports_images: true
    type: vision_service
  # vLLM server on the host; __auto__ lets the client discover the served model.
  vllm:
    api_key: ''
    base_url: http://host.docker.internal:8000/v1
    enabled: true
    model: __auto__
    supports_images: false
    type: openai_compat

# Backend preference order for general requests (first enabled wins — TODO
# confirm selection semantics against the consumer).
fallback_order:
  - ollama
  - claude_code
  - vllm
  - github_copilot
  - anthropic

# Preference order for research-style requests.
research_fallback_order:
  - claude_code
  - vllm
  - ollama_research
  - github_copilot
  - anthropic

# Preference order for image/vision requests (image-capable backends only).
vision_fallback_order:
  - vision_service
  - claude_code
  - anthropic