- llm.yaml + example: replace localhost URLs with Docker service names (ollama:11434, vllm:8000, vision:8002); replace personal model names (meghan-cover-writer, llama3.1:8b) with llama3.2:3b
- user.yaml.example: update service hosts to Docker names (ollama, vllm, searxng) and searxng port from 8888 (host-mapped) to 8080 (internal)
- wizard step 5: fix hardcoded localhost defaults — wizard runs inside Docker, so service name defaults are required for connection tests to pass
- scrapers/companyScraper.py: bundle scraper so Dockerfile COPY succeeds
- setup.sh: remove host Ollama install (conflicts with Docker Ollama on port 11434); Docker entrypoint handles model download automatically
- README + setup.sh banner: add Circuit Forge mission statement
63 lines
1.3 KiB
YAML
---
# LLM backend registry (llm.yaml).
# Each entry under `backends:` describes one provider; the three
# *_fallback_order lists pick which backend is tried first for general,
# research, and vision requests respectively.
backends:
  anthropic:
    # Key is read from the environment, not stored in this file.
    api_key_env: ANTHROPIC_API_KEY
    enabled: false
    model: claude-sonnet-4-6
    supports_images: true
    type: anthropic
  claude_code:
    # Local proxy speaking the OpenAI-compatible API; key is unused.
    api_key: any
    base_url: http://localhost:3009/v1
    enabled: false
    model: claude-code-terminal
    supports_images: true
    type: openai_compat
  github_copilot:
    api_key: any
    base_url: http://localhost:3010/v1
    enabled: false
    model: gpt-4o
    supports_images: false
    type: openai_compat
  ollama:
    # Docker service name, not localhost — this config runs inside the
    # compose network (see commit message at top of repo history).
    api_key: ollama
    base_url: http://ollama:11434/v1
    enabled: true
    model: llama3.2:3b
    supports_images: false
    type: openai_compat
  ollama_research:
    api_key: ollama
    base_url: http://ollama:11434/v1
    enabled: true
    model: llama3.2:3b
    supports_images: false
    type: openai_compat
  vision_service:
    base_url: http://vision:8002
    enabled: true
    supports_images: true
    type: vision_service
  vllm:
    api_key: ''
    base_url: http://vllm:8000/v1
    enabled: true
    # Sentinel: model name is auto-discovered from the vLLM server.
    model: __auto__
    supports_images: false
    type: openai_compat
fallback_order:
  - ollama
  - claude_code
  - vllm
  - github_copilot
  - anthropic
research_fallback_order:
  - claude_code
  - vllm
  - ollama_research
  - github_copilot
  - anthropic
vision_fallback_order:
  - vision_service
  - claude_code
  - anthropic