Some checks failed
CI / test (pull_request) Failing after 1m16s
- Lower vue_ui_beta gate to "free" so all licensed users can access the new UI without a paid subscription
- Remove "Paid tier" wording from the Try New UI banner
- Fix Vue SPA navigation in cloud/demo deployments: add VITE_BASE_PATH build arg so Vite sets the correct subpath base, and pass import.meta.env.BASE_URL to createWebHistory() so router links emit /peregrine/... paths that Caddy can match
- Fix feedback button missing on cloud instance by passing FORGEJO_API_TOKEN through compose.cloud.yml
- Remove vLLM container from compose.yml (vLLM dropped from stack; cf-research service in cfcore covers the use case)
- Fix cloud config path in Apply page (use get_config_dir() so per-user cloud data roots resolve correctly for user.yaml and resume YAML)
- Refactor generate_cover_letter._build_system_context and _build_mission_notes to accept explicit profile arg (enables per-user cover letter generation in cloud multi-tenant mode)
- Add API proxy block to nginx.conf (Vue web container can now call /api/ directly without Vite dev proxy)
- Update .env.example: remove vLLM vars, add research model + tuning vars for external vLLM deployments
- Update llm.yaml: switch vllm base_url to host.docker.internal (vLLM now runs outside Docker stack)

Closes #63 (feedback button)
Related: #8 (Vue SPA), #50–#62 (parity milestone)
75 lines
1.6 KiB
YAML
75 lines
1.6 KiB
YAML
# LLM backend registry.
# Each entry under `backends` describes one provider endpoint; the three
# *_fallback_order lists name the per-task try-order by backend key.
# NOTE(review): reconstructed from a line-number-garbled web scrape — the
# original indentation was lost and gutter `|` artifacts were interleaved.
# Key order and all scalar values are preserved exactly as they appeared;
# confirm nesting against the deployed llm.yaml.
backends:
  anthropic:
    api_key_env: ANTHROPIC_API_KEY
    enabled: false
    model: claude-sonnet-4-6
    supports_images: true
    type: anthropic
  claude_code:
    api_key: any
    base_url: http://localhost:3009/v1
    enabled: false
    model: claude-code-terminal
    supports_images: true
    type: openai_compat
  github_copilot:
    api_key: any
    base_url: http://localhost:3010/v1
    enabled: false
    model: gpt-4o
    supports_images: false
    type: openai_compat
  ollama:
    api_key: ollama
    base_url: http://host.docker.internal:11434/v1
    enabled: true
    model: llama3.2:3b
    supports_images: false
    type: openai_compat
  ollama_research:
    api_key: ollama
    base_url: http://ollama_research:11434/v1
    enabled: true
    model: llama3.1:8b
    supports_images: false
    type: openai_compat
  vision_service:
    base_url: http://host.docker.internal:8002
    enabled: true
    supports_images: true
    type: vision_service
  vllm:
    api_key: ''
    base_url: http://host.docker.internal:8000/v1
    enabled: true
    model: __auto__
    supports_images: false
    type: openai_compat
  # NOTE(review): cf_orch appeared between `vllm` and `vllm_research` in the
  # scrape. It lacks the common backend keys (type/enabled/base_url) and its
  # `service: vllm` points at the vllm backend, so it may belong at the top
  # level of the document instead of inside `backends` — confirm before
  # relying on this nesting.
  cf_orch:
    service: vllm
    model_candidates:
      - Qwen2.5-3B-Instruct
    ttl_s: 300
  vllm_research:
    api_key: ''
    base_url: http://host.docker.internal:8000/v1
    enabled: true
    model: __auto__
    supports_images: false
    type: openai_compat

# Try-order for general generation tasks (keys reference `backends` above).
fallback_order:
  - ollama
  - claude_code
  - vllm
  - github_copilot
  - anthropic

# Try-order for research tasks.
research_fallback_order:
  - claude_code
  - vllm_research
  - ollama_research
  - github_copilot
  - anthropic

# Try-order for image-capable tasks.
vision_fallback_order:
  - vision_service
  - claude_code
  - anthropic