# Avocet — environment variable configuration
# Copy to .env and fill in values. All keys are optional.
# label_tool.yaml takes precedence over env vars where both exist.

# ── Local inference (Ollama) ───────────────────────────────────────────────────
# OLLAMA_HOST defaults to http://localhost:11434 if unset.
OLLAMA_HOST=http://localhost:11434
OLLAMA_MODEL=llama3.2:3b

# ── cf-orch coordinator (paid/premium tiers) ───────────────────────────────────
# Required for multi-GPU LLM benchmarking via the cf-orch benchmark harness.
# Free-tier users can leave these unset and use Ollama only.
CF_ORCH_URL=http://localhost:7700
CF_LICENSE_KEY=CFG-AVCT-xxxx-xxxx-xxxx

# ── Cloud LLM backends (optional — paid/premium) ──────────────────────────────
# Set one of these to use a cloud LLM instead of a local model.
# ANTHROPIC_API_KEY=sk-ant-...
# OPENAI_API_KEY=sk-...