Files
localgenai/opencode/opencode.json
noisedestroyers a29793032d Document current coding-workflow stack state
Snapshot of where opencode + Qwen3-Coder + MCPs + Kimi-Linear + voice
  + Phoenix tracing land today, plus in-flight (oc-tree, kimi-linear
  context ramp) and next (ComfyUI) items with pointers to per-project
  NEXT_STEPS.md guides.
2026-05-10 21:14:43 -04:00

98 lines
2.4 KiB
JSON

{
  "$schema": "https://opencode.ai/config.json",
  "experimental": {
    "openTelemetry": true
  },
  "plugin": ["./.opencode/plugin/phoenix-bridge.js"],
  "provider": {
    "framework": {
      "npm": "@ai-sdk/openai-compatible",
      "name": "Framework Desktop (Strix Halo) — Ollama",
      "options": {
        "baseURL": "http://framework:11434/v1"
      },
      "models": {
        "qwen3-coder:30b": {
          "name": "Qwen3 Coder 30B (local)",
          "limit": {
            "context": 131072,
            "output": 16384
          }
        }
      }
    },
    "framework-vllm": {
      "npm": "@ai-sdk/openai-compatible",
      "name": "Framework Desktop (Strix Halo) — vLLM",
      "options": {
        "baseURL": "http://framework:8000/v1",
        "apiKey": "dummy"
      },
      "models": {
        "kimi-linear": {
          "name": "Kimi-Linear 48B-A3B (long-context, vLLM)",
          "limit": {
            "context": 32768,
            "output": 8192
          },
          "tool_call": false
        }
      }
    }
  },
  "mcp": {
    "playwright": {
      "type": "local",
      "command": ["npx", "-y", "@playwright/mcp@latest"],
      "enabled": true
    },
    "searxng": {
      "type": "local",
      "command": ["npx", "-y", "mcp-searxng"],
      "enabled": true,
      "environment": {
        "SEARXNG_URL": "https://searxng.n0n.io"
      }
    },
    "serena": {
      "type": "local",
      "command": [
        "serena", "start-mcp-server",
        "--context", "./serena-ide-trim.yml",
        "--project-from-cwd",
        "--open-web-dashboard", "false"
      ],
      "enabled": true
    },
    "basic-memory": {
      "type": "local",
      "command": ["uvx", "basic-memory", "mcp"],
      "enabled": true
    },
    "sequential-thinking": {
      "type": "local",
      "command": ["npx", "-y", "@modelcontextprotocol/server-sequential-thinking"],
      "enabled": true
    },
    "github": {
      "type": "local",
      "command": [
        "github-mcp-server",
        "stdio",
        "--read-only",
        "--toolsets", "repos,issues,pull_requests,code_security"
      ],
      "enabled": false
    },
    "task-master": {
      "type": "local",
      "command": ["npx", "-y", "task-master-ai"],
      "enabled": true,
      "environment": {
        "OLLAMA_BASE_URL": "http://framework:11434/v1"
      }
    }
  },
  "model": "framework/qwen3-coder:30b"
}