From b3df515775c6756cff2487a1c1ac592f6710a8d0 Mon Sep 17 00:00:00 2001
From: imbytecat
Date: Thu, 19 Mar 2026 09:51:55 +0800
Subject: [PATCH] chore: sync JSONC config overrides

---
 oh-my-opencode.jsonc | 13 +++++++++----
 opencode.jsonc       | 30 +-----------------------------
 2 files changed, 10 insertions(+), 33 deletions(-)

diff --git a/oh-my-opencode.jsonc b/oh-my-opencode.jsonc
index d5a1319..11606c2 100644
--- a/oh-my-opencode.jsonc
+++ b/oh-my-opencode.jsonc
@@ -4,8 +4,8 @@
     "sisyphus": { "model": "openai/gpt-5.4", "variant": "medium" },
     "hephaestus": { "model": "openai/gpt-5.3-codex", "variant": "medium" },
     "oracle": { "model": "openai/gpt-5.4", "variant": "high" },
-    "librarian": { "model": "litellm/minimax-m2.5" },
-    "explore": { "model": "litellm/minimax-m2.5" },
+    "librarian": { "model": "openai/gpt-5.4-mini" },
+    "explore": { "model": "openai/gpt-5.4-mini" },
     "multimodal-looker": { "model": "openai/gpt-5.4", "variant": "medium" },
     "prometheus": { "model": "openai/gpt-5.4", "variant": "high" },
     "metis": { "model": "openai/gpt-5.4", "variant": "high" },
@@ -18,10 +18,15 @@
     "ultrabrain": { "model": "openai/gpt-5.3-codex", "variant": "xhigh" },
     "deep": { "model": "openai/gpt-5.3-codex", "variant": "medium" },
     "artistry": { "model": "openai/gpt-5.4", "variant": "medium" },
-    "quick": { "model": "litellm/minimax-m2.5" },
+    "quick": { "model": "openai/gpt-5.4-mini" },
     "unspecified-low": { "model": "openai/gpt-5.4", "variant": "medium" },
     "unspecified-high": { "model": "openai/gpt-5.4", "variant": "high" },
-    "writing": { "model": "litellm/kimi-k2.5" }
+    "writing": { "model": "openai/gpt-5.4", "variant": "medium" }
+  },
+  "experimental": {
+    "dynamic_context_pruning": {
+      "enabled": true
+    }
   },
   "git_master": {
     "commit_footer": false,
diff --git a/opencode.jsonc b/opencode.jsonc
index 17f55ee..3dc36b8 100644
--- a/opencode.jsonc
+++ b/opencode.jsonc
@@ -3,7 +3,7 @@
   "plugin": ["oh-my-opencode@latest"],
   "autoupdate": false,
   "model": "openai/gpt-5.4",
-  "small_model": "litellm/minimax-m2.5",
+  "small_model": "openai/gpt-5.4-mini",
   "provider": {
     "openai": {
       "npm": "@ai-sdk/openai",
@@ -31,34 +31,6 @@
         "limit": { "context": 400000, "input": 272000, "output": 128000 }
       }
     }
-    },
-    "litellm": {
-      "name": "LiteLLM",
-      "npm": "@ai-sdk/openai-compatible",
-      "options": {
-        "baseURL": "{env:LITELLM_BASE_URL}/v1",
-        "apiKey": "{env:LITELLM_API_KEY}"
-      },
-      "models": {
-        "glm-5": {
-          "name": "GLM-5",
-          "reasoning": true,
-          "modalities": { "input": ["text"], "output": ["text"] },
-          "limit": { "context": 204800, "output": 131072 }
-        },
-        "kimi-k2.5": {
-          "name": "Kimi K2.5",
-          "reasoning": true,
-          "modalities": { "input": ["text", "image", "video"], "output": ["text"] },
-          "limit": { "context": 262144, "output": 262144 }
-        },
-        "minimax-m2.5": {
-          "name": "MiniMax M2.5",
-          "reasoning": true,
-          "modalities": { "input": ["text"], "output": ["text"] },
-          "limit": { "context": 204800, "output": 131072 }
-        }
-      }
-    }
   },
   "experimental": {