diff --git a/.env.example b/.env.example
index 8607e32..514c955 100644
--- a/.env.example
+++ b/.env.example
@@ -1,6 +1,7 @@
 # Provider
+LITELLM_BASE_URL=https://ai.furtherverse.com
 LITELLM_API_KEY=
-BAILIAN_API_KEY=sk-sp-xxxxxxxx
+BAILIAN_API_KEY=
 
 # MCP
 EXA_API_KEY=
diff --git a/oh-my-opencode.jsonc b/oh-my-opencode.jsonc
index 93c27d3..ff6eb69 100644
--- a/oh-my-opencode.jsonc
+++ b/oh-my-opencode.jsonc
@@ -1,26 +1,26 @@
 {
   "$schema": "https://raw.githubusercontent.com/code-yeongyu/oh-my-opencode/master/assets/oh-my-opencode.schema.json",
   "agents": {
-    "sisyphus": { "model": "litellm/claude-opus-4-6", "variant": "max" },
-    "hephaestus": { "model": "litellm/gpt-5.3-codex", "variant": "medium" },
-    "oracle": { "model": "litellm/gpt-5.2", "variant": "high" },
+    "sisyphus": { "model": "anthropic/claude-opus-4-6", "variant": "max" },
+    "hephaestus": { "model": "openai/gpt-5.3-codex", "variant": "medium" },
+    "oracle": { "model": "openai/gpt-5.2", "variant": "high" },
     "librarian": { "model": "bailian-coding-plan/MiniMax-M2.5" },
     "explore": { "model": "bailian-coding-plan/MiniMax-M2.5" },
     "multimodal-looker": { "model": "bailian-coding-plan/kimi-k2.5" },
-    "prometheus": { "model": "litellm/claude-opus-4-6", "variant": "max" },
-    "metis": { "model": "litellm/claude-opus-4-6", "variant": "max" },
-    "momus": { "model": "litellm/gpt-5.2", "variant": "medium" },
+    "prometheus": { "model": "anthropic/claude-opus-4-6", "variant": "max" },
+    "metis": { "model": "anthropic/claude-opus-4-6", "variant": "max" },
+    "momus": { "model": "openai/gpt-5.2", "variant": "medium" },
     "atlas": { "model": "bailian-coding-plan/kimi-k2.5" },
-    "sisyphus-junior": { "model": "litellm/claude-sonnet-4-6" }
+    "sisyphus-junior": { "model": "anthropic/claude-sonnet-4-6" }
   },
   "categories": {
-    "visual-engineering": { "model": "litellm/gemini-3.1-pro-preview", "variant": "high" },
-    "ultrabrain": { "model": "litellm/gpt-5.3-codex", "variant": "xhigh" },
-    "deep": { "model": "litellm/gpt-5.3-codex", "variant": "medium" },
-    "artistry": { "model": "litellm/gemini-3.1-pro-preview", "variant": "high" },
-    "quick": { "model": "litellm/claude-haiku-4-5" },
-    "unspecified-low": { "model": "litellm/claude-sonnet-4-6" },
-    "unspecified-high": { "model": "litellm/claude-opus-4-6", "variant": "max" },
+    "visual-engineering": { "model": "google/gemini-3.1-pro-preview", "variant": "high" },
+    "ultrabrain": { "model": "openai/gpt-5.3-codex", "variant": "xhigh" },
+    "deep": { "model": "openai/gpt-5.3-codex", "variant": "medium" },
+    "artistry": { "model": "google/gemini-3.1-pro-preview", "variant": "high" },
+    "quick": { "model": "anthropic/claude-haiku-4-5" },
+    "unspecified-low": { "model": "anthropic/claude-sonnet-4-6" },
+    "unspecified-high": { "model": "anthropic/claude-opus-4-6", "variant": "max" },
     "writing": { "model": "bailian-coding-plan/kimi-k2.5" }
   },
   "git_master": {
diff --git a/opencode.jsonc b/opencode.jsonc
index 846fbc2..47587b8 100644
--- a/opencode.jsonc
+++ b/opencode.jsonc
@@ -1,20 +1,16 @@
 {
   "$schema": "https://opencode.ai/config.json",
   "autoupdate": false,
-  "model": "litellm/claude-opus-4-6",
-  "small_model": "litellm/claude-haiku-4-5",
+  "model": "anthropic/claude-opus-4-6",
+  "small_model": "anthropic/claude-haiku-4-5",
   "plugin": ["oh-my-opencode@latest"],
   "provider": {
-    "litellm": {
-      "name": "LiteLLM",
-      "npm": "@ai-sdk/openai-compatible",
+    "anthropic": {
       "options": {
-        "baseURL": "https://ai.furtherverse.com/v1",
-        "apiKey": "{env:LITELLM_API_KEY}",
-        "setCacheKey": true
+        "baseURL": "{env:LITELLM_BASE_URL}/v1",
+        "apiKey": "{env:LITELLM_API_KEY}"
       },
       "models": {
-        // Anthropic
         "claude-opus-4-6": {
           "name": "Claude Opus 4.6",
           "reasoning": true,
@@ -32,23 +28,15 @@
         "reasoning": true,
         "modalities": { "input": ["text", "image", "pdf"], "output": ["text"] },
         "limit": { "context": 200000, "output": 64000 }
-      },
-
-      // Google
-      "gemini-3.1-pro-preview": {
-        "name": "Gemini 3.1 Pro",
-        "reasoning": true,
-        "modalities": { "input": ["text", "image", "video", "audio", "pdf"], "output": ["text"] },
-        "limit": { "context": 1048576, "output": 65536 }
-      },
-      "gemini-3-flash-preview": {
-        "name": "Gemini 3 Flash",
-        "reasoning": true,
-        "modalities": { "input": ["text", "image", "video", "audio", "pdf"], "output": ["text"] },
-        "limit": { "context": 1048576, "output": 65536 }
-      },
-
-      // OpenAI
+        }
+      }
+    },
+    "openai": {
+      "options": {
+        "baseURL": "{env:LITELLM_BASE_URL}/v1",
+        "apiKey": "{env:LITELLM_API_KEY}"
+      },
+      "models": {
         "gpt-5.2": {
           "name": "GPT-5.2",
           "reasoning": true,
@@ -63,6 +51,26 @@
         }
       }
     },
+    "google": {
+      "options": {
+        "baseURL": "{env:LITELLM_BASE_URL}/v1beta",
+        "apiKey": "{env:LITELLM_API_KEY}"
+      },
+      "models": {
+        "gemini-3.1-pro-preview": {
+          "name": "Gemini 3.1 Pro",
+          "reasoning": true,
+          "modalities": { "input": ["text", "image", "video", "audio", "pdf"], "output": ["text"] },
+          "limit": { "context": 1048576, "output": 65536 }
+        },
+        "gemini-3-flash-preview": {
+          "name": "Gemini 3 Flash",
+          "reasoning": true,
+          "modalities": { "input": ["text", "image", "video", "audio", "pdf"], "output": ["text"] },
+          "limit": { "context": 1048576, "output": 65536 }
+        }
+      }
+    },
     "bailian-coding-plan": {
       "npm": "@ai-sdk/anthropic",
       "name": "Model Studio Coding Plan",