首页/分享/文章阅读

opencode tui/cli 的配置文件opencode.json正确格式

分享 2026-04-04 59 0

安装opencode tui最新版:

打开官网:https://opencode.ai/

找到一键安装的命令,复制它,连接vps,在根目录下执行即可:

curl -fsSL https://opencode.ai/install | bash

找到配置文件存放位置:

/root/.opencode

新建配置文件:

opencode.json

之后输入以下格式:

{
  "$schema": "https://opencode.ai/config.json",
  "provider": {
    "自定义名字": {
      "npm": "@ai-sdk/openai-compatible",
      "name": "自定义名字",
      "options": {
        "baseURL": "https://中转站1.com/v1",
        "apiKey": "sk-秘钥"
      },
      "models": {
        "gpt-5.4": {
          "name": "gpt-5.4"
        }
      }
    },
    "openai": {
      "name": "openai-xianxian",
      "options": {
        "baseURL": "https://中转站2.com/v1",
        "apiKey": "sk-秘钥"
      },
      "models": {
        "gpt-5-codex": {
          "name": "GPT-5 Codex",
          "limit": {
            "context": 400000,
            "output": 128000
          },
          "options": {
            "store": false
          },
          "variants": {
            "low": {},
            "medium": {},
            "high": {}
          }
        },
        "gpt-5.1-codex": {
          "name": "GPT-5.1 Codex",
          "limit": {
            "context": 400000,
            "output": 128000
          },
          "options": {
            "store": false
          },
          "variants": {
            "low": {},
            "medium": {},
            "high": {}
          }
        },
        "gpt-5.1-codex-max": {
          "name": "GPT-5.1 Codex Max",
          "limit": {
            "context": 400000,
            "output": 128000
          },
          "options": {
            "store": false
          },
          "variants": {
            "low": {},
            "medium": {},
            "high": {}
          }
        },
        "gpt-5.1-codex-mini": {
          "name": "GPT-5.1 Codex Mini",
          "limit": {
            "context": 400000,
            "output": 128000
          },
          "options": {
            "store": false
          },
          "variants": {
            "low": {},
            "medium": {},
            "high": {}
          }
        },
        "gpt-5.2": {
          "name": "GPT-5.2",
          "limit": {
            "context": 400000,
            "output": 128000
          },
          "options": {
            "store": false
          },
          "variants": {
            "low": {},
            "medium": {},
            "high": {},
            "xhigh": {}
          }
        },
        "gpt-5.4": {
          "name": "GPT-5.4",
          "limit": {
            "context": 1050000,
            "output": 128000
          },
          "options": {
            "store": false
          },
          "variants": {
            "low": {},
            "medium": {},
            "high": {},
            "xhigh": {}
          }
        },
        "gpt-5.3-codex-spark": {
          "name": "GPT-5.3 Codex Spark",
          "limit": {
            "context": 128000,
            "output": 32000
          },
          "options": {
            "store": false
          },
          "variants": {
            "low": {},
            "medium": {},
            "high": {},
            "xhigh": {}
          }
        },
        "gpt-5.3-codex": {
          "name": "GPT-5.3 Codex",
          "limit": {
            "context": 400000,
            "output": 128000
          },
          "options": {
            "store": false
          },
          "variants": {
            "low": {},
            "medium": {},
            "high": {},
            "xhigh": {}
          }
        },
        "gpt-5.2-codex": {
          "name": "GPT-5.2 Codex",
          "limit": {
            "context": 400000,
            "output": 128000
          },
          "options": {
            "store": false
          },
          "variants": {
            "low": {},
            "medium": {},
            "high": {},
            "xhigh": {}
          }
        },
        "codex-mini-latest": {
          "name": "Codex Mini",
          "limit": {
            "context": 200000,
            "output": 100000
          },
          "options": {
            "store": false
          },
          "variants": {
            "low": {},
            "medium": {},
            "high": {}
          }
        }
      }
    }
  }
}

之后可以在opencode中使用 /model 命令切换不同的模型。

为什么使用opencode?

opencode 的上下文压缩比 codex 做得更好,codex 经常遇到上下文超限,所以换回 opencode 了。

正文结束

上一篇:流量消耗器源码分享 | 下一篇:当前最新版本Claude Code v2.1.104 强制登录的解决办法

评论区

还没有评论,来坐沙发吧。