跳转到内容

配置参考

Claudex 按以下顺序搜索配置文件:

  1. $CLAUDEX_CONFIG 环境变量
  2. ./claudex.toml(当前目录)
  3. ./.claudex/config.toml(当前目录)
  4. 父目录(最多向上 10 级),检查上述两种模式

参见「配置」页面了解完整详情。

# Claude 二进制文件路径(默认:从 PATH 中查找 "claude")
claude_binary = "claude"
# 代理服务器监听端口
proxy_port = 13456
# 代理服务器绑定地址
proxy_host = "127.0.0.1"
# 日志级别:trace, debug, info, warn, error
log_level = "info"
| 字段 | 类型 | 默认值 | 说明 |
| --- | --- | --- | --- |
| `claude_binary` | string | `"claude"` | Claude Code CLI 二进制文件路径 |
| `proxy_port` | integer | `13456` | 翻译代理监听端口 |
| `proxy_host` | string | `"127.0.0.1"` | 代理绑定地址 |
| `log_level` | string | `"info"` | 最低日志级别 |

为模型标识定义简称:

[model_aliases]
grok3 = "grok-3-beta"
gpt4o = "gpt-4o"
ds3 = "deepseek-chat"
claude = "claude-sonnet-4-20250514"

使用别名:

终端示例:
claudex run grok -m grok3
[[profiles]]
name = "grok"
provider_type = "OpenAICompatible"
base_url = "https://api.x.ai/v1"
api_key = "xai-..."
# api_key_keyring = "grok-api-key"
default_model = "grok-3-beta"
auth_type = "api-key" # "api-key"(默认)或 "oauth"
# oauth_provider = "openai" # auth_type = "oauth" 时必填
backup_providers = ["deepseek"]
custom_headers = {}
extra_env = {}
priority = 100
enabled = true
# 模型 slot 映射(可选)
[profiles.models]
haiku = "grok-3-mini-beta"
sonnet = "grok-3-beta"
opus = "grok-3-beta"
| 字段 | 类型 | 默认值 | 说明 |
| --- | --- | --- | --- |
| `name` | string | 必填 | 唯一标识名 |
| `provider_type` | string | `"DirectAnthropic"` | 可选值:`"DirectAnthropic"`、`"OpenAICompatible"`、`"OpenAIResponses"` |
| `base_url` | string | 必填 | 提供商 API 端点 URL |
| `api_key` | string | `""` | 明文 API 密钥 |
| `api_key_keyring` | string | — | OS 钥匙链条目名(覆盖 `api_key`) |
| `default_model` | string | 必填 | 默认模型标识 |
| `auth_type` | string | `"api-key"` | 认证方式:`"api-key"` 或 `"oauth"` |
| `oauth_provider` | string | — | OAuth 提供商名称(`auth_type = "oauth"` 时必填)。可选:`claude`、`openai`、`google`、`qwen`、`kimi`、`github` |
| `backup_providers` | string[] | `[]` | 故障转移 profile 名,按顺序尝试 |
| `custom_headers` | map | `{}` | 每个请求附带的额外 HTTP 头 |
| `extra_env` | map | `{}` | 启动 Claude 时设置的环境变量 |
| `priority` | integer | `100` | 智能路由优先级权重(越高越优先) |
| `enabled` | boolean | `true` | 是否激活此 profile |

可选的 [profiles.models] 表将 Claude Code 的 /model 切换器 slot 映射到提供商特定的模型名。当你在 Claude Code 中切换模型(例如 /model opus)时,Claudex 会将请求翻译为映射的模型。

[profiles.models]
haiku = "grok-3-mini-beta" # 映射 /model haiku
sonnet = "grok-3-beta" # 映射 /model sonnet
opus = "grok-3-beta" # 映射 /model opus
| 字段 | 类型 | 说明 |
| --- | --- | --- |
| `haiku` | string | Claude Code 选择 haiku 时使用的模型 |
| `sonnet` | string | Claude Code 选择 sonnet 时使用的模型 |
| `opus` | string | Claude Code 选择 opus 时使用的模型 |
# Anthropic(DirectAnthropic — 无需翻译)
[[profiles]]
name = "anthropic"
provider_type = "DirectAnthropic"
base_url = "https://api.anthropic.com"
api_key = "sk-ant-..."
default_model = "claude-sonnet-4-20250514"
# MiniMax(DirectAnthropic — 无需翻译)
[[profiles]]
name = "minimax"
provider_type = "DirectAnthropic"
base_url = "https://api.minimax.io/anthropic"
api_key = "..."
default_model = "claude-sonnet-4-20250514"
backup_providers = ["anthropic"]
# OpenRouter(OpenAICompatible — 需要翻译)
[[profiles]]
name = "openrouter"
provider_type = "OpenAICompatible"
base_url = "https://openrouter.ai/api/v1"
api_key = "..."
default_model = "anthropic/claude-sonnet-4"
# Grok(OpenAICompatible — 需要翻译)
[[profiles]]
name = "grok"
provider_type = "OpenAICompatible"
base_url = "https://api.x.ai/v1"
api_key = "xai-..."
default_model = "grok-3-beta"
backup_providers = ["deepseek"]
# OpenAI(OpenAICompatible — 需要翻译)
[[profiles]]
name = "chatgpt"
provider_type = "OpenAICompatible"
base_url = "https://api.openai.com/v1"
api_key = "sk-..."
default_model = "gpt-4o"
# DeepSeek(OpenAICompatible — 需要翻译)
[[profiles]]
name = "deepseek"
provider_type = "OpenAICompatible"
base_url = "https://api.deepseek.com"
api_key = "..."
default_model = "deepseek-chat"
backup_providers = ["grok"]
# Kimi / Moonshot(OpenAICompatible — 需要翻译)
[[profiles]]
name = "kimi"
provider_type = "OpenAICompatible"
base_url = "https://api.moonshot.cn/v1"
api_key = "..."
default_model = "moonshot-v1-128k"
# GLM / 智谱(OpenAICompatible — 需要翻译)
[[profiles]]
name = "glm"
provider_type = "OpenAICompatible"
base_url = "https://open.bigmodel.cn/api/paas/v4"
api_key = "..."
default_model = "glm-4-plus"
# Ollama(本地,无需 API 密钥)
[[profiles]]
name = "local-qwen"
provider_type = "OpenAICompatible"
base_url = "http://localhost:11434/v1"
api_key = ""
default_model = "qwen2.5:72b"
enabled = false
# vLLM / LM Studio(本地)
[[profiles]]
name = "local-llama"
provider_type = "OpenAICompatible"
base_url = "http://localhost:8000/v1"
api_key = ""
default_model = "llama-3.3-70b"
enabled = false
# ChatGPT/Codex 订阅(OpenAIResponses — Responses API 翻译)
[[profiles]]
name = "codex-sub"
provider_type = "OpenAIResponses"
base_url = "https://chatgpt.com/backend-api/codex"
default_model = "gpt-4o"
auth_type = "oauth"
oauth_provider = "openai"
# OpenAI OAuth(从 Codex CLI ~/.codex/auth.json 读取 token)
[[profiles]]
name = "chatgpt-oauth"
provider_type = "OpenAICompatible"
base_url = "https://api.openai.com/v1"
default_model = "gpt-4o"
auth_type = "oauth"
oauth_provider = "openai"
[profiles.models]
haiku = "gpt-4o-mini"
sonnet = "gpt-4o"
opus = "o1"
# Claude 订阅(跳过代理,使用 Claude 原生 OAuth,从 ~/.claude 读取)
[[profiles]]
name = "claude-sub"
provider_type = "DirectAnthropic"
base_url = "https://api.anthropic.com"
default_model = "claude-sonnet-4-20250514"
auth_type = "oauth"
oauth_provider = "claude"
[profiles.models]
haiku = "claude-haiku-4-20250514"
sonnet = "claude-sonnet-4-20250514"
opus = "claude-opus-4-20250514"
# Google Gemini OAuth
[[profiles]]
name = "gemini"
provider_type = "OpenAICompatible"
base_url = "https://generativelanguage.googleapis.com/v1beta/openai"
default_model = "gemini-2.5-pro"
auth_type = "oauth"
oauth_provider = "google"
# Kimi OAuth
[[profiles]]
name = "kimi-oauth"
provider_type = "OpenAICompatible"
base_url = "https://api.moonshot.cn/v1"
default_model = "moonshot-v1-128k"
auth_type = "oauth"
oauth_provider = "kimi"
# Qwen OAuth(通义千问)
[[profiles]]
name = "qwen-oauth"
provider_type = "OpenAICompatible"
base_url = "https://chat.qwenlm.ai/api/chat/v1"
default_model = "qwen-max"
auth_type = "oauth"
oauth_provider = "qwen"
# GitHub Copilot OAuth
[[profiles]]
name = "github-copilot"
provider_type = "OpenAICompatible"
base_url = "https://api.githubcopilot.com"
default_model = "gpt-4o"
auth_type = "oauth"
oauth_provider = "github"
# ChatGPT/Codex 订阅 OAuth(OpenAIResponses)
[[profiles]]
name = "codex-sub"
provider_type = "OpenAIResponses"
base_url = "https://chatgpt.com/backend-api/codex"
default_model = "gpt-4o"
auth_type = "oauth"
oauth_provider = "openai"
[profiles.models]
haiku = "gpt-4o-mini"
sonnet = "gpt-4o"
opus = "o1-pro"
[router]
enabled = false
profile = "local-qwen" # 复用此 profile 的 base_url + api_key
model = "qwen2.5:3b" # 覆盖模型(可选)
| 字段 | 类型 | 默认值 | 说明 |
| --- | --- | --- | --- |
| `enabled` | boolean | `false` | 启用智能路由 |
| `profile` | string | `""` | 复用的 profile 名(使用其 base_url + api_key) |
| `model` | string | `""` | 分类模型覆盖(默认用 profile 的 default_model) |
[router.rules]
code = "deepseek"
analysis = "grok"
creative = "chatgpt"
search = "kimi"
math = "deepseek"
default = "grok"
| 键 | 说明 |
| --- | --- |
| `code` | 编程任务对应的 profile |
| `analysis` | 分析推理对应的 profile |
| `creative` | 创意写作对应的 profile |
| `search` | 搜索研究对应的 profile |
| `math` | 数学逻辑对应的 profile |
| `default` | 未分类时的默认 profile |
[context.compression]
enabled = false
threshold_tokens = 50000
keep_recent = 10
profile = "local-qwen" # 复用此 profile 的 base_url + api_key
model = "qwen2.5:3b" # 覆盖模型(可选)
| 字段 | 类型 | 默认值 | 说明 |
| --- | --- | --- | --- |
| `enabled` | boolean | `false` | 启用对话压缩 |
| `threshold_tokens` | integer | `50000` | token 数超过此值时触发压缩 |
| `keep_recent` | integer | `10` | 始终保留最近 N 条消息不压缩 |
| `profile` | string | `""` | 复用的 profile 名(使用其 base_url + api_key) |
| `model` | string | `""` | 摘要模型覆盖(默认用 profile 的 default_model) |
[context.sharing]
enabled = false
max_context_size = 2000
| 字段 | 类型 | 默认值 | 说明 |
| --- | --- | --- | --- |
| `enabled` | boolean | `false` | 启用跨 profile 上下文共享 |
| `max_context_size` | integer | `2000` | 从其他 profile 注入的最大 token 数 |
[context.rag]
enabled = false
index_paths = ["./src", "./docs"]
profile = "local-qwen" # 复用此 profile 的 base_url + api_key
model = "nomic-embed-text" # embedding 模型
chunk_size = 512
top_k = 5
| 字段 | 类型 | 默认值 | 说明 |
| --- | --- | --- | --- |
| `enabled` | boolean | `false` | 启用本地 RAG |
| `index_paths` | string[] | `[]` | 要索引的目录 |
| `profile` | string | `""` | 复用的 profile 名(使用其 base_url + api_key) |
| `model` | string | `""` | embedding 模型名称(默认用 profile 的 default_model) |
| `chunk_size` | integer | `512` | 文本块大小(token) |
| `top_k` | integer | `5` | 注入的结果数量 |