From a754756f9a51f09a98a6daf2b034a66c12d0a316 Mon Sep 17 00:00:00 2001
From: quanruzhuoxiu
Date: Mon, 27 Oct 2025 17:00:45 +0800
Subject: [PATCH] feat(env): add backward compatibility for MIDSCENE_OPENAI_*
 environment variables
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Add backward compatibility support for legacy MIDSCENE_OPENAI_* environment variables:
- MIDSCENE_OPENAI_INIT_CONFIG_JSON (now MIDSCENE_MODEL_INIT_CONFIG_JSON)
- MIDSCENE_OPENAI_HTTP_PROXY (now MIDSCENE_MODEL_HTTP_PROXY)
- MIDSCENE_OPENAI_SOCKS_PROXY (now MIDSCENE_MODEL_SOCKS_PROXY)

Changes:
- Add deprecated constants to types.ts with @deprecated tags
- Add legacy variables to MODEL_ENV_KEYS for overrideAIConfig support
- Update DEFAULT_MODEL_CONFIG_KEYS_LEGACY to use legacy variable names
- Implement priority fallback logic in decide-model-config.ts (new variables take precedence)
- Update documentation (zh/en model-provider.mdx) with deprecation notices

All 139 tests pass, confirming backward compatibility works correctly.

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude
---
 apps/site/docs/en/model-provider.mdx          |  9 ++--
 apps/site/docs/zh/model-provider.mdx          |  9 ++--
 packages/shared/src/env/constants.ts          | 11 +++--
 .../shared/src/env/decide-model-config.ts     | 47 +++++++++++++++++--
 packages/shared/src/env/types.ts              | 22 +++++++--
 5 files changed, 80 insertions(+), 18 deletions(-)

diff --git a/apps/site/docs/en/model-provider.mdx b/apps/site/docs/en/model-provider.mdx
index 8159937c7..502872823 100644
--- a/apps/site/docs/en/model-provider.mdx
+++ b/apps/site/docs/en/model-provider.mdx
@@ -45,9 +45,12 @@ Some advanced configs are also supported. Usually you don't need to use them.
 
 | Name | Description |
 |------|-------------|
-| `MIDSCENE_OPENAI_INIT_CONFIG_JSON` | Optional. Custom JSON config for OpenAI SDK initialization |
-| `MIDSCENE_OPENAI_HTTP_PROXY` | Optional. HTTP/HTTPS proxy configuration (e.g. `http://127.0.0.1:8080` or `https://proxy.example.com:8080`). This option has higher priority than `MIDSCENE_OPENAI_SOCKS_PROXY` |
-| `MIDSCENE_OPENAI_SOCKS_PROXY` | Optional. SOCKS proxy configuration (e.g. "socks5://127.0.0.1:1080") |
+| `MIDSCENE_MODEL_INIT_CONFIG_JSON` | Optional (recommended). Custom JSON config for OpenAI SDK initialization |
+| `MIDSCENE_MODEL_HTTP_PROXY` | Optional (recommended). HTTP/HTTPS proxy configuration (e.g. `http://127.0.0.1:8080` or `https://proxy.example.com:8080`). This option has higher priority than `MIDSCENE_MODEL_SOCKS_PROXY` |
+| `MIDSCENE_MODEL_SOCKS_PROXY` | Optional (recommended). SOCKS proxy configuration (e.g. "socks5://127.0.0.1:1080") |
+| `MIDSCENE_OPENAI_INIT_CONFIG_JSON` | Deprecated but still supported. Use `MIDSCENE_MODEL_INIT_CONFIG_JSON` instead |
+| `MIDSCENE_OPENAI_HTTP_PROXY` | Deprecated but still supported. Use `MIDSCENE_MODEL_HTTP_PROXY` instead |
+| `MIDSCENE_OPENAI_SOCKS_PROXY` | Deprecated but still supported. Use `MIDSCENE_MODEL_SOCKS_PROXY` instead |
 | `MIDSCENE_PREFERRED_LANGUAGE` | Optional. The preferred language for the model response. The default is `Chinese` if the current timezone is GMT+8 and `English` otherwise. |
 | `MIDSCENE_REPLANNING_CYCLE_LIMIT` | Optional. The maximum number of replanning cycles, default is 10 |
 | `OPENAI_MAX_TOKENS` | Optional. Maximum tokens for model response, default is 2048 |
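As a quick illustration of the table above, here is a minimal sketch of how the recommended variables could be set from a Node.js script before Midscene reads its configuration. All values are placeholders, and the deprecated `MIDSCENE_OPENAI_*` names would still be honored as a fallback.

```ts
// Illustrative placeholders only; set these before any Midscene agent is created.
process.env.MIDSCENE_MODEL_INIT_CONFIG_JSON = JSON.stringify({
  // Forwarded to OpenAI SDK initialization; the exact fields depend on your setup.
  defaultHeaders: { 'X-Example-Header': 'demo' },
});
// The HTTP/HTTPS proxy takes priority over the SOCKS proxy when both are set.
process.env.MIDSCENE_MODEL_HTTP_PROXY = 'http://127.0.0.1:8080';
process.env.MIDSCENE_MODEL_SOCKS_PROXY = 'socks5://127.0.0.1:1080';
```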
diff --git a/apps/site/docs/zh/model-provider.mdx b/apps/site/docs/zh/model-provider.mdx
index 22b78ea40..21b27492c 100644
--- a/apps/site/docs/zh/model-provider.mdx
+++ b/apps/site/docs/zh/model-provider.mdx
@@ -48,9 +48,12 @@ Midscene 默认集成了 OpenAI SDK 调用 AI 服务。使用这个 SDK 限定
 
 | 名称 | 描述 |
 |------|-------------|
-| `MIDSCENE_OPENAI_INIT_CONFIG_JSON` | 可选。OpenAI SDK 的初始化配置 JSON |
-| `MIDSCENE_OPENAI_HTTP_PROXY` | 可选。HTTP/HTTPS 代理配置 (如 `http://127.0.0.1:8080` 或 `https://proxy.example.com:8080`)。这个选项优先级高于 `MIDSCENE_OPENAI_SOCKS_PROXY` |
-| `MIDSCENE_OPENAI_SOCKS_PROXY` | 可选。SOCKS 代理配置 (如 "socks5://127.0.0.1:1080") |
+| `MIDSCENE_MODEL_INIT_CONFIG_JSON` | 可选(推荐)。OpenAI SDK 的初始化配置 JSON |
+| `MIDSCENE_MODEL_HTTP_PROXY` | 可选(推荐)。HTTP/HTTPS 代理配置 (如 `http://127.0.0.1:8080` 或 `https://proxy.example.com:8080`)。这个选项优先级高于 `MIDSCENE_MODEL_SOCKS_PROXY` |
+| `MIDSCENE_MODEL_SOCKS_PROXY` | 可选(推荐)。SOCKS 代理配置 (如 "socks5://127.0.0.1:1080") |
+| `MIDSCENE_OPENAI_INIT_CONFIG_JSON` | 已弃用但仍兼容。建议使用 `MIDSCENE_MODEL_INIT_CONFIG_JSON` |
+| `MIDSCENE_OPENAI_HTTP_PROXY` | 已弃用但仍兼容。建议使用 `MIDSCENE_MODEL_HTTP_PROXY` |
+| `MIDSCENE_OPENAI_SOCKS_PROXY` | 已弃用但仍兼容。建议使用 `MIDSCENE_MODEL_SOCKS_PROXY` |
 | `MIDSCENE_PREFERRED_LANGUAGE` | 可选。模型响应的语言。如果当前时区是 GMT+8 则默认是 `Chinese`,否则是 `English` |
 | `MIDSCENE_REPLANNING_CYCLE_LIMIT` | 可选。最大重规划次数限制,默认是 10 |
 | `OPENAI_MAX_TOKENS` | 可选。模型响应的 max_tokens 数,默认是 2048 |
diff --git a/packages/shared/src/env/constants.ts b/packages/shared/src/env/constants.ts
index 10984367a..be7a9ae1e 100644
--- a/packages/shared/src/env/constants.ts
+++ b/packages/shared/src/env/constants.ts
@@ -13,6 +13,9 @@ import {
   MIDSCENE_MODEL_INIT_CONFIG_JSON,
   MIDSCENE_MODEL_NAME,
   MIDSCENE_MODEL_SOCKS_PROXY,
+  MIDSCENE_OPENAI_HTTP_PROXY,
+  MIDSCENE_OPENAI_INIT_CONFIG_JSON,
+  MIDSCENE_OPENAI_SOCKS_PROXY,
   MIDSCENE_PLANNING_LOCATOR_MODE,
   MIDSCENE_PLANNING_MODEL_API_KEY,
   MIDSCENE_PLANNING_MODEL_BASE_URL,
@@ -134,16 +137,16 @@ export const DEFAULT_MODEL_CONFIG_KEYS: IModelConfigKeys = {
 export const DEFAULT_MODEL_CONFIG_KEYS_LEGACY: IModelConfigKeys = {
   modelName: MIDSCENE_MODEL_NAME,
   /**
-   * proxy
+   * proxy - Uses legacy MIDSCENE_OPENAI_* variables for backward compatibility
    */
-  socksProxy: MIDSCENE_MODEL_SOCKS_PROXY,
-  httpProxy: MIDSCENE_MODEL_HTTP_PROXY,
+  socksProxy: MIDSCENE_OPENAI_SOCKS_PROXY,
+  httpProxy: MIDSCENE_OPENAI_HTTP_PROXY,
   /**
    * Model API - Uses legacy OPENAI_* variables for backward compatibility
    */
   openaiBaseURL: OPENAI_BASE_URL,
   openaiApiKey: OPENAI_API_KEY,
-  openaiExtraConfig: MIDSCENE_MODEL_INIT_CONFIG_JSON,
+  openaiExtraConfig: MIDSCENE_OPENAI_INIT_CONFIG_JSON,
   /**
    * Extra
    */
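The constants.ts change above is what lets the legacy configuration path keep reading the old variable names: lookups go through a key map rather than hard-coded names, so whichever constant the map holds decides which environment variable is read. A simplified sketch of that indirection (not the real `IModelConfigKeys` interface) follows.

```ts
// Simplified sketch of the key-map indirection; not the actual Midscene types.
const MIDSCENE_OPENAI_SOCKS_PROXY = 'MIDSCENE_OPENAI_SOCKS_PROXY';

interface ProxyKeys {
  socksProxy: string; // name of the env var to read, not its value
}

const legacyKeys: ProxyKeys = { socksProxy: MIDSCENE_OPENAI_SOCKS_PROXY };

// `provider` plays the role of process.env here.
const provider: Record<string, string | undefined> = {
  MIDSCENE_OPENAI_SOCKS_PROXY: 'socks5://127.0.0.1:1080',
};

// The reader never hard-codes a variable name; it goes through the key map,
// so pointing the legacy map at MIDSCENE_OPENAI_* restores reads of the old name.
const socksProxy = provider[legacyKeys.socksProxy];
console.log(socksProxy); // socks5://127.0.0.1:1080
```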
diff --git a/packages/shared/src/env/decide-model-config.ts b/packages/shared/src/env/decide-model-config.ts
index fa2a93022..54b17768d 100644
--- a/packages/shared/src/env/decide-model-config.ts
+++ b/packages/shared/src/env/decide-model-config.ts
@@ -12,7 +12,16 @@ import {
   PLANNING_MODEL_CONFIG_KEYS,
   VQA_MODEL_CONFIG_KEYS,
 } from './constants';
-import { MODEL_API_KEY, MODEL_BASE_URL } from './types';
+import {
+  MIDSCENE_MODEL_HTTP_PROXY,
+  MIDSCENE_MODEL_INIT_CONFIG_JSON,
+  MIDSCENE_MODEL_SOCKS_PROXY,
+  MIDSCENE_OPENAI_HTTP_PROXY,
+  MIDSCENE_OPENAI_INIT_CONFIG_JSON,
+  MIDSCENE_OPENAI_SOCKS_PROXY,
+  MODEL_API_KEY,
+  MODEL_BASE_URL,
+} from './types';
 import { getDebug } from '../logger';
 import { assert } from '../utils';
 
@@ -64,15 +73,16 @@ export const decideOpenaiSdkConfig = ({
   initDebugConfig();
   const debugLog = getDebug('ai:config');
 
-  const socksProxy = provider[keys.socksProxy];
-  const httpProxy = provider[keys.httpProxy];
   const vlMode = provider[keys.vlMode];
 
   debugLog('enter decideOpenaiSdkConfig with keys:', keys);
 
-  // Implement compatibility logic: prefer new variable names (MODEL_*), fallback to old ones (OPENAI_*)
+  // Implement compatibility logic: prefer new variable names (MIDSCENE_MODEL_*), fallback to old ones (MIDSCENE_OPENAI_*)
   let openaiBaseURL: string | undefined;
   let openaiApiKey: string | undefined;
+  let socksProxy: string | undefined;
+  let httpProxy: string | undefined;
+  let openaiExtraConfigStr: string | undefined;
 
   // When using legacy keys (OPENAI_BASE_URL, OPENAI_API_KEY), check for new names first
   if (keys.openaiBaseURL === 'OPENAI_BASE_URL') {
@@ -89,9 +99,36 @@ export const decideOpenaiSdkConfig = ({
     openaiApiKey = provider[keys.openaiApiKey];
   }
 
+  // Proxy compatibility: prefer MIDSCENE_MODEL_* over MIDSCENE_OPENAI_*
+  if (keys.socksProxy === MIDSCENE_OPENAI_SOCKS_PROXY) {
+    // Priority: MIDSCENE_MODEL_SOCKS_PROXY > MIDSCENE_OPENAI_SOCKS_PROXY
+    socksProxy =
+      provider[MIDSCENE_MODEL_SOCKS_PROXY] || provider[keys.socksProxy];
+  } else {
+    socksProxy = provider[keys.socksProxy];
+  }
+
+  if (keys.httpProxy === MIDSCENE_OPENAI_HTTP_PROXY) {
+    // Priority: MIDSCENE_MODEL_HTTP_PROXY > MIDSCENE_OPENAI_HTTP_PROXY
+    httpProxy =
+      provider[MIDSCENE_MODEL_HTTP_PROXY] || provider[keys.httpProxy];
+  } else {
+    httpProxy = provider[keys.httpProxy];
+  }
+
+  // Init config compatibility: prefer MIDSCENE_MODEL_INIT_CONFIG_JSON over MIDSCENE_OPENAI_INIT_CONFIG_JSON
+  if (keys.openaiExtraConfig === MIDSCENE_OPENAI_INIT_CONFIG_JSON) {
+    // Priority: MIDSCENE_MODEL_INIT_CONFIG_JSON > MIDSCENE_OPENAI_INIT_CONFIG_JSON
+    openaiExtraConfigStr =
+      provider[MIDSCENE_MODEL_INIT_CONFIG_JSON] ||
+      provider[keys.openaiExtraConfig];
+  } else {
+    openaiExtraConfigStr = provider[keys.openaiExtraConfig];
+  }
+
   const openaiExtraConfig = parseJson(
     keys.openaiExtraConfig,
-    provider[keys.openaiExtraConfig],
+    openaiExtraConfigStr,
   );
 
   valueAssert(openaiApiKey, keys.openaiApiKey);
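The three if/else blocks added above all encode the same rule: when the key map points at a deprecated `MIDSCENE_OPENAI_*` name, read the new `MIDSCENE_MODEL_*` variable first and fall back to the legacy one. A hypothetical helper, not part of this patch, that expresses the rule more compactly:

```ts
// Hypothetical helper, equivalent in spirit to the fallback blocks in the diff above.
type Provider = Record<string, string | undefined>;

function readWithFallback(
  provider: Provider,
  configuredKey: string, // the key the key map currently points at
  legacyKey: string, // deprecated MIDSCENE_OPENAI_* name
  preferredKey: string, // new MIDSCENE_MODEL_* name
): string | undefined {
  // Only apply the fallback when the legacy name is configured;
  // otherwise respect whatever key the caller configured.
  if (configuredKey === legacyKey) {
    return provider[preferredKey] || provider[legacyKey];
  }
  return provider[configuredKey];
}

// Example: both names set -> the new name wins.
const provider: Provider = {
  MIDSCENE_OPENAI_HTTP_PROXY: 'http://old-proxy:8080',
  MIDSCENE_MODEL_HTTP_PROXY: 'http://new-proxy:8080',
};
console.log(
  readWithFallback(
    provider,
    'MIDSCENE_OPENAI_HTTP_PROXY',
    'MIDSCENE_OPENAI_HTTP_PROXY',
    'MIDSCENE_MODEL_HTTP_PROXY',
  ),
); // http://new-proxy:8080
```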
diff --git a/packages/shared/src/env/types.ts b/packages/shared/src/env/types.ts
index 9c234124d..51de664c6 100644
--- a/packages/shared/src/env/types.ts
+++ b/packages/shared/src/env/types.ts
@@ -30,6 +30,19 @@ export const OPENAI_API_KEY = 'OPENAI_API_KEY';
  * @deprecated Use MODEL_BASE_URL instead. This is kept for backward compatibility.
  */
 export const OPENAI_BASE_URL = 'OPENAI_BASE_URL';
+/**
+ * @deprecated Use MIDSCENE_MODEL_INIT_CONFIG_JSON instead. This is kept for backward compatibility.
+ */
+export const MIDSCENE_OPENAI_INIT_CONFIG_JSON =
+  'MIDSCENE_OPENAI_INIT_CONFIG_JSON';
+/**
+ * @deprecated Use MIDSCENE_MODEL_HTTP_PROXY instead. This is kept for backward compatibility.
+ */
+export const MIDSCENE_OPENAI_HTTP_PROXY = 'MIDSCENE_OPENAI_HTTP_PROXY';
+/**
+ * @deprecated Use MIDSCENE_MODEL_SOCKS_PROXY instead. This is kept for backward compatibility.
+ */
+export const MIDSCENE_OPENAI_SOCKS_PROXY = 'MIDSCENE_OPENAI_SOCKS_PROXY';
 export const OPENAI_MAX_TOKENS = 'OPENAI_MAX_TOKENS';
 
 export const MIDSCENE_ADB_PATH = 'MIDSCENE_ADB_PATH';
@@ -182,6 +195,9 @@ export const MODEL_ENV_KEYS = [
   // model default legacy
   OPENAI_API_KEY,
   OPENAI_BASE_URL,
+  MIDSCENE_OPENAI_INIT_CONFIG_JSON,
+  MIDSCENE_OPENAI_HTTP_PROXY,
+  MIDSCENE_OPENAI_SOCKS_PROXY,
   MODEL_API_KEY,
   MODEL_BASE_URL,
   // VQA
@@ -311,12 +327,12 @@ export interface IModelConfigForDefaultLegacy {
   // model name
   [MIDSCENE_MODEL_NAME]: string;
   // proxy
-  [MIDSCENE_MODEL_SOCKS_PROXY]?: string;
-  [MIDSCENE_MODEL_HTTP_PROXY]?: string;
+  [MIDSCENE_OPENAI_SOCKS_PROXY]?: string;
+  [MIDSCENE_OPENAI_HTTP_PROXY]?: string;
   // OpenAI
   [OPENAI_BASE_URL]?: string;
   [OPENAI_API_KEY]?: string;
-  [MIDSCENE_MODEL_INIT_CONFIG_JSON]?: string;
+  [MIDSCENE_OPENAI_INIT_CONFIG_JSON]?: string;
   // extra
   [MIDSCENE_LOCATOR_MODE]?: TVlModeValues;
 }
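Since the deprecated names are now listed in `MODEL_ENV_KEYS`, they should also be accepted by `overrideAIConfig`. A hedged usage sketch follows; the import path below is an assumption and differs per integration, so point it at wherever your Midscene entry exposes `overrideAIConfig`.

```ts
// Assumption: the import path varies by integration; adjust it to the entry
// point that re-exports overrideAIConfig in your project.
import { overrideAIConfig } from '@midscene/web';

// Existing code that still passes the deprecated key keeps working.
overrideAIConfig({
  MIDSCENE_OPENAI_HTTP_PROXY: 'http://127.0.0.1:8080',
});

// The recommended key is MIDSCENE_MODEL_HTTP_PROXY; if both are set,
// the MIDSCENE_MODEL_* value wins under the fallback logic in this patch.
```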