Skip to content

Commit 3a88e2f

Browse files
.env整改
1 parent f185944 commit 3a88e2f

File tree

3 files changed

+60
-19
lines changed

3 files changed

+60
-19
lines changed

backend/controller/conversation_api.py

Lines changed: 20 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,7 @@
1010

1111
from sqlalchemy.orm import identity
1212

13-
from ..utils.globals import set_language
13+
from ..utils.globals import set_language, apply_llm_env_defaults
1414
from ..utils.auth_utils import extract_and_store_api_key
1515
import server
1616
from aiohttp import web
@@ -28,6 +28,19 @@
2828
from ..utils.modelscope_gateway import ModelScopeGateway
2929
import folder_paths
3030

31+
32+
def get_llm_config_from_headers(request):
    """Extract LLM-related configuration from request headers.

    Returns a dict with the chat-LLM keys (``openai_api_key``,
    ``openai_base_url``) and the optional workflow-LLM keys used by
    tools/agents that need a different LLM; missing headers map to None.
    """
    headers = request.headers
    # config key -> HTTP header carrying its value
    header_names = {
        "openai_api_key": 'Openai-Api-Key',
        "openai_base_url": 'Openai-Base-Url',
        # Workflow LLM settings (optional, used by tools/agents that need a different LLM)
        "workflow_llm_api_key": 'Workflow-LLM-Api-Key',
        "workflow_llm_base_url": 'Workflow-LLM-Base-Url',
        "workflow_llm_model": 'Workflow-LLM-Model',
    }
    return {cfg_key: headers.get(name) for cfg_key, name in header_names.items()}
42+
43+
3144
# 全局下载进度存储
3245
download_progress = {}
3346
download_lock = threading.Lock()
@@ -240,14 +253,11 @@ async def invoke_chat(request):
240253
config = {
241254
"session_id": session_id,
242255
"workflow_checkpoint_id": workflow_checkpoint_id,
243-
"openai_api_key": request.headers.get('Openai-Api-Key'),
244-
"openai_base_url": request.headers.get('Openai-Base-Url'),
245-
# Workflow LLM settings (optional, used by tools/agents that need a different LLM)
246-
"workflow_llm_api_key": request.headers.get('Workflow-LLM-Api-Key'),
247-
"workflow_llm_base_url": request.headers.get('Workflow-LLM-Base-Url'),
248-
"workflow_llm_model": request.headers.get('Workflow-LLM-Model'),
256+
**get_llm_config_from_headers(request),
249257
"model_select": next((x['data'][0] for x in ext if x['type'] == 'model_select' and x.get('data')), None)
250258
}
259+
# Apply .env-based defaults for LLM-related fields (config > .env > code defaults)
260+
config = apply_llm_env_defaults(config)
251261

252262
# 设置请求上下文 - 这里建立context隔离
253263
set_request_context(session_id, workflow_checkpoint_id, config)
@@ -510,13 +520,10 @@ async def invoke_debug(request):
510520
config = {
511521
"session_id": session_id,
512522
"model": "gemini-2.5-flash", # Default model for debug agents
513-
"openai_api_key": request.headers.get('Openai-Api-Key'),
514-
"openai_base_url": request.headers.get('Openai-Base-Url'),
515-
# Workflow LLM settings (optional)
516-
"workflow_llm_api_key": request.headers.get('Workflow-LLM-Api-Key'),
517-
"workflow_llm_base_url": request.headers.get('Workflow-LLM-Base-Url'),
518-
"workflow_llm_model": request.headers.get('Workflow-LLM-Model'),
523+
**get_llm_config_from_headers(request),
519524
}
525+
# Apply .env-based defaults for LLM-related fields (config > .env > code defaults)
526+
config = apply_llm_env_defaults(config)
520527

521528
# 获取当前语言
522529
language = request.headers.get('Accept-Language', 'en')

backend/utils/globals.py

Lines changed: 39 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -103,27 +103,61 @@ def set_comfyui_copilot_api_key(api_key: str) -> None:
103103
# Default base URL for a locally running LMStudio OpenAI-compatible server.
LMSTUDIO_DEFAULT_BASE_URL = "http://localhost:1234/v1"
# Model used by workflow tools/agents when no override is supplied.
WORKFLOW_MODEL_NAME = os.getenv("WORKFLOW_MODEL_NAME", "us.anthropic.claude-sonnet-4-20250514-v1:0")
# WORKFLOW_MODEL_NAME = "gpt-5-2025-08-07-GlobalStandard"
# Keep this overridable via .env (this commit's theme is .env configurability);
# the hosted copilot server is only the fallback when no override is provided.
LLM_DEFAULT_BASE_URL = os.getenv("LLM_DEFAULT_BASE_URL", "https://comfyui-copilot-server.onrender.com/v1")

# LLM-related env defaults (used as fallback when request config does not provide values).
# `or None` normalizes empty-string env values to None so falsy checks stay simple.
OPENAI_API_KEY = os.getenv("OPENAI_API_KEY") or None
OPENAI_BASE_URL = os.getenv("OPENAI_BASE_URL") or None
WORKFLOW_LLM_API_KEY = os.getenv("WORKFLOW_LLM_API_KEY") or None
WORKFLOW_LLM_BASE_URL = os.getenv("WORKFLOW_LLM_BASE_URL") or None
# If WORKFLOW_LLM_MODEL is not set, fall back to WORKFLOW_MODEL_NAME
WORKFLOW_LLM_MODEL = os.getenv("WORKFLOW_LLM_MODEL") or WORKFLOW_MODEL_NAME
117+
def apply_llm_env_defaults(config: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
    """Fill in missing LLM-related settings from .env-derived module defaults.

    Precedence is: request config > .env > hard-coded defaults.
    The incoming ``config`` is never mutated; a shallow copy is returned.
    """
    merged: Dict[str, Any] = dict(config or {})

    # (config key, .env-derived fallback) pairs for the chat LLM
    # (OpenAI-compatible) and the optional workflow LLM used by tools/agents.
    fallbacks = (
        ("openai_api_key", OPENAI_API_KEY),
        ("openai_base_url", OPENAI_BASE_URL),
        ("workflow_llm_api_key", WORKFLOW_LLM_API_KEY),
        ("workflow_llm_base_url", WORKFLOW_LLM_BASE_URL),
        ("workflow_llm_model", WORKFLOW_LLM_MODEL),
    )
    for key, env_value in fallbacks:
        # Only fill truly missing/falsy entries, and only with a truthy fallback.
        if env_value and not merged.get(key):
            merged[key] = env_value

    return merged
107141

108142

109143
def is_lmstudio_url(base_url: str) -> bool:
    """Check if the base URL is likely LMStudio based on common patterns."""
    # Empty / None URL cannot be an LMStudio endpoint.
    if not base_url:
        return False

    lowered = base_url.lower()
    # Common LMStudio patterns (supporting various ports and configurations).
    markers = (
        "localhost:1234",   # Standard LMStudio port
        "127.0.0.1:1234",
        "0.0.0.0:1234",
        ":1234/v1",
        "localhost:1235",   # Alternative port some users might use
        "127.0.0.1:1235",
        "0.0.0.0:1235",
        ":1235/v1",
        "localhost/v1",     # Generic localhost patterns
        "127.0.0.1/v1",
    )
    for marker in markers:
        if marker in lowered:
            return True
    return False

pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
[project]
22
name = "ComfyUI-Copilot"
33
description = "Your Intelligent Assistant for Comfy-UI."
4-
version = "2.0.22"
4+
version = "2.0.23"
55
license = {file = "LICENSE"}
66

77
[project.urls]

0 commit comments

Comments
 (0)