hdsp-jupyter-extension 2.0.13__py3-none-any.whl → 2.0.15__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- agent_server/core/llm_service.py +3 -2
- agent_server/langchain/llm_factory.py +12 -6
- hdsp_agent_core/llm/service.py +3 -2
- {hdsp_jupyter_extension-2.0.13.data → hdsp_jupyter_extension-2.0.15.data}/data/share/jupyter/labextensions/hdsp-agent/build_log.json +1 -1
- {hdsp_jupyter_extension-2.0.13.data → hdsp_jupyter_extension-2.0.15.data}/data/share/jupyter/labextensions/hdsp-agent/package.json +2 -2
- jupyter_ext/labextension/static/lib_index_js.5449ba3c7e25177d2987.js → hdsp_jupyter_extension-2.0.15.data/data/share/jupyter/labextensions/hdsp-agent/static/lib_index_js.90a86cec4c50b0798fb2.js +18 -10
- hdsp_jupyter_extension-2.0.15.data/data/share/jupyter/labextensions/hdsp-agent/static/lib_index_js.90a86cec4c50b0798fb2.js.map +1 -0
- hdsp_jupyter_extension-2.0.13.data/data/share/jupyter/labextensions/hdsp-agent/static/remoteEntry.a8e0b064eb9b1c1ff463.js → hdsp_jupyter_extension-2.0.15.data/data/share/jupyter/labextensions/hdsp-agent/static/remoteEntry.256addd4d61251ca6763.js +3 -3
- jupyter_ext/labextension/static/remoteEntry.a8e0b064eb9b1c1ff463.js.map → hdsp_jupyter_extension-2.0.15.data/data/share/jupyter/labextensions/hdsp-agent/static/remoteEntry.256addd4d61251ca6763.js.map +1 -1
- {hdsp_jupyter_extension-2.0.13.dist-info → hdsp_jupyter_extension-2.0.15.dist-info}/METADATA +2 -1
- {hdsp_jupyter_extension-2.0.13.dist-info → hdsp_jupyter_extension-2.0.15.dist-info}/RECORD +41 -41
- jupyter_ext/_version.py +1 -1
- jupyter_ext/labextension/build_log.json +1 -1
- jupyter_ext/labextension/package.json +2 -2
- hdsp_jupyter_extension-2.0.13.data/data/share/jupyter/labextensions/hdsp-agent/static/lib_index_js.5449ba3c7e25177d2987.js → jupyter_ext/labextension/static/lib_index_js.90a86cec4c50b0798fb2.js +18 -10
- jupyter_ext/labextension/static/lib_index_js.90a86cec4c50b0798fb2.js.map +1 -0
- jupyter_ext/labextension/static/{remoteEntry.a8e0b064eb9b1c1ff463.js → remoteEntry.256addd4d61251ca6763.js} +3 -3
- hdsp_jupyter_extension-2.0.13.data/data/share/jupyter/labextensions/hdsp-agent/static/remoteEntry.a8e0b064eb9b1c1ff463.js.map → jupyter_ext/labextension/static/remoteEntry.256addd4d61251ca6763.js.map +1 -1
- hdsp_jupyter_extension-2.0.13.data/data/share/jupyter/labextensions/hdsp-agent/static/lib_index_js.5449ba3c7e25177d2987.js.map +0 -1
- jupyter_ext/labextension/static/lib_index_js.5449ba3c7e25177d2987.js.map +0 -1
- {hdsp_jupyter_extension-2.0.13.data → hdsp_jupyter_extension-2.0.15.data}/data/etc/jupyter/jupyter_server_config.d/hdsp_jupyter_extension.json +0 -0
- {hdsp_jupyter_extension-2.0.13.data → hdsp_jupyter_extension-2.0.15.data}/data/share/jupyter/labextensions/hdsp-agent/install.json +0 -0
- {hdsp_jupyter_extension-2.0.13.data → hdsp_jupyter_extension-2.0.15.data}/data/share/jupyter/labextensions/hdsp-agent/static/frontend_styles_index_js.037b3c8e5d6a92b63b16.js +0 -0
- {hdsp_jupyter_extension-2.0.13.data → hdsp_jupyter_extension-2.0.15.data}/data/share/jupyter/labextensions/hdsp-agent/static/frontend_styles_index_js.037b3c8e5d6a92b63b16.js.map +0 -0
- {hdsp_jupyter_extension-2.0.13.data → hdsp_jupyter_extension-2.0.15.data}/data/share/jupyter/labextensions/hdsp-agent/static/node_modules_emotion_use-insertion-effect-with-fallbacks_dist_emotion-use-insertion-effect-wi-3ba6b80.c095373419d05e6f141a.js +0 -0
- {hdsp_jupyter_extension-2.0.13.data → hdsp_jupyter_extension-2.0.15.data}/data/share/jupyter/labextensions/hdsp-agent/static/node_modules_emotion_use-insertion-effect-with-fallbacks_dist_emotion-use-insertion-effect-wi-3ba6b80.c095373419d05e6f141a.js.map +0 -0
- {hdsp_jupyter_extension-2.0.13.data → hdsp_jupyter_extension-2.0.15.data}/data/share/jupyter/labextensions/hdsp-agent/static/node_modules_emotion_use-insertion-effect-with-fallbacks_dist_emotion-use-insertion-effect-wi-3ba6b81.61e75fb98ecff46cf836.js +0 -0
- {hdsp_jupyter_extension-2.0.13.data → hdsp_jupyter_extension-2.0.15.data}/data/share/jupyter/labextensions/hdsp-agent/static/node_modules_emotion_use-insertion-effect-with-fallbacks_dist_emotion-use-insertion-effect-wi-3ba6b81.61e75fb98ecff46cf836.js.map +0 -0
- {hdsp_jupyter_extension-2.0.13.data → hdsp_jupyter_extension-2.0.15.data}/data/share/jupyter/labextensions/hdsp-agent/static/style.js +0 -0
- {hdsp_jupyter_extension-2.0.13.data → hdsp_jupyter_extension-2.0.15.data}/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_babel_runtime_helpers_esm_extends_js-node_modules_emotion_serialize_dist-051195.e2553aab0c3963b83dd7.js +0 -0
- {hdsp_jupyter_extension-2.0.13.data → hdsp_jupyter_extension-2.0.15.data}/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_babel_runtime_helpers_esm_extends_js-node_modules_emotion_serialize_dist-051195.e2553aab0c3963b83dd7.js.map +0 -0
- {hdsp_jupyter_extension-2.0.13.data → hdsp_jupyter_extension-2.0.15.data}/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_emotion_cache_dist_emotion-cache_browser_development_esm_js.24edcc52a1c014a8a5f0.js +0 -0
- {hdsp_jupyter_extension-2.0.13.data → hdsp_jupyter_extension-2.0.15.data}/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_emotion_cache_dist_emotion-cache_browser_development_esm_js.24edcc52a1c014a8a5f0.js.map +0 -0
- {hdsp_jupyter_extension-2.0.13.data → hdsp_jupyter_extension-2.0.15.data}/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_emotion_react_dist_emotion-react_browser_development_esm_js.19ecf6babe00caff6b8a.js +0 -0
- {hdsp_jupyter_extension-2.0.13.data → hdsp_jupyter_extension-2.0.15.data}/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_emotion_react_dist_emotion-react_browser_development_esm_js.19ecf6babe00caff6b8a.js.map +0 -0
- {hdsp_jupyter_extension-2.0.13.data → hdsp_jupyter_extension-2.0.15.data}/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_emotion_styled_dist_emotion-styled_browser_development_esm_js.661fb5836f4978a7c6e1.js +0 -0
- {hdsp_jupyter_extension-2.0.13.data → hdsp_jupyter_extension-2.0.15.data}/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_emotion_styled_dist_emotion-styled_browser_development_esm_js.661fb5836f4978a7c6e1.js.map +0 -0
- {hdsp_jupyter_extension-2.0.13.data → hdsp_jupyter_extension-2.0.15.data}/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_mui_material_index_js.985697e0162d8d088ca2.js +0 -0
- {hdsp_jupyter_extension-2.0.13.data → hdsp_jupyter_extension-2.0.15.data}/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_mui_material_index_js.985697e0162d8d088ca2.js.map +0 -0
- {hdsp_jupyter_extension-2.0.13.data → hdsp_jupyter_extension-2.0.15.data}/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_mui_material_utils_createSvgIcon_js.1f5038488cdfd8b3a85d.js +0 -0
- {hdsp_jupyter_extension-2.0.13.data → hdsp_jupyter_extension-2.0.15.data}/data/share/jupyter/labextensions/hdsp-agent/static/vendors-node_modules_mui_material_utils_createSvgIcon_js.1f5038488cdfd8b3a85d.js.map +0 -0
- {hdsp_jupyter_extension-2.0.13.dist-info → hdsp_jupyter_extension-2.0.15.dist-info}/WHEEL +0 -0
- {hdsp_jupyter_extension-2.0.13.dist-info → hdsp_jupyter_extension-2.0.15.dist-info}/licenses/LICENSE +0 -0
agent_server/core/llm_service.py
CHANGED
|
@@ -71,9 +71,10 @@ class LLMService:
|
|
|
71
71
|
def _get_vllm_config(self) -> tuple[str, str, Dict[str, str]]:
|
|
72
72
|
"""Get vLLM config: (model, url, headers)."""
|
|
73
73
|
cfg = self.config.get("vllm", {})
|
|
74
|
-
|
|
74
|
+
# User provides full base URL (e.g., https://openrouter.ai/api/v1)
|
|
75
|
+
endpoint = cfg.get("endpoint", "http://localhost:8000/v1")
|
|
75
76
|
model = cfg.get("model", "default")
|
|
76
|
-
url = f"{endpoint}/v1/chat/completions"
|
|
77
|
+
url = f"{endpoint}/chat/completions"
|
|
77
78
|
headers = {"Content-Type": "application/json"}
|
|
78
79
|
if cfg.get("apiKey"):
|
|
79
80
|
headers["Authorization"] = f"Bearer {cfg['apiKey']}"
|
|
@@ -93,18 +93,21 @@ def _create_vllm_llm(llm_config: Dict[str, Any], callbacks):
|
|
|
93
93
|
from langchain_openai import ChatOpenAI
|
|
94
94
|
|
|
95
95
|
vllm_config = llm_config.get("vllm", {})
|
|
96
|
-
|
|
96
|
+
# User provides full base URL (e.g., https://openrouter.ai/api/v1)
|
|
97
|
+
endpoint = vllm_config.get("endpoint", "http://localhost:8000/v1")
|
|
97
98
|
model = vllm_config.get("model", "default")
|
|
98
99
|
api_key = vllm_config.get("apiKey", "dummy")
|
|
100
|
+
use_responses_api = vllm_config.get("useResponsesApi", False)
|
|
99
101
|
|
|
100
|
-
logger.info(f"Creating vLLM LLM with model: {model}, endpoint: {endpoint}")
|
|
102
|
+
logger.info(f"Creating vLLM LLM with model: {model}, endpoint: {endpoint}, use_responses_api: {use_responses_api}")
|
|
101
103
|
|
|
102
104
|
return ChatOpenAI(
|
|
103
105
|
model=model,
|
|
104
106
|
api_key=api_key,
|
|
105
|
-
base_url=f"{endpoint}/v1",
|
|
107
|
+
base_url=endpoint, # Use endpoint as-is (no /v1 suffix added)
|
|
108
|
+
use_responses_api=use_responses_api,
|
|
106
109
|
temperature=0.0,
|
|
107
|
-
max_tokens=
|
|
110
|
+
max_tokens=32768,
|
|
108
111
|
callbacks=callbacks,
|
|
109
112
|
)
|
|
110
113
|
|
|
@@ -149,14 +152,17 @@ def create_summarization_llm(llm_config: Dict[str, Any]):
|
|
|
149
152
|
from langchain_openai import ChatOpenAI
|
|
150
153
|
|
|
151
154
|
vllm_config = llm_config.get("vllm", {})
|
|
152
|
-
|
|
155
|
+
# User provides full base URL (e.g., https://openrouter.ai/api/v1)
|
|
156
|
+
endpoint = vllm_config.get("endpoint", "http://localhost:8000/v1")
|
|
153
157
|
model = vllm_config.get("model", "default")
|
|
154
158
|
api_key = vllm_config.get("apiKey", "dummy")
|
|
159
|
+
use_responses_api = vllm_config.get("useResponsesApi", False)
|
|
155
160
|
|
|
156
161
|
return ChatOpenAI(
|
|
157
162
|
model=model,
|
|
158
163
|
api_key=api_key,
|
|
159
|
-
base_url=f"{endpoint}/v1",
|
|
164
|
+
base_url=endpoint, # Use endpoint as-is
|
|
165
|
+
use_responses_api=use_responses_api,
|
|
160
166
|
temperature=0.0,
|
|
161
167
|
)
|
|
162
168
|
except Exception as e:
|
hdsp_agent_core/llm/service.py
CHANGED
|
@@ -72,9 +72,10 @@ class LLMService:
|
|
|
72
72
|
def _get_vllm_config(self) -> tuple[str, str, Dict[str, str]]:
|
|
73
73
|
"""Get vLLM config: (model, url, headers)."""
|
|
74
74
|
cfg = self.config.get("vllm", {})
|
|
75
|
-
|
|
75
|
+
# User provides full base URL (e.g., https://openrouter.ai/api/v1)
|
|
76
|
+
endpoint = cfg.get("endpoint", "http://localhost:8000/v1")
|
|
76
77
|
model = cfg.get("model", "default")
|
|
77
|
-
url = f"{endpoint}/v1/chat/completions"
|
|
78
|
+
url = f"{endpoint}/chat/completions"
|
|
78
79
|
headers = {"Content-Type": "application/json"}
|
|
79
80
|
if cfg.get("apiKey"):
|
|
80
81
|
headers["Authorization"] = f"Bearer {cfg['apiKey']}"
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "hdsp-agent",
|
|
3
|
-
"version": "2.0.13",
|
|
3
|
+
"version": "2.0.15",
|
|
4
4
|
"description": "HDSP Agent JupyterLab Extension - Thin client for Agent Server",
|
|
5
5
|
"keywords": [
|
|
6
6
|
"jupyter",
|
|
@@ -132,7 +132,7 @@
|
|
|
132
132
|
}
|
|
133
133
|
},
|
|
134
134
|
"_build": {
|
|
135
|
-
"load": "static/remoteEntry.a8e0b064eb9b1c1ff463.js",
|
|
135
|
+
"load": "static/remoteEntry.256addd4d61251ca6763.js",
|
|
136
136
|
"extension": "./extension",
|
|
137
137
|
"style": "./style"
|
|
138
138
|
}
|
|
@@ -716,7 +716,7 @@ const ChatPanel = (0,react__WEBPACK_IMPORTED_MODULE_0__.forwardRef)(({ apiServic
|
|
|
716
716
|
}
|
|
717
717
|
else if (config.provider === 'vllm') {
|
|
718
718
|
console.log('vLLM Model:', config.vllm?.model || 'default');
|
|
719
|
-
console.log('vLLM Endpoint:', config.vllm?.endpoint || 'http://localhost:8000');
|
|
719
|
+
console.log('vLLM Endpoint:', config.vllm?.endpoint || 'http://localhost:8000/v1');
|
|
720
720
|
console.log('vLLM API Key:', config.vllm?.apiKey ? '✓ Configured' : '✗ Not configured');
|
|
721
721
|
}
|
|
722
722
|
else if (config.provider === 'openai') {
|
|
@@ -4219,9 +4219,10 @@ const SettingsPanel = ({ onClose, onSave, currentConfig }) => {
|
|
|
4219
4219
|
};
|
|
4220
4220
|
const [geminiModel, setGeminiModel] = (0,react__WEBPACK_IMPORTED_MODULE_0__.useState)(validateGeminiModel(initConfig.gemini?.model));
|
|
4221
4221
|
// vLLM settings
|
|
4222
|
-
const [vllmEndpoint, setVllmEndpoint] = (0,react__WEBPACK_IMPORTED_MODULE_0__.useState)(initConfig.vllm?.endpoint || 'http://localhost:8000');
|
|
4222
|
+
const [vllmEndpoint, setVllmEndpoint] = (0,react__WEBPACK_IMPORTED_MODULE_0__.useState)(initConfig.vllm?.endpoint || 'http://localhost:8000/v1');
|
|
4223
4223
|
const [vllmApiKey, setVllmApiKey] = (0,react__WEBPACK_IMPORTED_MODULE_0__.useState)(initConfig.vllm?.apiKey || '');
|
|
4224
4224
|
const [vllmModel, setVllmModel] = (0,react__WEBPACK_IMPORTED_MODULE_0__.useState)(initConfig.vllm?.model || 'meta-llama/Llama-2-7b-chat-hf');
|
|
4225
|
+
const [vllmUseResponsesApi, setVllmUseResponsesApi] = (0,react__WEBPACK_IMPORTED_MODULE_0__.useState)(Boolean(initConfig.vllm?.useResponsesApi));
|
|
4225
4226
|
// OpenAI settings
|
|
4226
4227
|
const [openaiApiKey, setOpenaiApiKey] = (0,react__WEBPACK_IMPORTED_MODULE_0__.useState)(initConfig.openai?.apiKey || '');
|
|
4227
4228
|
const [openaiModel, setOpenaiModel] = (0,react__WEBPACK_IMPORTED_MODULE_0__.useState)(initConfig.openai?.model || 'gpt-4');
|
|
@@ -4261,9 +4262,10 @@ const SettingsPanel = ({ onClose, onSave, currentConfig }) => {
|
|
|
4261
4262
|
setGeminiApiKeys(['']);
|
|
4262
4263
|
}
|
|
4263
4264
|
setGeminiModel(validateGeminiModel(currentConfig.gemini?.model));
|
|
4264
|
-
setVllmEndpoint(currentConfig.vllm?.endpoint || 'http://localhost:8000');
|
|
4265
|
+
setVllmEndpoint(currentConfig.vllm?.endpoint || 'http://localhost:8000/v1');
|
|
4265
4266
|
setVllmApiKey(currentConfig.vllm?.apiKey || '');
|
|
4266
4267
|
setVllmModel(currentConfig.vllm?.model || 'meta-llama/Llama-2-7b-chat-hf');
|
|
4268
|
+
setVllmUseResponsesApi(Boolean(currentConfig.vllm?.useResponsesApi));
|
|
4267
4269
|
setOpenaiApiKey(currentConfig.openai?.apiKey || '');
|
|
4268
4270
|
setOpenaiModel(currentConfig.openai?.model || 'gpt-4');
|
|
4269
4271
|
setSystemPrompt(currentConfig.systemPrompt || (0,_services_ApiKeyManager__WEBPACK_IMPORTED_MODULE_10__.getDefaultLLMConfig)().systemPrompt || '');
|
|
@@ -4292,7 +4294,8 @@ const SettingsPanel = ({ onClose, onSave, currentConfig }) => {
|
|
|
4292
4294
|
vllm: {
|
|
4293
4295
|
endpoint: vllmEndpoint,
|
|
4294
4296
|
apiKey: vllmApiKey,
|
|
4295
|
-
model: vllmModel
|
|
4297
|
+
model: vllmModel,
|
|
4298
|
+
useResponsesApi: vllmUseResponsesApi
|
|
4296
4299
|
},
|
|
4297
4300
|
openai: {
|
|
4298
4301
|
apiKey: openaiApiKey,
|
|
@@ -4445,16 +4448,21 @@ const SettingsPanel = ({ onClose, onSave, currentConfig }) => {
|
|
|
4445
4448
|
react__WEBPACK_IMPORTED_MODULE_0___default().createElement("option", { value: "gemini-2.5-flash" }, "Gemini 2.5 Flash"),
|
|
4446
4449
|
react__WEBPACK_IMPORTED_MODULE_0___default().createElement("option", { value: "gemini-2.5-pro" }, "Gemini 2.5 Pro"))))),
|
|
4447
4450
|
provider === 'vllm' && (react__WEBPACK_IMPORTED_MODULE_0___default().createElement("div", { className: "jp-agent-settings-provider" },
|
|
4448
|
-
react__WEBPACK_IMPORTED_MODULE_0___default().createElement("h3", null, "vLLM \uC124\uC815"),
|
|
4451
|
+
react__WEBPACK_IMPORTED_MODULE_0___default().createElement("h3", null, "vLLM / OpenAI Compatible \uC124\uC815"),
|
|
4449
4452
|
react__WEBPACK_IMPORTED_MODULE_0___default().createElement("div", { className: "jp-agent-settings-group" },
|
|
4450
|
-
react__WEBPACK_IMPORTED_MODULE_0___default().createElement("label", { className: "jp-agent-settings-label" }, "\
|
|
4451
|
-
react__WEBPACK_IMPORTED_MODULE_0___default().createElement("input", { type: "text", className: "jp-agent-settings-input", value: vllmEndpoint, onChange: (e) => setVllmEndpoint(e.target.value), placeholder: "
|
|
4453
|
+
react__WEBPACK_IMPORTED_MODULE_0___default().createElement("label", { className: "jp-agent-settings-label" }, "API Base URL (\uC804\uCCB4 \uACBD\uB85C \uC785\uB825)"),
|
|
4454
|
+
react__WEBPACK_IMPORTED_MODULE_0___default().createElement("input", { type: "text", className: "jp-agent-settings-input", value: vllmEndpoint, onChange: (e) => setVllmEndpoint(e.target.value), placeholder: "https://openrouter.ai/api/v1" })),
|
|
4452
4455
|
react__WEBPACK_IMPORTED_MODULE_0___default().createElement("div", { className: "jp-agent-settings-group" },
|
|
4453
4456
|
react__WEBPACK_IMPORTED_MODULE_0___default().createElement("label", { className: "jp-agent-settings-label" }, "API \uD0A4 (\uC120\uD0DD\uC0AC\uD56D)"),
|
|
4454
4457
|
react__WEBPACK_IMPORTED_MODULE_0___default().createElement("input", { type: "password", className: "jp-agent-settings-input", value: vllmApiKey, onChange: (e) => setVllmApiKey(e.target.value), placeholder: "API \uD0A4\uAC00 \uD544\uC694\uD55C \uACBD\uC6B0 \uC785\uB825" })),
|
|
4455
4458
|
react__WEBPACK_IMPORTED_MODULE_0___default().createElement("div", { className: "jp-agent-settings-group" },
|
|
4456
4459
|
react__WEBPACK_IMPORTED_MODULE_0___default().createElement("label", { className: "jp-agent-settings-label" }, "\uBAA8\uB378 \uC774\uB984"),
|
|
4457
|
-
react__WEBPACK_IMPORTED_MODULE_0___default().createElement("input", { type: "text", className: "jp-agent-settings-input", value: vllmModel, onChange: (e) => setVllmModel(e.target.value), placeholder: "meta-llama/Llama-2-7b-chat-hf" }))
|
|
4460
|
+
react__WEBPACK_IMPORTED_MODULE_0___default().createElement("input", { type: "text", className: "jp-agent-settings-input", value: vllmModel, onChange: (e) => setVllmModel(e.target.value), placeholder: "meta-llama/Llama-2-7b-chat-hf" })),
|
|
4461
|
+
react__WEBPACK_IMPORTED_MODULE_0___default().createElement("div", { className: "jp-agent-settings-group" },
|
|
4462
|
+
react__WEBPACK_IMPORTED_MODULE_0___default().createElement("label", { className: "jp-agent-settings-checkbox" },
|
|
4463
|
+
react__WEBPACK_IMPORTED_MODULE_0___default().createElement("input", { type: "checkbox", checked: vllmUseResponsesApi, onChange: (e) => setVllmUseResponsesApi(e.target.checked) }),
|
|
4464
|
+
react__WEBPACK_IMPORTED_MODULE_0___default().createElement("span", null, "Use Responses API (OpenAI \uC804\uC6A9, /chat/completions \uB300\uC2E0 /responses \uC0AC\uC6A9)")),
|
|
4465
|
+
react__WEBPACK_IMPORTED_MODULE_0___default().createElement("small", { style: { color: '#666', marginTop: '4px', display: 'block', marginLeft: '24px' } }, "OpenAI \uACF5\uC2DD API\uC5D0\uC11C\uB9CC \uC9C0\uC6D0\uB429\uB2C8\uB2E4. OpenRouter \uB4F1 \uC11C\uB4DC\uD30C\uD2F0\uC5D0\uC11C\uB294 \uC791\uB3D9\uD558\uC9C0 \uC54A\uC744 \uC218 \uC788\uC2B5\uB2C8\uB2E4.")))),
|
|
4458
4466
|
provider === 'openai' && (react__WEBPACK_IMPORTED_MODULE_0___default().createElement("div", { className: "jp-agent-settings-provider" },
|
|
4459
4467
|
react__WEBPACK_IMPORTED_MODULE_0___default().createElement("h3", null, "OpenAI \uC124\uC815"),
|
|
4460
4468
|
react__WEBPACK_IMPORTED_MODULE_0___default().createElement("div", { className: "jp-agent-settings-group" },
|
|
@@ -8609,7 +8617,7 @@ function getDefaultLLMConfig() {
|
|
|
8609
8617
|
model: 'gpt-4'
|
|
8610
8618
|
},
|
|
8611
8619
|
vllm: {
|
|
8612
|
-
endpoint: 'http://localhost:8000',
|
|
8620
|
+
endpoint: 'http://localhost:8000/v1',
|
|
8613
8621
|
model: 'default'
|
|
8614
8622
|
},
|
|
8615
8623
|
systemPrompt: prompts?.single || DEFAULT_LANGCHAIN_SYSTEM_PROMPT,
|
|
@@ -11795,4 +11803,4 @@ __webpack_require__.r(__webpack_exports__);
|
|
|
11795
11803
|
/***/ }
|
|
11796
11804
|
|
|
11797
11805
|
}]);
|
|
11798
|
-
//# sourceMappingURL=lib_index_js.5449ba3c7e25177d2987.js.map
|
|
11806
|
+
//# sourceMappingURL=lib_index_js.90a86cec4c50b0798fb2.js.map
|