opc-agent 4.0.42 → 4.0.43
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/studio/server.js +37 -27
- package/package.json +1 -1
- package/src/studio/server.ts +37 -27
package/dist/studio/server.js
CHANGED
@@ -733,37 +733,47 @@ class StudioServer {
         });
         const allMsgs = [{ role: 'system', content: agent.systemPrompt }, ...messages];
         const lastMsg = allMsgs[allMsgs.length - 1]?.content || '';
-        //
+        // Use createProvider directly to call LLM
         try {
-            const
-
-
-
-
-
-
-
-
-
+            const { createProvider } = require('../providers');
+            // Read OAD config for provider info
+            let providerName = agent.provider || process.env.OPC_LLM_PROVIDER;
+            if (!providerName) {
+                // Try reading from oad.yaml
+                try {
+                    const oadPath = (0, path_1.join)(this.config.agentDir, 'oad.yaml');
+                    if ((0, fs_1.existsSync)(oadPath)) {
+                        const yaml = require('js-yaml');
+                        const oad = yaml.load((0, fs_1.readFileSync)(oadPath, 'utf-8'));
+                        providerName = oad?.spec?.provider?.default;
+                    }
                 }
-
-
-
-
-
+                catch { }
+            }
+            providerName = providerName || 'openai';
+            const provider = createProvider(providerName, agent.model);
+            let fullText = '';
+            try {
+                for await (const chunk of provider.chatStream(allMsgs, agent.systemPrompt)) {
+                    const sseData = JSON.stringify({
+                        choices: [{ delta: { content: chunk }, index: 0 }],
+                    });
+                    res.write(`data: ${sseData}\n\n`);
+                    fullText += chunk;
                 }
-            }
-
-
-
-
-
-
-
-
-
+            }
+            catch (streamErr) {
+                if (!fullText) {
+                    // No content streamed yet, send error
+                    const errData = JSON.stringify({ error: streamErr.message });
+                    res.write(`data: ${errData}\n\n`);
+                }
+            }
+            res.write('data: [DONE]\n\n');
+            res.end();
         }
-        catch {
+        catch (err) {
+            // Fallback: try simulated response
             this.sendSimulatedResponse(res, lastMsg, agent);
         }
     }
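
The new code resolves the provider name in a fixed order: the agent's own provider field, then the OPC_LLM_PROVIDER environment variable, then spec.provider.default read from an oad.yaml in the agent directory, and finally a hard-coded 'openai' default. Below is a minimal sketch of that resolution order lifted out of the server for illustration; the AgentLike type and the example oad.yaml contents shown in the comment are assumptions, not part of the package.

import { join } from 'path';
import { existsSync, readFileSync } from 'fs';
import * as yaml from 'js-yaml';

// Hypothetical minimal agent shape; the real agent object in server.ts carries more fields.
interface AgentLike {
  provider?: string;
  model?: string;
}

// Mirrors the added lookup order: agent.provider -> OPC_LLM_PROVIDER -> oad.yaml -> 'openai'.
function resolveProviderName(agent: AgentLike, agentDir: string): string {
  let providerName = agent.provider || process.env.OPC_LLM_PROVIDER;
  if (!providerName) {
    try {
      const oadPath = join(agentDir, 'oad.yaml');
      if (existsSync(oadPath)) {
        // oad.yaml is expected to carry something like (example values only):
        //   spec:
        //     provider:
        //       default: openai
        const oad = yaml.load(readFileSync(oadPath, 'utf-8')) as any;
        providerName = oad?.spec?.provider?.default;
      }
    } catch {
      // A missing or unreadable oad.yaml is ignored, as in the patch.
    }
  }
  return providerName || 'openai';
}
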
package/package.json
CHANGED
package/src/studio/server.ts
CHANGED
@@ -735,35 +735,45 @@ class StudioServer {
       const allMsgs = [{ role: 'system', content: agent.systemPrompt }, ...messages];
       const lastMsg = allMsgs[allMsgs.length - 1]?.content || '';

-      //
+      // Use createProvider directly to call LLM
       try {
-        const
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+        const { createProvider } = require('../providers');
+        // Read OAD config for provider info
+        let providerName = agent.provider || process.env.OPC_LLM_PROVIDER;
+        if (!providerName) {
+          // Try reading from oad.yaml
+          try {
+            const oadPath = join(this.config.agentDir, 'oad.yaml');
+            if (existsSync(oadPath)) {
+              const yaml = require('js-yaml');
+              const oad = yaml.load(readFileSync(oadPath, 'utf-8'));
+              providerName = oad?.spec?.provider?.default;
+            }
+          } catch {}
+        }
+        providerName = providerName || 'openai';
+        const provider = createProvider(providerName, agent.model);
+
+        let fullText = '';
+        try {
+          for await (const chunk of provider.chatStream(allMsgs, agent.systemPrompt)) {
+            const sseData = JSON.stringify({
+              choices: [{ delta: { content: chunk }, index: 0 }],
+            });
+            res.write(`data: ${sseData}\n\n`);
+            fullText += chunk;
          }
-        })
-
-
-
-
-
-
-
-
-
-
+        } catch (streamErr: any) {
+          if (!fullText) {
+            // No content streamed yet, send error
+            const errData = JSON.stringify({ error: streamErr.message });
+            res.write(`data: ${errData}\n\n`);
+          }
+        }
+        res.write('data: [DONE]\n\n');
+        res.end();
+      } catch (err: any) {
+        // Fallback: try simulated response
        this.sendSimulatedResponse(res, lastMsg, agent);
      }
    }
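
On the wire, each chunk is written as an OpenAI-style SSE event of the form data: {"choices":[{"delta":{"content":"..."},"index":0}]}, an error event of the form data: {"error":"..."} is sent only if the stream fails before any content, and the stream ends with data: [DONE]. The following is a rough sketch of how a client could consume that stream; the /api/chat endpoint path and the request body shape are assumptions for illustration, only the event format comes from this diff.

// Rough client-side sketch; the endpoint path and payload shape are assumed,
// the SSE event format matches what the new server code writes.
async function readChatStream(messages: { role: string; content: string }[]): Promise<string> {
  const res = await fetch('/api/chat', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ messages }),
  });
  const reader = res.body!.getReader();
  const decoder = new TextDecoder();
  let buffer = '';
  let fullText = '';
  while (true) {
    const { done, value } = await reader.read();
    if (done) break;
    buffer += decoder.decode(value, { stream: true });
    // Events are separated by a blank line; each payload line starts with "data: ".
    const events = buffer.split('\n\n');
    buffer = events.pop() || '';
    for (const event of events) {
      const data = event.replace(/^data: /, '').trim();
      if (!data) continue;
      if (data === '[DONE]') return fullText;
      const parsed = JSON.parse(data);
      if (parsed.error) throw new Error(parsed.error);
      fullText += parsed.choices?.[0]?.delta?.content || '';
    }
  }
  return fullText;
}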