@promptbook/openai 0.71.0-0 → 0.72.0-1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +5 -0
- package/esm/index.es.js +306 -28
- package/esm/index.es.js.map +1 -1
- package/esm/typings/src/_packages/cli.index.d.ts +4 -0
- package/esm/typings/src/_packages/core.index.d.ts +6 -2
- package/esm/typings/src/_packages/openai.index.d.ts +8 -0
- package/esm/typings/src/_packages/types.index.d.ts +2 -0
- package/esm/typings/src/execution/createPipelineExecutor/10-executePipeline.d.ts +1 -1
- package/esm/typings/src/execution/translation/automatic-translate/automatic-translators/LindatAutomaticTranslator.d.ts +1 -1
- package/esm/typings/src/execution/utils/addUsage.d.ts +0 -56
- package/esm/typings/src/execution/utils/usage-constants.d.ts +127 -0
- package/esm/typings/src/knowledge/dialogs/callback/CallbackInterfaceTools.d.ts +1 -1
- package/esm/typings/src/knowledge/dialogs/simple-prompt/SimplePromptInterfaceTools.d.ts +1 -1
- package/esm/typings/src/llm-providers/anthropic-claude/AnthropicClaudeExecutionTools.d.ts +3 -2
- package/esm/typings/src/llm-providers/azure-openai/AzureOpenAiExecutionTools.d.ts +3 -2
- package/esm/typings/src/llm-providers/mocked/MockedEchoLlmExecutionTools.d.ts +1 -1
- package/esm/typings/src/llm-providers/mocked/MockedFackedLlmExecutionTools.d.ts +1 -1
- package/esm/typings/src/llm-providers/openai/OpenAiAssistantExecutionTools.d.ts +37 -0
- package/esm/typings/src/llm-providers/openai/OpenAiAssistantExecutionToolsOptions.d.ts +14 -0
- package/esm/typings/src/llm-providers/openai/OpenAiExecutionTools.d.ts +12 -2
- package/esm/typings/src/llm-providers/openai/createOpenAiAssistantExecutionTools.d.ts +15 -0
- package/esm/typings/src/llm-providers/openai/register-configuration.d.ts +9 -0
- package/esm/typings/src/llm-providers/openai/register-constructor.d.ts +9 -0
- package/esm/typings/src/llm-providers/remote/RemoteLlmExecutionTools.d.ts +1 -1
- package/esm/typings/src/scripting/javascript/JavascriptEvalExecutionTools.d.ts +1 -1
- package/esm/typings/src/scripting/python/PythonExecutionTools.d.ts +1 -1
- package/esm/typings/src/scripting/typescript/TypescriptExecutionTools.d.ts +1 -1
- package/esm/typings/src/storage/files-storage/FilesStorage.d.ts +1 -1
- package/esm/typings/src/types/PipelineJson/KnowledgeSourceJson.d.ts +2 -9
- package/package.json +2 -2
- package/umd/index.umd.js +312 -31
- package/umd/index.umd.js.map +1 -1
package/umd/index.umd.js
CHANGED
@@ -1,20 +1,20 @@
 (function (global, factory) {
-    typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports, require('colors'), require('
-    typeof define === 'function' && define.amd ? define(['exports', 'colors', '
-    (global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global["promptbook-openai"] = {}, global.colors, global.
-})(this, (function (exports, colors,
+    typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports, require('colors'), require('spacetrim'), require('openai')) :
+    typeof define === 'function' && define.amd ? define(['exports', 'colors', 'spacetrim', 'openai'], factory) :
+    (global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global["promptbook-openai"] = {}, global.colors, global.spaceTrim, global.OpenAI));
+})(this, (function (exports, colors, spaceTrim, OpenAI) { 'use strict';

 function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }

 var colors__default = /*#__PURE__*/_interopDefaultLegacy(colors);
-var OpenAI__default = /*#__PURE__*/_interopDefaultLegacy(OpenAI);
 var spaceTrim__default = /*#__PURE__*/_interopDefaultLegacy(spaceTrim);
+var OpenAI__default = /*#__PURE__*/_interopDefaultLegacy(OpenAI);

 // ⚠️ WARNING: This code has been generated so that any manual changes will be overwritten
 /**
  * The version of the Promptbook library
  */
-var PROMPTBOOK_VERSION = '0.
+var PROMPTBOOK_VERSION = '0.72.0-0';
 // TODO:[main] !!!! List here all the versions and annotate + put into script

 /*! *****************************************************************************
@@ -155,46 +155,37 @@
 var $isRunningInWebWorker = new Function("\n try {\n if (typeof WorkerGlobalScope !== 'undefined' && self instanceof WorkerGlobalScope) {\n return true;\n } else {\n return false;\n }\n } catch (e) {\n return false;\n }\n");

 /**
- * This error indicates
+ * This error type indicates that some part of the code is not implemented yet
  *
  * @public exported from `@promptbook/core`
  */
-var
-    __extends(
-    function
-        var _this = _super.call(this, message) || this;
-        _this.name = '
-        Object.setPrototypeOf(_this,
+var NotYetImplementedError = /** @class */ (function (_super) {
+    __extends(NotYetImplementedError, _super);
+    function NotYetImplementedError(message) {
+        var _this = _super.call(this, spaceTrim.spaceTrim(function (block) { return "\n ".concat(block(message), "\n\n Note: This feature is not implemented yet but it will be soon.\n\n If you want speed up the implementation or just read more, look here:\n https://github.com/webgptorg/promptbook\n\n Or contact us on me@pavolhejny.com\n\n "); })) || this;
+        _this.name = 'NotYetImplementedError';
+        Object.setPrototypeOf(_this, NotYetImplementedError.prototype);
         return _this;
     }
-    return
+    return NotYetImplementedError;
 }(Error));

 /**
- * This error
+ * This error indicates errors during the execution of the pipeline
  *
  * @public exported from `@promptbook/core`
  */
-var
-    __extends(
-    function
-        var _this = _super.call(this,
-        _this.name = '
-        Object.setPrototypeOf(_this,
+var PipelineExecutionError = /** @class */ (function (_super) {
+    __extends(PipelineExecutionError, _super);
+    function PipelineExecutionError(message) {
+        var _this = _super.call(this, message) || this;
+        _this.name = 'PipelineExecutionError';
+        Object.setPrototypeOf(_this, PipelineExecutionError.prototype);
         return _this;
     }
-    return
+    return PipelineExecutionError;
 }(Error));

-/**
- * Get current date in ISO 8601 format
- *
- * @private internal utility
- */
-function getCurrentIsoDate() {
-    return new Date().toISOString();
-}
-
 /**
  * @@@
  *
@@ -229,6 +220,84 @@
  * TODO: [🧠] Is there a way how to meaningfully test this utility
  */

+/**
+ * Represents the usage with no resources consumed
+ *
+ * @public exported from `@promptbook/core`
+ */
+$deepFreeze({
+    price: { value: 0 },
+    input: {
+        tokensCount: { value: 0 },
+        charactersCount: { value: 0 },
+        wordsCount: { value: 0 },
+        sentencesCount: { value: 0 },
+        linesCount: { value: 0 },
+        paragraphsCount: { value: 0 },
+        pagesCount: { value: 0 },
+    },
+    output: {
+        tokensCount: { value: 0 },
+        charactersCount: { value: 0 },
+        wordsCount: { value: 0 },
+        sentencesCount: { value: 0 },
+        linesCount: { value: 0 },
+        paragraphsCount: { value: 0 },
+        pagesCount: { value: 0 },
+    },
+});
+/**
+ * Represents the usage with unknown resources consumed
+ *
+ * @public exported from `@promptbook/core`
+ */
+var UNCERTAIN_USAGE = $deepFreeze({
+    price: { value: 0, isUncertain: true },
+    input: {
+        tokensCount: { value: 0, isUncertain: true },
+        charactersCount: { value: 0, isUncertain: true },
+        wordsCount: { value: 0, isUncertain: true },
+        sentencesCount: { value: 0, isUncertain: true },
+        linesCount: { value: 0, isUncertain: true },
+        paragraphsCount: { value: 0, isUncertain: true },
+        pagesCount: { value: 0, isUncertain: true },
+    },
+    output: {
+        tokensCount: { value: 0, isUncertain: true },
+        charactersCount: { value: 0, isUncertain: true },
+        wordsCount: { value: 0, isUncertain: true },
+        sentencesCount: { value: 0, isUncertain: true },
+        linesCount: { value: 0, isUncertain: true },
+        paragraphsCount: { value: 0, isUncertain: true },
+        pagesCount: { value: 0, isUncertain: true },
+    },
+});
+
+/**
+ * Get current date in ISO 8601 format
+ *
+ * @private internal utility
+ */
+function getCurrentIsoDate() {
+    return new Date().toISOString();
+}
+
+/**
+ * This error type indicates that the error should not happen and its last check before crashing with some other error
+ *
+ * @public exported from `@promptbook/core`
+ */
+var UnexpectedError = /** @class */ (function (_super) {
+    __extends(UnexpectedError, _super);
+    function UnexpectedError(message) {
+        var _this = _super.call(this, spaceTrim.spaceTrim(function (block) { return "\n ".concat(block(message), "\n\n Note: This error should not happen.\n It's probbably a bug in the pipeline collection\n\n Please report issue:\n https://github.com/webgptorg/promptbook/issues\n\n Or contact us on me@pavolhejny.com\n\n "); })) || this;
+        _this.name = 'UnexpectedError';
+        Object.setPrototypeOf(_this, UnexpectedError.prototype);
+        return _this;
+    }
+    return UnexpectedError;
+}(Error));
+
 /**
  * Checks if the value is [🚉] serializable as JSON
  * If not, throws an UnexpectedError with a rich error message and tracking
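Both frozen usage constants in the hunk above share one value shape: every counter is a `{ value, isUncertain? }` pair. A minimal TypeScript sketch of that shape, inferred only from the literals shown here (the names `UncertainNumber`, `CountKey`, and `Usage` are illustrative, not taken from the package):

```ts
// Illustrative only: shape inferred from the two usage literals in the hunk above.
type UncertainNumber = {
    value: number;
    isUncertain?: true; // <- set in UNCERTAIN_USAGE, omitted in the zero-usage constant
};

type CountKey =
    | 'tokensCount'
    | 'charactersCount'
    | 'wordsCount'
    | 'sentencesCount'
    | 'linesCount'
    | 'paragraphsCount'
    | 'pagesCount';

type Usage = {
    price: UncertainNumber;
    input: Record<CountKey, UncertainNumber>;
    output: Record<CountKey, UncertainNumber>;
};
```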
@@ -1387,6 +1456,15 @@
             });
         });
     };
+    /**
+     * Create (sub)tools for calling OpenAI API Assistants
+     *
+     * @param assistantId Which assistant to use
+     * @returns Tools for calling OpenAI API Assistants with same token
+     */
+    OpenAiExecutionTools.prototype.createAssistantSubtools = function (assistantId) {
+        return new OpenAiAssistantExecutionTools(__assign(__assign({}, this.options), { assistantId: assistantId }));
+    };
     /**
      * Check the `options` passed to `constructor`
      */
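For orientation, a hypothetical usage sketch of the new `createAssistantSubtools` method added in the hunk above; the `apiKey` and `isVerbose` option names are assumptions about the OpenAI tools options, and the IDs are placeholders:

```ts
import { createOpenAiExecutionTools } from '@promptbook/openai';

// Base OpenAI tools configured once (options are handed to the OpenAI client)
const openAiTools = createOpenAiExecutionTools({
    apiKey: 'sk-...', // <- placeholder; assumed option name
    isVerbose: true,
});

// New in 0.72: derive assistant-scoped tools that reuse the same options/token,
// adding only the `assistantId` (see the method body above)
const assistantTools = openAiTools.createAssistantSubtools('asst_...'); // <- placeholder assistant ID
```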
@@ -1682,6 +1760,197 @@
  * TODO: [🧠][🌰] Allow to pass `title` for tracking purposes
  */

+/**
+ * Execution Tools for calling OpenAI API Assistants
+ *
+ * This is usefull for calling OpenAI API with a single assistant, for more wide usage use `OpenAiExecutionTools`.
+ *
+ * @public exported from `@promptbook/openai`
+ */
+var OpenAiAssistantExecutionTools = /** @class */ (function (_super) {
+    __extends(OpenAiAssistantExecutionTools, _super);
+    /**
+     * Creates OpenAI Execution Tools.
+     *
+     * @param options which are relevant are directly passed to the OpenAI client
+     */
+    function OpenAiAssistantExecutionTools(options) {
+        var _this = _super.call(this, options) || this;
+        _this.assistantId = options.assistantId;
+        return _this;
+    }
+    Object.defineProperty(OpenAiAssistantExecutionTools.prototype, "title", {
+        get: function () {
+            return 'OpenAI Assistant';
+        },
+        enumerable: false,
+        configurable: true
+    });
+    Object.defineProperty(OpenAiAssistantExecutionTools.prototype, "description", {
+        get: function () {
+            return 'Use single assistant provided by OpenAI';
+        },
+        enumerable: false,
+        configurable: true
+    });
+    /**
+     * Calls OpenAI API to use a chat model.
+     */
+    OpenAiAssistantExecutionTools.prototype.callChatModel = function (prompt) {
+        var _a, _b, _c;
+        return __awaiter(this, void 0, void 0, function () {
+            var content, parameters, modelRequirements /*, format*/, client, _d, _e, key, rawPromptContent, rawRequest, start, complete, stream, rawResponse, resultContent, usage;
+            var e_1, _f;
+            var _this = this;
+            return __generator(this, function (_g) {
+                switch (_g.label) {
+                    case 0:
+                        if (this.options.isVerbose) {
+                            console.info('💬 OpenAI callChatModel call', { prompt: prompt });
+                        }
+                        content = prompt.content, parameters = prompt.parameters, modelRequirements = prompt.modelRequirements;
+                        return [4 /*yield*/, this.getClient()];
+                    case 1:
+                        client = _g.sent();
+                        // TODO: [☂] Use here more modelRequirements
+                        if (modelRequirements.modelVariant !== 'CHAT') {
+                            throw new PipelineExecutionError('Use callChatModel only for CHAT variant');
+                        }
+                        try {
+                            // TODO: [👨👨👧👧] Remove:
+                            for (_d = __values(['maxTokens', 'modelName', 'seed', 'temperature']), _e = _d.next(); !_e.done; _e = _d.next()) {
+                                key = _e.value;
+                                if (modelRequirements[key] !== undefined) {
+                                    throw new NotYetImplementedError("In `OpenAiAssistantExecutionTools` you cannot specify `".concat(key, "`"));
+                                }
+                            }
+                        }
+                        catch (e_1_1) { e_1 = { error: e_1_1 }; }
+                        finally {
+                            try {
+                                if (_e && !_e.done && (_f = _d.return)) _f.call(_d);
+                            }
+                            finally { if (e_1) throw e_1.error; }
+                        }
+                        rawPromptContent = replaceParameters(content, __assign(__assign({}, parameters), { modelName: 'assistant' }));
+                        rawRequest = {
+                            // [👨👨👧👧] ...modelSettings,
+                            assistant_id: 'asst_CJCZzFCbBL0f2D4OWMXVTdBB',
+                            // <- Note: This is not a private information, just ID of the assistant which is accessible only with correct API key
+                            thread: {
+                                messages: [
+                                    // TODO: !!!!!! Unhardcode
+                                    // TODO: !!!!!! Allow threads to be passed
+                                    { role: 'user', content: 'What is the meaning of life? I want breathtaking speech.' },
+                                ],
+                            },
+                            // !!!!!! user: this.options.user,
+                        };
+                        start = getCurrentIsoDate();
+                        if (this.options.isVerbose) {
+                            console.info(colors__default["default"].bgWhite('rawRequest'), JSON.stringify(rawRequest, null, 4));
+                        }
+                        return [4 /*yield*/, client.beta.threads.createAndRunStream(rawRequest)];
+                    case 2:
+                        stream = _g.sent();
+                        stream.on('connect', function () {
+                            if (_this.options.isVerbose) {
+                                console.info('connect', stream.currentEvent);
+                            }
+                        });
+                        stream.on('messageDelta', function (messageDelta) {
+                            var _a;
+                            if (_this.options.isVerbose &&
+                                messageDelta &&
+                                messageDelta.content &&
+                                messageDelta.content[0] &&
+                                messageDelta.content[0].type === 'text') {
+                                console.info('messageDelta', (_a = messageDelta.content[0].text) === null || _a === void 0 ? void 0 : _a.value);
+                            }
+                            // TODO: !!!!!! report progress
+                        });
+                        stream.on('messageCreated', function (message) {
+                            if (_this.options.isVerbose) {
+                                console.info('messageCreated', message);
+                            }
+                        });
+                        stream.on('messageDone', function (message) {
+                            if (_this.options.isVerbose) {
+                                console.info('messageDone', message);
+                            }
+                        });
+                        return [4 /*yield*/, stream.finalMessages()];
+                    case 3:
+                        rawResponse = _g.sent();
+                        if (this.options.isVerbose) {
+                            console.info(colors__default["default"].bgWhite('rawResponse'), JSON.stringify(rawResponse, null, 4));
+                        }
+                        if (rawResponse.length !== 1) {
+                            throw new PipelineExecutionError("There is NOT 1 BUT ".concat(rawResponse.length, " finalMessages from OpenAI"));
+                        }
+                        if (rawResponse[0].content.length !== 1) {
+                            throw new PipelineExecutionError("There is NOT 1 BUT ".concat(rawResponse[0].content.length, " finalMessages content from OpenAI"));
+                        }
+                        if (((_a = rawResponse[0].content[0]) === null || _a === void 0 ? void 0 : _a.type) !== 'text') {
+                            throw new PipelineExecutionError("There is NOT 'text' BUT ".concat((_b = rawResponse[0].content[0]) === null || _b === void 0 ? void 0 : _b.type, " finalMessages content type from OpenAI"));
+                        }
+                        resultContent = (_c = rawResponse[0].content[0]) === null || _c === void 0 ? void 0 : _c.text.value;
+                        // <- TODO: !!!!!! There are also annotations, maybe use them
+                        // eslint-disable-next-line prefer-const
+                        complete = getCurrentIsoDate();
+                        usage = UNCERTAIN_USAGE;
+                        // TODO: !!!!!!> = computeOpenAiUsage(content, resultContent || '', rawResponse);
+                        if (resultContent === null) {
+                            throw new PipelineExecutionError('No response message from OpenAI');
+                        }
+                        return [2 /*return*/, $asDeeplyFrozenSerializableJson('OpenAiAssistantExecutionTools ChatPromptResult', {
+                                content: resultContent,
+                                modelName: 'assistant',
+                                // <- TODO: !!!!!! Can we detect really used model: rawResponse.model || modelName,
+                                timing: {
+                                    start: start,
+                                    complete: complete,
+                                },
+                                usage: usage,
+                                rawPromptContent: rawPromptContent,
+                                rawRequest: rawRequest,
+                                rawResponse: rawResponse,
+                                // <- [🗯]
+                            })];
+                }
+            });
+        });
+    };
+    return OpenAiAssistantExecutionTools;
+}(OpenAiExecutionTools));
+/**
+ * TODO: !!!!!! DO not use colors - can be used in browser
+ * TODO: [🧠][🧙♂️] Maybe there can be some wizzard for thoose who want to use just OpenAI
+ * TODO: Maybe make custom OpenAiError
+ * TODO: [🧠][🈁] Maybe use `isDeterministic` from options
+ * TODO: [🧠][🌰] Allow to pass `title` for tracking purposes
+ */
+
+/**
+ * Execution Tools for calling OpenAI API
+ *
+ * @public exported from `@promptbook/openai`
+ */
+var createOpenAiAssistantExecutionTools = Object.assign(function (options) {
+    // TODO: [🧠][main] !!!! If browser, auto add `dangerouslyAllowBrowser`
+    if (($isRunningInBrowser() || $isRunningInWebWorker()) && !options.dangerouslyAllowBrowser) {
+        options = __assign(__assign({}, options), { dangerouslyAllowBrowser: true });
+    }
+    return new OpenAiAssistantExecutionTools(options);
+}, {
+    packageName: '@promptbook/openai',
+    className: 'OpenAiAssistantExecutionTools',
+});
+/**
+ * TODO: [🦺] Is there some way how to put `packageName` and `className` on top and function definition on bottom?
+ * TODO: [🎶] Naming "constructor" vs "creator" vs "factory"
+ */
+
 /**
  * Execution Tools for calling OpenAI API
  *
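A hypothetical end-to-end sketch of the new `createOpenAiAssistantExecutionTools` factory; the `apiKey` option name and any prompt fields beyond `content`, `parameters`, and `modelRequirements` (the ones destructured in the method above) are assumptions, and the IDs are placeholders:

```ts
import { createOpenAiAssistantExecutionTools } from '@promptbook/openai';

const assistantTools = createOpenAiAssistantExecutionTools({
    apiKey: 'sk-...',        // <- placeholder; assumed option name, passed to the OpenAI client
    assistantId: 'asst_...', // <- placeholder; which OpenAI Assistant to call
    isVerbose: true,
});

// `callChatModel` accepts only CHAT-variant prompts; note that in this build the usage is
// reported as UNCERTAIN_USAGE and the thread message is still hard-coded (see the TODOs above).
const result = await assistantTools.callChatModel({
    content: 'What is the meaning of life? I want breathtaking speech.',
    parameters: {},
    modelRequirements: { modelVariant: 'CHAT' },
});

console.info(result.content);
```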
@@ -1770,14 +2039,26 @@
  * @public exported from `@promptbook/cli`
  */
 var _OpenAiRegistration = $llmToolsRegister.register(createOpenAiExecutionTools);
+/**
+ * @@@ registration2
+ *
+ * Note: [🏐] Configurations registrations are done in @@@ BUT constructor @@@
+ *
+ * @public exported from `@promptbook/openai`
+ * @public exported from `@promptbook/cli`
+ */
+var _OpenAiAssistantRegistration = $llmToolsRegister.register(createOpenAiAssistantExecutionTools);
 /**
  * TODO: [🎶] Naming "constructor" vs "creator" vs "factory"
  */

 exports.OPENAI_MODELS = OPENAI_MODELS;
+exports.OpenAiAssistantExecutionTools = OpenAiAssistantExecutionTools;
 exports.OpenAiExecutionTools = OpenAiExecutionTools;
 exports.PROMPTBOOK_VERSION = PROMPTBOOK_VERSION;
+exports._OpenAiAssistantRegistration = _OpenAiAssistantRegistration;
 exports._OpenAiRegistration = _OpenAiRegistration;
+exports.createOpenAiAssistantExecutionTools = createOpenAiAssistantExecutionTools;
 exports.createOpenAiExecutionTools = createOpenAiExecutionTools;

 Object.defineProperty(exports, '__esModule', { value: true });