openlayer 0.0.1-alpha.0 → 0.0.1-alpha.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (43)
  1. package/README.md +10 -6
  2. package/_shims/web-runtime.js +1 -1
  3. package/_shims/web-runtime.js.map +1 -1
  4. package/_shims/web-runtime.mjs +1 -1
  5. package/_shims/web-runtime.mjs.map +1 -1
  6. package/lib/core/cli.d.ts +15 -1
  7. package/lib/core/cli.d.ts.map +1 -1
  8. package/lib/core/cli.js +3 -4
  9. package/lib/core/cli.js.map +1 -1
  10. package/lib/core/cli.mjs +4 -3
  11. package/lib/core/cli.mjs.map +1 -1
  12. package/lib/core/openai-monitor.d.ts +54 -0
  13. package/lib/core/openai-monitor.d.ts.map +1 -0
  14. package/lib/core/openai-monitor.js +376 -0
  15. package/lib/core/openai-monitor.js.map +1 -0
  16. package/lib/core/openai-monitor.mjs +371 -0
  17. package/lib/core/openai-monitor.mjs.map +1 -0
  18. package/package.json +2 -2
  19. package/src/_shims/web-runtime.ts +1 -1
  20. package/src/lib/core/cli.ts +21 -5
  21. package/src/lib/core/index.d.ts +2 -0
  22. package/src/lib/core/openai-monitor.ts +470 -0
  23. package/src/lib/index.d.ts +1 -0
  24. package/src/version.ts +1 -1
  25. package/version.d.ts +1 -1
  26. package/version.js +1 -1
  27. package/version.js.map +1 -1
  28. package/version.mjs +1 -1
  29. package/version.mjs.map +1 -1
  30. package/lib/core/index.d.ts +0 -237
  31. package/lib/core/index.d.ts.map +0 -1
  32. package/lib/core/index.js +0 -635
  33. package/lib/core/index.js.map +0 -1
  34. package/lib/core/index.mjs +0 -627
  35. package/lib/core/index.mjs.map +0 -1
  36. package/lib/core/run.d.ts +0 -14
  37. package/lib/core/run.d.ts.map +0 -1
  38. package/lib/core/run.js +0 -3
  39. package/lib/core/run.js.map +0 -1
  40. package/lib/core/run.mjs +0 -2
  41. package/lib/core/run.mjs.map +0 -1
  42. package/src/lib/core/index.ts +0 -1067
  43. package/src/lib/core/run.ts +0 -14
@@ -0,0 +1,371 @@
1
+ import OpenAI from 'openai';
2
+ const OpenAIPricing = {
3
+ 'babbage-002': {
4
+ input: 0.0004,
5
+ output: 0.0004,
6
+ },
7
+ 'davinci-002': {
8
+ input: 0.002,
9
+ output: 0.002,
10
+ },
11
+ 'gpt-3.5-turbo': {
12
+ input: 0.0005,
13
+ output: 0.0015,
14
+ },
15
+ 'gpt-3.5-turbo-0125': {
16
+ input: 0.0005,
17
+ output: 0.0015,
18
+ },
19
+ 'gpt-3.5-turbo-0301': {
20
+ input: 0.0015,
21
+ output: 0.002,
22
+ },
23
+ 'gpt-3.5-turbo-0613': {
24
+ input: 0.0015,
25
+ output: 0.002,
26
+ },
27
+ 'gpt-3.5-turbo-1106': {
28
+ input: 0.001,
29
+ output: 0.002,
30
+ },
31
+ 'gpt-3.5-turbo-16k-0613': {
32
+ input: 0.003,
33
+ output: 0.004,
34
+ },
35
+ 'gpt-3.5-turbo-instruct': {
36
+ input: 0.0015,
37
+ output: 0.002,
38
+ },
39
+ 'gpt-4': {
40
+ input: 0.03,
41
+ output: 0.06,
42
+ },
43
+ 'gpt-4-0125-preview': {
44
+ input: 0.01,
45
+ output: 0.03,
46
+ },
47
+ 'gpt-4-0314': {
48
+ input: 0.03,
49
+ output: 0.06,
50
+ },
51
+ 'gpt-4-0613': {
52
+ input: 0.03,
53
+ output: 0.06,
54
+ },
55
+ 'gpt-4-1106-preview': {
56
+ input: 0.01,
57
+ output: 0.03,
58
+ },
59
+ 'gpt-4-1106-vision-preview': {
60
+ input: 0.01,
61
+ output: 0.03,
62
+ },
63
+ 'gpt-4-32k': {
64
+ input: 0.06,
65
+ output: 0.12,
66
+ },
67
+ 'gpt-4-32k-0314': {
68
+ input: 0.06,
69
+ output: 0.12,
70
+ },
71
+ 'gpt-4-32k-0613': {
72
+ input: 0.03,
73
+ output: 0.06,
74
+ },
75
+ };
76
/**
 * Wraps an OpenAI client so that completions, chat completions, and
 * assistant thread runs are transparently logged to an Openlayer
 * inference pipeline (output, latency, token counts, estimated cost).
 * All monitoring failures are logged to the console and never propagate
 * to the caller; the OpenAI response is always returned untouched.
 */
class OpenAIMonitor {
  /**
   * Constructs an OpenAIMonitor instance.
   * @param {OpenAIMonitorConstructorProps} props - The configuration properties for the OpenAI and Openlayer clients.
   */
  constructor({ openAiApiKey, openlayerClient, openlayerInferencePipelineId, }) {
    // Default mapping between the fields logged below and Openlayer column names.
    this.defaultConfig = {
      costColumnName: 'cost',
      inferenceIdColumnName: 'id',
      latencyColumnName: 'latency',
      numOfTokenColumnName: 'tokens',
      outputColumnName: 'output',
      timestampColumnName: 'timestamp',
    };
    // Estimates the USD cost of a request from the per-1K-token price table.
    // Returns undefined for models missing from OpenAIPricing.
    this.cost = (model, inputTokens, outputTokens) => {
      const pricing = OpenAIPricing[model];
      const inputCost = typeof pricing === 'undefined' ? undefined : (inputTokens / 1000) * pricing.input;
      const outputCost = typeof pricing === 'undefined' ? undefined : (outputTokens / 1000) * pricing.output;
      return typeof pricing === 'undefined' ? undefined : (inputCost ?? 0) + (outputCost ?? 0);
    };
    // Converts a chat message list into an Openlayer prompt template:
    // user turns become `{{ message_<i> }}` placeholders (indexed by overall
    // position in the list), other turns keep their literal content.
    this.chatCompletionPrompt = (fromMessages) => fromMessages.map(({ content, role }, i) => ({
      content: role === 'user' ? `{{ message_${i} }}`
        : content === null || typeof content === 'undefined' ? ''
        : content,
      role,
    }));
    // Flattens a paginated assistant-thread message list into
    // { content, role } pairs; image parts contribute their file id,
    // text parts their value, anything else an empty string.
    this.threadPrompt = async (fromMessages) => {
      const messages = [];
      for await (const page of fromMessages.iterPages()) {
        messages.push(...page.getPaginatedItems());
      }
      return messages
        .map(({ content, role }) => content.map((item) => ({
          content: (() => {
            switch (item.type) {
              case 'image_file':
                return item.image_file.file_id;
              case 'text':
                return item.text.value;
              default:
                return '';
            }
          })(),
          role,
        })))
        .flat();
    };
    // Pairs the `{{ ... }}` placeholder names extracted from the prompt
    // template with the corresponding user-message contents, positionally.
    this.inputVariables = (fromPrompt, andMessages) => {
      const inputVariableNames = fromPrompt
        .filter(({ role }) => role === 'user')
        .map(({ content }) => String(content).replace(/{{\s*|\s*}}/g, ''));
      const inputVariables = andMessages
        .filter(({ role }) => role === 'user')
        .map(({ content }) => content);
      const inputVariablesMap = inputVariableNames.reduce((acc, name, i) => ({ ...acc, [name]: inputVariables[i] }), {});
      return { inputVariableNames, inputVariables, inputVariablesMap };
    };
    /**
     * Creates a chat completion using the OpenAI client and streams the result to Openlayer.
     * Monitoring is skipped when no inference pipeline id was configured.
     * @param {ChatCompletionCreateParams} body - The parameters for creating a chat completion.
     * @param {RequestOptions} [options] - Optional request options.
     * @param {Openlayer.RequestOptions<any> | undefined} [additionalLogs] - Optional metadata logs to include with the request sent to Openlayer.
     * @returns {Promise<ChatCompletion | Stream<ChatCompletionChunk>>} Promise of a ChatCompletion or a Stream
     * @throws {Error} Throws errors from the OpenAI client.
     */
    this.createChatCompletion = async (body, options, additionalLogs) => {
      // Start a timer to measure latency
      const startTime = Date.now();
      // Accumulate output for streamed responses
      let streamedOutput = '';
      const response = await this.openAIClient.chat.completions.create(body, options);
      if (this.openlayerInferencePipelineId.length > 0) {
        try {
          const prompt = this.chatCompletionPrompt(body.messages);
          const { inputVariableNames, inputVariablesMap } = this.inputVariables(prompt, body.messages);
          const config = {
            ...this.defaultConfig,
            inputVariableNames,
            prompt,
          };
          if (body.stream) {
            const streamedResponse = response;
            // NOTE(review): iterating here consumes the stream, so callers
            // receive an already-exhausted iterator — confirm whether callers
            // are expected to re-read the streamed response.
            for await (const chunk of streamedResponse) {
              const [choice] = chunk.choices;
              // Process each chunk - for example, accumulate input data
              const chunkOutput = choice?.delta.content ?? '';
              streamedOutput += chunkOutput;
            }
            const endTime = Date.now();
            const latency = endTime - startTime;
            // Awaited so a rejected publish is handled by the surrounding
            // catch instead of becoming an unhandled promise rejection.
            await this.openlayerClient.inferencePipelines.data.stream(this.openlayerInferencePipelineId, {
              config,
              rows: [
                {
                  latency,
                  output: streamedOutput,
                  timestamp: startTime,
                  ...inputVariablesMap,
                  ...additionalLogs,
                },
              ],
            });
          }
          else {
            const nonStreamedResponse = response;
            // Handle regular (non-streamed) response
            const endTime = Date.now();
            const latency = endTime - startTime;
            const [choice] = nonStreamedResponse.choices;
            const output = choice?.message.content;
            const tokens = nonStreamedResponse.usage?.total_tokens ?? 0;
            const inputTokens = nonStreamedResponse.usage?.prompt_tokens ?? 0;
            const outputTokens = nonStreamedResponse.usage?.completion_tokens ?? 0;
            const cost = this.cost(nonStreamedResponse.model, inputTokens, outputTokens);
            if (typeof output === 'string') {
              await this.openlayerClient.inferencePipelines.data.stream(this.openlayerInferencePipelineId, {
                config,
                rows: [
                  {
                    cost,
                    latency,
                    model: nonStreamedResponse.model,
                    output,
                    timestamp: startTime,
                    tokens,
                    ...inputVariablesMap,
                    ...additionalLogs,
                  },
                ],
              });
            }
            else {
              console.error('No output received from OpenAI.');
            }
          }
        }
        catch (error) {
          // Monitoring must never break the actual completion call.
          console.error(error);
        }
      }
      return response;
    };
    /**
     * Creates a completion using the OpenAI client and streams the result to Openlayer.
     * Monitoring is skipped when no inference pipeline id was configured.
     * @param {CompletionCreateParams} body - The parameters for creating a completion.
     * @param {RequestOptions} [options] - Optional request options.
     * @param {Openlayer.RequestOptions<any> | undefined} [additionalLogs] - Optional metadata logs to include with the request sent to Openlayer.
     * @returns {Promise<Completion | Stream<Completion>>} Promise that resolves to a Completion or a Stream.
     * @throws {Error} Throws errors from the OpenAI client.
     */
    this.createCompletion = async (body, options, additionalLogs) => {
      if (!body.prompt) {
        console.error('No prompt provided.');
      }
      // Start a timer to measure latency
      const startTime = Date.now();
      // Accumulate output and tokens data for streamed responses
      let streamedModel = body.model;
      let streamedOutput = '';
      let streamedTokens = 0;
      let streamedInputTokens = 0;
      let streamedOutputTokens = 0;
      const response = await this.openAIClient.completions.create(body, options);
      if (this.openlayerInferencePipelineId.length > 0) {
        try {
          const config = {
            ...this.defaultConfig,
            inputVariableNames: ['input'],
          };
          if (body.stream) {
            const streamedResponse = response;
            // NOTE(review): iterating here consumes the stream (see
            // createChatCompletion above).
            for await (const chunk of streamedResponse) {
              const [choice] = chunk.choices;
              // Process each chunk - for example, accumulate input data
              streamedModel = chunk.model;
              // Fixed: the original `choice?.text.trim()` appended the string
              // "undefined" when a chunk carried no choice and threw when
              // `text` was missing; fall back to '' like the chat path does.
              streamedOutput += choice?.text?.trim() ?? '';
              streamedTokens += chunk.usage?.total_tokens ?? 0;
              streamedInputTokens += chunk.usage?.prompt_tokens ?? 0;
              streamedOutputTokens += chunk.usage?.completion_tokens ?? 0;
            }
            const endTime = Date.now();
            const latency = endTime - startTime;
            const cost = this.cost(streamedModel, streamedInputTokens, streamedOutputTokens);
            // Awaited so publish failures are handled by the catch below.
            await this.openlayerClient.inferencePipelines.data.stream(this.openlayerInferencePipelineId, {
              config,
              rows: [
                {
                  cost,
                  input: body.prompt,
                  latency,
                  output: streamedOutput,
                  timestamp: startTime,
                  tokens: streamedTokens,
                  ...additionalLogs,
                },
              ],
            });
          }
          else {
            const nonStreamedResponse = response;
            const [choice] = nonStreamedResponse.choices;
            // Handle regular (non-streamed) response
            const endTime = Date.now();
            const latency = endTime - startTime;
            const tokens = nonStreamedResponse.usage?.total_tokens ?? 0;
            const inputTokens = nonStreamedResponse.usage?.prompt_tokens ?? 0;
            const outputTokens = nonStreamedResponse.usage?.completion_tokens ?? 0;
            const cost = this.cost(nonStreamedResponse.model, inputTokens, outputTokens);
            await this.openlayerClient.inferencePipelines.data.stream(this.openlayerInferencePipelineId, {
              config,
              rows: [
                {
                  cost,
                  input: body.prompt,
                  latency,
                  output: choice?.text ?? '',
                  timestamp: startTime,
                  tokens,
                  ...additionalLogs,
                },
              ],
            });
          }
        }
        catch (error) {
          // Monitoring must never break the actual completion call.
          console.error(error);
        }
      }
      return response;
    };
    this.openlayerInferencePipelineId = openlayerInferencePipelineId;
    this.openlayerClient = openlayerClient;
    // dangerouslyAllowBrowser mirrors the published build so the monitor
    // also works in browser contexts; the key is still exposed there.
    this.openAIClient = new OpenAI({
      apiKey: openAiApiKey,
      dangerouslyAllowBrowser: true,
    });
  }
  /**
   * Monitor a run from an OpenAI assistant.
   * Once the run is completed, the thread data is published to Openlayer,
   * along with the latency, cost, and number of tokens used.
   * No-op unless the run status is 'completed' and a pipeline id is set.
   * @param {Run} run - The run created by the OpenAI assistant.
   * @param {Openlayer.RequestOptions<any> | undefined} [additionalLogs] - Optional metadata logs to include with the request sent to Openlayer.
   * @returns {Promise<void>} A promise that resolves when the run data has been successfully published to Openlayer.
   */
  async monitorThreadRun(run, additionalLogs) {
    if (run.status !== 'completed' || this.openlayerInferencePipelineId.length === 0) {
      return;
    }
    try {
      const { assistant_id, completed_at, created_at, model, thread_id,
      // @ts-ignore
      usage, } = run;
      // @ts-ignore
      const { completion_tokens, prompt_tokens, total_tokens } = typeof usage === 'undefined' || typeof usage !== 'object' || usage === null ? {} : usage;
      const cost = this.cost(model, prompt_tokens, completion_tokens);
      // completed_at/created_at are epoch seconds, so the difference is
      // scaled to milliseconds to match the Date.now() latencies above.
      const latency = completed_at === null || created_at === null || isNaN(completed_at) || isNaN(created_at) ?
        undefined
        : (completed_at - created_at) * 1000;
      const messages = await this.openAIClient.beta.threads.messages.list(thread_id, { order: 'asc' });
      const populatedPrompt = await this.threadPrompt(messages);
      const prompt = this.chatCompletionPrompt(populatedPrompt);
      const { inputVariableNames, inputVariablesMap } = this.inputVariables(prompt, populatedPrompt);
      const config = {
        ...this.defaultConfig,
        // The last message is treated as the output, so it is excluded
        // from the prompt template.
        inputVariableNames,
        prompt: prompt.slice(0, prompt.length - 1),
      };
      const output = prompt[prompt.length - 1]?.content;
      const resolvedOutput = typeof output === 'string' ? output
        : typeof output === 'undefined' || output === null ? ''
        : `${output}`;
      // NOTE(review): timestamp here is run.created_at (epoch seconds),
      // while the completion paths log Date.now() milliseconds — confirm
      // which unit the pipeline expects.
      await this.openlayerClient.inferencePipelines.data.stream(this.openlayerInferencePipelineId, {
        config,
        rows: [
          {
            cost,
            latency,
            openai_assistant_id: assistant_id,
            openai_thread_id: thread_id,
            output: resolvedOutput,
            timestamp: run.created_at,
            tokens: total_tokens,
            ...inputVariablesMap,
            ...additionalLogs,
          },
        ],
      });
    }
    catch (error) {
      console.error('Error logging thread run:', error);
    }
  }
}
370
+ export default OpenAIMonitor;
371
+ //# sourceMappingURL=openai-monitor.mjs.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"openai-monitor.mjs","sourceRoot":"","sources":["../../src/lib/core/openai-monitor.ts"],"names":[],"mappings":"OAAO,MAAM,MAAM,QAAQ;AA0B3B,MAAM,aAAa,GAA+B;IAChD,aAAa,EAAE;QACb,KAAK,EAAE,MAAM;QACb,MAAM,EAAE,MAAM;KACf;IACD,aAAa,EAAE;QACb,KAAK,EAAE,KAAK;QACZ,MAAM,EAAE,KAAK;KACd;IACD,eAAe,EAAE;QACf,KAAK,EAAE,MAAM;QACb,MAAM,EAAE,MAAM;KACf;IACD,oBAAoB,EAAE;QACpB,KAAK,EAAE,MAAM;QACb,MAAM,EAAE,MAAM;KACf;IACD,oBAAoB,EAAE;QACpB,KAAK,EAAE,MAAM;QACb,MAAM,EAAE,KAAK;KACd;IACD,oBAAoB,EAAE;QACpB,KAAK,EAAE,MAAM;QACb,MAAM,EAAE,KAAK;KACd;IACD,oBAAoB,EAAE;QACpB,KAAK,EAAE,KAAK;QACZ,MAAM,EAAE,KAAK;KACd;IACD,wBAAwB,EAAE;QACxB,KAAK,EAAE,KAAK;QACZ,MAAM,EAAE,KAAK;KACd;IACD,wBAAwB,EAAE;QACxB,KAAK,EAAE,MAAM;QACb,MAAM,EAAE,KAAK;KACd;IACD,OAAO,EAAE;QACP,KAAK,EAAE,IAAI;QACX,MAAM,EAAE,IAAI;KACb;IACD,oBAAoB,EAAE;QACpB,KAAK,EAAE,IAAI;QACX,MAAM,EAAE,IAAI;KACb;IACD,YAAY,EAAE;QACZ,KAAK,EAAE,IAAI;QACX,MAAM,EAAE,IAAI;KACb;IACD,YAAY,EAAE;QACZ,KAAK,EAAE,IAAI;QACX,MAAM,EAAE,IAAI;KACb;IACD,oBAAoB,EAAE;QACpB,KAAK,EAAE,IAAI;QACX,MAAM,EAAE,IAAI;KACb;IACD,2BAA2B,EAAE;QAC3B,KAAK,EAAE,IAAI;QACX,MAAM,EAAE,IAAI;KACb;IACD,WAAW,EAAE;QACX,KAAK,EAAE,IAAI;QACX,MAAM,EAAE,IAAI;KACb;IACD,gBAAgB,EAAE;QAChB,KAAK,EAAE,IAAI;QACX,MAAM,EAAE,IAAI;KACb;IACD,gBAAgB,EAAE;QAChB,KAAK,EAAE,IAAI;QACX,MAAM,EAAE,IAAI;KACb;CACF,CAAC;AAEF,MAAM,aAAa;IAgBjB;;;OAGG;IACH,YAAY,EACV,YAAY,EACZ,eAAe,EACf,4BAA4B,GACE;QAjBxB,kBAAa,GAA+D;YAClF,cAAc,EAAE,MAAM;YACtB,qBAAqB,EAAE,IAAI;YAC3B,iBAAiB,EAAE,SAAS;YAC5B,oBAAoB,EAAE,QAAQ;YAC9B,gBAAgB,EAAE,QAAQ;YAC1B,mBAAmB,EAAE,WAAW;SACjC,CAAC;QAoBM,SAAI,GAAG,CAAC,KAAa,EAAE,WAAmB,EAAE,YAAoB,EAAE,EAAE;YAC1E,MAAM,OAAO,GAAwB,aAAa,CAAC,KAAK,CAAC,CAAC;YAC1D,MAAM,SAAS,GAAG,OAAO,OAAO,KAAK,WAAW,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,WAAW,GAAG,IAAI,CAAC,GAAG,OAAO,CAAC,KAAK,CAAC;YACpG,MAAM,UAAU,GAAG,OAAO,OAAO,KAAK,WAAW,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,YAAY,GAAG,IAAI,CAAC,GAAG,OAAO,CAAC,MAAM,CAAC;YACvG,OAAO,OAAO,OAAO,KAAK,WAAW,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,SAAS,IAAI,CAAC,CAAC,GAAG,CAAC,
UAAU,IAAI,CAAC,CAAC,CAAC;QAC3F,CAAC,CAAC;QAEM,yBAAoB,GAAG,CAC7B,YAA0C,EAC2B,EAAE,CACvE,YAAY,CAAC,GAAG,CAAC,CAAC,EAAE,OAAO,EAAE,IAAI,EAAE,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC;YAC1C,OAAO,EACL,IAAI,KAAK,MAAM,CAAC,CAAC,CAAC,cAAc,CAAC,KAAK;gBACtC,CAAC,CAAC,OAAO,KAAK,IAAI,IAAI,OAAO,OAAO,KAAK,WAAW,CAAC,CAAC,CAAC,EAAE;oBACzD,CAAC,CAAC,OAAO;YACX,IAAI;SACL,CAAC,CAAC,CAAC;QAEE,iBAAY,GAAG,KAAK,EAC1B,YAAkC,EACK,EAAE;YACzC,MAAM,QAAQ,GAA+B,EAAE,CAAC;YAChD,IAAI,KAAK,EAAE,MAAM,IAAI,IAAI,YAAY,CAAC,SAAS,EAAE,EAAE;gBACjD,QAAQ,CAAC,IAAI,CAAC,GAAG,IAAI,CAAC,iBAAiB,EAAE,CAAC,CAAC;aAC5C;YAED,OAAO,QAAQ;iBACZ,GAAG,CAAC,CAAC,EAAE,OAAO,EAAE,IAAI,EAAE,EAAE,EAAE,CACzB,OAAO,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC;gBACrB,OAAO,EAAE,CAAC,GAAG,EAAE;oBACb,QAAQ,IAAI,CAAC,IAAI,EAAE;wBACjB,KAAK,YAAY;4BACf,OAAO,IAAI,CAAC,UAAU,CAAC,OAAO,CAAC;wBACjC,KAAK,MAAM;4BACT,OAAO,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC;wBACzB;4BACE,OAAO,EAAE,CAAC;qBACb;gBACH,CAAC,CAAC,EAAE;gBACJ,IAAI;aACL,CAAC,CAAC,CACJ;iBACA,IAAI,EAAE,CAAC;QACZ,CAAC,CAAC;QAEM,mBAAc,GAAG,CACvB,UAA+E,EAC/E,WAAyC,EACzC,EAAE;YACF,MAAM,kBAAkB,GAAG,UAAU;iBAClC,MAAM,CAAC,CAAC,EAAE,IAAI,EAAE,EAAE,EAAE,CAAC,IAAI,KAAK,MAAM,CAAC;iBACrC,GAAG,CAAC,CAAC,EAAE,OAAO,EAAE,EAAE,EAAE,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,OAAO,CAAC,cAAc,EAAE,EAAE,CAAC,CAAa,CAAC;YACjF,MAAM,cAAc,GAAG,WAAW;iBAC/B,MAAM,CAAC,CAAC,EAAE,IAAI,EAAE,EAAE,EAAE,CAAC,IAAI,KAAK,MAAM,CAAC;iBACrC,GAAG,CAAC,CAAC,EAAE,OAAO,EAAE,EAAE,EAAE,CAAC,OAAO,CAAa,CAAC;YAC7C,MAAM,iBAAiB,GAAG,kBAAkB,CAAC,MAAM,CACjD,CAAC,GAAG,EAAE,IAAI,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,EAAE,GAAG,GAAG,EAAE,CAAC,IAAI,CAAC,EAAE,cAAc,CAAC,CAAC,CAAC,EAAE,CAAC,EACzD,EAAE,CACH,CAAC;YAEF,OAAO,EAAE,kBAAkB,EAAE,cAAc,EAAE,iBAAiB,EAAE,CAAC;QACnE,CAAC,CAAC;QAEF;;;;;;;WAOG;QACI,yBAAoB,GAAG,KAAK,EACjC,IAAgC,EAChC,OAAwB,EACxB,cAA0D,EACH,EAAE;YACzD,mCAAmC;YACnC,MAAM,SAAS,GAAG,IAAI,CAAC,GAAG,EAAE,CAAC;YAC7B,2CAA2C;YAC3C,IAAI,cAAc,GAAG,EAAE,CAAC;YAExB,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,YAAY,CAAC,IAAI,CAAC,WAAW,CAAC,MAAM,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC;YAEhF,IAAI,IAAI,CAAC,4BAA4B,CAAC
,MAAM,GAAG,CAAC,EAAE;gBAChD,IAAI;oBACF,MAAM,MAAM,GAAG,IAAI,CAAC,oBAAoB,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;oBACxD,MAAM,EAAE,kBAAkB,EAAE,iBAAiB,EAAE,GAAG,IAAI,CAAC,cAAc,CAAC,MAAM,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC;oBAE7F,MAAM,MAAM,GAA+D;wBACzE,GAAG,IAAI,CAAC,aAAa;wBACrB,kBAAkB;wBAClB,MAAM;qBACP,CAAC;oBAEF,IAAI,IAAI,CAAC,MAAM,EAAE;wBACf,MAAM,gBAAgB,GAAG,QAAuC,CAAC;wBAEjE,IAAI,KAAK,EAAE,MAAM,KAAK,IAAI,gBAAgB,EAAE;4BAC1C,MAAM,CAAC,MAAM,CAAC,GAAG,KAAK,CAAC,OAAO,CAAC;4BAC/B,0DAA0D;4BAC1D,MAAM,WAAW,GAAG,MAAM,EAAE,KAAK,CAAC,OAAO,IAAI,EAAE,CAAC;4BAChD,cAAc,IAAI,WAAW,CAAC;yBAC/B;wBAED,MAAM,OAAO,GAAG,IAAI,CAAC,GAAG,EAAE,CAAC;wBAC3B,MAAM,OAAO,GAAG,OAAO,GAAG,SAAS,CAAC;wBAEpC,IAAI,CAAC,eAAe,CAAC,kBAAkB,CAAC,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,4BAA4B,EAAE;4BACrF,MAAM;4BACN,IAAI,EAAE;gCACJ;oCACE,OAAO;oCACP,MAAM,EAAE,cAAc;oCACtB,SAAS,EAAE,SAAS;oCACpB,GAAG,iBAAiB;oCACpB,GAAG,cAAc;iCAClB;6BACF;yBACF,CAAC,CAAC;qBACJ;yBAAM;wBACL,MAAM,mBAAmB,GAAG,QAA0B,CAAC;wBACvD,yCAAyC;wBACzC,MAAM,OAAO,GAAG,IAAI,CAAC,GAAG,EAAE,CAAC;wBAC3B,MAAM,OAAO,GAAG,OAAO,GAAG,SAAS,CAAC;wBACpC,MAAM,CAAC,MAAM,CAAC,GAAG,mBAAmB,CAAC,OAAO,CAAC;wBAC7C,MAAM,MAAM,GAAG,MAAM,EAAE,OAAO,CAAC,OAAO,CAAC;wBACvC,MAAM,MAAM,GAAG,mBAAmB,CAAC,KAAK,EAAE,YAAY,IAAI,CAAC,CAAC;wBAC5D,MAAM,WAAW,GAAG,mBAAmB,CAAC,KAAK,EAAE,aAAa,IAAI,CAAC,CAAC;wBAClE,MAAM,YAAY,GAAG,mBAAmB,CAAC,KAAK,EAAE,iBAAiB,IAAI,CAAC,CAAC;wBACvE,MAAM,IAAI,GAAG,IAAI,CAAC,IAAI,CAAC,mBAAmB,CAAC,KAAK,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC;wBAE7E,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;4BAC9B,IAAI,CAAC,eAAe,CAAC,kBAAkB,CAAC,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,4BAA4B,EAAE;gCACrF,MAAM;gCACN,IAAI,EAAE;oCACJ;wCACE,IAAI;wCACJ,OAAO;wCACP,KAAK,EAAE,mBAAmB,CAAC,KAAK;wCAChC,MAAM;wCACN,SAAS,EAAE,SAAS;wCACpB,MAAM;wCACN,GAAG,iBAAiB;wCACpB,GAAG,cAAc;qCAClB;iCACF;6BACF,CAAC,CAAC;yBACJ;6BAAM;4BACL,OAAO,CAAC,KAAK,CAAC,iCAAiC,CAAC,CAAC;yBAClD;qBACF;iBACF;gBAAC,OAAO,KAAK,EAAE;oBACd,OAAO,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;iBACtB;aACF;YAED,OAAO,QAAQ,CAAC;QAClB,CAAC,CAAC;QAEF;;;;;;;WAOG;QACI,qBAAgB,GAAG,KAAK,EAC7B,IAA4B,EAC5B,OAAwB,EACxB,cAA0D,E
AChB,EAAE;YAC5C,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE;gBAChB,OAAO,CAAC,KAAK,CAAC,qBAAqB,CAAC,CAAC;aACtC;YAED,mCAAmC;YACnC,MAAM,SAAS,GAAG,IAAI,CAAC,GAAG,EAAE,CAAC;YAE7B,2DAA2D;YAC3D,IAAI,aAAa,GAAG,IAAI,CAAC,KAAK,CAAC;YAC/B,IAAI,cAAc,GAAG,EAAE,CAAC;YACxB,IAAI,cAAc,GAAG,CAAC,CAAC;YACvB,IAAI,mBAAmB,GAAG,CAAC,CAAC;YAC5B,IAAI,oBAAoB,GAAG,CAAC,CAAC;YAE7B,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,YAAY,CAAC,WAAW,CAAC,MAAM,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC;YAE3E,IAAI,IAAI,CAAC,4BAA4B,CAAC,MAAM,GAAG,CAAC,EAAE;gBAChD,IAAI;oBACF,MAAM,MAAM,GAA+D;wBACzE,GAAG,IAAI,CAAC,aAAa;wBACrB,kBAAkB,EAAE,CAAC,OAAO,CAAC;qBAC9B,CAAC;oBAEF,IAAI,IAAI,CAAC,MAAM,EAAE;wBACf,MAAM,gBAAgB,GAAG,QAA8B,CAAC;wBAExD,IAAI,KAAK,EAAE,MAAM,KAAK,IAAI,gBAAgB,EAAE;4BAC1C,MAAM,CAAC,MAAM,CAAC,GAAG,KAAK,CAAC,OAAO,CAAC;4BAC/B,0DAA0D;4BAC1D,aAAa,GAAG,KAAK,CAAC,KAAK,CAAC;4BAC5B,cAAc,IAAI,MAAM,EAAE,IAAI,CAAC,IAAI,EAAE,CAAC;4BACtC,cAAc,IAAI,KAAK,CAAC,KAAK,EAAE,YAAY,IAAI,CAAC,CAAC;4BACjD,mBAAmB,IAAI,KAAK,CAAC,KAAK,EAAE,aAAa,IAAI,CAAC,CAAC;4BACvD,oBAAoB,IAAI,KAAK,CAAC,KAAK,EAAE,iBAAiB,IAAI,CAAC,CAAC;yBAC7D;wBAED,MAAM,OAAO,GAAG,IAAI,CAAC,GAAG,EAAE,CAAC;wBAC3B,MAAM,OAAO,GAAG,OAAO,GAAG,SAAS,CAAC;wBACpC,MAAM,IAAI,GAAG,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,mBAAmB,EAAE,oBAAoB,CAAC,CAAC;wBAEjF,IAAI,CAAC,eAAe,CAAC,kBAAkB,CAAC,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,4BAA4B,EAAE;4BACrF,MAAM;4BACN,IAAI,EAAE;gCACJ;oCACE,IAAI;oCACJ,KAAK,EAAE,IAAI,CAAC,MAAM;oCAClB,OAAO;oCACP,MAAM,EAAE,cAAc;oCACtB,SAAS,EAAE,SAAS;oCACpB,MAAM,EAAE,cAAc;oCACtB,GAAG,cAAc;iCAClB;6BACF;yBACF,CAAC,CAAC;qBACJ;yBAAM;wBACL,MAAM,mBAAmB,GAAG,QAAsB,CAAC;wBACnD,MAAM,CAAC,MAAM,CAAC,GAAG,mBAAmB,CAAC,OAAO,CAAC;wBAC7C,yCAAyC;wBACzC,MAAM,OAAO,GAAG,IAAI,CAAC,GAAG,EAAE,CAAC;wBAC3B,MAAM,OAAO,GAAG,OAAO,GAAG,SAAS,CAAC;wBACpC,MAAM,MAAM,GAAG,mBAAmB,CAAC,KAAK,EAAE,YAAY,IAAI,CAAC,CAAC;wBAC5D,MAAM,WAAW,GAAG,mBAAmB,CAAC,KAAK,EAAE,aAAa,IAAI,CAAC,CAAC;wBAClE,MAAM,YAAY,GAAG,mBAAmB,CAAC,KAAK,EAAE,iBAAiB,IAAI,CAAC,CAAC;wBACvE,MAAM,IAAI,GAAG,IAAI,CAAC,IAAI,CAAC,mBAAmB,CAAC,KAAK,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC;wBAE7E,IAAI,CAAC,eAAe,CAA
C,kBAAkB,CAAC,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,4BAA4B,EAAE;4BACrF,MAAM;4BACN,IAAI,EAAE;gCACJ;oCACE,IAAI;oCACJ,KAAK,EAAE,IAAI,CAAC,MAAM;oCAClB,OAAO;oCACP,MAAM,EAAE,MAAM,EAAE,IAAI,IAAI,EAAE;oCAC1B,SAAS,EAAE,SAAS;oCACpB,MAAM;oCACN,GAAG,cAAc;iCAClB;6BACF;yBACF,CAAC,CAAC;qBACJ;iBACF;gBAAC,OAAO,KAAK,EAAE;oBACd,OAAO,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;iBACtB;aACF;YAED,OAAO,QAAQ,CAAC;QAClB,CAAC,CAAC;QA3QA,IAAI,CAAC,4BAA4B,GAAG,4BAA4B,CAAC;QACjE,IAAI,CAAC,eAAe,GAAG,eAAe,CAAC;QAEvC,IAAI,CAAC,YAAY,GAAG,IAAI,MAAM,CAAC;YAC7B,MAAM,EAAE,YAAY;YACpB,uBAAuB,EAAE,IAAI;SAC9B,CAAC,CAAC;IACL,CAAC;IAsQD;;;;;;;OAOG;IACI,KAAK,CAAC,gBAAgB,CAAC,GAAQ,EAAE,cAA0D;QAChG,IAAI,GAAG,CAAC,MAAM,KAAK,WAAW,IAAI,IAAI,CAAC,4BAA4B,CAAC,MAAM,KAAK,CAAC,EAAE;YAChF,OAAO;SACR;QAED,IAAI;YACF,MAAM,EACJ,YAAY,EACZ,YAAY,EACZ,UAAU,EACV,KAAK,EACL,SAAS;YACT,aAAa;YACb,KAAK,GACN,GAAG,GAAG,CAAC;YAER,aAAa;YACb,MAAM,EAAE,iBAAiB,EAAE,aAAa,EAAE,YAAY,EAAE,GACtD,OAAO,KAAK,KAAK,WAAW,IAAI,OAAO,KAAK,KAAK,QAAQ,IAAI,KAAK,KAAK,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC;YAE3F,MAAM,IAAI,GAAG,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE,aAAa,EAAE,iBAAiB,CAAC,CAAC;YAChE,MAAM,OAAO,GACX,YAAY,KAAK,IAAI,IAAI,UAAU,KAAK,IAAI,IAAI,KAAK,CAAC,YAAY,CAAC,IAAI,KAAK,CAAC,UAAU,CAAC,CAAC,CAAC;gBACxF,SAAS;gBACX,CAAC,CAAC,CAAC,YAAY,GAAG,UAAU,CAAC,GAAG,IAAI,CAAC;YAEvC,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,YAAY,CAAC,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,IAAI,CAAC,SAAS,EAAE,EAAE,KAAK,EAAE,KAAK,EAAE,CAAC,CAAC;YAEjG,MAAM,eAAe,GAAG,MAAM,IAAI,CAAC,YAAY,CAAC,QAAQ,CAAC,CAAC;YAC1D,MAAM,MAAM,GAAG,IAAI,CAAC,oBAAoB,CAAC,eAAe,CAAC,CAAC;YAC1D,MAAM,EAAE,kBAAkB,EAAE,iBAAiB,EAAE,GAAG,IAAI,CAAC,cAAc,CAAC,MAAM,EAAE,eAAe,CAAC,CAAC;YAE/F,MAAM,MAAM,GAA+D;gBACzE,GAAG,IAAI,CAAC,aAAa;gBACrB,kBAAkB;gBAClB,MAAM,EAAE,MAAM,CAAC,KAAK,CAAC,CAAC,EAAE,MAAM,CAAC,MAAM,GAAG,CAAC,CAAC;aAC3C,CAAC;YAEF,MAAM,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC,MAAM,GAAG,CAAC,CAAC,EAAE,OAAO,CAAC;YAClD,MAAM,cAAc,GAClB,OAAO,MAAM,KAAK,QAAQ,CAAC,CAAC,CAAC,MAAM;gBACnC,CAAC,CAAC,OAAO,MAAM,KAAK,WAAW,IAAI,MAAM,KAAK,IAAI,CAAC,CAAC,CAAC,EAAE;oBACvD,CAAC,CAAC,G
AAG,MAAM,EAAE,CAAC;YAEhB,IAAI,CAAC,eAAe,CAAC,kBAAkB,CAAC,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,4BAA4B,EAAE;gBACrF,MAAM;gBACN,IAAI,EAAE;oBACJ;wBACE,IAAI;wBACJ,OAAO;wBACP,mBAAmB,EAAE,YAAY;wBACjC,gBAAgB,EAAE,SAAS;wBAC3B,MAAM,EAAE,cAAc;wBACtB,SAAS,EAAE,GAAG,CAAC,UAAU;wBACzB,MAAM,EAAE,YAAY;wBACpB,GAAG,iBAAiB;wBACpB,GAAG,cAAc;qBAClB;iBACF;aACF,CAAC,CAAC;SACJ;QAAC,OAAO,KAAK,EAAE;YACd,OAAO,CAAC,KAAK,CAAC,2BAA2B,EAAE,KAAK,CAAC,CAAC;SACnD;IACH,CAAC;CACF;AAED,eAAe,aAAa,CAAC"}
package/package.json CHANGED
@@ -1,12 +1,12 @@
1
1
  {
2
2
  "name": "openlayer",
3
- "version": "0.0.1-alpha.0",
3
+ "version": "0.0.1-alpha.1",
4
4
  "description": "The official TypeScript library for the Openlayer API",
5
5
  "author": "Openlayer <support@openlayer.com>",
6
6
  "types": "./index.d.ts",
7
7
  "main": "./index.js",
8
8
  "type": "commonjs",
9
- "repository": "github:stainless-sdks/openlayer-node",
9
+ "repository": "github:openlayer-ai/openlayer-ts",
10
10
  "license": "Apache-2.0",
11
11
  "packageManager": "yarn@1.22.22",
12
12
  "files": [
@@ -95,7 +95,7 @@ export function getRuntime({ manuallyImported }: { manuallyImported?: boolean }
95
95
  getDefaultAgent: (url: string) => undefined,
96
96
  fileFromPath: () => {
97
97
  throw new Error(
98
- 'The `fileFromPath` function is only supported in Node. See the README for more details: https://www.github.com/stainless-sdks/openlayer-node#file-uploads',
98
+ 'The `fileFromPath` function is only supported in Node. See the README for more details: https://www.github.com/openlayer-ai/openlayer-ts#file-uploads',
99
99
  );
100
100
  },
101
101
  isFsReadStream: (value: any) => false,
@@ -13,9 +13,23 @@
13
13
  import { program } from 'commander';
14
14
  import * as fs from 'fs';
15
15
  import * as path from 'path';
16
- import { Config, RunReturn } from './run';
17
16
 
18
- export class CLIHandler {
17
// Define shared interfaces and utilities here

/**
 * Shape of the value resolved by a user-supplied `run` function:
 * one primary output plus any extra columns to merge into the row.
 */
export interface RunReturn {
  /** Extra columns merged into the output row alongside `output`. */
  otherFields: { [key: string]: any };
  /** The primary output value produced by the run. */
  output: any;
}
22
+
23
// Define an interface for the configuration object

/**
 * Configuration written alongside the generated dataset, describing
 * how its columns should be interpreted.
 */
export interface Config {
  /** Names of the input variables present in each dataset row, if any. */
  inputVariableNames?: string[];
  metadata: {
    /** Time the outputs were produced — presumably a Unix timestamp; confirm unit against writer. */
    outputTimestamp: number;
  };
  /** Name of the dataset column holding the run output. */
  outputColumnName: string;
}
31
+
32
+ class CLIHandler {
19
33
  private run: (...args: any[]) => Promise<any>;
20
34
 
21
35
  constructor(runFunction: (...args: any[]) => Promise<any>) {
@@ -43,7 +57,7 @@ export class CLIHandler {
43
57
  const result = await this.run(item);
44
58
  // Merge the original item fields with the result
45
59
  return { ...item, ...result.otherFields, output: result.output };
46
- })
60
+ }),
47
61
  )
48
62
  .then((results) => {
49
63
  /*
@@ -74,7 +88,9 @@ export class CLIHandler {
74
88
  fs.writeFileSync(datasetPath, JSON.stringify(results, null, 4), 'utf8');
75
89
  fs.writeFileSync(configPath, JSON.stringify(config, null, 4), 'utf8');
76
90
 
77
- console.log(`Output written to ${datasetPath}`);
78
- console.log(`Config written to ${configPath}`);
91
+ console.info(`Output written to ${datasetPath}`);
92
+ console.info(`Config written to ${configPath}`);
79
93
  }
80
94
  }
95
+
96
+ export default CLIHandler;
@@ -0,0 +1,2 @@
1
+ export * as cli from './cli';
2
+ export * as openAiMonitor from './openai-monitor';