openlayer 0.0.3 → 0.0.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/dist/index.js +27 -36
  2. package/package.json +2 -1
package/dist/index.js CHANGED
@@ -8,17 +8,6 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
8
8
  step((generator = generator.apply(thisArg, _arguments || [])).next());
9
9
  });
10
10
  };
11
- var __rest = (this && this.__rest) || function (s, e) {
12
- var t = {};
13
- for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)
14
- t[p] = s[p];
15
- if (s != null && typeof Object.getOwnPropertySymbols === "function")
16
- for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {
17
- if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))
18
- t[p[i]] = s[p[i]];
19
- }
20
- return t;
21
- };
22
11
  var __asyncValues = (this && this.__asyncValues) || function (o) {
23
12
  if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
24
13
  var m = o[Symbol.asyncIterator], i;
@@ -27,6 +16,7 @@ var __asyncValues = (this && this.__asyncValues) || function (o) {
27
16
  function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
28
17
  };
29
18
  Object.defineProperty(exports, "__esModule", { value: true });
19
+ const node_fetch_1 = require("node-fetch");
30
20
  const openai_1 = require("openai");
31
21
  const request_1 = require("./utils/request");
32
22
  class OpenAIMonitor {
@@ -44,7 +34,7 @@ class OpenAIMonitor {
44
34
  const uploadToInferencePipeline = (id) => __awaiter(this, void 0, void 0, function* () {
45
35
  const dataStreamEndpoint = `/inference-pipelines/${id}/data-stream`;
46
36
  const dataStreamQuery = this.resolvedQuery(dataStreamEndpoint);
47
- const response = yield fetch(dataStreamQuery, {
37
+ const response = yield (0, node_fetch_1.default)(dataStreamQuery, {
48
38
  body: JSON.stringify({
49
39
  config: {
50
40
  inferenceIdColumnName: 'inference_id',
@@ -62,7 +52,6 @@ class OpenAIMonitor {
62
52
  },
63
53
  method: 'POST',
64
54
  });
65
- console.log('bruh', response, dataStreamQuery);
66
55
  if (!response.ok) {
67
56
  console.error('Error making POST request:', response.status);
68
57
  throw new Error(`Error: ${response.status}`);
@@ -78,7 +67,7 @@ class OpenAIMonitor {
78
67
  name: this.openlayerProjectName,
79
68
  };
80
69
  const projectsQuery = this.resolvedQuery(projectsEndpoint, projectsQueryParameters);
81
- const projectsResponse = yield fetch(projectsQuery, {
70
+ const projectsResponse = yield (0, node_fetch_1.default)(projectsQuery, {
82
71
  headers: {
83
72
  Authorization: `Bearer ${this.openlayerApiKey}`,
84
73
  'Content-Type': 'application/json',
@@ -98,7 +87,7 @@ class OpenAIMonitor {
98
87
  name: this.openlayerInferencePipelineName,
99
88
  };
100
89
  const inferencePipelineQuery = this.resolvedQuery(inferencePipelineEndpoint, inferencePipelineQueryParameters);
101
- const inferencePipelineResponse = yield fetch(inferencePipelineQuery, {
90
+ const inferencePipelineResponse = yield (0, node_fetch_1.default)(inferencePipelineQuery, {
102
91
  headers: {
103
92
  Authorization: `Bearer ${this.openlayerApiKey}`,
104
93
  'Content-Type': 'application/json',
@@ -117,7 +106,7 @@ class OpenAIMonitor {
117
106
  else {
118
107
  const createInferencePipelineEndpoint = `/projects/${project.id}/inference-pipelines`;
119
108
  const createInferencePipelineQuery = this.resolvedQuery(createInferencePipelineEndpoint);
120
- const createInferencePipelineResponse = yield fetch(createInferencePipelineQuery, {
109
+ const createInferencePipelineResponse = yield (0, node_fetch_1.default)(createInferencePipelineQuery, {
121
110
  body: JSON.stringify({
122
111
  description: '',
123
112
  name: typeof this.openlayerInferencePipelineName === 'undefined'
@@ -130,8 +119,7 @@ class OpenAIMonitor {
130
119
  },
131
120
  method: 'POST',
132
121
  });
133
- const _a = yield createInferencePipelineResponse.json(), { id: inferencePipelineId } = _a, response = __rest(_a, ["id"]);
134
- console.log(createInferencePipelineResponse, response, inferencePipelineId);
122
+ const { id: inferencePipelineId } = yield createInferencePipelineResponse.json();
135
123
  if (!inferencePipelineId) {
136
124
  throw new Error('Error creating inference pipeline');
137
125
  }
@@ -144,8 +132,8 @@ class OpenAIMonitor {
144
132
  }
145
133
  });
146
134
  this.createChatCompletion = (body, options) => __awaiter(this, void 0, void 0, function* () {
147
- var _b, e_1, _c, _d;
148
- var _e, _f;
135
+ var _a, e_1, _b, _c;
136
+ var _d, _e;
149
137
  if (!this.monitoringOn) {
150
138
  throw new Error('Monitoring is not active.');
151
139
  }
@@ -157,10 +145,10 @@ class OpenAIMonitor {
157
145
  if (body.stream) {
158
146
  const streamedResponse = response;
159
147
  try {
160
- for (var _g = true, streamedResponse_1 = __asyncValues(streamedResponse), streamedResponse_1_1; streamedResponse_1_1 = yield streamedResponse_1.next(), _b = streamedResponse_1_1.done, !_b; _g = true) {
161
- _d = streamedResponse_1_1.value;
162
- _g = false;
163
- const chunk = _d;
148
+ for (var _f = true, streamedResponse_1 = __asyncValues(streamedResponse), streamedResponse_1_1; streamedResponse_1_1 = yield streamedResponse_1.next(), _a = streamedResponse_1_1.done, !_a; _f = true) {
149
+ _c = streamedResponse_1_1.value;
150
+ _f = false;
151
+ const chunk = _c;
164
152
  // Process each chunk - for example, accumulate input data
165
153
  outputData += chunk.choices[0].delta.content;
166
154
  }
@@ -168,7 +156,7 @@ class OpenAIMonitor {
168
156
  catch (e_1_1) { e_1 = { error: e_1_1 }; }
169
157
  finally {
170
158
  try {
171
- if (!_g && !_b && (_c = streamedResponse_1.return)) yield _c.call(streamedResponse_1);
159
+ if (!_f && !_a && (_b = streamedResponse_1.return)) yield _b.call(streamedResponse_1);
172
160
  }
173
161
  finally { if (e_1) throw e_1.error; }
174
162
  }
@@ -189,14 +177,14 @@ class OpenAIMonitor {
189
177
  input: this.formatChatCompletionInput(body.messages),
190
178
  latency,
191
179
  output: nonStreamedResponse.choices[0].message.content,
192
- tokens: (_f = (_e = nonStreamedResponse.usage) === null || _e === void 0 ? void 0 : _e.total_tokens) !== null && _f !== void 0 ? _f : 0,
180
+ tokens: (_e = (_d = nonStreamedResponse.usage) === null || _d === void 0 ? void 0 : _d.total_tokens) !== null && _e !== void 0 ? _e : 0,
193
181
  });
194
182
  }
195
183
  return response;
196
184
  });
197
185
  this.createCompletion = (body, options) => __awaiter(this, void 0, void 0, function* () {
198
- var _h, e_2, _j, _k;
199
- var _l, _m, _o, _p;
186
+ var _g, e_2, _h, _j;
187
+ var _k, _l, _m, _o;
200
188
  if (!this.monitoringOn) {
201
189
  throw new Error('Monitoring is not active.');
202
190
  }
@@ -209,19 +197,19 @@ class OpenAIMonitor {
209
197
  if (body.stream) {
210
198
  const streamedResponse = response;
211
199
  try {
212
- for (var _q = true, streamedResponse_2 = __asyncValues(streamedResponse), streamedResponse_2_1; streamedResponse_2_1 = yield streamedResponse_2.next(), _h = streamedResponse_2_1.done, !_h; _q = true) {
213
- _k = streamedResponse_2_1.value;
214
- _q = false;
215
- const chunk = _k;
200
+ for (var _p = true, streamedResponse_2 = __asyncValues(streamedResponse), streamedResponse_2_1; streamedResponse_2_1 = yield streamedResponse_2.next(), _g = streamedResponse_2_1.done, !_g; _p = true) {
201
+ _j = streamedResponse_2_1.value;
202
+ _p = false;
203
+ const chunk = _j;
216
204
  // Process each chunk - for example, accumulate input data
217
205
  outputData += chunk.choices[0].text.trim();
218
- tokensData += (_m = (_l = chunk.usage) === null || _l === void 0 ? void 0 : _l.total_tokens) !== null && _m !== void 0 ? _m : 0;
206
+ tokensData += (_l = (_k = chunk.usage) === null || _k === void 0 ? void 0 : _k.total_tokens) !== null && _l !== void 0 ? _l : 0;
219
207
  }
220
208
  }
221
209
  catch (e_2_1) { e_2 = { error: e_2_1 }; }
222
210
  finally {
223
211
  try {
224
- if (!_q && !_h && (_j = streamedResponse_2.return)) yield _j.call(streamedResponse_2);
212
+ if (!_p && !_g && (_h = streamedResponse_2.return)) yield _h.call(streamedResponse_2);
225
213
  }
226
214
  finally { if (e_2) throw e_2.error; }
227
215
  }
@@ -243,12 +231,15 @@ class OpenAIMonitor {
243
231
  input: body.prompt,
244
232
  latency,
245
233
  output: nonStreamedResponse.choices[0].text,
246
- tokens: (_p = (_o = nonStreamedResponse.usage) === null || _o === void 0 ? void 0 : _o.total_tokens) !== null && _p !== void 0 ? _p : 0,
234
+ tokens: (_o = (_m = nonStreamedResponse.usage) === null || _m === void 0 ? void 0 : _m.total_tokens) !== null && _o !== void 0 ? _o : 0,
247
235
  });
248
236
  }
249
237
  return response;
250
238
  });
251
- this.OpenAIClient = new openai_1.default({ apiKey: openAiApiKey });
239
+ this.OpenAIClient = new openai_1.default({
240
+ apiKey: openAiApiKey,
241
+ dangerouslyAllowBrowser: true,
242
+ });
252
243
  this.openlayerApiKey = openlayerApiKey;
253
244
  this.openlayerInferencePipelineName = openlayerInferencePipelineName;
254
245
  this.openlayerProjectName = openlayerProjectName;
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "openlayer",
3
- "version": "0.0.3",
3
+ "version": "0.0.5",
4
4
  "description": "The Openlayer TypeScript client",
5
5
  "main": "dist/index.js",
6
6
  "types": "dist/index.d.ts",
@@ -18,6 +18,7 @@
18
18
  "eslint-config-prettier": "^9.0.0",
19
19
  "eslint-plugin-prettier": "^5.0.1",
20
20
  "eslint-plugin-typescript-sort-keys": "^3.1.0",
21
+ "node-fetch": "^3.3.2",
21
22
  "openai": "^4.19.0"
22
23
  },
23
24
  "devDependencies": {