openlayer 0.0.7 → 0.0.9

This diff shows the changes between publicly released versions of this package, as published to one of the supported registries. The information is provided for informational purposes only and reflects the package contents as they appear in the public registry.
Files changed (2):
  1. package/dist/index.js +8 -2
  2. package/package.json +1 -1
package/dist/index.js CHANGED
@@ -29,7 +29,7 @@ class OpenAIMonitor {
29
29
  .map(({ content }) => content)
30
30
  .join('\n')
31
31
  .trim();
32
- this.resolvedQuery = (endpoint, args = {}) => (0, request_1.resolvedQuery)(this.openlayerServerUrl, endpoint, Object.assign({ version: this.version }, args));
32
+ this.resolvedQuery = (endpoint, args = {}) => (0, request_1.resolvedQuery)(this.openlayerServerUrl, endpoint, args);
33
33
  this.uploadDataToOpenlayer = (data) => __awaiter(this, void 0, void 0, function* () {
34
34
  const uploadToInferencePipeline = (id) => __awaiter(this, void 0, void 0, function* () {
35
35
  const dataStreamEndpoint = `/inference-pipelines/${id}/data-stream`;
@@ -68,6 +68,7 @@ class OpenAIMonitor {
68
68
  const projectsEndpoint = '/projects';
69
69
  const projectsQueryParameters = {
70
70
  name: this.openlayerProjectName,
71
+ version: this.version,
71
72
  };
72
73
  const projectsQuery = this.resolvedQuery(projectsEndpoint, projectsQueryParameters);
73
74
  const projectsResponse = yield fetch(projectsQuery, {
@@ -88,6 +89,7 @@ class OpenAIMonitor {
88
89
  const inferencePipelineEndpoint = `/projects/${project.id}/inference-pipelines`;
89
90
  const inferencePipelineQueryParameters = {
90
91
  name: this.openlayerInferencePipelineName,
92
+ version: this.version,
91
93
  };
92
94
  const inferencePipelineQuery = this.resolvedQuery(inferencePipelineEndpoint, inferencePipelineQueryParameters);
93
95
  const inferencePipelineResponse = yield fetch(inferencePipelineQuery, {
@@ -108,7 +110,7 @@ class OpenAIMonitor {
108
110
  }
109
111
  else {
110
112
  const createInferencePipelineEndpoint = `/projects/${project.id}/inference-pipelines`;
111
- const createInferencePipelineQuery = this.resolvedQuery(createInferencePipelineEndpoint);
113
+ const createInferencePipelineQuery = this.resolvedQuery(createInferencePipelineEndpoint, { version: this.version });
112
114
  const createInferencePipelineResponse = yield fetch(createInferencePipelineQuery, {
113
115
  body: JSON.stringify({
114
116
  description: '',
@@ -169,6 +171,7 @@ class OpenAIMonitor {
169
171
  input: this.formatChatCompletionInput(body.messages),
170
172
  latency,
171
173
  output: outputData,
174
+ timestamp: startTime,
172
175
  });
173
176
  }
174
177
  else {
@@ -180,6 +183,7 @@ class OpenAIMonitor {
180
183
  input: this.formatChatCompletionInput(body.messages),
181
184
  latency,
182
185
  output: nonStreamedResponse.choices[0].message.content,
186
+ timestamp: startTime,
183
187
  tokens: (_e = (_d = nonStreamedResponse.usage) === null || _d === void 0 ? void 0 : _d.total_tokens) !== null && _e !== void 0 ? _e : 0,
184
188
  });
185
189
  }
@@ -222,6 +226,7 @@ class OpenAIMonitor {
222
226
  input: body.prompt,
223
227
  latency,
224
228
  output: outputData,
229
+ timestamp: startTime,
225
230
  tokens: tokensData,
226
231
  });
227
232
  }
@@ -234,6 +239,7 @@ class OpenAIMonitor {
234
239
  input: body.prompt,
235
240
  latency,
236
241
  output: nonStreamedResponse.choices[0].text,
242
+ timestamp: startTime,
237
243
  tokens: (_o = (_m = nonStreamedResponse.usage) === null || _m === void 0 ? void 0 : _m.total_tokens) !== null && _o !== void 0 ? _o : 0,
238
244
  });
239
245
  }
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "openlayer",
3
- "version": "0.0.7",
3
+ "version": "0.0.9",
4
4
  "description": "The Openlayer TypeScript client",
5
5
  "main": "dist/index.js",
6
6
  "types": "dist/index.d.ts",