openlayer 0.1.15 → 0.1.16

This diff shows the changes between two publicly released versions of this package as published to a supported registry. It is provided for informational purposes only and reflects the package contents exactly as they appear in the public registry.
package/dist/index.d.ts CHANGED
@@ -15,10 +15,6 @@ export interface StreamingData {
15
15
  * The output string generated by the chat completion.
16
16
  */
17
17
  output: string;
18
- /**
19
- * The full prompt history for the chat completion.
20
- */
21
- prompt?: ChatCompletionMessageParam[];
22
18
  /**
23
19
  * A timestamp representing when the chat completion occurred. Optional.
24
20
  */
@@ -57,9 +53,9 @@ interface StreamingDataConfig {
57
53
  */
58
54
  outputColumnName: string | null;
59
55
  /**
60
- * The name of the column that stores the prompt template. Can be null.
56
+ * The full prompt history for the chat completion.
61
57
  */
62
- promptColumnName: string | null;
58
+ prompt?: ChatCompletionMessageParam[];
63
59
  /**
64
60
  * The name of the column that stores timestamp data. Can be null.
65
61
  */
package/dist/index.js CHANGED
@@ -33,7 +33,6 @@ class OpenlayerClient {
33
33
  latencyColumnName: 'latency',
34
34
  numOfTokenColumnName: 'tokens',
35
35
  outputColumnName: 'output',
36
- promptColumnName: 'prompt',
37
36
  timestampColumnName: 'timestamp',
38
37
  };
39
38
  this.openlayerServerUrl = 'https://api.openlayer.com/v1';
@@ -259,7 +258,8 @@ class OpenAIMonitor {
259
258
  .filter(({ role }) => role === 'user')
260
259
  .map(({ content }) => content);
261
260
  const inputVariablesMap = inputVariableNames.reduce((acc, name, i) => (Object.assign(Object.assign({}, acc), { [name]: inputVariables[i] })), {});
262
- const config = Object.assign(Object.assign({}, this.openlayerClient.defaultConfig), { inputVariableNames });
261
+ const config = Object.assign(Object.assign({}, this.openlayerClient.defaultConfig), { inputVariableNames,
262
+ prompt });
263
263
  if (body.stream) {
264
264
  const streamedResponse = response;
265
265
  try {
@@ -280,7 +280,7 @@ class OpenAIMonitor {
280
280
  }
281
281
  const endTime = Date.now();
282
282
  const latency = endTime - startTime;
283
- this.openlayerClient.streamData(Object.assign({ latency, output: outputData, prompt, timestamp: startTime }, inputVariablesMap), config, inferencePipeline.id);
283
+ this.openlayerClient.streamData(Object.assign({ latency, output: outputData, timestamp: startTime }, inputVariablesMap), config, inferencePipeline.id);
284
284
  }
285
285
  else {
286
286
  const nonStreamedResponse = response;
@@ -292,8 +292,7 @@ class OpenAIMonitor {
292
292
  throw new Error('No output received from OpenAI.');
293
293
  }
294
294
  this.openlayerClient.streamData(Object.assign({ latency,
295
- output,
296
- prompt, timestamp: startTime, tokens: (_e = (_d = nonStreamedResponse.usage) === null || _d === void 0 ? void 0 : _d.total_tokens) !== null && _e !== void 0 ? _e : 0 }, inputVariablesMap), config, inferencePipeline.id);
295
+ output, timestamp: startTime, tokens: (_e = (_d = nonStreamedResponse.usage) === null || _d === void 0 ? void 0 : _d.total_tokens) !== null && _e !== void 0 ? _e : 0 }, inputVariablesMap), config, inferencePipeline.id);
297
296
  }
298
297
  return response;
299
298
  });
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "openlayer",
3
- "version": "0.1.15",
3
+ "version": "0.1.16",
4
4
  "description": "The Openlayer TypeScript client",
5
5
  "main": "dist/index.js",
6
6
  "types": "dist/index.d.ts",