openlayer 0.1.31 → 0.1.32

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -563,8 +563,8 @@ class OpenAIMonitor {
563
563
  : typeof output === 'undefined' || output === null
564
564
  ? ''
565
565
  : `${output}`;
566
- this.openlayerClient.streamData(Object.assign(Object.assign({ 'OpenAI Assistant ID': assistant_id, 'OpenAI Thread ID': thread_id, cost,
567
- latency, output: resolvedOutput, timestamp: run.created_at, tokens: total_tokens }, inputVariablesMap), additionalLogs), config, this.openlayerInferencePipelineId);
566
+ this.openlayerClient.streamData(Object.assign(Object.assign({ cost,
567
+ latency, openai_assistant_id: assistant_id, openai_thread_id: thread_id, output: resolvedOutput, timestamp: run.created_at, tokens: total_tokens }, inputVariablesMap), additionalLogs), config, this.openlayerInferencePipelineId);
568
568
  }
569
569
  catch (error) {
570
570
  console.error('Error logging thread run:', error);
@@ -6,19 +6,18 @@ import OpenAI from 'openai';
6
6
  import { OpenAIMonitor } from 'openlayer';
7
7
 
8
8
  const openai = new OpenAI({
9
- apiKey: 'sk-***REDACTED***',
9
+ apiKey: 'YOUR_OPENAI_API_KEY',
10
10
  });
11
11
 
12
12
  // Create monitor with your credentials
13
13
  const monitor = new OpenAIMonitor({
14
- openAiApiKey: 'sk-***REDACTED***',
15
- openlayerApiKey: '***REDACTED***',
14
+ openAiApiKey: 'YOUR_OPENAI_API_KEY',
15
+ openlayerApiKey: 'YOUR_OPENLAYER_API_KEY',
16
16
  // EITHER specify an existing inference pipeline ID
17
- openlayerInferencePipelineId: 'ef604fd7-2237-419f-90a0-4a456020ecbb',
17
+ openlayerInferencePipelineId: 'YOUR_OPENLAYER_INFERENCE_PIPELINE_ID',
18
18
  // OR the project and inference pipeline names to create or load one
19
- // openlayerInferencePipelineName: 'production',
20
- // openlayerProjectName: 'Python QA5',
21
- openlayerServerUrl: 'http://localhost:8080/v1',
19
+ openlayerInferencePipelineName: 'production',
20
+ openlayerProjectName: 'YOUR_OPENLAYER_PROJECT_NAME',
22
21
  });
23
22
 
24
23
  await monitor.initialize();
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "openlayer",
3
- "version": "0.1.31",
3
+ "version": "0.1.32",
4
4
  "description": "The Openlayer TypeScript client",
5
5
  "main": "dist/index.js",
6
6
  "types": "dist/index.d.ts",
@@ -20,7 +20,6 @@
20
20
  "eslint-plugin-typescript-sort-keys": "^3.1.0",
21
21
  "node-fetch": "^3.3.2",
22
22
  "openai": "^4.19.0",
23
- "openlayer": "^0.1.30",
24
23
  "uuid": "^9.0.1"
25
24
  },
26
25
  "devDependencies": {