@promptbook/openai 0.72.0-11 → 0.72.0-13

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -20,11 +20,11 @@ Supercharge your use of large language models
 
 - ✨ **Support of [OpenAI o1 model](https://openai.com/o1/)**
 
-<blockquote style="color: #ff8811">
-    <b>⚠ Warning:</b> This is a pre-release version of the library. It is not yet ready for production use. Please look at <a href="https://www.npmjs.com/package/@promptbook/core?activeTab=versions">latest stable release</a>.
-</blockquote>
 
 
+<blockquote style="color: #ff8811">
+    <b>⚠ Warning:</b> This is a pre-release version of the library. It is not yet ready for production use. Please look at <a href="https://www.npmjs.com/package/@promptbook/core?activeTab=versions">latest stable release</a>.
+</blockquote>
 
 
 ## 📦 Package `@promptbook/openai`
 
package/esm/index.es.js CHANGED
@@ -6,7 +6,7 @@ import OpenAI from 'openai';
 /**
  * The version of the Promptbook library
  */
-var PROMPTBOOK_VERSION = '0.72.0-10';
+var PROMPTBOOK_VERSION = '0.72.0-12';
 // TODO: [main] !!!! List here all the versions and annotate + put into script
 
 /*! *****************************************************************************
@@ -52,7 +52,7 @@ export type LlmExecutionTools = {
     callEmbeddingModel?(prompt: Prompt): Promise<EmbeddingPromptResult>;
 };
 /**
- * TODO: Implement destroyable pattern to free resources
+ * TODO: [🍚] Implement destroyable pattern to free resources
  * TODO: [🏳] Add `callTranslationModel`
  * TODO: [🧠] Emulation of one type of model with another one - emuate chat with completion; emulate translation with chat
  * TODO: [🍓][♐] Some heuristic to pick the best model in listed models
@@ -1,4 +1,4 @@
-import type { PipelineExecutor } from './PipelineExecutor';
+import type { PipelineExecutorResult } from './PipelineExecutorResult';
 /**
  * Asserts that the execution of a Promptbook is successful
  *
@@ -6,7 +6,7 @@ import type { PipelineExecutor } from './PipelineExecutor';
  * @throws {PipelineExecutionError} If the execution is not successful or if multiple errors occurred
  * @public exported from `@promptbook/core`
  */
-export declare function assertsExecutionSuccessful(executionResult: Pick<Awaited<ReturnType<PipelineExecutor>>, 'isSuccessful' | 'errors'>): void;
+export declare function assertsExecutionSuccessful(executionResult: Pick<PipelineExecutorResult, 'isSuccessful' | 'errors'>): void;
 /**
  * TODO: [🧠] Can this return type be better typed than void
  */
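
Note on the `assertsExecutionSuccessful` change above: the assertion now accepts anything shaped like a `PipelineExecutorResult` (only `isSuccessful` and `errors` are read), instead of the awaited return type of a `PipelineExecutor`. A minimal call-site sketch; the type import location and the origin of `executionResult` are assumptions, only the function and the picked keys come from the declaration above:

```ts
import { assertsExecutionSuccessful } from '@promptbook/core';
// Assumed export location for the result type; adjust to wherever
// PipelineExecutorResult is re-exported in your setup.
import type { PipelineExecutorResult } from '@promptbook/types';

// Stand-in for the value returned by an executed pipeline.
declare const executionResult: PipelineExecutorResult;

// Throws `PipelineExecutionError` if the run was unsuccessful or collected
// errors; otherwise it returns void and execution continues.
assertsExecutionSuccessful(executionResult);
```
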
@@ -1,3 +1,4 @@
+import type { Promisable } from 'type-fest';
 import type { PipelineCollection } from '../../../collection/PipelineCollection';
 import type { CommonToolsOptions } from '../../../execution/CommonToolsOptions';
 import type { LlmExecutionTools } from '../../../execution/LlmExecutionTools';
@@ -51,7 +52,7 @@ export type CollectionRemoteServerOptions<TCustomOptions> = {
     /**
      * Creates llm execution tools for each client
      */
-    createLlmExecutionTools(options: CollectionRemoteServerClientOptions<TCustomOptions>): LlmExecutionTools;
+    createLlmExecutionTools(options: CollectionRemoteServerClientOptions<TCustomOptions>): Promisable<LlmExecutionTools>;
 };
 export type CollectionRemoteServerClientOptions<TCustomOptions> = {
     /**
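
The `Promisable<LlmExecutionTools>` return type above means the per-client factory may now be asynchronous. A sketch under stated assumptions: the surrounding remote-server wiring, the `clientId` field on the client options, and the `loadApiKeyFor` helper are illustrative only; `OpenAiExecutionTools` is this package's existing export:

```ts
import { OpenAiExecutionTools } from '@promptbook/openai';

// Hypothetical per-client secret lookup; not part of Promptbook.
declare function loadApiKeyFor(clientId: string): Promise<string>;

const serverOptions = {
    // ...other CollectionRemoteServerOptions fields omitted...
    // The factory may now return a Promise (Promisable<LlmExecutionTools>),
    // so per-client secrets can be resolved before constructing the tools.
    async createLlmExecutionTools(options: { clientId: string }) {
        const apiKey = await loadApiKeyFor(options.clientId);
        return new OpenAiExecutionTools({ apiKey });
    },
};
```
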
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
     "name": "@promptbook/openai",
-    "version": "0.72.0-11",
+    "version": "0.72.0-13",
     "description": "Supercharge your use of large language models",
     "private": false,
     "sideEffects": false,
@@ -51,7 +51,7 @@
     "module": "./esm/index.es.js",
     "typings": "./esm/typings/src/_packages/openai.index.d.ts",
     "peerDependencies": {
-        "@promptbook/core": "0.72.0-11"
+        "@promptbook/core": "0.72.0-13"
     },
     "dependencies": {
         "colors": "1.4.0",
package/umd/index.umd.js CHANGED
@@ -14,7 +14,7 @@
 /**
  * The version of the Promptbook library
  */
-var PROMPTBOOK_VERSION = '0.72.0-10';
+var PROMPTBOOK_VERSION = '0.72.0-12';
 // TODO: [main] !!!! List here all the versions and annotate + put into script
 
 /*! *****************************************************************************