@promptbook/openai 0.65.0-7 → 0.65.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/esm/index.es.js CHANGED
@@ -6,7 +6,7 @@ import spaceTrim$1, { spaceTrim } from 'spacetrim';
6
6
  /**
7
7
  * The version of the Promptbook library
8
8
  */
9
- var PROMPTBOOK_VERSION = '0.65.0-6';
9
+ var PROMPTBOOK_VERSION = '0.65.0-7';
10
10
  // TODO: !!!! List here all the versions and annotate + put into script
11
11
 
12
12
  /*! *****************************************************************************
@@ -49,7 +49,6 @@ export declare class AnthropicClaudeExecutionTools implements LlmExecutionTools
49
49
  * TODO: Maybe Create some common util for callChatModel and callCompletionModel
50
50
  * TODO: Maybe make custom OpenaiError
51
51
  * TODO: [🧠][🈁] Maybe use `isDeterministic` from options
52
- * TODO: [🍜] !!!!!! Auto use anonymous server in browser
53
52
  * TODO: [🧠][🌰] Allow to pass `title` for tracking purposes
54
53
  * TODO: [📅] Maybe instead of `RemoteLlmExecutionToolsOptions` use `proxyWithAnonymousRemoteServer` (if implemented)
55
54
  */
@@ -27,7 +27,5 @@ export type AnthropicClaudeExecutionToolsProxiedOptions = CommonExecutionToolsOp
27
27
  isProxied: true;
28
28
  } & Pick<RemoteLlmExecutionToolsOptions, 'remoteUrl' | 'path'>;
29
29
  /**
30
- * TODO: [🍜] Default remote remoteUrl and path for anonymous server
31
- * TODO: [🍜] !!!!!! Auto add WebGPT / Promptbook.studio anonymous server in browser
32
30
  * TODO: [🧠][🤚] Detecting `user`
33
31
  */
@@ -8,6 +8,7 @@ import type { AnthropicClaudeExecutionToolsOptions } from './AnthropicClaudeExec
8
8
  */
9
9
  export declare function createAnthropicClaudeExecutionTools(options: AnthropicClaudeExecutionToolsOptions): AnthropicClaudeExecutionTools | RemoteLlmExecutionTools;
10
10
  /**
11
- * TODO: !!!!!! Make this with all LLM providers
12
- * TODO: !!!!!! Maybe change all `new AnthropicClaudeExecutionTools` -> `createAnthropicClaudeExecutionTools` in manual
11
+ * TODO: [🧠] !!!! Make anonymous this with all LLM providers
12
+ * TODO: [🧠] !!!! Maybe change all `new AnthropicClaudeExecutionTools` -> `createAnthropicClaudeExecutionTools` in manual
13
+ * TODO: [🧠] Maybe auto-detect usage in browser and determine default value of `isProxied`
13
14
  */
@@ -1,6 +1,6 @@
1
1
  #!/usr/bin/env ts-node
2
2
  export {};
3
3
  /**
4
- * TODO: [🍜] !!!!!! Playground with WebGPT / Promptbook.studio anonymous server
4
+ * TODO: !!! Playground with WebGPT / Promptbook.studio anonymous server
5
5
  * TODO: !!! Test here that `systemMessage`, `temperature` and `seed` are working correctly
6
6
  */
@@ -50,7 +50,6 @@ export declare class RemoteLlmExecutionTools implements LlmExecutionTools {
50
50
  listModels(): Promise<Array<AvailableModel>>;
51
51
  }
52
52
  /**
53
- * TODO: [🍜] !!!!!! Default remote remoteUrl and path for anonymous server
54
53
  * TODO: [🍓] Allow to list compatible models with each variant
55
54
  * TODO: [🗯] RemoteLlmExecutionTools should extend Destroyable and implement IDestroyable
56
55
  * TODO: [🧠][🌰] Allow to pass `title` for tracking purposes
@@ -51,6 +51,3 @@ export type RemoteLlmExecutionToolsOptions = CommonExecutionToolsOptions & {
51
51
  */
52
52
  readonly clientId: client_id;
53
53
  });
54
- /**
55
- * TODO: [🍜] !!!!!! Default remote remoteUrl and path for anonymous server
56
- */
@@ -11,7 +11,6 @@ import type { RemoteServerOptions } from './interfaces/RemoteServerOptions';
11
11
  */
12
12
  export declare function startRemoteServer(options: RemoteServerOptions): IDestroyable;
13
13
  /**
14
- * TODO: [🍜] !!!!!! Add anonymous option
15
14
  * TODO: [⚖] Expose the collection to be able to connect to same collection via createCollectionFromUrl
16
15
  * TODO: Handle progress - support streaming
17
16
  * TODO: [🗯] Do not hang up immediately but wait until client closes OR timeout
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@promptbook/openai",
3
- "version": "0.65.0-7",
3
+ "version": "0.65.0",
4
4
  "description": "Supercharge your use of large language models",
5
5
  "private": false,
6
6
  "sideEffects": false,
@@ -47,7 +47,7 @@
47
47
  "module": "./esm/index.es.js",
48
48
  "typings": "./esm/typings/src/_packages/openai.index.d.ts",
49
49
  "peerDependencies": {
50
- "@promptbook/core": "0.65.0-7"
50
+ "@promptbook/core": "0.65.0"
51
51
  },
52
52
  "dependencies": {
53
53
  "colors": "1.4.0",
package/umd/index.umd.js CHANGED
@@ -14,7 +14,7 @@
14
14
  /**
15
15
  * The version of the Promptbook library
16
16
  */
17
- var PROMPTBOOK_VERSION = '0.65.0-6';
17
+ var PROMPTBOOK_VERSION = '0.65.0-7';
18
18
  // TODO: !!!! List here all the versions and annotate + put into script
19
19
 
20
20
  /*! *****************************************************************************