@promptbook/cli 0.76.0 → 0.77.0-0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -8,7 +8,7 @@ import { deserializeError } from '../errors/utils/deserializeError';
8
8
  import { serializeError } from '../errors/utils/serializeError';
9
9
  import { forEachAsync } from '../execution/utils/forEachAsync';
10
10
  import { isValidJsonString } from '../formats/json/utils/isValidJsonString';
11
- import { $currentDate } from '../utils/$currentDate';
11
+ import { $getCurrentDate } from '../utils/$getCurrentDate';
12
12
  import { $isRunningInBrowser } from '../utils/environment/$isRunningInBrowser';
13
13
  import { $isRunningInNode } from '../utils/environment/$isRunningInNode';
14
14
  import { $isRunningInWebWorker } from '../utils/environment/$isRunningInWebWorker';
@@ -82,7 +82,7 @@ export { deserializeError };
82
82
  export { serializeError };
83
83
  export { forEachAsync };
84
84
  export { isValidJsonString };
85
- export { $currentDate };
85
+ export { $getCurrentDate };
86
86
  export { $isRunningInBrowser };
87
87
  export { $isRunningInNode };
88
88
  export { $isRunningInWebWorker };
@@ -1,2 +1,4 @@
1
1
  import { BOOK_LANGUAGE_VERSION, PROMPTBOOK_ENGINE_VERSION } from '../version';
2
+ import { createExecutionToolsFromVercelProvider } from '../llm-providers/vercel/createExecutionToolsFromVercelProvider';
2
3
  export { BOOK_LANGUAGE_VERSION, PROMPTBOOK_ENGINE_VERSION };
4
+ export { createExecutionToolsFromVercelProvider };
@@ -1,15 +1,24 @@
1
+ import type { string_user_id } from '../types/typeAliases';
1
2
  /**
2
3
  * @@@
3
4
  *
4
5
  * Note: Keep it public to allow people to make their own execution tools
5
6
  */
6
7
  export type CommonToolsOptions = {
8
+ /**
9
+ * A unique identifier representing your end-user
10
+ *
11
+ * Note: For example it can help to detect abuse
12
+ * For example for OpenAi @see https://platform.openai.com/docs/guides/safety-best-practices/end-user-ids
13
+ */
14
+ readonly userId?: string_user_id;
7
15
  /**
8
16
  * If true, the internal executions will be logged
9
17
  */
10
18
  readonly isVerbose?: boolean;
11
19
  };
12
20
  /**
21
+ * TODO: [🧠][🤚] Maybe allow overriding of `userId` for each prompt
13
22
  * TODO: [🈁] Maybe add here `isDeterministic`
14
23
  * TODO: [🧠][💙] Distinct between options passed into ExecutionTools and to ExecutionTools.execute
15
24
  */
@@ -18,7 +18,7 @@ export type CreateLlmToolsFromConfigurationOptions = {
18
18
  *
19
19
  * Note: This is passed to the LLM tools providers to identify misuse
20
20
  */
21
- readonly userId?: string_user_id | null;
21
+ readonly userId?: string_user_id;
22
22
  };
23
23
  /**
24
24
  * @@@
@@ -33,5 +33,5 @@ export type AzureOpenAiExecutionToolsOptions = CommonToolsOptions & {
33
33
  *
34
34
  * @see https://platform.openai.com/docs/guides/safety-best-practices/end-user-ids (document from OpenAI not Azure, but same concept)
35
35
  */
36
- readonly userId: string_user_id | null;
36
+ readonly userId?: string_user_id;
37
37
  };
@@ -1,6 +1,5 @@
1
1
  import type { ClientOptions } from 'openai';
2
2
  import type { CommonToolsOptions } from '../../execution/CommonToolsOptions';
3
- import type { string_user_id } from '../../types/typeAliases';
4
3
  /**
5
4
  * Options for `OpenAiExecutionTools`
6
5
  *
@@ -9,12 +8,4 @@ import type { string_user_id } from '../../types/typeAliases';
9
8
  *
10
9
  * @public exported from `@promptbook/openai`
11
10
  */
12
- export type OpenAiExecutionToolsOptions = CommonToolsOptions & ClientOptions & {
13
- /**
14
- * A unique identifier representing your end-user, which can help OpenAI to monitor
15
- * and detect abuse.
16
- *
17
- * @see https://platform.openai.com/docs/guides/safety-best-practices/end-user-ids
18
- */
19
- userId: string_user_id | null;
20
- };
11
+ export type OpenAiExecutionToolsOptions = CommonToolsOptions & ClientOptions;
@@ -24,7 +24,7 @@ export type PromptbookServer_ListModels_AnonymousRequest = {
24
24
  * Note: this is passed to the certain model providers to identify misuse
25
25
  * Note: In anonymous mode, there is no need to identify yourself, nor does it change the actual configuration of LLM Tools (unlike in application mode)
26
26
  */
27
- readonly userId: string_user_id | null;
27
+ readonly userId?: string_user_id;
28
28
  /**
29
29
  * Configuration for the LLM tools
30
30
  */
@@ -29,7 +29,7 @@ export type PromptbookServer_Prompt_AnonymousRequest = {
29
29
  * Note: this is passed to the certain model providers to identify misuse
30
30
  * Note: In anonymous mode, there is no need to identify yourself, nor does it change the actual configuration of LLM Tools (unlike in application mode)
31
31
  */
32
- readonly userId: string_user_id | null;
32
+ readonly userId?: string_user_id;
33
33
  /**
34
34
  * Configuration for the LLM tools
35
35
  */
@@ -41,7 +41,7 @@ export type RemoteLlmExecutionToolsOptions<TCustomOptions> = CommonToolsOptions
41
41
  * Note: This is passed to the certain model providers to identify misuse
42
42
  * Note: In anonymous mode, there is no need to identify yourself, nor does it change the actual configuration of LLM Tools (unlike in application mode).
43
43
  */
44
- readonly userId: string_user_id | null;
44
+ readonly userId?: string_user_id;
45
45
  } | ({
46
46
  /**
47
47
  * Use anonymous server with client identification and fixed collection
@@ -62,7 +62,7 @@ export type CollectionRemoteServerClientOptions<TCustomOptions> = {
62
62
  /**
63
63
  * @@@
64
64
  */
65
- readonly userId: string_user_id | null;
65
+ readonly userId?: string_user_id;
66
66
  /**
67
67
  * @@@
68
68
  */
@@ -0,0 +1,11 @@
1
+ import type { createOpenAI } from '@ai-sdk/openai';
2
+ import type { CommonToolsOptions } from '../../execution/CommonToolsOptions';
3
+ import type { LlmExecutionTools } from '../../execution/LlmExecutionTools';
4
+ type ProviderV1 = ReturnType<typeof createOpenAI>;
5
+ /**
6
+ * !!!!!!
7
+ *
8
+ * @public exported from `@promptbook/vercel`
9
+ */
10
+ export declare function createExecutionToolsFromVercelProvider(vercelProvider: ProviderV1, options?: CommonToolsOptions): LlmExecutionTools;
11
+ export {};
@@ -0,0 +1,6 @@
1
+ #!/usr/bin/env ts-node
2
+ export {};
3
+ /**
4
+ * TODO: [main] !!! Test here that `systemMessage`, `temperature` and `seed` are working correctly
5
+ * Note: [⚫] Code in this file should never be published in any package
6
+ */
@@ -7,4 +7,4 @@ import type { string_date_iso8601 } from '../types/typeAliases';
7
7
  * @returns string_date branded type
8
8
  * @public exported from `@promptbook/utils`
9
9
  */
10
- export declare function $currentDate(): string_date_iso8601;
10
+ export declare function $getCurrentDate(): string_date_iso8601;
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@promptbook/cli",
3
- "version": "0.76.0",
3
+ "version": "0.77.0-0",
4
4
  "description": "It's time for a paradigm shift. The future of software in plain English, French or Latin",
5
5
  "--note-0": " <- [🐊]",
6
6
  "private": false,
package/umd/index.umd.js CHANGED
@@ -49,7 +49,7 @@
49
49
  *
50
50
  * @see https://github.com/webgptorg/promptbook
51
51
  */
52
- var PROMPTBOOK_ENGINE_VERSION = '0.75.10';
52
+ var PROMPTBOOK_ENGINE_VERSION = '0.76.0';
53
53
  /**
54
54
  * TODO: string_promptbook_version should be constrained to the all versions of Promptbook engine
55
55
  * Note: [💞] Ignore a discrepancy between file name and entity name
@@ -5657,7 +5657,7 @@
5657
5657
  llmToolsWithUsage = countTotalUsage(llmTools);
5658
5658
  currentPreparation = {
5659
5659
  id: 1,
5660
- // TODO: [ðŸĨ]> date: $currentDate(),
5660
+ // TODO: [ðŸĨ]> date: $getCurrentDate(),
5661
5661
  promptbookVersion: PROMPTBOOK_ENGINE_VERSION,
5662
5662
  usage: ZERO_USAGE,
5663
5663
  };
@@ -10561,7 +10561,7 @@
10561
10561
  * @returns string_date branded type
10562
10562
  * @public exported from `@promptbook/utils`
10563
10563
  */
10564
- function $currentDate() {
10564
+ function $getCurrentDate() {
10565
10565
  return new Date().toISOString();
10566
10566
  }
10567
10567
 
@@ -10636,7 +10636,7 @@
10636
10636
  // TODO: [🧠] !!!!! How to do timing in mixed cache / non-cache situation
10637
10637
  // promptResult.timing: FromtoItems
10638
10638
  return [4 /*yield*/, storage.setItem(key, {
10639
- date: $currentDate(),
10639
+ date: $getCurrentDate(),
10640
10640
  promptbookVersion: PROMPTBOOK_ENGINE_VERSION,
10641
10641
  prompt: prompt,
10642
10642
  promptResult: promptResult,
@@ -12210,15 +12210,6 @@
12210
12210
  * TODO: [🧠] Maybe remove `@promptbook/remote-client` and just use `@promptbook/core`
12211
12211
  */
12212
12212
 
12213
- /**
12214
- * Get current date in ISO 8601 format
12215
- *
12216
- * @private internal utility
12217
- */
12218
- function getCurrentIsoDate() {
12219
- return new Date().toISOString();
12220
- }
12221
-
12222
12213
  /**
12223
12214
  * Function computeUsage will create price per one token based on the string value found on openai page
12224
12215
  *
@@ -12493,7 +12484,7 @@
12493
12484
  ],
12494
12485
  // TODO: Is here some equivalent of user identification?> user: this.options.user,
12495
12486
  };
12496
- start = getCurrentIsoDate();
12487
+ start = $getCurrentDate();
12497
12488
  if (this.options.isVerbose) {
12498
12489
  console.info(colors__default["default"].bgWhite('rawRequest'), JSON.stringify(rawRequest, null, 4));
12499
12490
  }
@@ -12520,7 +12511,7 @@
12520
12511
  }
12521
12512
  resultContent = contentBlock.text;
12522
12513
  // eslint-disable-next-line prefer-const
12523
- complete = getCurrentIsoDate();
12514
+ complete = $getCurrentDate();
12524
12515
  usage = computeAnthropicClaudeUsage(rawPromptContent || '', resultContent || '', rawResponse);
12525
12516
  return [2 /*return*/, $asDeeplyFrozenSerializableJson('AnthropicClaudeExecutionTools ChatPromptResult', {
12526
12517
  content: resultContent,
@@ -12569,7 +12560,7 @@
12569
12560
  prompt: rawPromptContent,
12570
12561
  user: this.options.user,
12571
12562
  };
12572
- const start: string_date_iso8601 = getCurrentIsoDate();
12563
+ const start: string_date_iso8601 = $getCurrentDate();
12573
12564
  let complete: string_date_iso8601;
12574
12565
 
12575
12566
  if (this.options.isVerbose) {
@@ -12598,7 +12589,7 @@
12598
12589
 
12599
12590
  const resultContent = rawResponse.choices[0].text;
12600
12591
  // eslint-disable-next-line prefer-const
12601
- complete = getCurrentIsoDate();
12592
+ complete = $getCurrentDate();
12602
12593
  const usage = { price: 'UNKNOWN', inputTokens: 0, outputTokens: 0 /* <- TODO: [🐞] Compute usage */ } satisfies PromptResultUsage;
12603
12594
 
12604
12595
 
@@ -12660,7 +12651,7 @@
12660
12651
  */
12661
12652
  var createAnthropicClaudeExecutionTools = Object.assign(function (options) {
12662
12653
  if (options.isProxied) {
12663
- return new RemoteLlmExecutionTools(__assign(__assign({}, options), { userId: null, isAnonymous: true, llmToolsConfiguration: [
12654
+ return new RemoteLlmExecutionTools(__assign(__assign({}, options), { isAnonymous: true, llmToolsConfiguration: [
12664
12655
  {
12665
12656
  title: 'Anthropic Claude (proxied)',
12666
12657
  packageName: '@promptbook/anthropic-claude',
@@ -13269,7 +13260,7 @@
13269
13260
  content: rawPromptContent,
13270
13261
  },
13271
13262
  ], false);
13272
- start = getCurrentIsoDate();
13263
+ start = $getCurrentDate();
13273
13264
  complete = void 0;
13274
13265
  if (this.options.isVerbose) {
13275
13266
  console.info(colors__default["default"].bgWhite('messages'), JSON.stringify(messages, null, 4));
@@ -13298,7 +13289,7 @@
13298
13289
  }
13299
13290
  resultContent = rawResponse.choices[0].message.content;
13300
13291
  // eslint-disable-next-line prefer-const
13301
- complete = getCurrentIsoDate();
13292
+ complete = $getCurrentDate();
13302
13293
  usage = {
13303
13294
  price: uncertainNumber() /* <- TODO: [🐞] Compute usage */,
13304
13295
  input: __assign({ tokensCount: uncertainNumber((_b = rawResponse.usage) === null || _b === void 0 ? void 0 : _b.promptTokens) }, computeUsageCounts(prompt.content)),
@@ -13359,7 +13350,7 @@
13359
13350
  // <- TODO: [🈁] Use `seed` here AND/OR use is `isDeterministic` for entire execution tools
13360
13351
  // <- Note: [🧆]
13361
13352
  };
13362
- start = getCurrentIsoDate();
13353
+ start = $getCurrentDate();
13363
13354
  complete = void 0;
13364
13355
  if (this.options.isVerbose) {
13365
13356
  console.info(colors__default["default"].bgWhite('content'), JSON.stringify(content, null, 4));
@@ -13391,7 +13382,7 @@
13391
13382
  }
13392
13383
  resultContent = rawResponse.choices[0].text;
13393
13384
  // eslint-disable-next-line prefer-const
13394
- complete = getCurrentIsoDate();
13385
+ complete = $getCurrentDate();
13395
13386
  usage = {
13396
13387
  price: uncertainNumber() /* <- TODO: [🐞] Compute usage */,
13397
13388
  input: __assign({ tokensCount: uncertainNumber((_b = rawResponse.usage) === null || _b === void 0 ? void 0 : _b.promptTokens) }, computeUsageCounts(prompt.content)),
@@ -13759,7 +13750,7 @@
13759
13750
  content: rawPromptContent,
13760
13751
  },
13761
13752
  ], false), user: (_a = this.options.userId) === null || _a === void 0 ? void 0 : _a.toString() });
13762
- start = getCurrentIsoDate();
13753
+ start = $getCurrentDate();
13763
13754
  if (this.options.isVerbose) {
13764
13755
  console.info(colors__default["default"].bgWhite('rawRequest'), JSON.stringify(rawRequest, null, 4));
13765
13756
  }
@@ -13783,7 +13774,7 @@
13783
13774
  }
13784
13775
  resultContent = rawResponse.choices[0].message.content;
13785
13776
  // eslint-disable-next-line prefer-const
13786
- complete = getCurrentIsoDate();
13777
+ complete = $getCurrentDate();
13787
13778
  usage = computeOpenAiUsage(content || '', resultContent || '', rawResponse);
13788
13779
  if (resultContent === null) {
13789
13780
  throw new PipelineExecutionError('No response message from OpenAI');
@@ -13838,7 +13829,7 @@
13838
13829
  };
13839
13830
  rawPromptContent = replaceParameters(content, __assign(__assign({}, parameters), { modelName: modelName }));
13840
13831
  rawRequest = __assign(__assign({}, modelSettings), { prompt: rawPromptContent, user: (_a = this.options.userId) === null || _a === void 0 ? void 0 : _a.toString() });
13841
- start = getCurrentIsoDate();
13832
+ start = $getCurrentDate();
13842
13833
  if (this.options.isVerbose) {
13843
13834
  console.info(colors__default["default"].bgWhite('rawRequest'), JSON.stringify(rawRequest, null, 4));
13844
13835
  }
@@ -13862,7 +13853,7 @@
13862
13853
  }
13863
13854
  resultContent = rawResponse.choices[0].text;
13864
13855
  // eslint-disable-next-line prefer-const
13865
- complete = getCurrentIsoDate();
13856
+ complete = $getCurrentDate();
13866
13857
  usage = computeOpenAiUsage(content || '', resultContent || '', rawResponse);
13867
13858
  return [2 /*return*/, $asDeeplyFrozenSerializableJson('OpenAiExecutionTools CompletionPromptResult', {
13868
13859
  content: resultContent,
@@ -13908,7 +13899,7 @@
13908
13899
  input: rawPromptContent,
13909
13900
  model: modelName,
13910
13901
  };
13911
- start = getCurrentIsoDate();
13902
+ start = $getCurrentDate();
13912
13903
  if (this.options.isVerbose) {
13913
13904
  console.info(colors__default["default"].bgWhite('rawRequest'), JSON.stringify(rawRequest, null, 4));
13914
13905
  }
@@ -13928,7 +13919,7 @@
13928
13919
  }
13929
13920
  resultContent = rawResponse.data[0].embedding;
13930
13921
  // eslint-disable-next-line prefer-const
13931
- complete = getCurrentIsoDate();
13922
+ complete = $getCurrentDate();
13932
13923
  usage = computeOpenAiUsage(content || '', '',
13933
13924
  // <- Note: Embedding does not have result content
13934
13925
  rawResponse);
@@ -14081,7 +14072,7 @@
14081
14072
  },
14082
14073
  // <- TODO: Add user identification here> user: this.options.user,
14083
14074
  };
14084
- start = getCurrentIsoDate();
14075
+ start = $getCurrentDate();
14085
14076
  if (this.options.isVerbose) {
14086
14077
  console.info(colors__default["default"].bgWhite('rawRequest'), JSON.stringify(rawRequest, null, 4));
14087
14078
  }
@@ -14132,7 +14123,7 @@
14132
14123
  resultContent = (_c = rawResponse[0].content[0]) === null || _c === void 0 ? void 0 : _c.text.value;
14133
14124
  // <- TODO: [🧠] There are also annotations, maybe use them
14134
14125
  // eslint-disable-next-line prefer-const
14135
- complete = getCurrentIsoDate();
14126
+ complete = $getCurrentDate();
14136
14127
  usage = UNCERTAIN_USAGE;
14137
14128
  // <- TODO: [🥘] Compute real usage for assistant
14138
14129
  // ?> const usage = computeOpenAiUsage(content, resultContent || '', rawResponse);