@promptbook/core 0.75.10 → 0.77.0-0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -23,6 +23,10 @@
23
23
 
24
24
 
25
25
 
26
+ <blockquote style="color: #ff8811">
27
+ <b>⚠ Warning:</b> This is a pre-release version of the library. It is not yet ready for production use. Please use the <a href="https://www.npmjs.com/package/@promptbook/core?activeTab=versions">latest stable release</a> instead.
28
+ </blockquote>
29
+
26
30
  ## 📦 Package `@promptbook/core`
27
31
 
28
32
  - Promptbooks are [divided into several](#-packages) packages, all are published from [single monorepo](https://github.com/webgptorg/promptbook).
@@ -240,6 +244,8 @@ Or you can install them separately:
240
244
  - **[@promptbook/execute-javascript](https://www.npmjs.com/package/@promptbook/execute-javascript)** - Execution tools for javascript inside promptbooks
241
245
  - **[@promptbook/openai](https://www.npmjs.com/package/@promptbook/openai)** - Execution tools for OpenAI API, wrapper around OpenAI SDK
242
246
  - **[@promptbook/anthropic-claude](https://www.npmjs.com/package/@promptbook/anthropic-claude)** - Execution tools for Anthropic Claude API, wrapper around Anthropic Claude SDK
247
+ - **[@promptbook/vercel](https://www.npmjs.com/package/@promptbook/vercel)** - Adapter for Vercel functionalities
248
+ - **[@promptbook/gemini](https://www.npmjs.com/package/@promptbook/gemini)** - Integration with Google's Gemini API
243
249
  - **[@promptbook/azure-openai](https://www.npmjs.com/package/@promptbook/azure-openai)** - Execution tools for Azure OpenAI API
244
250
  - **[@promptbook/langtail](https://www.npmjs.com/package/@promptbook/langtail)** - Execution tools for Langtail API, wrapper around Langtail SDK
245
251
  - **[@promptbook/fake-llm](https://www.npmjs.com/package/@promptbook/fake-llm)** - Mocked execution tools for testing the library and saving the tokens
@@ -259,11 +265,6 @@ Or you can install them separately:
259
265
 
260
266
  ## 📚 Dictionary
261
267
 
262
-
263
-
264
-
265
-
266
-
267
268
  ### 📚 Dictionary
268
269
 
269
270
  The following glossary is used to clarify certain concepts:
@@ -279,8 +280,6 @@ The following glossary is used to clarify certain concepts:
279
280
  - **Retrieval-augmented generation** is a machine learning paradigm where a model generates text by retrieving relevant information from a large database of text. This approach combines the benefits of generative models and retrieval models.
280
281
  - **Longtail** refers to non-common or rare events, items, or entities that are not well-represented in the training data of machine learning models. Longtail items are often challenging for models to predict accurately.
281
282
 
282
-
283
-
284
283
  _Note: This section is not a complete dictionary, but rather a list of general AI / LLM terms that have a connection with Promptbook_
285
284
 
286
285
  #### Promptbook core
@@ -341,8 +340,6 @@ _Note: Thos section is not complete dictionary, more list of general AI / LLM te
341
340
  - [👮 Agent adversary expectations](https://github.com/webgptorg/promptbook/discussions/39)
342
341
  - [view more](https://github.com/webgptorg/promptbook/discussions/categories/concepts)
343
342
 
344
-
345
-
346
343
  ### Terms specific to Promptbook TypeScript implementation
347
344
 
348
345
  - Anonymous mode
package/esm/index.es.js CHANGED
@@ -22,7 +22,7 @@ var BOOK_LANGUAGE_VERSION = '1.0.0';
22
22
  *
23
23
  * @see https://github.com/webgptorg/promptbook
24
24
  */
25
- var PROMPTBOOK_ENGINE_VERSION = '0.75.9';
25
+ var PROMPTBOOK_ENGINE_VERSION = '0.76.0';
26
26
  /**
27
27
  * TODO: string_promptbook_version should be constrained to the all versions of Promptbook engine
28
28
  * Note: [💞] Ignore a discrepancy between file name and entity name
@@ -5808,7 +5808,7 @@ function preparePipeline(pipeline, tools, options) {
5808
5808
  llmToolsWithUsage = countTotalUsage(llmTools);
5809
5809
  currentPreparation = {
5810
5810
  id: 1,
5811
- // TODO: [🍥]> date: $currentDate(),
5811
+ // TODO: [🍥]> date: $getCurrentDate(),
5812
5812
  promptbookVersion: PROMPTBOOK_ENGINE_VERSION,
5813
5813
  usage: ZERO_USAGE,
5814
5814
  };
@@ -9802,7 +9802,7 @@ var MemoryStorage = /** @class */ (function () {
9802
9802
  * @returns string_date branded type
9803
9803
  * @public exported from `@promptbook/utils`
9804
9804
  */
9805
- function $currentDate() {
9805
+ function $getCurrentDate() {
9806
9806
  return new Date().toISOString();
9807
9807
  }
9808
9808
 
@@ -9877,7 +9877,7 @@ function cacheLlmTools(llmTools, options) {
9877
9877
  // TODO: [🧠] !!!!! How to do timing in mixed cache / non-cache situation
9878
9878
  // promptResult.timing: FromtoItems
9879
9879
  return [4 /*yield*/, storage.setItem(key, {
9880
- date: $currentDate(),
9880
+ date: $getCurrentDate(),
9881
9881
  promptbookVersion: PROMPTBOOK_ENGINE_VERSION,
9882
9882
  prompt: prompt,
9883
9883
  promptResult: promptResult,