@promptbook/cli 0.98.0-9 → 0.98.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -25,10 +25,6 @@ Write AI applications using plain human language across multiple models and plat
 
 
 
- <blockquote style="color: #ff8811">
- <b>⚠ Warning:</b> This is a pre-release version of the library. It is not yet ready for production use. Please look at <a href="https://www.npmjs.com/package/@promptbook/core?activeTab=versions">latest stable release</a>.
- </blockquote>
-
  ## 📦 Package `@promptbook/cli`
 
  - Promptbooks are [divided into several](#-packages) packages, all are published from [single monorepo](https://github.com/webgptorg/promptbook).
package/esm/index.es.js CHANGED
@@ -47,7 +47,7 @@ const BOOK_LANGUAGE_VERSION = '1.0.0';
  * @generated
  * @see https://github.com/webgptorg/promptbook
  */
- const PROMPTBOOK_ENGINE_VERSION = '0.98.0-9';
+ const PROMPTBOOK_ENGINE_VERSION = '0.98.0';
  /**
  * TODO: string_promptbook_version should be constrained to the all versions of Promptbook engine
  * Note: [💞] Ignore a discrepancy between file name and entity name
@@ -3729,9 +3729,10 @@ function createLlmToolsFromConfiguration(configuration, options = {}) {
  .list()
  .find(({ packageName, className }) => llmConfiguration.packageName === packageName && llmConfiguration.className === className);
  if (registeredItem === undefined) {
- console.log('!!! $llmToolsRegister.list()', $llmToolsRegister.list());
+ // console.log('$llmToolsRegister.list()', $llmToolsRegister.list());
  throw new Error(spaceTrim((block) => `
  There is no constructor for LLM provider \`${llmConfiguration.className}\` from \`${llmConfiguration.packageName}\`
+ Running in ${!$isRunningInBrowser() ? '' : 'browser environment'}${!$isRunningInNode() ? '' : 'node environment'}${!$isRunningInWebWorker() ? '' : 'worker environment'}
 
  You have probably forgotten install and import the provider package.
  To fix this issue, you can:
@@ -14871,7 +14872,6 @@ function startRemoteServer(options) {
  catch (error) {
  assertsError(error);
  socket.emit('error', serializeError(error));
- // <- TODO: [🚋] There is a problem with the remote server handling errors and sending them back to the client
  }
  finally {
  socket.disconnect();
@@ -18164,6 +18164,7 @@ const _OpenAiCompatibleMetadataRegistration = $llmToolsMetadataRegister.register
  options: {
  apiKey: 'sk-',
  baseURL: 'https://api.openai.com/v1',
+ defaultModelName: 'gpt-4-turbo',
  isProxied: false,
  remoteServerUrl: DEFAULT_REMOTE_SERVER_URL,
  maxRequestsPerMinute: DEFAULT_MAX_REQUESTS_PER_MINUTE,