@promptbook/cli 0.98.0-9 → 0.98.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -15,7 +15,7 @@ export declare const BOOK_LANGUAGE_VERSION: string_semantic_version;
15
15
  export declare const PROMPTBOOK_ENGINE_VERSION: string_promptbook_version;
16
16
  /**
17
17
  * Represents the version string of the Promptbook engine.
18
- * It follows semantic versioning (e.g., `0.98.0-8`).
18
+ * It follows semantic versioning (e.g., `0.98.0`).
19
19
  *
20
20
  * @generated
21
21
  */
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@promptbook/cli",
3
- "version": "0.98.0-9",
3
+ "version": "0.98.0",
4
4
  "description": "Promptbook: Run AI apps in plain human language across multiple models and platforms",
5
5
  "private": false,
6
6
  "sideEffects": false,
@@ -15,51 +15,73 @@
15
15
  ],
16
16
  "keywords": [
17
17
  "ai",
18
- "llm",
19
- "prompt",
20
- "template",
18
+ "ai-agents",
19
+ "ai-application-framework",
20
+ "ai-assistant",
21
+ "ai-automation",
22
+ "ai-development",
23
+ "ai-framework",
24
+ "ai-ops",
25
+ "ai-orchestration",
26
+ "ai-pipeline",
27
+ "ai-platform",
28
+ "ai-scripting",
29
+ "ai-sdk",
30
+ "ai-workflow",
31
+ "api-integration",
32
+ "automation",
33
+ "automation-framework",
34
+ "book-language",
35
+ "browser",
36
+ "chatbot",
37
+ "cli",
38
+ "cli-tool",
39
+ "command-line",
40
+ "content-generation",
41
+ "conversational-ai",
42
+ "cross-platform",
43
+ "cross-provider",
44
+ "developer-tools",
45
+ "embeddings",
46
+ "function-calling",
47
+ "generative-ai",
48
+ "human-readable",
49
+ "javascript",
50
+ "knowledge-base",
21
51
  "language-model",
22
- "chatgpt",
23
- "autogpt",
52
+ "large-language-models",
53
+ "llm",
54
+ "llmops",
24
55
  "machine-learning",
56
+ "markdown-dsl",
57
+ "mlops",
58
+ "model-agnostic",
59
+ "multi-model",
60
+ "multimodal",
61
+ "natural-language",
25
62
  "natural-language-processing",
26
63
  "nlp",
27
- "openai",
28
- "o3",
29
- "o3-mini",
30
- "deepseek",
31
- "gpt-3",
32
- "gpt-4",
33
- "gpt-4o",
34
- "gpt-4o-mini",
35
- "o1",
36
- "o1-mini",
37
- "o1-preview",
38
- "anthropic",
39
- "claude",
40
- "claude-3",
41
- "claude-3-opus",
42
- "claude-3-sonnet",
43
- "claude-3-haiku",
44
- "gemini",
45
- "gemini-pro",
46
- "gemini-flash",
47
- "mixtral",
48
- "mistral",
49
- "ollama",
50
- "ai-orchestration",
64
+ "nodejs",
65
+ "orchestration",
66
+ "pipeline",
67
+ "plain-english",
68
+ "prompt",
69
+ "prompt-chaining",
51
70
  "prompt-engineering",
52
- "llmops",
53
- "multimodal",
54
- "reasoning",
71
+ "prompt-management",
72
+ "prompt-template",
55
73
  "rag",
56
- "embeddings",
57
- "function-calling",
58
- "large-language-models",
59
- "ai-application-framework",
74
+ "reasoning",
75
+ "task-automation",
76
+ "template",
77
+ "terminal",
60
78
  "text-generation",
61
- "ai-agents",
62
- "LLMOps"
79
+ "text-processing",
80
+ "typescript",
81
+ "unified-interface",
82
+ "vendor-agnostic",
83
+ "workflow",
84
+ "workflow-engine"
63
85
  ],
64
86
  "license": "BUSL-1.1",
65
87
  "bugs": {
package/umd/index.umd.js CHANGED
@@ -57,7 +57,7 @@
57
57
  * @generated
58
58
  * @see https://github.com/webgptorg/promptbook
59
59
  */
60
- const PROMPTBOOK_ENGINE_VERSION = '0.98.0-9';
60
+ const PROMPTBOOK_ENGINE_VERSION = '0.98.0';
61
61
  /**
62
62
  * TODO: string_promptbook_version should be constrained to the all versions of Promptbook engine
63
63
  * Note: [💞] Ignore a discrepancy between file name and entity name
@@ -3739,9 +3739,10 @@
3739
3739
  .list()
3740
3740
  .find(({ packageName, className }) => llmConfiguration.packageName === packageName && llmConfiguration.className === className);
3741
3741
  if (registeredItem === undefined) {
3742
- console.log('!!! $llmToolsRegister.list()', $llmToolsRegister.list());
3742
+ // console.log('$llmToolsRegister.list()', $llmToolsRegister.list());
3743
3743
  throw new Error(spaceTrim__default["default"]((block) => `
3744
3744
  There is no constructor for LLM provider \`${llmConfiguration.className}\` from \`${llmConfiguration.packageName}\`
3745
+ Running in ${!$isRunningInBrowser() ? '' : 'browser environment'}${!$isRunningInNode() ? '' : 'node environment'}${!$isRunningInWebWorker() ? '' : 'worker environment'}
3745
3746
 
3746
3747
  You have probably forgotten install and import the provider package.
3747
3748
  To fix this issue, you can:
@@ -14881,7 +14882,6 @@
14881
14882
  catch (error) {
14882
14883
  assertsError(error);
14883
14884
  socket.emit('error', serializeError(error));
14884
- // <- TODO: [🚋] There is a problem with the remote server handling errors and sending them back to the client
14885
14885
  }
14886
14886
  finally {
14887
14887
  socket.disconnect();
@@ -18174,6 +18174,7 @@
18174
18174
  options: {
18175
18175
  apiKey: 'sk-',
18176
18176
  baseURL: 'https://api.openai.com/v1',
18177
+ defaultModelName: 'gpt-4-turbo',
18177
18178
  isProxied: false,
18178
18179
  remoteServerUrl: DEFAULT_REMOTE_SERVER_URL,
18179
18180
  maxRequestsPerMinute: DEFAULT_MAX_REQUESTS_PER_MINUTE,