@promptbook/wizard 0.98.0-8 → 0.98.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -15,7 +15,7 @@ export declare const BOOK_LANGUAGE_VERSION: string_semantic_version;
  export declare const PROMPTBOOK_ENGINE_VERSION: string_promptbook_version;
  /**
  * Represents the version string of the Promptbook engine.
- * It follows semantic versioning (e.g., `0.98.0-7`).
+ * It follows semantic versioning (e.g., `0.98.0-10`).
  *
  * @generated
  */
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@promptbook/wizard",
- "version": "0.98.0-8",
+ "version": "0.98.0",
  "description": "Promptbook: Run AI apps in plain human language across multiple models and platforms",
  "private": false,
  "sideEffects": false,
@@ -15,51 +15,72 @@
  ],
  "keywords": [
  "ai",
- "llm",
- "prompt",
- "template",
+ "ai-agents",
+ "ai-application-framework",
+ "ai-assistant",
+ "ai-automation",
+ "ai-development",
+ "ai-framework",
+ "ai-ops",
+ "ai-orchestration",
+ "ai-pipeline",
+ "ai-platform",
+ "ai-scripting",
+ "ai-sdk",
+ "ai-workflow",
+ "api-integration",
+ "automation-framework",
+ "book-language",
+ "browser",
+ "chatbot",
+ "configuration",
+ "content-generation",
+ "conversational-ai",
+ "cross-platform",
+ "cross-provider",
+ "developer-tools",
+ "embeddings",
+ "function-calling",
+ "generative-ai",
+ "getting-started",
+ "human-readable",
+ "javascript",
+ "knowledge-base",
  "language-model",
- "chatgpt",
- "autogpt",
+ "large-language-models",
+ "llm",
+ "llmops",
  "machine-learning",
+ "markdown-dsl",
+ "mlops",
+ "model-agnostic",
+ "multi-model",
+ "multimodal",
+ "natural-language",
  "natural-language-processing",
  "nlp",
- "openai",
- "o3",
- "o3-mini",
- "deepseek",
- "gpt-3",
- "gpt-4",
- "gpt-4o",
- "gpt-4o-mini",
- "o1",
- "o1-mini",
- "o1-preview",
- "anthropic",
- "claude",
- "claude-3",
- "claude-3-opus",
- "claude-3-sonnet",
- "claude-3-haiku",
- "gemini",
- "gemini-pro",
- "gemini-flash",
- "mixtral",
- "mistral",
- "ollama",
- "ai-orchestration",
+ "nodejs",
+ "orchestration",
+ "pipeline",
+ "plain-english",
+ "prompt",
+ "prompt-chaining",
  "prompt-engineering",
- "llmops",
- "multimodal",
- "reasoning",
+ "prompt-management",
+ "prompt-template",
  "rag",
- "embeddings",
- "function-calling",
- "large-language-models",
- "ai-application-framework",
+ "reasoning",
+ "setup",
+ "task-automation",
+ "template",
  "text-generation",
- "ai-agents",
- "LLMOps"
+ "text-processing",
+ "typescript",
+ "unified-interface",
+ "vendor-agnostic",
+ "wizard",
+ "workflow",
+ "workflow-engine"
  ],
  "license": "BUSL-1.1",
  "bugs": {
@@ -74,7 +95,7 @@
  "module": "./esm/index.es.js",
  "typings": "./esm/typings/src/_packages/wizard.index.d.ts",
  "peerDependencies": {
- "@promptbook/core": "0.98.0-8"
+ "@promptbook/core": "0.98.0"
  },
  "dependencies": {
  "@ai-sdk/deepseek": "0.1.6",
package/umd/index.umd.js CHANGED
@@ -49,7 +49,7 @@
  * @generated
  * @see https://github.com/webgptorg/promptbook
  */
- const PROMPTBOOK_ENGINE_VERSION = '0.98.0-8';
+ const PROMPTBOOK_ENGINE_VERSION = '0.98.0';
  /**
  * TODO: string_promptbook_version should be constrained to the all versions of Promptbook engine
  * Note: [💞] Ignore a discrepancy between file name and entity name
@@ -5124,6 +5124,7 @@
  options: {
  apiKey: 'sk-',
  baseURL: 'https://api.openai.com/v1',
+ defaultModelName: 'gpt-4-turbo',
  isProxied: false,
  remoteServerUrl: DEFAULT_REMOTE_SERVER_URL,
  maxRequestsPerMinute: DEFAULT_MAX_REQUESTS_PER_MINUTE,
@@ -5131,27 +5132,6 @@
  };
  },
  createConfigurationFromEnv(env) {
- // Note: OpenAiCompatibleExecutionTools is an abstract class and cannot be instantiated directly
- // However, we can provide configuration for users who want to manually instantiate it
- if (typeof env.OPENAI_API_KEY === 'string') {
- const options = {
- apiKey: env.OPENAI_API_KEY,
- isProxied: false,
- remoteServerUrl: DEFAULT_REMOTE_SERVER_URL,
- maxRequestsPerMinute: DEFAULT_MAX_REQUESTS_PER_MINUTE,
- defaultModelName: 'gpt-4-turbo',
- };
- // Add baseURL if provided in environment
- if (typeof env.OPENAI_BASE_URL === 'string') {
- options.baseURL = env.OPENAI_BASE_URL;
- }
- return {
- title: 'Open AI Compatible (from env)',
- packageName: '@promptbook/openai',
- className: 'OpenAiCompatibleExecutionTools',
- options,
- };
- }
  return null;
  },
  });
@@ -12162,8 +12142,10 @@
  .list()
  .find(({ packageName, className }) => llmConfiguration.packageName === packageName && llmConfiguration.className === className);
  if (registeredItem === undefined) {
+ // console.log('$llmToolsRegister.list()', $llmToolsRegister.list());
  throw new Error(spaceTrim__default["default"]((block) => `
  There is no constructor for LLM provider \`${llmConfiguration.className}\` from \`${llmConfiguration.packageName}\`
+ Running in ${!$isRunningInBrowser() ? '' : 'browser environment'}${!$isRunningInNode() ? '' : 'node environment'}${!$isRunningInWebWorker() ? '' : 'worker environment'}

  You have probably forgotten install and import the provider package.
  To fix this issue, you can: