@promptbook/wizard 0.101.0-20 → 0.101.0-21

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (51)
  1. package/esm/index.es.js +95 -77
  2. package/esm/index.es.js.map +1 -1
  3. package/esm/typings/src/book-2.0/commitments/ACTION/ACTION.d.ts +0 -12
  4. package/esm/typings/src/book-2.0/commitments/DELETE/DELETE.d.ts +0 -24
  5. package/esm/typings/src/book-2.0/commitments/FORMAT/FORMAT.d.ts +0 -12
  6. package/esm/typings/src/book-2.0/commitments/GOAL/GOAL.d.ts +0 -12
  7. package/esm/typings/src/book-2.0/commitments/KNOWLEDGE/KNOWLEDGE.d.ts +0 -6
  8. package/esm/typings/src/book-2.0/commitments/MEMORY/MEMORY.d.ts +0 -12
  9. package/esm/typings/src/book-2.0/commitments/MESSAGE/MESSAGE.d.ts +0 -12
  10. package/esm/typings/src/book-2.0/commitments/META/META.d.ts +0 -6
  11. package/esm/typings/src/book-2.0/commitments/META_IMAGE/META_IMAGE.d.ts +0 -6
  12. package/esm/typings/src/book-2.0/commitments/META_LINK/META_LINK.d.ts +0 -6
  13. package/esm/typings/src/book-2.0/commitments/MODEL/MODEL.d.ts +0 -12
  14. package/esm/typings/src/book-2.0/commitments/NOTE/NOTE.d.ts +0 -24
  15. package/esm/typings/src/book-2.0/commitments/PERSONA/PERSONA.d.ts +0 -12
  16. package/esm/typings/src/book-2.0/commitments/RULE/RULE.d.ts +0 -12
  17. package/esm/typings/src/book-2.0/commitments/SAMPLE/SAMPLE.d.ts +0 -12
  18. package/esm/typings/src/book-2.0/commitments/SCENARIO/SCENARIO.d.ts +0 -12
  19. package/esm/typings/src/book-2.0/commitments/STYLE/STYLE.d.ts +0 -12
  20. package/esm/typings/src/book-2.0/commitments/_base/createEmptyAgentModelRequirements.d.ts +1 -1
  21. package/esm/typings/src/book-components/AvatarProfile/AvatarChip/AvatarChip.d.ts +3 -0
  22. package/esm/typings/src/book-components/AvatarProfile/AvatarProfile/AvatarProfile.d.ts +3 -0
  23. package/esm/typings/src/book-components/BookEditor/BookEditor.d.ts +3 -0
  24. package/esm/typings/src/book-components/BookEditor/BookEditorInner.d.ts +2 -16
  25. package/esm/typings/src/book-components/Chat/Chat/ChatProps.d.ts +3 -0
  26. package/esm/typings/src/execution/PromptResult.d.ts +2 -4
  27. package/esm/typings/src/llm-providers/_multiple/MultipleLlmExecutionTools.d.ts +2 -5
  28. package/esm/typings/src/llm-providers/agent/AgentLlmExecutionTools.d.ts +6 -2
  29. package/esm/typings/src/llm-providers/anthropic-claude/AnthropicClaudeExecutionTools.d.ts +3 -8
  30. package/esm/typings/src/llm-providers/azure-openai/AzureOpenAiExecutionTools.d.ts +4 -5
  31. package/esm/typings/src/llm-providers/mocked/MockedEchoLlmExecutionTools.d.ts +2 -5
  32. package/esm/typings/src/llm-providers/mocked/MockedFackedLlmExecutionTools.d.ts +2 -0
  33. package/esm/typings/src/llm-providers/mocked/test/joker.test.d.ts +4 -0
  34. package/esm/typings/src/llm-providers/mocked/test/mocked-chat.test.d.ts +5 -0
  35. package/esm/typings/src/llm-providers/mocked/test/mocked-completion.test.d.ts +4 -0
  36. package/esm/typings/src/llm-providers/ollama/OllamaExecutionTools.d.ts +3 -3
  37. package/esm/typings/src/llm-providers/openai/OpenAiCompatibleExecutionTools.d.ts +1 -0
  38. package/esm/typings/src/llm-providers/openai/OpenAiExecutionTools.d.ts +3 -8
  39. package/esm/typings/src/llm-providers/remote/RemoteLlmExecutionTools.d.ts +5 -14
  40. package/esm/typings/src/scripting/_test/postprocessing.test.d.ts +1 -0
  41. package/esm/typings/src/utils/markdown/humanizeAiText.d.ts +0 -1
  42. package/esm/typings/src/utils/markdown/promptbookifyAiText.d.ts +2 -2
  43. package/esm/typings/src/version.d.ts +1 -1
  44. package/package.json +2 -2
  45. package/umd/index.umd.js +95 -77
  46. package/umd/index.umd.js.map +1 -1
  47. package/esm/typings/src/book-components/Chat/examples/ChatMarkdownDemo.d.ts +0 -16
  48. package/esm/typings/src/expectations/drafts/isDomainNameFree.d.ts +0 -10
  49. package/esm/typings/src/expectations/drafts/isGithubNameFree.d.ts +0 -10
  50. package/esm/typings/src/llm-providers/_common/profiles/llmProviderProfiles.d.ts +0 -81
  51. /package/esm/typings/src/{llm-providers/_common/profiles/test/llmProviderProfiles.test.d.ts → cli/test/ptbk.test.d.ts} +0 -0
package/esm/index.es.js CHANGED
@@ -36,7 +36,7 @@ const BOOK_LANGUAGE_VERSION = '1.0.0';
  * @generated
  * @see https://github.com/webgptorg/promptbook
  */
- const PROMPTBOOK_ENGINE_VERSION = '0.101.0-20';
+ const PROMPTBOOK_ENGINE_VERSION = '0.101.0-21';
  /**
  * TODO: string_promptbook_version should be constrained to the all versions of Promptbook engine
  * Note: [💞] Ignore a discrepancy between file name and entity name
@@ -1337,76 +1337,6 @@ function deserializeError(error) {
  return deserializedError;
  }
 
- /**
- * Predefined profiles for LLM providers to maintain consistency across the application
- * These profiles represent each provider as a virtual persona in chat interfaces
- *
- * @private !!!!
- */
- const LLM_PROVIDER_PROFILES = {
- OPENAI: {
- name: 'OPENAI',
- fullname: 'OpenAI GPT',
- color: '#10a37f', // OpenAI's signature green
- // Note: avatarSrc could be added when we have provider logos available
- },
- ANTHROPIC: {
- name: 'ANTHROPIC',
- fullname: 'Anthropic Claude',
- color: '#d97706', // Anthropic's orange/amber color
- },
- AZURE_OPENAI: {
- name: 'AZURE_OPENAI',
- fullname: 'Azure OpenAI',
- color: '#0078d4', // Microsoft Azure blue
- },
- GOOGLE: {
- name: 'GOOGLE',
- fullname: 'Google Gemini',
- color: '#4285f4', // Google blue
- },
- DEEPSEEK: {
- name: 'DEEPSEEK',
- fullname: 'DeepSeek',
- color: '#7c3aed', // Purple color for DeepSeek
- },
- OLLAMA: {
- name: 'OLLAMA',
- fullname: 'Ollama',
- color: '#059669', // Emerald green for local models
- },
- REMOTE: {
- name: 'REMOTE',
- fullname: 'Remote Server',
- color: '#6b7280', // Gray for remote/proxy connections
- },
- MOCKED_ECHO: {
- name: 'MOCKED_ECHO',
- fullname: 'Echo (Test)',
- color: '#8b5cf6', // Purple for test/mock tools
- },
- MOCKED_FAKE: {
- name: 'MOCKED_FAKE',
- fullname: 'Fake LLM (Test)',
- color: '#ec4899', // Pink for fake/test tools
- },
- VERCEL: {
- name: 'VERCEL',
- fullname: 'Vercel AI',
- color: '#000000', // Vercel's black
- },
- MULTIPLE: {
- name: 'MULTIPLE',
- fullname: 'Multiple Providers',
- color: '#6366f1', // Indigo for combined/multiple providers
- },
- };
- /**
- * TODO: Refactor this - each profile must be alongside the provider definition
- * TODO: [🕛] Unite `AgentBasicInformation`, `ChatParticipant`, `LlmExecutionTools` + `LlmToolsMetadata`
- * Note: [💞] Ignore a discrepancy between file name and entity name
- */
-
  /**
  * Tests if given string is valid URL.
  *
@@ -1500,6 +1430,14 @@ async function createRemoteClient(options) {
  function keepUnused(...valuesToKeep) {
  }
 
+ /**
+ * Profile for Remote provider
+ */
+ const REMOTE_PROVIDER_PROFILE = {
+ name: 'REMOTE',
+ fullname: 'Remote Server',
+ color: '#6b7280',
+ };
  /**
  * Remote server is a proxy server that uses its execution tools internally and exposes the executor interface externally.
  *
@@ -1522,7 +1460,7 @@ class RemoteLlmExecutionTools {
  return `Models from Promptbook remote server ${this.options.remoteServerUrl}`;
  }
  get profile() {
- return LLM_PROVIDER_PROFILES.REMOTE;
+ return REMOTE_PROVIDER_PROFILE;
  }
  /**
  * Check the configuration of all execution tools
@@ -2495,6 +2433,14 @@ resultContent, rawResponse) {
  * TODO: [ðŸĪ] DRY Maybe some common abstraction between `computeOpenAiUsage` and `computeAnthropicClaudeUsage`
  */
 
+ /**
+ * Profile for Anthropic Claude provider
+ */
+ const ANTHROPIC_PROVIDER_PROFILE = {
+ name: 'ANTHROPIC',
+ fullname: 'Anthropic Claude',
+ color: '#d97706',
+ };
  /**
  * Execution Tools for calling Anthropic Claude API.
  *
@@ -2523,7 +2469,7 @@ class AnthropicClaudeExecutionTools {
  return 'Use all models provided by Anthropic Claude';
  }
  get profile() {
- return LLM_PROVIDER_PROFILES.ANTHROPIC;
+ return ANTHROPIC_PROVIDER_PROFILE;
  }
  async getClient() {
  if (this.client === null) {
@@ -3378,6 +3324,14 @@ const OPENAI_MODELS = exportJson({
  * Note: [💞] Ignore a discrepancy between file name and entity name
  */
 
+ /**
+ * Profile for Azure OpenAI provider
+ */
+ const AZURE_OPENAI_PROVIDER_PROFILE = {
+ name: 'AZURE_OPENAI',
+ fullname: 'Azure OpenAI',
+ color: '#0078d4',
+ };
  /**
  * Execution Tools for calling Azure OpenAI API.
  *
@@ -3406,6 +3360,9 @@ class AzureOpenAiExecutionTools {
  get description() {
  return 'Use all models trained by OpenAI provided by Azure';
  }
+ get profile() {
+ return AZURE_OPENAI_PROVIDER_PROFILE;
+ }
  async getClient() {
  if (this.client === null) {
  this.client = new OpenAIClient(`https://${this.options.resourceName}.openai.azure.com/`, new AzureKeyCredential(this.options.apiKey));
@@ -3803,6 +3760,14 @@ function asSerializable(value) {
  }
  }
 
+ /**
+ * Profile for Vercel AI adapter
+ */
+ const VERCEL_PROVIDER_PROFILE = {
+ name: 'VERCEL',
+ fullname: 'Vercel AI',
+ color: '#000000',
+ };
  /**
  * Adapter which creates Promptbook execution tools from Vercel provider
  *
@@ -3825,6 +3790,7 @@ function createExecutionToolsFromVercelProvider(options) {
  return {
  title,
  description,
+ profile: VERCEL_PROVIDER_PROFILE,
  checkConfiguration() {
  // Note: There is no way how to check configuration of Vercel provider
  return Promise.resolve();
@@ -4106,6 +4072,14 @@ const DEEPSEEK_MODELS = exportJson({
  * Note: [💞] Ignore a discrepancy between file name and entity name
  */
 
+ /**
+ * Profile for Deepseek provider
+ */
+ const DEEPSEEK_PROVIDER_PROFILE = {
+ name: 'DEEPSEEK',
+ fullname: 'DeepSeek',
+ color: '#7c3aed',
+ };
  /**
  * Execution Tools for calling Deepseek API.
  *
@@ -4123,13 +4097,17 @@ const createDeepseekExecutionTools = Object.assign((options) => {
  ...options,
  // apiKey: process.env.DEEPSEEK_GENERATIVE_AI_API_KEY,
  });
- return createExecutionToolsFromVercelProvider({
+ const baseTools = createExecutionToolsFromVercelProvider({
  title: 'Deepseek',
  description: 'Implementation of Deepseek models',
  vercelProvider: deepseekVercelProvider,
  availableModels: DEEPSEEK_MODELS,
  ...options,
  });
+ return {
+ ...baseTools,
+ profile: DEEPSEEK_PROVIDER_PROFILE,
+ };
  }, {
  packageName: '@promptbook/deepseek',
  className: 'DeepseekExecutionTools',
@@ -4431,6 +4409,14 @@ const GOOGLE_MODELS = exportJson({
  * Note: [💞] Ignore a discrepancy between file name and entity name
  */
 
+ /**
+ * Profile for Google Gemini provider
+ */
+ const GOOGLE_PROVIDER_PROFILE = {
+ name: 'GOOGLE',
+ fullname: 'Google Gemini',
+ color: '#4285f4',
+ };
  /**
  * Execution Tools for calling Google Gemini API.
  *
@@ -4448,13 +4434,17 @@ const createGoogleExecutionTools = Object.assign((options) => {
  ...options,
  /// apiKey: process.env.GOOGLE_GENERATIVE_AI_API_KEY,
  });
- return createExecutionToolsFromVercelProvider({
+ const baseTools = createExecutionToolsFromVercelProvider({
  title: 'Google',
  description: 'Implementation of Google models',
  vercelProvider: googleGeminiVercelProvider,
  availableModels: GOOGLE_MODELS,
  ...options,
  });
+ return {
+ ...baseTools,
+ profile: GOOGLE_PROVIDER_PROFILE,
+ };
  }, {
  packageName: '@promptbook/google',
  className: 'GoogleExecutionTools',
@@ -5165,6 +5155,7 @@ class OpenAiCompatibleExecutionTools {
  * TODO: [🛄] Maybe make custom `OpenAiCompatibleError`
  * TODO: [🧠][🈁] Maybe use `isDeterministic` from options
  * TODO: [🧠][🌰] Allow to pass `title` for tracking purposes
+ * TODO: [🧠][ðŸĶĒ] Make reverse adapter from LlmExecutionTools to OpenAI-compatible:
  */
 
  /**
@@ -5420,6 +5411,14 @@ const OLLAMA_MODELS = exportJson({
  * Note: [💞] Ignore a discrepancy between file name and entity name
  */
 
+ /**
+ * Profile for Ollama provider
+ */
+ const OLLAMA_PROVIDER_PROFILE = {
+ name: 'OLLAMA',
+ fullname: 'Ollama',
+ color: '#059669',
+ };
  /**
  * Execution Tools for calling Ollama API
  *
@@ -5442,6 +5441,9 @@ class OllamaExecutionTools extends OpenAiCompatibleExecutionTools {
  get description() {
  return 'Use all models provided by Ollama';
  }
+ get profile() {
+ return OLLAMA_PROVIDER_PROFILE;
+ }
  /**
  * List all available models (non dynamically)
  *
@@ -5645,6 +5647,14 @@ const _OpenAiCompatibleMetadataRegistration = $llmToolsMetadataRegister.register
  * Note: [💞] Ignore a discrepancy between file name and entity name
  */
 
+ /**
+ * Profile for OpenAI provider
+ */
+ const OPENAI_PROVIDER_PROFILE = {
+ name: 'OPENAI',
+ fullname: 'OpenAI GPT',
+ color: '#10a37f',
+ };
  /**
  * Execution Tools for calling OpenAI API
  *
@@ -5667,7 +5677,7 @@ class OpenAiExecutionTools extends OpenAiCompatibleExecutionTools {
  return 'Use all models provided by OpenAI';
  }
  get profile() {
- return LLM_PROVIDER_PROFILES.OPENAI;
+ return OPENAI_PROVIDER_PROFILE;
  }
  /*
  Note: Commenting this out to avoid circular dependency
@@ -7863,6 +7873,14 @@ function arrayableToArray(input) {
  return [input];
  }
 
+ /**
+ * Profile for Multiple providers aggregation
+ */
+ const MULTIPLE_PROVIDER_PROFILE = {
+ name: 'MULTIPLE',
+ fullname: 'Multiple Providers',
+ color: '#6366f1',
+ };
  /**
  * Multiple LLM Execution Tools is a proxy server that uses multiple execution tools internally and exposes the executor interface externally.
  *
@@ -7898,7 +7916,7 @@ class MultipleLlmExecutionTools {
  `);
  }
  get profile() {
- return LLM_PROVIDER_PROFILES.MULTIPLE;
+ return MULTIPLE_PROVIDER_PROFILE;
  }
  /**
  * Check the configuration of all execution tools
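
A minimal sketch of how the per-provider profile objects introduced above (OPENAI_PROVIDER_PROFILE, REMOTE_PROVIDER_PROFILE, etc.) might be consumed through the `profile` getter, assuming they keep the `{ name, fullname, color }` shape shown in the diff; the ProviderProfile type name and the describeProvider helper below are illustrative assumptions, not part of the package:

// Illustrative TypeScript sketch, not shipped in @promptbook/wizard.
// The shape mirrors the profile constants added in this version.
type ProviderProfile = {
    name: string;     // e.g. 'OPENAI'
    fullname: string; // e.g. 'OpenAI GPT'
    color: string;    // e.g. '#10a37f'
};

function describeProvider(tools: { title: string; profile?: ProviderProfile }): string {
    // Fall back to the tools title when a provider exposes no profile
    const label = tools.profile?.fullname ?? tools.title;
    const color = tools.profile?.color ?? 'no color';
    return `${label} (${color})`;
}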