@promptbook/editable 0.104.0-6 → 0.104.0-7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -206,6 +206,7 @@ import type { string_char_emoji } from '../types/typeAliasEmoji';
  import type { string_business_category_name } from '../types/typeAliases';
  import type { string_model_name } from '../types/typeAliases';
  import type { string_prompt } from '../types/typeAliases';
+ import type { string_prompt_image } from '../types/typeAliases';
  import type { string_template } from '../types/typeAliases';
  import type { string_text_prompt } from '../types/typeAliases';
  import type { string_chat_prompt } from '../types/typeAliases';
@@ -573,6 +574,7 @@ export type { string_char_emoji };
  export type { string_business_category_name };
  export type { string_model_name };
  export type { string_prompt };
+ export type { string_prompt_image };
  export type { string_template };
  export type { string_text_prompt };
  export type { string_chat_prompt };
@@ -7,7 +7,7 @@ import type { string_agent_name, string_agent_permanent_id, string_url_image } f
  *
  * @public exported from `@promptbook/core`
  */
- export declare function generatePlaceholderAgentProfileImageUrl(agentIdOrName: string_agent_permanent_id | string_agent_name): string_url_image;
+ export declare function generatePlaceholderAgentProfileImageUrl(agentIdOrName: string_agent_permanent_id | string_agent_name, agentsServerUrl?: URL): string_url_image;
  /**
  * TODO: [🤹] Figure out best placeholder image generator https://i.pravatar.cc/1000?u=568
  */
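A minimal usage sketch of the new optional `agentsServerUrl` parameter, based only on the declaration above; the agent name and server URL below are hypothetical placeholders:

import { generatePlaceholderAgentProfileImageUrl } from '@promptbook/core';

// Without the new parameter the call looks exactly as before
const defaultImageUrl = generatePlaceholderAgentProfileImageUrl('joker-agent');

// With the new optional parameter the placeholder can be resolved against a specific agents server (hypothetical URL)
const customImageUrl = generatePlaceholderAgentProfileImageUrl('joker-agent', new URL('https://agents.example.com/'));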
@@ -17,7 +17,17 @@ export type CompletionModelRequirements = CommonModelRequirements & {
  /**
  * Completion model variant
  */
- modelVariant: 'COMPLETION';
+ readonly modelVariant: 'COMPLETION';
+ /**
+ * The temperature of the model
+ *
+ * Note: [💱] Promptbook is using just `temperature` (not `top_k` and `top_p`)
+ */
+ readonly temperature?: number_model_temperature;
+ /**
+ * Maximum number of tokens that can be generated by the model
+ */
+ readonly maxTokens?: number;
  };
  /**
  * Model requirements for the chat variant
@@ -28,11 +38,21 @@ export type ChatModelRequirements = CommonModelRequirements & {
  /**
  * Chat model variant
  */
- modelVariant: 'CHAT';
+ readonly modelVariant: 'CHAT';
  /**
  * System message to be used in the model
  */
  readonly systemMessage?: string_system_message;
+ /**
+ * The temperature of the model
+ *
+ * Note: [💱] Promptbook is using just `temperature` (not `top_k` and `top_p`)
+ */
+ readonly temperature?: number_model_temperature;
+ /**
+ * Maximum number of tokens that can be generated by the model
+ */
+ readonly maxTokens?: number;
  };
  /**
  * Model requirements for the image generation variant
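A sketch of how the per-variant `temperature` and `maxTokens` fields (moved here from `CommonModelRequirements`, see the hunk further below) might be filled in. The import path is an assumption, the concrete values are illustrative, and it is assumed `CommonModelRequirements` has no required fields beyond those shown in this diff:

import type { ChatModelRequirements } from '@promptbook/types'; // <- import path is an assumption

const chatRequirements: ChatModelRequirements = {
    modelVariant: 'CHAT',                          // readonly discriminant of the chat variant
    modelName: 'gpt-4',                            // optional, from CommonModelRequirements
    systemMessage: 'You are a helpful assistant.', // chat-only field
    temperature: 0.7,                              // now declared on the variant, not on CommonModelRequirements
    maxTokens: 1024,                               // now declared on the variant, not on CommonModelRequirements
};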
@@ -43,7 +63,21 @@ export type ImageGenerationModelRequirements = CommonModelRequirements & {
  /**
  * Image generation model variant
  */
- modelVariant: 'IMAGE_GENERATION';
+ readonly modelVariant: 'IMAGE_GENERATION';
+ /**
+ * Size of the generated image
+ *
+ * e.g. '1536x1536'
+ */
+ readonly size?: '1024x1024' | '1792x1024' | '1024x1792' | `${number}x${number}`;
+ /**
+ * Quality of the generated image
+ */
+ readonly quality?: 'standard' | 'hd';
+ /**
+ * Style of the generated image
+ */
+ readonly style?: 'vivid' | 'natural';
  };
  /**
  * Model requirements for the embedding variant
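A sketch of the new image-generation options (`size`, `quality`, `style`); the import path is again an assumption and the model name is illustrative, not taken from this diff:

import type { ImageGenerationModelRequirements } from '@promptbook/types'; // <- import path is an assumption

const imageRequirements: ImageGenerationModelRequirements = {
    modelVariant: 'IMAGE_GENERATION',
    modelName: 'dall-e-3',  // illustrative model name
    size: '1024x1792',      // one of the suggested sizes, or any `${number}x${number}` string
    quality: 'hd',          // 'standard' | 'hd'
    style: 'natural',       // 'vivid' | 'natural'
};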
@@ -54,7 +88,7 @@ export type EmbeddingModelRequirements = CommonModelRequirements & {
  /**
  * Embedding model variant
  */
- modelVariant: 'EMBEDDING';
+ readonly modelVariant: 'EMBEDDING';
  };
  /**
  * Common properties for all model requirements variants
@@ -84,20 +118,10 @@ export type CommonModelRequirements = {
  * @example 'gpt-4', 'gpt-4-32k-0314', 'gpt-3.5-turbo-instruct',...
  */
  readonly modelName?: string_model_name;
- /**
- * The temperature of the model
- *
- * Note: [💱] Promptbook is using just `temperature` (not `top_k` and `top_p`)
- */
- readonly temperature?: number_model_temperature;
  /**
  * Seed for the model
  */
  readonly seed?: number_seed;
- /**
- * Maximum number of tokens that can be generated by the model
- */
- readonly maxTokens?: number;
  };
  /**
  * TODO: [🧠][🐈] `seed` should maybe be somewhere else (not in `ModelRequirements`) (similar that `user` identification is not here)
@@ -14,9 +14,15 @@ export type string_model_name = 'gpt-4' | 'gpt-4-0314' | 'gpt-4-0613' | 'gpt-4-3
  /**
  * Semantic helper
  *
- * For example `"A cat wearing a hat"`
+ * For example `"How many eyes does a cat have?"`
  */
  export type string_prompt = string;
+ /**
+ * Semantic helper
+ *
+ * For example `"A cat wearing a hat"`
+ */
+ export type string_prompt_image = string;
  /**
  * Semantic helper
  *
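A sketch showing the intended split between the two semantic aliases, using the re-exports from `@promptbook/editable` shown in the first hunk of this diff:

import type { string_prompt, string_prompt_image } from '@promptbook/editable';

// Text prompt (the JSDoc example was updated to a question)
const textPrompt: string_prompt = 'How many eyes does a cat have?';

// Image prompt (the new alias takes over the old example)
const imagePrompt: string_prompt_image = 'A cat wearing a hat';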
@@ -15,7 +15,7 @@ export declare const BOOK_LANGUAGE_VERSION: string_semantic_version;
  export declare const PROMPTBOOK_ENGINE_VERSION: string_promptbook_version;
  /**
  * Represents the version string of the Promptbook engine.
- * It follows semantic versioning (e.g., `0.104.0-5`).
+ * It follows semantic versioning (e.g., `0.104.0-6`).
  *
  * @generated
  */
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@promptbook/editable",
- "version": "0.104.0-6",
+ "version": "0.104.0-7",
  "description": "Promptbook: Turn your company's scattered knowledge into AI ready books",
  "private": false,
  "sideEffects": false,
@@ -95,7 +95,7 @@
  "module": "./esm/index.es.js",
  "typings": "./esm/typings/src/_packages/editable.index.d.ts",
  "peerDependencies": {
- "@promptbook/core": "0.104.0-6"
+ "@promptbook/core": "0.104.0-7"
  },
  "dependencies": {
  "crypto-js": "4.2.0",
package/umd/index.umd.js CHANGED
@@ -23,7 +23,7 @@
  * @generated
  * @see https://github.com/webgptorg/promptbook
  */
- const PROMPTBOOK_ENGINE_VERSION = '0.104.0-6';
+ const PROMPTBOOK_ENGINE_VERSION = '0.104.0-7';
  /**
  * TODO: string_promptbook_version should be constrained to the all versions of Promptbook engine
  * Note: [💞] Ignore a discrepancy between file name and entity name
@@ -3796,11 +3796,7 @@
  // TODO: [🚜] DRY
  if ($taskJson.modelRequirements[command.key] !== undefined) {
  if ($taskJson.modelRequirements[command.key] === command.value) {
- console.warn(`Multiple commands \`MODEL ${{
- modelName: 'NAME',
- modelVariant: 'VARIANT',
- maxTokens: '???',
- }[command.key]} ${command.value}\` in the task "${$taskJson.title || $taskJson.name}"`);
+ console.warn(`Multiple commands \`MODEL ${command.key} ${command.value}\` in the task "${$taskJson.title || $taskJson.name}"`);
  // <- TODO: [🐮] Some standard way how to transform errors into warnings and how to handle non-critical fails during the tasks
  }
  else {