@promptbook/remote-server 0.104.0-1 → 0.104.0-10

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (42)
  1. package/esm/index.es.js +42 -37
  2. package/esm/index.es.js.map +1 -1
  3. package/esm/typings/servers.d.ts +8 -0
  4. package/esm/typings/src/_packages/core.index.d.ts +2 -0
  5. package/esm/typings/src/_packages/types.index.d.ts +10 -2
  6. package/esm/typings/src/book-2.0/agent-source/AgentBasicInformation.d.ts +6 -1
  7. package/esm/typings/src/book-2.0/agent-source/createAgentModelRequirements.d.ts +6 -6
  8. package/esm/typings/src/book-2.0/agent-source/createAgentModelRequirementsWithCommitments.closed.test.d.ts +1 -0
  9. package/esm/typings/src/book-2.0/utils/generatePlaceholderAgentProfileImageUrl.d.ts +3 -3
  10. package/esm/typings/src/book-components/Chat/Chat/ChatMessageItem.d.ts +5 -1
  11. package/esm/typings/src/book-components/Chat/Chat/ChatProps.d.ts +5 -0
  12. package/esm/typings/src/book-components/Chat/CodeBlock/CodeBlock.d.ts +13 -0
  13. package/esm/typings/src/book-components/Chat/MarkdownContent/MarkdownContent.d.ts +1 -0
  14. package/esm/typings/src/book-components/Chat/types/ChatMessage.d.ts +7 -11
  15. package/esm/typings/src/book-components/_common/Dropdown/Dropdown.d.ts +2 -2
  16. package/esm/typings/src/book-components/_common/MenuHoisting/MenuHoistingContext.d.ts +56 -0
  17. package/esm/typings/src/collection/agent-collection/constructors/agent-collection-in-supabase/AgentCollectionInSupabase.d.ts +21 -11
  18. package/esm/typings/src/collection/agent-collection/constructors/agent-collection-in-supabase/AgentsDatabaseSchema.d.ts +80 -14
  19. package/esm/typings/src/commitments/DICTIONARY/DICTIONARY.d.ts +46 -0
  20. package/esm/typings/src/commitments/index.d.ts +2 -1
  21. package/esm/typings/src/llm-providers/_multiple/MultipleLlmExecutionTools.d.ts +6 -2
  22. package/esm/typings/src/llm-providers/agent/AgentLlmExecutionTools.d.ts +1 -1
  23. package/esm/typings/src/llm-providers/ollama/OllamaExecutionTools.d.ts +1 -1
  24. package/esm/typings/src/llm-providers/openai/createOpenAiCompatibleExecutionTools.d.ts +1 -1
  25. package/esm/typings/src/llm-providers/remote/RemoteLlmExecutionTools.d.ts +1 -0
  26. package/esm/typings/src/types/Message.d.ts +49 -0
  27. package/esm/typings/src/types/ModelRequirements.d.ts +38 -14
  28. package/esm/typings/src/types/typeAliases.d.ts +23 -1
  29. package/esm/typings/src/utils/color/utils/colorToDataUrl.d.ts +2 -1
  30. package/esm/typings/src/utils/environment/$detectRuntimeEnvironment.d.ts +4 -4
  31. package/esm/typings/src/utils/environment/$isRunningInBrowser.d.ts +1 -1
  32. package/esm/typings/src/utils/environment/$isRunningInJest.d.ts +1 -1
  33. package/esm/typings/src/utils/environment/$isRunningInNode.d.ts +1 -1
  34. package/esm/typings/src/utils/environment/$isRunningInWebWorker.d.ts +1 -1
  35. package/esm/typings/src/utils/markdown/extractAllBlocksFromMarkdown.d.ts +2 -2
  36. package/esm/typings/src/utils/markdown/extractOneBlockFromMarkdown.d.ts +2 -2
  37. package/esm/typings/src/utils/random/$randomBase58.d.ts +12 -0
  38. package/esm/typings/src/version.d.ts +1 -1
  39. package/package.json +2 -2
  40. package/umd/index.umd.js +46 -41
  41. package/umd/index.umd.js.map +1 -1
  42. package/esm/typings/src/book-2.0/utils/generateGravatarUrl.d.ts +0 -10
package/esm/typings/src/llm-providers/_multiple/MultipleLlmExecutionTools.d.ts CHANGED
@@ -1,8 +1,8 @@
  import type { ChatParticipant } from '../../book-components/Chat/types/ChatParticipant';
  import type { AvailableModel } from '../../execution/AvailableModel';
  import type { LlmExecutionTools } from '../../execution/LlmExecutionTools';
- import type { ChatPromptResult, CompletionPromptResult, EmbeddingPromptResult, PromptResult } from '../../execution/PromptResult';
- import type { ChatPrompt, CompletionPrompt, EmbeddingPrompt, Prompt } from '../../types/Prompt';
+ import type { ChatPromptResult, CompletionPromptResult, EmbeddingPromptResult, ImagePromptResult, PromptResult } from '../../execution/PromptResult';
+ import type { ChatPrompt, CompletionPrompt, EmbeddingPrompt, ImagePrompt, Prompt } from '../../types/Prompt';
  import type { string_markdown, string_markdown_text, string_title } from '../../types/typeAliases';
  /**
  * Multiple LLM Execution Tools is a proxy server that uses multiple execution tools internally and exposes the executor interface externally.
@@ -43,6 +43,10 @@ export declare class MultipleLlmExecutionTools implements LlmExecutionTools {
  * Calls the best available embedding model
  */
  callEmbeddingModel(prompt: EmbeddingPrompt): Promise<EmbeddingPromptResult>;
+ /**
+ * Calls the best available embedding model
+ */
+ callImageGenerationModel(prompt: ImagePrompt): Promise<ImagePromptResult>;
  /**
  * Calls the best available model
  *
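Like `callEmbeddingModel`, the new `callImageGenerationModel` is an optional capability: the bundled code later in this diff only invokes it after checking that the provider actually defines it. A minimal caller-side sketch of that guard; the `ImagePrompt`/`ImagePromptResult` shapes below are simplified local stand-ins, not the published typings:

// Simplified local stand-ins for the ImagePrompt / ImagePromptResult typings referenced above
type ImagePrompt = {
    readonly content: string;
    readonly modelRequirements: { readonly modelVariant: 'IMAGE_GENERATION' };
};
type ImagePromptResult = { readonly content: string; readonly usage: unknown };

// `tools` can be any LlmExecutionTools implementation, e.g. MultipleLlmExecutionTools
async function generateImageIfSupported(
    tools: { callImageGenerationModel?(prompt: ImagePrompt): Promise<ImagePromptResult> },
    prompt: ImagePrompt,
): Promise<ImagePromptResult | null> {
    if (tools.callImageGenerationModel === undefined) {
        return null; // provider does not support the IMAGE_GENERATION variant
    }
    return tools.callImageGenerationModel(prompt);
}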
package/esm/typings/src/llm-providers/agent/AgentLlmExecutionTools.d.ts CHANGED
@@ -22,7 +22,7 @@ import type { CreateAgentLlmExecutionToolsOptions } from './CreateAgentLlmExecut
  * @public exported from `@promptbook/core`
  */
  export declare class AgentLlmExecutionTools implements LlmExecutionTools {
- private readonly options;
+ protected readonly options: CreateAgentLlmExecutionToolsOptions;
  /**
  * Cache of OpenAI assistants to avoid creating duplicates
  */
package/esm/typings/src/llm-providers/ollama/OllamaExecutionTools.d.ts CHANGED
@@ -39,7 +39,7 @@ export declare class OllamaExecutionTools extends OpenAiCompatibleExecutionTools
  */
  protected getDefaultEmbeddingModel(): AvailableModel;
  /**
- * Default model for image generation variant.
+ * Default model for completion variant.
  */
  protected getDefaultImageGenerationModel(): AvailableModel;
  }
package/esm/typings/src/llm-providers/openai/createOpenAiCompatibleExecutionTools.d.ts CHANGED
@@ -64,7 +64,7 @@ export declare class HardcodedOpenAiCompatibleExecutionTools extends OpenAiCompa
  */
  protected getDefaultEmbeddingModel(): AvailableModel;
  /**
- * Default model for image generation variant.
+ * Default model for completion variant.
  */
  protected getDefaultImageGenerationModel(): AvailableModel;
  }
package/esm/typings/src/llm-providers/remote/RemoteLlmExecutionTools.d.ts CHANGED
@@ -46,6 +46,7 @@ export declare class RemoteLlmExecutionTools<TCustomOptions = undefined> impleme
  private callCommonModel;
  }
  /**
+ * TODO: !!!! Deprecate pipeline server and all of its components
  * TODO: Maybe use `$exportJson`
  * TODO: [🧠][🛍] Maybe not `isAnonymous: boolean` BUT `mode: 'ANONYMOUS'|'COLLECTION'`
  * TODO: [🍓] Allow to list compatible models with each variant
package/esm/typings/src/types/Message.d.ts CHANGED
@@ -0,0 +1,49 @@
+ import { Arrayable } from 'type-fest';
+ import { really_any } from '../_packages/types.index';
+ import { id, string_date_iso8601, string_markdown } from './typeAliases';
+ /**
+ * A generic message structure for various communication channels
+ */
+ export type Message<TParticipant> = {
+ /**
+ * Unique identifier of the message
+ */
+ readonly id?: id;
+ /**
+ * Date when the message was created
+ */
+ readonly createdAt?: Date | string_date_iso8601;
+ /**
+ * The communication channel of the message
+ */
+ readonly channel?: 'PROMPTBOOK_CHAT' | 'EMAIL' | 'SMS' | 'WHATSAPP' | 'TELEGRAM' | 'SIGNAL' | string | 'UNKNOWN';
+ /**
+ * Is the message send from the Promptbook or to the Promptbook
+ */
+ readonly direction?: 'INBOUND' | 'OUTBOUND' | 'INTERNAL' | 'INITIAL';
+ /**
+ * Who sent the message
+ */
+ readonly sender: TParticipant;
+ /**
+ * Who are the recipients of the message
+ */
+ readonly recipients?: Readonly<Arrayable<TParticipant>>;
+ /**
+ * The content of the message as markdown
+ *
+ * Note: We are converting all message content to markdown for consistency
+ */
+ readonly content: string_markdown;
+ /**
+ * The thread identifier the message belongs to
+ *
+ * - `null` means the message is not part of any thread
+ * - `undefined` means that we don't know if the message is part of a thread or not
+ */
+ readonly threadId?: id | null;
+ /**
+ * Arbitrary metadata associated with the message
+ */
+ readonly metadata?: Readonly<Record<string, really_any>>;
+ };
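The new `Message<TParticipant>` type is generic over who sends and receives the message; only `sender` and `content` are required, everything else is optional. A sketch of a conforming value, using a trimmed-down local mirror of the type and a hypothetical participant shape (in the published typings, `id`, `string_date_iso8601` and `string_markdown` are plain string aliases):

// Hypothetical participant shape – Message<TParticipant> leaves this entirely to the caller
type Participant = { readonly name: string; readonly isMe: boolean };

// Trimmed-down local mirror of the Message<TParticipant> declaration above
type MessageSketch = {
    readonly id?: string;
    readonly createdAt?: Date | string;
    readonly channel?: string;
    readonly direction?: 'INBOUND' | 'OUTBOUND' | 'INTERNAL' | 'INITIAL';
    readonly sender: Participant;
    readonly recipients?: readonly Participant[];
    readonly content: string; // markdown
    readonly threadId?: string | null;
    readonly metadata?: Readonly<Record<string, unknown>>;
};

const incomingEmail: MessageSketch = {
    createdAt: '2025-01-01T12:00:00.000Z',
    channel: 'EMAIL',
    direction: 'INBOUND',
    sender: { name: 'Alice', isMe: false },
    recipients: [{ name: 'Support agent', isMe: true }],
    content: 'Hello, **can you summarize the attached report**?',
    threadId: null, // explicitly not part of any thread
};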
package/esm/typings/src/types/ModelRequirements.d.ts CHANGED
@@ -17,7 +17,17 @@ export type CompletionModelRequirements = CommonModelRequirements & {
  /**
  * Completion model variant
  */
- modelVariant: 'COMPLETION';
+ readonly modelVariant: 'COMPLETION';
+ /**
+ * The temperature of the model
+ *
+ * Note: [💱] Promptbook is using just `temperature` (not `top_k` and `top_p`)
+ */
+ readonly temperature?: number_model_temperature;
+ /**
+ * Maximum number of tokens that can be generated by the model
+ */
+ readonly maxTokens?: number;
  };
  /**
  * Model requirements for the chat variant
@@ -28,11 +38,21 @@ export type ChatModelRequirements = CommonModelRequirements & {
  /**
  * Chat model variant
  */
- modelVariant: 'CHAT';
+ readonly modelVariant: 'CHAT';
  /**
  * System message to be used in the model
  */
  readonly systemMessage?: string_system_message;
+ /**
+ * The temperature of the model
+ *
+ * Note: [💱] Promptbook is using just `temperature` (not `top_k` and `top_p`)
+ */
+ readonly temperature?: number_model_temperature;
+ /**
+ * Maximum number of tokens that can be generated by the model
+ */
+ readonly maxTokens?: number;
  };
  /**
  * Model requirements for the image generation variant
@@ -43,7 +63,21 @@ export type ImageGenerationModelRequirements = CommonModelRequirements & {
  /**
  * Image generation model variant
  */
- modelVariant: 'IMAGE_GENERATION';
+ readonly modelVariant: 'IMAGE_GENERATION';
+ /**
+ * Size of the generated image
+ *
+ * e.g. '1536x1536'
+ */
+ readonly size?: '1024x1024' | '1792x1024' | '1024x1792' | `${number}x${number}`;
+ /**
+ * Quality of the generated image
+ */
+ readonly quality?: 'standard' | 'hd';
+ /**
+ * Style of the generated image
+ */
+ readonly style?: 'vivid' | 'natural';
  };
  /**
  * Model requirements for the embedding variant
@@ -54,7 +88,7 @@ export type EmbeddingModelRequirements = CommonModelRequirements & {
  /**
  * Embedding model variant
  */
- modelVariant: 'EMBEDDING';
+ readonly modelVariant: 'EMBEDDING';
  };
  /**
  * Common properties for all model requirements variants
@@ -84,20 +118,10 @@ export type CommonModelRequirements = {
  * @example 'gpt-4', 'gpt-4-32k-0314', 'gpt-3.5-turbo-instruct',...
  */
  readonly modelName?: string_model_name;
- /**
- * The temperature of the model
- *
- * Note: [💱] Promptbook is using just `temperature` (not `top_k` and `top_p`)
- */
- readonly temperature?: number_model_temperature;
  /**
  * Seed for the model
  */
  readonly seed?: number_seed;
- /**
- * Maximum number of tokens that can be generated by the model
- */
- readonly maxTokens?: number;
  };
  /**
  * TODO: [🧠][🈁] `seed` should maybe be somewhere else (not in `ModelRequirements`) (similar that `user` identification is not here)
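The net effect of these hunks: `temperature` and `maxTokens` move from `CommonModelRequirements` onto the COMPLETION and CHAT variants only, all `modelVariant` discriminants become `readonly`, and the IMAGE_GENERATION variant gains `size`, `quality` and `style`. A sketch with simplified local mirrors of the two affected variants (field types follow the declarations above; `number_model_temperature` is treated as a plain number here):

// Simplified local mirrors of the affected variants
type ChatModelRequirementsSketch = {
    readonly modelVariant: 'CHAT';
    readonly systemMessage?: string;
    readonly temperature?: number; // now lives on the text variants, not on CommonModelRequirements
    readonly maxTokens?: number;   // same
};
type ImageGenerationModelRequirementsSketch = {
    readonly modelVariant: 'IMAGE_GENERATION';
    readonly size?: '1024x1024' | '1792x1024' | '1024x1792' | `${number}x${number}`;
    readonly quality?: 'standard' | 'hd';
    readonly style?: 'vivid' | 'natural';
};

// Temperature and maxTokens belong to the text variants…
const chatRequirements: ChatModelRequirementsSketch = {
    modelVariant: 'CHAT',
    systemMessage: 'You are a helpful assistant.',
    temperature: 0.7,
    maxTokens: 1024,
};

// …while the image variant gets its own size/quality/style knobs instead.
const imageRequirements: ImageGenerationModelRequirementsSketch = {
    modelVariant: 'IMAGE_GENERATION',
    size: '1024x1024',
    quality: 'hd',
    style: 'natural',
};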
package/esm/typings/src/types/typeAliases.d.ts CHANGED
@@ -14,9 +14,15 @@ export type string_model_name = 'gpt-4' | 'gpt-4-0314' | 'gpt-4-0613' | 'gpt-4-3
  /**
  * Semantic helper
  *
- * For example `"A cat wearing a hat"`
+ * For example `"How many eyes does a cat have?"`
  */
  export type string_prompt = string;
+ /**
+ * Semantic helper
+ *
+ * For example `"A cat wearing a hat"`
+ */
+ export type string_prompt_image = string;
  /**
  * Semantic helper
  *
@@ -140,6 +146,8 @@ export type string_title = string;
  * Semantic helper
  *
  * For example `"My AI Assistant"`
+ *
+ * TODO: !!!! Brand the type
  */
  export type string_agent_name = string;
  /**
@@ -154,6 +162,14 @@ export type string_agent_name_in_book = string;
  * For example `"b126926439c5fcb83609888a11283723c1ef137c0ad599a77a1be81812bd221d"`
  */
  export type string_agent_hash = string_sha256;
+ /**
+ * Semantic helper
+ *
+ * For example `"3mJr7AoUXx2Wqd"`
+ *
+ * TODO: !!!! Brand the type
+ */
+ export type string_agent_permanent_id = string_base_58;
  /**
  * Unstructured description of the persona
  *
@@ -499,6 +515,12 @@ export type string_user_id = id | string_email;
  * For example `"b126926439c5fcb83609888a11283723c1ef137c0ad599a77a1be81812bd221d"`
  */
  export type string_sha256 = string;
+ /**
+ * Semantic helper
+ *
+ * For example `"4JmF3b2J5dGVz"`
+ */
+ export type string_base_58 = string;
  /**
  * Semantic helper
  *
package/esm/typings/src/utils/color/utils/colorToDataUrl.d.ts CHANGED
@@ -1,10 +1,11 @@
+ import { string_color, string_data_url, string_url_image } from '../../../types/typeAliases';
  import { Color } from '../Color';
  /**
  * Makes data url from color
  *
  * @public exported from `@promptbook/color`
  */
- export declare function colorToDataUrl(color: Color): string;
+ export declare function colorToDataUrl(color: Color | string_color): string_data_url & string_url_image;
  /**
  * TODO: Make as functions NOT const
  */
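`colorToDataUrl` now also accepts a plain color string, and its return value is typed as an image data URL. A hedged usage sketch; the `@promptbook/color` entry point is taken from the `@public` docblock above, and `'#ff8800'` is assumed to be a valid `string_color` value:

import { colorToDataUrl } from '@promptbook/color';

// A Color instance is no longer required – a color string is accepted directly
const dataUrl = colorToDataUrl('#ff8800');

// The result is typed as a data URL usable wherever an image URL is expected
console.info(dataUrl.startsWith('data:')); // expected: true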
package/esm/typings/src/utils/environment/$detectRuntimeEnvironment.d.ts CHANGED
@@ -6,10 +6,10 @@
  * @public exported from `@promptbook/utils`
  */
  export declare function $detectRuntimeEnvironment(): {
- isRunningInBrowser: any;
- isRunningInJest: any;
- isRunningInNode: any;
- isRunningInWebWorker: any;
+ isRunningInBrowser: boolean;
+ isRunningInJest: boolean;
+ isRunningInNode: boolean;
+ isRunningInWebWorker: boolean;
  };
  /**
  * TODO: [🎺] Also detect and report node version here
package/esm/typings/src/utils/environment/$isRunningInBrowser.d.ts CHANGED
@@ -5,7 +5,7 @@
  *
  * @public exported from `@promptbook/utils`
  */
- export declare const $isRunningInBrowser: Function;
+ export declare function $isRunningInBrowser(): boolean;
  /**
  * TODO: [🎺]
  */
package/esm/typings/src/utils/environment/$isRunningInJest.d.ts CHANGED
@@ -5,7 +5,7 @@
  *
  * @public exported from `@promptbook/utils`
  */
- export declare const $isRunningInJest: Function;
+ export declare function $isRunningInJest(): boolean;
  /**
  * TODO: [🎺]
  */
package/esm/typings/src/utils/environment/$isRunningInNode.d.ts CHANGED
@@ -5,7 +5,7 @@
  *
  * @public exported from `@promptbook/utils`
  */
- export declare const $isRunningInNode: Function;
+ export declare function $isRunningInNode(): boolean;
  /**
  * TODO: [🎺]
  */
package/esm/typings/src/utils/environment/$isRunningInWebWorker.d.ts CHANGED
@@ -5,7 +5,7 @@
  *
  * @public exported from `@promptbook/utils`
  */
- export declare const $isRunningInWebWorker: Function;
+ export declare function $isRunningInWebWorker(): boolean;
  /**
  * TODO: [🎺]
  */
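All four environment predicates are now declared as real functions returning `boolean` (previously `Function` consts built with `new Function(...)`, as the UMD hunks further down show), and `$detectRuntimeEnvironment` reports booleans instead of `any`. A short usage sketch; the `@promptbook/utils` entry point is taken from the `@public` docblocks above:

import { $detectRuntimeEnvironment, $isRunningInNode } from '@promptbook/utils';

// The `$` prefix marks these as impure: they inspect the global environment
if ($isRunningInNode()) {
    console.info('Running under Node.js');
}

// Or read all four flags at once – now typed as booleans rather than `any`
const { isRunningInBrowser, isRunningInJest, isRunningInNode, isRunningInWebWorker } = $detectRuntimeEnvironment();
console.info({ isRunningInBrowser, isRunningInJest, isRunningInNode, isRunningInWebWorker });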
package/esm/typings/src/utils/markdown/extractAllBlocksFromMarkdown.d.ts CHANGED
@@ -2,7 +2,7 @@ import type { string_markdown } from '../../types/typeAliases';
  /**
  * Single code block inside markdown.
  */
- export type CodeBlock = {
+ export type MarkdownCodeBlock = {
  /**
  * Which notation was used to open the code block
  */
@@ -30,7 +30,7 @@ export type CodeBlock = {
  * @throws {ParseError} if block is not closed properly
  * @public exported from `@promptbook/markdown-utils`
  */
- export declare function extractAllBlocksFromMarkdown(markdown: string_markdown): ReadonlyArray<CodeBlock>;
+ export declare function extractAllBlocksFromMarkdown(markdown: string_markdown): ReadonlyArray<MarkdownCodeBlock>;
  /**
  * TODO: Maybe name for `blockNotation` instead of '```' and '>'
  */
package/esm/typings/src/utils/markdown/extractOneBlockFromMarkdown.d.ts CHANGED
@@ -1,5 +1,5 @@
  import type { string_markdown } from '../../types/typeAliases';
- import type { CodeBlock } from './extractAllBlocksFromMarkdown';
+ import type { MarkdownCodeBlock } from './extractAllBlocksFromMarkdown';
  /**
  * Extracts exactly ONE code block from markdown.
  *
@@ -16,7 +16,7 @@ import type { CodeBlock } from './extractAllBlocksFromMarkdown';
  * @public exported from `@promptbook/markdown-utils`
  * @throws {ParseError} if there is not exactly one code block in the markdown
  */
- export declare function extractOneBlockFromMarkdown(markdown: string_markdown): CodeBlock;
+ export declare function extractOneBlockFromMarkdown(markdown: string_markdown): MarkdownCodeBlock;
  /***
  * TODO: [🍓][🌻] Decide of this is internal utility, external util OR validator/postprocessor
  */
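The `CodeBlock` type is renamed to `MarkdownCodeBlock` in both extractors; the function signatures are otherwise unchanged. A usage sketch, with the `@promptbook/markdown-utils` entry point taken from the `@public` docblocks above:

import { extractAllBlocksFromMarkdown, extractOneBlockFromMarkdown } from '@promptbook/markdown-utils';

const markdown = [
    'Some intro text',
    '',
    '```json',
    '{ "answer": 42 }',
    '```',
].join('\n');

// Returns ReadonlyArray<MarkdownCodeBlock> (the renamed type)
const blocks = extractAllBlocksFromMarkdown(markdown);
console.info(blocks.length); // expected: 1

// Throws ParseError unless there is exactly one code block in the input
const onlyBlock = extractOneBlockFromMarkdown(markdown);
console.info(onlyBlock);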
package/esm/typings/src/utils/random/$randomBase58.d.ts CHANGED
@@ -0,0 +1,12 @@
+ /**
+ * Generates random base58 string
+ *
+ * Note: `$` is used to indicate that this function is not a pure function - it is not deterministic
+ * Note: This function is cryptographically secure (it uses crypto.randomBytes internally)
+ *
+ * @param length - length of the string
+ * @returns secure random base58 string
+ *
+ * @private internal helper function
+ */
+ export declare function $randomBase58(length: number): string;
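`$randomBase58` is a private helper, so there is no public import to demonstrate; its docblock only promises a crypto-secure, non-deterministic base58 string of the requested length. A minimal sketch of how such a helper could be implemented under those constraints (the actual internals are not part of this diff):

import { randomBytes } from 'crypto';

// Bitcoin-style base58 alphabet (no 0, O, I, l) – 58 characters
const BASE58_ALPHABET = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz';

function $randomBase58(length: number): string {
    let result = '';
    while (result.length < length) {
        for (const byte of randomBytes(length)) {
            // Rejection sampling keeps the distribution uniform: 232 = 58 * 4 is the
            // largest multiple of 58 below 256, so bytes >= 232 are discarded
            if (byte < 232) {
                result += BASE58_ALPHABET[byte % 58];
                if (result.length === length) {
                    break;
                }
            }
        }
    }
    return result;
}

console.info($randomBase58(14)); // e.g. "3mJr7AoUXx2Wqd" – 14 characters, like string_agent_permanent_id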
package/esm/typings/src/version.d.ts CHANGED
@@ -15,7 +15,7 @@ export declare const BOOK_LANGUAGE_VERSION: string_semantic_version;
  export declare const PROMPTBOOK_ENGINE_VERSION: string_promptbook_version;
  /**
  * Represents the version string of the Promptbook engine.
- * It follows semantic versioning (e.g., `0.104.0-0`).
+ * It follows semantic versioning (e.g., `0.104.0-9`).
  *
  * @generated
  */
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@promptbook/remote-server",
- "version": "0.104.0-1",
+ "version": "0.104.0-10",
  "description": "Promptbook: Turn your company's scattered knowledge into AI ready books",
  "private": false,
  "sideEffects": false,
@@ -95,7 +95,7 @@
  "module": "./esm/index.es.js",
  "typings": "./esm/typings/src/_packages/remote-server.index.d.ts",
  "peerDependencies": {
- "@promptbook/core": "0.104.0-1"
+ "@promptbook/core": "0.104.0-10"
  },
  "dependencies": {
  "colors": "1.4.0",
package/umd/index.umd.js CHANGED
@@ -47,7 +47,7 @@
  * @generated
  * @see https://github.com/webgptorg/promptbook
  */
- const PROMPTBOOK_ENGINE_VERSION = '0.104.0-1';
+ const PROMPTBOOK_ENGINE_VERSION = '0.104.0-10';
  /**
  * TODO: string_promptbook_version should be constrained to the all versions of Promptbook engine
  * Note: [💞] Ignore a discrepancy between file name and entity name
@@ -1195,13 +1195,14 @@
  *
  * @public exported from `@promptbook/utils`
  */
- const $isRunningInNode = new Function(`
- try {
- return this === global;
- } catch (e) {
- return false;
+ function $isRunningInNode() {
+ try {
+ return typeof process !== 'undefined' && process.versions != null && process.versions.node != null;
+ }
+ catch (e) {
+ return false;
+ }
  }
- `);
  /**
  * TODO: [🎺]
  */
@@ -3843,6 +3844,15 @@
  return promptResult;
  };
  }
+ if (llmTools.callImageGenerationModel !== undefined) {
+ proxyTools.callImageGenerationModel = async (prompt) => {
+ // console.info('[🚕] callImageGenerationModel through countTotalUsage');
+ const promptResult = await llmTools.callImageGenerationModel(prompt);
+ totalUsage = addUsage(totalUsage, promptResult.usage);
+ spending.next(promptResult.usage);
+ return promptResult;
+ };
+ }
  // <- Note: [🤖]
  return proxyTools;
  }
@@ -3952,6 +3962,12 @@
  callEmbeddingModel(prompt) {
  return this.callCommonModel(prompt);
  }
+ /**
+ * Calls the best available embedding model
+ */
+ callImageGenerationModel(prompt) {
+ return this.callCommonModel(prompt);
+ }
  // <- Note: [🤖]
  /**
  * Calls the best available model
@@ -3978,6 +3994,11 @@
  continue llm;
  }
  return await llmExecutionTools.callEmbeddingModel(prompt);
+ case 'IMAGE_GENERATION':
+ if (llmExecutionTools.callImageGenerationModel === undefined) {
+ continue llm;
+ }
+ return await llmExecutionTools.callImageGenerationModel(prompt);
  // <- case [🤖]:
  default:
  throw new UnexpectedError(`Unknown model variant "${prompt.modelRequirements.modelVariant}" in ${llmExecutionTools.title}`);
@@ -6524,8 +6545,9 @@
  $ongoingTaskResult.$resultString = $ongoingTaskResult.$completionResult.content;
  break variant;
  case 'EMBEDDING':
+ case 'IMAGE_GENERATION':
  throw new PipelineExecutionError(spaceTrim$1.spaceTrim((block) => `
- Embedding model can not be used in pipeline
+ ${modelRequirements.modelVariant} model can not be used in pipeline

  This should be catched during parsing

@@ -7666,13 +7688,14 @@
  *
  * @public exported from `@promptbook/utils`
  */
- const $isRunningInBrowser = new Function(`
- try {
- return this === window;
- } catch (e) {
- return false;
+ function $isRunningInBrowser() {
+ try {
+ return typeof window !== 'undefined' && typeof window.document !== 'undefined';
+ }
+ catch (e) {
+ return false;
+ }
  }
- `);
  /**
  * TODO: [🎺]
  */
@@ -7684,17 +7707,17 @@
  *
  * @public exported from `@promptbook/utils`
  */
- const $isRunningInWebWorker = new Function(`
- try {
- if (typeof WorkerGlobalScope !== 'undefined' && self instanceof WorkerGlobalScope) {
- return true;
- } else {
+ function $isRunningInWebWorker() {
+ try {
+ // Note: Check for importScripts which is specific to workers
+ // and not available in the main browser thread
+ return (typeof self !== 'undefined' &&
+ typeof self.importScripts === 'function');
+ }
+ catch (e) {
  return false;
  }
- } catch (e) {
- return false;
  }
- `);
  /**
  * TODO: [🎺]
  */
@@ -7826,7 +7849,7 @@
  ${i + 1}) **${title}** `${className}` from `${packageName}`
  ${morePieces.join('; ')}
  `);
- if ($isRunningInNode) {
+ if ($isRunningInNode()) {
  if (isInstalled && isFullyConfigured) {
  providerMessage = colors__default["default"].green(providerMessage);
  }
@@ -7990,24 +8013,6 @@
  * TODO: [🌺] Use some intermediate util splitWords
  */

- /**
- * Detects if the code is running in jest environment
- *
- * Note: `$` is used to indicate that this function is not a pure function - it looks at the global object to determine the environment
- *
- * @public exported from `@promptbook/utils`
- */
- new Function(`
- try {
- return process.env.JEST_WORKER_ID !== undefined;
- } catch (e) {
- return false;
- }
- `);
- /**
- * TODO: [🎺]
- */
-
  /**
  * Makes first letter of a string lowercase
  *