@promptbook/documents 0.104.0-3 → 0.104.0-5

This diff compares the publicly released contents of the two package versions as they appear in their public registry and is provided for informational purposes only.
@@ -3,11 +3,12 @@
  * Source of truth: `/apps/agents-server/src/database/schema.sql` *(do not edit table structure here manually)*
  *
  * [💽] Prompt:
- * Re-generate this sub-schema
+ * Re-generate this sub-schema from `/apps/agents-server/src/database/schema.ts` *(which was generated from `/apps/agents-server/src/database/migrations/*.sql`)*
+ * `AgentsDatabaseSchema` is strict subset of `AgentsServerDatabase`
  * Generate Supabase TypeScript schema which is a subset of `AgentsServerDatabase`
  * containing only tables `Agent` and `AgentHistory`
  *
- * NOTE: This file intentionally omits all other tables (EnvironmentVariable, ChatHistory, ChatFeedback)
+ * NOTE: This file intentionally omits all other tables (`Metadata`, `ChatHistory`, `ChatFeedback`, `User`, `LlmCache`, etc.)
  * and any extra schemas (e.g. `graphql_public`) to remain a strict subset.
  */
  export type Json = string | number | boolean | null | {
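The regenerated comment above asks for `AgentsDatabaseSchema` to stay a strict subset of `AgentsServerDatabase`. A minimal sketch of how that invariant could be asserted at compile time; the import paths, file names, and the `AgentsServerDatabase` export are assumptions taken from the comment, not verified against the package:

```ts
// Sketch only: paths and exported names below are assumed from the comment above.
import type { AgentsServerDatabase } from '../../../apps/agents-server/src/database/schema';
import type { AgentsDatabaseSchema } from './AgentsDatabaseSchema';

// The `Agent` row kept in the sub-schema must stay assignment-compatible with
// the corresponding row in the full server schema:
type AgentRowOfSubset = AgentsDatabaseSchema['public']['Tables']['Agent']['Row'];
type AgentRowOfFull = AgentsServerDatabase['public']['Tables']['Agent']['Row'];

// Fails to compile if the two row shapes ever drift apart:
const _agentRowStaysCompatible: AgentRowOfFull extends AgentRowOfSubset ? true : never = true;
```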
@@ -31,6 +32,7 @@ export type AgentsDatabaseSchema = {
  preparedModelRequirements: Json | null;
  preparedExternals: Json | null;
  deletedAt: string | null;
+ visibility: 'PUBLIC' | 'PRIVATE';
  };
  Insert: {
  id?: number;
@@ -46,6 +48,7 @@ export type AgentsDatabaseSchema = {
  preparedModelRequirements?: Json | null;
  preparedExternals?: Json | null;
  deletedAt?: string | null;
+ visibility?: 'PUBLIC' | 'PRIVATE';
  };
  Update: {
  id?: number;
@@ -61,6 +64,7 @@ export type AgentsDatabaseSchema = {
  preparedModelRequirements?: Json | null;
  preparedExternals?: Json | null;
  deletedAt?: string | null;
+ visibility?: 'PUBLIC' | 'PRIVATE';
  };
  Relationships: [];
  };
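The new `visibility` column is now typed on the `Agent` row, insert, and update shapes. A minimal sketch of how it could be used through a Supabase client typed with this schema; the URL, key, and imported file name are placeholders, not values from the package:

```ts
import { createClient } from '@supabase/supabase-js';
import type { AgentsDatabaseSchema } from './AgentsDatabaseSchema'; // <- file name assumed

// Placeholder credentials for illustration only:
const supabase = createClient<AgentsDatabaseSchema>('https://example.supabase.co', 'anon-key');

// `visibility` narrows to 'PUBLIC' | 'PRIVATE', so typos are rejected at compile time:
const { data: publicAgents, error } = await supabase
    .from('Agent')
    .select('agentName, visibility')
    .eq('visibility', 'PUBLIC');

if (error) {
    throw error;
}
console.info(`Found ${publicAgents.length} public agents`);
```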
@@ -92,21 +96,20 @@ export type AgentsDatabaseSchema = {
  agentSource?: string;
  promptbookEngineVersion?: string;
  };
- Relationships: [];
+ Relationships: [
+ {
+ foreignKeyName: 'AgentHistory_agentName_fkey';
+ columns: ['agentName'];
+ referencedRelation: 'Agent';
+ referencedColumns: ['agentName'];
+ }
+ ];
  };
  };
- Views: {
- [_ in never]: never;
- };
- Functions: {
- [_ in never]: never;
- };
- Enums: {
- [_ in never]: never;
- };
- CompositeTypes: {
- [_ in never]: never;
- };
+ Views: Record<string, never>;
+ Functions: Record<string, never>;
+ Enums: Record<string, never>;
+ CompositeTypes: Record<string, never>;
  };
  };
  type PublicSchema = AgentsDatabaseSchema[Extract<keyof AgentsDatabaseSchema, 'public'>];
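`AgentHistory` now declares its foreign key to `Agent` in `Relationships`, which is the metadata supabase-js uses to type embedded (joined) selects. A rough sketch of the kind of query this enables; the client and file name are the same hypothetical placeholders as in the previous sketch:

```ts
import type { SupabaseClient } from '@supabase/supabase-js';
import type { AgentsDatabaseSchema } from './AgentsDatabaseSchema'; // <- file name assumed

// Any client typed with this schema (see the previous sketch for how one is created):
declare const supabase: SupabaseClient<AgentsDatabaseSchema>;

// The embedded `Agent ( ... )` segment resolves through the declared foreign key
// `AgentHistory_agentName_fkey`, so the joined rows carry typed Agent columns:
const { data: historyWithAgents, error } = await supabase
    .from('AgentHistory')
    .select('promptbookEngineVersion, Agent ( agentName, visibility )')
    .limit(10);

if (error) {
    throw error;
}
console.info(historyWithAgents);
```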
@@ -1,8 +1,8 @@
  import type { ChatParticipant } from '../../book-components/Chat/types/ChatParticipant';
  import type { AvailableModel } from '../../execution/AvailableModel';
  import type { LlmExecutionTools } from '../../execution/LlmExecutionTools';
- import type { ChatPromptResult, CompletionPromptResult, EmbeddingPromptResult, PromptResult } from '../../execution/PromptResult';
- import type { ChatPrompt, CompletionPrompt, EmbeddingPrompt, Prompt } from '../../types/Prompt';
+ import type { ChatPromptResult, CompletionPromptResult, EmbeddingPromptResult, ImagePromptResult, PromptResult } from '../../execution/PromptResult';
+ import type { ChatPrompt, CompletionPrompt, EmbeddingPrompt, ImagePrompt, Prompt } from '../../types/Prompt';
  import type { string_markdown, string_markdown_text, string_title } from '../../types/typeAliases';
  /**
  * Multiple LLM Execution Tools is a proxy server that uses multiple execution tools internally and exposes the executor interface externally.
@@ -43,6 +43,10 @@ export declare class MultipleLlmExecutionTools implements LlmExecutionTools {
  * Calls the best available embedding model
  */
  callEmbeddingModel(prompt: EmbeddingPrompt): Promise<EmbeddingPromptResult>;
+ /**
+ * Calls the best available image generation model
+ */
+ callImageGenerationModel(prompt: ImagePrompt): Promise<ImagePromptResult>;
  /**
  * Calls the best available model
  *
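`MultipleLlmExecutionTools` now also proxies image generation. A rough usage sketch; the type declarations below are abstract stand-ins because the exact `ImagePrompt` fields and import locations are not shown in this diff:

```ts
// Sketch only: these types are declared abstractly here because their real
// definitions and import paths are not part of this diff.
type ImagePrompt = unknown;
type ImagePromptResult = { usage: unknown };
type MultipleLlmExecutionTools = {
    callImageGenerationModel(prompt: ImagePrompt): Promise<ImagePromptResult>;
};

declare const tools: MultipleLlmExecutionTools;
declare const imagePrompt: ImagePrompt; // exact ImagePrompt fields are not shown in this diff

const imageResult = await tools.callImageGenerationModel(imagePrompt);
console.info('Image generation usage:', imageResult.usage); // results carry a `usage` record (see the UMD changes below)
```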
@@ -46,6 +46,7 @@ export declare class RemoteLlmExecutionTools<TCustomOptions = undefined> impleme
  private callCommonModel;
  }
  /**
+ * TODO: !!!! Deprecate pipeline server and all of its components
  * TODO: Maybe use `$exportJson`
  * TODO: [🧠][🛍] Maybe not `isAnonymous: boolean` BUT `mode: 'ANONYMOUS'|'COLLECTION'`
  * TODO: [🍓] Allow to list compatible models with each variant
@@ -15,7 +15,7 @@ export declare const BOOK_LANGUAGE_VERSION: string_semantic_version;
  export declare const PROMPTBOOK_ENGINE_VERSION: string_promptbook_version;
  /**
  * Represents the version string of the Promptbook engine.
- * It follows semantic versioning (e.g., `0.104.0-2`).
+ * It follows semantic versioning (e.g., `0.104.0-4`).
  *
  * @generated
  */
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@promptbook/documents",
3
- "version": "0.104.0-3",
3
+ "version": "0.104.0-5",
4
4
  "description": "Promptbook: Turn your company's scattered knowledge into AI ready books",
5
5
  "private": false,
6
6
  "sideEffects": false,
@@ -95,7 +95,7 @@
  "module": "./esm/index.es.js",
  "typings": "./esm/typings/src/_packages/documents.index.d.ts",
  "peerDependencies": {
- "@promptbook/core": "0.104.0-3"
+ "@promptbook/core": "0.104.0-5"
  },
  "dependencies": {
  "colors": "1.4.0",
package/umd/index.umd.js CHANGED
@@ -25,7 +25,7 @@
  * @generated
  * @see https://github.com/webgptorg/promptbook
  */
- const PROMPTBOOK_ENGINE_VERSION = '0.104.0-3';
+ const PROMPTBOOK_ENGINE_VERSION = '0.104.0-5';
  /**
  * TODO: string_promptbook_version should be constrained to the all versions of Promptbook engine
  * Note: [💞] Ignore a discrepancy between file name and entity name
@@ -4045,6 +4045,15 @@
  return promptResult;
  };
  }
+ if (llmTools.callImageGenerationModel !== undefined) {
+ proxyTools.callImageGenerationModel = async (prompt) => {
+ // console.info('[🚕] callImageGenerationModel through countTotalUsage');
+ const promptResult = await llmTools.callImageGenerationModel(prompt);
+ totalUsage = addUsage(totalUsage, promptResult.usage);
+ spending.next(promptResult.usage);
+ return promptResult;
+ };
+ }
  // <- Note: [🤖]
  return proxyTools;
  }
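The usage-counting proxy only wraps `callImageGenerationModel` when the underlying tools implement it, and every wrapped call adds its `usage` to a running total. A self-contained sketch of that wrapping pattern with simplified, assumed types; the real `Usage` shape, `addUsage`, and the `spending` observable in the package are richer than shown here:

```ts
// Simplified stand-ins for the package's real types (assumptions for illustration):
type Usage = { totalCost: number };
const addUsage = (a: Usage, b: Usage): Usage => ({ totalCost: a.totalCost + b.totalCost });

type ImageCall = (prompt: string) => Promise<{ url: string; usage: Usage }>;
type Tools = { callImageGenerationModel?: ImageCall };

function countTotalUsageSketch(llmTools: Tools): Tools & { getTotalUsage(): Usage } {
    let totalUsage: Usage = { totalCost: 0 };
    const proxyTools: Tools & { getTotalUsage(): Usage } = {
        getTotalUsage: () => totalUsage,
    };

    // Only expose the method when the wrapped tools actually support it:
    if (llmTools.callImageGenerationModel !== undefined) {
        proxyTools.callImageGenerationModel = async (prompt) => {
            const promptResult = await llmTools.callImageGenerationModel!(prompt);
            totalUsage = addUsage(totalUsage, promptResult.usage); // accumulate spending
            return promptResult;
        };
    }

    return proxyTools;
}
```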
@@ -4154,6 +4163,12 @@
  callEmbeddingModel(prompt) {
  return this.callCommonModel(prompt);
  }
+ /**
+ * Calls the best available image generation model
+ */
+ callImageGenerationModel(prompt) {
+ return this.callCommonModel(prompt);
+ }
  // <- Note: [🤖]
  /**
  * Calls the best available model
@@ -4180,6 +4195,11 @@
  continue llm;
  }
  return await llmExecutionTools.callEmbeddingModel(prompt);
+ case 'IMAGE_GENERATION':
+ if (llmExecutionTools.callImageGenerationModel === undefined) {
+ continue llm;
+ }
+ return await llmExecutionTools.callImageGenerationModel(prompt);
  // <- case [🤖]:
  default:
  throw new UnexpectedError(`Unknown model variant "${prompt.modelRequirements.modelVariant}" in ${llmExecutionTools.title}`);
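The dispatch above walks the configured providers in order and skips any that do not implement the requested variant (the labeled `continue llm`), so image generation falls through to the first provider that supports it. A self-contained sketch of that fallback pattern with simplified, assumed types:

```ts
// Simplified stand-ins for the package's real types (assumptions for illustration):
type ModelVariant = 'CHAT' | 'COMPLETION' | 'EMBEDDING' | 'IMAGE_GENERATION';
type Prompt = { content: string; modelRequirements: { modelVariant: ModelVariant } };
type Result = { content: string };
type Provider = {
    title: string;
    callChatModel?: (prompt: Prompt) => Promise<Result>;
    callImageGenerationModel?: (prompt: Prompt) => Promise<Result>;
};

async function callCommonModelSketch(providers: Provider[], prompt: Prompt): Promise<Result> {
    llm: for (const provider of providers) {
        switch (prompt.modelRequirements.modelVariant) {
            case 'CHAT':
                if (provider.callChatModel === undefined) {
                    continue llm; // this provider cannot chat, try the next one
                }
                return await provider.callChatModel(prompt);
            case 'IMAGE_GENERATION':
                if (provider.callImageGenerationModel === undefined) {
                    continue llm; // this provider cannot generate images, try the next one
                }
                return await provider.callImageGenerationModel(prompt);
            default:
                throw new Error(`Unsupported model variant "${prompt.modelRequirements.modelVariant}"`);
        }
    }
    throw new Error('No configured provider can handle the requested model variant');
}
```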
@@ -6304,8 +6324,9 @@
  $ongoingTaskResult.$resultString = $ongoingTaskResult.$completionResult.content;
  break variant;
  case 'EMBEDDING':
+ case 'IMAGE_GENERATION':
  throw new PipelineExecutionError(spaceTrim$1.spaceTrim((block) => `
- Embedding model can not be used in pipeline
+ ${modelRequirements.modelVariant} model can not be used in pipeline

  This should be catched during parsing