@promptbook/remote-server 0.104.0-3 → 0.104.0-5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -3,11 +3,12 @@
3
3
  * Source of truth: `/apps/agents-server/src/database/schema.sql` *(do not edit table structure here manually)*
4
4
  *
5
5
  * [💽] Prompt:
6
- * Re-generate this sub-schema
6
+ * Re-generate this sub-schema from `/apps/agents-server/src/database/schema.ts` *(which was generated from `/apps/agents-server/src/database/migrations/*.sql`)*
7
+ * `AgentsDatabaseSchema` is a strict subset of `AgentsServerDatabase`
7
8
  * Generate Supabase TypeScript schema which is a subset of `AgentsServerDatabase`
8
9
  * containing only tables `Agent` and `AgentHistory`
9
10
  *
10
- * NOTE: This file intentionally omits all other tables (EnvironmentVariable, ChatHistory, ChatFeedback)
11
+ * NOTE: This file intentionally omits all other tables (`Metadata`, `ChatHistory`, `ChatFeedback`, `User`, `LlmCache`, etc.)
11
12
  * and any extra schemas (e.g. `graphql_public`) to remain a strict subset.
12
13
  */
13
14
  export type Json = string | number | boolean | null | {
@@ -31,6 +32,7 @@ export type AgentsDatabaseSchema = {
31
32
  preparedModelRequirements: Json | null;
32
33
  preparedExternals: Json | null;
33
34
  deletedAt: string | null;
35
+ visibility: 'PUBLIC' | 'PRIVATE';
34
36
  };
35
37
  Insert: {
36
38
  id?: number;
@@ -46,6 +48,7 @@ export type AgentsDatabaseSchema = {
46
48
  preparedModelRequirements?: Json | null;
47
49
  preparedExternals?: Json | null;
48
50
  deletedAt?: string | null;
51
+ visibility?: 'PUBLIC' | 'PRIVATE';
49
52
  };
50
53
  Update: {
51
54
  id?: number;
@@ -61,6 +64,7 @@ export type AgentsDatabaseSchema = {
61
64
  preparedModelRequirements?: Json | null;
62
65
  preparedExternals?: Json | null;
63
66
  deletedAt?: string | null;
67
+ visibility?: 'PUBLIC' | 'PRIVATE';
64
68
  };
65
69
  Relationships: [];
66
70
  };
@@ -92,21 +96,20 @@ export type AgentsDatabaseSchema = {
92
96
  agentSource?: string;
93
97
  promptbookEngineVersion?: string;
94
98
  };
95
- Relationships: [];
99
+ Relationships: [
100
+ {
101
+ foreignKeyName: 'AgentHistory_agentName_fkey';
102
+ columns: ['agentName'];
103
+ referencedRelation: 'Agent';
104
+ referencedColumns: ['agentName'];
105
+ }
106
+ ];
96
107
  };
97
108
  };
98
- Views: {
99
- [_ in never]: never;
100
- };
101
- Functions: {
102
- [_ in never]: never;
103
- };
104
- Enums: {
105
- [_ in never]: never;
106
- };
107
- CompositeTypes: {
108
- [_ in never]: never;
109
- };
109
+ Views: Record<string, never>;
110
+ Functions: Record<string, never>;
111
+ Enums: Record<string, never>;
112
+ CompositeTypes: Record<string, never>;
110
113
  };
111
114
  };
112
115
  type PublicSchema = AgentsDatabaseSchema[Extract<keyof AgentsDatabaseSchema, 'public'>];
@@ -1,8 +1,8 @@
1
1
  import type { ChatParticipant } from '../../book-components/Chat/types/ChatParticipant';
2
2
  import type { AvailableModel } from '../../execution/AvailableModel';
3
3
  import type { LlmExecutionTools } from '../../execution/LlmExecutionTools';
4
- import type { ChatPromptResult, CompletionPromptResult, EmbeddingPromptResult, PromptResult } from '../../execution/PromptResult';
5
- import type { ChatPrompt, CompletionPrompt, EmbeddingPrompt, Prompt } from '../../types/Prompt';
4
+ import type { ChatPromptResult, CompletionPromptResult, EmbeddingPromptResult, ImagePromptResult, PromptResult } from '../../execution/PromptResult';
5
+ import type { ChatPrompt, CompletionPrompt, EmbeddingPrompt, ImagePrompt, Prompt } from '../../types/Prompt';
6
6
  import type { string_markdown, string_markdown_text, string_title } from '../../types/typeAliases';
7
7
  /**
8
8
  * Multiple LLM Execution Tools is a proxy server that uses multiple execution tools internally and exposes the executor interface externally.
@@ -43,6 +43,10 @@ export declare class MultipleLlmExecutionTools implements LlmExecutionTools {
43
43
  * Calls the best available embedding model
44
44
  */
45
45
  callEmbeddingModel(prompt: EmbeddingPrompt): Promise<EmbeddingPromptResult>;
46
+ /**
47
+ * Calls the best available image generation model
48
+ */
49
+ callImageGenerationModel(prompt: ImagePrompt): Promise<ImagePromptResult>;
46
50
  /**
47
51
  * Calls the best available model
48
52
  *
@@ -46,6 +46,7 @@ export declare class RemoteLlmExecutionTools<TCustomOptions = undefined> impleme
46
46
  private callCommonModel;
47
47
  }
48
48
  /**
49
+ * TODO: !!!! Deprecate pipeline server and all of its components
49
50
  * TODO: Maybe use `$exportJson`
50
51
  * TODO: [🧠][🛍] Maybe not `isAnonymous: boolean` BUT `mode: 'ANONYMOUS'|'COLLECTION'`
51
52
  * TODO: [🍓] Allow to list compatible models with each variant
@@ -15,7 +15,7 @@ export declare const BOOK_LANGUAGE_VERSION: string_semantic_version;
15
15
  export declare const PROMPTBOOK_ENGINE_VERSION: string_promptbook_version;
16
16
  /**
17
17
  * Represents the version string of the Promptbook engine.
18
- * It follows semantic versioning (e.g., `0.104.0-2`).
18
+ * It follows semantic versioning (e.g., `0.104.0-4`).
19
19
  *
20
20
  * @generated
21
21
  */
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@promptbook/remote-server",
3
- "version": "0.104.0-3",
3
+ "version": "0.104.0-5",
4
4
  "description": "Promptbook: Turn your company's scattered knowledge into AI ready books",
5
5
  "private": false,
6
6
  "sideEffects": false,
@@ -95,7 +95,7 @@
95
95
  "module": "./esm/index.es.js",
96
96
  "typings": "./esm/typings/src/_packages/remote-server.index.d.ts",
97
97
  "peerDependencies": {
98
- "@promptbook/core": "0.104.0-3"
98
+ "@promptbook/core": "0.104.0-5"
99
99
  },
100
100
  "dependencies": {
101
101
  "colors": "1.4.0",
package/umd/index.umd.js CHANGED
@@ -47,7 +47,7 @@
47
47
  * @generated
48
48
  * @see https://github.com/webgptorg/promptbook
49
49
  */
50
- const PROMPTBOOK_ENGINE_VERSION = '0.104.0-3';
50
+ const PROMPTBOOK_ENGINE_VERSION = '0.104.0-5';
51
51
  /**
52
52
  * TODO: string_promptbook_version should be constrained to the all versions of Promptbook engine
53
53
  * Note: [💞] Ignore a discrepancy between file name and entity name
@@ -3844,6 +3844,15 @@
3844
3844
  return promptResult;
3845
3845
  };
3846
3846
  }
3847
+ if (llmTools.callImageGenerationModel !== undefined) {
3848
+ proxyTools.callImageGenerationModel = async (prompt) => {
3849
+ // console.info('[🚕] callImageGenerationModel through countTotalUsage');
3850
+ const promptResult = await llmTools.callImageGenerationModel(prompt);
3851
+ totalUsage = addUsage(totalUsage, promptResult.usage);
3852
+ spending.next(promptResult.usage);
3853
+ return promptResult;
3854
+ };
3855
+ }
3847
3856
  // <- Note: [🤖]
3848
3857
  return proxyTools;
3849
3858
  }
@@ -3953,6 +3962,12 @@
3953
3962
  callEmbeddingModel(prompt) {
3954
3963
  return this.callCommonModel(prompt);
3955
3964
  }
3965
+ /**
3966
+ * Calls the best available image generation model
3967
+ */
3968
+ callImageGenerationModel(prompt) {
3969
+ return this.callCommonModel(prompt);
3970
+ }
3956
3971
  // <- Note: [🤖]
3957
3972
  /**
3958
3973
  * Calls the best available model
@@ -3979,6 +3994,11 @@
3979
3994
  continue llm;
3980
3995
  }
3981
3996
  return await llmExecutionTools.callEmbeddingModel(prompt);
3997
+ case 'IMAGE_GENERATION':
3998
+ if (llmExecutionTools.callImageGenerationModel === undefined) {
3999
+ continue llm;
4000
+ }
4001
+ return await llmExecutionTools.callImageGenerationModel(prompt);
3982
4002
  // <- case [🤖]:
3983
4003
  default:
3984
4004
  throw new UnexpectedError(`Unknown model variant "${prompt.modelRequirements.modelVariant}" in ${llmExecutionTools.title}`);
@@ -6525,8 +6545,9 @@
6525
6545
  $ongoingTaskResult.$resultString = $ongoingTaskResult.$completionResult.content;
6526
6546
  break variant;
6527
6547
  case 'EMBEDDING':
6548
+ case 'IMAGE_GENERATION':
6528
6549
  throw new PipelineExecutionError(spaceTrim$1.spaceTrim((block) => `
6529
- Embedding model can not be used in pipeline
6550
+ ${modelRequirements.modelVariant} model can not be used in pipeline
6530
6551
 
6531
6552
  This should be catched during parsing
6532
6553