@peers-app/peers-sdk 0.1.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -0
- package/dist/context/data-context.d.ts +31 -0
- package/dist/context/data-context.js +56 -0
- package/dist/context/index.d.ts +3 -0
- package/dist/context/index.js +19 -0
- package/dist/context/user-context-singleton.d.ts +11 -0
- package/dist/context/user-context-singleton.js +121 -0
- package/dist/context/user-context.d.ts +55 -0
- package/dist/context/user-context.js +205 -0
- package/dist/data/assistants.d.ts +68 -0
- package/dist/data/assistants.js +64 -0
- package/dist/data/change-tracking.d.ts +219 -0
- package/dist/data/change-tracking.js +119 -0
- package/dist/data/channels.d.ts +29 -0
- package/dist/data/channels.js +25 -0
- package/dist/data/data-locks.d.ts +37 -0
- package/dist/data/data-locks.js +180 -0
- package/dist/data/data-locks.test.d.ts +1 -0
- package/dist/data/data-locks.test.js +456 -0
- package/dist/data/device-sync-info.d.ts +19 -0
- package/dist/data/device-sync-info.js +24 -0
- package/dist/data/devices.d.ts +51 -0
- package/dist/data/devices.js +36 -0
- package/dist/data/embeddings.d.ts +47 -0
- package/dist/data/embeddings.js +36 -0
- package/dist/data/files/file-read-stream.d.ts +27 -0
- package/dist/data/files/file-read-stream.js +195 -0
- package/dist/data/files/file-write-stream.d.ts +20 -0
- package/dist/data/files/file-write-stream.js +113 -0
- package/dist/data/files/file.types.d.ts +47 -0
- package/dist/data/files/file.types.js +55 -0
- package/dist/data/files/files.d.ts +28 -0
- package/dist/data/files/files.js +127 -0
- package/dist/data/files/files.test.d.ts +1 -0
- package/dist/data/files/files.test.js +728 -0
- package/dist/data/files/index.d.ts +4 -0
- package/dist/data/files/index.js +23 -0
- package/dist/data/group-member-roles.d.ts +9 -0
- package/dist/data/group-member-roles.js +25 -0
- package/dist/data/group-members.d.ts +39 -0
- package/dist/data/group-members.js +68 -0
- package/dist/data/group-members.test.d.ts +1 -0
- package/dist/data/group-members.test.js +287 -0
- package/dist/data/group-permissions.d.ts +8 -0
- package/dist/data/group-permissions.js +73 -0
- package/dist/data/group-share.d.ts +50 -0
- package/dist/data/group-share.js +196 -0
- package/dist/data/groups.d.ts +50 -0
- package/dist/data/groups.js +73 -0
- package/dist/data/groups.test.d.ts +1 -0
- package/dist/data/groups.test.js +153 -0
- package/dist/data/index.d.ts +31 -0
- package/dist/data/index.js +47 -0
- package/dist/data/knowledge/knowledge-frames.d.ts +34 -0
- package/dist/data/knowledge/knowledge-frames.js +34 -0
- package/dist/data/knowledge/knowledge-links.d.ts +30 -0
- package/dist/data/knowledge/knowledge-links.js +25 -0
- package/dist/data/knowledge/knowledge-values.d.ts +35 -0
- package/dist/data/knowledge/knowledge-values.js +35 -0
- package/dist/data/knowledge/peer-types.d.ts +112 -0
- package/dist/data/knowledge/peer-types.js +27 -0
- package/dist/data/knowledge/predicates.d.ts +34 -0
- package/dist/data/knowledge/predicates.js +27 -0
- package/dist/data/messages.d.ts +57 -0
- package/dist/data/messages.js +97 -0
- package/dist/data/orm/client-proxy.data-source.d.ts +27 -0
- package/dist/data/orm/client-proxy.data-source.js +65 -0
- package/dist/data/orm/cursor.d.ts +25 -0
- package/dist/data/orm/cursor.js +47 -0
- package/dist/data/orm/cursor.test.d.ts +1 -0
- package/dist/data/orm/cursor.test.js +315 -0
- package/dist/data/orm/data-query.d.ts +96 -0
- package/dist/data/orm/data-query.js +208 -0
- package/dist/data/orm/data-query.mongo.d.ts +17 -0
- package/dist/data/orm/data-query.mongo.js +267 -0
- package/dist/data/orm/data-query.mongo.test.d.ts +1 -0
- package/dist/data/orm/data-query.mongo.test.js +398 -0
- package/dist/data/orm/data-query.sqlite.d.ts +14 -0
- package/dist/data/orm/data-query.sqlite.js +297 -0
- package/dist/data/orm/data-query.sqlite.test.d.ts +1 -0
- package/dist/data/orm/data-query.sqlite.test.js +377 -0
- package/dist/data/orm/data-query.test.d.ts +1 -0
- package/dist/data/orm/data-query.test.js +553 -0
- package/dist/data/orm/decorators.d.ts +6 -0
- package/dist/data/orm/decorators.js +21 -0
- package/dist/data/orm/dependency-injection.test.d.ts +1 -0
- package/dist/data/orm/dependency-injection.test.js +171 -0
- package/dist/data/orm/doc.d.ts +26 -0
- package/dist/data/orm/doc.js +124 -0
- package/dist/data/orm/event-registry.d.ts +24 -0
- package/dist/data/orm/event-registry.js +40 -0
- package/dist/data/orm/event-registry.test.d.ts +1 -0
- package/dist/data/orm/event-registry.test.js +44 -0
- package/dist/data/orm/factory.d.ts +8 -0
- package/dist/data/orm/factory.js +147 -0
- package/dist/data/orm/index.d.ts +16 -0
- package/dist/data/orm/index.js +32 -0
- package/dist/data/orm/multi-cursors.d.ts +11 -0
- package/dist/data/orm/multi-cursors.js +146 -0
- package/dist/data/orm/multi-cursors.test.d.ts +1 -0
- package/dist/data/orm/multi-cursors.test.js +455 -0
- package/dist/data/orm/sql-db.d.ts +6 -0
- package/dist/data/orm/sql-db.js +2 -0
- package/dist/data/orm/sql.data-source.d.ts +38 -0
- package/dist/data/orm/sql.data-source.js +379 -0
- package/dist/data/orm/sql.data-source.test.d.ts +1 -0
- package/dist/data/orm/sql.data-source.test.js +406 -0
- package/dist/data/orm/subscribable.data-source.d.ts +25 -0
- package/dist/data/orm/subscribable.data-source.js +72 -0
- package/dist/data/orm/table-container-events.test.d.ts +1 -0
- package/dist/data/orm/table-container-events.test.js +93 -0
- package/dist/data/orm/table-container.d.ts +39 -0
- package/dist/data/orm/table-container.js +96 -0
- package/dist/data/orm/table-definitions.system.d.ts +9 -0
- package/dist/data/orm/table-definitions.system.js +29 -0
- package/dist/data/orm/table-definitions.type.d.ts +19 -0
- package/dist/data/orm/table-definitions.type.js +2 -0
- package/dist/data/orm/table-dependencies.d.ts +32 -0
- package/dist/data/orm/table-dependencies.js +2 -0
- package/dist/data/orm/table.d.ts +42 -0
- package/dist/data/orm/table.event-source.test.d.ts +1 -0
- package/dist/data/orm/table.event-source.test.js +341 -0
- package/dist/data/orm/table.js +244 -0
- package/dist/data/orm/types.d.ts +20 -0
- package/dist/data/orm/types.js +115 -0
- package/dist/data/orm/types.test.d.ts +1 -0
- package/dist/data/orm/types.test.js +71 -0
- package/dist/data/package-permissions.d.ts +7 -0
- package/dist/data/package-permissions.js +18 -0
- package/dist/data/packages.d.ts +92 -0
- package/dist/data/packages.js +90 -0
- package/dist/data/peer-events/peer-event-handlers.d.ts +21 -0
- package/dist/data/peer-events/peer-event-handlers.js +28 -0
- package/dist/data/peer-events/peer-event-types.d.ts +119 -0
- package/dist/data/peer-events/peer-event-types.js +29 -0
- package/dist/data/peer-events/peer-events.d.ts +41 -0
- package/dist/data/peer-events/peer-events.js +102 -0
- package/dist/data/persistent-vars.d.ts +87 -0
- package/dist/data/persistent-vars.js +230 -0
- package/dist/data/tool-tests.d.ts +37 -0
- package/dist/data/tool-tests.js +27 -0
- package/dist/data/tools.d.ts +358 -0
- package/dist/data/tools.js +48 -0
- package/dist/data/user-permissions.d.ts +15 -0
- package/dist/data/user-permissions.js +39 -0
- package/dist/data/user-permissions.test.d.ts +1 -0
- package/dist/data/user-permissions.test.js +252 -0
- package/dist/data/users.d.ts +38 -0
- package/dist/data/users.js +73 -0
- package/dist/data/workflow-logs.d.ts +106 -0
- package/dist/data/workflow-logs.js +67 -0
- package/dist/data/workflow-runs.d.ts +103 -0
- package/dist/data/workflow-runs.js +313 -0
- package/dist/data/workflows.d.ts +16 -0
- package/dist/data/workflows.js +21 -0
- package/dist/device/connection.d.ts +41 -0
- package/dist/device/connection.js +249 -0
- package/dist/device/connection.test.d.ts +1 -0
- package/dist/device/connection.test.js +292 -0
- package/dist/device/device-election.d.ts +36 -0
- package/dist/device/device-election.js +137 -0
- package/dist/device/device.d.ts +22 -0
- package/dist/device/device.js +110 -0
- package/dist/device/device.test.d.ts +1 -0
- package/dist/device/device.test.js +203 -0
- package/dist/device/get-trust-level.d.ts +3 -0
- package/dist/device/get-trust-level.js +87 -0
- package/dist/device/socket.type.d.ts +20 -0
- package/dist/device/socket.type.js +15 -0
- package/dist/device/streamed-socket.d.ts +27 -0
- package/dist/device/streamed-socket.js +154 -0
- package/dist/device/streamed-socket.test.d.ts +1 -0
- package/dist/device/streamed-socket.test.js +44 -0
- package/dist/events.d.ts +35 -0
- package/dist/events.js +128 -0
- package/dist/index.d.ts +33 -0
- package/dist/index.js +50 -0
- package/dist/keys.d.ts +51 -0
- package/dist/keys.js +234 -0
- package/dist/keys.test.d.ts +1 -0
- package/dist/keys.test.js +215 -0
- package/dist/mentions.d.ts +9 -0
- package/dist/mentions.js +46 -0
- package/dist/observable.d.ts +19 -0
- package/dist/observable.js +112 -0
- package/dist/observable.test.d.ts +1 -0
- package/dist/observable.test.js +183 -0
- package/dist/package-loader/get-require.d.ts +10 -0
- package/dist/package-loader/get-require.js +31 -0
- package/dist/package-loader/index.d.ts +1 -0
- package/dist/package-loader/index.js +17 -0
- package/dist/package-loader/package-loader.d.ts +16 -0
- package/dist/package-loader/package-loader.js +102 -0
- package/dist/peers-ui/peers-ui.d.ts +15 -0
- package/dist/peers-ui/peers-ui.js +23 -0
- package/dist/peers-ui/peers-ui.types.d.ts +35 -0
- package/dist/peers-ui/peers-ui.types.js +3 -0
- package/dist/rpc-types.d.ts +45 -0
- package/dist/rpc-types.js +47 -0
- package/dist/serial-json.d.ts +5 -0
- package/dist/serial-json.js +186 -0
- package/dist/serial-json.test.d.ts +1 -0
- package/dist/serial-json.test.js +86 -0
- package/dist/system-ids.d.ts +6 -0
- package/dist/system-ids.js +10 -0
- package/dist/tools/index.d.ts +1 -0
- package/dist/tools/index.js +17 -0
- package/dist/tools/tools-factory.d.ts +5 -0
- package/dist/tools/tools-factory.js +34 -0
- package/dist/types/app-nav.d.ts +18 -0
- package/dist/types/app-nav.js +10 -0
- package/dist/types/assistant-runner-args.d.ts +9 -0
- package/dist/types/assistant-runner-args.js +2 -0
- package/dist/types/field-type.d.ts +37 -0
- package/dist/types/field-type.js +26 -0
- package/dist/types/peer-device.d.ts +40 -0
- package/dist/types/peer-device.js +14 -0
- package/dist/types/peers-package.d.ts +23 -0
- package/dist/types/peers-package.js +2 -0
- package/dist/types/workflow-logger.d.ts +2 -0
- package/dist/types/workflow-logger.js +2 -0
- package/dist/types/workflow-run-context.d.ts +12 -0
- package/dist/types/workflow-run-context.js +2 -0
- package/dist/types/workflow.d.ts +72 -0
- package/dist/types/workflow.js +24 -0
- package/dist/types/zod-types.d.ts +7 -0
- package/dist/types/zod-types.js +12 -0
- package/dist/users.query.d.ts +13 -0
- package/dist/users.query.js +134 -0
- package/dist/utils.d.ts +39 -0
- package/dist/utils.js +240 -0
- package/dist/utils.test.d.ts +1 -0
- package/dist/utils.test.js +140 -0
- package/package.json +50 -0
package/dist/data/orm/table-container.js
@@ -0,0 +1,96 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TableContainer = void 0;
+const types_1 = require("./types");
+const table_1 = require("./table");
+const table_definitions_system_1 = require("./table-definitions.system");
+const utils_1 = require("../../utils");
+class TableContainer {
+    tableFactory;
+    groupId;
+    tableDefinitions = {};
+    tableInstances = {};
+    constructor(tableFactory, groupId) {
+        this.tableFactory = tableFactory;
+        this.groupId = groupId;
+    }
+    registerTableDefinition(tableDefinition, opts) {
+        const tableId = tableDefinition.metaData.tableId;
+        if (!(0, utils_1.isid)(tableId)) {
+            throw new Error(`Invalid tableId ${tableId} for table ${tableDefinition.metaData.name}. Table IDs must be valid IDs.`);
+        }
+        const tableName = (0, utils_1.getFullTableName)(tableDefinition.metaData);
+        const existing = this.tableInstances[tableName] || this.tableDefinitions[tableName];
+        if (existing && !opts?.overwrite) {
+            console.warn(`Table ${tableName} is already registered.`);
+            return;
+        }
+        if (existing && opts?.overwrite) {
+            console.warn(`Overwriting table definition for ${tableName}.`);
+            delete this.tableInstances[tableName];
+        }
+        this.tableDefinitions[tableName] = tableDefinition;
+    }
+    /**
+     * This shouldn't be used unless you have a good reason to bypass the normal table creation process.
+     * The normal process is to register the table definition and let the factory create the instance.
+     */
+    setTable(table, opts) {
+        if (!(0, utils_1.isid)(table.metaData.tableId)) {
+            throw new Error('Table must have a valid tableId in its metadata. Only system tables are allowed to have an empty tableId.');
+        }
+        const tableName = (0, utils_1.getFullTableName)(table.metaData);
+        if (this.tableInstances[tableName]) {
+            if (opts?.overwrite) {
+                console.warn(`Overwriting existing table with ID ${tableName}.`);
+            }
+            else {
+                throw new Error(`Table with ID ${tableName} already exists.`);
+            }
+        }
+        this.tableInstances[tableName] = table;
+    }
+    getTable(metaDataOrTableName, schema, tableConstructor) {
+        if (typeof metaDataOrTableName === 'string') {
+            return this.getTableByName(metaDataOrTableName);
+        }
+        const metaData = metaDataOrTableName;
+        const tableName = (0, utils_1.getFullTableName)(metaData);
+        if (!this.tableInstances[tableName]) {
+            schema ??= (0, types_1.fieldsToSchema)(metaData.fields);
+            // Check if there's a registered custom constructor
+            const registeredDef = this.tableDefinitions[tableName] || table_definitions_system_1.systemTableDefinitions[tableName];
+            const TableClass = tableConstructor || registeredDef?.tableConstructor || (table_1.Table);
+            const table = this.tableFactory(metaData, schema, TableClass, this.groupId);
+            this.tableInstances[tableName] = table;
+        }
+        return this.tableInstances[tableName];
+    }
+    getTableByName(tableName) {
+        if (this.tableInstances[tableName]) {
+            // If the table is already instantiated, return it
+            return this.tableInstances[tableName];
+        }
+        const tableDefinition = this.tableDefinitions[tableName] || table_definitions_system_1.systemTableDefinitions[tableName];
+        if (!tableDefinition) {
+            throw new Error(`There is no instantiated table or registered definition for ${tableName}`);
+        }
+        const { metaData, schema, tableConstructor } = tableDefinition;
+        return this.getTable(metaData, schema, tableConstructor);
+    }
+    /**
+     * This ensures all tables have been new'ed up and then returns the entire list of them.
+     *
+     * This is primarily intended to be used by the peer-device sync operation which needs all
+     * tables to be able to apply changes correctly.
+     */
+    getAllTables() {
+        const allDefinitions = [...Object.values(table_definitions_system_1.systemTableDefinitions), ...Object.values(this.tableDefinitions)];
+        for (const tableDefinition of allDefinitions) {
+            const { metaData, schema, tableConstructor } = tableDefinition;
+            this.getTable(metaData, schema, tableConstructor);
+        }
+        return Object.values(this.tableInstances);
+    }
+}
+exports.TableContainer = TableContainer;
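The flow this file implements is: register a table definition up front, then let `getTable`/`getTableByName` lazily instantiate the table through the injected factory on first access, falling back to `systemTableDefinitions` and the base `Table` class. A minimal TypeScript sketch of that flow follows; the factory, group ID, and metadata values are hypothetical, and only the `TableContainer` methods come from the diff above.

```ts
// Sketch only: myTableFactory, 'group-123', and contactsMeta are illustrative;
// contactsMeta.tableId is assumed to pass the SDK's isid() check.
import { TableContainer } from './table-container';

const container = new TableContainer(myTableFactory /* a TableFactory */, 'group-123');

const contactsMeta = {
  tableId: 'a-valid-table-id',
  name: 'contacts',
  primaryKeyName: 'id',
  fields: [],
};

// Register the definition; a duplicate registration only logs a warning unless { overwrite: true }.
container.registerTableDefinition({ metaData: contactsMeta });

// First access builds the instance via the factory; later calls return the cached table.
const contacts = container.getTable(contactsMeta);

// Peer-device sync can force every system and registered table to exist.
const allTables = container.getAllTables();
```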
package/dist/data/orm/table-definitions.system.d.ts
@@ -0,0 +1,9 @@
+import { z } from 'zod';
+import { ITableDefinition, TableConstructor } from './table-definitions.type';
+import { ITableMetaData } from './types';
+export declare const systemTableDefinitions: {
+    [tableName: string]: ITableDefinition<any>;
+};
+export declare function registerSystemTableDefinition<T extends {
+    [key: string]: any;
+}>(metaData: ITableMetaData, schema?: z.AnyZodObject, tableConstructor?: TableConstructor<T>): void;
package/dist/data/orm/table-definitions.system.js
@@ -0,0 +1,29 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.systemTableDefinitions = void 0;
+exports.registerSystemTableDefinition = registerSystemTableDefinition;
+const keys_1 = require("../../keys");
+const types_1 = require("./types");
+const utils_1 = require("../../utils");
+exports.systemTableDefinitions = {};
+function registerSystemTableDefinition(metaData, schema, tableConstructor) {
+    const tableName = (0, utils_1.getFullTableName)(metaData);
+    const def = {
+        metaData,
+        schema,
+        tableConstructor
+    };
+    if (exports.systemTableDefinitions[tableName]) {
+        const s1 = (0, keys_1.stableStringify)(exports.systemTableDefinitions[tableName].metaData);
+        const s2 = (0, keys_1.stableStringify)(metaData);
+        if (s1 !== s2) {
+            console.warn(`Attempt to register new metadata for system table ${tableName}, it will be ignored`, {
+                registered: exports.systemTableDefinitions[tableName].metaData,
+                incoming: metaData
+            });
+        }
+        return;
+    }
+    schema ??= (0, types_1.fieldsToSchema)(metaData.fields);
+    exports.systemTableDefinitions[tableName] = def;
+}
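`registerSystemTableDefinition` is effectively idempotent: a second registration under the same full table name is ignored, with a warning only when the incoming metadata differs from what is already registered. A hedged sketch of registering a system table at module load; the table metadata below is illustrative, not an actual SDK table, while `registerSystemTableDefinition` and `FieldType` are real exports referenced elsewhere in this diff.

```ts
// Sketch only: the metadata is hypothetical; the schema argument is optional.
import { registerSystemTableDefinition } from './table-definitions.system';
import { FieldType } from '../../types/field-type';

registerSystemTableDefinition({
  name: 'example_system_table',
  description: 'Illustrative system table',
  primaryKeyName: 'id',
  fields: [
    { name: 'id', type: FieldType.id, description: 'Primary key' },
    { name: 'label', type: FieldType.string, description: 'Display name' },
  ],
});
// Calling it again with identical metadata is a no-op; different metadata only logs a warning.
```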
package/dist/data/orm/table-definitions.type.d.ts
@@ -0,0 +1,19 @@
+import type { IDataSource } from './data-query';
+import type { Table } from './table';
+import type { ITableMetaData } from './types';
+import type { ITableDependencies } from './table-dependencies';
+import { z } from 'zod';
+export type DataSourceFactory = (metaData: ITableMetaData, schema?: z.AnyZodObject, groupId?: string) => IDataSource<any>;
+export type TableConstructor<T extends {
+    [key: string]: any;
+}> = new (metaData: ITableMetaData, deps: ITableDependencies) => Table<T>;
+export type TableFactory = (metaData: ITableMetaData, schema?: z.AnyZodObject, TableClass?: TableConstructor<any>, groupId?: string) => Table<any>;
+export interface ITableDefinition<T extends {
+    [key: string]: any;
+} = {
+    [key: string]: any;
+}> {
+    metaData: ITableMetaData;
+    schema?: z.AnyZodObject;
+    tableConstructor?: TableConstructor<T>;
+}
package/dist/data/orm/table-dependencies.d.ts
@@ -0,0 +1,32 @@
+import { z } from "zod";
+import type { IDataSource } from "./data-query";
+import type { EventRegistry } from "./event-registry";
+import type { UserContext } from "../../context/user-context";
+import type { DataSourceFactory } from "./table-definitions.type";
+/**
+ * Dependencies required by Table instances.
+ * This interface defines the explicit dependencies that every table needs,
+ * making testing easier and eliminating circular dependencies.
+ */
+export interface ITableDependencies {
+    /** The data source for this table's CRUD operations */
+    dataSource: IDataSource<any>;
+    /** Registry for managing shared event emitters */
+    eventRegistry: EventRegistry;
+    /** Zod schema for data validation (optional - will be computed from metaData.fields if not provided) */
+    schema?: z.AnyZodObject;
+    /** Optional group ID for multi-tenancy */
+    groupId?: string;
+}
+/**
+ * Dependencies required by DataContext instances.
+ * Used to construct DataContext without circular references.
+ */
+export interface IDataContextDependencies {
+    /** The user context providing user/device information */
+    userContext: UserContext;
+    /** Optional group ID for multi-tenancy */
+    groupId?: string;
+    /** Factory function for creating data sources */
+    dataSourceFactory?: DataSourceFactory;
+}
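`ITableDependencies` is the constructor-injection seam the comments above describe: a `Table` receives its data source, event registry, and optional schema explicitly, which is exactly how the test file later in this diff wires things up. A condensed sketch follows, assuming an `IDataSource` implementation such as the test's `MockDataSource`; the metadata and casts are illustrative because the full `ITableMetaData` and `EventRegistry` constructor shapes are not shown in this excerpt.

```ts
// Sketch only: MockDataSource is the in-memory fake from the test file below;
// contactsMeta is illustrative. Table, EventRegistry and ITableDependencies are real.
import { Table } from './table';
import { EventRegistry } from './event-registry';
import { FieldType } from '../../types/field-type';
import type { ITableDependencies } from './table-dependencies';

const contactsMeta = {
  name: 'contacts',
  description: 'Example table',
  primaryKeyName: 'id',
  fields: [
    { name: 'id', type: FieldType.id, description: 'Primary key' },
    { name: 'name', type: FieldType.string, description: 'Display name' },
  ],
};

const deps: ITableDependencies = {
  dataSource: new MockDataSource('contacts', 'id'),
  // The test below passes a minimal stub like this for the registry's context.
  eventRegistry: new EventRegistry({ dataContextId: 'example-context' } as any),
  // schema omitted: computed from contactsMeta.fields
  groupId: 'group-123',
};

const contacts = new Table(contactsMeta as any, deps);
```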
package/dist/data/orm/table.d.ts
@@ -0,0 +1,42 @@
+import { z } from "zod";
+import { Emitter, Event } from "../../events";
+import { DataFilter, ICursorIterable, IDataQueryParams, IDataSource, ISaveOptions } from "./data-query";
+import { IDataChangedEvent } from "./subscribable.data-source";
+import { ITableMetaData } from "./types";
+import type { ITableDependencies } from "./table-dependencies";
+export declare class Table<T extends {
+    [key: string]: any;
+}> implements IDataSource<T> {
+    readonly tableName: string;
+    readonly primaryKeyName: string;
+    readonly schema: z.AnyZodObject;
+    protected readonly dataChangedEmitter: Emitter<IDataChangedEvent<T>>;
+    readonly dataChanged: Event<IDataChangedEvent<T>>;
+    private readonly localOperationCache;
+    readonly dataSource: IDataSource<T>;
+    readonly metaData: ITableMetaData;
+    readonly groupId?: string;
+    constructor(metaData: ITableMetaData, deps: ITableDependencies);
+    private getRecordId;
+    private setLocalOperation;
+    private clearLocalOperation;
+    private cachedRecords;
+    get(id: string, opts?: {
+        useCache?: boolean;
+        cacheTtlMs?: number;
+    }): Promise<T | undefined>;
+    list(filter?: DataFilter<T>, opts?: IDataQueryParams<T>): Promise<T[]>;
+    count(filter?: DataFilter<T>): Promise<number>;
+    cursor(filter?: DataFilter<T>, opts?: IDataQueryParams<T>): ICursorIterable<T>;
+    findOne(filter?: DataFilter<T>, opts?: {
+        enforceUnique?: boolean;
+    }): Promise<T | undefined>;
+    save(record: T, opts?: ISaveOptions): Promise<T>;
+    insert(record: T): Promise<T>;
+    update(record: T): Promise<T>;
+    delete(idOrRecord: string | T): Promise<void>;
+    initRecord(data?: Partial<T>): T;
+    initDoc(record: T): import("./doc").IDoc<T>;
+    getDoc(id: string): import("./doc").IDoc<T>;
+}
+export declare function getIconClassName(table: Table<any>): string;
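The declaration above gives `Table` a conventional async CRUD surface (`get`/`list`/`count`/`cursor`/`findOne`/`save`/`insert`/`update`/`delete`) plus a `dataChanged` event stream. A short consumption sketch, assuming a table instance built as in the dependency example earlier; the record shape is illustrative, and the event fields (`op`, `source`, `dataObject`) are taken from the test file below.

```ts
// Sketch only: signatures come from table.d.ts above; the record type is made up.
import { Table } from './table';

async function demo(contacts: Table<{ id: string; name: string; age: number }>) {
  contacts.dataChanged.subscribe(e => {
    console.log(`${e.op} from ${e.source}:`, e.dataObject);
  });

  await contacts.save({ id: 'test-1', name: 'John Doe', age: 30 }); // insert-or-update
  const one = await contacts.get('test-1', { useCache: true, cacheTtlMs: 5000 });
  const total = await contacts.count();
  await contacts.delete('test-1');
}
```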
package/dist/data/orm/table.event-source.test.d.ts
@@ -0,0 +1 @@
+export {};
package/dist/data/orm/table.event-source.test.js
@@ -0,0 +1,341 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+const table_1 = require("./table");
+const subscribable_data_source_1 = require("./subscribable.data-source");
+const types_1 = require("./types");
+const field_type_1 = require("../../types/field-type");
+const utils_1 = require("../../utils");
+const event_registry_1 = require("./event-registry");
+// Mock data source for testing
+class MockDataSource {
+    tableName;
+    primaryKeyName;
+    data = new Map();
+    constructor(tableName, primaryKeyName = 'id', initialData = []) {
+        this.tableName = tableName;
+        this.primaryKeyName = primaryKeyName;
+        initialData.forEach(item => {
+            this.data.set(item[primaryKeyName], item);
+        });
+    }
+    async get(id) {
+        return this.data.get(id);
+    }
+    async list() {
+        return Array.from(this.data.values());
+    }
+    async save(record) {
+        const id = record[this.primaryKeyName];
+        if (id && this.data.has(id)) {
+            return this.update(record);
+        }
+        else {
+            return this.insert(record);
+        }
+    }
+    async insert(record) {
+        if (!record[this.primaryKeyName]) {
+            // Create a new record with the generated ID
+            record = { ...record, [this.primaryKeyName]: (0, utils_1.newid)() };
+        }
+        this.data.set(record[this.primaryKeyName], record);
+        return record;
+    }
+    async update(record) {
+        const id = record[this.primaryKeyName];
+        if (!this.data.has(id)) {
+            throw new Error(`Record with id ${id} not found`);
+        }
+        this.data.set(id, record);
+        return record;
+    }
+    async delete(idOrRecord) {
+        const id = typeof idOrRecord === 'string' ? idOrRecord : idOrRecord[this.primaryKeyName];
+        this.data.delete(id);
+    }
+}
+describe('Table Event Source Functionality', () => {
+    let table;
+    let underlyingDataSource;
+    let subscribableDataSource;
+    let capturedEvents;
+    const testMetadata = {
+        name: 'TestTable',
+        description: 'Test table for event source functionality',
+        primaryKeyName: 'id',
+        fields: [
+            { name: 'id', type: field_type_1.FieldType.id, description: 'Primary key' },
+            { name: 'name', type: field_type_1.FieldType.string, description: 'Name field' },
+            { name: 'age', type: field_type_1.FieldType.number, description: 'Age field' }
+        ]
+    };
+    const testSchema = (0, types_1.fieldsToSchema)(testMetadata.fields);
+    const testRecord = {
+        id: 'test-1',
+        name: 'John Doe',
+        age: 30
+    };
+    beforeEach(() => {
+        capturedEvents = [];
+        // Create underlying data source with unique name to avoid conflicts
+        const uniqueTableName = `TestTable_${Date.now()}_${Math.random()}`;
+        underlyingDataSource = new MockDataSource(uniqueTableName, 'id');
+        // Wrap it in a SubscribableDataSource
+        subscribableDataSource = new subscribable_data_source_1.SubscribableDataSource(underlyingDataSource);
+        // Create real event registry for testing
+        const mockDataContextForRegistry = {
+            dataContextId: 'test-context'
+        };
+        const eventRegistry = new event_registry_1.EventRegistry(mockDataContextForRegistry);
+        const mockDataContext = {
+            dataSourceFactory: () => subscribableDataSource,
+            eventRegistry: eventRegistry
+        };
+        // Create table with the subscribable data source
+        const metaData = {
+            ...testMetadata,
+            name: uniqueTableName
+        };
+        const deps = {
+            dataSource: subscribableDataSource,
+            eventRegistry: eventRegistry,
+            schema: testSchema
+        };
+        table = new table_1.Table(metaData, deps);
+        // Subscribe to events to capture them
+        table.dataChanged.subscribe((event) => {
+            capturedEvents.push(event);
+        });
+    });
+    describe('Local Operations', () => {
+        it('should mark insert events as local when called from table', async () => {
+            await table.insert(testRecord);
+            expect(capturedEvents).toHaveLength(1);
+            expect(capturedEvents[0]).toEqual({
+                dataObject: testRecord,
+                op: 'insert',
+                source: 'local'
+            });
+        });
+        it('should mark update events as local when called from table', async () => {
+            // First insert the record
+            await table.insert(testRecord);
+            capturedEvents.length = 0; // Clear events
+            const updatedRecord = { ...testRecord, age: 31 };
+            await table.update(updatedRecord);
+            expect(capturedEvents).toHaveLength(1);
+            expect(capturedEvents[0]).toEqual({
+                dataObject: updatedRecord,
+                op: 'update',
+                source: 'local'
+            });
+        });
+        it('should mark delete events as local when called from table', async () => {
+            // First insert the record
+            await table.insert(testRecord);
+            capturedEvents.length = 0; // Clear events
+            await table.delete(testRecord.id);
+            expect(capturedEvents).toHaveLength(1);
+            expect(capturedEvents[0]).toEqual({
+                dataObject: testRecord,
+                op: 'delete',
+                source: 'local'
+            });
+        });
+        it('should mark save events as local (insert case)', async () => {
+            await table.save(testRecord);
+            expect(capturedEvents).toHaveLength(1);
+            expect(capturedEvents[0]).toEqual({
+                dataObject: testRecord,
+                op: 'insert',
+                source: 'local'
+            });
+        });
+        it('should mark save events as local (update case)', async () => {
+            // First insert the record directly to underlying source
+            await underlyingDataSource.insert(testRecord);
+            capturedEvents.length = 0; // Clear events
+            const updatedRecord = { ...testRecord, age: 31 };
+            await table.save(updatedRecord);
+            expect(capturedEvents).toHaveLength(1);
+            expect(capturedEvents[0]).toEqual({
+                dataObject: updatedRecord,
+                op: 'update',
+                source: 'local'
+            });
+        });
+    });
+    describe('Remote Operations', () => {
+        it('should mark insert events as remote when called from underlying source', async () => {
+            await subscribableDataSource.insert(testRecord);
+            expect(capturedEvents).toHaveLength(1);
+            expect(capturedEvents[0]).toEqual({
+                dataObject: testRecord,
+                op: 'insert',
+                source: 'remote'
+            });
+        });
+        it('should mark update events as remote when called from underlying source', async () => {
+            // First insert the record
+            await subscribableDataSource.insert(testRecord);
+            capturedEvents.length = 0; // Clear events
+            const updatedRecord = { ...testRecord, age: 31 };
+            await subscribableDataSource.update(updatedRecord);
+            expect(capturedEvents).toHaveLength(1);
+            expect(capturedEvents[0]).toEqual({
+                dataObject: updatedRecord,
+                op: 'update',
+                source: 'remote'
+            });
+        });
+        it('should mark delete events as remote when called from underlying source', async () => {
+            // First insert the record
+            await subscribableDataSource.insert(testRecord);
+            capturedEvents.length = 0; // Clear events
+            await subscribableDataSource.delete(testRecord.id);
+            expect(capturedEvents).toHaveLength(1);
+            expect(capturedEvents[0]).toEqual({
+                dataObject: testRecord,
+                op: 'delete',
+                source: 'remote'
+            });
+        });
+    });
+    describe('Concurrent Operations', () => {
+        it('should handle multiple concurrent insert operations correctly', async () => {
+            const record1 = { id: 'test-1', name: 'John', age: 30 };
+            const record2 = { id: 'test-2', name: 'Jane', age: 25 };
+            const record3 = { id: 'test-3', name: 'Bob', age: 35 };
+            // Execute multiple operations concurrently
+            await Promise.all([
+                table.insert(record1),
+                table.insert(record2),
+                table.insert(record3)
+            ]);
+            expect(capturedEvents).toHaveLength(3);
+            capturedEvents.forEach(event => {
+                expect(event.op).toBe('insert');
+                expect(event.source).toBe('local');
+            });
+            // Verify all records were inserted
+            const insertedIds = capturedEvents.map(e => e.dataObject.id).sort();
+            expect(insertedIds).toEqual(['test-1', 'test-2', 'test-3']);
+        });
+        it('should handle concurrent operations with different operations types', async () => {
+            const record1 = { id: 'test-1', name: 'John', age: 30 };
+            const record2 = { id: 'test-2', name: 'Jane', age: 25 };
+            // First insert records to set up for update/delete
+            await table.insert(record1);
+            await table.insert(record2);
+            capturedEvents.length = 0; // Clear events
+            const updatedRecord1 = { ...record1, age: 31 };
+            const record3 = { id: 'test-3', name: 'Alice', age: 28 };
+            // Execute mixed operations concurrently
+            await Promise.all([
+                table.update(updatedRecord1),
+                table.delete(record2.id),
+                table.insert(record3)
+            ]);
+            expect(capturedEvents).toHaveLength(3);
+            // All should be marked as local
+            capturedEvents.forEach(event => {
+                expect(event.source).toBe('local');
+            });
+            // Check specific operations
+            const operations = capturedEvents.map(e => e.op).sort();
+            expect(operations).toEqual(['delete', 'insert', 'update']);
+        });
+        it('should handle concurrent local and remote operations correctly', async () => {
+            const localRecord = { id: 'local-1', name: 'Local User', age: 30 };
+            const remoteRecord = { id: 'remote-1', name: 'Remote User', age: 25 };
+            // Execute local and remote operations concurrently
+            await Promise.all([
+                table.insert(localRecord),
+                subscribableDataSource.insert(remoteRecord)
+            ]);
+            expect(capturedEvents).toHaveLength(2);
+            // Find events by record ID
+            const localEvent = capturedEvents.find(e => e.dataObject.id === 'local-1');
+            const remoteEvent = capturedEvents.find(e => e.dataObject.id === 'remote-1');
+            expect(localEvent?.source).toBe('local');
+            expect(remoteEvent?.source).toBe('remote');
+        });
+        it('should handle concurrent operations on different records correctly', async () => {
+            const record1 = { id: 'test-1', name: 'John', age: 30 };
+            const record2 = { id: 'test-2', name: 'Jane', age: 25 };
+            // Insert records first
+            await table.insert(record1);
+            await subscribableDataSource.insert(record2);
+            capturedEvents.length = 0;
+            const localUpdate = { ...record1, name: 'John Local', age: 31 };
+            const remoteUpdate = { ...record2, name: 'Jane Remote', age: 26 };
+            // Execute concurrent updates on different records (one local, one remote)
+            await Promise.all([
+                table.update(localUpdate),
+                subscribableDataSource.update(remoteUpdate)
+            ]);
+            expect(capturedEvents).toHaveLength(2);
+            // Both operations should complete, but with different sources
+            const localEvent = capturedEvents.find(e => e.dataObject.name?.includes('Local'));
+            const remoteEvent = capturedEvents.find(e => e.dataObject.name?.includes('Remote'));
+            expect(localEvent?.source).toBe('local');
+            expect(remoteEvent?.source).toBe('remote');
+        });
+    });
+    describe('Cache Management', () => {
+        it('should clean up cache entries after operations complete', async () => {
+            const record = { id: 'test-1', name: 'John', age: 30 };
+            // Insert record
+            await table.insert(record);
+            // Verify the cache was cleaned up by checking that subsequent remote operations
+            // are marked as remote (not local)
+            const updatedRecord = { ...record, age: 31 };
+            await subscribableDataSource.update(updatedRecord);
+            expect(capturedEvents).toHaveLength(2);
+            expect(capturedEvents[0].source).toBe('local'); // Insert
+            expect(capturedEvents[1].source).toBe('remote'); // Update from subscribable source
+        });
+        it('should clean up cache entries even when operations throw errors', async () => {
+            const record = { id: 'nonexistent', name: 'John', age: 30 };
+            // Try to update a non-existent record (should throw)
+            try {
+                await table.update(record);
+            }
+            catch (error) {
+                // Expected to throw
+            }
+            // Now insert the same record through underlying source
+            await subscribableDataSource.insert(record);
+            // Should be marked as remote since cache was cleaned up
+            expect(capturedEvents).toHaveLength(1);
+            expect(capturedEvents[0].source).toBe('remote');
+        });
+    });
+    describe('Table Without Underlying SubscribableDataSource', () => {
+        it('should work correctly when no underlying SubscribableDataSource exists', async () => {
+            // Create table with plain data source (no SubscribableDataSource)
+            const plainDataSource = new MockDataSource('PlainTable', 'id');
+            const mockDataContextForRegistry2 = {
+                dataContextId: 'test-context-2'
+            };
+            const eventRegistry2 = new event_registry_1.EventRegistry(mockDataContextForRegistry2);
+            const mockDataContext = {
+                dataSourceFactory: () => plainDataSource,
+                eventRegistry: eventRegistry2
+            };
+            const plainTableDeps = {
+                dataSource: plainDataSource,
+                eventRegistry: eventRegistry2,
+                schema: testSchema
+            };
+            const plainTable = new table_1.Table(testMetadata, plainTableDeps);
+            const events = [];
+            plainTable.dataChanged.subscribe(event => {
+                events.push(event);
+            });
+            await plainTable.insert(testRecord);
+            expect(events).toHaveLength(1);
+            expect(events[0].source).toBe('local');
+        });
+    });
+});
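Taken together, these tests pin down the contract that `Table` layers over a `SubscribableDataSource`: writes made through the table itself are emitted with `source: 'local'`, writes that arrive via the underlying data source (for example, applied by peer-device sync) are emitted with `source: 'remote'`, and the local-operation cache is cleaned up even when an operation throws. A hedged sketch of how application code might rely on that distinction; the UI helpers are hypothetical, and only `dataChanged`, `op`, `source`, and `dataObject` come from the diff above.

```ts
// Sketch only: `messages` is an already-constructed Table instance; renderRemoteChange
// and confirmLocalWrite are hypothetical application helpers.
messages.dataChanged.subscribe(event => {
  if (event.source === 'remote') {
    // Arrived through the underlying data source, e.g. peer-device sync.
    renderRemoteChange(event.op, event.dataObject);
  } else {
    // 'local': this device initiated the insert/update/delete.
    confirmLocalWrite(event.op, event.dataObject);
  }
});
```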