@peers-app/peers-sdk 0.1.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -0
- package/dist/context/data-context.d.ts +31 -0
- package/dist/context/data-context.js +56 -0
- package/dist/context/index.d.ts +3 -0
- package/dist/context/index.js +19 -0
- package/dist/context/user-context-singleton.d.ts +11 -0
- package/dist/context/user-context-singleton.js +121 -0
- package/dist/context/user-context.d.ts +55 -0
- package/dist/context/user-context.js +205 -0
- package/dist/data/assistants.d.ts +68 -0
- package/dist/data/assistants.js +64 -0
- package/dist/data/change-tracking.d.ts +219 -0
- package/dist/data/change-tracking.js +119 -0
- package/dist/data/channels.d.ts +29 -0
- package/dist/data/channels.js +25 -0
- package/dist/data/data-locks.d.ts +37 -0
- package/dist/data/data-locks.js +180 -0
- package/dist/data/data-locks.test.d.ts +1 -0
- package/dist/data/data-locks.test.js +456 -0
- package/dist/data/device-sync-info.d.ts +19 -0
- package/dist/data/device-sync-info.js +24 -0
- package/dist/data/devices.d.ts +51 -0
- package/dist/data/devices.js +36 -0
- package/dist/data/embeddings.d.ts +47 -0
- package/dist/data/embeddings.js +36 -0
- package/dist/data/files/file-read-stream.d.ts +27 -0
- package/dist/data/files/file-read-stream.js +195 -0
- package/dist/data/files/file-write-stream.d.ts +20 -0
- package/dist/data/files/file-write-stream.js +113 -0
- package/dist/data/files/file.types.d.ts +47 -0
- package/dist/data/files/file.types.js +55 -0
- package/dist/data/files/files.d.ts +28 -0
- package/dist/data/files/files.js +127 -0
- package/dist/data/files/files.test.d.ts +1 -0
- package/dist/data/files/files.test.js +728 -0
- package/dist/data/files/index.d.ts +4 -0
- package/dist/data/files/index.js +23 -0
- package/dist/data/group-member-roles.d.ts +9 -0
- package/dist/data/group-member-roles.js +25 -0
- package/dist/data/group-members.d.ts +39 -0
- package/dist/data/group-members.js +68 -0
- package/dist/data/group-members.test.d.ts +1 -0
- package/dist/data/group-members.test.js +287 -0
- package/dist/data/group-permissions.d.ts +8 -0
- package/dist/data/group-permissions.js +73 -0
- package/dist/data/group-share.d.ts +50 -0
- package/dist/data/group-share.js +196 -0
- package/dist/data/groups.d.ts +50 -0
- package/dist/data/groups.js +73 -0
- package/dist/data/groups.test.d.ts +1 -0
- package/dist/data/groups.test.js +153 -0
- package/dist/data/index.d.ts +31 -0
- package/dist/data/index.js +47 -0
- package/dist/data/knowledge/knowledge-frames.d.ts +34 -0
- package/dist/data/knowledge/knowledge-frames.js +34 -0
- package/dist/data/knowledge/knowledge-links.d.ts +30 -0
- package/dist/data/knowledge/knowledge-links.js +25 -0
- package/dist/data/knowledge/knowledge-values.d.ts +35 -0
- package/dist/data/knowledge/knowledge-values.js +35 -0
- package/dist/data/knowledge/peer-types.d.ts +112 -0
- package/dist/data/knowledge/peer-types.js +27 -0
- package/dist/data/knowledge/predicates.d.ts +34 -0
- package/dist/data/knowledge/predicates.js +27 -0
- package/dist/data/messages.d.ts +57 -0
- package/dist/data/messages.js +97 -0
- package/dist/data/orm/client-proxy.data-source.d.ts +27 -0
- package/dist/data/orm/client-proxy.data-source.js +65 -0
- package/dist/data/orm/cursor.d.ts +25 -0
- package/dist/data/orm/cursor.js +47 -0
- package/dist/data/orm/cursor.test.d.ts +1 -0
- package/dist/data/orm/cursor.test.js +315 -0
- package/dist/data/orm/data-query.d.ts +96 -0
- package/dist/data/orm/data-query.js +208 -0
- package/dist/data/orm/data-query.mongo.d.ts +17 -0
- package/dist/data/orm/data-query.mongo.js +267 -0
- package/dist/data/orm/data-query.mongo.test.d.ts +1 -0
- package/dist/data/orm/data-query.mongo.test.js +398 -0
- package/dist/data/orm/data-query.sqlite.d.ts +14 -0
- package/dist/data/orm/data-query.sqlite.js +297 -0
- package/dist/data/orm/data-query.sqlite.test.d.ts +1 -0
- package/dist/data/orm/data-query.sqlite.test.js +377 -0
- package/dist/data/orm/data-query.test.d.ts +1 -0
- package/dist/data/orm/data-query.test.js +553 -0
- package/dist/data/orm/decorators.d.ts +6 -0
- package/dist/data/orm/decorators.js +21 -0
- package/dist/data/orm/dependency-injection.test.d.ts +1 -0
- package/dist/data/orm/dependency-injection.test.js +171 -0
- package/dist/data/orm/doc.d.ts +26 -0
- package/dist/data/orm/doc.js +124 -0
- package/dist/data/orm/event-registry.d.ts +24 -0
- package/dist/data/orm/event-registry.js +40 -0
- package/dist/data/orm/event-registry.test.d.ts +1 -0
- package/dist/data/orm/event-registry.test.js +44 -0
- package/dist/data/orm/factory.d.ts +8 -0
- package/dist/data/orm/factory.js +147 -0
- package/dist/data/orm/index.d.ts +16 -0
- package/dist/data/orm/index.js +32 -0
- package/dist/data/orm/multi-cursors.d.ts +11 -0
- package/dist/data/orm/multi-cursors.js +146 -0
- package/dist/data/orm/multi-cursors.test.d.ts +1 -0
- package/dist/data/orm/multi-cursors.test.js +455 -0
- package/dist/data/orm/sql-db.d.ts +6 -0
- package/dist/data/orm/sql-db.js +2 -0
- package/dist/data/orm/sql.data-source.d.ts +38 -0
- package/dist/data/orm/sql.data-source.js +379 -0
- package/dist/data/orm/sql.data-source.test.d.ts +1 -0
- package/dist/data/orm/sql.data-source.test.js +406 -0
- package/dist/data/orm/subscribable.data-source.d.ts +25 -0
- package/dist/data/orm/subscribable.data-source.js +72 -0
- package/dist/data/orm/table-container-events.test.d.ts +1 -0
- package/dist/data/orm/table-container-events.test.js +93 -0
- package/dist/data/orm/table-container.d.ts +39 -0
- package/dist/data/orm/table-container.js +96 -0
- package/dist/data/orm/table-definitions.system.d.ts +9 -0
- package/dist/data/orm/table-definitions.system.js +29 -0
- package/dist/data/orm/table-definitions.type.d.ts +19 -0
- package/dist/data/orm/table-definitions.type.js +2 -0
- package/dist/data/orm/table-dependencies.d.ts +32 -0
- package/dist/data/orm/table-dependencies.js +2 -0
- package/dist/data/orm/table.d.ts +42 -0
- package/dist/data/orm/table.event-source.test.d.ts +1 -0
- package/dist/data/orm/table.event-source.test.js +341 -0
- package/dist/data/orm/table.js +244 -0
- package/dist/data/orm/types.d.ts +20 -0
- package/dist/data/orm/types.js +115 -0
- package/dist/data/orm/types.test.d.ts +1 -0
- package/dist/data/orm/types.test.js +71 -0
- package/dist/data/package-permissions.d.ts +7 -0
- package/dist/data/package-permissions.js +18 -0
- package/dist/data/packages.d.ts +92 -0
- package/dist/data/packages.js +90 -0
- package/dist/data/peer-events/peer-event-handlers.d.ts +21 -0
- package/dist/data/peer-events/peer-event-handlers.js +28 -0
- package/dist/data/peer-events/peer-event-types.d.ts +119 -0
- package/dist/data/peer-events/peer-event-types.js +29 -0
- package/dist/data/peer-events/peer-events.d.ts +41 -0
- package/dist/data/peer-events/peer-events.js +102 -0
- package/dist/data/persistent-vars.d.ts +87 -0
- package/dist/data/persistent-vars.js +230 -0
- package/dist/data/tool-tests.d.ts +37 -0
- package/dist/data/tool-tests.js +27 -0
- package/dist/data/tools.d.ts +358 -0
- package/dist/data/tools.js +48 -0
- package/dist/data/user-permissions.d.ts +15 -0
- package/dist/data/user-permissions.js +39 -0
- package/dist/data/user-permissions.test.d.ts +1 -0
- package/dist/data/user-permissions.test.js +252 -0
- package/dist/data/users.d.ts +38 -0
- package/dist/data/users.js +73 -0
- package/dist/data/workflow-logs.d.ts +106 -0
- package/dist/data/workflow-logs.js +67 -0
- package/dist/data/workflow-runs.d.ts +103 -0
- package/dist/data/workflow-runs.js +313 -0
- package/dist/data/workflows.d.ts +16 -0
- package/dist/data/workflows.js +21 -0
- package/dist/device/connection.d.ts +41 -0
- package/dist/device/connection.js +249 -0
- package/dist/device/connection.test.d.ts +1 -0
- package/dist/device/connection.test.js +292 -0
- package/dist/device/device-election.d.ts +36 -0
- package/dist/device/device-election.js +137 -0
- package/dist/device/device.d.ts +22 -0
- package/dist/device/device.js +110 -0
- package/dist/device/device.test.d.ts +1 -0
- package/dist/device/device.test.js +203 -0
- package/dist/device/get-trust-level.d.ts +3 -0
- package/dist/device/get-trust-level.js +87 -0
- package/dist/device/socket.type.d.ts +20 -0
- package/dist/device/socket.type.js +15 -0
- package/dist/device/streamed-socket.d.ts +27 -0
- package/dist/device/streamed-socket.js +154 -0
- package/dist/device/streamed-socket.test.d.ts +1 -0
- package/dist/device/streamed-socket.test.js +44 -0
- package/dist/events.d.ts +35 -0
- package/dist/events.js +128 -0
- package/dist/index.d.ts +33 -0
- package/dist/index.js +50 -0
- package/dist/keys.d.ts +51 -0
- package/dist/keys.js +234 -0
- package/dist/keys.test.d.ts +1 -0
- package/dist/keys.test.js +215 -0
- package/dist/mentions.d.ts +9 -0
- package/dist/mentions.js +46 -0
- package/dist/observable.d.ts +19 -0
- package/dist/observable.js +112 -0
- package/dist/observable.test.d.ts +1 -0
- package/dist/observable.test.js +183 -0
- package/dist/package-loader/get-require.d.ts +10 -0
- package/dist/package-loader/get-require.js +31 -0
- package/dist/package-loader/index.d.ts +1 -0
- package/dist/package-loader/index.js +17 -0
- package/dist/package-loader/package-loader.d.ts +16 -0
- package/dist/package-loader/package-loader.js +102 -0
- package/dist/peers-ui/peers-ui.d.ts +15 -0
- package/dist/peers-ui/peers-ui.js +23 -0
- package/dist/peers-ui/peers-ui.types.d.ts +35 -0
- package/dist/peers-ui/peers-ui.types.js +3 -0
- package/dist/rpc-types.d.ts +45 -0
- package/dist/rpc-types.js +47 -0
- package/dist/serial-json.d.ts +5 -0
- package/dist/serial-json.js +186 -0
- package/dist/serial-json.test.d.ts +1 -0
- package/dist/serial-json.test.js +86 -0
- package/dist/system-ids.d.ts +6 -0
- package/dist/system-ids.js +10 -0
- package/dist/tools/index.d.ts +1 -0
- package/dist/tools/index.js +17 -0
- package/dist/tools/tools-factory.d.ts +5 -0
- package/dist/tools/tools-factory.js +34 -0
- package/dist/types/app-nav.d.ts +18 -0
- package/dist/types/app-nav.js +10 -0
- package/dist/types/assistant-runner-args.d.ts +9 -0
- package/dist/types/assistant-runner-args.js +2 -0
- package/dist/types/field-type.d.ts +37 -0
- package/dist/types/field-type.js +26 -0
- package/dist/types/peer-device.d.ts +40 -0
- package/dist/types/peer-device.js +14 -0
- package/dist/types/peers-package.d.ts +23 -0
- package/dist/types/peers-package.js +2 -0
- package/dist/types/workflow-logger.d.ts +2 -0
- package/dist/types/workflow-logger.js +2 -0
- package/dist/types/workflow-run-context.d.ts +12 -0
- package/dist/types/workflow-run-context.js +2 -0
- package/dist/types/workflow.d.ts +72 -0
- package/dist/types/workflow.js +24 -0
- package/dist/types/zod-types.d.ts +7 -0
- package/dist/types/zod-types.js +12 -0
- package/dist/users.query.d.ts +13 -0
- package/dist/users.query.js +134 -0
- package/dist/utils.d.ts +39 -0
- package/dist/utils.js +240 -0
- package/dist/utils.test.d.ts +1 -0
- package/dist/utils.test.js +140 -0
- package/package.json +50 -0
|
@@ -0,0 +1,406 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
const SQLiteDB = require("better-sqlite3");
|
|
4
|
+
const lodash_1 = require("lodash");
|
|
5
|
+
const zod_1 = require("zod");
|
|
6
|
+
const utils_1 = require("../../utils");
|
|
7
|
+
const field_type_1 = require("../../types/field-type");
|
|
8
|
+
const sql_data_source_1 = require("./sql.data-source");
|
|
9
|
+
const types_1 = require("./types");
|
|
10
|
+
class DBHarness {
|
|
11
|
+
_db = null;
|
|
12
|
+
get db() {
|
|
13
|
+
if (!this._db) {
|
|
14
|
+
this._db = new SQLiteDB(':memory:');
|
|
15
|
+
this._db.pragma('journal_mode = WAL');
|
|
16
|
+
}
|
|
17
|
+
return this._db;
|
|
18
|
+
}
|
|
19
|
+
async get(sql, params = []) {
|
|
20
|
+
return this.db.prepare(sql).get(params);
|
|
21
|
+
}
|
|
22
|
+
async all(sql, params = []) {
|
|
23
|
+
return this.db.prepare(sql).all(params);
|
|
24
|
+
}
|
|
25
|
+
async exec(sql, params = []) {
|
|
26
|
+
await this.db.prepare(sql).run(params);
|
|
27
|
+
}
|
|
28
|
+
async close() {
|
|
29
|
+
this._db?.close();
|
|
30
|
+
this._db = null;
|
|
31
|
+
}
|
|
32
|
+
}
|
|
33
|
+
let db = new DBHarness();
|
|
34
|
+
describe('table', () => {
|
|
35
|
+
const fiveMinutesMs = 5 * 60 * 1000; // 5 minutes in milliseconds
|
|
36
|
+
jest.setTimeout(fiveMinutesMs);
|
|
37
|
+
afterAll(async () => {
|
|
38
|
+
db.db?.close();
|
|
39
|
+
});
|
|
40
|
+
describe('when type is defined in code', () => {
|
|
41
|
+
const taskSchema = zod_1.z.object({
|
|
42
|
+
taskId: zod_1.z.string().default(() => (0, utils_1.newid)()),
|
|
43
|
+
title: zod_1.z.string(),
|
|
44
|
+
body: zod_1.z.string().default(''),
|
|
45
|
+
dueDate: zod_1.z.date().optional(),
|
|
46
|
+
completeDT: zod_1.z.date().optional(),
|
|
47
|
+
parentTaskId: zod_1.z.string().optional(),
|
|
48
|
+
});
|
|
49
|
+
const taskFields = (0, types_1.schemaToFields)(taskSchema);
|
|
50
|
+
const tasksTableType = {
|
|
51
|
+
name: 'test_tasks_code_driven',
|
|
52
|
+
description: 'Task',
|
|
53
|
+
primaryKeyName: 'taskId',
|
|
54
|
+
fields: taskFields,
|
|
55
|
+
indexes: [
|
|
56
|
+
{ fields: ['title'] },
|
|
57
|
+
{ unique: true, fields: ['taskId', { name: 'parentTaskId', order: 'DESC' }] }
|
|
58
|
+
],
|
|
59
|
+
};
|
|
60
|
+
const TasksTable = new sql_data_source_1.SQLDataSource(db, tasksTableType);
|
|
61
|
+
beforeAll(async () => {
|
|
62
|
+
await TasksTable.dropTableIfExists();
|
|
63
|
+
});
|
|
64
|
+
it('should do compile time checks and runtime typechecks', async () => {
|
|
65
|
+
const badSavePromise = TasksTable.save({
|
|
66
|
+
// @ts-expect-error - making sure type checking is working
|
|
67
|
+
id: 'test', title: 1,
|
|
68
|
+
body: 'This is a test task',
|
|
69
|
+
dueDate: new Date(),
|
|
70
|
+
});
|
|
71
|
+
// await expect(badSavePromise).rejects.toThrow(`SQLITE_ERROR: table ${tableName} has no column named id`);
|
|
72
|
+
await expect(badSavePromise).rejects.toThrow(/validation.*failed.*title.*expected string, received number/ims);
|
|
73
|
+
});
|
|
74
|
+
it('should do all CRUD operations', async () => {
|
|
75
|
+
const task1 = {
|
|
76
|
+
taskId: (0, utils_1.newid)(),
|
|
77
|
+
title: 'Test Task',
|
|
78
|
+
body: 'This is a test task',
|
|
79
|
+
dueDate: new Date(),
|
|
80
|
+
};
|
|
81
|
+
// CREATE
|
|
82
|
+
await TasksTable.save(task1);
|
|
83
|
+
// READ
|
|
84
|
+
let task1Db = await TasksTable.get(task1.taskId);
|
|
85
|
+
expect(task1Db).toEqual(task1);
|
|
86
|
+
// UPDATE
|
|
87
|
+
task1.title = 'Updated Task';
|
|
88
|
+
await TasksTable.save(task1);
|
|
89
|
+
task1Db = await TasksTable.get(task1.taskId);
|
|
90
|
+
expect(task1Db).toEqual(task1);
|
|
91
|
+
// DELETE
|
|
92
|
+
await TasksTable.delete(task1.taskId);
|
|
93
|
+
task1Db = await TasksTable.get(task1.taskId);
|
|
94
|
+
expect(task1Db).toBeUndefined();
|
|
95
|
+
});
|
|
96
|
+
it('should enable type-checked queries', async () => {
|
|
97
|
+
const task1 = {
|
|
98
|
+
taskId: (0, utils_1.newid)(),
|
|
99
|
+
title: 'Test Task',
|
|
100
|
+
body: 'This is a test task',
|
|
101
|
+
dueDate: new Date(),
|
|
102
|
+
};
|
|
103
|
+
// CREATE
|
|
104
|
+
await TasksTable.save(task1);
|
|
105
|
+
// query
|
|
106
|
+
const results = await TasksTable.list({ title: 'Test Task' });
|
|
107
|
+
expect(results[0]).toEqual(task1);
|
|
108
|
+
// // @ts-expect-error - making sure type checking is working
|
|
109
|
+
// const badQueryResults = await TasksTable.list({ id: 'test' });
|
|
110
|
+
// // note that sqlite does not check that columns exist, so this query succeeds with no matches
|
|
111
|
+
// expect(badQueryResults).toEqual([]);
|
|
112
|
+
// @ts-expect-error - making sure type checking is working
|
|
113
|
+
const badQueryPromise = TasksTable.list({ id: 'test' });
|
|
114
|
+
// SQLite error messages vary by version - some throw "no such column", others succeed with empty results
|
|
115
|
+
// The important thing is that TypeScript catches this at compile time with @ts-expect-error
|
|
116
|
+
// await expect(badQueryPromise).rejects.toThrow(`SQLITE_ERROR: table ${tableName} has no column named id`);
|
|
117
|
+
// await expect(badQueryPromise).rejects.toThrow(`no such column: "id" - should this be a string literal in single-quotes?`);
|
|
118
|
+
// Instead of checking for a specific error, we check that either:
|
|
119
|
+
// 1. It throws an error (newer SQLite versions), OR
|
|
120
|
+
// 2. It returns empty results (older SQLite versions that don't validate column names)
|
|
121
|
+
try {
|
|
122
|
+
const results = await badQueryPromise;
|
|
123
|
+
// If no error is thrown, expect empty results (SQLite doesn't always validate column existence)
|
|
124
|
+
expect(results).toEqual([]);
|
|
125
|
+
}
|
|
126
|
+
catch (error) {
|
|
127
|
+
// If an error is thrown, it should mention the column
|
|
128
|
+
expect(error.message).toMatch(/id|column/i);
|
|
129
|
+
}
|
|
130
|
+
});
|
|
131
|
+
it('should validate the data before saving', async () => {
|
|
132
|
+
// test validates a bad date
|
|
133
|
+
{
|
|
134
|
+
const badTask = {
|
|
135
|
+
taskId: (0, utils_1.newid)(),
|
|
136
|
+
title: 'Test Task',
|
|
137
|
+
body: 'This is a test task',
|
|
138
|
+
dueDate: 'not a date',
|
|
139
|
+
};
|
|
140
|
+
const badSavePromise = TasksTable.save(badTask);
|
|
141
|
+
await expect(badSavePromise).rejects.toThrow('Expected date, received string');
|
|
142
|
+
}
|
|
143
|
+
// test validates a bad id
|
|
144
|
+
{
|
|
145
|
+
const badTask = {
|
|
146
|
+
taskId: 'not an id',
|
|
147
|
+
title: 'Test Task',
|
|
148
|
+
body: 'This is a test task',
|
|
149
|
+
dueDate: new Date(),
|
|
150
|
+
};
|
|
151
|
+
const badSavePromise = TasksTable.save(badTask);
|
|
152
|
+
await expect(badSavePromise).rejects.toThrow('Invalid id');
|
|
153
|
+
}
|
|
154
|
+
// add task to database
|
|
155
|
+
const existingTaskId = (0, utils_1.newid)();
|
|
156
|
+
const task1 = {
|
|
157
|
+
taskId: existingTaskId,
|
|
158
|
+
title: 'Test Task',
|
|
159
|
+
body: 'This is a test task',
|
|
160
|
+
dueDate: new Date(),
|
|
161
|
+
};
|
|
162
|
+
await TasksTable.save(task1);
|
|
163
|
+
// test validates a duplicate id
|
|
164
|
+
{
|
|
165
|
+
// First verify the record actually exists
|
|
166
|
+
const existingRecord = await TasksTable.get(existingTaskId);
|
|
167
|
+
expect(existingRecord).toBeDefined();
|
|
168
|
+
expect(existingRecord?.taskId).toBe(existingTaskId);
|
|
169
|
+
const badTask = {
|
|
170
|
+
taskId: existingTaskId,
|
|
171
|
+
title: 'Test Task',
|
|
172
|
+
body: 'This is a test task',
|
|
173
|
+
dueDate: new Date(),
|
|
174
|
+
};
|
|
175
|
+
// Try-catch approach to handle environments where UNIQUE constraint might not throw
|
|
176
|
+
// or might throw with different error messages
|
|
177
|
+
try {
|
|
178
|
+
await TasksTable.insert(badTask);
|
|
179
|
+
// If we get here without throwing, the test should fail
|
|
180
|
+
// This can happen in some SQLite configurations that don't enforce UNIQUE constraints properly
|
|
181
|
+
fail('Expected insert to throw UNIQUE constraint error, but it succeeded');
|
|
182
|
+
}
|
|
183
|
+
catch (error) {
|
|
184
|
+
// Verify the error is related to UNIQUE constraint or primary key
|
|
185
|
+
expect(error.message).toMatch(/UNIQUE|constraint|PRIMARY KEY|taskId/i);
|
|
186
|
+
}
|
|
187
|
+
}
|
|
188
|
+
// test validates a bad date on update
|
|
189
|
+
{
|
|
190
|
+
const badTask = {
|
|
191
|
+
taskId: existingTaskId,
|
|
192
|
+
title: 'Test Task',
|
|
193
|
+
body: 'This is a test task',
|
|
194
|
+
dueDate: 'not a date',
|
|
195
|
+
};
|
|
196
|
+
const badSavePromise = TasksTable.update(badTask);
|
|
197
|
+
await expect(badSavePromise).rejects.toThrow('Expected date, received string');
|
|
198
|
+
}
|
|
199
|
+
});
|
|
200
|
+
it('should allow initializing a record with only part of the data', async () => {
|
|
201
|
+
const newTask = TasksTable.initRecord();
|
|
202
|
+
expect(() => TasksTable.schema.parse(newTask)).toThrow();
|
|
203
|
+
newTask.title = 'Test Task';
|
|
204
|
+
expect(() => TasksTable.schema.parse(newTask)).not.toThrow();
|
|
205
|
+
});
|
|
206
|
+
afterAll(async () => {
|
|
207
|
+
await TasksTable.dropTableIfExists();
|
|
208
|
+
await TasksTable.db.close();
|
|
209
|
+
});
|
|
210
|
+
});
|
|
211
|
+
describe('when type is loaded from database', () => {
|
|
212
|
+
const tasksTableType = {
|
|
213
|
+
name: 'test_tasks_data_driven',
|
|
214
|
+
description: 'Task',
|
|
215
|
+
primaryKeyName: 'taskId',
|
|
216
|
+
fields: [
|
|
217
|
+
{ name: 'taskId', description: 'Task ID', type: field_type_1.FieldType.id },
|
|
218
|
+
{ name: 'title', description: 'Title', type: field_type_1.FieldType.string },
|
|
219
|
+
{ name: 'body', description: 'Body', type: field_type_1.FieldType.string },
|
|
220
|
+
{ name: 'dueDate', description: 'Due Date', type: field_type_1.FieldType.Date, optional: true },
|
|
221
|
+
{ name: 'completeDT', description: 'Complete Date', type: field_type_1.FieldType.Date, optional: true },
|
|
222
|
+
{ name: 'parentTaskId', description: 'Parent Task ID', type: field_type_1.FieldType.id, optional: true },
|
|
223
|
+
],
|
|
224
|
+
};
|
|
225
|
+
const TasksTable = new sql_data_source_1.SQLDataSource(db, tasksTableType);
|
|
226
|
+
beforeAll(async () => {
|
|
227
|
+
await TasksTable.dropTableIfExists();
|
|
228
|
+
});
|
|
229
|
+
it('should do runtime typechecks', async () => {
|
|
230
|
+
const badSavePromise = TasksTable.save({
|
|
231
|
+
id: 'test',
|
|
232
|
+
title: 1,
|
|
233
|
+
body: 'This is a test task',
|
|
234
|
+
dueDate: new Date(),
|
|
235
|
+
});
|
|
236
|
+
// await expect(badSavePromise).rejects.toThrow(`SQLITE_ERROR: table ${tableName} has no column named id`);
|
|
237
|
+
await expect(badSavePromise).rejects.toThrow(/validation.*failed.*title.*expected string, received number/ims);
|
|
238
|
+
});
|
|
239
|
+
it('should do all CRUD operations', async () => {
|
|
240
|
+
const task1 = {
|
|
241
|
+
taskId: (0, utils_1.newid)(),
|
|
242
|
+
title: 'Test Task',
|
|
243
|
+
body: 'This is a test task',
|
|
244
|
+
dueDate: new Date(),
|
|
245
|
+
};
|
|
246
|
+
// CREATE
|
|
247
|
+
await TasksTable.save(task1);
|
|
248
|
+
// READ
|
|
249
|
+
let task1Db = await TasksTable.get(task1.taskId);
|
|
250
|
+
expect(task1Db).toEqual(task1);
|
|
251
|
+
// UPDATE
|
|
252
|
+
task1.title = 'Updated Task';
|
|
253
|
+
await TasksTable.save(task1);
|
|
254
|
+
task1Db = await TasksTable.get(task1.taskId);
|
|
255
|
+
expect(task1Db).toEqual(task1);
|
|
256
|
+
// DELETE
|
|
257
|
+
await TasksTable.delete(task1.taskId);
|
|
258
|
+
task1Db = await TasksTable.get(task1.taskId);
|
|
259
|
+
expect(task1Db).toBeUndefined();
|
|
260
|
+
});
|
|
261
|
+
afterAll(async () => {
|
|
262
|
+
await TasksTable.dropTableIfExists();
|
|
263
|
+
await TasksTable.db.close();
|
|
264
|
+
});
|
|
265
|
+
});
|
|
266
|
+
describe('when the schema has changed', () => {
|
|
267
|
+
const notesTableTypeV1 = {
|
|
268
|
+
name: 'test_table_notes',
|
|
269
|
+
description: 'Notes',
|
|
270
|
+
primaryKeyName: 'entryId',
|
|
271
|
+
fields: [
|
|
272
|
+
{ name: 'entryId', description: 'Entry ID', type: field_type_1.FieldType.id },
|
|
273
|
+
{ name: 'title', description: 'Title', type: field_type_1.FieldType.string },
|
|
274
|
+
{ name: 'body', description: 'Body', type: field_type_1.FieldType.string, defaultValue: '' },
|
|
275
|
+
],
|
|
276
|
+
indexes: [
|
|
277
|
+
{ fields: ['title'] },
|
|
278
|
+
],
|
|
279
|
+
};
|
|
280
|
+
const NotesTableV1 = new sql_data_source_1.SQLDataSource(db, notesTableTypeV1);
|
|
281
|
+
beforeAll(async () => {
|
|
282
|
+
await NotesTableV1.dropTableIfExists();
|
|
283
|
+
await NotesTableV1.list();
|
|
284
|
+
});
|
|
285
|
+
it('should not update table if already up to date', async () => {
|
|
286
|
+
const updated = await NotesTableV1.initTable(true);
|
|
287
|
+
expect(updated).toBe(false);
|
|
288
|
+
});
|
|
289
|
+
it('should detect column changes and modify tables to match', async () => {
|
|
290
|
+
const v1Note = await NotesTableV1.insert({ title: 'Test Note', body: 'This is a test note' });
|
|
291
|
+
// add optional number column
|
|
292
|
+
const notesTableTypeV2 = (0, lodash_1.cloneDeep)(notesTableTypeV1);
|
|
293
|
+
notesTableTypeV2.fields.push({ name: 'priority', description: 'Priority of note', type: field_type_1.FieldType.number, optional: true });
|
|
294
|
+
const NotesTableV2 = new sql_data_source_1.SQLDataSource(db, notesTableTypeV2);
|
|
295
|
+
const updatedToV2 = await NotesTableV2.initTable(true);
|
|
296
|
+
expect(updatedToV2).toBe(true);
|
|
297
|
+
const v2Note = await NotesTableV2.get(v1Note.entryId);
|
|
298
|
+
expect(v2Note).toEqual({ ...v1Note });
|
|
299
|
+
// add required date column
|
|
300
|
+
const notesTableTypeV3 = (0, lodash_1.cloneDeep)(notesTableTypeV2);
|
|
301
|
+
notesTableTypeV3.fields.push({ name: 'modified', description: 'Last time this was modified', type: field_type_1.FieldType.Date, defaultValue: () => new Date() });
|
|
302
|
+
const NotesTableV3 = new sql_data_source_1.SQLDataSource(db, notesTableTypeV3);
|
|
303
|
+
const updatedToV3 = await NotesTableV3.initTable(true);
|
|
304
|
+
expect(updatedToV3).toBe(true);
|
|
305
|
+
const v3Note = await NotesTableV3.get(v1Note.entryId);
|
|
306
|
+
expect(v3Note).toEqual({ ...v1Note, modified: v3Note.modified });
|
|
307
|
+
// remove column
|
|
308
|
+
const notesTableTypeV4 = (0, lodash_1.cloneDeep)(notesTableTypeV3);
|
|
309
|
+
notesTableTypeV4.fields.pop();
|
|
310
|
+
const NotesTableV4 = new sql_data_source_1.SQLDataSource(db, notesTableTypeV4);
|
|
311
|
+
const updatedToV4 = await NotesTableV4.initTable(true);
|
|
312
|
+
expect(updatedToV4).toBe(true);
|
|
313
|
+
const v4Note = await NotesTableV4.get(v1Note.entryId);
|
|
314
|
+
expect(v4Note).toEqual({ ...v1Note });
|
|
315
|
+
// change column type and make required
|
|
316
|
+
const notesTableTypeV5 = (0, lodash_1.cloneDeep)(notesTableTypeV4);
|
|
317
|
+
notesTableTypeV5.fields[3].type = field_type_1.FieldType.boolean;
|
|
318
|
+
notesTableTypeV5.fields[3].optional = false;
|
|
319
|
+
const NotesTableV5 = new sql_data_source_1.SQLDataSource(db, notesTableTypeV5);
|
|
320
|
+
const updatedToV5 = await NotesTableV5.initTable(true);
|
|
321
|
+
expect(updatedToV5).toBe(true);
|
|
322
|
+
const v5Note = await NotesTableV5.get(v1Note.entryId);
|
|
323
|
+
expect(v5Note).toEqual({ ...v1Note, priority: false });
|
|
324
|
+
const updatedToV5Again = await NotesTableV5.initTable(true);
|
|
325
|
+
expect(updatedToV5Again).toBe(false);
|
|
326
|
+
});
|
|
327
|
+
it('should detect and change column indexes', async () => {
|
|
328
|
+
const notesTableTypeV1Indexes = (0, lodash_1.cloneDeep)(notesTableTypeV1);
|
|
329
|
+
notesTableTypeV1Indexes.name += '_indexes';
|
|
330
|
+
const NotesTableV1 = new sql_data_source_1.SQLDataSource(db, notesTableTypeV1Indexes);
|
|
331
|
+
await NotesTableV1.dropTableIfExists();
|
|
332
|
+
await NotesTableV1.initTable(true);
|
|
333
|
+
const indexesV1 = await NotesTableV1.db.all(`PRAGMA index_list("${NotesTableV1.tableName}")`, []);
|
|
334
|
+
expect(indexesV1).toHaveLength(2);
|
|
335
|
+
// remove indexes
|
|
336
|
+
const notesTableTypeV2Indexes = (0, lodash_1.cloneDeep)(notesTableTypeV1Indexes);
|
|
337
|
+
notesTableTypeV2Indexes.indexes = [];
|
|
338
|
+
const NotesTableV2 = new sql_data_source_1.SQLDataSource(db, notesTableTypeV2Indexes);
|
|
339
|
+
const updatedToV2 = await NotesTableV2.initTable(true);
|
|
340
|
+
// indexes are updated separately from tables
|
|
341
|
+
expect(updatedToV2).toBe(false);
|
|
342
|
+
const indexesV2 = await NotesTableV1.db.all(`PRAGMA index_list("${NotesTableV1.tableName}")`, []);
|
|
343
|
+
expect(indexesV2).toHaveLength(1);
|
|
344
|
+
});
|
|
345
|
+
});
|
|
346
|
+
describe('when the field type is `object`', () => {
|
|
347
|
+
const tableType = {
|
|
348
|
+
name: 'test_table_objects',
|
|
349
|
+
description: 'Objects',
|
|
350
|
+
primaryKeyName: 'objectId',
|
|
351
|
+
fields: [
|
|
352
|
+
{ name: 'objectId', description: 'Object ID', type: field_type_1.FieldType.id },
|
|
353
|
+
{ name: 'name', description: 'Name', type: field_type_1.FieldType.string },
|
|
354
|
+
{ name: 'data', description: 'Data', type: field_type_1.FieldType.object },
|
|
355
|
+
],
|
|
356
|
+
};
|
|
357
|
+
const ObjectsTable = new sql_data_source_1.SQLDataSource(db, tableType);
|
|
358
|
+
beforeAll(async () => {
|
|
359
|
+
await ObjectsTable.dropTableIfExists();
|
|
360
|
+
});
|
|
361
|
+
it('should store objects as JSON strings', async () => {
|
|
362
|
+
const obj = {
|
|
363
|
+
objectId: (0, utils_1.newid)(),
|
|
364
|
+
name: 'Test Object',
|
|
365
|
+
data: { test: 'data' },
|
|
366
|
+
};
|
|
367
|
+
await ObjectsTable.save(obj);
|
|
368
|
+
const objDb = await ObjectsTable.get(obj.objectId);
|
|
369
|
+
expect(objDb).toEqual(obj);
|
|
370
|
+
});
|
|
371
|
+
afterAll(async () => {
|
|
372
|
+
await ObjectsTable.dropTableIfExists();
|
|
373
|
+
await ObjectsTable.db.close();
|
|
374
|
+
});
|
|
375
|
+
});
|
|
376
|
+
describe('when the field type is an array of primitives', () => {
|
|
377
|
+
const tableType = {
|
|
378
|
+
name: 'test_table_arrays',
|
|
379
|
+
description: 'Arrays',
|
|
380
|
+
primaryKeyName: 'arrayId',
|
|
381
|
+
fields: [
|
|
382
|
+
{ name: 'arrayId', description: 'Array ID', type: field_type_1.FieldType.id },
|
|
383
|
+
{ name: 'name', description: 'Name', type: field_type_1.FieldType.string },
|
|
384
|
+
{ name: 'data', description: 'Data', type: field_type_1.FieldType.string, isArray: true },
|
|
385
|
+
],
|
|
386
|
+
};
|
|
387
|
+
const ArraysTable = new sql_data_source_1.SQLDataSource(db, tableType);
|
|
388
|
+
beforeAll(async () => {
|
|
389
|
+
await ArraysTable.dropTableIfExists();
|
|
390
|
+
});
|
|
391
|
+
it('should store arrays as JSON strings and automatically rehydrate them', async () => {
|
|
392
|
+
const obj = {
|
|
393
|
+
arrayId: (0, utils_1.newid)(),
|
|
394
|
+
name: 'Test Array',
|
|
395
|
+
data: ['test', 'data'],
|
|
396
|
+
};
|
|
397
|
+
await ArraysTable.save(obj);
|
|
398
|
+
const objDb = await ArraysTable.get(obj.arrayId);
|
|
399
|
+
expect(objDb).toEqual(obj);
|
|
400
|
+
});
|
|
401
|
+
afterAll(async () => {
|
|
402
|
+
await ArraysTable.dropTableIfExists();
|
|
403
|
+
await ArraysTable.db.close();
|
|
404
|
+
});
|
|
405
|
+
});
|
|
406
|
+
});
|
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
import { Emitter, Event } from "../../events";
|
|
2
|
+
import { DataFilter, ICursorIterable, IDataQueryParams, IDataSource } from "./data-query";
|
|
3
|
+
export interface IDataChangedEvent<T> {
|
|
4
|
+
dataObject: T;
|
|
5
|
+
op: 'insert' | 'update' | 'delete';
|
|
6
|
+
source: 'local' | 'remote';
|
|
7
|
+
}
|
|
8
|
+
export declare class SubscribableDataSource<T extends {
|
|
9
|
+
[key: string]: any;
|
|
10
|
+
}> implements IDataSource<T> {
|
|
11
|
+
protected readonly dataSource: IDataSource<T>;
|
|
12
|
+
readonly tableName: string;
|
|
13
|
+
readonly primaryKeyName: string;
|
|
14
|
+
protected readonly dataChangedEmitter: Emitter<IDataChangedEvent<T>>;
|
|
15
|
+
readonly dataChanged: Event<IDataChangedEvent<T>>;
|
|
16
|
+
constructor(dataSource: IDataSource<T>);
|
|
17
|
+
get(id: string): Promise<T | undefined>;
|
|
18
|
+
list(filter?: DataFilter<T>, opts?: IDataQueryParams<T>): Promise<T[]>;
|
|
19
|
+
count(filter?: DataFilter<T>): Promise<number>;
|
|
20
|
+
cursor(filter?: DataFilter<T>, opts?: IDataQueryParams<T>): ICursorIterable<T>;
|
|
21
|
+
save(record: T): Promise<T>;
|
|
22
|
+
insert(record: T): Promise<T>;
|
|
23
|
+
update(record: T): Promise<T>;
|
|
24
|
+
delete(idOrRecord: string | T): Promise<void>;
|
|
25
|
+
}
|
|
@@ -0,0 +1,72 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.SubscribableDataSource = void 0;
|
|
4
|
+
const events_1 = require("../../events");
|
|
5
|
+
const data_query_1 = require("./data-query");
|
|
6
|
+
// Tracks how many SubscribableDataSource instances exist per table name so
// each instance's change emitter gets a unique event name.
const eventNameCounts = new Map();
|
|
7
|
+
/**
 * Decorator over an IDataSource: delegates all reads/writes to the wrapped
 * source and emits a data-changed event for every local mutation.
 */
class SubscribableDataSource {
    dataSource;
    tableName;
    primaryKeyName;
    dataChangedEmitter;
    dataChanged;
    constructor(dataSource) {
        this.dataSource = dataSource;
        this.tableName = dataSource.tableName;
        this.primaryKeyName = dataSource.primaryKeyName;
        // Ensure different data source emitters for the same table have
        // distinct names by appending a per-table instance index.
        const instanceIndex = eventNameCounts.get(this.tableName) || 0;
        eventNameCounts.set(this.tableName, instanceIndex + 1);
        this.dataChangedEmitter = new events_1.Emitter(`SubscribableDataSource_${this.tableName}_DataChanged_${instanceIndex}`);
        this.dataChanged = this.dataChangedEmitter.event;
    }
    /** Fetch one record by primary key; resolves undefined when absent. */
    async get(id) {
        return this.dataSource.get(id);
    }
    /** List matching records; read-only, emits nothing. */
    async list(filter = {}, opts = {}) {
        return this.dataSource.list(filter, opts);
    }
    /** Count matching records via the shared query helper. */
    async count(filter = {}) {
        return (0, data_query_1.dataSourceCount)(this.dataSource, filter);
    }
    /** Cursor over matching records for incremental iteration. */
    cursor(filter = {}, opts = {}) {
        // NOTE(review): '|| this' looks unreachable — the constructor
        // requires a dataSource — kept as-is to preserve behavior.
        return (0, data_query_1.dataSourceCursor)(this.dataSource || this, filter, opts);
    }
    /**
     * Upsert: update when a record with the same primary key already
     * exists, insert otherwise. The matching event is emitted by the
     * insert/update call it delegates to.
     */
    async save(record) {
        const primaryKey = record[this.primaryKeyName];
        if (primaryKey && await this.dataSource.get(primaryKey)) {
            return await this.update(record);
        }
        return await this.insert(record);
    }
    /** Insert and emit an 'insert' event carrying the stored record. */
    async insert(record) {
        const stored = await this.dataSource.insert(record);
        this.dataChangedEmitter.emit({ dataObject: stored, op: 'insert', source: 'local' });
        return stored;
    }
    /** Update and emit an 'update' event carrying the stored record. */
    async update(record) {
        const stored = await this.dataSource.update(record);
        this.dataChangedEmitter.emit({ dataObject: stored, op: 'update', source: 'local' });
        return stored;
    }
    /**
     * Delete by id or record. No-op when the record does not exist;
     * otherwise emits a 'delete' event with the pre-deletion record.
     */
    async delete(idOrRecord) {
        const id = typeof idOrRecord === 'string' ? idOrRecord : idOrRecord[this.primaryKeyName];
        const existing = await this.get(id);
        if (!existing) {
            return;
        }
        await this.dataSource.delete(idOrRecord);
        this.dataChangedEmitter.emit({ dataObject: existing, op: 'delete', source: 'local' });
    }
}
|
|
72
|
+
exports.SubscribableDataSource = SubscribableDataSource;
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export {};
|
|
@@ -0,0 +1,93 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
const zod_1 = require("zod");
|
|
4
|
+
const table_container_1 = require("./table-container");
|
|
5
|
+
const event_registry_1 = require("./event-registry");
|
|
6
|
+
const client_proxy_data_source_1 = require("./client-proxy.data-source");
|
|
7
|
+
const field_type_1 = require("../../types/field-type");
|
|
8
|
+
describe('TableFactory Event Sharing', () => {
    const testSchema = zod_1.z.object({
        id: zod_1.z.string(),
        name: zod_1.z.string(),
    });
    const testMetaData = {
        name: 'TestTable',
        description: 'Test table for event sharing',
        primaryKeyName: 'id',
        fields: [
            { name: 'id', type: field_type_1.FieldType.string, optional: false },
            { name: 'name', type: field_type_1.FieldType.string, optional: false },
        ],
    };
    // Builds a jest-mocked table factory wired to the given event registry.
    const makeFactory = (eventRegistry) => jest.fn().mockImplementation((metaData, schema, TableClass) => {
        const deps = {
            dataSource: new client_proxy_data_source_1.ClientProxyDataSource(metaData, schema || testSchema, 'test-context'),
            eventRegistry,
        };
        return new (TableClass || require('./table').Table)(metaData, deps);
    });
    it('should share event emitters when using the same EventRegistry', () => {
        // One registry shared by two independent containers/factories.
        const sharedRegistry = new event_registry_1.EventRegistry({
            dataContextId: 'test-context'
        });
        const container1 = new table_container_1.TableContainer(makeFactory(sharedRegistry));
        const container2 = new table_container_1.TableContainer(makeFactory(sharedRegistry));
        const table1 = container1.getTable(testMetaData, testSchema);
        const table2 = container2.getTable(testMetaData, testSchema);
        // Same registry => the emitter and its event object are shared...
        expect(table1.dataChangedEmitter).toBe(table2.dataChangedEmitter);
        expect(table1.dataChanged).toBe(table2.dataChanged);
        // ...and only a single emitter was ever registered for this table.
        expect(sharedRegistry.getEmitterCount()).toBe(1);
    });
    it('should have separate event emitters when using different EventRegistry instances', () => {
        // Two distinct registries, each backed by its own data context.
        const registry1 = new event_registry_1.EventRegistry({ dataContextId: 'test-context-1' });
        const registry2 = new event_registry_1.EventRegistry({ dataContextId: 'test-context-2' });
        const container1 = new table_container_1.TableContainer(makeFactory(registry1));
        const container2 = new table_container_1.TableContainer(makeFactory(registry2));
        const table1 = container1.getTable(testMetaData, testSchema);
        const table2 = container2.getTable(testMetaData, testSchema);
        // Different registries => distinct emitters and event objects...
        expect(table1.dataChangedEmitter).not.toBe(table2.dataChangedEmitter);
        expect(table1.dataChanged).not.toBe(table2.dataChanged);
        // ...with each registry holding exactly one emitter.
        expect(registry1.getEmitterCount()).toBe(1);
        expect(registry2.getEmitterCount()).toBe(1);
    });
});
|
|
@@ -0,0 +1,39 @@
|
|
|
1
|
+
import { z } from 'zod';
|
|
2
|
+
import { ITableMetaData } from './types';
|
|
3
|
+
import { Table } from './table';
|
|
4
|
+
import { ITableDefinition, TableFactory, TableConstructor } from './table-definitions.type';
|
|
5
|
+
/**
 * Holds table definitions and lazily-created Table instances, delegating
 * actual construction to the injected TableFactory.
 */
export declare class TableContainer {
    /** Factory invoked to construct Table instances from definitions. */
    readonly tableFactory: TableFactory;
    /** Optional group scope for this container — TODO confirm semantics against callers. */
    readonly groupId?: string | undefined;
    private readonly tableDefinitions;
    private tableInstances;
    constructor(tableFactory: TableFactory, groupId?: string | undefined);
    /**
     * Registers a table definition for later instantiation by the factory.
     * Pass `overwrite: true` to replace an existing definition.
     */
    registerTableDefinition<T extends {
        [key: string]: any;
    }>(tableDefinition: ITableDefinition<T>, opts?: {
        overwrite?: boolean;
    }): void;
    /**
     * This shouldn't be used unless you have a good reason to bypass the normal table creation process.
     * The normal process is to register the table definition and let the factory create the instance.
     */
    setTable(table: Table<any>, opts?: {
        overwrite?: boolean;
    }): void;
    /** Resolves a table by name (must already be registered/created). */
    getTable<T extends {
        [key: string]: any;
    }>(tableName: string): Table<T>;
    /** Resolves (creating via the factory if needed) a table for the given metadata. */
    getTable<T extends {
        [key: string]: any;
    }>(metaData: ITableMetaData, schema?: z.AnyZodObject, tableConstructor?: TableConstructor<T>): Table<T>;
    /** Name-only lookup variant of getTable. */
    getTableByName<T extends {
        [key: string]: any;
    }>(tableName: string): Table<T>;
    /**
     * This ensures all tables have been new'ed up and then returns the entire list of them.
     *
     * This is primarily intended to be used by the peer-device sync operation which needs all
     * tables to be able to apply changes correctly.
     */
    getAllTables(): Table<any>[];
}
|