@powerhousedao/reactor 4.1.0-dev.73 → 4.1.0-dev.75
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/src/core/reactor.d.ts +6 -12
- package/dist/src/core/reactor.d.ts.map +1 -1
- package/dist/src/core/reactor.js +127 -79
- package/dist/src/core/reactor.js.map +1 -1
- package/dist/src/events/types.d.ts +14 -0
- package/dist/src/events/types.d.ts.map +1 -1
- package/dist/src/events/types.js +6 -0
- package/dist/src/events/types.js.map +1 -1
- package/dist/src/executor/simple-job-executor-manager.d.ts +4 -1
- package/dist/src/executor/simple-job-executor-manager.d.ts.map +1 -1
- package/dist/src/executor/simple-job-executor-manager.js +86 -36
- package/dist/src/executor/simple-job-executor-manager.js.map +1 -1
- package/dist/src/executor/simple-job-executor.d.ts +12 -7
- package/dist/src/executor/simple-job-executor.d.ts.map +1 -1
- package/dist/src/executor/simple-job-executor.js +281 -62
- package/dist/src/executor/simple-job-executor.js.map +1 -1
- package/dist/src/executor/types.d.ts +3 -2
- package/dist/src/executor/types.d.ts.map +1 -1
- package/dist/src/executor/types.js.map +1 -1
- package/dist/src/index.d.ts +8 -2
- package/dist/src/index.d.ts.map +1 -1
- package/dist/src/index.js +8 -1
- package/dist/src/index.js.map +1 -1
- package/dist/src/job-tracker/in-memory-job-tracker.d.ts +15 -0
- package/dist/src/job-tracker/in-memory-job-tracker.d.ts.map +1 -0
- package/dist/src/job-tracker/in-memory-job-tracker.js +78 -0
- package/dist/src/job-tracker/in-memory-job-tracker.js.map +1 -0
- package/dist/src/job-tracker/index.d.ts +3 -0
- package/dist/src/job-tracker/index.d.ts.map +1 -0
- package/dist/src/job-tracker/index.js +2 -0
- package/dist/src/job-tracker/index.js.map +1 -0
- package/dist/src/job-tracker/interfaces.d.ts +41 -0
- package/dist/src/job-tracker/interfaces.d.ts.map +1 -0
- package/dist/src/job-tracker/interfaces.js +2 -0
- package/dist/src/job-tracker/interfaces.js.map +1 -0
- package/dist/src/queue/types.d.ts +4 -4
- package/dist/src/queue/types.d.ts.map +1 -1
- package/dist/src/read-models/coordinator.d.ts +38 -0
- package/dist/src/read-models/coordinator.d.ts.map +1 -0
- package/dist/src/read-models/coordinator.js +62 -0
- package/dist/src/read-models/coordinator.js.map +1 -0
- package/dist/src/read-models/document-view.d.ts +4 -5
- package/dist/src/read-models/document-view.d.ts.map +1 -1
- package/dist/src/read-models/document-view.js +170 -119
- package/dist/src/read-models/document-view.js.map +1 -1
- package/dist/src/read-models/interfaces.d.ts +29 -0
- package/dist/src/read-models/interfaces.d.ts.map +1 -0
- package/dist/src/read-models/interfaces.js +2 -0
- package/dist/src/read-models/interfaces.js.map +1 -0
- package/dist/src/storage/interfaces.d.ts +5 -20
- package/dist/src/storage/interfaces.d.ts.map +1 -1
- package/package.json +3 -3
@@ -0,0 +1,78 @@
+import { JobStatus } from "../shared/types.js";
+/**
+ * In-memory implementation of IJobTracker.
+ * Maintains job status in a Map for synchronous access.
+ */
+export class InMemoryJobTracker {
+    jobs = new Map();
+    registerJob(jobInfo) {
+        this.jobs.set(jobInfo.id, { ...jobInfo });
+    }
+    markRunning(jobId) {
+        const job = this.jobs.get(jobId);
+        if (!job) {
+            // Job not found - might have been registered elsewhere
+            // Create minimal job entry
+            this.jobs.set(jobId, {
+                id: jobId,
+                status: JobStatus.RUNNING,
+                createdAtUtcIso: new Date().toISOString(),
+            });
+            return;
+        }
+        // Update existing job
+        this.jobs.set(jobId, {
+            ...job,
+            status: JobStatus.RUNNING,
+        });
+    }
+    markCompleted(jobId, result) {
+        const job = this.jobs.get(jobId);
+        if (!job) {
+            // Job not found - create minimal completed entry
+            this.jobs.set(jobId, {
+                id: jobId,
+                status: JobStatus.COMPLETED,
+                createdAtUtcIso: new Date().toISOString(),
+                completedAtUtcIso: new Date().toISOString(),
+                // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
+                result,
+            });
+            return;
+        }
+        // Update existing job
+        this.jobs.set(jobId, {
+            ...job,
+            status: JobStatus.COMPLETED,
+            completedAtUtcIso: new Date().toISOString(),
+            // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
+            result,
+        });
+    }
+    markFailed(jobId, error) {
+        const job = this.jobs.get(jobId);
+        if (!job) {
+            // Job not found - create minimal failed entry
+            this.jobs.set(jobId, {
+                id: jobId,
+                status: JobStatus.FAILED,
+                createdAtUtcIso: new Date().toISOString(),
+                completedAtUtcIso: new Date().toISOString(),
+                error,
+            });
+            return;
+        }
+        // Update existing job
+        this.jobs.set(jobId, {
+            ...job,
+            status: JobStatus.FAILED,
+            completedAtUtcIso: new Date().toISOString(),
+            error,
+        });
+    }
+    getJobStatus(jobId) {
+        const job = this.jobs.get(jobId);
+        return job ? { ...job } : null;
+    }
+}
+//# sourceMappingURL=in-memory-job-tracker.js.map
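A minimal usage sketch of the new tracker, in TypeScript. Assumptions are flagged inline: whether InMemoryJobTracker, JobStatus, and JobInfo are re-exported from the package entry point is not confirmed by this diff (internally they live under src/job-tracker and src/shared/types.ts), and the JobInfo literal only shows the fields visible above.

import { InMemoryJobTracker, JobStatus } from "@powerhousedao/reactor"; // import path is an assumption

const tracker = new InMemoryJobTracker();
// Walk the PENDING → RUNNING → COMPLETED lifecycle for one job.
tracker.registerJob({
    id: "job-1",
    status: JobStatus.PENDING,
    createdAtUtcIso: new Date().toISOString(),
});
tracker.markRunning("job-1");
tracker.markCompleted("job-1", { operationsApplied: 3 });
console.log(tracker.getJobStatus("job-1")?.status); // JobStatus.COMPLETED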
@@ -0,0 +1 @@
{"version":3,"file":"in-memory-job-tracker.js","sourceRoot":"","sources":["../../../src/job-tracker/in-memory-job-tracker.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,SAAS,EAAgB,MAAM,oBAAoB,CAAC;AAG7D;;;GAGG;AACH,MAAM,OAAO,kBAAkB;IACrB,IAAI,GAAG,IAAI,GAAG,EAAmB,CAAC;IAE1C,WAAW,CAAC,OAAgB;QAC1B,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,OAAO,CAAC,EAAE,EAAE,EAAE,GAAG,OAAO,EAAE,CAAC,CAAC;IAC5C,CAAC;IAED,WAAW,CAAC,KAAa;QACvB,MAAM,GAAG,GAAG,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACjC,IAAI,CAAC,GAAG,EAAE,CAAC;YACT,uDAAuD;YACvD,2BAA2B;YAC3B,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,KAAK,EAAE;gBACnB,EAAE,EAAE,KAAK;gBACT,MAAM,EAAE,SAAS,CAAC,OAAO;gBACzB,eAAe,EAAE,IAAI,IAAI,EAAE,CAAC,WAAW,EAAE;aAC1C,CAAC,CAAC;YACH,OAAO;QACT,CAAC;QAED,sBAAsB;QACtB,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,KAAK,EAAE;YACnB,GAAG,GAAG;YACN,MAAM,EAAE,SAAS,CAAC,OAAO;SAC1B,CAAC,CAAC;IACL,CAAC;IAED,aAAa,CAAC,KAAa,EAAE,MAAY;QACvC,MAAM,GAAG,GAAG,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACjC,IAAI,CAAC,GAAG,EAAE,CAAC;YACT,iDAAiD;YACjD,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,KAAK,EAAE;gBACnB,EAAE,EAAE,KAAK;gBACT,MAAM,EAAE,SAAS,CAAC,SAAS;gBAC3B,eAAe,EAAE,IAAI,IAAI,EAAE,CAAC,WAAW,EAAE;gBACzC,iBAAiB,EAAE,IAAI,IAAI,EAAE,CAAC,WAAW,EAAE;gBAC3C,mEAAmE;gBACnE,MAAM;aACP,CAAC,CAAC;YACH,OAAO;QACT,CAAC;QAED,sBAAsB;QACtB,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,KAAK,EAAE;YACnB,GAAG,GAAG;YACN,MAAM,EAAE,SAAS,CAAC,SAAS;YAC3B,iBAAiB,EAAE,IAAI,IAAI,EAAE,CAAC,WAAW,EAAE;YAC3C,mEAAmE;YACnE,MAAM;SACP,CAAC,CAAC;IACL,CAAC;IAED,UAAU,CAAC,KAAa,EAAE,KAAa;QACrC,MAAM,GAAG,GAAG,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACjC,IAAI,CAAC,GAAG,EAAE,CAAC;YACT,8CAA8C;YAC9C,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,KAAK,EAAE;gBACnB,EAAE,EAAE,KAAK;gBACT,MAAM,EAAE,SAAS,CAAC,MAAM;gBACxB,eAAe,EAAE,IAAI,IAAI,EAAE,CAAC,WAAW,EAAE;gBACzC,iBAAiB,EAAE,IAAI,IAAI,EAAE,CAAC,WAAW,EAAE;gBAC3C,KAAK;aACN,CAAC,CAAC;YACH,OAAO;QACT,CAAC;QAED,sBAAsB;QACtB,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,KAAK,EAAE;YACnB,GAAG,GAAG;YACN,MAAM,EAAE,SAAS,CAAC,MAAM;YACxB,iBAAiB,EAAE,IAAI,IAAI,EAAE,CAAC,WAAW,EAAE;YAC3C,KAAK;SACN,CAAC,CAAC;IACL,CAAC;IAED,YAAY,CAAC,KAAa;QACxB,MAAM,GAAG,GAAG,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACjC,OAAO,GAAG,CAAC,CAAC,CAAC,EAAE,GAAG,GAAG,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC;IACjC,CAAC;CACF"}
@@ -0,0 +1 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/job-tracker/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,kBAAkB,EAAE,MAAM,4BAA4B,CAAC;AAChE,YAAY,EAAE,WAAW,EAAE,MAAM,iBAAiB,CAAC"}
@@ -0,0 +1 @@
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../src/job-tracker/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,kBAAkB,EAAE,MAAM,4BAA4B,CAAC"}
@@ -0,0 +1,41 @@
+import type { JobInfo } from "../shared/types.js";
+/**
+ * Interface for tracking job lifecycle status.
+ * Maintains job state throughout execution: PENDING → RUNNING → COMPLETED/FAILED.
+ */
+export interface IJobTracker {
+    /**
+     * Register a new job with PENDING status.
+     *
+     * @param jobInfo - The job information to register
+     */
+    registerJob(jobInfo: JobInfo): void;
+    /**
+     * Update a job's status to RUNNING.
+     *
+     * @param jobId - The job ID to mark as running
+     */
+    markRunning(jobId: string): void;
+    /**
+     * Mark a job as completed successfully.
+     *
+     * @param jobId - The job ID to mark as completed
+     * @param result - Optional result data from the job execution
+     */
+    markCompleted(jobId: string, result?: any): void;
+    /**
+     * Mark a job as failed.
+     *
+     * @param jobId - The job ID to mark as failed
+     * @param error - Error message describing the failure
+     */
+    markFailed(jobId: string, error: string): void;
+    /**
+     * Retrieve the current status of a job.
+     *
+     * @param jobId - The job ID to query
+     * @returns The job information, or null if the job is not found
+     */
+    getJobStatus(jobId: string): JobInfo | null;
+}
+//# sourceMappingURL=interfaces.d.ts.map
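Because tracking sits behind IJobTracker, the in-memory tracker can be wrapped or swapped. A sketch of a hypothetical logging decorator (the package-root imports are an assumption; the types ship from src/job-tracker/interfaces.ts and src/shared/types.ts):

import type { IJobTracker, JobInfo } from "@powerhousedao/reactor"; // import path is an assumption

// Logs every lifecycle transition, then delegates to another tracker.
class LoggingJobTracker implements IJobTracker {
    constructor(private readonly inner: IJobTracker) {}
    registerJob(jobInfo: JobInfo): void {
        console.log(`[job ${jobInfo.id}] registered`);
        this.inner.registerJob(jobInfo);
    }
    markRunning(jobId: string): void {
        console.log(`[job ${jobId}] running`);
        this.inner.markRunning(jobId);
    }
    markCompleted(jobId: string, result?: unknown): void {
        console.log(`[job ${jobId}] completed`);
        this.inner.markCompleted(jobId, result);
    }
    markFailed(jobId: string, error: string): void {
        console.error(`[job ${jobId}] failed: ${error}`);
        this.inner.markFailed(jobId, error);
    }
    getJobStatus(jobId: string): JobInfo | null {
        return this.inner.getJobStatus(jobId);
    }
}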
@@ -0,0 +1 @@
{"version":3,"file":"interfaces.d.ts","sourceRoot":"","sources":["../../../src/job-tracker/interfaces.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,OAAO,EAAE,MAAM,oBAAoB,CAAC;AAElD;;;GAGG;AACH,MAAM,WAAW,WAAW;IAC1B;;;;OAIG;IACH,WAAW,CAAC,OAAO,EAAE,OAAO,GAAG,IAAI,CAAC;IAEpC;;;;OAIG;IACH,WAAW,CAAC,KAAK,EAAE,MAAM,GAAG,IAAI,CAAC;IAEjC;;;;;OAKG;IACH,aAAa,CAAC,KAAK,EAAE,MAAM,EAAE,MAAM,CAAC,EAAE,GAAG,GAAG,IAAI,CAAC;IAEjD;;;;;OAKG;IACH,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM,GAAG,IAAI,CAAC;IAE/C;;;;;OAKG;IACH,YAAY,CAAC,KAAK,EAAE,MAAM,GAAG,OAAO,GAAG,IAAI,CAAC;CAC7C"}
@@ -0,0 +1 @@
{"version":3,"file":"interfaces.js","sourceRoot":"","sources":["../../../src/job-tracker/interfaces.ts"],"names":[],"mappings":""}
@@ -28,12 +28,12 @@ export type Job = {
     id: string;
     /** The document ID this job operates on */
     documentId: string;
-    /** The scope of the
+    /** The scope of the operations */
     scope: string;
-    /** The branch of the
+    /** The branch of the operations */
     branch: string;
-    /** The
-
+    /** The operations to be executed (processed sequentially) */
+    operations: Operation[];
     /** Timestamp when the job was created */
     createdAt: string;
     /** The hint for the queue to use for ordering the job */
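The Job payload now carries an ordered batch of operations rather than a single one. A sketch of the reshaped payload; Partial is used because only the fields visible in this hunk are shown, and both the package-root Job export and the document-model Operation import are assumptions:

import type { Operation } from "document-model"; // assumed source of the Operation type
import type { Job } from "@powerhousedao/reactor"; // assumed export location

const operations: Operation[] = []; // built elsewhere, e.g. from queued actions
const job: Partial<Job> = {
    id: "job-1",
    documentId: "doc-123",
    scope: "global",
    branch: "main",
    operations, // executed sequentially by the job executor
    createdAt: new Date().toISOString(),
};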
@@ -1 +1 @@
{"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../../../src/queue/types.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,gBAAgB,CAAC;AAEhD;;GAEG;AACH,oBAAY,aAAa;IACvB,OAAO,KAAK;IACZ,aAAa,IAAI;IACjB,OAAO,IAAI;IACX,KAAK,IAAI;IACT,OAAO,IAAI;IACX,QAAQ,IAAI;CACb;AAED;;GAEG;AACH,MAAM,WAAW,mBAAmB;IAClC,QAAQ,CAAC,GAAG,EAAE,GAAG,CAAC;IAClB,QAAQ,CAAC,KAAK,EAAE,aAAa,CAAC;IAE9B,KAAK,IAAI,IAAI,CAAC;IACd,QAAQ,IAAI,IAAI,CAAC;IACjB,IAAI,CAAC,MAAM,EAAE,MAAM,GAAG,IAAI,CAAC;CAC5B;AAED;;GAEG;AACH,MAAM,MAAM,GAAG,GAAG;IAChB,oCAAoC;IACpC,EAAE,EAAE,MAAM,CAAC;IAEX,2CAA2C;IAC3C,UAAU,EAAE,MAAM,CAAC;IAEnB,
{"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../../../src/queue/types.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,gBAAgB,CAAC;AAEhD;;GAEG;AACH,oBAAY,aAAa;IACvB,OAAO,KAAK;IACZ,aAAa,IAAI;IACjB,OAAO,IAAI;IACX,KAAK,IAAI;IACT,OAAO,IAAI;IACX,QAAQ,IAAI;CACb;AAED;;GAEG;AACH,MAAM,WAAW,mBAAmB;IAClC,QAAQ,CAAC,GAAG,EAAE,GAAG,CAAC;IAClB,QAAQ,CAAC,KAAK,EAAE,aAAa,CAAC;IAE9B,KAAK,IAAI,IAAI,CAAC;IACd,QAAQ,IAAI,IAAI,CAAC;IACjB,IAAI,CAAC,MAAM,EAAE,MAAM,GAAG,IAAI,CAAC;CAC5B;AAED;;GAEG;AACH,MAAM,MAAM,GAAG,GAAG;IAChB,oCAAoC;IACpC,EAAE,EAAE,MAAM,CAAC;IAEX,2CAA2C;IAC3C,UAAU,EAAE,MAAM,CAAC;IAEnB,kCAAkC;IAClC,KAAK,EAAE,MAAM,CAAC;IAEd,mCAAmC;IACnC,MAAM,EAAE,MAAM,CAAC;IAEf,6DAA6D;IAC7D,UAAU,EAAE,SAAS,EAAE,CAAC;IAExB,yCAAyC;IACzC,SAAS,EAAE,MAAM,CAAC;IAElB,yDAAyD;IACzD,SAAS,EAAE,MAAM,EAAE,CAAC;IAEpB,+BAA+B;IAC/B,UAAU,CAAC,EAAE,MAAM,CAAC;IAEpB,wCAAwC;IACxC,UAAU,CAAC,EAAE,MAAM,CAAC;IAEpB,uCAAuC;IACvC,SAAS,CAAC,EAAE,MAAM,CAAC;CACpB,CAAC;AAEF;;GAEG;AACH,eAAO,MAAM,eAAe;;CAElB,CAAC;AAEX;;GAEG;AACH,MAAM,MAAM,iBAAiB,GAAG;IAC9B,UAAU,EAAE,MAAM,CAAC;IACnB,KAAK,EAAE,MAAM,CAAC;IACd,MAAM,EAAE,MAAM,CAAC;IACf,KAAK,EAAE,MAAM,CAAC;CACf,CAAC"}
@@ -0,0 +1,38 @@
+import type { IEventBus } from "../events/interfaces.js";
+import type { IReadModel, IReadModelCoordinator } from "./interfaces.js";
+/**
+ * Coordinates read model synchronization by listening to operation write events
+ * and updating all registered read models in parallel.
+ *
+ * This coordinator is responsible for:
+ * - Subscribing to OPERATION_WRITTEN events from the event bus
+ * - Distributing operation updates to all registered read models
+ * - Managing the lifecycle of read model subscriptions
+ *
+ * Read models are updated asynchronously and in parallel to avoid blocking
+ * the write path. Errors in read model updates are propagated through the
+ * event bus but do not affect the write operation success.
+ */
+export declare class ReadModelCoordinator implements IReadModelCoordinator {
+    private eventBus;
+    private readModels;
+    private unsubscribe?;
+    private isRunning;
+    constructor(eventBus: IEventBus, readModels: IReadModel[]);
+    /**
+     * Start listening for operation events and updating read models.
+     * Can be called multiple times safely (subsequent calls are no-ops).
+     */
+    start(): void;
+    /**
+     * Stop listening and clean up subscriptions.
+     * Can be called multiple times safely (subsequent calls are no-ops).
+     */
+    stop(): void;
+    /**
+     * Handle operation written events by updating all read models in parallel.
+     * Errors from individual read models are collected and re-thrown as an aggregate.
+     */
+    private handleOperationWritten;
+}
+//# sourceMappingURL=coordinator.d.ts.map
@@ -0,0 +1 @@
{"version":3,"file":"coordinator.d.ts","sourceRoot":"","sources":["../../../src/read-models/coordinator.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,yBAAyB,CAAC;AAMzD,OAAO,KAAK,EAAE,UAAU,EAAE,qBAAqB,EAAE,MAAM,iBAAiB,CAAC;AAEzE;;;;;;;;;;;;GAYG;AACH,qBAAa,oBAAqB,YAAW,qBAAqB;IAK9D,OAAO,CAAC,QAAQ;IAChB,OAAO,CAAC,UAAU;IALpB,OAAO,CAAC,WAAW,CAAC,CAAc;IAClC,OAAO,CAAC,SAAS,CAAS;gBAGhB,QAAQ,EAAE,SAAS,EACnB,UAAU,EAAE,UAAU,EAAE;IAGlC;;;OAGG;IACH,KAAK,IAAI,IAAI;IAgBb;;;OAGG;IACH,IAAI,IAAI,IAAI;IAaZ;;;OAGG;YACW,sBAAsB;CAWrC"}
@@ -0,0 +1,62 @@
+import { OperationEventTypes, } from "../events/types.js";
+/**
+ * Coordinates read model synchronization by listening to operation write events
+ * and updating all registered read models in parallel.
+ *
+ * This coordinator is responsible for:
+ * - Subscribing to OPERATION_WRITTEN events from the event bus
+ * - Distributing operation updates to all registered read models
+ * - Managing the lifecycle of read model subscriptions
+ *
+ * Read models are updated asynchronously and in parallel to avoid blocking
+ * the write path. Errors in read model updates are propagated through the
+ * event bus but do not affect the write operation success.
+ */
+export class ReadModelCoordinator {
+    eventBus;
+    readModels;
+    unsubscribe;
+    isRunning = false;
+    constructor(eventBus, readModels) {
+        this.eventBus = eventBus;
+        this.readModels = readModels;
+    }
+    /**
+     * Start listening for operation events and updating read models.
+     * Can be called multiple times safely (subsequent calls are no-ops).
+     */
+    start() {
+        if (this.isRunning) {
+            return;
+        }
+        // Subscribe to OPERATION_WRITTEN events
+        this.unsubscribe = this.eventBus.subscribe(OperationEventTypes.OPERATION_WRITTEN, async (type, event) => {
+            await this.handleOperationWritten(event);
+        });
+        this.isRunning = true;
+    }
+    /**
+     * Stop listening and clean up subscriptions.
+     * Can be called multiple times safely (subsequent calls are no-ops).
+     */
+    stop() {
+        if (!this.isRunning) {
+            return;
+        }
+        if (this.unsubscribe) {
+            this.unsubscribe();
+            this.unsubscribe = undefined;
+        }
+        this.isRunning = false;
+    }
+    /**
+     * Handle operation written events by updating all read models in parallel.
+     * Errors from individual read models are collected and re-thrown as an aggregate.
+     */
+    async handleOperationWritten(event) {
+        // Index into all read models in parallel
+        // If any read model fails, the error will be collected by the event bus
+        await Promise.all(this.readModels.map((readModel) => readModel.indexOperations(event.operations)));
+    }
+}
+//# sourceMappingURL=coordinator.js.map
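A wiring sketch for the coordinator. The event bus and read model here are stand-ins (declare const), and whether IEventBus and IReadModel are exported from the package root is an assumption:

import { ReadModelCoordinator } from "@powerhousedao/reactor";
import type { IEventBus, IReadModel } from "@powerhousedao/reactor"; // assumed export location

declare const eventBus: IEventBus; // the reactor's event bus instance
declare const documentView: IReadModel; // e.g. the Kysely document view

const coordinator = new ReadModelCoordinator(eventBus, [documentView]);
coordinator.start(); // subscribes to OPERATION_WRITTEN; repeated calls are no-ops
// ...operations written through the reactor now fan out to every registered read model...
coordinator.stop(); // unsubscribes; also safe to call more than once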
@@ -0,0 +1 @@
{"version":3,"file":"coordinator.js","sourceRoot":"","sources":["../../../src/read-models/coordinator.ts"],"names":[],"mappings":"AACA,OAAO,EACL,mBAAmB,GAGpB,MAAM,oBAAoB,CAAC;AAG5B;;;;;;;;;;;;GAYG;AACH,MAAM,OAAO,oBAAoB;IAKrB;IACA;IALF,WAAW,CAAe;IAC1B,SAAS,GAAG,KAAK,CAAC;IAE1B,YACU,QAAmB,EACnB,UAAwB;QADxB,aAAQ,GAAR,QAAQ,CAAW;QACnB,eAAU,GAAV,UAAU,CAAc;IAC/B,CAAC;IAEJ;;;OAGG;IACH,KAAK;QACH,IAAI,IAAI,CAAC,SAAS,EAAE,CAAC;YACnB,OAAO;QACT,CAAC;QAED,wCAAwC;QACxC,IAAI,CAAC,WAAW,GAAG,IAAI,CAAC,QAAQ,CAAC,SAAS,CACxC,mBAAmB,CAAC,iBAAiB,EACrC,KAAK,EAAE,IAAI,EAAE,KAA4B,EAAE,EAAE;YAC3C,MAAM,IAAI,CAAC,sBAAsB,CAAC,KAAK,CAAC,CAAC;QAC3C,CAAC,CACF,CAAC;QAEF,IAAI,CAAC,SAAS,GAAG,IAAI,CAAC;IACxB,CAAC;IAED;;;OAGG;IACH,IAAI;QACF,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,CAAC;YACpB,OAAO;QACT,CAAC;QAED,IAAI,IAAI,CAAC,WAAW,EAAE,CAAC;YACrB,IAAI,CAAC,WAAW,EAAE,CAAC;YACnB,IAAI,CAAC,WAAW,GAAG,SAAS,CAAC;QAC/B,CAAC;QAED,IAAI,CAAC,SAAS,GAAG,KAAK,CAAC;IACzB,CAAC;IAED;;;OAGG;IACK,KAAK,CAAC,sBAAsB,CAClC,KAA4B;QAE5B,yCAAyC;QACzC,wEAAwE;QACxE,MAAM,OAAO,CAAC,GAAG,CACf,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,SAAS,EAAE,EAAE,CAChC,SAAS,CAAC,eAAe,CAAC,KAAK,CAAC,UAAU,CAAC,CAC5C,CACF,CAAC;IACJ,CAAC;CACF"}
@@ -1,6 +1,6 @@
-import type {
+import type { PHDocument } from "document-model";
 import type { Kysely } from "kysely";
-import type {
+import type { IDocumentView, IOperationStore, OperationWithContext, ViewFilter } from "../storage/interfaces.js";
 import type { Database as StorageDatabase } from "../storage/kysely/types.js";
 import type { DocumentViewDatabase } from "./types.js";
 type Database = StorageDatabase & DocumentViewDatabase;
@@ -11,11 +11,10 @@ export declare class KyselyDocumentView implements IDocumentView {
     constructor(db: Kysely<Database>, operationStore: IOperationStore);
     init(): Promise<void>;
     indexOperations(items: OperationWithContext[]): Promise<void>;
-    getHeader(documentId: string, branch: string, signal?: AbortSignal): Promise<PHDocumentHeader>;
     exists(documentIds: string[], signal?: AbortSignal): Promise<boolean[]>;
-
-    private createTablesIfNotExist;
+    get<TDocument extends PHDocument>(documentId: string, view?: ViewFilter, signal?: AbortSignal): Promise<TDocument>;
     private checkTablesExist;
+    private createTablesIfNotExist;
 }
 export {};
 //# sourceMappingURL=document-view.d.ts.map
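getHeader() is replaced by a generic get() that takes an optional ViewFilter. A usage sketch, assuming ViewFilter carries optional branch and scopes fields (that is how the new implementation reads view?.branch and view?.scopes) and that IDocumentView and ViewFilter are re-exported from the package root:

import type { PHDocument } from "document-model";
import type { IDocumentView, ViewFilter } from "@powerhousedao/reactor"; // assumed export location

// "header" and "document" scopes are always included; branch defaults to "main".
async function loadGlobalState(view: IDocumentView, documentId: string) {
    const filter: ViewFilter = { branch: "main", scopes: ["global"] };
    const doc = await view.get<PHDocument>(documentId, filter);
    return doc.state;
}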
@@ -1 +1 @@
{"version":3,"file":"document-view.d.ts","sourceRoot":"","sources":["../../../src/read-models/document-view.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,
{"version":3,"file":"document-view.d.ts","sourceRoot":"","sources":["../../../src/read-models/document-view.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAa,UAAU,EAAoB,MAAM,gBAAgB,CAAC;AAE9E,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,QAAQ,CAAC;AAErC,OAAO,KAAK,EACV,aAAa,EACb,eAAe,EACf,oBAAoB,EACpB,UAAU,EACX,MAAM,0BAA0B,CAAC;AAClC,OAAO,KAAK,EAAE,QAAQ,IAAI,eAAe,EAAE,MAAM,4BAA4B,CAAC;AAC9E,OAAO,KAAK,EACV,oBAAoB,EAErB,MAAM,YAAY,CAAC;AAGpB,KAAK,QAAQ,GAAG,eAAe,GAAG,oBAAoB,CAAC;AAEvD,qBAAa,kBAAmB,YAAW,aAAa;IAIpD,OAAO,CAAC,EAAE;IACV,OAAO,CAAC,cAAc;IAJxB,OAAO,CAAC,eAAe,CAAa;gBAG1B,EAAE,EAAE,MAAM,CAAC,QAAQ,CAAC,EACpB,cAAc,EAAE,eAAe;IAGnC,IAAI,IAAI,OAAO,CAAC,IAAI,CAAC;IAsCrB,eAAe,CAAC,KAAK,EAAE,oBAAoB,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC;IAoF7D,MAAM,CACV,WAAW,EAAE,MAAM,EAAE,EACrB,MAAM,CAAC,EAAE,WAAW,GACnB,OAAO,CAAC,OAAO,EAAE,CAAC;IAyBf,GAAG,CAAC,SAAS,SAAS,UAAU,EACpC,UAAU,EAAE,MAAM,EAClB,IAAI,CAAC,EAAE,UAAU,EACjB,MAAM,CAAC,EAAE,WAAW,GACnB,OAAO,CAAC,SAAS,CAAC;YA4HP,gBAAgB;YAchB,sBAAsB;CAsGrC"}
@@ -1,4 +1,4 @@
-import {
+import { defaultBaseState } from "document-model/core";
 import { v4 as uuidv4 } from "uuid";
 export class KyselyDocumentView {
     db;
@@ -47,103 +47,72 @@ export class KyselyDocumentView {
                 const { operation, context } = item;
                 const { documentId, scope, branch, documentType } = context;
                 const { index, hash } = operation;
-                //
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+                // Parse the full resulting state if present
+                let fullState = {};
+                if (operation.resultingState) {
+                    try {
+                        fullState = JSON.parse(operation.resultingState);
+                    }
+                    catch {
+                        // Failed to parse resultingState, use empty state
+                    }
+                }
+                // If resultingState is present, create/update snapshots for ALL scopes in the state
+                // Otherwise, fall back to creating/updating a snapshot for just the operation's scope
+                const scopesToIndex = Object.keys(fullState).length > 0
+                    ? Object.entries(fullState)
+                    : [[scope, {}]];
+                for (const [scopeName, scopeState] of scopesToIndex) {
+                    // Check if we need to create or update a snapshot for this scope
+                    const existingSnapshot = await trx
+                        .selectFrom("DocumentSnapshot")
+                        .selectAll()
                         .where("documentId", "=", documentId)
-                        .where("scope", "=",
+                        .where("scope", "=", scopeName)
                         .where("branch", "=", branch)
-                        .
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+                        .executeTakeFirst();
+                    const newState = typeof scopeState === "object" && scopeState !== null
+                        ? scopeState
+                        : {};
+                    if (existingSnapshot) {
+                        // Update existing snapshot with new state
+                        await trx
+                            .updateTable("DocumentSnapshot")
+                            .set({
+                                lastOperationIndex: index,
+                                lastOperationHash: hash,
+                                lastUpdatedAt: new Date(),
+                                snapshotVersion: existingSnapshot.snapshotVersion + 1,
+                                content: JSON.stringify(newState),
+                            })
+                            .where("documentId", "=", documentId)
+                            .where("scope", "=", scopeName)
+                            .where("branch", "=", branch)
+                            .execute();
+                    }
+                    else {
+                        // Create new snapshot with computed state
+                        const snapshot = {
+                            id: uuidv4(),
+                            documentId,
+                            slug: null,
+                            name: null,
+                            scope: scopeName,
+                            branch,
+                            content: JSON.stringify(newState),
+                            documentType,
+                            lastOperationIndex: index,
+                            lastOperationHash: hash,
+                            identifiers: null,
+                            metadata: null,
+                            deletedAt: null,
+                        };
+                        await trx.insertInto("DocumentSnapshot").values(snapshot).execute();
+                    }
                 }
             }
         });
     }
-    async getHeader(documentId, branch, signal) {
-        if (signal?.aborted) {
-            throw new Error("Operation aborted");
-        }
-        // Query operations from header and document scopes only
-        // - "header" scope: CREATE_DOCUMENT actions contain initial header metadata
-        // - "document" scope: UPGRADE_DOCUMENT actions contain version transitions
-        const headerAndDocOps = await this.db
-            .selectFrom("Operation")
-            .selectAll()
-            .where("documentId", "=", documentId)
-            .where("branch", "=", branch)
-            .where("scope", "in", ["header", "document"])
-            .orderBy("timestampUtcMs", "asc") // Process in chronological order
-            .execute();
-        if (headerAndDocOps.length === 0) {
-            throw new Error(`Document header not found: ${documentId}`);
-        }
-        // Reconstruct header from header and document scope operations
-        let header = createPresignedHeader();
-        for (const op of headerAndDocOps) {
-            const action = JSON.parse(op.action);
-            if (action.type === "CREATE_DOCUMENT") {
-                const input = action.input;
-                // Extract header from CREATE_DOCUMENT action's signing parameters
-                if (input.signing) {
-                    header = {
-                        ...header,
-                        id: input.signing.signature, // documentId === signing.signature
-                        documentType: input.signing.documentType,
-                        createdAtUtcIso: input.signing.createdAtUtcIso,
-                        lastModifiedAtUtcIso: input.signing.createdAtUtcIso,
-                        sig: {
-                            nonce: input.signing.nonce,
-                            publicKey: input.signing.publicKey,
-                        },
-                    };
-                }
-            }
-            else if (action.type === "UPGRADE_DOCUMENT") {
-                // UPGRADE_DOCUMENT tracks version changes in the document scope
-                // Version information would be in the operation's resulting state
-                // For now, this is handled elsewhere in the document state
-            }
-        }
-        // Get revision map and latest timestamp from all scopes efficiently
-        const { revision, latestTimestamp } = await this.operationStore.getRevisions(documentId, branch, signal);
-        // Update header with cross-scope revision and timestamp information
-        header.revision = revision;
-        header.lastModifiedAtUtcIso = latestTimestamp;
-        return header;
-    }
     async exists(documentIds, signal) {
         if (signal?.aborted) {
             throw new Error("Operation aborted");
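The key change above is the scope fan-out: when an operation carries a full resultingState, every scope in that state gets a snapshot row; otherwise only the operation's own scope is touched. A standalone restatement of that rule, separated from the Kysely transaction for clarity (function name and signature are illustrative):

function scopesToIndex(
    resultingState: string | undefined,
    operationScope: string,
): Array<[string, unknown]> {
    let fullState: Record<string, unknown> = {};
    if (resultingState) {
        try {
            fullState = JSON.parse(resultingState) as Record<string, unknown>;
        }
        catch {
            // unparseable resultingState falls back to an empty state, as above
        }
    }
    return Object.keys(fullState).length > 0
        ? Object.entries(fullState)
        : [[operationScope, {}]];
}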
@@ -164,26 +133,122 @@ export class KyselyDocumentView {
         // Return a boolean array in the same order as the input
         return documentIds.map((id) => existingIds.has(id));
     }
-    async
+    async get(documentId, view, signal) {
         if (signal?.aborted) {
             throw new Error("Operation aborted");
         }
-
-
+        const branch = view?.branch || "main";
+        // Determine which scopes to retrieve
+        let scopesToQuery;
+        if (view?.scopes && view.scopes.length > 0) {
+            // If scopes has values, always include header + document + specified scopes
+            // (header and document are the minimum scopes that must be returned)
+            scopesToQuery = [...new Set(["header", "document", ...view.scopes])];
         }
-
-
+        else {
+            // If scopes is undefined, null, or empty array [], get all scopes (no filter)
+            scopesToQuery = [];
+        }
+        // Build query to get snapshots
+        let query = this.db
             .selectFrom("DocumentSnapshot")
             .selectAll()
-            .where("documentId", "
-            .where("scope", "=", scope)
+            .where("documentId", "=", documentId)
             .where("branch", "=", branch)
-            .where("isDeleted", "=", false)
-
-
-
-
-
+            .where("isDeleted", "=", false);
+        // Apply scope filter if we have specific scopes to query
+        if (scopesToQuery.length > 0) {
+            query = query.where("scope", "in", scopesToQuery);
+        }
+        // Execute the query
+        const snapshots = await query.execute();
+        if (snapshots.length === 0) {
+            throw new Error(`Document not found: ${documentId}`);
+        }
+        if (signal?.aborted) {
+            throw new Error("Operation aborted");
+        }
+        // Find the header snapshot
+        const headerSnapshot = snapshots.find((s) => s.scope === "header");
+        if (!headerSnapshot) {
+            throw new Error(`Document header not found: ${documentId}`);
+        }
+        // Parse the header
+        let header;
+        try {
+            header = JSON.parse(headerSnapshot.content);
+        }
+        catch (error) {
+            throw new Error(`Failed to parse header for document ${documentId}: ${error instanceof Error ? error.message : String(error)}`);
+        }
+        // Reconstruct the document state from all snapshots
+        // Note: exclude "header" scope from state since it's already in the header field
+        const state = {};
+        for (const snapshot of snapshots) {
+            // Skip header scope - it's stored separately in the header field
+            if (snapshot.scope === "header") {
+                continue;
+            }
+            try {
+                const scopeState = JSON.parse(snapshot.content);
+                state[snapshot.scope] = scopeState;
+            }
+            catch {
+                // Failed to parse snapshot content, use empty state
+                state[snapshot.scope] = {};
+            }
+        }
+        // Retrieve operations from the operation store to match legacy storage format
+        const operations = {};
+        // Get all operations for this document across all scopes
+        const allOps = await this.operationStore.getSinceId(0, signal);
+        const docOps = allOps.filter((op) => op.context.documentId === documentId && op.context.branch === branch);
+        // Group operations by scope and normalize to match legacy storage structure
+        for (const { operation, context } of docOps) {
+            if (!operations[context.scope]) {
+                operations[context.scope] = [];
+            }
+            // Normalize operation to match legacy storage format
+            // Legacy storage includes redundant top-level fields that duplicate action fields
+            const normalizedOp = {
+                action: operation.action,
+                index: operation.index,
+                timestampUtcMs: operation.timestampUtcMs,
+                hash: operation.hash,
+                skip: operation.skip,
+                // Add top-level fields that mirror action fields (legacy format)
+                ...operation.action,
+                // Legacy storage includes these optional fields
+                error: operation.error,
+                resultingState: operation.resultingState,
+            };
+            operations[context.scope].push(normalizedOp);
+        }
+        // Construct the PHDocument
+        const document = {
+            header,
+            // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
+            state: state,
+            operations,
+            // to be removed...
+            initialState: defaultBaseState(),
+            clipboard: [],
+        };
+        return document;
+    }
+    async checkTablesExist() {
+        try {
+            // Try to query ViewState table
+            await this.db
+                .selectFrom("ViewState")
+                .select("lastOperationId")
+                .limit(1)
+                .execute();
+            return true;
+        }
+        catch {
+            return false;
+        }
     }
     async createTablesIfNotExist() {
         // Check if tables exist by trying to query them
@@ -267,19 +332,5 @@ export class KyselyDocumentView {
             .execute();
         }
     }
-    async checkTablesExist() {
-        try {
-            // Try to query ViewState table
-            await this.db
-                .selectFrom("ViewState")
-                .select("lastOperationId")
-                .limit(1)
-                .execute();
-            return true;
-        }
-        catch {
-            return false;
-        }
-    }
 }
 //# sourceMappingURL=document-view.js.map