@interocitor/core 0.0.0-beta.10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +706 -0
- package/dist/adapters/cloudflare.d.ts +78 -0
- package/dist/adapters/cloudflare.d.ts.map +1 -0
- package/dist/adapters/cloudflare.js +325 -0
- package/dist/adapters/google-drive.d.ts +64 -0
- package/dist/adapters/google-drive.d.ts.map +1 -0
- package/dist/adapters/google-drive.js +339 -0
- package/dist/adapters/memory.d.ts +53 -0
- package/dist/adapters/memory.d.ts.map +1 -0
- package/dist/adapters/memory.js +182 -0
- package/dist/adapters/webdav.d.ts +70 -0
- package/dist/adapters/webdav.d.ts.map +1 -0
- package/dist/adapters/webdav.js +323 -0
- package/dist/core/codec.d.ts +20 -0
- package/dist/core/codec.d.ts.map +1 -0
- package/dist/core/codec.js +102 -0
- package/dist/core/compaction.d.ts +45 -0
- package/dist/core/compaction.d.ts.map +1 -0
- package/dist/core/compaction.js +190 -0
- package/dist/core/connected-stores.d.ts +77 -0
- package/dist/core/connected-stores.d.ts.map +1 -0
- package/dist/core/connected-stores.js +76 -0
- package/dist/core/crdt.d.ts +36 -0
- package/dist/core/crdt.d.ts.map +1 -0
- package/dist/core/crdt.js +174 -0
- package/dist/core/errors.d.ts +47 -0
- package/dist/core/errors.d.ts.map +1 -0
- package/dist/core/errors.js +61 -0
- package/dist/core/flush.d.ts +9 -0
- package/dist/core/flush.d.ts.map +1 -0
- package/dist/core/flush.js +98 -0
- package/dist/core/hlc.d.ts +25 -0
- package/dist/core/hlc.d.ts.map +1 -0
- package/dist/core/hlc.js +75 -0
- package/dist/core/ids.d.ts +49 -0
- package/dist/core/ids.d.ts.map +1 -0
- package/dist/core/ids.js +132 -0
- package/dist/core/internals.d.ts +33 -0
- package/dist/core/internals.d.ts.map +1 -0
- package/dist/core/internals.js +72 -0
- package/dist/core/manifest.d.ts +56 -0
- package/dist/core/manifest.d.ts.map +1 -0
- package/dist/core/manifest.js +203 -0
- package/dist/core/pull.d.ts +26 -0
- package/dist/core/pull.d.ts.map +1 -0
- package/dist/core/pull.js +113 -0
- package/dist/core/row-id.d.ts +12 -0
- package/dist/core/row-id.d.ts.map +1 -0
- package/dist/core/row-id.js +11 -0
- package/dist/core/schema-types.d.ts +26 -0
- package/dist/core/schema-types.d.ts.map +1 -0
- package/dist/core/schema-types.js +31 -0
- package/dist/core/schema-types.type-test.d.ts +2 -0
- package/dist/core/schema-types.type-test.d.ts.map +1 -0
- package/dist/core/schema-types.type-test.js +224 -0
- package/dist/core/sync-engine.d.ts +364 -0
- package/dist/core/sync-engine.d.ts.map +1 -0
- package/dist/core/sync-engine.js +2475 -0
- package/dist/core/table.d.ts +260 -0
- package/dist/core/table.d.ts.map +1 -0
- package/dist/core/table.js +461 -0
- package/dist/core/types.d.ts +952 -0
- package/dist/core/types.d.ts.map +1 -0
- package/dist/core/types.js +6 -0
- package/dist/crypto/encryption.d.ts +61 -0
- package/dist/crypto/encryption.d.ts.map +1 -0
- package/dist/crypto/encryption.js +216 -0
- package/dist/crypto/keys.d.ts +48 -0
- package/dist/crypto/keys.d.ts.map +1 -0
- package/dist/crypto/keys.js +54 -0
- package/dist/handshake/channel.d.ts +117 -0
- package/dist/handshake/channel.d.ts.map +1 -0
- package/dist/handshake/channel.js +245 -0
- package/dist/handshake/index.d.ts +216 -0
- package/dist/handshake/index.d.ts.map +1 -0
- package/dist/handshake/index.js +199 -0
- package/dist/handshake/qr-public.d.ts +3 -0
- package/dist/handshake/qr-public.d.ts.map +1 -0
- package/dist/handshake/qr-public.js +1 -0
- package/dist/handshake/qr.d.ts +100 -0
- package/dist/handshake/qr.d.ts.map +1 -0
- package/dist/handshake/qr.js +102 -0
- package/dist/index.d.ts +50 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +50 -0
- package/dist/storage/credential-store.d.ts +122 -0
- package/dist/storage/credential-store.d.ts.map +1 -0
- package/dist/storage/credential-store.js +356 -0
- package/dist/storage/local-store.d.ts +64 -0
- package/dist/storage/local-store.d.ts.map +1 -0
- package/dist/storage/local-store.js +490 -0
- package/dist/storage/reset.d.ts +10 -0
- package/dist/storage/reset.d.ts.map +1 -0
- package/dist/storage/reset.js +18 -0
- package/package.json +76 -0
|
@@ -0,0 +1,203 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Manifest — reading, writing, creating, and validating cloud manifests.
|
|
3
|
+
*
|
|
4
|
+
* Extracted from Interocitor. Not part of the public API.
|
|
5
|
+
*/
|
|
6
|
+
import { paths, textEncoder, textDecoder, generateId, computeContentHash } from "./internals.js";
|
|
7
|
+
import { assertExpectedMeshId } from "./codec.js";
|
|
8
|
+
import { MeshEncryptionMismatchError } from "./errors.js";
|
|
9
|
+
/**
 * Read the remote file at `path` and parse its UTF-8 bytes as JSON.
 * Propagates adapter read errors and JSON parse errors to the caller.
 */
export async function readJson(adapter, path) {
    const bytes = await adapter.readFile(path);
    const text = textDecoder.decode(bytes);
    return JSON.parse(text);
}
|
|
13
|
+
/**
 * Like `readJson`, but treats any failure (missing file, unreadable
 * bytes, invalid JSON) as an expected absence and resolves to `null`.
 */
export async function readJsonIfExists(adapter, path) {
    let parsed = null;
    try {
        parsed = await readJson(adapter, path);
    }
    catch {
        // Absence is not an error for this helper.
    }
    return parsed;
}
|
|
21
|
+
/**
 * In a server-managed mesh only the designated server may write the
 * manifest. Throws when `manifest.writtenBy` is anyone else.
 */
export function assertServerAuth(manifest, serverId) {
    if (manifest.writtenBy === serverId)
        return;
    throw new Error(`Unauthorized manifest writer: ${manifest.writtenBy}`);
}
|
|
26
|
+
/**
 * Recompute the manifest's content hash over every field except
 * `contentHash` itself, and throw when it differs from the stored value.
 */
export async function validateManifestHash(manifest) {
    const { contentHash: stored, ...hashedPayload } = manifest;
    const actual = await computeContentHash(hashedPayload);
    if (stored === actual)
        return;
    throw new Error('Manifest content hash mismatch');
}
|
|
33
|
+
/**
 * Serialize `value` as pretty-printed JSON (2-space indent) and write
 * the UTF-8 bytes to `path` through the storage adapter.
 *
 * @param adapter Storage adapter providing `writeFile(path, bytes)`.
 * @param path    Remote path to write to.
 * @param value   Any JSON-serializable value.
 */
export async function writeJson(adapter, path, value) {
    // Fix: removed a leftover unconditional `console.log` debug statement
    // that classified and logged every remote write — a library must not
    // spam the console on its hot write path.
    await adapter.writeFile(path, textEncoder.encode(JSON.stringify(value, null, 2)));
}
|
|
37
|
+
/**
 * Mint the very first manifest (generation 1) for a mesh, plus the
 * pointer file naming it, and write both to remote storage.
 *
 * @param ctx    Engine context: adapter, remotePath, serverId, schema,
 *               encrypted flag, serverManaged flag, and the `emit` sink.
 * @param meshId Reuse this mesh id when provided (e.g. from local meta);
 *               otherwise a fresh one is generated.
 * @returns `{ pointer, manifest }` — exactly the objects written remotely.
 */
export async function createBootstrapManifest(ctx, meshId) {
    const p = paths(ctx.remotePath);
    const now = new Date().toISOString();
    // The hash covers the payload only; `contentHash` is appended after.
    const payload = {
        generation: 1,
        parentGeneration: 0,
        writtenBy: ctx.serverId,
        writtenAt: now,
        version: 3,
        // Falsy meshId (undefined/'' ) falls through to a generated id.
        meshId: meshId || generateId('mesh'),
        schema: ctx.schema?.version ?? 1,
        // Pins the mesh's encryption mode; checked on every reconnect.
        encrypted: ctx.encrypted,
        server: {
            managed: ctx.serverManaged,
            relayUrl: null,
            serverId: ctx.serverId,
        },
        createdAt: now,
        epoch: 0,
        watermarkHlc: '',
        snapshotPath: null,
        deltaPath: null,
    };
    const manifest = {
        ...payload,
        contentHash: await computeContentHash(payload),
    };
    const manifestFile = `manifest-${manifest.generation}.json`;
    const pointer = {
        currentGeneration: manifest.generation,
        file: manifestFile,
    };
    // Write order matters: manifest first, then the pointer that names it,
    // so a concurrent reader never follows a pointer to a missing file.
    await writeJson(ctx.adapter, p.manifestFile(manifest.generation), manifest);
    ctx.emit({
        type: 'trace:manifest',
        op: 'write',
        reason: 'bootstrap',
        generation: manifest.generation,
        path: p.manifestFile(manifest.generation),
    });
    await writeJson(ctx.adapter, p.manifestPointer, pointer);
    ctx.emit({
        type: 'trace:manifest',
        op: 'write',
        reason: 'bootstrap-pointer',
        generation: manifest.generation,
        path: p.manifestPointer,
    });
    return { pointer, manifest };
}
|
|
87
|
+
/**
 * Load the current remote manifest via the pointer file, or bootstrap a
 * brand-new one when none exists, then run every validity check before
 * any change data is decoded.
 *
 * Check order is deliberate: content hash, then mesh-id (the only check
 * that poisons the remote on failure), then manifest version, schema
 * version, server authorization, and finally encryption-mode parity.
 *
 * @param ctx          Engine context (adapter, remotePath, serverId,
 *                     schema, encrypted flag, emit sink).
 * @param codecState   Codec state whose `manifest` field feeds the
 *                     mesh-id check.
 * @param local        Local store; supplies a previously-seen meshId.
 * @param poisonRemote Wraps an error and marks the remote poisoned.
 * @param reason       Free-form label carried on trace events.
 * @returns `{ manifest, bootstrapped }` — `bootstrapped` is true when
 *          this call minted the manifest.
 */
export async function loadOrCreateManifest(ctx, codecState, local, poisonRemote, reason = 'unknown') {
    const p = paths(ctx.remotePath);
    ctx.emit({ type: 'trace:manifest', op: 'read', reason, path: p.manifestPointer });
    const globalPointer = await readJsonIfExists(ctx.adapter, p.manifestPointer);
    let pointer;
    let manifest;
    let bootstrapped = false;
    if (globalPointer) {
        // Pointer exists: follow it to the generation file it names.
        pointer = globalPointer;
        const manifestPath = `${ctx.remotePath}/${pointer.file}`;
        ctx.emit({ type: 'trace:manifest', op: 'read', reason, path: manifestPath, generation: pointer.currentGeneration });
        manifest = await readJson(ctx.adapter, manifestPath);
    }
    else {
        bootstrapped = true;
        ctx.emit({ type: 'trace:manifest', op: 'bootstrap-create', reason, path: p.manifestPointer });
        // Reuse a meshId this device has seen before, if any, so local
        // data stays bound to the same mesh across remote resets.
        const existingMeshId = await local.getMeta('meshId');
        const bootstrap = await createBootstrapManifest(ctx, typeof existingMeshId === 'string' ? existingMeshId : undefined);
        // Skip the read-after-write — we just minted both files in this process,
        // they are exactly what's on disk. No GETs needed.
        pointer = bootstrap.pointer;
        manifest = bootstrap.manifest;
    }
    const manifestPath = `${ctx.remotePath}/${pointer.file}`;
    // Integrity first: a tampered/corrupt manifest fails before any
    // semantic checks run.
    await validateManifestHash(manifest);
    try {
        await assertExpectedMeshId(local, codecState.manifest, manifest.meshId);
    }
    catch (err) {
        // A mesh-id mismatch means we are talking to the wrong mesh —
        // poison the remote so no further decode attempts happen.
        throw await poisonRemote(err, manifestPath);
    }
    if (manifest.version !== 3) {
        throw new Error(`Unsupported manifest version ${manifest.version} (expected 3).`);
    }
    if (ctx.schema?.version !== undefined && manifest.schema !== ctx.schema.version) {
        ctx.emit({ type: 'schema:mismatch', local: ctx.schema.version, remote: manifest.schema });
        throw new Error(`Schema version mismatch: local=${ctx.schema.version}, remote=${manifest.schema}`);
    }
    if (manifest.server.managed) {
        assertServerAuth(manifest, ctx.serverId);
    }
    // Encryption-mode parity check.
    //
    // The remote manifest pins the mesh's encryption mode at bootstrap.
    // If the engine reconnects with a different `encrypted` flag (typical
    // app bug: passphrase loaded asynchronously, so the first session
    // wrote plaintext and the next session derives a key and tries to
    // decrypt), every change file would fail decode and poison the remote.
    //
    // Surface this as an actionable error *before* any decode runs and
    // *without* poisoning. The remote is not corrupt — the local config
    // is wrong.
    if (typeof manifest.encrypted === 'boolean' && manifest.encrypted !== ctx.encrypted) {
        throw new MeshEncryptionMismatchError(manifest.encrypted, ctx.encrypted);
    }
    return { manifest, bootstrapped };
}
|
|
144
|
+
/**
 * Create or refresh this device's metadata record on the remote.
 *
 * Merges caller-supplied fields (`opts`) over the existing record,
 * bumping `lastSeenAt`/`observedAt` timestamps unless the caller asked
 * for write suppression via `opts.skipTouchIfUnchanged`, in which case
 * the PUT is skipped entirely when nothing material changed.
 *
 * @param adapter    Storage adapter for the remote.
 * @param remotePath Mesh root folder on the remote.
 * @param deviceId   This device's id; also names the device file.
 * @param opts       Optional: bootstrap flag, display fields, observed-*
 *                   sync positions, skipTouchIfUnchanged.
 */
export async function upsertDeviceMetadata(adapter, remotePath, deviceId, opts) {
    const p = paths(remotePath);
    const now = new Date().toISOString();
    // Bootstrap fast-path: caller asserts no prior record. Skip the GET.
    // Worst case if caller is wrong: we clobber displayName/deviceType the
    // user set on a different device — which would itself indicate the
    // bootstrap flag was misused. Sync engine only sets bootstrap=true
    // when it just minted the manifest in this same connect cycle.
    const existing = opts?.bootstrap
        ? null
        : await readJsonIfExists(adapter, p.deviceFile(deviceId));
    // True when the caller supplied any observed-* sync position at all.
    const touchedObserved = opts?.observedManifestGeneration !== undefined
        || opts?.observedEpoch !== undefined
        || opts?.observedWatermarkHlc !== undefined
        || opts?.observedGcFloorHlc !== undefined;
    const next = {
        deviceId,
        registeredAt: existing?.registeredAt ?? now,
        // Under skipTouchIfUnchanged keep the old timestamp so an
        // otherwise-identical record compares equal below.
        lastSeenAt: opts?.skipTouchIfUnchanged ? (existing?.lastSeenAt ?? now) : now,
        userId: existing?.userId,
        name: existing?.name,
        displayName: opts?.displayName ?? existing?.displayName,
        deviceType: opts?.deviceType ?? existing?.deviceType,
        retired: existing?.retired,
        observedManifestGeneration: opts?.observedManifestGeneration ?? existing?.observedManifestGeneration,
        observedEpoch: opts?.observedEpoch ?? existing?.observedEpoch,
        observedWatermarkHlc: opts?.observedWatermarkHlc ?? existing?.observedWatermarkHlc,
        observedGcFloorHlc: opts?.observedGcFloorHlc ?? existing?.observedGcFloorHlc,
        observedAt: touchedObserved
            ? (opts?.skipTouchIfUnchanged ? (existing?.observedAt ?? now) : now)
            : existing?.observedAt,
        cutOffAt: existing?.cutOffAt,
        cutOffReason: existing?.cutOffReason,
    };
    // Cheap structural equality via JSON round-trip; key order is stable
    // because `next` mirrors the stored record's construction.
    if (opts?.skipTouchIfUnchanged && existing && JSON.stringify(existing) === JSON.stringify(next))
        return;
    if (opts?.skipTouchIfUnchanged && touchedObserved && existing) {
        // Field-by-field comparison of everything except the timestamps.
        const observedChanged = existing.observedManifestGeneration !== next.observedManifestGeneration
            || existing.observedEpoch !== next.observedEpoch
            || existing.observedWatermarkHlc !== next.observedWatermarkHlc
            || existing.observedGcFloorHlc !== next.observedGcFloorHlc
            || existing.displayName !== next.displayName
            || existing.deviceType !== next.deviceType
            || existing.retired !== next.retired
            || existing.cutOffAt !== next.cutOffAt
            || existing.cutOffReason !== next.cutOffReason
            || existing.userId !== next.userId
            || existing.name !== next.name;
        if (!observedChanged)
            return;
        // Material change: write it, but keep the old timestamps so the
        // PUT only reflects the data change, not a heartbeat.
        next.lastSeenAt = existing.lastSeenAt;
        next.observedAt = existing.observedAt;
    }
    // NOTE(review): this branch re-applies `lastSeenAt` (already set by the
    // branch above when touchedObserved) and handles the !touchedObserved
    // case for `observedAt` — the two blocks look overlapping but are
    // complementary; confirm before consolidating.
    if (opts?.skipTouchIfUnchanged && existing) {
        next.lastSeenAt = existing.lastSeenAt;
        if (!touchedObserved)
            next.observedAt = existing.observedAt;
    }
    await writeJson(adapter, p.deviceFile(deviceId), next);
}
|
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Pull — download remote changes and merge into local state.
|
|
3
|
+
*
|
|
4
|
+
* Extracted from Interocitor. Not part of the public API.
|
|
5
|
+
*/
|
|
6
|
+
import type { StorageAdapter, LocalStoreAdapter, Row, Op, SyncEvent, DatabaseSchemaDefinition } from './types.ts';
|
|
7
|
+
import type { HLC } from './types.ts';
|
|
8
|
+
import type { CodecState } from './codec.ts';
|
|
9
|
+
/**
 * Dependencies and shared state the pull routine borrows from the sync
 * engine. Pull lists remote change files, decodes and merges those newer
 * than the local cursor, and reports progress through `emit`.
 */
export interface PullContext {
    /** Remote storage backend change files are read from. */
    adapter: StorageAdapter;
    /** Local persistence for merged rows and sync metadata (cursor, hlc). */
    local: LocalStoreAdapter;
    /** Root folder of this mesh on the remote. */
    remotePath: string;
    /** Codec state used to decode (and, when configured, decrypt) change payloads. */
    codecState: CodecState;
    /** Hybrid logical clock at pull start; `pull` resolves with the updated clock. */
    hlc: HLC;
    deviceId: string;
    /** In-memory row cache: table name -> rowId -> row. */
    tables: Record<string, Record<string, Row>>;
    /** Set of table names seen so far; pull adds the table of every affected row. */
    knownTables: Set<string>;
    /** Optional app schema; forwarded into the CRDT merge. */
    schema?: DatabaseSchemaDefinition;
    /** Event sink for sync/trace events. */
    emit: (event: SyncEvent) => void;
    /** Called before merging each entry — presumably preloads the rows its ops touch into `tables`; confirm against the engine. */
    ensureRowsCached: (ops: Op[]) => Promise<void>;
    /** Marks the remote as poisoned and resolves with the Error the caller should throw. */
    poisonRemote: (error: unknown, path?: string) => Promise<Error>;
    /** Loads (or bootstraps) the remote manifest before any merging happens. */
    loadOrCreateManifest: () => Promise<void>;
}
|
|
24
|
+
/** Returns the updated HLC after pull. */
|
|
25
|
+
export declare function pull(ctx: PullContext): Promise<HLC>;
|
|
26
|
+
//# sourceMappingURL=pull.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"pull.d.ts","sourceRoot":"","sources":["../../src/core/pull.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAEH,OAAO,KAAK,EACV,cAAc,EACd,iBAAiB,EACjB,GAAG,EACH,EAAE,EAEF,SAAS,EACT,wBAAwB,EACzB,MAAM,YAAY,CAAC;AACpB,OAAO,KAAK,EAAE,GAAG,EAAE,MAAM,YAAY,CAAC;AAKtC,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,YAAY,CAAC;AAG7C,MAAM,WAAW,WAAW;IAC1B,OAAO,EAAE,cAAc,CAAC;IACxB,KAAK,EAAE,iBAAiB,CAAC;IACzB,UAAU,EAAE,MAAM,CAAC;IACnB,UAAU,EAAE,UAAU,CAAC;IACvB,GAAG,EAAE,GAAG,CAAC;IACT,QAAQ,EAAE,MAAM,CAAC;IACjB,MAAM,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC,CAAC;IAC5C,WAAW,EAAE,GAAG,CAAC,MAAM,CAAC,CAAC;IACzB,MAAM,CAAC,EAAE,wBAAwB,CAAC;IAClC,IAAI,EAAE,CAAC,KAAK,EAAE,SAAS,KAAK,IAAI,CAAC;IACjC,gBAAgB,EAAE,CAAC,GAAG,EAAE,EAAE,EAAE,KAAK,OAAO,CAAC,IAAI,CAAC,CAAC;IAC/C,YAAY,EAAE,CAAC,KAAK,EAAE,OAAO,EAAE,IAAI,CAAC,EAAE,MAAM,KAAK,OAAO,CAAC,KAAK,CAAC,CAAC;IAChE,oBAAoB,EAAE,MAAM,OAAO,CAAC,IAAI,CAAC,CAAC;CAC3C;AAiBD,0CAA0C;AAC1C,wBAAsB,IAAI,CAAC,GAAG,EAAE,WAAW,GAAG,OAAO,CAAC,GAAG,CAAC,CAiGzD"}
|
|
@@ -0,0 +1,113 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Pull — download remote changes and merge into local state.
|
|
3
|
+
*
|
|
4
|
+
* Extracted from Interocitor. Not part of the public API.
|
|
5
|
+
*/
|
|
6
|
+
import { hlcParse, hlcReceive, hlcCompareStr, hlcSerialize } from "./hlc.js";
|
|
7
|
+
import { applyChangeEntry } from "./crdt.js";
|
|
8
|
+
import { paths, textDecoder, log } from "./internals.js";
|
|
9
|
+
import { decodeChangePayload } from "./codec.js";
|
|
10
|
+
import { readJsonIfExists } from "./manifest.js";
|
|
11
|
+
/**
 * Notify listeners about every row touched by a merged change entry and
 * record each row's table in `knownTables`. Tombstoned rows produce a
 * 'delete' event; live rows produce a 'change' event carrying the row.
 */
function emitAffectedRows(affected, knownTables, emit) {
    for (const row of affected) {
        const { table, rowId, deleted } = row._meta;
        knownTables.add(table);
        emit(deleted
            ? { type: 'delete', table, rowId }
            : { type: 'change', table, rowId, row });
    }
}
|
|
22
|
+
/**
 * Pull: download remote change files newer than the local cursor and
 * merge them into local state. Returns the updated HLC.
 *
 * Flow: load/validate the manifest, try the head.json fast-path, list
 * the flat changes folder, merge each new entry in name order, then
 * persist the advanced cursor and HLC. Any decode failure poisons the
 * remote and aborts the pull.
 */
export async function pull(ctx) {
    const { adapter, local, remotePath, codecState, tables, knownTables, emit } = ctx;
    let hlc = ctx.hlc;
    log('debug', 'pull() — start');
    emit({ type: 'sync:start' });
    try {
        await ctx.loadOrCreateManifest();
        const p = paths(remotePath);
        // Cursor is the HLC of the newest entry merged so far ('' = never).
        const cursorRaw = await local.getMeta('cursor');
        const cursor = typeof cursorRaw === 'string' ? cursorRaw : '';
        // Fast path: if global head hasn't advanced past cursor, skip listing.
        const head = await readJsonIfExists(adapter, p.changesHead);
        emit({
            type: 'trace:head',
            op: 'read',
            reason: 'pull-fast-path',
            path: p.changesHead,
            priorHlc: head?.latestHlc ?? null,
        });
        if (head?.latestHlc && cursor && hlcCompareStr(head.latestHlc, cursor) <= 0) {
            log('debug', 'pull() — head unchanged, skipping');
            emit({
                type: 'trace:head',
                op: 'skip-no-change',
                reason: 'pull-fast-path',
                path: p.changesHead,
                priorHlc: head.latestHlc,
                nextHlc: cursor,
            });
            emit({ type: 'sync:complete', entriesMerged: 0 });
            return hlc;
        }
        // List the flat changes folder once.
        let files;
        try {
            // A missing folder just means no device has pushed yet.
            files = await adapter.listFiles(p.changesFolder);
        }
        catch {
            log('debug', 'pull() — changes folder not found, nothing to merge');
            emit({ type: 'sync:complete', entriesMerged: 0 });
            return hlc;
        }
        // File names start with the entry's HLC, so lexicographic order is
        // (approximately) causal order.
        files.sort((a, b) => a.name.localeCompare(b.name));
        let totalMerged = 0;
        let latestMergedHlc = cursor;
        for (const file of files) {
            if (file.name === 'head.json')
                continue;
            try {
                // Names look like `<hlc>-chg_<id>`; skip anything else.
                const chgIdx = file.name.lastIndexOf('-chg_');
                if (chgIdx === -1)
                    continue;
                const fileHlc = file.name.slice(0, chgIdx);
                // Cheap name-based skip before paying for the GET …
                if (cursor && hlcCompareStr(fileHlc, cursor) <= 0)
                    continue;
                const raw = textDecoder.decode(await adapter.readFile(file.path));
                const entry = await decodeChangePayload(codecState, local, raw, file.path);
                // … and an authoritative skip on the decoded entry's HLC.
                if (cursor && hlcCompareStr(entry.hlc, cursor) <= 0)
                    continue;
                // Fold the remote clock into ours before applying its ops.
                const remoteHlc = hlcParse(entry.hlc);
                hlc = hlcReceive(hlc, remoteHlc);
                await ctx.ensureRowsCached(entry.ops);
                const affected = applyChangeEntry(tables, entry, codecState.manifest?.schema ?? 1, ctx.schema);
                if (affected.length > 0) {
                    await local.putRows(affected);
                    totalMerged += affected.length;
                    emitAffectedRows(affected, knownTables, emit);
                }
                if (!latestMergedHlc || hlcCompareStr(entry.hlc, latestMergedHlc) > 0) {
                    latestMergedHlc = entry.hlc;
                }
            }
            catch (err) {
                // Undecodable change file: report it, poison the remote, abort.
                emit({ type: 'decode:error', error: err instanceof Error ? err : new Error(String(err)), path: file.path, context: { stage: 'pull', name: file.name } });
                throw await ctx.poisonRemote(err, file.path);
            }
        }
        // Persist progress only after the whole merge loop succeeded.
        if (latestMergedHlc && latestMergedHlc !== cursor) {
            await local.setMeta('cursor', latestMergedHlc);
        }
        await local.setMeta('hlc', hlcSerialize(hlc));
        log('debug', 'pull() — complete', { totalMerged });
        emit({ type: 'sync:complete', entriesMerged: totalMerged });
        return hlc;
    }
    catch (err) {
        log('error', 'pull() — failed', err);
        emit({ type: 'sync:error', error: err });
        throw err;
    }
}
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Stable client-side row ID generator.
|
|
3
|
+
*
|
|
4
|
+
* Use for synced rows. Do not use auto-increment IDs across devices.
|
|
5
|
+
* Default output uses `crypto.randomUUID()` when available.
|
|
6
|
+
*/
|
|
7
|
+
export interface CreateRowIdOptions {
|
|
8
|
+
/** Optional prefix like `task`, `meal`, `note`. */
|
|
9
|
+
prefix?: string;
|
|
10
|
+
}
|
|
11
|
+
export declare function createRowId(options?: CreateRowIdOptions): string;
|
|
12
|
+
//# sourceMappingURL=row-id.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"row-id.d.ts","sourceRoot":"","sources":["../../src/core/row-id.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AACH,MAAM,WAAW,kBAAkB;IACjC,mDAAmD;IACnD,MAAM,CAAC,EAAE,MAAM,CAAC;CACjB;AAQD,wBAAgB,WAAW,CAAC,OAAO,GAAE,kBAAuB,GAAG,MAAM,CAKpE"}
|
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
function randomHex(bytes) {
|
|
2
|
+
const buf = new Uint8Array(bytes);
|
|
3
|
+
crypto.getRandomValues(buf);
|
|
4
|
+
return Array.from(buf, b => b.toString(16).padStart(2, '0')).join('');
|
|
5
|
+
}
|
|
6
|
+
export function createRowId(options = {}) {
|
|
7
|
+
const base = typeof crypto.randomUUID === 'function'
|
|
8
|
+
? crypto.randomUUID()
|
|
9
|
+
: `${randomHex(8)}-${randomHex(4)}-${randomHex(4)}-${randomHex(4)}-${randomHex(12)}`;
|
|
10
|
+
return options.prefix ? `${options.prefix}_${base}` : base;
|
|
11
|
+
}
|
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
import type { IndexableSchemaField, OptionalSchemaField, SchemaField } from './types.ts';
|
|
2
|
+
type BaseField<T, K extends import('./types.ts').SchemaFieldKind> = SchemaField<T, K> & {
|
|
3
|
+
readonly optional: OptionalSchemaField<T, K>;
|
|
4
|
+
};
|
|
5
|
+
type BaseIndexableField<T> = IndexableSchemaField<T> & {
|
|
6
|
+
readonly optional: OptionalSchemaField<T, import('./types.ts').IndexableSchemaFieldKind>;
|
|
7
|
+
};
|
|
8
|
+
export declare const types: {
|
|
9
|
+
string: BaseIndexableField<string>;
|
|
10
|
+
number: BaseIndexableField<number>;
|
|
11
|
+
boolean: BaseIndexableField<boolean>;
|
|
12
|
+
date: BaseIndexableField<Date>;
|
|
13
|
+
Date: BaseIndexableField<Date>;
|
|
14
|
+
json: BaseField<unknown, "json">;
|
|
15
|
+
/** Type a JSON field explicitly: `types.typed<MyType[]>('json')` */
|
|
16
|
+
typed<T>(kind: "json"): BaseField<T, "json">;
|
|
17
|
+
enum<const T extends string>(..._values: T[]): BaseIndexableField<T>;
|
|
18
|
+
index<T>(field: IndexableSchemaField<T> & {
|
|
19
|
+
__optional?: never;
|
|
20
|
+
}): IndexableSchemaField<T>;
|
|
21
|
+
unique<T>(field: IndexableSchemaField<T> & {
|
|
22
|
+
__optional?: never;
|
|
23
|
+
}): IndexableSchemaField<T>;
|
|
24
|
+
};
|
|
25
|
+
export {};
|
|
26
|
+
//# sourceMappingURL=schema-types.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"schema-types.d.ts","sourceRoot":"","sources":["../../src/core/schema-types.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,oBAAoB,EAAE,mBAAmB,EAAE,WAAW,EAAE,MAAM,YAAY,CAAC;AAEzF,KAAK,SAAS,CAAC,CAAC,EAAE,CAAC,SAAS,OAAO,YAAY,EAAE,eAAe,IAAI,WAAW,CAAC,CAAC,EAAE,CAAC,CAAC,GAAG;IACtF,QAAQ,CAAC,QAAQ,EAAE,mBAAmB,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;CAC9C,CAAC;AAEF,KAAK,kBAAkB,CAAC,CAAC,IAAI,oBAAoB,CAAC,CAAC,CAAC,GAAG;IACrD,QAAQ,CAAC,QAAQ,EAAE,mBAAmB,CAAC,CAAC,EAAE,OAAO,YAAY,EAAE,wBAAwB,CAAC,CAAC;CAC1F,CAAC;AAaF,eAAO,MAAM,KAAK;YAC8D,kBAAkB,CAAC,MAAM,CAAC;YAC1B,kBAAkB,CAAC,MAAM,CAAC;aACzB,kBAAkB,CAAC,OAAO,CAAC;UAC9B,kBAAkB,CAAC,IAAI,CAAC;UACxB,kBAAkB,CAAC,IAAI,CAAC;UAC9B,SAAS,CAAC,OAAO,EAAE,MAAM,CAAC;IAEhG,oEAAoE;UAC9D,CAAC,QAAQ,MAAM,GAAG,SAAS,CAAC,CAAC,EAAE,MAAM,CAAC;eAIjC,CAAC,SAAS,MAAM,cAAc,CAAC,EAAE,GAAG,kBAAkB,CAAC,CAAC,CAAC;UAI9D,CAAC,SAAS,oBAAoB,CAAC,CAAC,CAAC,GAAG;QAAE,UAAU,CAAC,EAAE,KAAK,CAAA;KAAE,GAAG,oBAAoB,CAAC,CAAC,CAAC;WAInF,CAAC,SAAS,oBAAoB,CAAC,CAAC,CAAC,GAAG;QAAE,UAAU,CAAC,EAAE,KAAK,CAAA;KAAE,GAAG,oBAAoB,CAAC,CAAC,CAAC;CAG5F,CAAC"}
|
|
@@ -0,0 +1,31 @@
|
|
|
1
|
+
/**
 * Attach a read-only `.optional` variant to a field descriptor: a copy
 * of the field with `optional`/`__optional` flags set. Defined via a
 * non-writable, non-configurable property so schema literals cannot
 * clobber it.
 */
function withOptional(field) {
    const optionalVariant = { ...field, optional: true, __optional: true };
    Object.defineProperty(field, 'optional', {
        value: optionalVariant,
        enumerable: true,
        configurable: false,
        writable: false,
    });
    return field;
}
|
|
11
|
+
/**
 * Built-in schema field descriptors. Every scalar descriptor carries an
 * `.optional` variant; `index()`/`unique()` return flat copies of a
 * field with the corresponding flags switched on.
 */
export const types = {
    string: withOptional({ kind: 'string' }),
    number: withOptional({ kind: 'number' }),
    boolean: withOptional({ kind: 'boolean' }),
    date: withOptional({ kind: 'date' }),
    Date: withOptional({ kind: 'date' }),
    json: withOptional({ kind: 'json' }),
    /** Type a JSON field explicitly: `types.typed<MyType[]>('json')` */
    typed: (kind) => withOptional({ kind }),
    // Enum values exist only at the type level; runtime keeps the kind tag.
    enum: (..._values) => withOptional({ kind: 'enum' }),
    index: (field) => ({ ...field, index: true }),
    unique: (field) => ({ ...field, index: true, unique: true }),
};
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"schema-types.type-test.d.ts","sourceRoot":"","sources":["../../src/core/schema-types.type-test.ts"],"names":[],"mappings":""}
|
|
@@ -0,0 +1,224 @@
|
|
|
1
|
+
import { types } from "./schema-types.js";
|
|
2
|
+
// ─── Scalar types carry their generic ────────────────────────────────
|
|
3
|
+
const _s = types.string;
|
|
4
|
+
const _n = types.number;
|
|
5
|
+
const _b = types.boolean;
|
|
6
|
+
const _d = types.date;
|
|
7
|
+
const _D = types.Date;
|
|
8
|
+
const _j = types.json;
|
|
9
|
+
void _s;
|
|
10
|
+
void _n;
|
|
11
|
+
void _b;
|
|
12
|
+
void _d;
|
|
13
|
+
void _D;
|
|
14
|
+
void _j;
|
|
15
|
+
// ─── enum() infers literal union from const generic ─────────────────
|
|
16
|
+
const statusType = types.enum('open', 'done', 'archived');
|
|
17
|
+
void statusType;
|
|
18
|
+
const indexedStatus = types.index(types.enum('open', 'done', 'archived'));
|
|
19
|
+
const uniqueRole = types.unique(types.enum('admin', 'viewer'));
|
|
20
|
+
void indexedStatus;
|
|
21
|
+
void uniqueRole;
|
|
22
|
+
// @ts-expect-error — 'open'|'done'|'other' is wider than the target 'open'|'done'
|
|
23
|
+
const _wrongEnum = types.enum('open', 'done', 'other');
|
|
24
|
+
void _wrongEnum;
|
|
25
|
+
// @ts-expect-error — string is wider than 'open'|'done'
|
|
26
|
+
const _enumNotString = types.index(types.string);
|
|
27
|
+
void _enumNotString;
|
|
28
|
+
// ─── index() and unique() return the same flat shape with flags set ──
|
|
29
|
+
const indexedString = types.index(types.string);
|
|
30
|
+
const uniqueNumber = types.unique(types.number);
|
|
31
|
+
const indexedDate = types.index(types.Date);
|
|
32
|
+
void indexedString;
|
|
33
|
+
void uniqueNumber;
|
|
34
|
+
void indexedDate;
|
|
35
|
+
// @ts-expect-error — IndexableSchemaField<string> is not IndexableSchemaField<number>
|
|
36
|
+
const _wrongGeneric = types.index(types.string);
|
|
37
|
+
void _wrongGeneric;
|
|
38
|
+
// ─── json is not indexable ──────────────────────────────────────────
|
|
39
|
+
// @ts-expect-error — SchemaField<unknown> (json) is not IndexableSchemaField
|
|
40
|
+
types.index(types.json);
|
|
41
|
+
// @ts-expect-error — SchemaField<unknown> (json) is not IndexableSchemaField
|
|
42
|
+
types.unique(types.json);
|
|
43
|
+
// ─── Can't pass an already-indexed field back into index/unique ──────
|
|
44
|
+
// (IndexableSchemaField is still IndexableSchemaField — nesting compiles,
|
|
45
|
+
// but index/unique are idempotent by design; the flag just stays true)
|
|
46
|
+
// ─── Bare string literal is not a valid field descriptor ─────────────
|
|
47
|
+
// @ts-expect-error — 'string' is not IndexableSchemaField
|
|
48
|
+
types.index('string');
|
|
49
|
+
// ─── Full schema definition compiles ────────────────────────────────
|
|
50
|
+
const schema = {
|
|
51
|
+
tables: {
|
|
52
|
+
tasks: {
|
|
53
|
+
fields: {
|
|
54
|
+
status: types.index(types.enum('open', 'done', 'archived')),
|
|
55
|
+
assignee: types.unique(types.string),
|
|
56
|
+
priority: types.index(types.number),
|
|
57
|
+
dueDate: types.Date,
|
|
58
|
+
payload: types.json,
|
|
59
|
+
},
|
|
60
|
+
},
|
|
61
|
+
},
|
|
62
|
+
};
|
|
63
|
+
void schema;
|
|
64
|
+
// ─── Generic DatabaseSchemaDefinition + InferSchemaType ─────────────
|
|
65
|
+
// Build a typed schema via `satisfies` — TS widens to the generic but keeps
|
|
66
|
+
// the phantom types intact for inference.
|
|
67
|
+
const typedSchema = {
|
|
68
|
+
tables: {
|
|
69
|
+
tasks: {
|
|
70
|
+
fields: {
|
|
71
|
+
title: types.string,
|
|
72
|
+
status: types.enum('open', 'done'),
|
|
73
|
+
priority: types.number,
|
|
74
|
+
},
|
|
75
|
+
},
|
|
76
|
+
notes: {
|
|
77
|
+
fields: {
|
|
78
|
+
content: types.string,
|
|
79
|
+
pinned: types.boolean,
|
|
80
|
+
},
|
|
81
|
+
},
|
|
82
|
+
},
|
|
83
|
+
};
|
|
84
|
+
// Field types are correct
|
|
85
|
+
const _taskTitle = 'hello';
|
|
86
|
+
const _taskStatus = 'open';
|
|
87
|
+
const _taskPrio = 42;
|
|
88
|
+
const _noteContent = 'text';
|
|
89
|
+
const _notePinned = true;
|
|
90
|
+
void _taskTitle;
|
|
91
|
+
void _taskStatus;
|
|
92
|
+
void _taskPrio;
|
|
93
|
+
void _noteContent;
|
|
94
|
+
void _notePinned;
|
|
95
|
+
// InferTableType matches direct extraction
|
|
96
|
+
const _same = {};
|
|
97
|
+
void _same;
|
|
98
|
+
// @ts-expect-error — 'other' is not in the 'open'|'done' union
|
|
99
|
+
const _badStatus = 'other';
|
|
100
|
+
void _badStatus;
|
|
101
|
+
// @ts-expect-error — number is not string
|
|
102
|
+
const _badTitle = 42;
|
|
103
|
+
void _badTitle;
|
|
104
|
+
// ─── TableSchemaDefinition<T> typed fields ───────────────────────────
|
|
105
|
+
// Valid typed table schema compiles
|
|
106
|
+
const _validTableSchema = {
|
|
107
|
+
fields: { title: types.string, priority: types.number },
|
|
108
|
+
};
|
|
109
|
+
void _validTableSchema;
|
|
110
|
+
// Field type mismatch — SchemaField<number> is not SchemaField<string>
|
|
111
|
+
const _mismatchedField = {
|
|
112
|
+
// @ts-expect-error — IndexableSchemaField<number> is not SchemaField<string>
|
|
113
|
+
fields: { title: types.number },
|
|
114
|
+
};
|
|
115
|
+
void _mismatchedField;
|
|
116
|
+
// ─── SyncConfig<S> carries schema type ──────────────────────────────
|
|
117
|
+
// Config typed explicitly — schema must match S
|
|
118
|
+
const _config = {
|
|
119
|
+
remotePath: '/App',
|
|
120
|
+
appName: 'App',
|
|
121
|
+
schema: {
|
|
122
|
+
tables: { tasks: { fields: { title: types.string } } },
|
|
123
|
+
},
|
|
124
|
+
};
|
|
125
|
+
void _config;
|
|
126
|
+
// Schema field type mismatch caught inline
|
|
127
|
+
const _badConfig = {
|
|
128
|
+
remotePath: '/App',
|
|
129
|
+
appName: 'App',
|
|
130
|
+
schema: {
|
|
131
|
+
tables: { tasks: { fields: {
|
|
132
|
+
// @ts-expect-error — IndexableSchemaField<number> is not SchemaField<string>
|
|
133
|
+
title: types.number,
|
|
134
|
+
} } },
|
|
135
|
+
},
|
|
136
|
+
};
|
|
137
|
+
void _badConfig;
|
|
138
|
+
const tasksTable = typedEngine.table('tasks');
|
|
139
|
+
const _addOk = { title: 'x', status: 'open', priority: 1 };
|
|
140
|
+
// @ts-expect-error — missing required field status
|
|
141
|
+
const _addBad = { title: 'x', priority: 1 };
|
|
142
|
+
const _replaceOk = { title: 'x', status: 'open', priority: 1 };
|
|
143
|
+
// @ts-expect-error — missing required field priority
|
|
144
|
+
const _replaceBad = { title: 'x', status: 'open' };
|
|
145
|
+
void _addOk;
|
|
146
|
+
void _replaceOk;
|
|
147
|
+
// _TasksRow should be { title: string; ... } | undefined — not Record<string,unknown>
|
|
148
|
+
const _checkRow = { title: 'x', status: 'open', priority: 1 };
|
|
149
|
+
void _checkRow;
|
|
150
|
+
// @ts-expect-error — 'nonexistent' is not keyof DB (no fallback overload)
|
|
151
|
+
typedEngine.table('nonexistent');
|
|
152
|
+
// ─── Regression: satisfies DatabaseSchemaDefinition infers correctly ─
|
|
153
|
+
const userSchema = {
|
|
154
|
+
tables: {
|
|
155
|
+
weekPlans: {
|
|
156
|
+
fields: {
|
|
157
|
+
weekId: types.string,
|
|
158
|
+
plan: types.json,
|
|
159
|
+
createdAt: types.date,
|
|
160
|
+
},
|
|
161
|
+
},
|
|
162
|
+
receipts: {
|
|
163
|
+
fields: {
|
|
164
|
+
id: types.string,
|
|
165
|
+
weekId: types.index(types.string),
|
|
166
|
+
totalActual: types.number,
|
|
167
|
+
},
|
|
168
|
+
},
|
|
169
|
+
},
|
|
170
|
+
};
|
|
171
|
+
const _wp = { weekId: 'w1', plan: {}, createdAt: new Date() };
|
|
172
|
+
const _r = { id: 'r1', weekId: 'w1', totalActual: 42 };
|
|
173
|
+
void _wp;
|
|
174
|
+
void _r;
|
|
175
|
+
// @ts-expect-error — number not assignable to string
|
|
176
|
+
const _badWp = { weekId: 42, plan: {}, createdAt: new Date() };
|
|
177
|
+
void _badWp;
|
|
178
|
+
// ─── _type phantom: no undefined bleeding into field types ───────────
|
|
179
|
+
const _weekId = 'hello'; // string not string|undefined
|
|
180
|
+
const _date = new Date(); // Date not Date|undefined
|
|
181
|
+
const _num = 42; // number
|
|
182
|
+
// @ts-expect-error — string is not number
|
|
183
|
+
const _badNum = 'x';
|
|
184
|
+
// ─── Optional fields ─────────────────────────────────────────────────
|
|
185
|
+
const _optionalStringField = types.string.optional;
|
|
186
|
+
const _optionalJsonField = types.typed('json').optional;
|
|
187
|
+
// @ts-expect-error — optional fields cannot be indexed
|
|
188
|
+
const _optionalIndexedString = types.index(types.string.optional);
|
|
189
|
+
// @ts-expect-error — optional fields cannot be unique
|
|
190
|
+
const _optionalUniqueString = types.unique(types.string.optional);
|
|
191
|
+
const optionalSchema = {
|
|
192
|
+
tables: {
|
|
193
|
+
tasks: {
|
|
194
|
+
fields: {
|
|
195
|
+
title: types.string,
|
|
196
|
+
note: types.string.optional,
|
|
197
|
+
payload: types.typed('json').optional,
|
|
198
|
+
},
|
|
199
|
+
},
|
|
200
|
+
},
|
|
201
|
+
};
|
|
202
|
+
const _optionalOk1 = { title: 'x' };
|
|
203
|
+
const _noteRead = _optRow.note;
|
|
204
|
+
// @ts-expect-error — optional field is string | undefined, not string
|
|
205
|
+
const _noteStrict = _optRow.note;
|
|
206
|
+
void _noteRead;
|
|
207
|
+
void _noteStrict;
|
|
208
|
+
const _optionalOk2 = { title: 'x', note: 'hello', payload: { foo: 'bar' } };
|
|
209
|
+
// @ts-expect-error — title required
|
|
210
|
+
const _optionalBad1 = { note: 'hello' };
|
|
211
|
+
// @ts-expect-error — note must be string when present
|
|
212
|
+
const _optionalBad2 = { title: 'x', note: 42 };
|
|
213
|
+
// @ts-expect-error — payload must match typed JSON payload
|
|
214
|
+
const _optionalBad3 = { title: 'x', payload: { foo: 42 } };
|
|
215
|
+
void _optionalStringField;
|
|
216
|
+
void _optionalJsonField;
|
|
217
|
+
void _optionalIndexedString;
|
|
218
|
+
void _optionalUniqueString;
|
|
219
|
+
void _optionalOk1;
|
|
220
|
+
void _optionalOk2;
|
|
221
|
+
void _weekId;
|
|
222
|
+
void _date;
|
|
223
|
+
void _num;
|
|
224
|
+
void _badNum;
|