sh3-core 0.8.2 → 0.9.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/api.d.ts +4 -7
- package/dist/api.js +2 -4
- package/dist/app/store/InstalledView.svelte +55 -1
- package/dist/app/store/PermissionConfirmModal.svelte +232 -0
- package/dist/app/store/PermissionConfirmModal.svelte.d.ts +17 -0
- package/dist/app/store/StoreView.svelte +119 -5
- package/dist/app/store/storeShard.svelte.d.ts +10 -1
- package/dist/app/store/storeShard.svelte.js +51 -7
- package/dist/app/store/storeShard.svelte.test.js +34 -0
- package/dist/apps/types.d.ts +3 -5
- package/dist/documents/backends.d.ts +2 -0
- package/dist/documents/backends.js +6 -0
- package/dist/documents/browse.d.ts +31 -1
- package/dist/documents/browse.js +18 -2
- package/dist/documents/browse.test.js +81 -0
- package/dist/documents/handle.js +13 -5
- package/dist/documents/handle.test.js +55 -0
- package/dist/documents/http-backend.d.ts +11 -4
- package/dist/documents/http-backend.js +37 -11
- package/dist/documents/index.d.ts +2 -1
- package/dist/documents/index.js +1 -1
- package/dist/documents/sync-types.d.ts +45 -0
- package/dist/documents/sync-types.js +11 -0
- package/dist/documents/types.d.ts +69 -2
- package/dist/documents/types.js +32 -2
- package/dist/keys/ConsentDialog.svelte +4 -4
- package/dist/keys/consent.test.js +4 -3
- package/dist/keys/types.d.ts +4 -2
- package/dist/registry/client.js +3 -0
- package/dist/registry/installer.d.ts +4 -1
- package/dist/registry/installer.js +25 -11
- package/dist/registry/permission-descriptions.d.ts +21 -0
- package/dist/registry/permission-descriptions.js +67 -0
- package/dist/registry/permission-descriptions.test.js +86 -0
- package/dist/registry/schema.js +19 -6
- package/dist/registry/types.d.ts +17 -5
- package/dist/server-shard/types.d.ts +55 -8
- package/dist/shards/activate-browse.test.js +87 -3
- package/dist/shards/activate.svelte.js +9 -31
- package/dist/shards/types.d.ts +0 -15
- package/dist/shell/views/KeysAndPeers.svelte +1 -1
- package/dist/version.d.ts +1 -1
- package/dist/version.js +1 -1
- package/package.json +2 -10
- package/dist/documents/journal-hook.d.ts +0 -6
- package/dist/documents/journal-hook.js +0 -16
- package/dist/documents/sync/activate-integration.test.js +0 -37
- package/dist/documents/sync/components/DocumentSyncExplorer.svelte +0 -99
- package/dist/documents/sync/components/DocumentSyncExplorer.svelte.d.ts +0 -15
- package/dist/documents/sync/components/SyncGrantPicker.svelte +0 -70
- package/dist/documents/sync/components/SyncGrantPicker.svelte.d.ts +0 -12
- package/dist/documents/sync/conflicts.d.ts +0 -30
- package/dist/documents/sync/conflicts.js +0 -77
- package/dist/documents/sync/conflicts.test.js +0 -71
- package/dist/documents/sync/engine.d.ts +0 -19
- package/dist/documents/sync/engine.js +0 -188
- package/dist/documents/sync/engine.test.d.ts +0 -1
- package/dist/documents/sync/engine.test.js +0 -169
- package/dist/documents/sync/handle.d.ts +0 -11
- package/dist/documents/sync/handle.js +0 -79
- package/dist/documents/sync/handle.test.js +0 -56
- package/dist/documents/sync/hash.d.ts +0 -1
- package/dist/documents/sync/hash.js +0 -13
- package/dist/documents/sync/hash.test.d.ts +0 -1
- package/dist/documents/sync/hash.test.js +0 -20
- package/dist/documents/sync/index.d.ts +0 -5
- package/dist/documents/sync/index.js +0 -10
- package/dist/documents/sync/journal.d.ts +0 -30
- package/dist/documents/sync/journal.js +0 -179
- package/dist/documents/sync/journal.test.d.ts +0 -1
- package/dist/documents/sync/journal.test.js +0 -87
- package/dist/documents/sync/observer.d.ts +0 -3
- package/dist/documents/sync/observer.js +0 -45
- package/dist/documents/sync/registry.d.ts +0 -13
- package/dist/documents/sync/registry.js +0 -73
- package/dist/documents/sync/registry.test.d.ts +0 -1
- package/dist/documents/sync/registry.test.js +0 -53
- package/dist/documents/sync/serialization.d.ts +0 -5
- package/dist/documents/sync/serialization.js +0 -24
- package/dist/documents/sync/serialization.test.d.ts +0 -1
- package/dist/documents/sync/serialization.test.js +0 -26
- package/dist/documents/sync/singleton.d.ts +0 -11
- package/dist/documents/sync/singleton.js +0 -26
- package/dist/documents/sync/tombstones.d.ts +0 -19
- package/dist/documents/sync/tombstones.js +0 -58
- package/dist/documents/sync/tombstones.test.d.ts +0 -1
- package/dist/documents/sync/tombstones.test.js +0 -37
- package/dist/documents/sync/types.d.ts +0 -116
- package/dist/documents/sync/types.js +0 -27
- package/dist/documents/sync/write-hook.test.d.ts +0 -1
- package/dist/documents/sync/write-hook.test.js +0 -36
- package/dist/server-sync.d.ts +0 -6
- package/dist/server-sync.js +0 -634
- package/dist/server-sync.js.map +0 -7
- package/dist/shards/activate-sync-registry.test.d.ts +0 -1
- package/dist/shards/activate-sync-registry.test.js +0 -42
- package/dist/testing.d.ts +0 -3
- package/dist/testing.js +0 -77
- package/dist/testing.js.map +0 -7
- /package/dist/{documents/sync/activate-integration.test.d.ts → app/store/storeShard.svelte.test.d.ts} +0 -0
- /package/dist/documents/{sync/handle.test.d.ts → handle.test.d.ts} +0 -0
- /package/dist/{documents/sync/conflicts.test.d.ts → registry/permission-descriptions.test.d.ts} +0 -0
|
@@ -1,56 +0,0 @@
|
|
|
1
|
-
import { describe, it, expect } from 'vitest';
|
|
2
|
-
import { MemoryDocumentBackend } from '../backends';
|
|
3
|
-
import { SyncEngine } from './engine';
|
|
4
|
-
import { createSyncHandle } from './handle';
|
|
5
|
-
import { createSyncRegistry, __grantInternal } from './registry';
|
|
6
|
-
import { hashContent } from './hash';
|
|
7
|
-
import { ScopeNotGrantedError } from './types';
|
|
8
|
-
async function setup() {
|
|
9
|
-
const backend = new MemoryDocumentBackend();
|
|
10
|
-
const engine = new SyncEngine(backend, 'tenant-a');
|
|
11
|
-
await engine.init();
|
|
12
|
-
const registry = createSyncRegistry(backend, 'tenant-a');
|
|
13
|
-
const handle = createSyncHandle({ tenantId: 'tenant-a', connectorId: 'conn-A', engine, registry });
|
|
14
|
-
return { backend, engine, registry, handle };
|
|
15
|
-
}
|
|
16
|
-
describe('createSyncHandle', () => {
|
|
17
|
-
it('grantedScopes returns [] when no grants exist', async () => {
|
|
18
|
-
const { handle } = await setup();
|
|
19
|
-
expect(await handle.grantedScopes()).toEqual([]);
|
|
20
|
-
});
|
|
21
|
-
it('allows apply within a granted shard scope', async () => {
|
|
22
|
-
const { backend, handle } = await setup();
|
|
23
|
-
await __grantInternal(backend, 'tenant-a', 'conn-A', { kind: 'shard', shardId: 's1' });
|
|
24
|
-
const h = await hashContent('x');
|
|
25
|
-
const out = await handle.apply({ kind: 'shard', shardId: 's1' }, {
|
|
26
|
-
path: 'a.md', shardId: 's1', op: 'upsert',
|
|
27
|
-
content: 'x', remoteHash: h, remoteMtime: 1,
|
|
28
|
-
});
|
|
29
|
-
expect(out.status).toBe('applied');
|
|
30
|
-
});
|
|
31
|
-
it('throws ScopeNotGrantedError when scope is not granted', async () => {
|
|
32
|
-
const { handle } = await setup();
|
|
33
|
-
await expect(handle.apply({ kind: 'shard', shardId: 's2' }, {
|
|
34
|
-
path: 'a.md', shardId: 's2', op: 'upsert',
|
|
35
|
-
content: 'x', remoteHash: 'h', remoteMtime: 1,
|
|
36
|
-
})).rejects.toBeInstanceOf(ScopeNotGrantedError);
|
|
37
|
-
});
|
|
38
|
-
it('revocation between calls takes effect immediately', async () => {
|
|
39
|
-
const { backend, registry, handle } = await setup();
|
|
40
|
-
await __grantInternal(backend, 'tenant-a', 'conn-A', { kind: 'shard', shardId: 's1' });
|
|
41
|
-
await handle.getManifest({ kind: 'shard', shardId: 's1' });
|
|
42
|
-
await registry.revoke('conn-A', { kind: 'shard', shardId: 's1' });
|
|
43
|
-
await expect(handle.getManifest({ kind: 'shard', shardId: 's1' })).rejects.toBeInstanceOf(ScopeNotGrantedError);
|
|
44
|
-
});
|
|
45
|
-
it('tenant scope expands to granted shard scopes for getManifest', async () => {
|
|
46
|
-
const { backend, handle } = await setup();
|
|
47
|
-
await __grantInternal(backend, 'tenant-a', 'conn-A', { kind: 'shard', shardId: 's1' });
|
|
48
|
-
await __grantInternal(backend, 'tenant-a', 'conn-A', { kind: 'shard', shardId: 's2' });
|
|
49
|
-
await backend.write('tenant-a', 's1', 'a.md', 'A');
|
|
50
|
-
await backend.write('tenant-a', 's2', 'b.md', 'B');
|
|
51
|
-
// Grant also tenant scope itself:
|
|
52
|
-
await __grantInternal(backend, 'tenant-a', 'conn-A', { kind: 'tenant' });
|
|
53
|
-
const m = await handle.getManifest({ kind: 'tenant' });
|
|
54
|
-
expect(m.map((e) => `${e.shardId}:${e.path}`).sort()).toEqual(['s1:a.md', 's2:b.md']);
|
|
55
|
-
});
|
|
56
|
-
});
|
|
@@ -1 +0,0 @@
|
|
|
1
|
-
export declare function hashContent(content: string | ArrayBuffer): Promise<string>;
|
|
@@ -1,13 +0,0 @@
|
|
|
1
|
-
/*
|
|
2
|
-
* Content hashing for the sync subsystem. sha-256 via SubtleCrypto,
|
|
3
|
-
* truncated to 16 hex chars — enough for collision resistance at
|
|
4
|
-
* per-user document scale while keeping manifest entries small.
|
|
5
|
-
*/
|
|
6
|
-
export async function hashContent(content) {
|
|
7
|
-
const buf = typeof content === 'string' ? new TextEncoder().encode(content) : new Uint8Array(content);
|
|
8
|
-
const digest = await crypto.subtle.digest('SHA-256', buf);
|
|
9
|
-
const hex = Array.from(new Uint8Array(digest))
|
|
10
|
-
.map((b) => b.toString(16).padStart(2, '0'))
|
|
11
|
-
.join('');
|
|
12
|
-
return hex.slice(0, 16);
|
|
13
|
-
}
|
|
@@ -1 +0,0 @@
|
|
|
1
|
-
export {};
|
|
@@ -1,20 +0,0 @@
|
|
|
1
|
-
import { describe, it, expect } from 'vitest';
|
|
2
|
-
import { hashContent } from './hash';
|
|
3
|
-
describe('hashContent', () => {
|
|
4
|
-
it('produces a 16-char lowercase hex string', async () => {
|
|
5
|
-
const h = await hashContent('hello');
|
|
6
|
-
expect(h).toMatch(/^[0-9a-f]{16}$/);
|
|
7
|
-
});
|
|
8
|
-
it('is deterministic for the same input', async () => {
|
|
9
|
-
expect(await hashContent('abc')).toBe(await hashContent('abc'));
|
|
10
|
-
});
|
|
11
|
-
it('differs for different inputs', async () => {
|
|
12
|
-
expect(await hashContent('abc')).not.toBe(await hashContent('abcd'));
|
|
13
|
-
});
|
|
14
|
-
it('handles ArrayBuffer input', async () => {
|
|
15
|
-
const buf = new TextEncoder().encode('hello').buffer;
|
|
16
|
-
const hStr = await hashContent('hello');
|
|
17
|
-
const hBuf = await hashContent(buf);
|
|
18
|
-
expect(hStr).toBe(hBuf);
|
|
19
|
-
});
|
|
20
|
-
});
|
|
@@ -1,5 +0,0 @@
|
|
|
1
|
-
export type { SyncScope, SyncHandle, ManifestEntry, ApplyEntry, ApplyOpts, ApplyOutcome, ApplyBatchResult, ConflictPolicy, ConflictResolution, ConflictContext, JournalEntry, ChangePage, GrantRecord, } from './types';
|
|
2
|
-
export { PERMISSION_DOCUMENTS_SYNC, SYNC_RESERVED_SHARD_ID, ScopeNotGrantedError, ScopeRevokedError, TenantMismatchError, } from './types';
|
|
3
|
-
export type { SyncRegistry } from './registry';
|
|
4
|
-
export { default as SyncGrantPicker } from './components/SyncGrantPicker.svelte';
|
|
5
|
-
export { default as DocumentSyncExplorer } from './components/DocumentSyncExplorer.svelte';
|
|
@@ -1,10 +0,0 @@
|
|
|
1
|
-
/*
|
|
2
|
-
* Document Sync API — public surface.
|
|
3
|
-
*
|
|
4
|
-
* Connector shards consume this module via the main sh3-core barrel.
|
|
5
|
-
* The __grantInternal helper is intentionally NOT re-exported here;
|
|
6
|
-
* it is imported directly by SyncGrantPicker.svelte only.
|
|
7
|
-
*/
|
|
8
|
-
export { PERMISSION_DOCUMENTS_SYNC, SYNC_RESERVED_SHARD_ID, ScopeNotGrantedError, ScopeRevokedError, TenantMismatchError, } from './types';
|
|
9
|
-
export { default as SyncGrantPicker } from './components/SyncGrantPicker.svelte';
|
|
10
|
-
export { default as DocumentSyncExplorer } from './components/DocumentSyncExplorer.svelte';
|
|
@@ -1,30 +0,0 @@
|
|
|
1
|
-
import type { DocumentBackend } from '../types';
|
|
2
|
-
import type { ChangePage, JournalEntry, SyncScope } from './types';
|
|
3
|
-
export declare class Journal {
|
|
4
|
-
#private;
|
|
5
|
-
private backend;
|
|
6
|
-
private tenantId;
|
|
7
|
-
constructor(backend: DocumentBackend, tenantId: string, opts?: {
|
|
8
|
-
segmentSize?: number;
|
|
9
|
-
pageSize?: number;
|
|
10
|
-
});
|
|
11
|
-
init(): Promise<void>;
|
|
12
|
-
static encodeCursor(seq: number, version: number): string;
|
|
13
|
-
static decodeCursor(cursor: string): {
|
|
14
|
-
seq: number;
|
|
15
|
-
version: number;
|
|
16
|
-
} | null;
|
|
17
|
-
append(entry: Omit<JournalEntry, 'seq' | 'ts'>): Promise<JournalEntry>;
|
|
18
|
-
oldestRetainedSeq(): Promise<number>;
|
|
19
|
-
changesSince(scope: SyncScope, cursor?: string): Promise<ChangePage>;
|
|
20
|
-
getCursor(connectorId: string): Promise<string | null>;
|
|
21
|
-
ackCursor(connectorId: string, cursor: string): Promise<void>;
|
|
22
|
-
dropCursor(connectorId: string): Promise<void>;
|
|
23
|
-
listCursors(): Promise<Array<{
|
|
24
|
-
connectorId: string;
|
|
25
|
-
cursor: string;
|
|
26
|
-
}>>;
|
|
27
|
-
minSeqAckedByAll(connectorIds: string[]): Promise<number>;
|
|
28
|
-
/** Test-only: simulate truncating all segments whose entries are <= uptoSeq. */
|
|
29
|
-
__truncateForTest(uptoSeq: number): Promise<void>;
|
|
30
|
-
}
|
|
@@ -1,179 +0,0 @@
|
|
|
1
|
-
/*
|
|
2
|
-
* Change journal for the sync subsystem.
|
|
3
|
-
*
|
|
4
|
-
* Append-only per-tenant sequence of JournalEntry, stored as JSON
|
|
5
|
-
* segments under journal/<n>.json. Cursors are opaque "<seq>:<version>"
|
|
6
|
-
* strings; version matches the journal's truncation epoch. A cursor with
|
|
7
|
-
* an older version whose seq < oldestSeq means the connector has fallen
|
|
8
|
-
* behind retained history.
|
|
9
|
-
*/
|
|
10
|
-
var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) {
|
|
11
|
-
if (kind === "m") throw new TypeError("Private method is not writable");
|
|
12
|
-
if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter");
|
|
13
|
-
if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it");
|
|
14
|
-
return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value;
|
|
15
|
-
};
|
|
16
|
-
var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) {
|
|
17
|
-
if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter");
|
|
18
|
-
if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it");
|
|
19
|
-
return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver);
|
|
20
|
-
};
|
|
21
|
-
var _Journal_meta, _Journal_segmentSize, _Journal_pageSize;
|
|
22
|
-
import { readJson, writeJson, deletePath, listJsonPaths } from './serialization';
|
|
23
|
-
const META_PATH = 'journal/meta.json';
|
|
24
|
-
const SEGMENT_PREFIX = 'journal/seg-';
|
|
25
|
-
const CURSOR_PREFIX = 'cursors/';
|
|
26
|
-
const DEFAULT_SEGMENT_SIZE = 500;
|
|
27
|
-
const DEFAULT_PAGE_SIZE = 100;
|
|
28
|
-
function segmentPath(i) {
|
|
29
|
-
return `${SEGMENT_PREFIX}${i}.json`;
|
|
30
|
-
}
|
|
31
|
-
function cursorPath(connectorId) {
|
|
32
|
-
return `${CURSOR_PREFIX}${encodeURIComponent(connectorId)}.json`;
|
|
33
|
-
}
|
|
34
|
-
function matchesScope(entry, scope) {
|
|
35
|
-
switch (scope.kind) {
|
|
36
|
-
case 'tenant': return true;
|
|
37
|
-
case 'shard': return entry.shardId === scope.shardId;
|
|
38
|
-
case 'path': return entry.shardId === scope.shardId && entry.path.startsWith(scope.prefix);
|
|
39
|
-
}
|
|
40
|
-
}
|
|
41
|
-
export class Journal {
|
|
42
|
-
constructor(backend, tenantId, opts = {}) {
|
|
43
|
-
var _a, _b;
|
|
44
|
-
this.backend = backend;
|
|
45
|
-
this.tenantId = tenantId;
|
|
46
|
-
_Journal_meta.set(this, { currentSeq: 0, currentSegment: 0, oldestSegment: 0, version: 0 });
|
|
47
|
-
_Journal_segmentSize.set(this, void 0);
|
|
48
|
-
_Journal_pageSize.set(this, void 0);
|
|
49
|
-
__classPrivateFieldSet(this, _Journal_segmentSize, (_a = opts.segmentSize) !== null && _a !== void 0 ? _a : DEFAULT_SEGMENT_SIZE, "f");
|
|
50
|
-
__classPrivateFieldSet(this, _Journal_pageSize, (_b = opts.pageSize) !== null && _b !== void 0 ? _b : DEFAULT_PAGE_SIZE, "f");
|
|
51
|
-
}
|
|
52
|
-
async init() {
|
|
53
|
-
const meta = await readJson(this.backend, this.tenantId, META_PATH);
|
|
54
|
-
if (meta)
|
|
55
|
-
__classPrivateFieldSet(this, _Journal_meta, meta, "f");
|
|
56
|
-
}
|
|
57
|
-
static encodeCursor(seq, version) {
|
|
58
|
-
return `${seq}:${version}`;
|
|
59
|
-
}
|
|
60
|
-
static decodeCursor(cursor) {
|
|
61
|
-
const m = /^(\d+):(\d+)$/.exec(cursor);
|
|
62
|
-
if (!m)
|
|
63
|
-
return null;
|
|
64
|
-
return { seq: Number(m[1]), version: Number(m[2]) };
|
|
65
|
-
}
|
|
66
|
-
async append(entry) {
|
|
67
|
-
var _a;
|
|
68
|
-
const seq = __classPrivateFieldGet(this, _Journal_meta, "f").currentSeq + 1;
|
|
69
|
-
const full = Object.assign(Object.assign({}, entry), { seq, ts: Date.now() });
|
|
70
|
-
const segIdx = __classPrivateFieldGet(this, _Journal_meta, "f").currentSegment;
|
|
71
|
-
const current = (_a = (await readJson(this.backend, this.tenantId, segmentPath(segIdx)))) !== null && _a !== void 0 ? _a : [];
|
|
72
|
-
current.push(full);
|
|
73
|
-
await writeJson(this.backend, this.tenantId, segmentPath(segIdx), current);
|
|
74
|
-
__classPrivateFieldGet(this, _Journal_meta, "f").currentSeq = seq;
|
|
75
|
-
if (current.length >= __classPrivateFieldGet(this, _Journal_segmentSize, "f")) {
|
|
76
|
-
__classPrivateFieldGet(this, _Journal_meta, "f").currentSegment = segIdx + 1;
|
|
77
|
-
}
|
|
78
|
-
await writeJson(this.backend, this.tenantId, META_PATH, __classPrivateFieldGet(this, _Journal_meta, "f"));
|
|
79
|
-
return full;
|
|
80
|
-
}
|
|
81
|
-
async oldestRetainedSeq() {
|
|
82
|
-
const first = await readJson(this.backend, this.tenantId, segmentPath(__classPrivateFieldGet(this, _Journal_meta, "f").oldestSegment));
|
|
83
|
-
if (!first || first.length === 0)
|
|
84
|
-
return 0;
|
|
85
|
-
return first[0].seq;
|
|
86
|
-
}
|
|
87
|
-
async changesSince(scope, cursor) {
|
|
88
|
-
var _a;
|
|
89
|
-
let startSeq = 0;
|
|
90
|
-
if (cursor) {
|
|
91
|
-
const decoded = Journal.decodeCursor(cursor);
|
|
92
|
-
if (!decoded)
|
|
93
|
-
return { entries: [], nextCursor: cursor, hasMore: false };
|
|
94
|
-
const oldest = await this.oldestRetainedSeq();
|
|
95
|
-
if (decoded.version < __classPrivateFieldGet(this, _Journal_meta, "f").version && decoded.seq < oldest) {
|
|
96
|
-
return { entries: [], nextCursor: cursor, hasMore: false, truncated: true };
|
|
97
|
-
}
|
|
98
|
-
startSeq = decoded.seq;
|
|
99
|
-
}
|
|
100
|
-
const out = [];
|
|
101
|
-
let lastSeq = startSeq;
|
|
102
|
-
let hasMore = false;
|
|
103
|
-
for (let seg = __classPrivateFieldGet(this, _Journal_meta, "f").oldestSegment; seg <= __classPrivateFieldGet(this, _Journal_meta, "f").currentSegment; seg++) {
|
|
104
|
-
const entries = (_a = (await readJson(this.backend, this.tenantId, segmentPath(seg)))) !== null && _a !== void 0 ? _a : [];
|
|
105
|
-
for (const e of entries) {
|
|
106
|
-
if (e.seq <= startSeq)
|
|
107
|
-
continue;
|
|
108
|
-
if (!matchesScope(e, scope)) {
|
|
109
|
-
lastSeq = e.seq;
|
|
110
|
-
continue;
|
|
111
|
-
}
|
|
112
|
-
if (out.length >= __classPrivateFieldGet(this, _Journal_pageSize, "f")) {
|
|
113
|
-
hasMore = true;
|
|
114
|
-
break;
|
|
115
|
-
}
|
|
116
|
-
out.push(e);
|
|
117
|
-
lastSeq = e.seq;
|
|
118
|
-
}
|
|
119
|
-
if (hasMore)
|
|
120
|
-
break;
|
|
121
|
-
}
|
|
122
|
-
return {
|
|
123
|
-
entries: out,
|
|
124
|
-
nextCursor: Journal.encodeCursor(lastSeq, __classPrivateFieldGet(this, _Journal_meta, "f").version),
|
|
125
|
-
hasMore,
|
|
126
|
-
};
|
|
127
|
-
}
|
|
128
|
-
async getCursor(connectorId) {
|
|
129
|
-
return readJson(this.backend, this.tenantId, cursorPath(connectorId));
|
|
130
|
-
}
|
|
131
|
-
async ackCursor(connectorId, cursor) {
|
|
132
|
-
await writeJson(this.backend, this.tenantId, cursorPath(connectorId), cursor);
|
|
133
|
-
}
|
|
134
|
-
async dropCursor(connectorId) {
|
|
135
|
-
await deletePath(this.backend, this.tenantId, cursorPath(connectorId));
|
|
136
|
-
}
|
|
137
|
-
async listCursors() {
|
|
138
|
-
const paths = await listJsonPaths(this.backend, this.tenantId, CURSOR_PREFIX);
|
|
139
|
-
const out = [];
|
|
140
|
-
for (const p of paths) {
|
|
141
|
-
const cursor = await readJson(this.backend, this.tenantId, p);
|
|
142
|
-
if (cursor === null)
|
|
143
|
-
continue;
|
|
144
|
-
const id = decodeURIComponent(p.slice(CURSOR_PREFIX.length, -'.json'.length));
|
|
145
|
-
out.push({ connectorId: id, cursor });
|
|
146
|
-
}
|
|
147
|
-
return out;
|
|
148
|
-
}
|
|
149
|
-
async minSeqAckedByAll(connectorIds) {
|
|
150
|
-
if (connectorIds.length === 0)
|
|
151
|
-
return 0;
|
|
152
|
-
let min = Infinity;
|
|
153
|
-
for (const id of connectorIds) {
|
|
154
|
-
const c = await this.getCursor(id);
|
|
155
|
-
const decoded = c ? Journal.decodeCursor(c) : null;
|
|
156
|
-
const seq = decoded ? decoded.seq : 0;
|
|
157
|
-
if (seq < min)
|
|
158
|
-
min = seq;
|
|
159
|
-
}
|
|
160
|
-
return min === Infinity ? 0 : min;
|
|
161
|
-
}
|
|
162
|
-
/** Test-only: simulate truncating all segments whose entries are <= uptoSeq. */
|
|
163
|
-
async __truncateForTest(uptoSeq) {
|
|
164
|
-
var _a;
|
|
165
|
-
for (let seg = __classPrivateFieldGet(this, _Journal_meta, "f").oldestSegment; seg <= __classPrivateFieldGet(this, _Journal_meta, "f").currentSegment; seg++) {
|
|
166
|
-
const entries = (_a = (await readJson(this.backend, this.tenantId, segmentPath(seg)))) !== null && _a !== void 0 ? _a : [];
|
|
167
|
-
const last = entries[entries.length - 1];
|
|
168
|
-
if (last && last.seq <= uptoSeq) {
|
|
169
|
-
await deletePath(this.backend, this.tenantId, segmentPath(seg));
|
|
170
|
-
__classPrivateFieldGet(this, _Journal_meta, "f").oldestSegment = seg + 1;
|
|
171
|
-
}
|
|
172
|
-
else
|
|
173
|
-
break;
|
|
174
|
-
}
|
|
175
|
-
__classPrivateFieldGet(this, _Journal_meta, "f").version += 1;
|
|
176
|
-
await writeJson(this.backend, this.tenantId, META_PATH, __classPrivateFieldGet(this, _Journal_meta, "f"));
|
|
177
|
-
}
|
|
178
|
-
}
|
|
179
|
-
_Journal_meta = new WeakMap(), _Journal_segmentSize = new WeakMap(), _Journal_pageSize = new WeakMap();
|
|
@@ -1 +0,0 @@
|
|
|
1
|
-
export {};
|
|
@@ -1,87 +0,0 @@
|
|
|
1
|
-
import { describe, it, expect, beforeEach } from 'vitest';
|
|
2
|
-
import { MemoryDocumentBackend } from '../backends';
|
|
3
|
-
import { Journal } from './journal';
|
|
4
|
-
describe('Journal', () => {
|
|
5
|
-
let backend;
|
|
6
|
-
let journal;
|
|
7
|
-
beforeEach(async () => {
|
|
8
|
-
backend = new MemoryDocumentBackend();
|
|
9
|
-
journal = new Journal(backend, 'tenant-a', { segmentSize: 3 });
|
|
10
|
-
await journal.init();
|
|
11
|
-
});
|
|
12
|
-
it('starts with no entries', async () => {
|
|
13
|
-
const page = await journal.changesSince({ kind: 'tenant' });
|
|
14
|
-
expect(page.entries).toEqual([]);
|
|
15
|
-
expect(page.hasMore).toBe(false);
|
|
16
|
-
});
|
|
17
|
-
it('appends and reads entries', async () => {
|
|
18
|
-
await journal.append({ shardId: 's1', path: 'a.md', op: 'upsert', hash: 'h1' });
|
|
19
|
-
await journal.append({ shardId: 's1', path: 'b.md', op: 'upsert', hash: 'h2' });
|
|
20
|
-
const page = await journal.changesSince({ kind: 'tenant' });
|
|
21
|
-
expect(page.entries.map((e) => e.path)).toEqual(['a.md', 'b.md']);
|
|
22
|
-
expect(page.entries[0].seq).toBe(1);
|
|
23
|
-
expect(page.entries[1].seq).toBe(2);
|
|
24
|
-
});
|
|
25
|
-
it('filters by shard scope', async () => {
|
|
26
|
-
await journal.append({ shardId: 's1', path: 'a.md', op: 'upsert', hash: 'h1' });
|
|
27
|
-
await journal.append({ shardId: 's2', path: 'b.md', op: 'upsert', hash: 'h2' });
|
|
28
|
-
const scoped = { kind: 'shard', shardId: 's1' };
|
|
29
|
-
const page = await journal.changesSince(scoped);
|
|
30
|
-
expect(page.entries.map((e) => e.path)).toEqual(['a.md']);
|
|
31
|
-
});
|
|
32
|
-
it('filters by path prefix', async () => {
|
|
33
|
-
await journal.append({ shardId: 's1', path: 'docs/x.md', op: 'upsert', hash: 'h1' });
|
|
34
|
-
await journal.append({ shardId: 's1', path: 'notes/y.md', op: 'upsert', hash: 'h2' });
|
|
35
|
-
const scoped = { kind: 'path', shardId: 's1', prefix: 'docs/' };
|
|
36
|
-
const page = await journal.changesSince(scoped);
|
|
37
|
-
expect(page.entries.map((e) => e.path)).toEqual(['docs/x.md']);
|
|
38
|
-
});
|
|
39
|
-
it('resumes from a cursor', async () => {
|
|
40
|
-
for (let i = 0; i < 5; i++) {
|
|
41
|
-
await journal.append({ shardId: 's1', path: `f${i}.md`, op: 'upsert', hash: `h${i}` });
|
|
42
|
-
}
|
|
43
|
-
const first = await journal.changesSince({ kind: 'tenant' });
|
|
44
|
-
// Walk through pages until end.
|
|
45
|
-
let cursor = first.nextCursor;
|
|
46
|
-
const seen = [...first.entries];
|
|
47
|
-
let hasMore = first.hasMore;
|
|
48
|
-
while (hasMore) {
|
|
49
|
-
const next = await journal.changesSince({ kind: 'tenant' }, cursor);
|
|
50
|
-
seen.push(...next.entries);
|
|
51
|
-
cursor = next.nextCursor;
|
|
52
|
-
hasMore = next.hasMore;
|
|
53
|
-
}
|
|
54
|
-
expect(seen.map((e) => e.seq)).toEqual([1, 2, 3, 4, 5]);
|
|
55
|
-
});
|
|
56
|
-
it('persists cursors per connector and returns only new entries after ack', async () => {
|
|
57
|
-
await journal.append({ shardId: 's1', path: 'a.md', op: 'upsert', hash: 'h1' });
|
|
58
|
-
const p1 = await journal.changesSince({ kind: 'tenant' });
|
|
59
|
-
await journal.ackCursor('conn-A', p1.nextCursor);
|
|
60
|
-
await journal.append({ shardId: 's1', path: 'b.md', op: 'upsert', hash: 'h2' });
|
|
61
|
-
const saved = await journal.getCursor('conn-A');
|
|
62
|
-
const p2 = await journal.changesSince({ kind: 'tenant' }, saved !== null && saved !== void 0 ? saved : undefined);
|
|
63
|
-
expect(p2.entries.map((e) => e.path)).toEqual(['b.md']);
|
|
64
|
-
});
|
|
65
|
-
it('reports truncated=true when a cursor predates oldest retained seq', async () => {
|
|
66
|
-
// Force many segments then truncate
|
|
67
|
-
for (let i = 0; i < 10; i++) {
|
|
68
|
-
await journal.append({ shardId: 's1', path: `f${i}.md`, op: 'upsert', hash: `h${i}` });
|
|
69
|
-
}
|
|
70
|
-
// Simulate truncation up to seq 6.
|
|
71
|
-
await journal.__truncateForTest(6);
|
|
72
|
-
const stale = Journal.encodeCursor(2, 0); // old version
|
|
73
|
-
const page = await journal.changesSince({ kind: 'tenant' }, stale);
|
|
74
|
-
expect(page.truncated).toBe(true);
|
|
75
|
-
expect(page.entries).toEqual([]);
|
|
76
|
-
});
|
|
77
|
-
it('minSeqAckedByAll returns the lowest acked seq across registered cursors', async () => {
|
|
78
|
-
await journal.append({ shardId: 's1', path: 'a.md', op: 'upsert', hash: 'h1' });
|
|
79
|
-
await journal.append({ shardId: 's1', path: 'b.md', op: 'upsert', hash: 'h2' });
|
|
80
|
-
await journal.append({ shardId: 's1', path: 'c.md', op: 'upsert', hash: 'h3' });
|
|
81
|
-
await journal.ackCursor('conn-A', Journal.encodeCursor(3, 0));
|
|
82
|
-
await journal.ackCursor('conn-B', Journal.encodeCursor(1, 0));
|
|
83
|
-
expect(await journal.minSeqAckedByAll(['conn-A', 'conn-B'])).toBe(1);
|
|
84
|
-
expect(await journal.minSeqAckedByAll(['conn-A'])).toBe(3);
|
|
85
|
-
expect(await journal.minSeqAckedByAll([])).toBe(0);
|
|
86
|
-
});
|
|
87
|
-
});
|
|
@@ -1,45 +0,0 @@
|
|
|
1
|
-
/*
|
|
2
|
-
* Observer factory for ctx.syncRegistry.
|
|
3
|
-
*
|
|
4
|
-
* Returns a lazy accessor that resolves to the same SyncRegistry instance
|
|
5
|
-
* backed by the per-tenant sync bundle. Observer-class shards (e.g. the
|
|
6
|
-
* file-explorer) and connector shards share one view of grants and
|
|
7
|
-
* conflicts.
|
|
8
|
-
*
|
|
9
|
-
* Gated upstream on the 'documents:browse' permission — granting remains
|
|
10
|
-
* exclusive to <SyncGrantPicker />.
|
|
11
|
-
*/
|
|
12
|
-
import { getSyncBundle } from './singleton';
|
|
13
|
-
export function createSyncRegistryAccessor(backend, tenantId) {
|
|
14
|
-
let cached = null;
|
|
15
|
-
let initPromise = null;
|
|
16
|
-
function resolve() {
|
|
17
|
-
if (cached)
|
|
18
|
-
return Promise.resolve(cached);
|
|
19
|
-
if (!initPromise) {
|
|
20
|
-
initPromise = getSyncBundle(backend, tenantId).then(({ registry }) => {
|
|
21
|
-
cached = registry;
|
|
22
|
-
return registry;
|
|
23
|
-
});
|
|
24
|
-
}
|
|
25
|
-
return initPromise;
|
|
26
|
-
}
|
|
27
|
-
return () => {
|
|
28
|
-
const proxy = {
|
|
29
|
-
async list(connectorId) {
|
|
30
|
-
return (await resolve()).list(connectorId);
|
|
31
|
-
},
|
|
32
|
-
async revoke(connectorId, scope) {
|
|
33
|
-
return (await resolve()).revoke(connectorId, scope);
|
|
34
|
-
},
|
|
35
|
-
listConflicts: (async (shardId) => {
|
|
36
|
-
const r = await resolve();
|
|
37
|
-
return shardId === undefined ? r.listConflicts() : r.listConflicts(shardId);
|
|
38
|
-
}),
|
|
39
|
-
async listAllConnectorIds() {
|
|
40
|
-
return (await resolve()).listAllConnectorIds();
|
|
41
|
-
},
|
|
42
|
-
};
|
|
43
|
-
return proxy;
|
|
44
|
-
};
|
|
45
|
-
}
|
|
@@ -1,13 +0,0 @@
|
|
|
1
|
-
import type { DocumentBackend } from '../types';
|
|
2
|
-
import type { ConflictResolution, GrantRecord, SyncScope } from './types';
|
|
3
|
-
export declare function __grantInternal(backend: DocumentBackend, tenantId: string, connectorId: string, scope: SyncScope): Promise<void>;
|
|
4
|
-
export interface SyncRegistry {
|
|
5
|
-
list(connectorId?: string): Promise<GrantRecord[]>;
|
|
6
|
-
revoke(connectorId: string, scope: SyncScope): Promise<void>;
|
|
7
|
-
/** Per-shard conflict enumeration. */
|
|
8
|
-
listConflicts(shardId: string): Promise<ConflictResolution[]>;
|
|
9
|
-
/** Tenant-wide conflict enumeration (fans out over every shard). */
|
|
10
|
-
listConflicts(): Promise<ConflictResolution[]>;
|
|
11
|
-
listAllConnectorIds(): Promise<string[]>;
|
|
12
|
-
}
|
|
13
|
-
export declare function createSyncRegistry(backend: DocumentBackend, tenantId: string): SyncRegistry;
|
|
@@ -1,73 +0,0 @@
|
|
|
1
|
-
/*
|
|
2
|
-
* Grant registry — public surface (revoke/list/listConflicts) for
|
|
3
|
-
* connectors, plus __grantInternal used only by the core-owned
|
|
4
|
-
* SyncGrantPicker component. Connectors do not import __grantInternal.
|
|
5
|
-
*/
|
|
6
|
-
import { readJson, writeJson, deletePath, listJsonPaths } from './serialization';
|
|
7
|
-
import { ConflictManager } from './conflicts';
|
|
8
|
-
const GRANTS_PREFIX = 'grants/';
|
|
9
|
-
function grantPath(connectorId) {
|
|
10
|
-
return `${GRANTS_PREFIX}${encodeURIComponent(connectorId)}.json`;
|
|
11
|
-
}
|
|
12
|
-
function scopesEqual(a, b) {
|
|
13
|
-
if (a.kind !== b.kind)
|
|
14
|
-
return false;
|
|
15
|
-
if (a.kind === 'tenant')
|
|
16
|
-
return true;
|
|
17
|
-
if (a.kind === 'shard' && b.kind === 'shard')
|
|
18
|
-
return a.shardId === b.shardId;
|
|
19
|
-
if (a.kind === 'path' && b.kind === 'path')
|
|
20
|
-
return a.shardId === b.shardId && a.prefix === b.prefix;
|
|
21
|
-
return false;
|
|
22
|
-
}
|
|
23
|
-
export async function __grantInternal(backend, tenantId, connectorId, scope) {
|
|
24
|
-
var _a;
|
|
25
|
-
const existing = (_a = (await readJson(backend, tenantId, grantPath(connectorId)))) !== null && _a !== void 0 ? _a : [];
|
|
26
|
-
if (existing.some((g) => scopesEqual(g.scope, scope)))
|
|
27
|
-
return;
|
|
28
|
-
existing.push({ connectorId, scope, grantedAt: Date.now() });
|
|
29
|
-
await writeJson(backend, tenantId, grantPath(connectorId), existing);
|
|
30
|
-
}
|
|
31
|
-
/*
 * Creates the public sync-registry surface for one tenant: list/revoke of
 * connector grants, conflict enumeration (per shard or tenant-wide), and
 * connector-id discovery. Granting itself goes through __grantInternal.
 *
 * Improvement over the previous version: the aggregate paths (list() with
 * no connectorId, listConflicts() with no shardId) awaited each backend
 * read sequentially inside a loop; the reads are independent, so they are
 * now issued concurrently via Promise.all. Promise.all preserves input
 * order, so the aggregated output order is unchanged.
 */
export function createSyncRegistry(backend, tenantId) {
    const conflicts = new ConflictManager(backend, tenantId);
    // Reads one connector's grant list; a missing file yields an empty list.
    async function readGrants(connectorId) {
        const stored = await readJson(backend, tenantId, grantPath(connectorId));
        return stored === null || stored === undefined ? [] : stored;
    }
    return {
        /**
         * Lists grants. With a connectorId, returns only that connector's
         * grants; without one, aggregates grants across every connector.
         */
        async list(connectorId) {
            if (connectorId)
                return readGrants(connectorId);
            const paths = await listJsonPaths(backend, tenantId, GRANTS_PREFIX);
            // Per-file reads are independent — fetch them in parallel.
            const files = await Promise.all(paths.map((p) => readJson(backend, tenantId, p)));
            const out = [];
            for (const arr of files) {
                if (arr)
                    out.push(...arr);
            }
            return out;
        },
        /**
         * Removes one scope from a connector's grants. When the last grant
         * is revoked the backing file is deleted (not left as an empty
         * array) so listAllConnectorIds stays accurate.
         */
        async revoke(connectorId, scope) {
            const grants = await readGrants(connectorId);
            const next = grants.filter((g) => !scopesEqual(g.scope, scope));
            if (next.length === 0)
                await deletePath(backend, tenantId, grantPath(connectorId));
            else
                await writeJson(backend, tenantId, grantPath(connectorId), next);
        },
        /**
         * Lists conflicts for one shard, or — when called with no argument —
         * fans out over every shard in the tenant.
         */
        async listConflicts(shardId) {
            if (shardId !== undefined)
                return conflicts.listConflicts(shardId);
            const shards = await backend.listAllShards(tenantId);
            // Shards are independent — enumerate their conflicts concurrently.
            const perShard = await Promise.all(shards.map((s) => conflicts.listConflicts(s)));
            const out = [];
            for (const shardConflicts of perShard) {
                out.push(...shardConflicts);
            }
            return out;
        },
        /** Recovers connector ids by undoing grantPath's encoding on each grant file path. */
        async listAllConnectorIds() {
            const paths = await listJsonPaths(backend, tenantId, GRANTS_PREFIX);
            return paths.map((p) => decodeURIComponent(p.slice(GRANTS_PREFIX.length, -'.json'.length)));
        },
    };
}
|
|
@@ -1 +0,0 @@
|
|
|
1
|
-
export {};
|
|
@@ -1,53 +0,0 @@
|
|
|
1
|
-
/*
 * Unit tests for the sync grant registry (vitest). Runs against the
 * in-memory document backend so no real storage is touched; each test
 * gets a fresh backend and registry via beforeEach.
 */
import { describe, it, expect, beforeEach, vi } from 'vitest';
import { MemoryDocumentBackend } from '../backends';
import { createSyncRegistry, __grantInternal } from './registry';
describe('syncRegistry', () => {
    let backend;
    let reg;
    beforeEach(() => {
        backend = new MemoryDocumentBackend();
        reg = createSyncRegistry(backend, 'tenant-a');
    });
    it('list returns [] when nothing granted', async () => {
        expect(await reg.list()).toEqual([]);
    });
    it('__grantInternal records and list returns it', async () => {
        // Freeze the clock so the recorded grantedAt timestamp is deterministic.
        vi.useFakeTimers().setSystemTime(new Date(1000));
        await __grantInternal(backend, 'tenant-a', 'conn-A', { kind: 'tenant' });
        const all = await reg.list();
        expect(all).toEqual([
            { connectorId: 'conn-A', scope: { kind: 'tenant' }, grantedAt: 1000 },
        ]);
        vi.useRealTimers();
    });
    it('list filters by connectorId', async () => {
        await __grantInternal(backend, 'tenant-a', 'conn-A', { kind: 'shard', shardId: 's1' });
        await __grantInternal(backend, 'tenant-a', 'conn-B', { kind: 'tenant' });
        const a = await reg.list('conn-A');
        expect(a).toHaveLength(1);
        expect(a[0].connectorId).toBe('conn-A');
    });
    it('revoke removes one scope, leaving other grants intact', async () => {
        await __grantInternal(backend, 'tenant-a', 'conn-A', { kind: 'shard', shardId: 's1' });
        await __grantInternal(backend, 'tenant-a', 'conn-A', { kind: 'shard', shardId: 's2' });
        await reg.revoke('conn-A', { kind: 'shard', shardId: 's1' });
        const all = await reg.list('conn-A');
        expect(all.map((g) => g.scope.shardId)).toEqual(['s2']);
    });
    it('__grantInternal is idempotent for same scope', async () => {
        await __grantInternal(backend, 'tenant-a', 'conn-A', { kind: 'tenant' });
        await __grantInternal(backend, 'tenant-a', 'conn-A', { kind: 'tenant' });
        expect((await reg.list('conn-A')).length).toBe(1);
    });
    it('listConflicts() with no args returns conflicts across all shards', async () => {
        var _a, _b;
        // Seed conflict artifacts directly via the backend (matches
        // ConflictManager's artifact naming).
        await backend.write('tenant-a', 'shard-a', 'doc.md.sync-conflict-connA-111', 'remote-a');
        await backend.write('tenant-a', 'shard-b', 'other.md.sync-conflict-connB-222', 'remote-b');
        const all = await reg.listConflicts();
        expect(all.map((c) => c.shardId).sort()).toEqual(['shard-a', 'shard-b']);
        expect((_a = all.find((c) => c.shardId === 'shard-a')) === null || _a === void 0 ? void 0 : _a.path).toBe('doc.md');
        expect((_b = all.find((c) => c.shardId === 'shard-b')) === null || _b === void 0 ? void 0 : _b.path).toBe('other.md');
    });
});
|
|
@@ -1,5 +0,0 @@
|
|
|
1
|
-
import type { DocumentBackend } from '../types';
/** Reads and JSON-parses a sync-metadata file; resolves null when the file is absent. */
export declare function readJson<T>(backend: DocumentBackend, tenantId: string, path: string): Promise<T | null>;
/** Serializes `value` as JSON and writes it to the sync-metadata store at `path`. */
export declare function writeJson(backend: DocumentBackend, tenantId: string, path: string, value: unknown): Promise<void>;
/** Deletes the sync-metadata file at `path`. */
export declare function deletePath(backend: DocumentBackend, tenantId: string, path: string): Promise<void>;
/** Lists sync-metadata paths whose path starts with `prefix`. */
export declare function listJsonPaths(backend: DocumentBackend, tenantId: string, prefix: string): Promise<string[]>;
|
|
@@ -1,24 +0,0 @@
|
|
|
1
|
-
/*
|
|
2
|
-
* JSON storage helpers on top of DocumentBackend. All sync metadata
|
|
3
|
-
* lives under the reserved shardId SYNC_RESERVED_SHARD_ID, scoped per
|
|
4
|
-
* tenant by the backend. Callers pass tenantId explicitly; tenant
|
|
5
|
-
* scoping is enforced at the SyncHandle/engine layer.
|
|
6
|
-
*/
|
|
7
|
-
import { SYNC_RESERVED_SHARD_ID } from './types';
|
|
8
|
-
/**
 * Reads and JSON-parses the document at `path` inside the reserved sync
 * shard. Returns null when the backend has nothing stored there. Handles
 * both string and binary payloads (binary is decoded as UTF-8 text).
 */
export async function readJson(backend, tenantId, path) {
    const stored = await backend.read(tenantId, SYNC_RESERVED_SHARD_ID, path);
    if (stored === null) {
        return null;
    }
    const text = typeof stored === 'string' ? stored : new TextDecoder().decode(stored);
    return JSON.parse(text);
}
|
|
15
|
-
/** Serializes `value` as JSON and stores it at `path` in the reserved sync shard. */
export async function writeJson(backend, tenantId, path, value) {
    const payload = JSON.stringify(value);
    await backend.write(tenantId, SYNC_RESERVED_SHARD_ID, path, payload);
}
|
|
18
|
-
/** Removes the sync-metadata document at `path` from the reserved sync shard. */
export async function deletePath(backend, tenantId, path) {
    return backend.delete(tenantId, SYNC_RESERVED_SHARD_ID, path).then(() => undefined);
}
|
|
21
|
-
/**
 * Lists paths in the reserved sync shard that begin with `prefix`.
 * The backend returns metadata records; only their `path` field is kept.
 */
export async function listJsonPaths(backend, tenantId, prefix) {
    const metas = await backend.list(tenantId, SYNC_RESERVED_SHARD_ID);
    const matching = [];
    for (const meta of metas) {
        if (meta.path.startsWith(prefix)) {
            matching.push(meta.path);
        }
    }
    return matching;
}
|
|
@@ -1 +0,0 @@
|
|
|
1
|
-
export {};
|