@noy-db/to-probe 0.1.0-pre.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/LICENSE ADDED
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2026 vLannaAi
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
package/README.md ADDED
@@ -0,0 +1,33 @@
1
+ # @noy-db/to-probe
2
+
3
+ [![npm](https://img.shields.io/npm/v/%40noy-db/to-probe.svg)](https://www.npmjs.com/package/@noy-db/to-probe)
4
+
5
+ > Diagnostic companion for the @noy-db/to-* store family
6
+
7
+ Part of [**`@noy-db/hub`**](https://www.npmjs.com/package/@noy-db/hub) — the zero-knowledge, offline-first, encrypted document store.
8
+
9
+ ## Install
10
+
11
+ ```bash
12
+ pnpm add @noy-db/hub @noy-db/to-probe
13
+ ```
14
+
15
+ ## What it is
16
+
17
+ Diagnostic companion for the @noy-db/to-* store family — not itself a storage backend. Setup-time suitability test + topology check + runtime reliability monitor. Exercises the 6-method NoydbStore contract across five axes (write latency, CAS integrity, hydration cost, sync economics, network resilience) and produces a structured risk-scored report.
18
+
19
+ ## Status
20
+
21
+ **Pre-release** (`0.1.0-pre.3`). API may change before `1.0`.
22
+
23
+ ## Documentation
24
+
25
+ See the [main repository](https://github.com/vLannaAi/noy-db#readme) for setup, examples, and the full subsystem catalog.
26
+
27
+ - Source — [`packages/to-probe`](https://github.com/vLannaAi/noy-db/tree/main/packages/to-probe)
28
+ - Issues — [github.com/vLannaAi/noy-db/issues](https://github.com/vLannaAi/noy-db/issues)
29
+ - Spec — [`SPEC.md`](https://github.com/vLannaAi/noy-db/blob/main/SPEC.md)
30
+
31
+ ## License
32
+
33
+ [MIT](./LICENSE) © vLannaAi
package/dist/index.cjs ADDED
@@ -0,0 +1,331 @@
1
"use strict";
// esbuild-generated CommonJS interop helpers.
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Install every entry of `all` on `target` as a lazy, enumerable getter
// (the export bindings resolve at access time, not at definition time).
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
// Copy own properties of `from` onto `to` as getters, skipping `except`
// and anything `to` already owns; enumerability of each source property
// is preserved via its descriptor.
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
// Wrap a module-exports object with the `__esModule` marker so ESM
// consumers interop cleanly with this CJS bundle.
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
19
+
20
// src/index.ts
// Public surface of the bundle: the two probe entry points, exported
// lazily so the function declarations further down resolve on access.
var index_exports = {};
__export(index_exports, {
  probeTopology: () => probeTopology,
  runStoreProbe: () => runStoreProbe
});
module.exports = __toCommonJS(index_exports);
27
+
28
// src/probe.ts
// Default vault/collection the probe writes its synthetic envelopes to;
// callers can override both via options.vault / options.collection.
var PROBE_VAULT = "probe-vault";
var PROBE_COLLECTION = "probe-benchmark";
31
/**
 * Run the full five-axis probe (write latency, CAS integrity, hydration
 * cost, sync economics, network resilience) against `store` and return
 * a structured report with a risk-scored suitability verdict.
 */
async function runStoreProbe(store, options = {}) {
  const startedAt = Date.now();
  const vaultName = options.vault ?? PROBE_VAULT;
  const collectionName = options.collection ?? PROBE_COLLECTION;
  // Per-run id keeps record ids from colliding across invocations.
  const runId = Date.now().toString(36);

  // Axes run sequentially on purpose — overlapping benchmarks would
  // pollute each other's timings.
  const write = await probeWrite(store, vaultName, collectionName, runId, options);
  const cas = await probeCas(store, vaultName, collectionName, runId, options);
  const hydration = await probeHydration(store, vaultName, collectionName, runId, options);
  const sync = await probeSync(store, vaultName, collectionName, runId, options);
  const network = await probeNetwork(store);

  const capabilities = options.capabilities ?? null;
  const risks = collectRisks(options, write, cas, hydration, sync, network, capabilities);
  const suitability = score(risks);

  // Best effort only — cleanup failure never fails the probe.
  await bestEffortCleanup(store, vaultName, collectionName);

  const report = {
    store: store.name ?? "unnamed",
    capabilities,
    write,
    cas,
    hydration,
    sync,
    network,
    suitability,
    durationMs: Date.now() - startedAt,
    probedAt: new Date().toISOString()
  };
  return report;
}
58
/**
 * D1 — write responsiveness: one cold-start put, `writeSampleSize`
 * serial puts (timed individually), and five concurrent batches of ten
 * puts (timed per batch).
 */
async function probeWrite(store, vault, collection, runId, options) {
  const sampleCount = options.writeSampleSize ?? 20;

  // Cold start: a single isolated write.
  const coldTick = Date.now();
  await store.put(vault, collection, `w-${runId}-cold`, envelope(1));
  const coldMs = Date.now() - coldTick;

  // Serial samples — one latency reading per put.
  const serialSamples = [];
  for (let i = 0; i < sampleCount; i += 1) {
    const tick = Date.now();
    await store.put(vault, collection, `w-${runId}-s-${i}`, envelope(1));
    serialSamples.push(Date.now() - tick);
  }

  // Concurrent samples: 5 batches of 10 parallel puts, timed per batch.
  const concurrentSamples = [];
  for (let batch = 0; batch < 5; batch += 1) {
    const tick = Date.now();
    const puts = [];
    for (let j = 0; j < 10; j += 1) {
      puts.push(store.put(vault, collection, `w-${runId}-c-${batch}-${j}`, envelope(1)));
    }
    await Promise.all(puts);
    concurrentSamples.push(Date.now() - tick);
  }

  return {
    coldStart: coldMs,
    serial: stats(serialSamples),
    concurrent: stats(concurrentSamples)
  };
}
87
/**
 * D2 — conflict integrity: seed one record at version 1, then fire N
 * parallel puts that all claim expectedVersion=1 and count how many
 * were accepted. An atomic-CAS store should let exactly one through.
 */
async function probeCas(store, vault, collection, runId, options) {
  const concurrency = options.casConcurrency ?? 10;
  const recordId = `cas-${runId}`;

  // Seed at version 1.
  await store.put(vault, collection, recordId, envelope(1));

  const attempts = [];
  for (let i = 0; i < concurrency; i += 1) {
    attempts.push(store.put(vault, collection, recordId, envelope(2, i), 1));
  }
  const outcomes = await Promise.allSettled(attempts);

  let successes = 0;
  for (const outcome of outcomes) {
    if (outcome.status === "fulfilled") successes += 1;
  }
  const rejections = outcomes.length - successes;

  // Interpret the result against what the store declared about itself.
  const declaredAtomic = options.capabilities?.casAtomic ?? null;
  const expected = declaredAtomic === false ? "multiple-ok" : "exactly-one";
  return { concurrent: concurrency, successes, rejections, expected };
}
103
/**
 * D3 — hydration cost: top the probe collection up to
 * `hydrationRecords` envelopes, then time a full `loadAll()` and
 * estimate bytes per loaded record.
 */
async function probeHydration(store, vault, collection, runId, options) {
  const targetRecords = options.hydrationRecords ?? 100;

  // Earlier axes already wrote some envelopes; only top up the rest.
  const existing = await store.list(vault, collection);
  for (let i = existing.length; i < targetRecords; i += 1) {
    await store.put(vault, collection, `h-${runId}-${i}`, envelope(1));
  }

  const tick = Date.now();
  const snapshot = await store.loadAll(vault);
  const loadAllMs = Date.now() - tick;

  const totalBytes = estimateBytes(snapshot);
  let loaded = 0;
  for (const coll of Object.values(snapshot)) {
    loaded += Object.keys(coll).length;
  }
  const perRecordBytes = loaded > 0 ? Math.round(totalBytes / loaded) : 0;

  return { records: loaded, loadAllMs, totalBytes, perRecordBytes };
}
120
/**
 * D4 — sync economics: time one single-record push and a sequential
 * batch of `syncBatchSize` pushes, plus an approximate bytes-per-push.
 */
async function probeSync(store, vault, collection, runId, options) {
  const batchSize = options.syncBatchSize ?? 50;

  // Single-record push.
  const singleTick = Date.now();
  await store.put(vault, collection, `sync-${runId}-single`, envelope(1));
  const singlePushMs = Date.now() - singleTick;

  // Batch push, simulated with sequential puts — the store contract
  // exposes no bulk write.
  const batchTick = Date.now();
  for (let i = 0; i < batchSize; i += 1) {
    await store.put(vault, collection, `sync-${runId}-b-${i}`, envelope(1));
  }
  const batchPushMs = Date.now() - batchTick;

  return { singlePushMs, batchPushMs, batchSize, bytesPerPush: approxEnvelopeBytes() };
}
133
/**
 * D5 — network resilience: report whether the store exposes `ping()`
 * and, when it does, one round-trip latency (`pingMs` is null when the
 * ping throws).
 */
async function probeNetwork(store) {
  const canPing = typeof store.ping === "function";
  if (!canPing) {
    return { pingSupported: false, pingMs: null };
  }
  const tick = Date.now();
  try {
    await store.ping();
  } catch {
    // Ping exists but failed — supported, latency unknown.
    return { pingSupported: true, pingMs: null };
  }
  return { pingSupported: true, pingMs: Date.now() - tick };
}
145
/**
 * Fold the per-axis measurements into a flat list of risks, comparing
 * each against its (optionally overridden) threshold. Risk order is
 * stable: write, hydration, sync, CAS mismatch, CAS unsupported, ping.
 */
function collectRisks(options, write, cas, hydration, sync, network, capabilities) {
  const slowWriteMs = options.slowWriteMs ?? 100;
  const slowHydrationMs = options.slowHydrationMs ?? 500;
  const slowSyncMs = options.slowSyncMs ?? 250;

  const risks = [];
  const warn = (code, message) => risks.push({ code, severity: "warn", message });

  if (write.serial.p99 > slowWriteMs) {
    warn("slow-write-p99", `Serial write p99 ${write.serial.p99}ms exceeds threshold ${slowWriteMs}ms`);
  }
  if (hydration.loadAllMs > slowHydrationMs) {
    warn("slow-hydration", `loadAll(${hydration.records}) took ${hydration.loadAllMs}ms (threshold ${slowHydrationMs}ms)`);
  }
  if (sync.singlePushMs > slowSyncMs) {
    warn("slow-sync", `Single-record push ${sync.singlePushMs}ms exceeds ${slowSyncMs}ms`);
  }
  // A store that declared atomic CAS must let exactly one concurrent
  // put through — more than one is a contract violation, hence error.
  if (capabilities?.casAtomic === true && cas.successes > 1) {
    risks.push({
      code: "cas-mismatch",
      severity: "error",
      message: `Store declared casAtomic:true but ${cas.successes}/${cas.concurrent} concurrent puts succeeded (expected exactly 1)`
    });
  }
  if (capabilities?.casAtomic === false) {
    warn("cas-unsupported", "Store lacks atomic CAS \u2014 unsafe for multi-writer sync-peer role");
  }
  if (!network.pingSupported) {
    warn("no-ping", "Store has no ping() \u2014 runtime monitor will rely on list() as liveness check");
  }

  return risks;
}
194
/**
 * Map a risk list to recommended store roles: any error-severity risk
 * disqualifies everything; a slow-write warning drops 'primary'; a
 * missing-CAS warning drops 'sync-peer'.
 */
function score(risks) {
  const has = (pred) => risks.some(pred);
  const recommended = [];
  if (!has((r) => r.severity === "error")) {
    if (!has((r) => r.code === "slow-write-p99")) recommended.push("primary");
    if (!has((r) => r.code === "cas-unsupported")) recommended.push("sync-peer");
    recommended.push("backup", "archive");
  }
  return { recommended, risks };
}
206
/**
 * Build a synthetic EncryptedEnvelope-shaped payload. Nothing is truly
 * encrypted — the store never decrypts, so `_data` only has to survive
 * the store's JSON round-tripping as base64-looking text.
 */
function envelope(version, seed = 0) {
  const marker = `probe-${version}-${seed}`.padEnd(64, "x");
  return {
    _noydb: 1,
    _v: version,
    _ts: new Date().toISOString(),
    _iv: base64Encode("0".repeat(12)),
    _data: base64Encode(marker)
  };
}
217
/**
 * UTF-8 → base64. Uses Buffer where available (Node); in browsers it
 * encodes to UTF-8 bytes with TextEncoder and base64s them via btoa,
 * replacing the deprecated `unescape()/encodeURIComponent` trick.
 */
function base64Encode(s) {
  if (typeof Buffer !== "undefined") return Buffer.from(s, "utf-8").toString("base64");
  let binary = "";
  for (const byte of new TextEncoder().encode(s)) {
    binary += String.fromCharCode(byte);
  }
  return btoa(binary);
}
221
/** Approximate serialized size, in bytes, of a single probe envelope. */
function approxEnvelopeBytes() {
  const sample = envelope(1);
  return JSON.stringify(sample).length;
}
224
/** Sum the JSON-serialized size of every record in a vault snapshot. */
function estimateBytes(snapshot) {
  let total = 0;
  for (const collection of Object.values(snapshot)) {
    total += Object.values(collection).reduce(
      (sum, record) => sum + JSON.stringify(record).length,
      0
    );
  }
  return total;
}
233
/** Latency summary (count / p50 / p99 / max) over raw ms samples. */
function stats(samples) {
  if (samples.length === 0) return { count: 0, p50: 0, p99: 0, max: 0 };
  // Sort a copy ascending — the caller's array must not be mutated.
  const ordered = samples.slice().sort((a, b) => a - b);
  return {
    count: ordered.length,
    p50: percentile(ordered, 0.5),
    p99: percentile(ordered, 0.99),
    max: ordered[ordered.length - 1]
  };
}
243
/** Nearest-rank percentile over an ascending-sorted sample array. */
function percentile(sorted, q) {
  const rank = Math.floor(q * sorted.length);
  return sorted[Math.min(sorted.length - 1, rank)];
}
247
/**
 * Delete everything the probe wrote. All failures (listing and
 * per-record deletes alike) are deliberately swallowed — cleanup
 * trouble must never turn a successful probe into a failure.
 */
async function bestEffortCleanup(store, vault, collection) {
  try {
    const ids = await store.list(vault, collection);
    const deletions = ids.map((id) => store.delete(vault, collection, id).catch(() => {}));
    await Promise.all(deletions);
  } catch {
    // Intentionally silent.
  }
}
255
+
256
// src/topology.ts
/**
 * Multi-store health check: probe the primary store and every sync
 * target independently, then layer topology-level rules that only make
 * sense across the whole store graph. `recommended` is true iff no
 * error-severity risk appears anywhere in the combined report.
 */
async function probeTopology(options) {
  const startedAt = Date.now();
  const expectedUsers = options.expectedUsers ?? 1;

  const primary = await runStoreProbe(options.store, options);

  const targets = [];
  for (const entry of options.sync ?? []) {
    const label = entry.label ?? entry.store.name ?? entry.role;
    // Each target probes into its own vault so runs don't collide.
    const report = await runStoreProbe(entry.store, { ...options, vault: `_probe-${label}` });
    targets.push({ ...report, role: entry.role, label });
  }

  const topology = evaluateTopology(options.store, primary, targets, options.sync, expectedUsers);

  const errorCount = [
    ...primary.suitability.risks,
    ...targets.flatMap((t) => t.suitability.risks),
    ...topology
  ].filter((risk) => risk.severity === "error").length;

  return {
    primary,
    targets,
    topology,
    recommended: errorCount === 0,
    durationMs: Date.now() - startedAt,
    probedAt: new Date().toISOString()
  };
}
282
/**
 * Cross-store topology rules. Each sync target is examined next to the
 * primary report; rules emit warn/error risks (bundle-shaped sync-peer,
 * non-atomic CAS with >1 user, primary slower than peer, archive with a
 * pull policy). `target.store` here is the store's name string from the
 * per-store report.
 */
function evaluateTopology(_primaryStore, primary, targets, syncTargets = [], expectedUsers) {
  const risks = [];
  for (let i = 0; i < targets.length; i += 1) {
    const target = targets[i];
    const input = syncTargets[i];
    const label = target.label;
    const isSyncPeer = target.role === "sync-peer";

    // Bundle-shaped backends (drive/webdav/git) are detected by name
    // heuristic — see looksLikeBundleStore.
    if (isSyncPeer && looksLikeBundleStore(target.store)) {
      risks.push({
        target: label,
        code: "bundle-as-sync-peer",
        severity: "warn",
        message: `"${label}" looks bundle-shaped \u2014 use role 'backup' or 'archive' for push-only semantics`
      });
    }

    if (isSyncPeer && expectedUsers > 1 && target.capabilities?.casAtomic === false) {
      risks.push({
        target: label,
        code: "no-atomic-cas-sync-peer",
        severity: "error",
        message: `"${label}" has casAtomic:false \u2014 unsafe as sync-peer for ${expectedUsers} concurrent users`
      });
    }

    if (isSyncPeer && primary.write.serial.p99 > target.write.serial.p99 * 2) {
      risks.push({
        target: label,
        code: "primary-slower-than-peer",
        severity: "warn",
        message: `Primary p99 ${primary.write.serial.p99}ms is >2\xD7 peer "${label}" p99 ${target.write.serial.p99}ms \u2014 unusual topology`
      });
    }

    if (target.role === "archive" && input?.hasPullPolicy === true) {
      risks.push({
        target: label,
        code: "archive-pull-configured",
        severity: "error",
        message: `"${label}" is an archive target but has a pull policy \u2014 archives are push-only`
      });
    }
  }
  return risks;
}
322
/**
 * Name-based heuristic: a store whose name mentions drive, webdav, git
 * or bundle (case-insensitive) is treated as bundle-shaped.
 */
function looksLikeBundleStore(name) {
  return /drive|webdav|git|bundle/.test(name.toLowerCase());
}
326
// Annotate the CommonJS export names for ESM import in node:
// (dead branch — `0 &&` never executes; the literal object only lets
// Node's CJS named-export detection see probeTopology/runStoreProbe)
0 && (module.exports = {
  probeTopology,
  runStoreProbe
});
//# sourceMappingURL=index.cjs.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/index.ts","../src/probe.ts","../src/topology.ts"],"sourcesContent":["/**\n * **@noy-db/to-probe** — diagnostic companion for the `@noy-db/to-*`\n * store family. **Not itself a storage backend** — it exercises other\n * stores and reports on their suitability.\n *\n * Two surfaces:\n *\n * ```ts\n * import { runStoreProbe, probeTopology } from '@noy-db/to-probe'\n *\n * // Single-store pre-flight check\n * const report = await runStoreProbe(myStore, { capabilities: { casAtomic: true, ... } })\n * if (!report.suitability.recommended.includes('primary')) {\n * // Surface risks to user, or acknowledge and continue\n * }\n *\n * // Multi-store topology check — primary + sync targets as one report\n * const topology = await probeTopology({\n * store: browserIdb({ prefix: 'app' }),\n * sync: [\n * { store: dynamo({ table: 'live' }), role: 'sync-peer', label: 'live' },\n * { store: s3({ bucket: 'archive' }), role: 'backup', label: 'backup' },\n * ],\n * expectedUsers: 3,\n * })\n * ```\n *\n * For runtime metrics on real traffic (not synthetic benchmarks),\n * compose with `@noy-db/to-meter` — the meter wraps a store as a\n * pass-through adapter and tracks live op latency/errors.\n *\n * @packageDocumentation\n */\n\nexport { runStoreProbe } from './probe.js'\nexport { probeTopology } from './topology.js'\n\nexport type {\n ProbeOptions,\n ProbeRisk,\n ProbeRiskCode,\n ProbeRole,\n StoreProbeReport,\n SuitabilityScore,\n LatencyStats,\n WriteAxis,\n CasAxis,\n HydrationAxis,\n SyncAxis,\n NetworkAxis,\n TopologyProbeOptions,\n TopologyProbeReport,\n TopologyRisk,\n TopologyTargetReport,\n} from './types.js'\n","/**\n * `runStoreProbe()` — setup-time suitability test for a `NoydbStore`.\n *\n * Five measurement axes (D1-D5 per spec in issue ):\n *\n * | Axis | Measures |\n * |------|----------|\n * | D1 — Write responsiveness | serial + concurrent put p50/p99, cold-start |\n * | D2 — Conflict integrity | N parallel puts with same 
`expectedVersion` |\n * | D3 — Hydration cost | `loadAll()` time and record-size footprint |\n * | D4 — Sync economics | single + batch `put` cost, bytes/push |\n * | D5 — Network resilience | `ping()` support + latency |\n *\n * Writes happen to an isolated `_probe / _probe` collection that the\n * probe cleans up on completion. The probe does not mutate real\n * application data — but if a probe is interrupted, stray envelopes\n * may remain under that collection. Adopters can safely delete\n * anything under the `_probe` vault.\n *\n * The probe never decrypts anything. It operates at the `NoydbStore`\n * layer with handcrafted {@link EncryptedEnvelope}-shaped payloads — a\n * probe run produces no keyring, no DEK, and no plaintext the store\n * can see.\n *\n * @module\n */\nimport type { EncryptedEnvelope, NoydbStore, StoreCapabilities, VaultSnapshot } from '@noy-db/hub'\nimport type {\n CasAxis,\n HydrationAxis,\n LatencyStats,\n NetworkAxis,\n ProbeOptions,\n ProbeRisk,\n ProbeRole,\n StoreProbeReport,\n SuitabilityScore,\n SyncAxis,\n WriteAxis,\n} from './types.js'\n\nconst PROBE_VAULT = 'probe-vault'\nconst PROBE_COLLECTION = 'probe-benchmark'\n\n/**\n * Run the full 5-axis probe against `store`. Returns a structured\n * report with per-axis measurements and a {@link SuitabilityScore}.\n *\n * The probe is **idempotent-per-run**: it picks unique record IDs per\n * invocation using a monotonically increasing counter seeded by\n * `Date.now()`, so concurrent probe runs against the same store do\n * not collide.\n */\nexport async function runStoreProbe(\n store: NoydbStore,\n options: ProbeOptions = {},\n): Promise<StoreProbeReport> {\n const started = Date.now()\n const vault = options.vault ?? PROBE_VAULT\n const collection = options.collection ?? 
PROBE_COLLECTION\n const runId = Date.now().toString(36)\n\n const write = await probeWrite(store, vault, collection, runId, options)\n const cas = await probeCas(store, vault, collection, runId, options)\n const hydration = await probeHydration(store, vault, collection, runId, options)\n const sync = await probeSync(store, vault, collection, runId, options)\n const network = await probeNetwork(store)\n\n const capabilities = options.capabilities ?? null\n const risks = collectRisks(options, write, cas, hydration, sync, network, capabilities)\n const suitability = score(risks)\n\n await bestEffortCleanup(store, vault, collection)\n\n return {\n store: store.name ?? 'unnamed',\n capabilities,\n write, cas, hydration, sync, network,\n suitability,\n durationMs: Date.now() - started,\n probedAt: new Date().toISOString(),\n }\n}\n\n// ── D1 · write latency ────────────────────────────────────────────────────\n\nasync function probeWrite(\n store: NoydbStore,\n vault: string,\n collection: string,\n runId: string,\n options: ProbeOptions,\n): Promise<WriteAxis> {\n const n = options.writeSampleSize ?? 
20\n\n // Cold start — single isolated write\n const coldId = `w-${runId}-cold`\n const coldStart = Date.now()\n await store.put(vault, collection, coldId, envelope(1))\n const coldMs = Date.now() - coldStart\n\n // Serial sample\n const serialSamples: number[] = []\n for (let i = 0; i < n; i++) {\n const t0 = Date.now()\n await store.put(vault, collection, `w-${runId}-s-${i}`, envelope(1))\n serialSamples.push(Date.now() - t0)\n }\n\n // Concurrent sample: 5 batches of 10, measured per-batch\n const concurrentSamples: number[] = []\n for (let batch = 0; batch < 5; batch++) {\n const t0 = Date.now()\n await Promise.all(\n Array.from({ length: 10 }, (_, j) =>\n store.put(vault, collection, `w-${runId}-c-${batch}-${j}`, envelope(1)),\n ),\n )\n concurrentSamples.push(Date.now() - t0)\n }\n\n return {\n coldStart: coldMs,\n serial: stats(serialSamples),\n concurrent: stats(concurrentSamples),\n }\n}\n\n// ── D2 · CAS integrity ────────────────────────────────────────────────────\n\nasync function probeCas(\n store: NoydbStore,\n vault: string,\n collection: string,\n runId: string,\n options: ProbeOptions,\n): Promise<CasAxis> {\n const concurrency = options.casConcurrency ?? 10\n const id = `cas-${runId}`\n\n // Seed with version 1\n await store.put(vault, collection, id, envelope(1))\n\n // Fire N concurrent puts all with expectedVersion=1. For a casAtomic\n // store: exactly one should succeed; the rest should reject with\n // ConflictError.\n const settled = await Promise.allSettled(\n Array.from({ length: concurrency }, (_, i) =>\n store.put(vault, collection, id, envelope(2, i), 1),\n ),\n )\n const successes = settled.filter((r) => r.status === 'fulfilled').length\n const rejections = settled.length - successes\n\n // What the store promised\n const declaredAtomic = options.capabilities?.casAtomic ?? null\n const expected = declaredAtomic === false ? 
'multiple-ok' : 'exactly-one'\n\n return { concurrent: concurrency, successes, rejections, expected }\n}\n\n// ── D3 · hydration ────────────────────────────────────────────────────────\n\nasync function probeHydration(\n store: NoydbStore,\n vault: string,\n collection: string,\n runId: string,\n options: ProbeOptions,\n): Promise<HydrationAxis> {\n const records = options.hydrationRecords ?? 100\n\n // Fill the probe collection to the target record count. Writes from\n // D1/D2 already contributed some envelopes; we top up the rest.\n const existing = await store.list(vault, collection)\n for (let i = existing.length; i < records; i++) {\n await store.put(vault, collection, `h-${runId}-${i}`, envelope(1))\n }\n\n const t0 = Date.now()\n const snapshot = await store.loadAll(vault)\n const loadAllMs = Date.now() - t0\n\n const totalBytes = estimateBytes(snapshot)\n const loaded = Object.values(snapshot).reduce(\n (sum, coll) => sum + Object.keys(coll).length,\n 0,\n )\n const perRecordBytes = loaded > 0 ? Math.round(totalBytes / loaded) : 0\n\n return { records: loaded, loadAllMs, totalBytes, perRecordBytes }\n}\n\n// ── D4 · sync economics ───────────────────────────────────────────────────\n\nasync function probeSync(\n store: NoydbStore,\n vault: string,\n collection: string,\n runId: string,\n options: ProbeOptions,\n): Promise<SyncAxis> {\n const batchSize = options.syncBatchSize ?? 
50\n\n // Single-record push\n const singleStart = Date.now()\n await store.put(vault, collection, `sync-${runId}-single`, envelope(1))\n const singlePushMs = Date.now() - singleStart\n\n // Batch push (simulated — sequential writes since the contract has no\n // bulk put; saveAll would also rewrite existing data)\n const t0 = Date.now()\n for (let i = 0; i < batchSize; i++) {\n await store.put(vault, collection, `sync-${runId}-b-${i}`, envelope(1))\n }\n const batchPushMs = Date.now() - t0\n\n // Rough bytes-per-push — envelope size plus keys\n const bytesPerPush = approxEnvelopeBytes()\n\n return { singlePushMs, batchPushMs, batchSize, bytesPerPush }\n}\n\n// ── D5 · network resilience ───────────────────────────────────────────────\n\nasync function probeNetwork(store: NoydbStore): Promise<NetworkAxis> {\n if (typeof store.ping !== 'function') {\n return { pingSupported: false, pingMs: null }\n }\n const t0 = Date.now()\n try {\n await store.ping()\n return { pingSupported: true, pingMs: Date.now() - t0 }\n } catch {\n return { pingSupported: true, pingMs: null }\n }\n}\n\n// ── Risk aggregation + scoring ────────────────────────────────────────────\n\nfunction collectRisks(\n options: ProbeOptions,\n write: WriteAxis,\n cas: CasAxis,\n hydration: HydrationAxis,\n sync: SyncAxis,\n network: NetworkAxis,\n capabilities: StoreCapabilities | null,\n): ProbeRisk[] {\n const risks: ProbeRisk[] = []\n const slowWriteMs = options.slowWriteMs ?? 100\n const slowHydrationMs = options.slowHydrationMs ?? 500\n const slowSyncMs = options.slowSyncMs ?? 
250\n\n if (write.serial.p99 > slowWriteMs) {\n risks.push({\n code: 'slow-write-p99',\n severity: 'warn',\n message: `Serial write p99 ${write.serial.p99}ms exceeds threshold ${slowWriteMs}ms`,\n })\n }\n if (hydration.loadAllMs > slowHydrationMs) {\n risks.push({\n code: 'slow-hydration',\n severity: 'warn',\n message: `loadAll(${hydration.records}) took ${hydration.loadAllMs}ms (threshold ${slowHydrationMs}ms)`,\n })\n }\n if (sync.singlePushMs > slowSyncMs) {\n risks.push({\n code: 'slow-sync',\n severity: 'warn',\n message: `Single-record push ${sync.singlePushMs}ms exceeds ${slowSyncMs}ms`,\n })\n }\n if (capabilities?.casAtomic === true && cas.successes > 1) {\n risks.push({\n code: 'cas-mismatch',\n severity: 'error',\n message: `Store declared casAtomic:true but ${cas.successes}/${cas.concurrent} concurrent puts succeeded (expected exactly 1)`,\n })\n }\n if (capabilities?.casAtomic === false) {\n risks.push({\n code: 'cas-unsupported',\n severity: 'warn',\n message: 'Store lacks atomic CAS — unsafe for multi-writer sync-peer role',\n })\n }\n if (!network.pingSupported) {\n risks.push({\n code: 'no-ping',\n severity: 'warn',\n message: 'Store has no ping() — runtime monitor will rely on list() as liveness check',\n })\n }\n\n return risks\n}\n\nfunction score(risks: readonly ProbeRisk[]): SuitabilityScore {\n const hasError = risks.some((r) => r.severity === 'error')\n const casUnsupported = risks.some((r) => r.code === 'cas-unsupported')\n const slowWrite = risks.some((r) => r.code === 'slow-write-p99')\n\n const recommended: ProbeRole[] = []\n if (!hasError) {\n if (!slowWrite) recommended.push('primary')\n if (!casUnsupported) recommended.push('sync-peer')\n recommended.push('backup', 'archive')\n }\n return { recommended, risks }\n}\n\n// ── helpers ───────────────────────────────────────────────────────────────\n\n/** Build a synthetic envelope with a tiny ciphertext payload. 
Safe —\n * the store never decrypts, so the `_data` just needs to parse through\n * whatever JSON round-tripping the store does. */\nfunction envelope(version: number, seed = 0): EncryptedEnvelope {\n const data = `probe-${version}-${seed}`.padEnd(64, 'x')\n // base64-encode a deterministic marker so stores that assert\n // base64-shape on persist don't explode\n const b64 = base64Encode(data)\n return {\n _noydb: 1,\n _v: version,\n _ts: new Date().toISOString(),\n _iv: base64Encode('0'.repeat(12)),\n _data: b64,\n }\n}\n\nfunction base64Encode(s: string): string {\n if (typeof Buffer !== 'undefined') return Buffer.from(s, 'utf-8').toString('base64')\n return btoa(unescape(encodeURIComponent(s)))\n}\n\nfunction approxEnvelopeBytes(): number {\n return JSON.stringify(envelope(1)).length\n}\n\nfunction estimateBytes(snapshot: VaultSnapshot): number {\n let total = 0\n for (const coll of Object.values(snapshot)) {\n for (const rec of Object.values(coll)) {\n total += JSON.stringify(rec).length\n }\n }\n return total\n}\n\nfunction stats(samples: number[]): LatencyStats {\n if (samples.length === 0) return { count: 0, p50: 0, p99: 0, max: 0 }\n const sorted = [...samples].sort((a, b) => a - b)\n return {\n count: sorted.length,\n p50: percentile(sorted, 0.5),\n p99: percentile(sorted, 0.99),\n max: sorted[sorted.length - 1]!,\n }\n}\n\nfunction percentile(sorted: number[], q: number): number {\n const idx = Math.min(sorted.length - 1, Math.floor(q * sorted.length))\n return sorted[idx]!\n}\n\nasync function bestEffortCleanup(\n store: NoydbStore,\n vault: string,\n collection: string,\n): Promise<void> {\n try {\n const ids = await store.list(vault, collection)\n await Promise.all(ids.map((id) => store.delete(vault, collection, id).catch(() => {})))\n } catch {\n // Silent — cleanup failure is not a probe failure\n }\n}\n","/**\n * `probeTopology()` — multi-backend health + suitability check.\n *\n * Runs {@link runStoreProbe} independently on the primary store and\n 
* every sync target, then layers topology-level rules that only make\n * sense across the whole graph:\n *\n * | Rule | Condition | Severity |\n * |------|-----------|----------|\n * | `bundle-as-sync-peer` | Bundle-shaped store used as `sync-peer` | warn |\n * | `no-atomic-cas-sync-peer` | Non-atomic-CAS store used as `sync-peer` with >1 user | error |\n * | `primary-slower-than-peer` | Primary p99 > sync-peer p99 × 2 | warn |\n * | `archive-pull-configured` | `archive` target declared with a pull policy | error |\n *\n * Only one probe pass per store — if two targets happen to point at\n * the same backend, both get probed (the target identifies the\n * configuration, not the backend instance).\n *\n * @module\n */\nimport type { NoydbStore } from '@noy-db/hub'\nimport { runStoreProbe } from './probe.js'\nimport type {\n StoreProbeReport,\n TopologyProbeOptions,\n TopologyProbeReport,\n TopologyRisk,\n TopologyTargetReport,\n} from './types.js'\n\nexport async function probeTopology(\n options: TopologyProbeOptions,\n): Promise<TopologyProbeReport> {\n const started = Date.now()\n const expectedUsers = options.expectedUsers ?? 1\n\n const primary = await runStoreProbe(options.store, options)\n const targets: TopologyTargetReport[] = []\n\n for (const t of options.sync ?? []) {\n const label = t.label ?? t.store.name ?? 
t.role\n const report = await runStoreProbe(t.store, { ...options, vault: `_probe-${label}` })\n targets.push({ ...report, role: t.role, label })\n }\n\n const topology = evaluateTopology(options.store, primary, targets, options.sync, expectedUsers)\n const allErrors = [\n ...primary.suitability.risks,\n ...targets.flatMap((t) => t.suitability.risks),\n ...topology,\n ].filter((r) => r.severity === 'error')\n\n return {\n primary, targets, topology,\n recommended: allErrors.length === 0,\n durationMs: Date.now() - started,\n probedAt: new Date().toISOString(),\n }\n}\n\nfunction evaluateTopology(\n _primaryStore: NoydbStore,\n primary: StoreProbeReport,\n targets: readonly TopologyTargetReport[],\n syncTargets: TopologyProbeOptions['sync'] = [],\n expectedUsers: number,\n): TopologyRisk[] {\n const risks: TopologyRisk[] = []\n\n targets.forEach((target, i) => {\n const input = syncTargets[i]\n const label = target.label\n\n // Bundle-shaped stores (drive/webdav/git) don't have atomic CAS\n // and surface as sync-peer-unsuitable. 
For we detect by\n // name heuristics; future hub work can annotate StoreCapabilities\n // with a `shape: 'kv' | 'bundle'` field.\n if (target.role === 'sync-peer' && looksLikeBundleStore(target.store)) {\n risks.push({\n target: label,\n code: 'bundle-as-sync-peer',\n severity: 'warn',\n message: `\"${label}\" looks bundle-shaped — use role 'backup' or 'archive' for push-only semantics`,\n })\n }\n\n if (\n target.role === 'sync-peer' &&\n expectedUsers > 1 &&\n target.capabilities?.casAtomic === false\n ) {\n risks.push({\n target: label,\n code: 'no-atomic-cas-sync-peer',\n severity: 'error',\n message: `\"${label}\" has casAtomic:false — unsafe as sync-peer for ${expectedUsers} concurrent users`,\n })\n }\n\n if (target.role === 'sync-peer' && primary.write.serial.p99 > target.write.serial.p99 * 2) {\n risks.push({\n target: label,\n code: 'primary-slower-than-peer',\n severity: 'warn',\n message: `Primary p99 ${primary.write.serial.p99}ms is >2× peer \"${label}\" p99 ${target.write.serial.p99}ms — unusual topology`,\n })\n }\n\n if (target.role === 'archive' && input?.hasPullPolicy === true) {\n risks.push({\n target: label,\n code: 'archive-pull-configured',\n severity: 'error',\n message: `\"${label}\" is an archive target but has a pull policy — archives are push-only`,\n })\n }\n })\n\n return risks\n}\n\n/** Heuristic bundle detection: name includes 'drive' / 'webdav' / 'git'\n * / 'bundle'. Adopters who wrap a bundle store under a custom name\n * can silence this via `acknowledgeRisks: ['bundle-as-sync-peer']`. 
*/\nfunction looksLikeBundleStore(name: string): boolean {\n const n = name.toLowerCase()\n return /drive|webdav|git|bundle/.test(n)\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACyCA,IAAM,cAAc;AACpB,IAAM,mBAAmB;AAWzB,eAAsB,cACpB,OACA,UAAwB,CAAC,GACE;AAC3B,QAAM,UAAU,KAAK,IAAI;AACzB,QAAM,QAAQ,QAAQ,SAAS;AAC/B,QAAM,aAAa,QAAQ,cAAc;AACzC,QAAM,QAAQ,KAAK,IAAI,EAAE,SAAS,EAAE;AAEpC,QAAM,QAAQ,MAAM,WAAW,OAAO,OAAO,YAAY,OAAO,OAAO;AACvE,QAAM,MAAM,MAAM,SAAS,OAAO,OAAO,YAAY,OAAO,OAAO;AACnE,QAAM,YAAY,MAAM,eAAe,OAAO,OAAO,YAAY,OAAO,OAAO;AAC/E,QAAM,OAAO,MAAM,UAAU,OAAO,OAAO,YAAY,OAAO,OAAO;AACrE,QAAM,UAAU,MAAM,aAAa,KAAK;AAExC,QAAM,eAAe,QAAQ,gBAAgB;AAC7C,QAAM,QAAQ,aAAa,SAAS,OAAO,KAAK,WAAW,MAAM,SAAS,YAAY;AACtF,QAAM,cAAc,MAAM,KAAK;AAE/B,QAAM,kBAAkB,OAAO,OAAO,UAAU;AAEhD,SAAO;AAAA,IACL,OAAO,MAAM,QAAQ;AAAA,IACrB;AAAA,IACA;AAAA,IAAO;AAAA,IAAK;AAAA,IAAW;AAAA,IAAM;AAAA,IAC7B;AAAA,IACA,YAAY,KAAK,IAAI,IAAI;AAAA,IACzB,WAAU,oBAAI,KAAK,GAAE,YAAY;AAAA,EACnC;AACF;AAIA,eAAe,WACb,OACA,OACA,YACA,OACA,SACoB;AACpB,QAAM,IAAI,QAAQ,mBAAmB;AAGrC,QAAM,SAAS,KAAK,KAAK;AACzB,QAAM,YAAY,KAAK,IAAI;AAC3B,QAAM,MAAM,IAAI,OAAO,YAAY,QAAQ,SAAS,CAAC,CAAC;AACtD,QAAM,SAAS,KAAK,IAAI,IAAI;AAG5B,QAAM,gBAA0B,CAAC;AACjC,WAAS,IAAI,GAAG,IAAI,GAAG,KAAK;AAC1B,UAAM,KAAK,KAAK,IAAI;AACpB,UAAM,MAAM,IAAI,OAAO,YAAY,KAAK,KAAK,MAAM,CAAC,IAAI,SAAS,CAAC,CAAC;AACnE,kBAAc,KAAK,KAAK,IAAI,IAAI,EAAE;AAAA,EACpC;AAGA,QAAM,oBAA8B,CAAC;AACrC,WAAS,QAAQ,GAAG,QAAQ,GAAG,SAAS;AACtC,UAAM,KAAK,KAAK,IAAI;AACpB,UAAM,QAAQ;AAAA,MACZ,MAAM;AAAA,QAAK,EAAE,QAAQ,GAAG;AAAA,QAAG,CAAC,GAAG,MAC7B,MAAM,IAAI,OAAO,YAAY,KAAK,KAAK,MAAM,KAAK,IAAI,CAAC,IAAI,SAAS,CAAC,CAAC;AAAA,MACxE;AAAA,IACF;AACA,sBAAkB,KAAK,KAAK,IAAI,IAAI,EAAE;AAAA,EACxC;AAEA,SAAO;AAAA,IACL,WAAW;AAAA,IACX,QAAQ,MAAM,aAAa;AAAA,IAC3B,YAAY,MAAM,iBAAiB;AAAA,EACrC;AACF;AAIA,eAAe,SACb,OACA,OACA,YACA,OACA,SACkB;AAClB,QAAM,cAAc,QAAQ,kBAAkB;AAC9C,QAAM,KAAK,OAAO,KAAK;AAGvB,QAAM,MAAM,IAAI,OAAO,YAAY,IAAI,SAAS,CAAC,CAAC;AAKlD,QAAM,UAAU,MAAM,QAAQ;AAAA,IAC5B,MAAM;AAAA,MAAK,EAAE,QAAQ,YAAY;AAAA,MAAG,CAAC,G
AAG,MACtC,MAAM,IAAI,OAAO,YAAY,IAAI,SAAS,GAAG,CAAC,GAAG,CAAC;AAAA,IACpD;AAAA,EACF;AACA,QAAM,YAAY,QAAQ,OAAO,CAAC,MAAM,EAAE,WAAW,WAAW,EAAE;AAClE,QAAM,aAAa,QAAQ,SAAS;AAGpC,QAAM,iBAAiB,QAAQ,cAAc,aAAa;AAC1D,QAAM,WAAW,mBAAmB,QAAQ,gBAAgB;AAE5D,SAAO,EAAE,YAAY,aAAa,WAAW,YAAY,SAAS;AACpE;AAIA,eAAe,eACb,OACA,OACA,YACA,OACA,SACwB;AACxB,QAAM,UAAU,QAAQ,oBAAoB;AAI5C,QAAM,WAAW,MAAM,MAAM,KAAK,OAAO,UAAU;AACnD,WAAS,IAAI,SAAS,QAAQ,IAAI,SAAS,KAAK;AAC9C,UAAM,MAAM,IAAI,OAAO,YAAY,KAAK,KAAK,IAAI,CAAC,IAAI,SAAS,CAAC,CAAC;AAAA,EACnE;AAEA,QAAM,KAAK,KAAK,IAAI;AACpB,QAAM,WAAW,MAAM,MAAM,QAAQ,KAAK;AAC1C,QAAM,YAAY,KAAK,IAAI,IAAI;AAE/B,QAAM,aAAa,cAAc,QAAQ;AACzC,QAAM,SAAS,OAAO,OAAO,QAAQ,EAAE;AAAA,IACrC,CAAC,KAAK,SAAS,MAAM,OAAO,KAAK,IAAI,EAAE;AAAA,IACvC;AAAA,EACF;AACA,QAAM,iBAAiB,SAAS,IAAI,KAAK,MAAM,aAAa,MAAM,IAAI;AAEtE,SAAO,EAAE,SAAS,QAAQ,WAAW,YAAY,eAAe;AAClE;AAIA,eAAe,UACb,OACA,OACA,YACA,OACA,SACmB;AACnB,QAAM,YAAY,QAAQ,iBAAiB;AAG3C,QAAM,cAAc,KAAK,IAAI;AAC7B,QAAM,MAAM,IAAI,OAAO,YAAY,QAAQ,KAAK,WAAW,SAAS,CAAC,CAAC;AACtE,QAAM,eAAe,KAAK,IAAI,IAAI;AAIlC,QAAM,KAAK,KAAK,IAAI;AACpB,WAAS,IAAI,GAAG,IAAI,WAAW,KAAK;AAClC,UAAM,MAAM,IAAI,OAAO,YAAY,QAAQ,KAAK,MAAM,CAAC,IAAI,SAAS,CAAC,CAAC;AAAA,EACxE;AACA,QAAM,cAAc,KAAK,IAAI,IAAI;AAGjC,QAAM,eAAe,oBAAoB;AAEzC,SAAO,EAAE,cAAc,aAAa,WAAW,aAAa;AAC9D;AAIA,eAAe,aAAa,OAAyC;AACnE,MAAI,OAAO,MAAM,SAAS,YAAY;AACpC,WAAO,EAAE,eAAe,OAAO,QAAQ,KAAK;AAAA,EAC9C;AACA,QAAM,KAAK,KAAK,IAAI;AACpB,MAAI;AACF,UAAM,MAAM,KAAK;AACjB,WAAO,EAAE,eAAe,MAAM,QAAQ,KAAK,IAAI,IAAI,GAAG;AAAA,EACxD,QAAQ;AACN,WAAO,EAAE,eAAe,MAAM,QAAQ,KAAK;AAAA,EAC7C;AACF;AAIA,SAAS,aACP,SACA,OACA,KACA,WACA,MACA,SACA,cACa;AACb,QAAM,QAAqB,CAAC;AAC5B,QAAM,cAAc,QAAQ,eAAe;AAC3C,QAAM,kBAAkB,QAAQ,mBAAmB;AACnD,QAAM,aAAa,QAAQ,cAAc;AAEzC,MAAI,MAAM,OAAO,MAAM,aAAa;AAClC,UAAM,KAAK;AAAA,MACT,MAAM;AAAA,MACN,UAAU;AAAA,MACV,SAAS,oBAAoB,MAAM,OAAO,GAAG,wBAAwB,WAAW;AAAA,IAClF,CAAC;AAAA,EACH;AACA,MAAI,UAAU,YAAY,iBAAiB;AACzC,UAAM,KAAK;AAAA,MACT,MAAM;AAAA,MACN,UAAU;AAAA,MACV,SAAS,WAAW,UAAU,OAAO,UAAU,UAAU,SAAS,iBAAiB,eAAe;AAAA,IACpG,CAAC;AAAA,EACH;AACA,
MAAI,KAAK,eAAe,YAAY;AAClC,UAAM,KAAK;AAAA,MACT,MAAM;AAAA,MACN,UAAU;AAAA,MACV,SAAS,sBAAsB,KAAK,YAAY,cAAc,UAAU;AAAA,IAC1E,CAAC;AAAA,EACH;AACA,MAAI,cAAc,cAAc,QAAQ,IAAI,YAAY,GAAG;AACzD,UAAM,KAAK;AAAA,MACT,MAAM;AAAA,MACN,UAAU;AAAA,MACV,SAAS,qCAAqC,IAAI,SAAS,IAAI,IAAI,UAAU;AAAA,IAC/E,CAAC;AAAA,EACH;AACA,MAAI,cAAc,cAAc,OAAO;AACrC,UAAM,KAAK;AAAA,MACT,MAAM;AAAA,MACN,UAAU;AAAA,MACV,SAAS;AAAA,IACX,CAAC;AAAA,EACH;AACA,MAAI,CAAC,QAAQ,eAAe;AAC1B,UAAM,KAAK;AAAA,MACT,MAAM;AAAA,MACN,UAAU;AAAA,MACV,SAAS;AAAA,IACX,CAAC;AAAA,EACH;AAEA,SAAO;AACT;AAEA,SAAS,MAAM,OAA+C;AAC5D,QAAM,WAAW,MAAM,KAAK,CAAC,MAAM,EAAE,aAAa,OAAO;AACzD,QAAM,iBAAiB,MAAM,KAAK,CAAC,MAAM,EAAE,SAAS,iBAAiB;AACrE,QAAM,YAAY,MAAM,KAAK,CAAC,MAAM,EAAE,SAAS,gBAAgB;AAE/D,QAAM,cAA2B,CAAC;AAClC,MAAI,CAAC,UAAU;AACb,QAAI,CAAC,UAAW,aAAY,KAAK,SAAS;AAC1C,QAAI,CAAC,eAAgB,aAAY,KAAK,WAAW;AACjD,gBAAY,KAAK,UAAU,SAAS;AAAA,EACtC;AACA,SAAO,EAAE,aAAa,MAAM;AAC9B;AAOA,SAAS,SAAS,SAAiB,OAAO,GAAsB;AAC9D,QAAM,OAAO,SAAS,OAAO,IAAI,IAAI,GAAG,OAAO,IAAI,GAAG;AAGtD,QAAM,MAAM,aAAa,IAAI;AAC7B,SAAO;AAAA,IACL,QAAQ;AAAA,IACR,IAAI;AAAA,IACJ,MAAK,oBAAI,KAAK,GAAE,YAAY;AAAA,IAC5B,KAAK,aAAa,IAAI,OAAO,EAAE,CAAC;AAAA,IAChC,OAAO;AAAA,EACT;AACF;AAEA,SAAS,aAAa,GAAmB;AACvC,MAAI,OAAO,WAAW,YAAa,QAAO,OAAO,KAAK,GAAG,OAAO,EAAE,SAAS,QAAQ;AACnF,SAAO,KAAK,SAAS,mBAAmB,CAAC,CAAC,CAAC;AAC7C;AAEA,SAAS,sBAA8B;AACrC,SAAO,KAAK,UAAU,SAAS,CAAC,CAAC,EAAE;AACrC;AAEA,SAAS,cAAc,UAAiC;AACtD,MAAI,QAAQ;AACZ,aAAW,QAAQ,OAAO,OAAO,QAAQ,GAAG;AAC1C,eAAW,OAAO,OAAO,OAAO,IAAI,GAAG;AACrC,eAAS,KAAK,UAAU,GAAG,EAAE;AAAA,IAC/B;AAAA,EACF;AACA,SAAO;AACT;AAEA,SAAS,MAAM,SAAiC;AAC9C,MAAI,QAAQ,WAAW,EAAG,QAAO,EAAE,OAAO,GAAG,KAAK,GAAG,KAAK,GAAG,KAAK,EAAE;AACpE,QAAM,SAAS,CAAC,GAAG,OAAO,EAAE,KAAK,CAAC,GAAG,MAAM,IAAI,CAAC;AAChD,SAAO;AAAA,IACL,OAAO,OAAO;AAAA,IACd,KAAK,WAAW,QAAQ,GAAG;AAAA,IAC3B,KAAK,WAAW,QAAQ,IAAI;AAAA,IAC5B,KAAK,OAAO,OAAO,SAAS,CAAC;AAAA,EAC/B;AACF;AAEA,SAAS,WAAW,QAAkB,GAAmB;AACvD,QAAM,MAAM,KAAK,IAAI,OAAO,SAAS,GAAG,KAAK,MAAM,IAAI,OAAO,MAAM,CAAC;AACrE,SAAO,OAAO,GAAG;AACnB;AAEA,eAAe,kBACb,OACA,OACA,YACe;AACf,
MAAI;AACF,UAAM,MAAM,MAAM,MAAM,KAAK,OAAO,UAAU;AAC9C,UAAM,QAAQ,IAAI,IAAI,IAAI,CAAC,OAAO,MAAM,OAAO,OAAO,YAAY,EAAE,EAAE,MAAM,MAAM;AAAA,IAAC,CAAC,CAAC,CAAC;AAAA,EACxF,QAAQ;AAAA,EAER;AACF;;;AC7VA,eAAsB,cACpB,SAC8B;AAC9B,QAAM,UAAU,KAAK,IAAI;AACzB,QAAM,gBAAgB,QAAQ,iBAAiB;AAE/C,QAAM,UAAU,MAAM,cAAc,QAAQ,OAAO,OAAO;AAC1D,QAAM,UAAkC,CAAC;AAEzC,aAAW,KAAK,QAAQ,QAAQ,CAAC,GAAG;AAClC,UAAM,QAAQ,EAAE,SAAS,EAAE,MAAM,QAAQ,EAAE;AAC3C,UAAM,SAAS,MAAM,cAAc,EAAE,OAAO,EAAE,GAAG,SAAS,OAAO,UAAU,KAAK,GAAG,CAAC;AACpF,YAAQ,KAAK,EAAE,GAAG,QAAQ,MAAM,EAAE,MAAM,MAAM,CAAC;AAAA,EACjD;AAEA,QAAM,WAAW,iBAAiB,QAAQ,OAAO,SAAS,SAAS,QAAQ,MAAM,aAAa;AAC9F,QAAM,YAAY;AAAA,IAChB,GAAG,QAAQ,YAAY;AAAA,IACvB,GAAG,QAAQ,QAAQ,CAAC,MAAM,EAAE,YAAY,KAAK;AAAA,IAC7C,GAAG;AAAA,EACL,EAAE,OAAO,CAAC,MAAM,EAAE,aAAa,OAAO;AAEtC,SAAO;AAAA,IACL;AAAA,IAAS;AAAA,IAAS;AAAA,IAClB,aAAa,UAAU,WAAW;AAAA,IAClC,YAAY,KAAK,IAAI,IAAI;AAAA,IACzB,WAAU,oBAAI,KAAK,GAAE,YAAY;AAAA,EACnC;AACF;AAEA,SAAS,iBACP,eACA,SACA,SACA,cAA4C,CAAC,GAC7C,eACgB;AAChB,QAAM,QAAwB,CAAC;AAE/B,UAAQ,QAAQ,CAAC,QAAQ,MAAM;AAC7B,UAAM,QAAQ,YAAY,CAAC;AAC3B,UAAM,QAAQ,OAAO;AAMrB,QAAI,OAAO,SAAS,eAAe,qBAAqB,OAAO,KAAK,GAAG;AACrE,YAAM,KAAK;AAAA,QACT,QAAQ;AAAA,QACR,MAAM;AAAA,QACN,UAAU;AAAA,QACV,SAAS,IAAI,KAAK;AAAA,MACpB,CAAC;AAAA,IACH;AAEA,QACE,OAAO,SAAS,eAChB,gBAAgB,KAChB,OAAO,cAAc,cAAc,OACnC;AACA,YAAM,KAAK;AAAA,QACT,QAAQ;AAAA,QACR,MAAM;AAAA,QACN,UAAU;AAAA,QACV,SAAS,IAAI,KAAK,wDAAmD,aAAa;AAAA,MACpF,CAAC;AAAA,IACH;AAEA,QAAI,OAAO,SAAS,eAAe,QAAQ,MAAM,OAAO,MAAM,OAAO,MAAM,OAAO,MAAM,GAAG;AACzF,YAAM,KAAK;AAAA,QACT,QAAQ;AAAA,QACR,MAAM;AAAA,QACN,UAAU;AAAA,QACV,SAAS,eAAe,QAAQ,MAAM,OAAO,GAAG,sBAAmB,KAAK,SAAS,OAAO,MAAM,OAAO,GAAG;AAAA,MAC1G,CAAC;AAAA,IACH;AAEA,QAAI,OAAO,SAAS,aAAa,OAAO,kBAAkB,MAAM;AAC9D,YAAM,KAAK;AAAA,QACT,QAAQ;AAAA,QACR,MAAM;AAAA,QACN,UAAU;AAAA,QACV,SAAS,IAAI,KAAK;AAAA,MACpB,CAAC;AAAA,IACH;AAAA,EACF,CAAC;AAED,SAAO;AACT;AAKA,SAAS,qBAAqB,MAAuB;AACnD,QAAM,IAAI,KAAK,YAAY;AAC3B,SAAO,0BAA0B,KAAK,CAAC;AACzC;","names":[]}
@@ -0,0 +1,189 @@
1
+ import { StoreCapabilities, NoydbStore, SyncTargetRole } from '@noy-db/hub';
2
+
3
+ /**
4
+ * Shared types for `@noy-db/to-probe`.
5
+ *
6
+ * Both `runStoreProbe()` and `probeTopology()` produce structured
7
+ * reports with the same vocabulary: a fixed set of per-axis measurement
8
+ * blocks, a `ProbeRisk[]` list with severity and a machine-readable
9
+ * `code`, and a `SuitabilityScore` triple (primary / sync-peer / backup)
10
+ * summarising whether the store is safe to use in that role.
11
+ *
12
+ * The `code` strings are the identifiers adopters pass to
13
+ * `createNoydb({ acknowledgeRisks: [...] })` to silence a known risk.
14
+ *
15
+ * @module
16
+ */
17
+
18
+ /** Role a store is being considered for. */
19
+ type ProbeRole = 'primary' | 'sync-peer' | 'backup' | 'archive';
20
+ /** Machine-readable risk identifiers. Keep this list closed — adopters
21
+ * pass these exact strings to `acknowledgeRisks`. */
22
+ type ProbeRiskCode = 'slow-write-p99' | 'slow-hydration' | 'slow-sync' | 'cas-mismatch' | 'cas-unsupported' | 'no-ping' | 'hydration-blocked' | 'bundle-as-sync-peer' | 'no-atomic-cas-sync-peer' | 'primary-slower-than-peer' | 'archive-pull-configured';
23
+ interface ProbeRisk {
24
+ readonly code: ProbeRiskCode;
25
+ readonly severity: 'warn' | 'error';
26
+ readonly message: string;
27
+ }
28
+ /** Per-axis latency measurement — all numbers in milliseconds. */
29
+ interface LatencyStats {
30
+ readonly count: number;
31
+ readonly p50: number;
32
+ readonly p99: number;
33
+ readonly max: number;
34
+ }
35
+ interface WriteAxis {
36
+ readonly serial: LatencyStats;
37
+ readonly concurrent: LatencyStats;
38
+ readonly coldStart: number;
39
+ }
40
+ interface CasAxis {
41
+ readonly concurrent: number;
42
+ readonly successes: number;
43
+ readonly rejections: number;
44
+ readonly expected: 'exactly-one' | 'multiple-ok';
45
+ }
46
+ interface HydrationAxis {
47
+ readonly records: number;
48
+ readonly loadAllMs: number;
49
+ readonly perRecordBytes: number;
50
+ readonly totalBytes: number;
51
+ }
52
+ interface SyncAxis {
53
+ readonly singlePushMs: number;
54
+ readonly batchPushMs: number;
55
+ readonly batchSize: number;
56
+ readonly bytesPerPush: number;
57
+ }
58
+ interface NetworkAxis {
59
+ readonly pingSupported: boolean;
60
+ readonly pingMs: number | null;
61
+ }
62
+ /** Suitability decision per role. */
63
+ interface SuitabilityScore {
64
+ /** Roles the store passes (no error-severity risks apply). */
65
+ readonly recommended: readonly ProbeRole[];
66
+ /** Risks that caller may choose to acknowledge. */
67
+ readonly risks: readonly ProbeRisk[];
68
+ }
69
+ /** Full report produced by `runStoreProbe()`. */
70
+ interface StoreProbeReport {
71
+ readonly store: string;
72
+ readonly capabilities: StoreCapabilities | null;
73
+ readonly write: WriteAxis;
74
+ readonly cas: CasAxis;
75
+ readonly hydration: HydrationAxis;
76
+ readonly sync: SyncAxis;
77
+ readonly network: NetworkAxis;
78
+ readonly suitability: SuitabilityScore;
79
+ readonly durationMs: number;
80
+ readonly probedAt: string;
81
+ }
82
+ /** Options for `runStoreProbe()`. */
83
+ interface ProbeOptions {
84
+ /**
85
+ * Probe vault name. Isolated from real data — cleaned up at the
86
+ * end of the probe. Default `'probe-vault'`. Avoid `_`-prefixed
87
+ * values: several stores hide `_`-collections from `loadAll`,
88
+ * which would make D3 (hydration) measure zero records.
89
+ */
90
+ readonly vault?: string;
91
+ /**
92
+ * Collection used for probe writes. Default `'probe-benchmark'`.
93
+ * Leftover envelopes may persist if the probe is interrupted —
94
+ * adopters can safely delete anything under this name.
95
+ */
96
+ readonly collection?: string;
97
+ /**
98
+ * Declared capabilities of the store (for `casAtomic` verification).
99
+ * Stores in this codebase don't attach capabilities to the `NoydbStore`
100
+ * object itself — pass them explicitly so the probe can compare
101
+ * declared vs. measured behaviour.
102
+ */
103
+ readonly capabilities?: StoreCapabilities;
104
+ /** Number of serial writes in the D1 latency sample. Default 20. */
105
+ readonly writeSampleSize?: number;
106
+ /** Number of parallel writers in the D2 CAS test. Default 10. */
107
+ readonly casConcurrency?: number;
108
+ /** Records to populate before measuring loadAll. Default 100. */
109
+ readonly hydrationRecords?: number;
110
+ /** Batch size for D4 sync economics. Default 50. */
111
+ readonly syncBatchSize?: number;
112
+ /** p99 write-latency threshold (ms). Above this → `slow-write-p99`. Default 100. */
113
+ readonly slowWriteMs?: number;
114
+ /** loadAll threshold (ms). Above this → `slow-hydration`. Default 500. */
115
+ readonly slowHydrationMs?: number;
116
+ /** Single-record push threshold (ms). Above this → `slow-sync`. Default 250. */
117
+ readonly slowSyncMs?: number;
118
+ }
119
+ /** Input for `probeTopology()`. */
120
+ interface TopologyProbeOptions extends ProbeOptions {
121
+ readonly store: NoydbStore;
122
+ readonly sync?: ReadonlyArray<{
123
+ readonly store: NoydbStore;
124
+ readonly role: SyncTargetRole;
125
+ readonly label?: string;
126
+ readonly hasPullPolicy?: boolean;
127
+ }>;
128
+ /** Expected number of concurrent human users. Default 1. */
129
+ readonly expectedUsers?: number;
130
+ }
131
+ interface TopologyRisk extends ProbeRisk {
132
+ /** Target label (or 'primary'). */
133
+ readonly target: string;
134
+ }
135
+ interface TopologyTargetReport extends StoreProbeReport {
136
+ readonly role: SyncTargetRole;
137
+ readonly label: string;
138
+ }
139
+ interface TopologyProbeReport {
140
+ readonly primary: StoreProbeReport;
141
+ readonly targets: readonly TopologyTargetReport[];
142
+ readonly topology: readonly TopologyRisk[];
143
+ /** `true` iff there are no error-severity risks across primary + targets + topology. */
144
+ readonly recommended: boolean;
145
+ readonly durationMs: number;
146
+ readonly probedAt: string;
147
+ }
148
+
149
+ /**
150
+ * `runStoreProbe()` — setup-time suitability test for a `NoydbStore`.
151
+ *
152
+ * Five measurement axes (D1-D5 per spec in issue ):
153
+ *
154
+ * | Axis | Measures |
155
+ * |------|----------|
156
+ * | D1 — Write responsiveness | serial + concurrent put p50/p99, cold-start |
157
+ * | D2 — Conflict integrity | N parallel puts with same `expectedVersion` |
158
+ * | D3 — Hydration cost | `loadAll()` time and record-size footprint |
159
+ * | D4 — Sync economics | single + batch `put` cost, bytes/push |
160
+ * | D5 — Network resilience | `ping()` support + latency |
161
+ *
162
+ * Writes happen to an isolated `_probe / _probe` collection that the
163
+ * probe cleans up on completion. The probe does not mutate real
164
+ * application data — but if a probe is interrupted, stray envelopes
165
+ * may remain under that collection. Adopters can safely delete
166
+ * anything under the `_probe` vault.
167
+ *
168
+ * The probe never decrypts anything. It operates at the `NoydbStore`
169
+ * layer with handcrafted {@link EncryptedEnvelope}-shaped payloads — a
170
+ * probe run produces no keyring, no DEK, and no plaintext the store
171
+ * can see.
172
+ *
173
+ * @module
174
+ */
175
+
176
+ /**
177
+ * Run the full 5-axis probe against `store`. Returns a structured
178
+ * report with per-axis measurements and a {@link SuitabilityScore}.
179
+ *
180
+ * The probe is **idempotent-per-run**: it picks unique record IDs per
181
+ * invocation using a monotonically increasing counter seeded by
182
+ * `Date.now()`, so concurrent probe runs against the same store do
183
+ * not collide.
184
+ */
185
+ declare function runStoreProbe(store: NoydbStore, options?: ProbeOptions): Promise<StoreProbeReport>;
186
+
187
+ declare function probeTopology(options: TopologyProbeOptions): Promise<TopologyProbeReport>;
188
+
189
+ export { type CasAxis, type HydrationAxis, type LatencyStats, type NetworkAxis, type ProbeOptions, type ProbeRisk, type ProbeRiskCode, type ProbeRole, type StoreProbeReport, type SuitabilityScore, type SyncAxis, type TopologyProbeOptions, type TopologyProbeReport, type TopologyRisk, type TopologyTargetReport, type WriteAxis, probeTopology, runStoreProbe };
@@ -0,0 +1,189 @@
1
+ import { StoreCapabilities, NoydbStore, SyncTargetRole } from '@noy-db/hub';
2
+
3
+ /**
4
+ * Shared types for `@noy-db/to-probe`.
5
+ *
6
+ * Both `runStoreProbe()` and `probeTopology()` produce structured
7
+ * reports with the same vocabulary: a fixed set of per-axis measurement
8
+ * blocks, a `ProbeRisk[]` list with severity and a machine-readable
9
+ * `code`, and a `SuitabilityScore` triple (primary / sync-peer / backup)
10
+ * summarising whether the store is safe to use in that role.
11
+ *
12
+ * The `code` strings are the identifiers adopters pass to
13
+ * `createNoydb({ acknowledgeRisks: [...] })` to silence a known risk.
14
+ *
15
+ * @module
16
+ */
17
+
18
+ /** Role a store is being considered for. */
19
+ type ProbeRole = 'primary' | 'sync-peer' | 'backup' | 'archive';
20
+ /** Machine-readable risk identifiers. Keep this list closed — adopters
21
+ * pass these exact strings to `acknowledgeRisks`. */
22
+ type ProbeRiskCode = 'slow-write-p99' | 'slow-hydration' | 'slow-sync' | 'cas-mismatch' | 'cas-unsupported' | 'no-ping' | 'hydration-blocked' | 'bundle-as-sync-peer' | 'no-atomic-cas-sync-peer' | 'primary-slower-than-peer' | 'archive-pull-configured';
23
+ interface ProbeRisk {
24
+ readonly code: ProbeRiskCode;
25
+ readonly severity: 'warn' | 'error';
26
+ readonly message: string;
27
+ }
28
+ /** Per-axis latency measurement — all numbers in milliseconds. */
29
+ interface LatencyStats {
30
+ readonly count: number;
31
+ readonly p50: number;
32
+ readonly p99: number;
33
+ readonly max: number;
34
+ }
35
+ interface WriteAxis {
36
+ readonly serial: LatencyStats;
37
+ readonly concurrent: LatencyStats;
38
+ readonly coldStart: number;
39
+ }
40
+ interface CasAxis {
41
+ readonly concurrent: number;
42
+ readonly successes: number;
43
+ readonly rejections: number;
44
+ readonly expected: 'exactly-one' | 'multiple-ok';
45
+ }
46
+ interface HydrationAxis {
47
+ readonly records: number;
48
+ readonly loadAllMs: number;
49
+ readonly perRecordBytes: number;
50
+ readonly totalBytes: number;
51
+ }
52
+ interface SyncAxis {
53
+ readonly singlePushMs: number;
54
+ readonly batchPushMs: number;
55
+ readonly batchSize: number;
56
+ readonly bytesPerPush: number;
57
+ }
58
+ interface NetworkAxis {
59
+ readonly pingSupported: boolean;
60
+ readonly pingMs: number | null;
61
+ }
62
+ /** Suitability decision per role. */
63
+ interface SuitabilityScore {
64
+ /** Roles the store passes (no error-severity risks apply). */
65
+ readonly recommended: readonly ProbeRole[];
66
+ /** Risks that caller may choose to acknowledge. */
67
+ readonly risks: readonly ProbeRisk[];
68
+ }
69
+ /** Full report produced by `runStoreProbe()`. */
70
+ interface StoreProbeReport {
71
+ readonly store: string;
72
+ readonly capabilities: StoreCapabilities | null;
73
+ readonly write: WriteAxis;
74
+ readonly cas: CasAxis;
75
+ readonly hydration: HydrationAxis;
76
+ readonly sync: SyncAxis;
77
+ readonly network: NetworkAxis;
78
+ readonly suitability: SuitabilityScore;
79
+ readonly durationMs: number;
80
+ readonly probedAt: string;
81
+ }
82
+ /** Options for `runStoreProbe()`. */
83
+ interface ProbeOptions {
84
+ /**
85
+ * Probe vault name. Isolated from real data — cleaned up at the
86
+ * end of the probe. Default `'probe-vault'`. Avoid `_`-prefixed
87
+ * values: several stores hide `_`-collections from `loadAll`,
88
+ * which would make D3 (hydration) measure zero records.
89
+ */
90
+ readonly vault?: string;
91
+ /**
92
+ * Collection used for probe writes. Default `'probe-benchmark'`.
93
+ * Leftover envelopes may persist if the probe is interrupted —
94
+ * adopters can safely delete anything under this name.
95
+ */
96
+ readonly collection?: string;
97
+ /**
98
+ * Declared capabilities of the store (for `casAtomic` verification).
99
+ * Stores in this codebase don't attach capabilities to the `NoydbStore`
100
+ * object itself — pass them explicitly so the probe can compare
101
+ * declared vs. measured behaviour.
102
+ */
103
+ readonly capabilities?: StoreCapabilities;
104
+ /** Number of serial writes in the D1 latency sample. Default 20. */
105
+ readonly writeSampleSize?: number;
106
+ /** Number of parallel writers in the D2 CAS test. Default 10. */
107
+ readonly casConcurrency?: number;
108
+ /** Records to populate before measuring loadAll. Default 100. */
109
+ readonly hydrationRecords?: number;
110
+ /** Batch size for D4 sync economics. Default 50. */
111
+ readonly syncBatchSize?: number;
112
+ /** p99 write-latency threshold (ms). Above this → `slow-write-p99`. Default 100. */
113
+ readonly slowWriteMs?: number;
114
+ /** loadAll threshold (ms). Above this → `slow-hydration`. Default 500. */
115
+ readonly slowHydrationMs?: number;
116
+ /** Single-record push threshold (ms). Above this → `slow-sync`. Default 250. */
117
+ readonly slowSyncMs?: number;
118
+ }
119
+ /** Input for `probeTopology()`. */
120
+ interface TopologyProbeOptions extends ProbeOptions {
121
+ readonly store: NoydbStore;
122
+ readonly sync?: ReadonlyArray<{
123
+ readonly store: NoydbStore;
124
+ readonly role: SyncTargetRole;
125
+ readonly label?: string;
126
+ readonly hasPullPolicy?: boolean;
127
+ }>;
128
+ /** Expected number of concurrent human users. Default 1. */
129
+ readonly expectedUsers?: number;
130
+ }
131
+ interface TopologyRisk extends ProbeRisk {
132
+ /** Target label (or 'primary'). */
133
+ readonly target: string;
134
+ }
135
+ interface TopologyTargetReport extends StoreProbeReport {
136
+ readonly role: SyncTargetRole;
137
+ readonly label: string;
138
+ }
139
+ interface TopologyProbeReport {
140
+ readonly primary: StoreProbeReport;
141
+ readonly targets: readonly TopologyTargetReport[];
142
+ readonly topology: readonly TopologyRisk[];
143
+ /** `true` iff there are no error-severity risks across primary + targets + topology. */
144
+ readonly recommended: boolean;
145
+ readonly durationMs: number;
146
+ readonly probedAt: string;
147
+ }
148
+
149
+ /**
150
+ * `runStoreProbe()` — setup-time suitability test for a `NoydbStore`.
151
+ *
152
+ * Five measurement axes (D1-D5 per spec in issue ):
153
+ *
154
+ * | Axis | Measures |
155
+ * |------|----------|
156
+ * | D1 — Write responsiveness | serial + concurrent put p50/p99, cold-start |
157
+ * | D2 — Conflict integrity | N parallel puts with same `expectedVersion` |
158
+ * | D3 — Hydration cost | `loadAll()` time and record-size footprint |
159
+ * | D4 — Sync economics | single + batch `put` cost, bytes/push |
160
+ * | D5 — Network resilience | `ping()` support + latency |
161
+ *
162
+ * Writes happen to an isolated `_probe / _probe` collection that the
163
+ * probe cleans up on completion. The probe does not mutate real
164
+ * application data — but if a probe is interrupted, stray envelopes
165
+ * may remain under that collection. Adopters can safely delete
166
+ * anything under the `_probe` vault.
167
+ *
168
+ * The probe never decrypts anything. It operates at the `NoydbStore`
169
+ * layer with handcrafted {@link EncryptedEnvelope}-shaped payloads — a
170
+ * probe run produces no keyring, no DEK, and no plaintext the store
171
+ * can see.
172
+ *
173
+ * @module
174
+ */
175
+
176
+ /**
177
+ * Run the full 5-axis probe against `store`. Returns a structured
178
+ * report with per-axis measurements and a {@link SuitabilityScore}.
179
+ *
180
+ * The probe is **idempotent-per-run**: it picks unique record IDs per
181
+ * invocation using a monotonically increasing counter seeded by
182
+ * `Date.now()`, so concurrent probe runs against the same store do
183
+ * not collide.
184
+ */
185
+ declare function runStoreProbe(store: NoydbStore, options?: ProbeOptions): Promise<StoreProbeReport>;
186
+
187
+ declare function probeTopology(options: TopologyProbeOptions): Promise<TopologyProbeReport>;
188
+
189
+ export { type CasAxis, type HydrationAxis, type LatencyStats, type NetworkAxis, type ProbeOptions, type ProbeRisk, type ProbeRiskCode, type ProbeRole, type StoreProbeReport, type SuitabilityScore, type SyncAxis, type TopologyProbeOptions, type TopologyProbeReport, type TopologyRisk, type TopologyTargetReport, type WriteAxis, probeTopology, runStoreProbe };
package/dist/index.js ADDED
@@ -0,0 +1,303 @@
1
+ // src/probe.ts
2
+ var PROBE_VAULT = "probe-vault";
3
+ var PROBE_COLLECTION = "probe-benchmark";
4
+ async function runStoreProbe(store, options = {}) {
5
+ const started = Date.now();
6
+ const vault = options.vault ?? PROBE_VAULT;
7
+ const collection = options.collection ?? PROBE_COLLECTION;
8
+ const runId = Date.now().toString(36);
9
+ const write = await probeWrite(store, vault, collection, runId, options);
10
+ const cas = await probeCas(store, vault, collection, runId, options);
11
+ const hydration = await probeHydration(store, vault, collection, runId, options);
12
+ const sync = await probeSync(store, vault, collection, runId, options);
13
+ const network = await probeNetwork(store);
14
+ const capabilities = options.capabilities ?? null;
15
+ const risks = collectRisks(options, write, cas, hydration, sync, network, capabilities);
16
+ const suitability = score(risks);
17
+ await bestEffortCleanup(store, vault, collection);
18
+ return {
19
+ store: store.name ?? "unnamed",
20
+ capabilities,
21
+ write,
22
+ cas,
23
+ hydration,
24
+ sync,
25
+ network,
26
+ suitability,
27
+ durationMs: Date.now() - started,
28
+ probedAt: (/* @__PURE__ */ new Date()).toISOString()
29
+ };
30
+ }
31
+ async function probeWrite(store, vault, collection, runId, options) {
32
+ const n = options.writeSampleSize ?? 20;
33
+ const coldId = `w-${runId}-cold`;
34
+ const coldStart = Date.now();
35
+ await store.put(vault, collection, coldId, envelope(1));
36
+ const coldMs = Date.now() - coldStart;
37
+ const serialSamples = [];
38
+ for (let i = 0; i < n; i++) {
39
+ const t0 = Date.now();
40
+ await store.put(vault, collection, `w-${runId}-s-${i}`, envelope(1));
41
+ serialSamples.push(Date.now() - t0);
42
+ }
43
+ const concurrentSamples = [];
44
+ for (let batch = 0; batch < 5; batch++) {
45
+ const t0 = Date.now();
46
+ await Promise.all(
47
+ Array.from(
48
+ { length: 10 },
49
+ (_, j) => store.put(vault, collection, `w-${runId}-c-${batch}-${j}`, envelope(1))
50
+ )
51
+ );
52
+ concurrentSamples.push(Date.now() - t0);
53
+ }
54
+ return {
55
+ coldStart: coldMs,
56
+ serial: stats(serialSamples),
57
+ concurrent: stats(concurrentSamples)
58
+ };
59
+ }
60
+ async function probeCas(store, vault, collection, runId, options) {
61
+ const concurrency = options.casConcurrency ?? 10;
62
+ const id = `cas-${runId}`;
63
+ await store.put(vault, collection, id, envelope(1));
64
+ const settled = await Promise.allSettled(
65
+ Array.from(
66
+ { length: concurrency },
67
+ (_, i) => store.put(vault, collection, id, envelope(2, i), 1)
68
+ )
69
+ );
70
+ const successes = settled.filter((r) => r.status === "fulfilled").length;
71
+ const rejections = settled.length - successes;
72
+ const declaredAtomic = options.capabilities?.casAtomic ?? null;
73
+ const expected = declaredAtomic === false ? "multiple-ok" : "exactly-one";
74
+ return { concurrent: concurrency, successes, rejections, expected };
75
+ }
76
+ async function probeHydration(store, vault, collection, runId, options) {
77
+ const records = options.hydrationRecords ?? 100;
78
+ const existing = await store.list(vault, collection);
79
+ for (let i = existing.length; i < records; i++) {
80
+ await store.put(vault, collection, `h-${runId}-${i}`, envelope(1));
81
+ }
82
+ const t0 = Date.now();
83
+ const snapshot = await store.loadAll(vault);
84
+ const loadAllMs = Date.now() - t0;
85
+ const totalBytes = estimateBytes(snapshot);
86
+ const loaded = Object.values(snapshot).reduce(
87
+ (sum, coll) => sum + Object.keys(coll).length,
88
+ 0
89
+ );
90
+ const perRecordBytes = loaded > 0 ? Math.round(totalBytes / loaded) : 0;
91
+ return { records: loaded, loadAllMs, totalBytes, perRecordBytes };
92
+ }
93
+ async function probeSync(store, vault, collection, runId, options) {
94
+ const batchSize = options.syncBatchSize ?? 50;
95
+ const singleStart = Date.now();
96
+ await store.put(vault, collection, `sync-${runId}-single`, envelope(1));
97
+ const singlePushMs = Date.now() - singleStart;
98
+ const t0 = Date.now();
99
+ for (let i = 0; i < batchSize; i++) {
100
+ await store.put(vault, collection, `sync-${runId}-b-${i}`, envelope(1));
101
+ }
102
+ const batchPushMs = Date.now() - t0;
103
+ const bytesPerPush = approxEnvelopeBytes();
104
+ return { singlePushMs, batchPushMs, batchSize, bytesPerPush };
105
+ }
106
+ async function probeNetwork(store) {
107
+ if (typeof store.ping !== "function") {
108
+ return { pingSupported: false, pingMs: null };
109
+ }
110
+ const t0 = Date.now();
111
+ try {
112
+ await store.ping();
113
+ return { pingSupported: true, pingMs: Date.now() - t0 };
114
+ } catch {
115
+ return { pingSupported: true, pingMs: null };
116
+ }
117
+ }
118
+ function collectRisks(options, write, cas, hydration, sync, network, capabilities) {
119
+ const risks = [];
120
+ const slowWriteMs = options.slowWriteMs ?? 100;
121
+ const slowHydrationMs = options.slowHydrationMs ?? 500;
122
+ const slowSyncMs = options.slowSyncMs ?? 250;
123
+ if (write.serial.p99 > slowWriteMs) {
124
+ risks.push({
125
+ code: "slow-write-p99",
126
+ severity: "warn",
127
+ message: `Serial write p99 ${write.serial.p99}ms exceeds threshold ${slowWriteMs}ms`
128
+ });
129
+ }
130
+ if (hydration.loadAllMs > slowHydrationMs) {
131
+ risks.push({
132
+ code: "slow-hydration",
133
+ severity: "warn",
134
+ message: `loadAll(${hydration.records}) took ${hydration.loadAllMs}ms (threshold ${slowHydrationMs}ms)`
135
+ });
136
+ }
137
+ if (sync.singlePushMs > slowSyncMs) {
138
+ risks.push({
139
+ code: "slow-sync",
140
+ severity: "warn",
141
+ message: `Single-record push ${sync.singlePushMs}ms exceeds ${slowSyncMs}ms`
142
+ });
143
+ }
144
+ if (capabilities?.casAtomic === true && cas.successes > 1) {
145
+ risks.push({
146
+ code: "cas-mismatch",
147
+ severity: "error",
148
+ message: `Store declared casAtomic:true but ${cas.successes}/${cas.concurrent} concurrent puts succeeded (expected exactly 1)`
149
+ });
150
+ }
151
+ if (capabilities?.casAtomic === false) {
152
+ risks.push({
153
+ code: "cas-unsupported",
154
+ severity: "warn",
155
+ message: "Store lacks atomic CAS \u2014 unsafe for multi-writer sync-peer role"
156
+ });
157
+ }
158
+ if (!network.pingSupported) {
159
+ risks.push({
160
+ code: "no-ping",
161
+ severity: "warn",
162
+ message: "Store has no ping() \u2014 runtime monitor will rely on list() as liveness check"
163
+ });
164
+ }
165
+ return risks;
166
+ }
167
+ function score(risks) {
168
+ const hasError = risks.some((r) => r.severity === "error");
169
+ const casUnsupported = risks.some((r) => r.code === "cas-unsupported");
170
+ const slowWrite = risks.some((r) => r.code === "slow-write-p99");
171
+ const recommended = [];
172
+ if (!hasError) {
173
+ if (!slowWrite) recommended.push("primary");
174
+ if (!casUnsupported) recommended.push("sync-peer");
175
+ recommended.push("backup", "archive");
176
+ }
177
+ return { recommended, risks };
178
+ }
179
+ function envelope(version, seed = 0) {
180
+ const data = `probe-${version}-${seed}`.padEnd(64, "x");
181
+ const b64 = base64Encode(data);
182
+ return {
183
+ _noydb: 1,
184
+ _v: version,
185
+ _ts: (/* @__PURE__ */ new Date()).toISOString(),
186
+ _iv: base64Encode("0".repeat(12)),
187
+ _data: b64
188
+ };
189
+ }
190
+ function base64Encode(s) {
191
+ if (typeof Buffer !== "undefined") return Buffer.from(s, "utf-8").toString("base64");
192
+ return btoa(unescape(encodeURIComponent(s)));
193
+ }
194
/**
 * Approximate wire size (JSON characters) of a single probe envelope,
 * used as the bytes-per-push estimate in the sync axis.
 */
function approxEnvelopeBytes() {
  const sample = envelope(1);
  return JSON.stringify(sample).length;
}
197
/**
 * Rough byte estimate for a vault snapshot: the sum of JSON-serialized
 * lengths of every record across every collection.
 */
function estimateBytes(snapshot) {
  return Object.values(snapshot).reduce(
    (sum, coll) =>
      sum + Object.values(coll).reduce((s, rec) => s + JSON.stringify(rec).length, 0),
    0
  );
}
206
/**
 * Summarize latency samples into { count, p50, p99, max }.
 * An empty sample set yields all zeros rather than NaN/undefined.
 * The input array is never mutated (sorting happens on a copy).
 */
function stats(samples) {
  if (samples.length === 0) {
    return { count: 0, p50: 0, p99: 0, max: 0 };
  }
  const sorted = samples.slice().sort((a, b) => a - b);
  const max = sorted[sorted.length - 1];
  return {
    count: sorted.length,
    p50: percentile(sorted, 0.5),
    p99: percentile(sorted, 0.99),
    max
  };
}
216
/**
 * Nearest-rank percentile over a pre-sorted ascending array.
 * q is in [0, 1]; the index floor(q * n) is clamped to the last slot.
 */
function percentile(sorted, q) {
  const rank = Math.floor(q * sorted.length);
  return sorted[Math.min(rank, sorted.length - 1)];
}
220
/**
 * Delete every probe record under vault/collection, swallowing all
 * failures — a cleanup problem must never turn into a probe failure.
 * Individual delete rejections are ignored per-id; a failing list()
 * aborts the cleanup silently.
 */
async function bestEffortCleanup(store, vault, collection) {
  try {
    const ids = await store.list(vault, collection);
    const deletions = ids.map((id) =>
      store.delete(vault, collection, id).catch(() => {
        /* ignore individual delete failures */
      })
    );
    await Promise.all(deletions);
  } catch {
    // Silent — cleanup failure is not a probe failure
  }
}
228
+
229
+ // src/topology.ts
230
/**
 * Probe the primary store plus every configured sync target, then
 * layer topology-level rules across the whole graph. `recommended`
 * is true only when no error-severity risk surfaced anywhere
 * (per-store suitability or topology rules).
 */
async function probeTopology(options) {
  const t0 = Date.now();
  const expectedUsers = options.expectedUsers ?? 1;

  const primary = await runStoreProbe(options.store, options);

  const targets = [];
  for (const entry of options.sync ?? []) {
    const label = entry.label ?? entry.store.name ?? entry.role;
    // Each target probes into its own vault so runs don't collide.
    const report = await runStoreProbe(entry.store, { ...options, vault: `_probe-${label}` });
    targets.push({ ...report, role: entry.role, label });
  }

  const topology = evaluateTopology(options.store, primary, targets, options.sync, expectedUsers);
  const errorCount = [
    ...primary.suitability.risks,
    ...targets.flatMap((t) => t.suitability.risks),
    ...topology
  ].filter((r) => r.severity === "error").length;

  return {
    primary,
    targets,
    topology,
    recommended: errorCount === 0,
    durationMs: Date.now() - t0,
    probedAt: new Date().toISOString()
  };
}
255
/**
 * Cross-store topology rules. Per-store suitability is already covered
 * by runStoreProbe; these checks only make sense over the whole graph:
 * bundle-shaped sync peers, non-atomic-CAS peers under concurrency,
 * a primary far slower than its peer, and pull-configured archives.
 */
function evaluateTopology(_primaryStore, primary, targets, syncTargets = [], expectedUsers) {
  const risks = [];
  targets.forEach((target, i) => {
    const input = syncTargets[i];
    const label = target.label;
    const isSyncPeer = target.role === "sync-peer";

    // Bundle-shaped backends (drive/webdav/git) fit push-only roles
    // better than sync-peer; detection is by name heuristic.
    if (isSyncPeer && looksLikeBundleStore(target.store)) {
      risks.push({
        target: label,
        code: "bundle-as-sync-peer",
        severity: "warn",
        message: `"${label}" looks bundle-shaped — use role 'backup' or 'archive' for push-only semantics`
      });
    }

    if (isSyncPeer && expectedUsers > 1 && target.capabilities?.casAtomic === false) {
      risks.push({
        target: label,
        code: "no-atomic-cas-sync-peer",
        severity: "error",
        message: `"${label}" has casAtomic:false — unsafe as sync-peer for ${expectedUsers} concurrent users`
      });
    }

    if (isSyncPeer && primary.write.serial.p99 > target.write.serial.p99 * 2) {
      risks.push({
        target: label,
        code: "primary-slower-than-peer",
        severity: "warn",
        message: `Primary p99 ${primary.write.serial.p99}ms is >2× peer "${label}" p99 ${target.write.serial.p99}ms — unusual topology`
      });
    }

    if (target.role === "archive" && input?.hasPullPolicy === true) {
      risks.push({
        target: label,
        code: "archive-pull-configured",
        severity: "error",
        message: `"${label}" is an archive target but has a pull policy — archives are push-only`
      });
    }
  });
  return risks;
}
295
/**
 * Name-based heuristic for bundle-shaped stores: the (lowercased)
 * store name contains any of drive / webdav / git / bundle.
 */
function looksLikeBundleStore(name) {
  const lowered = name.toLowerCase();
  return ["drive", "webdav", "git", "bundle"].some((marker) => lowered.includes(marker));
}
299
+ export {
300
+ probeTopology,
301
+ runStoreProbe
302
+ };
303
+ //# sourceMappingURL=index.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/probe.ts","../src/topology.ts"],"sourcesContent":["/**\n * `runStoreProbe()` — setup-time suitability test for a `NoydbStore`.\n *\n * Five measurement axes (D1-D5 per spec in issue ):\n *\n * | Axis | Measures |\n * |------|----------|\n * | D1 — Write responsiveness | serial + concurrent put p50/p99, cold-start |\n * | D2 — Conflict integrity | N parallel puts with same `expectedVersion` |\n * | D3 — Hydration cost | `loadAll()` time and record-size footprint |\n * | D4 — Sync economics | single + batch `put` cost, bytes/push |\n * | D5 — Network resilience | `ping()` support + latency |\n *\n * Writes happen to an isolated `_probe / _probe` collection that the\n * probe cleans up on completion. The probe does not mutate real\n * application data — but if a probe is interrupted, stray envelopes\n * may remain under that collection. Adopters can safely delete\n * anything under the `_probe` vault.\n *\n * The probe never decrypts anything. It operates at the `NoydbStore`\n * layer with handcrafted {@link EncryptedEnvelope}-shaped payloads — a\n * probe run produces no keyring, no DEK, and no plaintext the store\n * can see.\n *\n * @module\n */\nimport type { EncryptedEnvelope, NoydbStore, StoreCapabilities, VaultSnapshot } from '@noy-db/hub'\nimport type {\n CasAxis,\n HydrationAxis,\n LatencyStats,\n NetworkAxis,\n ProbeOptions,\n ProbeRisk,\n ProbeRole,\n StoreProbeReport,\n SuitabilityScore,\n SyncAxis,\n WriteAxis,\n} from './types.js'\n\nconst PROBE_VAULT = 'probe-vault'\nconst PROBE_COLLECTION = 'probe-benchmark'\n\n/**\n * Run the full 5-axis probe against `store`. 
Returns a structured\n * report with per-axis measurements and a {@link SuitabilityScore}.\n *\n * The probe is **idempotent-per-run**: it picks unique record IDs per\n * invocation using a monotonically increasing counter seeded by\n * `Date.now()`, so concurrent probe runs against the same store do\n * not collide.\n */\nexport async function runStoreProbe(\n store: NoydbStore,\n options: ProbeOptions = {},\n): Promise<StoreProbeReport> {\n const started = Date.now()\n const vault = options.vault ?? PROBE_VAULT\n const collection = options.collection ?? PROBE_COLLECTION\n const runId = Date.now().toString(36)\n\n const write = await probeWrite(store, vault, collection, runId, options)\n const cas = await probeCas(store, vault, collection, runId, options)\n const hydration = await probeHydration(store, vault, collection, runId, options)\n const sync = await probeSync(store, vault, collection, runId, options)\n const network = await probeNetwork(store)\n\n const capabilities = options.capabilities ?? null\n const risks = collectRisks(options, write, cas, hydration, sync, network, capabilities)\n const suitability = score(risks)\n\n await bestEffortCleanup(store, vault, collection)\n\n return {\n store: store.name ?? 'unnamed',\n capabilities,\n write, cas, hydration, sync, network,\n suitability,\n durationMs: Date.now() - started,\n probedAt: new Date().toISOString(),\n }\n}\n\n// ── D1 · write latency ────────────────────────────────────────────────────\n\nasync function probeWrite(\n store: NoydbStore,\n vault: string,\n collection: string,\n runId: string,\n options: ProbeOptions,\n): Promise<WriteAxis> {\n const n = options.writeSampleSize ?? 
20\n\n // Cold start — single isolated write\n const coldId = `w-${runId}-cold`\n const coldStart = Date.now()\n await store.put(vault, collection, coldId, envelope(1))\n const coldMs = Date.now() - coldStart\n\n // Serial sample\n const serialSamples: number[] = []\n for (let i = 0; i < n; i++) {\n const t0 = Date.now()\n await store.put(vault, collection, `w-${runId}-s-${i}`, envelope(1))\n serialSamples.push(Date.now() - t0)\n }\n\n // Concurrent sample: 5 batches of 10, measured per-batch\n const concurrentSamples: number[] = []\n for (let batch = 0; batch < 5; batch++) {\n const t0 = Date.now()\n await Promise.all(\n Array.from({ length: 10 }, (_, j) =>\n store.put(vault, collection, `w-${runId}-c-${batch}-${j}`, envelope(1)),\n ),\n )\n concurrentSamples.push(Date.now() - t0)\n }\n\n return {\n coldStart: coldMs,\n serial: stats(serialSamples),\n concurrent: stats(concurrentSamples),\n }\n}\n\n// ── D2 · CAS integrity ────────────────────────────────────────────────────\n\nasync function probeCas(\n store: NoydbStore,\n vault: string,\n collection: string,\n runId: string,\n options: ProbeOptions,\n): Promise<CasAxis> {\n const concurrency = options.casConcurrency ?? 10\n const id = `cas-${runId}`\n\n // Seed with version 1\n await store.put(vault, collection, id, envelope(1))\n\n // Fire N concurrent puts all with expectedVersion=1. For a casAtomic\n // store: exactly one should succeed; the rest should reject with\n // ConflictError.\n const settled = await Promise.allSettled(\n Array.from({ length: concurrency }, (_, i) =>\n store.put(vault, collection, id, envelope(2, i), 1),\n ),\n )\n const successes = settled.filter((r) => r.status === 'fulfilled').length\n const rejections = settled.length - successes\n\n // What the store promised\n const declaredAtomic = options.capabilities?.casAtomic ?? null\n const expected = declaredAtomic === false ? 
'multiple-ok' : 'exactly-one'\n\n return { concurrent: concurrency, successes, rejections, expected }\n}\n\n// ── D3 · hydration ────────────────────────────────────────────────────────\n\nasync function probeHydration(\n store: NoydbStore,\n vault: string,\n collection: string,\n runId: string,\n options: ProbeOptions,\n): Promise<HydrationAxis> {\n const records = options.hydrationRecords ?? 100\n\n // Fill the probe collection to the target record count. Writes from\n // D1/D2 already contributed some envelopes; we top up the rest.\n const existing = await store.list(vault, collection)\n for (let i = existing.length; i < records; i++) {\n await store.put(vault, collection, `h-${runId}-${i}`, envelope(1))\n }\n\n const t0 = Date.now()\n const snapshot = await store.loadAll(vault)\n const loadAllMs = Date.now() - t0\n\n const totalBytes = estimateBytes(snapshot)\n const loaded = Object.values(snapshot).reduce(\n (sum, coll) => sum + Object.keys(coll).length,\n 0,\n )\n const perRecordBytes = loaded > 0 ? Math.round(totalBytes / loaded) : 0\n\n return { records: loaded, loadAllMs, totalBytes, perRecordBytes }\n}\n\n// ── D4 · sync economics ───────────────────────────────────────────────────\n\nasync function probeSync(\n store: NoydbStore,\n vault: string,\n collection: string,\n runId: string,\n options: ProbeOptions,\n): Promise<SyncAxis> {\n const batchSize = options.syncBatchSize ?? 
50\n\n // Single-record push\n const singleStart = Date.now()\n await store.put(vault, collection, `sync-${runId}-single`, envelope(1))\n const singlePushMs = Date.now() - singleStart\n\n // Batch push (simulated — sequential writes since the contract has no\n // bulk put; saveAll would also rewrite existing data)\n const t0 = Date.now()\n for (let i = 0; i < batchSize; i++) {\n await store.put(vault, collection, `sync-${runId}-b-${i}`, envelope(1))\n }\n const batchPushMs = Date.now() - t0\n\n // Rough bytes-per-push — envelope size plus keys\n const bytesPerPush = approxEnvelopeBytes()\n\n return { singlePushMs, batchPushMs, batchSize, bytesPerPush }\n}\n\n// ── D5 · network resilience ───────────────────────────────────────────────\n\nasync function probeNetwork(store: NoydbStore): Promise<NetworkAxis> {\n if (typeof store.ping !== 'function') {\n return { pingSupported: false, pingMs: null }\n }\n const t0 = Date.now()\n try {\n await store.ping()\n return { pingSupported: true, pingMs: Date.now() - t0 }\n } catch {\n return { pingSupported: true, pingMs: null }\n }\n}\n\n// ── Risk aggregation + scoring ────────────────────────────────────────────\n\nfunction collectRisks(\n options: ProbeOptions,\n write: WriteAxis,\n cas: CasAxis,\n hydration: HydrationAxis,\n sync: SyncAxis,\n network: NetworkAxis,\n capabilities: StoreCapabilities | null,\n): ProbeRisk[] {\n const risks: ProbeRisk[] = []\n const slowWriteMs = options.slowWriteMs ?? 100\n const slowHydrationMs = options.slowHydrationMs ?? 500\n const slowSyncMs = options.slowSyncMs ?? 
250\n\n if (write.serial.p99 > slowWriteMs) {\n risks.push({\n code: 'slow-write-p99',\n severity: 'warn',\n message: `Serial write p99 ${write.serial.p99}ms exceeds threshold ${slowWriteMs}ms`,\n })\n }\n if (hydration.loadAllMs > slowHydrationMs) {\n risks.push({\n code: 'slow-hydration',\n severity: 'warn',\n message: `loadAll(${hydration.records}) took ${hydration.loadAllMs}ms (threshold ${slowHydrationMs}ms)`,\n })\n }\n if (sync.singlePushMs > slowSyncMs) {\n risks.push({\n code: 'slow-sync',\n severity: 'warn',\n message: `Single-record push ${sync.singlePushMs}ms exceeds ${slowSyncMs}ms`,\n })\n }\n if (capabilities?.casAtomic === true && cas.successes > 1) {\n risks.push({\n code: 'cas-mismatch',\n severity: 'error',\n message: `Store declared casAtomic:true but ${cas.successes}/${cas.concurrent} concurrent puts succeeded (expected exactly 1)`,\n })\n }\n if (capabilities?.casAtomic === false) {\n risks.push({\n code: 'cas-unsupported',\n severity: 'warn',\n message: 'Store lacks atomic CAS — unsafe for multi-writer sync-peer role',\n })\n }\n if (!network.pingSupported) {\n risks.push({\n code: 'no-ping',\n severity: 'warn',\n message: 'Store has no ping() — runtime monitor will rely on list() as liveness check',\n })\n }\n\n return risks\n}\n\nfunction score(risks: readonly ProbeRisk[]): SuitabilityScore {\n const hasError = risks.some((r) => r.severity === 'error')\n const casUnsupported = risks.some((r) => r.code === 'cas-unsupported')\n const slowWrite = risks.some((r) => r.code === 'slow-write-p99')\n\n const recommended: ProbeRole[] = []\n if (!hasError) {\n if (!slowWrite) recommended.push('primary')\n if (!casUnsupported) recommended.push('sync-peer')\n recommended.push('backup', 'archive')\n }\n return { recommended, risks }\n}\n\n// ── helpers ───────────────────────────────────────────────────────────────\n\n/** Build a synthetic envelope with a tiny ciphertext payload. 
Safe —\n * the store never decrypts, so the `_data` just needs to parse through\n * whatever JSON round-tripping the store does. */\nfunction envelope(version: number, seed = 0): EncryptedEnvelope {\n const data = `probe-${version}-${seed}`.padEnd(64, 'x')\n // base64-encode a deterministic marker so stores that assert\n // base64-shape on persist don't explode\n const b64 = base64Encode(data)\n return {\n _noydb: 1,\n _v: version,\n _ts: new Date().toISOString(),\n _iv: base64Encode('0'.repeat(12)),\n _data: b64,\n }\n}\n\nfunction base64Encode(s: string): string {\n if (typeof Buffer !== 'undefined') return Buffer.from(s, 'utf-8').toString('base64')\n return btoa(unescape(encodeURIComponent(s)))\n}\n\nfunction approxEnvelopeBytes(): number {\n return JSON.stringify(envelope(1)).length\n}\n\nfunction estimateBytes(snapshot: VaultSnapshot): number {\n let total = 0\n for (const coll of Object.values(snapshot)) {\n for (const rec of Object.values(coll)) {\n total += JSON.stringify(rec).length\n }\n }\n return total\n}\n\nfunction stats(samples: number[]): LatencyStats {\n if (samples.length === 0) return { count: 0, p50: 0, p99: 0, max: 0 }\n const sorted = [...samples].sort((a, b) => a - b)\n return {\n count: sorted.length,\n p50: percentile(sorted, 0.5),\n p99: percentile(sorted, 0.99),\n max: sorted[sorted.length - 1]!,\n }\n}\n\nfunction percentile(sorted: number[], q: number): number {\n const idx = Math.min(sorted.length - 1, Math.floor(q * sorted.length))\n return sorted[idx]!\n}\n\nasync function bestEffortCleanup(\n store: NoydbStore,\n vault: string,\n collection: string,\n): Promise<void> {\n try {\n const ids = await store.list(vault, collection)\n await Promise.all(ids.map((id) => store.delete(vault, collection, id).catch(() => {})))\n } catch {\n // Silent — cleanup failure is not a probe failure\n }\n}\n","/**\n * `probeTopology()` — multi-backend health + suitability check.\n *\n * Runs {@link runStoreProbe} independently on the primary store and\n 
* every sync target, then layers topology-level rules that only make\n * sense across the whole graph:\n *\n * | Rule | Condition | Severity |\n * |------|-----------|----------|\n * | `bundle-as-sync-peer` | Bundle-shaped store used as `sync-peer` | warn |\n * | `no-atomic-cas-sync-peer` | Non-atomic-CAS store used as `sync-peer` with >1 user | error |\n * | `primary-slower-than-peer` | Primary p99 > sync-peer p99 × 2 | warn |\n * | `archive-pull-configured` | `archive` target declared with a pull policy | error |\n *\n * Only one probe pass per store — if two targets happen to point at\n * the same backend, both get probed (the target identifies the\n * configuration, not the backend instance).\n *\n * @module\n */\nimport type { NoydbStore } from '@noy-db/hub'\nimport { runStoreProbe } from './probe.js'\nimport type {\n StoreProbeReport,\n TopologyProbeOptions,\n TopologyProbeReport,\n TopologyRisk,\n TopologyTargetReport,\n} from './types.js'\n\nexport async function probeTopology(\n options: TopologyProbeOptions,\n): Promise<TopologyProbeReport> {\n const started = Date.now()\n const expectedUsers = options.expectedUsers ?? 1\n\n const primary = await runStoreProbe(options.store, options)\n const targets: TopologyTargetReport[] = []\n\n for (const t of options.sync ?? []) {\n const label = t.label ?? t.store.name ?? 
t.role\n const report = await runStoreProbe(t.store, { ...options, vault: `_probe-${label}` })\n targets.push({ ...report, role: t.role, label })\n }\n\n const topology = evaluateTopology(options.store, primary, targets, options.sync, expectedUsers)\n const allErrors = [\n ...primary.suitability.risks,\n ...targets.flatMap((t) => t.suitability.risks),\n ...topology,\n ].filter((r) => r.severity === 'error')\n\n return {\n primary, targets, topology,\n recommended: allErrors.length === 0,\n durationMs: Date.now() - started,\n probedAt: new Date().toISOString(),\n }\n}\n\nfunction evaluateTopology(\n _primaryStore: NoydbStore,\n primary: StoreProbeReport,\n targets: readonly TopologyTargetReport[],\n syncTargets: TopologyProbeOptions['sync'] = [],\n expectedUsers: number,\n): TopologyRisk[] {\n const risks: TopologyRisk[] = []\n\n targets.forEach((target, i) => {\n const input = syncTargets[i]\n const label = target.label\n\n // Bundle-shaped stores (drive/webdav/git) don't have atomic CAS\n // and surface as sync-peer-unsuitable. 
For we detect by\n // name heuristics; future hub work can annotate StoreCapabilities\n // with a `shape: 'kv' | 'bundle'` field.\n if (target.role === 'sync-peer' && looksLikeBundleStore(target.store)) {\n risks.push({\n target: label,\n code: 'bundle-as-sync-peer',\n severity: 'warn',\n message: `\"${label}\" looks bundle-shaped — use role 'backup' or 'archive' for push-only semantics`,\n })\n }\n\n if (\n target.role === 'sync-peer' &&\n expectedUsers > 1 &&\n target.capabilities?.casAtomic === false\n ) {\n risks.push({\n target: label,\n code: 'no-atomic-cas-sync-peer',\n severity: 'error',\n message: `\"${label}\" has casAtomic:false — unsafe as sync-peer for ${expectedUsers} concurrent users`,\n })\n }\n\n if (target.role === 'sync-peer' && primary.write.serial.p99 > target.write.serial.p99 * 2) {\n risks.push({\n target: label,\n code: 'primary-slower-than-peer',\n severity: 'warn',\n message: `Primary p99 ${primary.write.serial.p99}ms is >2× peer \"${label}\" p99 ${target.write.serial.p99}ms — unusual topology`,\n })\n }\n\n if (target.role === 'archive' && input?.hasPullPolicy === true) {\n risks.push({\n target: label,\n code: 'archive-pull-configured',\n severity: 'error',\n message: `\"${label}\" is an archive target but has a pull policy — archives are push-only`,\n })\n }\n })\n\n return risks\n}\n\n/** Heuristic bundle detection: name includes 'drive' / 'webdav' / 'git'\n * / 'bundle'. Adopters who wrap a bundle store under a custom name\n * can silence this via `acknowledgeRisks: ['bundle-as-sync-peer']`. 
*/\nfunction looksLikeBundleStore(name: string): boolean {\n const n = name.toLowerCase()\n return /drive|webdav|git|bundle/.test(n)\n}\n"],"mappings":";AAyCA,IAAM,cAAc;AACpB,IAAM,mBAAmB;AAWzB,eAAsB,cACpB,OACA,UAAwB,CAAC,GACE;AAC3B,QAAM,UAAU,KAAK,IAAI;AACzB,QAAM,QAAQ,QAAQ,SAAS;AAC/B,QAAM,aAAa,QAAQ,cAAc;AACzC,QAAM,QAAQ,KAAK,IAAI,EAAE,SAAS,EAAE;AAEpC,QAAM,QAAQ,MAAM,WAAW,OAAO,OAAO,YAAY,OAAO,OAAO;AACvE,QAAM,MAAM,MAAM,SAAS,OAAO,OAAO,YAAY,OAAO,OAAO;AACnE,QAAM,YAAY,MAAM,eAAe,OAAO,OAAO,YAAY,OAAO,OAAO;AAC/E,QAAM,OAAO,MAAM,UAAU,OAAO,OAAO,YAAY,OAAO,OAAO;AACrE,QAAM,UAAU,MAAM,aAAa,KAAK;AAExC,QAAM,eAAe,QAAQ,gBAAgB;AAC7C,QAAM,QAAQ,aAAa,SAAS,OAAO,KAAK,WAAW,MAAM,SAAS,YAAY;AACtF,QAAM,cAAc,MAAM,KAAK;AAE/B,QAAM,kBAAkB,OAAO,OAAO,UAAU;AAEhD,SAAO;AAAA,IACL,OAAO,MAAM,QAAQ;AAAA,IACrB;AAAA,IACA;AAAA,IAAO;AAAA,IAAK;AAAA,IAAW;AAAA,IAAM;AAAA,IAC7B;AAAA,IACA,YAAY,KAAK,IAAI,IAAI;AAAA,IACzB,WAAU,oBAAI,KAAK,GAAE,YAAY;AAAA,EACnC;AACF;AAIA,eAAe,WACb,OACA,OACA,YACA,OACA,SACoB;AACpB,QAAM,IAAI,QAAQ,mBAAmB;AAGrC,QAAM,SAAS,KAAK,KAAK;AACzB,QAAM,YAAY,KAAK,IAAI;AAC3B,QAAM,MAAM,IAAI,OAAO,YAAY,QAAQ,SAAS,CAAC,CAAC;AACtD,QAAM,SAAS,KAAK,IAAI,IAAI;AAG5B,QAAM,gBAA0B,CAAC;AACjC,WAAS,IAAI,GAAG,IAAI,GAAG,KAAK;AAC1B,UAAM,KAAK,KAAK,IAAI;AACpB,UAAM,MAAM,IAAI,OAAO,YAAY,KAAK,KAAK,MAAM,CAAC,IAAI,SAAS,CAAC,CAAC;AACnE,kBAAc,KAAK,KAAK,IAAI,IAAI,EAAE;AAAA,EACpC;AAGA,QAAM,oBAA8B,CAAC;AACrC,WAAS,QAAQ,GAAG,QAAQ,GAAG,SAAS;AACtC,UAAM,KAAK,KAAK,IAAI;AACpB,UAAM,QAAQ;AAAA,MACZ,MAAM;AAAA,QAAK,EAAE,QAAQ,GAAG;AAAA,QAAG,CAAC,GAAG,MAC7B,MAAM,IAAI,OAAO,YAAY,KAAK,KAAK,MAAM,KAAK,IAAI,CAAC,IAAI,SAAS,CAAC,CAAC;AAAA,MACxE;AAAA,IACF;AACA,sBAAkB,KAAK,KAAK,IAAI,IAAI,EAAE;AAAA,EACxC;AAEA,SAAO;AAAA,IACL,WAAW;AAAA,IACX,QAAQ,MAAM,aAAa;AAAA,IAC3B,YAAY,MAAM,iBAAiB;AAAA,EACrC;AACF;AAIA,eAAe,SACb,OACA,OACA,YACA,OACA,SACkB;AAClB,QAAM,cAAc,QAAQ,kBAAkB;AAC9C,QAAM,KAAK,OAAO,KAAK;AAGvB,QAAM,MAAM,IAAI,OAAO,YAAY,IAAI,SAAS,CAAC,CAAC;AAKlD,QAAM,UAAU,MAAM,QAAQ;AAAA,IAC5B,MAAM;AAAA,MAAK,EAAE,QAAQ,YAAY;AAAA,MAAG,CAAC,GAAG,MACtC,MAAM,IAAI,OAAO,YAAY,IAAI,SAAS,GAAG,CAAC,G
AAG,CAAC;AAAA,IACpD;AAAA,EACF;AACA,QAAM,YAAY,QAAQ,OAAO,CAAC,MAAM,EAAE,WAAW,WAAW,EAAE;AAClE,QAAM,aAAa,QAAQ,SAAS;AAGpC,QAAM,iBAAiB,QAAQ,cAAc,aAAa;AAC1D,QAAM,WAAW,mBAAmB,QAAQ,gBAAgB;AAE5D,SAAO,EAAE,YAAY,aAAa,WAAW,YAAY,SAAS;AACpE;AAIA,eAAe,eACb,OACA,OACA,YACA,OACA,SACwB;AACxB,QAAM,UAAU,QAAQ,oBAAoB;AAI5C,QAAM,WAAW,MAAM,MAAM,KAAK,OAAO,UAAU;AACnD,WAAS,IAAI,SAAS,QAAQ,IAAI,SAAS,KAAK;AAC9C,UAAM,MAAM,IAAI,OAAO,YAAY,KAAK,KAAK,IAAI,CAAC,IAAI,SAAS,CAAC,CAAC;AAAA,EACnE;AAEA,QAAM,KAAK,KAAK,IAAI;AACpB,QAAM,WAAW,MAAM,MAAM,QAAQ,KAAK;AAC1C,QAAM,YAAY,KAAK,IAAI,IAAI;AAE/B,QAAM,aAAa,cAAc,QAAQ;AACzC,QAAM,SAAS,OAAO,OAAO,QAAQ,EAAE;AAAA,IACrC,CAAC,KAAK,SAAS,MAAM,OAAO,KAAK,IAAI,EAAE;AAAA,IACvC;AAAA,EACF;AACA,QAAM,iBAAiB,SAAS,IAAI,KAAK,MAAM,aAAa,MAAM,IAAI;AAEtE,SAAO,EAAE,SAAS,QAAQ,WAAW,YAAY,eAAe;AAClE;AAIA,eAAe,UACb,OACA,OACA,YACA,OACA,SACmB;AACnB,QAAM,YAAY,QAAQ,iBAAiB;AAG3C,QAAM,cAAc,KAAK,IAAI;AAC7B,QAAM,MAAM,IAAI,OAAO,YAAY,QAAQ,KAAK,WAAW,SAAS,CAAC,CAAC;AACtE,QAAM,eAAe,KAAK,IAAI,IAAI;AAIlC,QAAM,KAAK,KAAK,IAAI;AACpB,WAAS,IAAI,GAAG,IAAI,WAAW,KAAK;AAClC,UAAM,MAAM,IAAI,OAAO,YAAY,QAAQ,KAAK,MAAM,CAAC,IAAI,SAAS,CAAC,CAAC;AAAA,EACxE;AACA,QAAM,cAAc,KAAK,IAAI,IAAI;AAGjC,QAAM,eAAe,oBAAoB;AAEzC,SAAO,EAAE,cAAc,aAAa,WAAW,aAAa;AAC9D;AAIA,eAAe,aAAa,OAAyC;AACnE,MAAI,OAAO,MAAM,SAAS,YAAY;AACpC,WAAO,EAAE,eAAe,OAAO,QAAQ,KAAK;AAAA,EAC9C;AACA,QAAM,KAAK,KAAK,IAAI;AACpB,MAAI;AACF,UAAM,MAAM,KAAK;AACjB,WAAO,EAAE,eAAe,MAAM,QAAQ,KAAK,IAAI,IAAI,GAAG;AAAA,EACxD,QAAQ;AACN,WAAO,EAAE,eAAe,MAAM,QAAQ,KAAK;AAAA,EAC7C;AACF;AAIA,SAAS,aACP,SACA,OACA,KACA,WACA,MACA,SACA,cACa;AACb,QAAM,QAAqB,CAAC;AAC5B,QAAM,cAAc,QAAQ,eAAe;AAC3C,QAAM,kBAAkB,QAAQ,mBAAmB;AACnD,QAAM,aAAa,QAAQ,cAAc;AAEzC,MAAI,MAAM,OAAO,MAAM,aAAa;AAClC,UAAM,KAAK;AAAA,MACT,MAAM;AAAA,MACN,UAAU;AAAA,MACV,SAAS,oBAAoB,MAAM,OAAO,GAAG,wBAAwB,WAAW;AAAA,IAClF,CAAC;AAAA,EACH;AACA,MAAI,UAAU,YAAY,iBAAiB;AACzC,UAAM,KAAK;AAAA,MACT,MAAM;AAAA,MACN,UAAU;AAAA,MACV,SAAS,WAAW,UAAU,OAAO,UAAU,UAAU,SAAS,iBAAiB,eAAe;AAAA,IACpG,CAAC;AAAA,EACH;AACA,MAAI,KAAK,eAAe,YAAY;AAClC,UAAM,KAAK;AAAA,MACT,MAAM;
AAAA,MACN,UAAU;AAAA,MACV,SAAS,sBAAsB,KAAK,YAAY,cAAc,UAAU;AAAA,IAC1E,CAAC;AAAA,EACH;AACA,MAAI,cAAc,cAAc,QAAQ,IAAI,YAAY,GAAG;AACzD,UAAM,KAAK;AAAA,MACT,MAAM;AAAA,MACN,UAAU;AAAA,MACV,SAAS,qCAAqC,IAAI,SAAS,IAAI,IAAI,UAAU;AAAA,IAC/E,CAAC;AAAA,EACH;AACA,MAAI,cAAc,cAAc,OAAO;AACrC,UAAM,KAAK;AAAA,MACT,MAAM;AAAA,MACN,UAAU;AAAA,MACV,SAAS;AAAA,IACX,CAAC;AAAA,EACH;AACA,MAAI,CAAC,QAAQ,eAAe;AAC1B,UAAM,KAAK;AAAA,MACT,MAAM;AAAA,MACN,UAAU;AAAA,MACV,SAAS;AAAA,IACX,CAAC;AAAA,EACH;AAEA,SAAO;AACT;AAEA,SAAS,MAAM,OAA+C;AAC5D,QAAM,WAAW,MAAM,KAAK,CAAC,MAAM,EAAE,aAAa,OAAO;AACzD,QAAM,iBAAiB,MAAM,KAAK,CAAC,MAAM,EAAE,SAAS,iBAAiB;AACrE,QAAM,YAAY,MAAM,KAAK,CAAC,MAAM,EAAE,SAAS,gBAAgB;AAE/D,QAAM,cAA2B,CAAC;AAClC,MAAI,CAAC,UAAU;AACb,QAAI,CAAC,UAAW,aAAY,KAAK,SAAS;AAC1C,QAAI,CAAC,eAAgB,aAAY,KAAK,WAAW;AACjD,gBAAY,KAAK,UAAU,SAAS;AAAA,EACtC;AACA,SAAO,EAAE,aAAa,MAAM;AAC9B;AAOA,SAAS,SAAS,SAAiB,OAAO,GAAsB;AAC9D,QAAM,OAAO,SAAS,OAAO,IAAI,IAAI,GAAG,OAAO,IAAI,GAAG;AAGtD,QAAM,MAAM,aAAa,IAAI;AAC7B,SAAO;AAAA,IACL,QAAQ;AAAA,IACR,IAAI;AAAA,IACJ,MAAK,oBAAI,KAAK,GAAE,YAAY;AAAA,IAC5B,KAAK,aAAa,IAAI,OAAO,EAAE,CAAC;AAAA,IAChC,OAAO;AAAA,EACT;AACF;AAEA,SAAS,aAAa,GAAmB;AACvC,MAAI,OAAO,WAAW,YAAa,QAAO,OAAO,KAAK,GAAG,OAAO,EAAE,SAAS,QAAQ;AACnF,SAAO,KAAK,SAAS,mBAAmB,CAAC,CAAC,CAAC;AAC7C;AAEA,SAAS,sBAA8B;AACrC,SAAO,KAAK,UAAU,SAAS,CAAC,CAAC,EAAE;AACrC;AAEA,SAAS,cAAc,UAAiC;AACtD,MAAI,QAAQ;AACZ,aAAW,QAAQ,OAAO,OAAO,QAAQ,GAAG;AAC1C,eAAW,OAAO,OAAO,OAAO,IAAI,GAAG;AACrC,eAAS,KAAK,UAAU,GAAG,EAAE;AAAA,IAC/B;AAAA,EACF;AACA,SAAO;AACT;AAEA,SAAS,MAAM,SAAiC;AAC9C,MAAI,QAAQ,WAAW,EAAG,QAAO,EAAE,OAAO,GAAG,KAAK,GAAG,KAAK,GAAG,KAAK,EAAE;AACpE,QAAM,SAAS,CAAC,GAAG,OAAO,EAAE,KAAK,CAAC,GAAG,MAAM,IAAI,CAAC;AAChD,SAAO;AAAA,IACL,OAAO,OAAO;AAAA,IACd,KAAK,WAAW,QAAQ,GAAG;AAAA,IAC3B,KAAK,WAAW,QAAQ,IAAI;AAAA,IAC5B,KAAK,OAAO,OAAO,SAAS,CAAC;AAAA,EAC/B;AACF;AAEA,SAAS,WAAW,QAAkB,GAAmB;AACvD,QAAM,MAAM,KAAK,IAAI,OAAO,SAAS,GAAG,KAAK,MAAM,IAAI,OAAO,MAAM,CAAC;AACrE,SAAO,OAAO,GAAG;AACnB;AAEA,eAAe,kBACb,OACA,OACA,YACe;AACf,MAAI;AACF,UAAM,MAAM,MAAM,MAAM,KAAK,OAAO,UAAU;AAC9C,
UAAM,QAAQ,IAAI,IAAI,IAAI,CAAC,OAAO,MAAM,OAAO,OAAO,YAAY,EAAE,EAAE,MAAM,MAAM;AAAA,IAAC,CAAC,CAAC,CAAC;AAAA,EACxF,QAAQ;AAAA,EAER;AACF;;;AC7VA,eAAsB,cACpB,SAC8B;AAC9B,QAAM,UAAU,KAAK,IAAI;AACzB,QAAM,gBAAgB,QAAQ,iBAAiB;AAE/C,QAAM,UAAU,MAAM,cAAc,QAAQ,OAAO,OAAO;AAC1D,QAAM,UAAkC,CAAC;AAEzC,aAAW,KAAK,QAAQ,QAAQ,CAAC,GAAG;AAClC,UAAM,QAAQ,EAAE,SAAS,EAAE,MAAM,QAAQ,EAAE;AAC3C,UAAM,SAAS,MAAM,cAAc,EAAE,OAAO,EAAE,GAAG,SAAS,OAAO,UAAU,KAAK,GAAG,CAAC;AACpF,YAAQ,KAAK,EAAE,GAAG,QAAQ,MAAM,EAAE,MAAM,MAAM,CAAC;AAAA,EACjD;AAEA,QAAM,WAAW,iBAAiB,QAAQ,OAAO,SAAS,SAAS,QAAQ,MAAM,aAAa;AAC9F,QAAM,YAAY;AAAA,IAChB,GAAG,QAAQ,YAAY;AAAA,IACvB,GAAG,QAAQ,QAAQ,CAAC,MAAM,EAAE,YAAY,KAAK;AAAA,IAC7C,GAAG;AAAA,EACL,EAAE,OAAO,CAAC,MAAM,EAAE,aAAa,OAAO;AAEtC,SAAO;AAAA,IACL;AAAA,IAAS;AAAA,IAAS;AAAA,IAClB,aAAa,UAAU,WAAW;AAAA,IAClC,YAAY,KAAK,IAAI,IAAI;AAAA,IACzB,WAAU,oBAAI,KAAK,GAAE,YAAY;AAAA,EACnC;AACF;AAEA,SAAS,iBACP,eACA,SACA,SACA,cAA4C,CAAC,GAC7C,eACgB;AAChB,QAAM,QAAwB,CAAC;AAE/B,UAAQ,QAAQ,CAAC,QAAQ,MAAM;AAC7B,UAAM,QAAQ,YAAY,CAAC;AAC3B,UAAM,QAAQ,OAAO;AAMrB,QAAI,OAAO,SAAS,eAAe,qBAAqB,OAAO,KAAK,GAAG;AACrE,YAAM,KAAK;AAAA,QACT,QAAQ;AAAA,QACR,MAAM;AAAA,QACN,UAAU;AAAA,QACV,SAAS,IAAI,KAAK;AAAA,MACpB,CAAC;AAAA,IACH;AAEA,QACE,OAAO,SAAS,eAChB,gBAAgB,KAChB,OAAO,cAAc,cAAc,OACnC;AACA,YAAM,KAAK;AAAA,QACT,QAAQ;AAAA,QACR,MAAM;AAAA,QACN,UAAU;AAAA,QACV,SAAS,IAAI,KAAK,wDAAmD,aAAa;AAAA,MACpF,CAAC;AAAA,IACH;AAEA,QAAI,OAAO,SAAS,eAAe,QAAQ,MAAM,OAAO,MAAM,OAAO,MAAM,OAAO,MAAM,GAAG;AACzF,YAAM,KAAK;AAAA,QACT,QAAQ;AAAA,QACR,MAAM;AAAA,QACN,UAAU;AAAA,QACV,SAAS,eAAe,QAAQ,MAAM,OAAO,GAAG,sBAAmB,KAAK,SAAS,OAAO,MAAM,OAAO,GAAG;AAAA,MAC1G,CAAC;AAAA,IACH;AAEA,QAAI,OAAO,SAAS,aAAa,OAAO,kBAAkB,MAAM;AAC9D,YAAM,KAAK;AAAA,QACT,QAAQ;AAAA,QACR,MAAM;AAAA,QACN,UAAU;AAAA,QACV,SAAS,IAAI,KAAK;AAAA,MACpB,CAAC;AAAA,IACH;AAAA,EACF,CAAC;AAED,SAAO;AACT;AAKA,SAAS,qBAAqB,MAAuB;AACnD,QAAM,IAAI,KAAK,YAAY;AAC3B,SAAO,0BAA0B,KAAK,CAAC;AACzC;","names":[]}
package/package.json ADDED
@@ -0,0 +1,68 @@
1
+ {
2
+ "name": "@noy-db/to-probe",
3
+ "version": "0.1.0-pre.3",
4
+ "description": "Diagnostic companion for the @noy-db/to-* store family — not itself a storage backend. Setup-time suitability test + topology check + runtime reliability monitor. Exercises the 6-method NoydbStore contract across five axes (write latency, CAS integrity, hydration cost, sync economics, network resilience) and produces a structured risk-scored report.",
5
+ "license": "MIT",
6
+ "author": "vLannaAi <vicio@lanna.ai>",
7
+ "homepage": "https://github.com/vLannaAi/noy-db/tree/main/packages/to-probe#readme",
8
+ "repository": {
9
+ "type": "git",
10
+ "url": "git+https://github.com/vLannaAi/noy-db.git",
11
+ "directory": "packages/to-probe"
12
+ },
13
+ "bugs": {
14
+ "url": "https://github.com/vLannaAi/noy-db/issues"
15
+ },
16
+ "type": "module",
17
+ "sideEffects": false,
18
+ "exports": {
19
+ ".": {
20
+ "import": {
21
+ "types": "./dist/index.d.ts",
22
+ "default": "./dist/index.js"
23
+ },
24
+ "require": {
25
+ "types": "./dist/index.d.cts",
26
+ "default": "./dist/index.cjs"
27
+ }
28
+ }
29
+ },
30
+ "main": "./dist/index.cjs",
31
+ "module": "./dist/index.js",
32
+ "types": "./dist/index.d.ts",
33
+ "files": [
34
+ "dist",
35
+ "README.md",
36
+ "LICENSE"
37
+ ],
38
+ "engines": {
39
+ "node": ">=18.0.0"
40
+ },
41
+ "peerDependencies": {
42
+ "@noy-db/hub": "0.1.0-pre.3"
43
+ },
44
+ "devDependencies": {
45
+ "@types/node": "^22.0.0",
46
+ "@noy-db/hub": "0.1.0-pre.3"
47
+ },
48
+ "keywords": [
49
+ "noy-db",
50
+ "to-probe",
51
+ "benchmark",
52
+ "diagnostics",
53
+ "monitoring",
54
+ "suitability",
55
+ "health-check",
56
+ "zero-knowledge"
57
+ ],
58
+ "publishConfig": {
59
+ "access": "public",
60
+ "tag": "latest"
61
+ },
62
+ "scripts": {
63
+ "build": "tsup",
64
+ "test": "vitest run",
65
+ "lint": "eslint src/",
66
+ "typecheck": "tsc --noEmit"
67
+ }
68
+ }