@ibgib/core-gib 0.1.9 → 0.1.10

This diff shows the content changes between publicly released versions of this package, as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (60)
  1. package/dist/agent-helpers.d.mts +45 -0
  2. package/dist/agent-helpers.d.mts.map +1 -0
  3. package/dist/agent-helpers.mjs +36 -0
  4. package/dist/agent-helpers.mjs.map +1 -0
  5. package/dist/keystone/keystone-config-builder.respec.mjs +4 -4
  6. package/dist/keystone/keystone-config-builder.respec.mjs.map +1 -1
  7. package/dist/keystone/keystone-service-v1.respec.mjs +65 -62
  8. package/dist/keystone/keystone-service-v1.respec.mjs.map +1 -1
  9. package/dist/sync/sync-constants.d.mts +17 -0
  10. package/dist/sync/sync-constants.d.mts.map +1 -0
  11. package/dist/sync/sync-constants.mjs +16 -0
  12. package/dist/sync/sync-constants.mjs.map +1 -0
  13. package/dist/sync/sync-helpers.d.mts +15 -0
  14. package/dist/sync/sync-helpers.d.mts.map +1 -0
  15. package/dist/sync/sync-helpers.mjs +46 -0
  16. package/dist/sync/sync-helpers.mjs.map +1 -0
  17. package/dist/sync/sync-local-spaces.respec.d.mts +2 -0
  18. package/dist/sync/sync-local-spaces.respec.d.mts.map +1 -0
  19. package/dist/sync/sync-local-spaces.respec.mjs +159 -0
  20. package/dist/sync/sync-local-spaces.respec.mjs.map +1 -0
  21. package/dist/sync/sync-saga-coordinator.d.mts +118 -0
  22. package/dist/sync/sync-saga-coordinator.d.mts.map +1 -0
  23. package/dist/sync/sync-saga-coordinator.mjs +399 -0
  24. package/dist/sync/sync-saga-coordinator.mjs.map +1 -0
  25. package/dist/sync/sync-saga-coordinator.respec.d.mts +2 -0
  26. package/dist/sync/sync-saga-coordinator.respec.d.mts.map +1 -0
  27. package/dist/sync/sync-saga-coordinator.respec.mjs +40 -0
  28. package/dist/sync/sync-saga-coordinator.respec.mjs.map +1 -0
  29. package/dist/sync/sync-types.d.mts +103 -0
  30. package/dist/sync/sync-types.d.mts.map +1 -0
  31. package/dist/sync/sync-types.mjs +2 -0
  32. package/dist/sync/sync-types.mjs.map +1 -0
  33. package/dist/test/mock-space.d.mts +39 -0
  34. package/dist/test/mock-space.d.mts.map +1 -0
  35. package/dist/test/mock-space.mjs +79 -0
  36. package/dist/test/mock-space.mjs.map +1 -0
  37. package/dist/witness/space/inner-space/inner-space-v1.respec.mjs +163 -201
  38. package/dist/witness/space/inner-space/inner-space-v1.respec.mjs.map +1 -1
  39. package/dist/witness/space/space-helper.d.mts.map +1 -1
  40. package/dist/witness/space/space-helper.mjs +43 -4
  41. package/dist/witness/space/space-helper.mjs.map +1 -1
  42. package/dist/witness/space/space-helper.respec.d.mts +2 -0
  43. package/dist/witness/space/space-helper.respec.d.mts.map +1 -0
  44. package/dist/witness/space/space-helper.respec.mjs +30 -0
  45. package/dist/witness/space/space-helper.respec.mjs.map +1 -0
  46. package/package.json +2 -2
  47. package/src/agent-helpers.mts +58 -0
  48. package/src/keystone/keystone-config-builder.respec.mts +3 -3
  49. package/src/keystone/keystone-service-v1.respec.mts +66 -60
  50. package/src/sync/sync-constants.mts +24 -0
  51. package/src/sync/sync-helpers.mts +59 -0
  52. package/src/sync/sync-local-spaces.respec.mts +200 -0
  53. package/src/sync/sync-saga-coordinator.mts +477 -0
  54. package/src/sync/sync-saga-coordinator.respec.mts +52 -0
  55. package/src/sync/sync-types.mts +120 -0
  56. package/src/test/mock-space.mts +85 -0
  57. package/src/witness/space/inner-space/inner-space-v1.respec.mts +181 -228
  58. package/src/witness/space/space-helper.mts +42 -4
  59. package/src/witness/space/space-helper.respec.mts +42 -0
  60. package/tmp.md +11 -0
package/src/keystone/keystone-service-v1.respec.mts

@@ -200,7 +200,7 @@ await respecfully(sir, 'Suite A: Strategy Vectors (HashRevealV1)', async () => {
 
  await respecfully(sir, 'Derivation Logic', async () => {
 
- await ifWeMight(sir, 'derivePoolSecret with same inputs returns same output', async () => {
+ await ifWe(sir, 'derivePoolSecret with same inputs returns same output', async () => {
  const strategy = KeystoneStrategyFactory.create({ config });
 
  const secretA = await strategy.derivePoolSecret({ masterSecret });
@@ -210,7 +210,7 @@ await respecfully(sir, 'Suite A: Strategy Vectors (HashRevealV1)', async () => {
  iReckon(sir, secretA).asTo('secret length').isGonnaBeTruthy();
  });
 
- await ifWeMight(sir, 'derivePoolSecret with different master secret returns different output', async () => {
+ await ifWe(sir, 'derivePoolSecret with different master secret returns different output', async () => {
  const strategy = KeystoneStrategyFactory.create({ config });
 
  const secretA = await strategy.derivePoolSecret({ masterSecret });
@@ -219,7 +219,7 @@ await respecfully(sir, 'Suite A: Strategy Vectors (HashRevealV1)', async () => {
  iReckon(sir, secretA).asTo('secrets differ').not.willEqual(secretB);
  });
 
- await ifWeMight(sir, 'derivePoolSecret with different salt returns different output', async () => {
+ await ifWe(sir, 'derivePoolSecret with different salt returns different output', async () => {
  // Modify salt in a copy of config
  const configB = { ...config, salt: "OtherPool" };
  const strategyA = KeystoneStrategyFactory.create({ config });
@@ -234,7 +234,7 @@ await respecfully(sir, 'Suite A: Strategy Vectors (HashRevealV1)', async () => {
 
  await respecfully(sir, 'Challenge/Solution Logic', async () => {
 
- await ifWeMight(sir, 'generateSolution -> generateChallenge -> validateSolution loop works', async () => {
+ await ifWe(sir, 'generateSolution -> generateChallenge -> validateSolution loop works', async () => {
  const strategy = KeystoneStrategyFactory.create({ config });
  const poolSecret = await strategy.derivePoolSecret({ masterSecret });
  const challengeId = "a3ff7843552870fc28bef2b"; // arbitrary random challengeId
@@ -253,7 +253,7 @@ await respecfully(sir, 'Suite A: Strategy Vectors (HashRevealV1)', async () => {
  iReckon(sir, isValid).asTo('valid pair should pass').isGonnaBeTrue();
  });
 
- await ifWeMight(sir, 'validateSolution fails for mismatched values', async () => {
+ await ifWe(sir, 'validateSolution fails for mismatched values', async () => {
  const strategy = KeystoneStrategyFactory.create({ config });
  const poolSecret = await strategy.derivePoolSecret({ masterSecret });
  const challengeId = "8c994f3ed598f150e25513"; // arbitrary random challengeId
@@ -269,7 +269,7 @@ await respecfully(sir, 'Suite A: Strategy Vectors (HashRevealV1)', async () => {
  iReckon(sir, isValid).asTo('tampered solution should fail').isGonnaBeFalse();
  });
 
- await ifWeMight(sir, 'validateSolution fails for mismatched challenge hashes', async () => {
+ await ifWe(sir, 'validateSolution fails for mismatched challenge hashes', async () => {
  const strategy = KeystoneStrategyFactory.create({ config });
  const poolSecret = await strategy.derivePoolSecret({ masterSecret });
 
@@ -310,7 +310,7 @@ await respecfully(sir, 'Suite B: Service Lifecycle', async () => {
  });
 
  await respecfully(sir, 'Genesis', async () => {
- await ifWeMight(sir, 'creates a valid genesis frame and persists it', async () => {
+ await ifWe(sir, 'creates a valid genesis frame and persists it', async () => {
  const config = createStandardPoolConfig(POOL_ID_DEFAULT);
 
  genesisKeystone = await service.genesis({
@@ -338,7 +338,7 @@ await respecfully(sir, 'Suite B: Service Lifecycle', async () => {
  });
 
  await respecfully(sir, 'Signing (Evolution)', async () => {
- await ifWeMight(sir, 'evolves the keystone with a valid proof', async () => {
+ await ifWe(sir, 'evolves the keystone with a valid proof', async () => {
  const claim: Partial<KeystoneClaim> = {
  target: "comment 123^gib",
  verb: "post"
@@ -368,7 +368,7 @@ await respecfully(sir, 'Suite B: Service Lifecycle', async () => {
  });
 
  await respecfully(sir, 'Validation', async () => {
- await ifWeMight(sir, 'validates the genesis->signed transition', async () => {
+ await ifWe(sir, 'validates the genesis->signed transition', async () => {
  const errors = await service.validate({
  prevIbGib: genesisKeystone,
  currentIbGib: signedKeystone,
@@ -411,7 +411,7 @@ await respecfully(sir, 'Suite C: Security Vectors', async () => {
  });
 
  await respecfully(sir, 'Wrong Secret (Forgery)', async () => {
- await ifWeMight(sir, 'prevents creation of forged frames', async () => {
+ await ifWe(sir, 'prevents creation of forged frames', async () => {
  const claim: Partial<KeystoneClaim> = { target: "comment 123^gib", verb: "post" };
 
  let errorCaught = false;
@@ -440,7 +440,7 @@ await respecfully(sir, 'Suite C: Security Vectors', async () => {
  });
 
  await respecfully(sir, 'Policy Violation (Restricted Verbs)', async () => {
- await ifWeMight(sir, 'throws error if signing forbidden verb with restricted pool', async () => {
+ await ifWe(sir, 'throws error if signing forbidden verb with restricted pool', async () => {
  // Create a specific restricted pool config manually
  const restrictedPoolId = "read_only_pool";
  const restrictedConfig = createStandardPoolConfig(restrictedPoolId);
@@ -481,7 +481,7 @@ await respecfully(sir, 'Suite C: Security Vectors', async () => {
  // SUITE D: REVOCATION
  // ===========================================================================
 
- await respecfullyDear(sir, 'Suite D: Revocation', async () => {
+ await respecfully(sir, 'Suite D: Revocation', async () => {
 
  const service = new KeystoneService_V1();
  const masterSecret = "AliceSecret_RevokeTest";
@@ -509,7 +509,7 @@ await respecfullyDear(sir, 'Suite D: Revocation', async () => {
  await respecfully(sir, 'Revoke Lifecycle', async () => {
  let revokedKeystone: KeystoneIbGib_V1;
 
- await ifWeMight(sir, 'successfully creates a revocation frame', async () => {
+ await ifWe(sir, 'successfully creates a revocation frame', async () => {
  revokedKeystone = await service.revoke({
  latestKeystone: genesisKeystone,
  masterSecret,
@@ -527,7 +527,7 @@ await respecfullyDear(sir, 'Suite D: Revocation', async () => {
  iReckon(sir, data.revocationInfo!.proof.claim.verb).willEqual(KEYSTONE_VERB_REVOKE);
  });
 
- await ifWeMight(sir, 'validates the revocation frame', async () => {
+ await ifWe(sir, 'validates the revocation frame', async () => {
  const errors = await service.validate({
  prevIbGib: genesisKeystone,
  currentIbGib: revokedKeystone!,
@@ -538,7 +538,7 @@ await respecfullyDear(sir, 'Suite D: Revocation', async () => {
  iReckon(sir, errors.length).asTo('no validation errors').willEqual(0);
  });
 
- await ifWeMight(sir, 'consumed the revocation pool (Scorched Earth)', async () => {
+ await ifWe(sir, 'consumed the revocation pool (Scorched Earth)', async () => {
  const data = revokedKeystone!.data!;
  const revokePool = data.challengePools.find(p => p.id === POOL_ID_REVOKE);
 
@@ -556,7 +556,7 @@ await respecfullyDear(sir, 'Suite D: Revocation', async () => {
  // SUITE E: STRUCTURAL EVOLUTION (addPools)
  // ===========================================================================
 
- await respecfullyDear(sir, 'Suite E: Structural Evolution (addPools)', async () => {
+ await respecfully(sir, 'Suite E: Structural Evolution (addPools)', async () => {
 
  const service = new KeystoneService_V1();
  const aliceSecret = "Alice_Master_Key";
@@ -615,7 +615,7 @@ await respecfullyDear(sir, 'Suite E: Structural Evolution (addPools)', async ()
  });
 
  await respecfully(sir, 'Happy Path', async () => {
- await ifWeMight(sir, 'authorizes and adds a foreign pool', async () => {
+ await ifWe(sir, 'authorizes and adds a foreign pool', async () => {
  const bobPool = await createForeignPool("pool_bob", ["post"]);
 
  const updatedKeystone = await service.addPools({
@@ -654,7 +654,7 @@ await respecfullyDear(sir, 'Suite E: Structural Evolution (addPools)', async ()
  });
 
  await respecfully(sir, 'Permissions & Logic', async () => {
- await ifWeMight(sir, 'fails if no pool allows "manage" verb', async () => {
+ await ifWe(sir, 'fails if no pool allows "manage" verb', async () => {
  // 1. Create a restricted keystone
  const restrictedConfig = createStandardPoolConfig("read_only");
  restrictedConfig.allowedVerbs = ['read']; // No 'manage'
@@ -686,7 +686,7 @@ await respecfullyDear(sir, 'Suite E: Structural Evolution (addPools)', async ()
  iReckon(sir, errorCaught).asTo('permission denied').isGonnaBeTrue();
  });
 
- await ifWeMight(sir, 'fails on ID collision', async () => {
+ await ifWe(sir, 'fails on ID collision', async () => {
  // Try to add "pool_bob" again (it was added in Happy Path)
  const duplicatePool = await createForeignPool("pool_bob");
 
@@ -713,7 +713,7 @@ await respecfullyDear(sir, 'Suite E: Structural Evolution (addPools)', async ()
  // SUITE E: STRUCTURAL EVOLUTION (addPools)
  // ===========================================================================
 
- await respecfullyDear(sir, 'Suite E: Structural Evolution (addPools)', async () => {
+ await respecfully(sir, 'Suite E: Structural Evolution (addPools)', async () => {
 
  const service = new KeystoneService_V1();
  const aliceSecret = "Alice_Master_Key";
@@ -772,7 +772,7 @@ await respecfullyDear(sir, 'Suite E: Structural Evolution (addPools)', async ()
  });
 
  await respecfully(sir, 'Happy Path', async () => {
- await ifWeMight(sir, 'authorizes and adds a foreign pool', async () => {
+ await ifWe(sir, 'authorizes and adds a foreign pool', async () => {
  const bobPool = await createForeignPool("pool_bob", ["post"]);
 
  const updatedKeystone = await service.addPools({
@@ -811,7 +811,7 @@ await respecfullyDear(sir, 'Suite E: Structural Evolution (addPools)', async ()
  });
 
  await respecfully(sir, 'Permissions & Logic', async () => {
- await ifWeMight(sir, 'fails if no pool allows "manage" verb', async () => {
+ await ifWe(sir, 'fails if no pool allows "manage" verb', async () => {
  // 1. Create a restricted keystone (read-only)
  const restrictedConfig = createStandardPoolConfig("read_only");
  restrictedConfig.allowedVerbs = ['read']; // No 'manage'
@@ -843,7 +843,7 @@ await respecfullyDear(sir, 'Suite E: Structural Evolution (addPools)', async ()
  iReckon(sir, errorCaught).asTo('permission denied').isGonnaBeTrue();
  });
 
- await ifWeMight(sir, 'fails on ID collision', async () => {
+ await ifWe(sir, 'fails on ID collision', async () => {
  // Try to add "pool_bob" again (it was added in Happy Path)
  const duplicatePool = await createForeignPool("pool_bob");
 
@@ -870,7 +870,7 @@ await respecfullyDear(sir, 'Suite E: Structural Evolution (addPools)', async ()
  // SUITE F: DEEP INSPECTION (Granularity & Serialization)
  // ===========================================================================
 
- await respecfullyDear(sir, 'Suite F: Deep Inspection', async () => {
+ await respecfully(sir, 'Suite F: Deep Inspection', async () => {
 
  const service = new KeystoneService_V1();
  const aliceSecret = "Alice_Deep_Inspect";
@@ -880,6 +880,8 @@ await respecfullyDear(sir, 'Suite F: Deep Inspection', async () => {
  let mockMetaspace: any;
  let genesisKeystone: KeystoneIbGib_V1;
 
+ let signedKeystone: KeystoneIbGib_V1;
+
  // We use a specific hybrid config to test exact selection logic
  const hybridConfig = createStandardPoolConfig(salt) as KeystonePoolConfig_HashV1;
  // 2 FIFO + 2 Random = 4 Total per sign
@@ -900,9 +902,8 @@ await respecfullyDear(sir, 'Suite F: Deep Inspection', async () => {
  });
 
  await respecfully(sir, 'Proof Granularity & Math', async () => {
- let signedKeystone: KeystoneIbGib_V1;
 
- await ifWeMight(sir, 'generates exactly the expected number of solutions', async () => {
+ await ifWe(sir, 'generates exactly the expected number of solutions', async () => {
  signedKeystone = await service.sign({
  latestKeystone: genesisKeystone,
  masterSecret: aliceSecret,
@@ -919,7 +920,7 @@ await respecfullyDear(sir, 'Suite F: Deep Inspection', async () => {
  iReckon(sir, solutions.length).asTo('solution count').willEqual(4);
  });
 
- await ifWeMight(sir, 'verifies the math manually (White-box Crypto Check)', async () => {
+ await ifWe(sir, 'verifies the math manually (White-box Crypto Check)', async () => {
  const proof = signedKeystone.data!.proofs[0];
  const poolSnapshot = genesisKeystone.data!.challengePools.find(p => p.id === salt)!;
 
@@ -948,7 +949,7 @@ await respecfullyDear(sir, 'Suite F: Deep Inspection', async () => {
  }
  });
 
- await ifWeMight(sir, 'verifies FIFO logic (Deterministic Selection)', async () => {
+ await ifWe(sir, 'verifies FIFO logic (Deterministic Selection)', async () => {
  const proof = signedKeystone.data!.proofs[0];
  const poolSnapshot = genesisKeystone.data!.challengePools.find(p => p.id === salt)!;
 
@@ -968,45 +969,50 @@ await respecfullyDear(sir, 'Suite F: Deep Inspection', async () => {
  });
  });
 
- // await respecfully(sir, 'DTO & Serialization', async () => {
+ await respecfully(sir, 'DTO & Serialization', async () => {
+
+ await ifWe(sir, 'survives a clone/JSON-cycle without corruption', async () => {
+ // 1. Create a DTO (simulate network transmission/storage)
+ // 'clone' does a JSON stringify/parse under the hood (usually) or structured clone.
+ const dto = clone(signedKeystone);
 
- // await ifWeMight(sir, 'survives a clone/JSON-cycle without corruption', async () => {
- // // 1. Create a DTO (simulate network transmission/storage)
- // // 'clone' does a JSON stringify/parse under the hood (usually) or structured clone.
- // const dto = clone(signedKeystone);
+ // 2. Structural checks
+ iReckon(sir, dto).asTo('dto exists').isGonnaBeTruthy();
+ iReckon(sir, dto.data).asTo('dto data').isGonnaBeTruthy();
+ iReckon(sir, dto.data!.proofs).asTo('dto proofs').isGonnaBeTruthy();
+
+ // 3. Functional check: Can the service validate this DTO?
+ // This ensures no prototypes or hidden properties were lost that the service depends on.
+ const errors = await service.validate({
+ prevIbGib: genesisKeystone,
+ currentIbGib: dto, // Passing the DTO, not the original object
+ });
 
- // // 2. Structural checks
- // iReckon(sir, dto).asTo('dto exists').isGonnaBeTruthy();
- // iReckon(sir, dto.data).asTo('dto data').isGonnaBeTruthy();
- // iReckon(sir, dto.data!.proofs).asTo('dto proofs').isGonnaBeTruthy();
+ iReckon(sir, errors.length).asTo('DTO validation errors').willEqual(0);
+ });
 
- // // 3. Functional check: Can the service validate this DTO?
- // // This ensures no prototypes or hidden properties were lost that the service depends on.
- // const errors = await service.validate({
- // prevIbGib: genesisKeystone,
- // currentIbGib: dto, // Passing the DTO, not the original object
- // });
+ await ifWe(sir, 'ensures data contains no functions or circular refs', async () => {
+ // A crude but effective test: ensure JSON.stringify doesn't throw
+ // and the result is equal to the object (if we parsed it back).
 
- // iReckon(sir, errors.length).asTo('DTO validation errors').willEqual(0);
- // });
+ const jsonStr = JSON.stringify(signedKeystone);
+ const parsed = JSON.parse(jsonStr);
 
- // await ifWeMight(sir, 'ensures data contains no functions or circular refs', async () => {
- // // A crude but effective test: ensure JSON.stringify doesn't throw
- // // and the result is equal to the object (if we parsed it back).
+ // Compare specific deep fields
+ const originalSolution = signedKeystone.data!.proofs[0].solutions[0].value;
+ const parsedSolution = parsed.data.proofs[0].solutions[0].value;
 
- // const jsonStr = JSON.stringify(signedKeystone);
- // const parsed = JSON.parse(jsonStr);
+ iReckon(sir, parsedSolution).asTo('deep property survives stringify').willEqual(originalSolution);
 
- // // Compare specific deep fields
- // const originalSolution = signedKeystone.data!.proofs[0].solutions[0].value;
- // const parsedSolution = parsed.data.proofs[0].solutions[0].value;
+ // Ensure no extra properties were lost
+ // FIX: JSON.stringify removes keys with 'undefined' values.
+ // We must filter the original keys to match this behavior for a fair comparison.
+ const origKeys = Object.keys(signedKeystone.data!)
+ .filter(k => (signedKeystone.data as any)[k] !== undefined);
 
- // iReckon(sir, parsedSolution).asTo('deep property survives stringify').willEqual(originalSolution);
+ const parsedKeys = Object.keys(parsed.data);
+ iReckon(sir, parsedKeys.length).asTo('key count matches').willEqual(origKeys.length);
+ });
 
- // // Ensure no extra properties were lost (rudimentary check)
- // const origKeys = Object.keys(signedKeystone.data!);
- // const parsedKeys = Object.keys(parsed.data);
- // iReckon(sir, parsedKeys.length).asTo('key count matches').willEqual(origKeys.length);
- // });
- // });
+ });
  });
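
The newly enabled key-count assertion above hinges on a JSON.stringify detail that the FIX comment calls out: keys whose value is undefined are dropped from the serialized output, so comparing raw Object.keys counts before and after a stringify/parse cycle can fail spuriously. A minimal standalone illustration (plain TypeScript, independent of the keystone types):

    // A property that exists but holds `undefined`...
    const original = { a: 1, b: undefined, c: 'x' };

    // ...is dropped entirely by JSON.stringify.
    const roundTripped = JSON.parse(JSON.stringify(original));

    console.log(Object.keys(original).length);     // 3 ('b' is still an own key)
    console.log(Object.keys(roundTripped).length); // 2 ('b' did not survive)

    // Filtering undefined-valued keys first makes the comparison fair,
    // mirroring what the re-enabled test does with `origKeys`.
    const origKeys = Object.keys(original).filter(k => (original as any)[k] !== undefined);
    console.log(origKeys.length === Object.keys(roundTripped).length); // true
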
package/src/sync/sync-constants.mts

@@ -0,0 +1,24 @@
+ export const SYNC_ATOM = "sync";
+
+ /**
+ * Protocol version string for V1.
+ */
+ export const SYNC_PROTOCOL_V1 = "sync 1.0.0";
+
+ export const SYNC_STAGE_INIT = "init";
+ export const SYNC_STAGE_REQUEST = "request";
+ export const SYNC_STAGE_DELTA = "delta";
+ export const SYNC_STAGE_COMMIT = "commit";
+
+ export type SyncStage =
+ | typeof SYNC_STAGE_INIT
+ | typeof SYNC_STAGE_REQUEST
+ | typeof SYNC_STAGE_DELTA
+ | typeof SYNC_STAGE_COMMIT;
+
+ export const SyncStage = {
+ init: SYNC_STAGE_INIT,
+ request: SYNC_STAGE_REQUEST,
+ delta: SYNC_STAGE_DELTA,
+ commit: SYNC_STAGE_COMMIT,
+ } as const;
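
sync-constants.mts pairs the SyncStage string-literal union with a same-named const map, so callers get compile-time narrowing and runtime values from one import. A small usage sketch under that assumption (isSyncStage and nextStage are illustrative helpers, not exports of the package, and the linear init -> request -> delta -> commit ordering is only inferred from the constant names):

    import { SyncStage } from './sync-constants.mjs';

    // Narrow an arbitrary string (e.g. a stage parsed out of an ib) to the SyncStage union.
    function isSyncStage(x: string): x is SyncStage {
        return (Object.values(SyncStage) as string[]).includes(x);
    }

    // Hypothetical linear progression through the stages; returns undefined after commit.
    function nextStage(stage: SyncStage): SyncStage | undefined {
        const order: SyncStage[] = [SyncStage.init, SyncStage.request, SyncStage.delta, SyncStage.commit];
        const i = order.indexOf(stage);
        return i >= 0 && i < order.length - 1 ? order[i + 1] : undefined;
    }
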
package/src/sync/sync-helpers.mts

@@ -0,0 +1,59 @@
+ import { extractErrorMsg } from "@ibgib/helper-gib/dist/helpers/utils-helper.mjs";
+ import { Ib } from "@ibgib/ts-gib/dist/types.mjs";
+ import { SYNC_ATOM } from "./sync-constants.mjs";
+ import { SyncData_V1, SyncIbInfo_V1 } from "./sync-types.mjs";
+
+ const GLOBAL_LOG_A_LOT = false; // Todo: import from core constants if needed
+ const logalot = GLOBAL_LOG_A_LOT;
+
+ /**
+ * Constructs the standard 'ib' string for a Sync frame.
+ */
+ export async function getSyncIb({
+ data,
+ }: {
+ data: SyncData_V1,
+ }): Promise<Ib> {
+ const lc = `[${getSyncIb.name}]`;
+ try {
+ // sync uuid stage
+ // e.g. "sync 1234-5678 init"
+ const ib = [
+ SYNC_ATOM,
+ data.uuid,
+ data.stage
+ ].join(' ');
+
+ return ib;
+ } catch (error) {
+ console.error(`${lc} ${extractErrorMsg(error)}`);
+ throw error;
+ }
+ }
+
+ /**
+ * Parses a standard Sync 'ib' string.
+ */
+ export async function parseSyncIb({
+ ib,
+ }: {
+ ib: Ib,
+ }): Promise<SyncIbInfo_V1> {
+ const lc = `[${parseSyncIb.name}]`;
+ try {
+ const parts = ib.split(' ');
+ if (parts.length !== 3) {
+ throw new Error(`Invalid sync ib. Expected 3 parts [atom uuid stage]. Got ${parts.length}. (E: 7c8d9...)`);
+ }
+ const [atom, uuid, stage] = parts;
+
+ if (atom !== SYNC_ATOM) {
+ throw new Error(`Invalid sync ib. Expected atom '${SYNC_ATOM}', got '${atom}'. (E: 8f9e1...)`);
+ }
+
+ return { atom, uuid, stage: stage as any };
+ } catch (error) {
+ console.error(`${lc} ${extractErrorMsg(error)}`);
+ throw error;
+ }
+ }
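
getSyncIb and parseSyncIb define the ib wire format for sync frames as three space-delimited parts, "atom uuid stage". A round-trip sketch (the literal uuid and the partial cast to SyncData_V1 are illustrative only; real frames would carry the rest of the SyncData_V1 fields):

    import { SyncStage } from './sync-constants.mjs';
    import { getSyncIb, parseSyncIb } from './sync-helpers.mjs';
    import { SyncData_V1 } from './sync-types.mjs';

    // Only `uuid` and `stage` feed into the ib; other SyncData_V1 fields are omitted here.
    const data = { uuid: '1234-5678', stage: SyncStage.init } as unknown as SyncData_V1;

    const ib = await getSyncIb({ data });
    console.log(ib); // "sync 1234-5678 init"

    const info = await parseSyncIb({ ib });
    console.log(info); // { atom: 'sync', uuid: '1234-5678', stage: 'init' }

    // parseSyncIb throws if the ib is not exactly three space-delimited parts
    // or if the leading atom is not "sync".
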
package/src/sync/sync-local-spaces.respec.mts

@@ -0,0 +1,200 @@
+ import { rm } from 'node:fs/promises';
+ import { default as pathUtils } from 'path';
+
+ import {
+ respecfully, lastOfAll
+ } from '@ibgib/helper-gib/dist/respec-gib/respec-gib.mjs';
+ const maam = `[${import.meta.url}]`, sir = maam;
+
+ import { Factory_V1 } from '@ibgib/ts-gib/dist/V1/factory.mjs';
+ import { rel8 } from '@ibgib/ts-gib/dist/V1/transforms/rel8.mjs';
+ import { getIbGibAddr, } from '@ibgib/ts-gib/dist/helper.mjs';
+ import { IBGIB_ENCODING } from '../witness/space/filesystem-space/filesystem-constants.mjs';
+ import { NodeFilesystemSpace_V1 } from '../witness/space/filesystem-space/node-filesystem-space/node-filesystem-space-v1.mjs';
+ import { NodeFilesystemSpaceData_V1 } from '../witness/space/filesystem-space/node-filesystem-space/node-filesystem-space-types.mjs';
+ import { SyncSagaCoordinator } from './sync-saga-coordinator.mjs';
+ import { SyncStage } from './sync-constants.mjs';
+ import { getTimestampInTicks, getUUID } from '@ibgib/helper-gib/dist/helpers/utils-helper.mjs';
+ import { GIB, IBGIB_DELIMITER } from '@ibgib/ts-gib/dist/V1/constants.mjs';
+ import { putInSpace, getFromSpace } from '../witness/space/space-helper.mjs';
+ import { KeystoneService_V1 } from '../keystone/keystone-service-v1.mjs';
+ import { KeystoneIbGib_V1 } from '../keystone/keystone-types.mjs';
+ import { mut8 } from '@ibgib/ts-gib/dist/V1/transforms/mut8.mjs';
+
+ const logalot = true;
+ const lc = `[sync-local-spaces.respec]`;
+
+ // Helper to create a temp space
+ const createTempSpace = async ({
+ baseDir,
+ name
+ }: {
+ baseDir: string,
+ name: string
+ }): Promise<NodeFilesystemSpace_V1> => {
+ const uuid = await getUUID();
+ const initialData: NodeFilesystemSpaceData_V1 = {
+ version: '1',
+ classname: NodeFilesystemSpace_V1.name,
+ uuid,
+ name,
+ description: `Temp space for ${name}`,
+ baseDir,
+ baseSubPath: 'ibgib', // keep short to avoid long path issues
+ spaceSubPath: name,
+ ibgibsSubPath: 'ibgibs',
+ metaSubPath: 'meta',
+ binSubPath: 'bin',
+ dnaSubPath: 'dna',
+ encoding: IBGIB_ENCODING,
+ mitigateLongPaths: true,
+ // other defaults
+ catchAllErrors: false,
+ trace: false,
+ };
+
+ // mimic constructor logic if needed, or just new it up
+ const space = new NodeFilesystemSpace_V1(initialData);
+ // Explicitly initialize if needed (based on findings, constructor calls super which calls initialize?
+ // Actually source had it commented out in ctor. Let's call it via a witness "put" which should verify initialization)
+ return space;
+ };
+
+ // class MockKeystoneService implements KeystoneService_V1 {
+ // async getIdentity(): Promise<KeystoneIbGib_V1> {
+ // // Return a dummy identity
+ // const res = await Factory_V1.firstGen({
+ // ib: 'identity',
+ // data: { uuid: await getUUID() },
+ // dna: true,
+ // });
+ // return res.newIbGib as KeystoneIbGib_V1;
+ // }
+ // // Implement other methods as no-ops or simple mocks
+ // async sign(ibGib: any): Promise<any> { return ibGib; }
+ // async verify(ibGib: any): Promise<boolean> { return true; }
+ // // Add other required properties/methods of KeystoneService_V1 if any (checking interface might be needed)
+ // // Assuming for now these are the core ones used by Coordinator.
+ // // If strict interface, might need more.
+ // }
+
+ await respecfully(sir, `Sync Local Spaces`, async () => {
+
+ const TEST_ROOT = pathUtils.join(process.cwd(), 'tmp_sync_test_' + await getUUID());
+
+ lastOfAll(sir, async () => {
+ if (!logalot) {
+ await rm(TEST_ROOT, { recursive: true, force: true });
+ } else {
+ console.log(`${lc} Leaving TEST_ROOT for inspection: ${TEST_ROOT}`);
+ }
+ });
+
+ await respecfully(sir, `Basic Push Sync (Source -> Dest)`, async () => {
+ // // 1. Setup Spaces
+ // const srcDir = pathUtils.join(TEST_ROOT, 'source');
+ // const destDir = pathUtils.join(TEST_ROOT, 'dest');
+
+ // const sourceSpace = await createTempSpace({ baseDir: srcDir, name: 'source' });
+ // const destSpace = await createTempSpace({ baseDir: destDir, name: 'dest' });
+
+ // // 2. Seed Source Data
+ // // 2.1 Create a "Stone" (Primitive-like or just immutable data)
+ // const stoneRes = await Factory_V1.firstGen({
+ // ib: 'stone_data',
+ // data: { some: 'data', timestamp: getTimestampInTicks() },
+ // dna: false,
+ // });
+ // const stone = stoneRes.newIbGib;
+ // const stoneAddr = getIbGibAddr({ ibGib: stone });
+
+ // // 2.2 Create a "Living" Timeline (Root -> Child)
+ // const rootRes = await Factory_V1.firstGen({
+ // ib: 'timeline_root',
+ // data: { type: 'root' },
+ // dna: true,
+ // });
+ // const root = rootRes.newIbGib;
+ // const rootAddr = getIbGibAddr({ ibGib: root });
+
+ // const childRes = await mut8({
+ // type: 'fork',
+ // src: root,
+ // dna: true,
+ // data: { type: 'child', n: 1 }
+ // });
+ // const child = childRes.newIbGib;
+ // const childAddr = getIbGibAddr({ ibGib: child });
+
+ // // Put into Source
+ // await putInSpace({
+ // space: sourceSpace,
+ // ibGibs: [stone, root, child]
+ // });
+
+ // // 3. Setup Sync Coordinator
+ // const mockKeystone = new MockKeystoneService() as unknown as KeystoneService_V1;
+ // const identity = await mockKeystone.getIdentity();
+ // const coordinator = new SyncSagaCoordinator(mockKeystone);
+
+ // // Define domain (roots to sync)
+ // const domainIbGibs = [root]; // We only explicitly track the "Living" root. Stone is standalone?
+ // // Actually for the test let's explicitly include both or rely on dependency graph.
+ // // If 'stone' is not related to 'root', it won't be picked up unless we add it to domain.
+ // // Let's Rel8 the stone to the child to test dependency traversal.
+
+ // const childWithRelRes = await rel8({
+ // src: child,
+ // rel8nsToAddByAddr: { 'linked_stone': [stoneAddr] },
+ // dna: true,
+ // });
+ // const childWithRel = childWithRelRes.newIbGib;
+ // const childWithRelAddr = getIbGibAddr({ ibGib: childWithRel });
+
+ // // Update Source with linked child
+ // await putInSpace({
+ // space: sourceSpace,
+ // ibGibs: [childWithRel]
+ // });
+
+ // // 4. Run Sync
+ // const syncRes = await coordinator.sync({
+ // source: sourceSpace,
+ // dest: destSpace,
+ // domainIbGibs: [childWithRel], // Sync starting from the tip
+ // identity
+ // });
+
+ // // 5. Verify Dest
+ // // Check Stone
+ // const getStone = await getFromSpace({
+ // space: destSpace,
+ // addr: stoneAddr
+ // });
+ // if (!getStone.success || !getStone.ibGibs?.[0]) {
+ // throw new Error(`Dest failed to get Stone: ${stoneAddr}`);
+ // }
+ // console.log(`${lc} Verified Stone synced.`);
+
+ // // Check ChildWithRel (Tip)
+ // const getChild = await getFromSpace({
+ // space: destSpace,
+ // addr: childWithRelAddr
+ // });
+ // if (!getChild.success || !getChild.ibGibs?.[0]) {
+ // throw new Error(`Dest failed to get ChildWithRel: ${childWithRelAddr}`);
+ // }
+ // console.log(`${lc} Verified Timeline Tip synced.`);
+
+ // // Check Root (Dependency)
+ // const getRoot = await getFromSpace({
+ // space: destSpace,
+ // addr: rootAddr
+ // });
+ // if (!getRoot.success || !getRoot.ibGibs?.[0]) {
+ // throw new Error(`Dest failed to get Root: ${rootAddr}`);
+ // }
+ // console.log(`${lc} Verified Root synced.`);
+
+ });
+ });