@gitgov/core 1.6.3 → 1.7.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/src/index.js CHANGED
@@ -6,8 +6,10 @@ import * as yaml from 'js-yaml';
6
6
  import { generateKeyPair, createHash, sign, verify } from 'crypto';
7
7
  import { promisify } from 'util';
8
8
  import * as pathUtils from 'path';
9
+ import { join, basename, dirname } from 'path';
9
10
  import { createRequire } from 'module';
10
11
  import { fileURLToPath } from 'url';
12
+ import { readdir } from 'fs/promises';
11
13
  import { EventEmitter } from 'events';
12
14
 
13
15
  var __defProp = Object.defineProperty;
@@ -2706,7 +2708,7 @@ async function verifySignatures(record, getActorPublicKey) {
2706
2708
  for (const signature of record.header.signatures) {
2707
2709
  const publicKeyBase64 = await getActorPublicKey(signature.keyId);
2708
2710
  if (!publicKeyBase64) {
2709
- logger2.warn(`Public key not found for actor: ${signature.keyId}`);
2711
+ logger2.debug(`Public key not found for actor: ${signature.keyId}`);
2710
2712
  return false;
2711
2713
  }
2712
2714
  const digest = `${record.header.payloadChecksum}:${signature.keyId}:${signature.role}:${signature.notes}:${signature.timestamp}`;
@@ -2788,51 +2790,6 @@ async function validateFullEmbeddedMetadataRecord(record, getActorPublicKey) {
2788
2790
  throw new SignatureVerificationError();
2789
2791
  }
2790
2792
  }
2791
- function validateEmbeddedMetadataBusinessRules(data) {
2792
- const errors = [];
2793
- if (data.header.type === "custom") {
2794
- if (!data.header.schemaUrl) {
2795
- errors.push({
2796
- field: "header.schemaUrl",
2797
- message: 'schemaUrl is required when header.type is "custom"',
2798
- value: data.header.schemaUrl
2799
- });
2800
- }
2801
- if (!data.header.schemaChecksum) {
2802
- errors.push({
2803
- field: "header.schemaChecksum",
2804
- message: 'schemaChecksum is required when header.type is "custom"',
2805
- value: data.header.schemaChecksum
2806
- });
2807
- }
2808
- }
2809
- const sha256Pattern = /^[a-fA-F0-9]{64}$/;
2810
- if (!sha256Pattern.test(data.header.payloadChecksum)) {
2811
- errors.push({
2812
- field: "header.payloadChecksum",
2813
- message: "payloadChecksum must be a valid SHA-256 hash (64 hex characters)",
2814
- value: data.header.payloadChecksum
2815
- });
2816
- }
2817
- if (data.header.schemaChecksum && !sha256Pattern.test(data.header.schemaChecksum)) {
2818
- errors.push({
2819
- field: "header.schemaChecksum",
2820
- message: "schemaChecksum must be a valid SHA-256 hash (64 hex characters)",
2821
- value: data.header.schemaChecksum
2822
- });
2823
- }
2824
- if (!data.header.signatures || data.header.signatures.length === 0) {
2825
- errors.push({
2826
- field: "header.signatures",
2827
- message: "At least one signature is required",
2828
- value: data.header.signatures
2829
- });
2830
- }
2831
- return {
2832
- isValid: errors.length === 0,
2833
- errors
2834
- };
2835
- }
2836
2793
 
2837
2794
  // src/validation/task_validator.ts
2838
2795
  function validateTaskRecordSchema(data) {
@@ -2899,7 +2856,7 @@ function generateFeedbackId(title, timestamp) {
2899
2856
  }
2900
2857
 
2901
2858
  // src/factories/task_factory.ts
2902
- async function createTaskRecord(payload) {
2859
+ function createTaskRecord(payload) {
2903
2860
  const timestamp = Math.floor(Date.now() / 1e3);
2904
2861
  const task = {
2905
2862
  id: payload.id || generateTaskId(payload.title || "", timestamp),
@@ -2919,6 +2876,18 @@ async function createTaskRecord(payload) {
2919
2876
  }
2920
2877
  return task;
2921
2878
  }
2879
+ function loadTaskRecord(data) {
2880
+ const embeddedValidation = validateEmbeddedMetadataDetailed(data);
2881
+ if (!embeddedValidation.isValid) {
2882
+ throw new DetailedValidationError("GitGovRecord (TaskRecord)", embeddedValidation.errors);
2883
+ }
2884
+ const record = data;
2885
+ const payloadValidation = validateTaskRecordDetailed(record.payload);
2886
+ if (!payloadValidation.isValid) {
2887
+ throw new DetailedValidationError("TaskRecord payload", payloadValidation.errors);
2888
+ }
2889
+ return record;
2890
+ }
2922
2891
 
2923
2892
  // src/validation/cycle_validator.ts
2924
2893
  var cycleSchema = Schemas.CycleRecord;
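The record factories above become synchronous in 1.7.0, and each record type gains a `load*Record` counterpart that validates untrusted on-disk data before it is handed back as a typed record. A minimal standalone sketch of that create/load split follows; the validator callbacks, error shape, and id format are assumptions modeled on this diff, not the published `@gitgov/core` API.

```typescript
// Sketch only: illustrates the create/load split, with assumed validator types.
type Issue = { field: string; message: string; value?: unknown };
type Result = { isValid: boolean; errors: Issue[] };

class DetailedValidationError extends Error {
  constructor(context: string, public issues: Issue[]) {
    super(`${context}: ${issues.map((i) => `${i.field}: ${i.message}`).join("; ")}`);
  }
}

// create*: pure and synchronous, fills defaults for a brand-new payload.
function createPayload(title: string) {
  const timestamp = Math.floor(Date.now() / 1000);
  return { id: `task-${timestamp}-example`, title, status: "draft" as const }; // id format is hypothetical
}

// load*: gatekeeper for data read from disk: envelope first, then payload.
function loadRecord<T>(
  data: unknown,
  validateEnvelope: (d: unknown) => Result,
  validatePayload: (p: unknown) => Result
): { header: unknown; payload: T } {
  const envelope = validateEnvelope(data);
  if (!envelope.isValid) throw new DetailedValidationError("GitGovRecord", envelope.errors);
  const record = data as { header: unknown; payload: T };
  const payload = validatePayload(record.payload);
  if (!payload.isValid) throw new DetailedValidationError("Record payload", payload.errors);
  return record;
}
```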
@@ -2957,7 +2926,7 @@ async function validateFullCycleRecord(record, getActorPublicKey) {
2957
2926
  }
2958
2927
 
2959
2928
  // src/factories/cycle_factory.ts
2960
- async function createCycleRecord(payload) {
2929
+ function createCycleRecord(payload) {
2961
2930
  const timestamp = Math.floor(Date.now() / 1e3);
2962
2931
  const cycle = {
2963
2932
  id: payload.id || generateCycleId(payload.title || "", timestamp),
@@ -2976,6 +2945,18 @@ async function createCycleRecord(payload) {
2976
2945
  }
2977
2946
  return cycle;
2978
2947
  }
2948
+ function loadCycleRecord(data) {
2949
+ const embeddedValidation = validateEmbeddedMetadataDetailed(data);
2950
+ if (!embeddedValidation.isValid) {
2951
+ throw new DetailedValidationError("GitGovRecord (CycleRecord)", embeddedValidation.errors);
2952
+ }
2953
+ const record = data;
2954
+ const payloadValidation = validateCycleRecordDetailed(record.payload);
2955
+ if (!payloadValidation.isValid) {
2956
+ throw new DetailedValidationError("CycleRecord payload", payloadValidation.errors);
2957
+ }
2958
+ return record;
2959
+ }
2979
2960
 
2980
2961
  // src/store/index.ts
2981
2962
  var store_exports = {};
@@ -3159,7 +3140,9 @@ var RecordStore = class {
3159
3140
  recordType;
3160
3141
  recordsDir;
3161
3142
  fs;
3162
- constructor(recordType, rootPath, fsDeps = promises) {
3143
+ loader;
3144
+ constructor(recordType, loader, rootPath, fsDeps = promises) {
3145
+ this.loader = loader;
3163
3146
  const foundRoot = rootPath || ConfigManager.findProjectRoot();
3164
3147
  if (!foundRoot) {
3165
3148
  throw new Error("Could not find project root. RecordStore requires a valid project root.");
@@ -3185,9 +3168,14 @@ var RecordStore = class {
3185
3168
  const filePath = this.getRecordPath(recordId);
3186
3169
  try {
3187
3170
  const content = await this.fs.readFile(filePath, "utf-8");
3188
- const record = JSON.parse(content);
3189
- return record;
3171
+ const raw = JSON.parse(content);
3172
+ const validatedRecord = this.loader(raw);
3173
+ return validatedRecord;
3190
3174
  } catch (e) {
3175
+ if (e instanceof DetailedValidationError) {
3176
+ console.warn(`\u26A0\uFE0F Invalid ${this.recordType} record ${recordId}:`, e.message);
3177
+ return null;
3178
+ }
3191
3179
  const error = e;
3192
3180
  if (error.code === "ENOENT") {
3193
3181
  return null;
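With the loader injected, `RecordStore.read()` now treats a record that fails validation much like a missing file: it warns and returns `null` instead of surfacing raw, unvalidated JSON. Below is a hedged standalone sketch of that read path; unlike the shipped code, which special-cases `DetailedValidationError`, this sketch collapses parse and validation failures together for brevity.

```typescript
import { readFile } from "node:fs/promises";

// Standalone sketch: read a JSON record and gate it through a validating loader.
async function readValidatedRecord<T>(
  filePath: string,
  loadRecord: (raw: unknown) => T
): Promise<T | null> {
  let content: string;
  try {
    content = await readFile(filePath, "utf-8");
  } catch (e) {
    if ((e as NodeJS.ErrnoException).code === "ENOENT") return null; // missing record is not an error
    throw e; // other I/O errors still propagate
  }
  try {
    return loadRecord(JSON.parse(content)); // schema + embedded-metadata validation
  } catch (e) {
    // Invalid JSON or failed validation: warn and degrade to null, like a missing file.
    console.warn(`Invalid record at ${filePath}:`, e instanceof Error ? e.message : e);
    return null;
  }
}
```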
@@ -3272,7 +3260,7 @@ async function validateFullActorRecord(record, getActorPublicKey) {
3272
3260
  }
3273
3261
 
3274
3262
  // src/factories/actor_factory.ts
3275
- async function createActorRecord(payload) {
3263
+ function createActorRecord(payload) {
3276
3264
  const actor = {
3277
3265
  id: payload.id || generateActorId(payload.type || "human", payload.displayName || ""),
3278
3266
  type: payload.type || "human",
@@ -3288,6 +3276,18 @@ async function createActorRecord(payload) {
3288
3276
  }
3289
3277
  return actor;
3290
3278
  }
3279
+ function loadActorRecord(data) {
3280
+ const embeddedValidation = validateEmbeddedMetadataDetailed(data);
3281
+ if (!embeddedValidation.isValid) {
3282
+ throw new DetailedValidationError("GitGovRecord (ActorRecord)", embeddedValidation.errors);
3283
+ }
3284
+ const record = data;
3285
+ const payloadValidation = validateActorRecordDetailed(record.payload);
3286
+ if (!payloadValidation.isValid) {
3287
+ throw new DetailedValidationError("ActorRecord payload", payloadValidation.errors);
3288
+ }
3289
+ return record;
3290
+ }
3291
3291
 
3292
3292
  // src/validation/agent_validator.ts
3293
3293
  function validateAgentRecordSchema(data) {
@@ -3343,7 +3343,7 @@ async function validateAgentActorRelationship(agentRecord, getEffectiveActor) {
3343
3343
  }
3344
3344
 
3345
3345
  // src/factories/agent_factory.ts
3346
- async function createAgentRecord(payload) {
3346
+ function createAgentRecord(payload) {
3347
3347
  const agent = {
3348
3348
  id: payload.id || "",
3349
3349
  engine: payload.engine || { type: "local" },
@@ -3359,8 +3359,18 @@ async function createAgentRecord(payload) {
3359
3359
  }
3360
3360
  return agent;
3361
3361
  }
3362
-
3363
- // src/adapters/identity_adapter/index.ts
3362
+ function loadAgentRecord(data) {
3363
+ const embeddedValidation = validateEmbeddedMetadataDetailed(data);
3364
+ if (!embeddedValidation.isValid) {
3365
+ throw new DetailedValidationError("GitGovRecord (AgentRecord)", embeddedValidation.errors);
3366
+ }
3367
+ const record = data;
3368
+ const payloadValidation = validateAgentRecordDetailed(record.payload);
3369
+ if (!payloadValidation.isValid) {
3370
+ throw new DetailedValidationError("AgentRecord payload", payloadValidation.errors);
3371
+ }
3372
+ return record;
3373
+ }
3364
3374
  var IdentityAdapter = class {
3365
3375
  actorStore;
3366
3376
  agentStore;
@@ -3396,9 +3406,9 @@ var IdentityAdapter = class {
3396
3406
  status: payload.status || "active",
3397
3407
  ...payload
3398
3408
  };
3399
- const validatedPayload = await createActorRecord(completePayload);
3409
+ const validatedPayload = createActorRecord(completePayload);
3400
3410
  const payloadChecksum = calculatePayloadChecksum(validatedPayload);
3401
- const signature = await signPayload(validatedPayload, privateKey, actorId, "author", "Actor registration");
3411
+ const signature = signPayload(validatedPayload, privateKey, actorId, "author", "Actor registration");
3402
3412
  const record = {
3403
3413
  header: {
3404
3414
  version: "1.0",
@@ -3416,6 +3426,18 @@ var IdentityAdapter = class {
3416
3426
  return signerActor?.publicKey || null;
3417
3427
  });
3418
3428
  await this.actorStore.write(record);
3429
+ try {
3430
+ const projectRoot2 = ConfigManager.findProjectRoot();
3431
+ if (projectRoot2) {
3432
+ const actorsDir = pathUtils.join(projectRoot2, ".gitgov", "actors");
3433
+ await promises.mkdir(actorsDir, { recursive: true });
3434
+ const keyPath = pathUtils.join(actorsDir, `${actorId}.key`);
3435
+ await promises.writeFile(keyPath, privateKey, "utf-8");
3436
+ await promises.chmod(keyPath, 384);
3437
+ }
3438
+ } catch (error) {
3439
+ console.warn(`\u26A0\uFE0F Could not persist private key for ${actorId}: ${error instanceof Error ? error.message : "Unknown error"}`);
3440
+ }
3419
3441
  if (this.eventBus) {
3420
3442
  const allActorIds = await this.actorStore.list();
3421
3443
  const isBootstrap = allActorIds.length === 1;
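The hunk above stops printing the private key to the console and instead persists it to `.gitgov/actors/<actorId>.key`. The `384` passed to `chmod` is simply decimal for octal `0o600` (owner read/write only). A small sketch of that persistence step, written against Node's `fs/promises` API:

```typescript
import { mkdir, writeFile, chmod } from "node:fs/promises";
import { join } from "node:path";

// Persist an actor's private key under .gitgov/actors with owner-only permissions.
// 384 (decimal) in the compiled output above is the same mode as 0o600 here.
async function persistPrivateKey(projectRoot: string, actorId: string, privateKeyPem: string) {
  const actorsDir = join(projectRoot, ".gitgov", "actors");
  await mkdir(actorsDir, { recursive: true });
  const keyPath = join(actorsDir, `${actorId}.key`);
  await writeFile(keyPath, privateKeyPem, { encoding: "utf-8", mode: 0o600 });
  await chmod(keyPath, 0o600); // re-apply in case the file already existed with looser permissions
  return keyPath;
}
```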
@@ -3433,7 +3455,6 @@ var IdentityAdapter = class {
3433
3455
  };
3434
3456
  this.eventBus.publish(event);
3435
3457
  }
3436
- console.warn(`Private key for ${actorId}: ${privateKey} (STORE SECURELY)`);
3437
3458
  return validatedPayload;
3438
3459
  }
3439
3460
  async getActor(actorId) {
@@ -3457,19 +3478,46 @@ var IdentityAdapter = class {
3457
3478
  throw new Error(`Actor not found: ${actorId}`);
3458
3479
  }
3459
3480
  const payloadChecksum = calculatePayloadChecksum(record.payload);
3460
- const mockSignature = {
3461
- keyId: actorId,
3462
- role,
3463
- notes: "Record signed",
3464
- signature: `mock-signature-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`,
3465
- timestamp: Math.floor(Date.now() / 1e3)
3466
- };
3481
+ let privateKey = null;
3482
+ try {
3483
+ const projectRoot2 = ConfigManager.findProjectRoot();
3484
+ if (projectRoot2) {
3485
+ const keyPath = pathUtils.join(projectRoot2, ".gitgov", "actors", `${actorId}.key`);
3486
+ const keyContent = await promises.readFile(keyPath, "utf-8");
3487
+ privateKey = keyContent.trim();
3488
+ }
3489
+ } catch (error) {
3490
+ console.warn(`\u26A0\uFE0F Private key not found for ${actorId}, using mock signature`);
3491
+ }
3492
+ let signature;
3493
+ if (privateKey) {
3494
+ signature = signPayload(record.payload, privateKey, actorId, role, "Record signed");
3495
+ } else {
3496
+ signature = {
3497
+ keyId: actorId,
3498
+ role,
3499
+ notes: "Record signed",
3500
+ signature: `mock-signature-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`,
3501
+ timestamp: Math.floor(Date.now() / 1e3)
3502
+ };
3503
+ }
3504
+ const existingSignatures = record.header.signatures || [];
3505
+ const hasPlaceholder = existingSignatures.some((sig) => sig.signature === "placeholder");
3506
+ let finalSignatures;
3507
+ if (hasPlaceholder) {
3508
+ const replaced = existingSignatures.map(
3509
+ (sig) => sig.signature === "placeholder" ? signature : sig
3510
+ );
3511
+ finalSignatures = replaced.length > 0 ? replaced : [signature];
3512
+ } else {
3513
+ finalSignatures = [...existingSignatures, signature];
3514
+ }
3467
3515
  const signedRecord = {
3468
3516
  ...record,
3469
3517
  header: {
3470
3518
  ...record.header,
3471
3519
  payloadChecksum,
3472
- signatures: [...record.header.signatures || [], mockSignature]
3520
+ signatures: finalSignatures
3473
3521
  }
3474
3522
  };
3475
3523
  return signedRecord;
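`signRecord` now prefers the persisted key over a mock signature and replaces any `placeholder` entries instead of appending to them. The digest it signs is the one `verifySignatures` checks earlier in this file: `payloadChecksum:keyId:role:notes:timestamp`. A self-contained sketch of signing and verifying such a digest with Node's crypto module, assuming Ed25519 keys (which the 88-character test signature elsewhere in this diff suggests):

```typescript
import { generateKeyPairSync, sign, verify } from "node:crypto";

// Illustrative only: in-memory Ed25519 keys instead of .gitgov key files.
const { publicKey, privateKey } = generateKeyPairSync("ed25519");

const digest = [
  "a".repeat(64),                // payloadChecksum (SHA-256 hex, placeholder value)
  "human:example-actor",         // keyId (hypothetical actor id)
  "author",                      // role
  "Record signed",               // notes
  Math.floor(Date.now() / 1000), // timestamp
].join(":");

// Ed25519 uses no separate digest algorithm, hence the null first argument.
const signatureB64 = sign(null, Buffer.from(digest), privateKey).toString("base64");
const ok = verify(null, Buffer.from(digest), publicKey, Buffer.from(signatureB64, "base64"));
console.log(ok); // true
```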
@@ -3524,8 +3572,100 @@ var IdentityAdapter = class {
3524
3572
  const currentActorId = await this.resolveCurrentActorId(agentId);
3525
3573
  return this.getActor(currentActorId);
3526
3574
  }
3527
- async rotateActorKey(_actorId) {
3528
- throw new Error("rotateActorKey not implemented yet - complex operation");
3575
+ async rotateActorKey(actorId) {
3576
+ const oldActor = await this.getActor(actorId);
3577
+ if (!oldActor) {
3578
+ throw new Error(`ActorRecord with id ${actorId} not found`);
3579
+ }
3580
+ if (oldActor.status === "revoked") {
3581
+ throw new Error(`Cannot rotate key for revoked actor: ${actorId}`);
3582
+ }
3583
+ const { publicKey: newPublicKey, privateKey: newPrivateKey } = await generateKeys();
3584
+ const baseId = generateActorId(oldActor.type, oldActor.displayName);
3585
+ let newActorId;
3586
+ const versionMatch = baseId.match(/^(.+)-v(\d+)$/);
3587
+ if (versionMatch && versionMatch[1] && versionMatch[2]) {
3588
+ const baseWithoutVersion = versionMatch[1];
3589
+ const currentVersion = parseInt(versionMatch[2], 10);
3590
+ newActorId = `${baseWithoutVersion}-v${currentVersion + 1}`;
3591
+ } else {
3592
+ newActorId = `${baseId}-v2`;
3593
+ }
3594
+ const newActorPayload = {
3595
+ id: newActorId,
3596
+ type: oldActor.type,
3597
+ displayName: oldActor.displayName,
3598
+ publicKey: newPublicKey,
3599
+ roles: oldActor.roles,
3600
+ status: "active"
3601
+ };
3602
+ const validatedNewPayload = createActorRecord(newActorPayload);
3603
+ const payloadChecksum = calculatePayloadChecksum(validatedNewPayload);
3604
+ const signature = signPayload(validatedNewPayload, newPrivateKey, newActorId, "author", "Key rotation");
3605
+ const newRecord = {
3606
+ header: {
3607
+ version: "1.0",
3608
+ type: "actor",
3609
+ payloadChecksum,
3610
+ signatures: [signature]
3611
+ },
3612
+ payload: validatedNewPayload
3613
+ };
3614
+ await validateFullActorRecord(newRecord, async (keyId) => {
3615
+ if (keyId === newActorId) {
3616
+ return newPublicKey;
3617
+ }
3618
+ const signerActor = await this.getActor(keyId);
3619
+ return signerActor?.publicKey || null;
3620
+ });
3621
+ await this.actorStore.write(newRecord);
3622
+ const revokedOldActor = await this.revokeActor(
3623
+ actorId,
3624
+ "system",
3625
+ "rotation",
3626
+ newActorId
3627
+ // Mark succession
3628
+ );
3629
+ try {
3630
+ const configManager = new ConfigManager();
3631
+ const session = await configManager.loadSession();
3632
+ if (session) {
3633
+ session.lastSession = {
3634
+ actorId: newActorId,
3635
+ timestamp: (/* @__PURE__ */ new Date()).toISOString()
3636
+ };
3637
+ if (session.actorState && session.actorState[actorId]) {
3638
+ const oldState = session.actorState[actorId];
3639
+ session.actorState[newActorId] = {
3640
+ ...oldState,
3641
+ lastSync: (/* @__PURE__ */ new Date()).toISOString()
3642
+ };
3643
+ } else if (!session.actorState) {
3644
+ session.actorState = {};
3645
+ }
3646
+ const projectRoot2 = ConfigManager.findProjectRoot() || process.cwd();
3647
+ const sessionPath = pathUtils.join(projectRoot2, ".gitgov", ".session.json");
3648
+ await promises.writeFile(sessionPath, JSON.stringify(session, null, 2), "utf-8");
3649
+ }
3650
+ } catch (error) {
3651
+ console.warn(`\u26A0\uFE0F Could not update session for ${newActorId}: ${error instanceof Error ? error.message : "Unknown error"}`);
3652
+ }
3653
+ try {
3654
+ const projectRoot2 = ConfigManager.findProjectRoot();
3655
+ if (projectRoot2) {
3656
+ const actorsDir = pathUtils.join(projectRoot2, ".gitgov", "actors");
3657
+ await promises.mkdir(actorsDir, { recursive: true });
3658
+ const keyPath = pathUtils.join(actorsDir, `${newActorId}.key`);
3659
+ await promises.writeFile(keyPath, newPrivateKey, "utf-8");
3660
+ await promises.chmod(keyPath, 384);
3661
+ }
3662
+ } catch (error) {
3663
+ console.warn(`\u26A0\uFE0F Could not persist private key for ${newActorId}: ${error instanceof Error ? error.message : "Unknown error"}`);
3664
+ }
3665
+ return {
3666
+ oldActor: revokedOldActor,
3667
+ newActor: validatedNewPayload
3668
+ };
3529
3669
  }
3530
3670
  async revokeActor(actorId, revokedBy = "system", reason = "manual", supersededBy) {
3531
3671
  const existingRecord = await this.actorStore.read(actorId);
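Key rotation derives the successor's id by bumping a `-vN` suffix on the regenerated base id (first rotation yields `-v2`), then revokes the old actor with the new id recorded as `supersededBy`. The suffix rule in isolation:

```typescript
// Successor-id rule used by rotateActorKey: bump an existing "-vN" suffix, else append "-v2".
function nextActorId(baseId: string): string {
  const match = baseId.match(/^(.+)-v(\d+)$/);
  if (match && match[1] && match[2]) {
    return `${match[1]}-v${parseInt(match[2], 10) + 1}`;
  }
  return `${baseId}-v2`;
}

console.log(nextActorId("human:example-actor"));    // "human:example-actor-v2"  (hypothetical id)
console.log(nextActorId("human:example-actor-v2")); // "human:example-actor-v3"
```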
@@ -3589,9 +3729,26 @@ var IdentityAdapter = class {
3589
3729
  prompt_engine_requirements: payload.prompt_engine_requirements || {},
3590
3730
  ...payload
3591
3731
  };
3592
- const validatedPayload = await createAgentRecord(completePayload);
3732
+ const validatedPayload = createAgentRecord(completePayload);
3593
3733
  const payloadChecksum = calculatePayloadChecksum(validatedPayload);
3594
- const signature = signPayload(validatedPayload, "placeholder-private-key", payload.id, "author", "Agent registration");
3734
+ let privateKey;
3735
+ try {
3736
+ const projectRoot2 = ConfigManager.findProjectRoot();
3737
+ if (!projectRoot2) {
3738
+ throw new Error("Project root not found. Cannot locate private key.");
3739
+ }
3740
+ const keyPath = pathUtils.join(projectRoot2, ".gitgov", "actors", `${payload.id}.key`);
3741
+ const keyContent = await promises.readFile(keyPath, "utf-8");
3742
+ privateKey = keyContent.trim();
3743
+ if (!privateKey) {
3744
+ throw new Error(`Private key file is empty for ${payload.id}`);
3745
+ }
3746
+ } catch (error) {
3747
+ throw new Error(
3748
+ `Private key not found for actor ${payload.id}. AgentRecord requires a valid private key for cryptographic signing. If this is a legacy actor, you may need to regenerate the actor with 'gitgov actor new'. Original error: ${error instanceof Error ? error.message : "Unknown error"}`
3749
+ );
3750
+ }
3751
+ const signature = signPayload(validatedPayload, privateKey, payload.id, "author", "Agent registration");
3595
3752
  const record = {
3596
3753
  header: {
3597
3754
  version: "1.0",
@@ -3689,7 +3846,7 @@ async function validateFullFeedbackRecord(record, getPublicKey) {
3689
3846
  }
3690
3847
 
3691
3848
  // src/factories/feedback_factory.ts
3692
- async function createFeedbackRecord(payload) {
3849
+ function createFeedbackRecord(payload) {
3693
3850
  const timestamp = Math.floor(Date.now() / 1e3);
3694
3851
  const feedback = {
3695
3852
  id: payload.id || generateFeedbackId(payload.content || "feedback", timestamp),
@@ -3708,6 +3865,18 @@ async function createFeedbackRecord(payload) {
3708
3865
  }
3709
3866
  return feedback;
3710
3867
  }
3868
+ function loadFeedbackRecord(data) {
3869
+ const embeddedValidation = validateEmbeddedMetadataDetailed(data);
3870
+ if (!embeddedValidation.isValid) {
3871
+ throw new DetailedValidationError("GitGovRecord (FeedbackRecord)", embeddedValidation.errors);
3872
+ }
3873
+ const record = data;
3874
+ const payloadValidation = validateFeedbackRecordDetailed(record.payload);
3875
+ if (!payloadValidation.isValid) {
3876
+ throw new DetailedValidationError("FeedbackRecord payload", payloadValidation.errors);
3877
+ }
3878
+ return record;
3879
+ }
3711
3880
 
3712
3881
  // src/adapters/feedback_adapter/index.ts
3713
3882
  var FeedbackAdapter = class {
@@ -3758,7 +3927,7 @@ var FeedbackAdapter = class {
3758
3927
  // Allows payload.status to override default
3759
3928
  };
3760
3929
  try {
3761
- const validatedPayload = await createFeedbackRecord(enrichedPayload);
3930
+ const validatedPayload = createFeedbackRecord(enrichedPayload);
3762
3931
  const unsignedRecord = {
3763
3932
  header: {
3764
3933
  version: "1.0",
@@ -3966,7 +4135,7 @@ async function validateFullExecutionRecord(record, getPublicKey) {
3966
4135
  }
3967
4136
 
3968
4137
  // src/factories/execution_factory.ts
3969
- async function createExecutionRecord(payload) {
4138
+ function createExecutionRecord(payload) {
3970
4139
  const timestamp = Math.floor(Date.now() / 1e3);
3971
4140
  const execution = {
3972
4141
  id: payload.id || generateExecutionId(payload.title || "execution", timestamp),
@@ -3984,6 +4153,18 @@ async function createExecutionRecord(payload) {
3984
4153
  }
3985
4154
  return execution;
3986
4155
  }
4156
+ function loadExecutionRecord(data) {
4157
+ const embeddedValidation = validateEmbeddedMetadataDetailed(data);
4158
+ if (!embeddedValidation.isValid) {
4159
+ throw new DetailedValidationError("GitGovRecord (ExecutionRecord)", embeddedValidation.errors);
4160
+ }
4161
+ const record = data;
4162
+ const payloadValidation = validateExecutionRecordDetailed(record.payload);
4163
+ if (!payloadValidation.isValid) {
4164
+ throw new DetailedValidationError("ExecutionRecord payload", payloadValidation.errors);
4165
+ }
4166
+ return record;
4167
+ }
3987
4168
 
3988
4169
  // src/adapters/execution_adapter/index.ts
3989
4170
  var ExecutionAdapter = class {
@@ -4013,7 +4194,7 @@ var ExecutionAdapter = class {
4013
4194
  }
4014
4195
  }
4015
4196
  try {
4016
- const validatedPayload = await createExecutionRecord(payload);
4197
+ const validatedPayload = createExecutionRecord(payload);
4017
4198
  const unsignedRecord = {
4018
4199
  header: {
4019
4200
  version: "1.0",
@@ -4155,7 +4336,7 @@ async function validateFullChangelogRecord(record, getPublicKey) {
4155
4336
  }
4156
4337
 
4157
4338
  // src/factories/changelog_factory.ts
4158
- async function createChangelogRecord(payload) {
4339
+ function createChangelogRecord(payload) {
4159
4340
  const timestamp = Math.floor(Date.now() / 1e3);
4160
4341
  const changelog = {
4161
4342
  // Required fields
@@ -4179,6 +4360,18 @@ async function createChangelogRecord(payload) {
4179
4360
  }
4180
4361
  return changelog;
4181
4362
  }
4363
+ function loadChangelogRecord(data) {
4364
+ const embeddedValidation = validateEmbeddedMetadataDetailed(data);
4365
+ if (!embeddedValidation.isValid) {
4366
+ throw new DetailedValidationError("GitGovRecord (ChangelogRecord)", embeddedValidation.errors);
4367
+ }
4368
+ const record = data;
4369
+ const payloadValidation = validateChangelogRecordDetailed(record.payload);
4370
+ if (!payloadValidation.isValid) {
4371
+ throw new DetailedValidationError("ChangelogRecord payload", payloadValidation.errors);
4372
+ }
4373
+ return record;
4374
+ }
4182
4375
 
4183
4376
  // src/adapters/changelog_adapter/index.ts
4184
4377
  var ChangelogAdapter = class {
@@ -4233,7 +4426,7 @@ var ChangelogAdapter = class {
4233
4426
  if (!payload.id) {
4234
4427
  payload.id = generateChangelogId(payload.title, timestamp);
4235
4428
  }
4236
- const validatedPayload = await createChangelogRecord(payload);
4429
+ const validatedPayload = createChangelogRecord(payload);
4237
4430
  const unsignedRecord = {
4238
4431
  header: {
4239
4432
  version: "1.0",
@@ -4927,7 +5120,7 @@ var BacklogAdapter = class {
4927
5120
  * Creates a new task with workflow validation
4928
5121
  */
4929
5122
  async createTask(payload, actorId) {
4930
- const validatedPayload = await createTaskRecord(payload);
5123
+ const validatedPayload = createTaskRecord(payload);
4931
5124
  const unsignedRecord = {
4932
5125
  header: {
4933
5126
  version: "1.0",
@@ -5344,7 +5537,7 @@ ${task.status === "review" ? "[REJECTED]" : "[CANCELLED]"} ${reason} (${(/* @__P
5344
5537
  if (["archived"].includes(taskRecord.payload.status)) {
5345
5538
  throw new Error(`ProtocolViolationError: Cannot update task in final state: ${taskRecord.payload.status}`);
5346
5539
  }
5347
- const updatedPayload = await createTaskRecord({ ...taskRecord.payload, ...payload });
5540
+ const updatedPayload = createTaskRecord({ ...taskRecord.payload, ...payload });
5348
5541
  const updatedRecord = { ...taskRecord, payload: updatedPayload };
5349
5542
  await this.taskStore.write(updatedRecord);
5350
5543
  return updatedPayload;
@@ -5641,7 +5834,7 @@ ${task.status === "review" ? "[REJECTED]" : "[CANCELLED]"} ${reason} (${(/* @__P
5641
5834
  * Creates a new cycle with workflow validation
5642
5835
  */
5643
5836
  async createCycle(payload, actorId) {
5644
- const validatedPayload = await createCycleRecord(payload);
5837
+ const validatedPayload = createCycleRecord(payload);
5645
5838
  const unsignedRecord = {
5646
5839
  header: {
5647
5840
  version: "1.0",
@@ -5707,7 +5900,7 @@ ${task.status === "review" ? "[REJECTED]" : "[CANCELLED]"} ${reason} (${(/* @__P
5707
5900
  if (["archived"].includes(cycleRecord.payload.status)) {
5708
5901
  throw new Error(`ProtocolViolationError: Cannot update cycle in final state: ${cycleRecord.payload.status}`);
5709
5902
  }
5710
- const updatedPayload = await createCycleRecord({ ...cycleRecord.payload, ...payload });
5903
+ const updatedPayload = createCycleRecord({ ...cycleRecord.payload, ...payload });
5711
5904
  const updatedRecord = { ...cycleRecord, payload: updatedPayload };
5712
5905
  if (cycleRecord.payload.status !== updatedPayload.status) {
5713
5906
  this.eventBus.publish({
@@ -5930,6 +6123,44 @@ var indexer_adapter_exports = {};
5930
6123
  __export(indexer_adapter_exports, {
5931
6124
  FileIndexerAdapter: () => FileIndexerAdapter
5932
6125
  });
6126
+
6127
+ // src/utils/signature_utils.ts
6128
+ function extractAuthor(record) {
6129
+ const signatures = record.header.signatures;
6130
+ if (!signatures || signatures.length === 0) {
6131
+ return void 0;
6132
+ }
6133
+ const firstSignature = signatures[0];
6134
+ return {
6135
+ actorId: firstSignature.keyId,
6136
+ timestamp: firstSignature.timestamp
6137
+ };
6138
+ }
6139
+ function extractLastModifier(record) {
6140
+ const signatures = record.header.signatures;
6141
+ if (!signatures || signatures.length === 0) {
6142
+ return void 0;
6143
+ }
6144
+ const lastSignature = signatures[signatures.length - 1];
6145
+ if (!lastSignature) {
6146
+ return void 0;
6147
+ }
6148
+ return {
6149
+ actorId: lastSignature.keyId,
6150
+ timestamp: lastSignature.timestamp
6151
+ };
6152
+ }
6153
+
6154
+ // src/crypto/index.ts
6155
+ var crypto_exports = {};
6156
+ __export(crypto_exports, {
6157
+ calculatePayloadChecksum: () => calculatePayloadChecksum,
6158
+ generateKeys: () => generateKeys,
6159
+ signPayload: () => signPayload,
6160
+ verifySignatures: () => verifySignatures
6161
+ });
6162
+
6163
+ // src/adapters/indexer_adapter/index.ts
5933
6164
  var FileIndexerAdapter = class {
5934
6165
  metricsAdapter;
5935
6166
  taskStore;
@@ -5985,17 +6216,23 @@ var FileIndexerAdapter = class {
5985
6216
  actors
5986
6217
  };
5987
6218
  const activityHistory = await this.calculateActivityHistory(allRecords);
5988
- const enrichedTasks = [];
5989
- for (const task of tasks) {
5990
- const enrichedTask = await this.enrichTaskRecord(task, allRecords);
5991
- enrichedTasks.push(enrichedTask);
5992
- }
6219
+ const derivedStates = await this.calculateDerivedStates(allRecords);
6220
+ const derivedStateSets = {
6221
+ stalledTasks: new Set(derivedStates.stalledTasks),
6222
+ atRiskTasks: new Set(derivedStates.atRiskTasks),
6223
+ needsClarificationTasks: new Set(derivedStates.needsClarificationTasks),
6224
+ blockedByDependencyTasks: new Set(derivedStates.blockedByDependencyTasks)
6225
+ };
6226
+ const enrichedTasks = await Promise.all(
6227
+ tasks.map((task) => this.enrichTaskRecord(task, allRecords, derivedStateSets))
6228
+ );
6229
+ const integrityReport = await this.validateIntegrity();
5993
6230
  const indexData = {
5994
6231
  metadata: {
5995
6232
  generatedAt: (/* @__PURE__ */ new Date()).toISOString(),
5996
6233
  lastCommitHash: await this.getGitCommitHash(),
5997
- integrityStatus: "valid",
5998
- // TODO: Implement integrity check
6234
+ integrityStatus: integrityReport.status,
6235
+ // Populated from validateIntegrity() (EARS-4)
5999
6236
  recordCounts: {
6000
6237
  tasks: tasks.length,
6001
6238
  cycles: cycles.length,
@@ -6009,16 +6246,20 @@ var FileIndexerAdapter = class {
6009
6246
  // Will be set below
6010
6247
  },
6011
6248
  metrics: { ...systemStatus, ...productivityMetrics, ...collaborationMetrics },
6249
+ derivedStates,
6250
+ // System-wide derived states for analytics
6012
6251
  activityHistory,
6013
- // NEW
6014
- tasks: tasks.map((t) => t.payload),
6015
- // Extract payloads for IndexData
6252
+ // Activity stream for dashboard
6253
+ tasks,
6254
+ // Keep full records with headers (source of truth)
6016
6255
  enrichedTasks,
6017
- // NUEVO - Tasks with activity metadata
6018
- cycles: cycles.map((c) => c.payload),
6019
- // Extract payloads for IndexData
6020
- actors: actors.map((a) => a.payload)
6021
- // Extract payloads for IndexData
6256
+ // Tasks with intelligence layer
6257
+ cycles,
6258
+ // Keep full records with headers
6259
+ actors,
6260
+ // Keep full records with headers
6261
+ feedback: allRecords.feedback
6262
+ // Optional - Phase 1B+ raw feedback records
6022
6263
  };
6023
6264
  const writeStart = performance.now();
6024
6265
  await this.writeCacheFile(indexData);
@@ -6031,6 +6272,8 @@ var FileIndexerAdapter = class {
6031
6272
  recordsProcessed: tasks.length + cycles.length + actors.length,
6032
6273
  metricsCalculated: 3,
6033
6274
  // systemStatus + productivity + collaboration
6275
+ derivedStatesApplied: Object.values(derivedStates).reduce((sum, arr) => sum + arr.length, 0),
6276
+ // Total tasks with derived states
6034
6277
  generationTime: totalTime,
6035
6278
  cacheSize,
6036
6279
  cacheStrategy: this.cacheStrategy,
@@ -6042,6 +6285,7 @@ var FileIndexerAdapter = class {
6042
6285
  success: false,
6043
6286
  recordsProcessed: 0,
6044
6287
  metricsCalculated: 0,
6288
+ derivedStatesApplied: 0,
6045
6289
  generationTime: performance.now() - startTime,
6046
6290
  cacheSize: 0,
6047
6291
  cacheStrategy: this.cacheStrategy,
@@ -6052,6 +6296,7 @@ var FileIndexerAdapter = class {
6052
6296
  }
6053
6297
  /**
6054
6298
  * [EARS-2] Gets data from local cache for fast CLI queries
6299
+ * [EARS-13] Returns null and logs warning if cache is corrupted
6055
6300
  */
6056
6301
  async getIndexData() {
6057
6302
  try {
@@ -6067,18 +6312,29 @@ var FileIndexerAdapter = class {
6067
6312
  const indexData = JSON.parse(cacheContent);
6068
6313
  return indexData;
6069
6314
  } catch (error) {
6070
- console.warn(`Cache read error: ${error instanceof Error ? error.message : String(error)}`);
6315
+ console.warn(`Warning: Cache is corrupted or invalid. Please regenerate with 'gitgov index'.`);
6316
+ console.warn(`Details: ${error instanceof Error ? error.message : String(error)}`);
6071
6317
  return null;
6072
6318
  }
6073
6319
  }
6074
6320
  /**
6075
- * [EARS-4] Validates integrity of Records without regenerating cache
6321
+ * [EARS-4, EARS-70 to EARS-76] Validates integrity of Records without regenerating cache
6322
+ *
6323
+ * PHASE 1A (IMPLEMENTED): Basic schema validation (required fields)
6324
+ * PHASE 1B (IMPLEMENTED): Cryptographic validation (checksums + signatures)
6325
+ * TODO FUTURE:
6326
+ * - Integrate ValidatorModule for comprehensive schema validation
6327
+ * - Compare cache consistency with Records
6328
+ * - Detect broken references between records
6329
+ * - Validate timestamp consistency
6076
6330
  */
6077
6331
  async validateIntegrity() {
6078
6332
  const startTime = performance.now();
6079
6333
  const errors = [];
6080
6334
  const warnings = [];
6081
6335
  let recordsScanned = 0;
6336
+ let checksumFailures = 0;
6337
+ let signatureFailures = 0;
6082
6338
  try {
6083
6339
  const [tasks, cycles] = await Promise.all([
6084
6340
  this.readAllTasks(),
@@ -6103,6 +6359,57 @@ var FileIndexerAdapter = class {
6103
6359
  });
6104
6360
  }
6105
6361
  }
6362
+ for (const task of tasks) {
6363
+ const calculatedChecksum = calculatePayloadChecksum(task.payload);
6364
+ if (calculatedChecksum !== task.header.payloadChecksum) {
6365
+ checksumFailures++;
6366
+ errors.push({
6367
+ type: "checksum_failure",
6368
+ recordId: task.payload.id,
6369
+ message: `Checksum mismatch: expected ${task.header.payloadChecksum}, got ${calculatedChecksum}`
6370
+ });
6371
+ }
6372
+ }
6373
+ for (const cycle of cycles) {
6374
+ const calculatedChecksum = calculatePayloadChecksum(cycle.payload);
6375
+ if (calculatedChecksum !== cycle.header.payloadChecksum) {
6376
+ checksumFailures++;
6377
+ errors.push({
6378
+ type: "checksum_failure",
6379
+ recordId: cycle.payload.id,
6380
+ message: `Checksum mismatch: expected ${cycle.header.payloadChecksum}, got ${calculatedChecksum}`
6381
+ });
6382
+ }
6383
+ }
6384
+ const getActorPublicKey = async (keyId) => {
6385
+ if (!this.actorStore) {
6386
+ return null;
6387
+ }
6388
+ const actor = await this.actorStore.read(keyId);
6389
+ return actor?.payload.publicKey || null;
6390
+ };
6391
+ for (const task of tasks) {
6392
+ const isValid = await verifySignatures(task, getActorPublicKey);
6393
+ if (!isValid) {
6394
+ signatureFailures++;
6395
+ errors.push({
6396
+ type: "signature_invalid",
6397
+ recordId: task.payload.id,
6398
+ message: "One or more signatures failed verification"
6399
+ });
6400
+ }
6401
+ }
6402
+ for (const cycle of cycles) {
6403
+ const isValid = await verifySignatures(cycle, getActorPublicKey);
6404
+ if (!isValid) {
6405
+ signatureFailures++;
6406
+ errors.push({
6407
+ type: "signature_invalid",
6408
+ recordId: cycle.payload.id,
6409
+ message: "One or more signatures failed verification"
6410
+ });
6411
+ }
6412
+ }
6106
6413
  const status = errors.length > 0 ? "errors" : warnings.length > 0 ? "warnings" : "valid";
6107
6414
  return {
6108
6415
  status,
@@ -6110,10 +6417,8 @@ var FileIndexerAdapter = class {
6110
6417
  errorsFound: errors,
6111
6418
  warningsFound: warnings,
6112
6419
  validationTime: performance.now() - startTime,
6113
- checksumFailures: 0,
6114
- // TODO: Implement checksum validation
6115
- signatureFailures: 0
6116
- // TODO: Implement signature validation
6420
+ checksumFailures,
6421
+ signatureFailures
6117
6422
  };
6118
6423
  } catch (error) {
6119
6424
  return {
@@ -6126,8 +6431,8 @@ var FileIndexerAdapter = class {
6126
6431
  }],
6127
6432
  warningsFound: warnings,
6128
6433
  validationTime: performance.now() - startTime,
6129
- checksumFailures: 0,
6130
- signatureFailures: 0
6434
+ checksumFailures,
6435
+ signatureFailures
6131
6436
  };
6132
6437
  }
6133
6438
  }
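`validateIntegrity()` now does real checksum and signature accounting instead of hard-coded zeros. The checksum half amounts to recomputing a SHA-256 hex digest of each payload and comparing it with `header.payloadChecksum`; the exact payload serialization used by `calculatePayloadChecksum` is not visible in this diff, so the sketch below substitutes plain `JSON.stringify` and should be read as an assumption.

```typescript
import { createHash } from "node:crypto";

// Assumed canonicalization: JSON.stringify of the payload (the real helper may differ).
function checksumMatches(payload: unknown, expectedChecksum: string): boolean {
  const actual = createHash("sha256").update(JSON.stringify(payload)).digest("hex");
  return actual === expectedChecksum;
}

// Usage: collect one "checksum_failure" error per mismatching record.
const errors: Array<{ type: string; recordId: string; message: string }> = [];
const record = { header: { payloadChecksum: "0".repeat(64) }, payload: { id: "task-1", title: "example" } };
if (!checksumMatches(record.payload, record.header.payloadChecksum)) {
  errors.push({
    type: "checksum_failure",
    recordId: record.payload.id,
    message: "payloadChecksum does not match the stored payload",
  });
}
```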
@@ -6252,6 +6557,7 @@ var FileIndexerAdapter = class {
6252
6557
  }
6253
6558
  /**
6254
6559
  * Reads all changelogs from changelogStore (graceful degradation) with full metadata.
6560
+ * Validates schema and filters out invalid records with warnings.
6255
6561
  */
6256
6562
  async readAllChangelogs() {
6257
6563
  if (!this.changelogStore) {
@@ -6269,12 +6575,61 @@ var FileIndexerAdapter = class {
6269
6575
  }
6270
6576
  /**
6271
6577
  * Writes cache data to file (Phase 1: JSON)
6578
+ *
6579
+ * [EARS-14] Creates automatic backup of existing cache before writing.
6580
+ * If write fails, backup can be restored to preserve previous cache state.
6272
6581
  */
6273
6582
  async writeCacheFile(indexData) {
6274
6583
  const cacheDir = pathUtils.dirname(this.cachePath);
6275
6584
  await promises.mkdir(cacheDir, { recursive: true });
6276
- const jsonContent = JSON.stringify(indexData, null, 2);
6277
- await promises.writeFile(this.cachePath, jsonContent, "utf-8");
6585
+ await this.createCacheBackup();
6586
+ try {
6587
+ const jsonContent = JSON.stringify(indexData, null, 2);
6588
+ await promises.writeFile(this.cachePath, jsonContent, "utf-8");
6589
+ await this.deleteCacheBackup();
6590
+ } catch (error) {
6591
+ await this.restoreCacheFromBackup();
6592
+ throw error;
6593
+ }
6594
+ }
6595
+ /**
6596
+ * [EARS-14] Creates a backup of the current cache file.
6597
+ * If cache doesn't exist, this is a no-op (nothing to backup).
6598
+ */
6599
+ async createCacheBackup() {
6600
+ const backupPath = `${this.cachePath}.backup`;
6601
+ try {
6602
+ const cacheExists = await this.cacheFileExists();
6603
+ if (cacheExists) {
6604
+ await promises.copyFile(this.cachePath, backupPath);
6605
+ }
6606
+ } catch (error) {
6607
+ console.warn(`Warning: Could not create cache backup: ${error instanceof Error ? error.message : String(error)}`);
6608
+ }
6609
+ }
6610
+ /**
6611
+ * [EARS-14] Restores cache from backup file.
6612
+ * Used when cache write operation fails to preserve previous state.
6613
+ */
6614
+ async restoreCacheFromBackup() {
6615
+ const backupPath = `${this.cachePath}.backup`;
6616
+ try {
6617
+ await promises.access(backupPath);
6618
+ await promises.copyFile(backupPath, this.cachePath);
6619
+ await promises.unlink(backupPath);
6620
+ } catch (error) {
6621
+ console.warn(`Warning: Could not restore cache from backup: ${error instanceof Error ? error.message : String(error)}`);
6622
+ }
6623
+ }
6624
+ /**
6625
+ * [EARS-14] Deletes backup file after successful cache write.
6626
+ */
6627
+ async deleteCacheBackup() {
6628
+ const backupPath = `${this.cachePath}.backup`;
6629
+ try {
6630
+ await promises.unlink(backupPath);
6631
+ } catch {
6632
+ }
6278
6633
  }
6279
6634
  /**
6280
6635
  * Checks if cache file exists
@@ -6308,6 +6663,79 @@ var FileIndexerAdapter = class {
6308
6663
  return "unknown";
6309
6664
  }
6310
6665
  }
6666
+ /**
6667
+ * [EARS-7 to EARS-10] Calculates system-wide derived states for analytics and filtering.
6668
+ *
6669
+ * Applies DerivedDataProtocol algorithms to categorize tasks:
6670
+ * - isStalled: tasks in 'active' with no executions for >7 days OR in 'review' with no approval for >3 days
6671
+ * - isAtRisk: tasks with 'critical' priority that are 'paused' OR with 2+ blocking feedbacks
6672
+ * - needsClarification: tasks with an open 'question' feedback
6673
+ * - isBlockedByDependency: tasks that reference other tasks that are not yet completed
6674
+ *
6675
+ * @see derived_data_protocol.md for detailed algorithms
6676
+ * @see EARS-7, EARS-8, EARS-9, EARS-10 for requirements
6677
+ */
6678
+ async calculateDerivedStates(allRecords) {
6679
+ const derivedStates = {
6680
+ stalledTasks: [],
6681
+ atRiskTasks: [],
6682
+ needsClarificationTasks: [],
6683
+ blockedByDependencyTasks: []
6684
+ };
6685
+ const now = Date.now();
6686
+ const SEVEN_DAYS_MS = 7 * 24 * 60 * 60 * 1e3;
6687
+ try {
6688
+ for (const task of allRecords.tasks) {
6689
+ const taskId = task.payload.id;
6690
+ const taskPayload = task.payload;
6691
+ if (taskPayload.status === "active" || taskPayload.status === "review") {
6692
+ const taskTimestamp = this.getTimestampFromId(taskId) * 1e3;
6693
+ const daysSinceCreation = (now - taskTimestamp) / (24 * 60 * 60 * 1e3);
6694
+ const hasRecentExecution = allRecords.executions.some((exec) => {
6695
+ if (exec.payload.taskId === taskId) {
6696
+ const execTimestamp = this.getTimestampFromId(exec.payload.id) * 1e3;
6697
+ return now - execTimestamp < SEVEN_DAYS_MS;
6698
+ }
6699
+ return false;
6700
+ });
6701
+ const isStalled = taskPayload.status === "active" && daysSinceCreation > 7 && !hasRecentExecution || taskPayload.status === "review" && daysSinceCreation > 3;
6702
+ if (isStalled) {
6703
+ derivedStates.stalledTasks.push(taskId);
6704
+ }
6705
+ }
6706
+ const isCriticalPaused = taskPayload.priority === "critical" && taskPayload.status === "paused";
6707
+ const blockingFeedbackCount = allRecords.feedback.filter((feedback) => {
6708
+ return feedback.payload.type === "blocking" && feedback.payload.status === "open" && feedback.payload.entityId === taskId;
6709
+ }).length;
6710
+ if (isCriticalPaused || blockingFeedbackCount >= 2) {
6711
+ derivedStates.atRiskTasks.push(taskId);
6712
+ }
6713
+ const hasOpenQuestion = allRecords.feedback.some((feedback) => {
6714
+ return feedback.payload.type === "question" && feedback.payload.status === "open" && feedback.payload.entityId === taskId;
6715
+ });
6716
+ if (hasOpenQuestion) {
6717
+ derivedStates.needsClarificationTasks.push(taskId);
6718
+ }
6719
+ if (taskPayload.references && taskPayload.references.length > 0) {
6720
+ const hasBlockingDependency = taskPayload.references.some((ref) => {
6721
+ if (ref.startsWith("task:")) {
6722
+ const dependencyId = ref.replace("task:", "");
6723
+ const dependencyTask = allRecords.tasks.find((t) => t.payload.id === dependencyId);
6724
+ return dependencyTask && dependencyTask.payload.status !== "done" && dependencyTask.payload.status !== "archived";
6725
+ }
6726
+ return false;
6727
+ });
6728
+ if (hasBlockingDependency) {
6729
+ derivedStates.blockedByDependencyTasks.push(taskId);
6730
+ }
6731
+ }
6732
+ }
6733
+ return derivedStates;
6734
+ } catch (error) {
6735
+ console.warn(`calculateDerivedStates error: ${error instanceof Error ? error.message : String(error)}`);
6736
+ return derivedStates;
6737
+ }
6738
+ }
6311
6739
  /**
6312
6740
  * [EARS-19] Calculates activity history from Record timestamps for dashboard activity streams
6313
6741
  */
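The derived states are computed once per index run and then consumed as `Set`s during task enrichment, so per-task membership checks stay O(1) (EARS-43). The "stalled" rule restated as a pure function for clarity (timestamps in epoch milliseconds; the adapter derives them from record ids):

```typescript
const DAY_MS = 24 * 60 * 60 * 1000;

// A task is stalled if it has been 'active' for >7 days with no execution in the last
// 7 days, or in 'review' for >3 days (the shipped check is purely time-based).
function isStalled(
  status: string,
  createdAtMs: number,
  lastExecutionAtMs: number | null,
  nowMs: number = Date.now()
): boolean {
  const ageDays = (nowMs - createdAtMs) / DAY_MS;
  const hasRecentExecution =
    lastExecutionAtMs !== null && nowMs - lastExecutionAtMs < 7 * DAY_MS;
  if (status === "active") return ageDays > 7 && !hasRecentExecution;
  if (status === "review") return ageDays > 3;
  return false;
}
```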
@@ -6417,10 +6845,11 @@ var FileIndexerAdapter = class {
6417
6845
  /**
6418
6846
  * [EARS-21] Calculate lastUpdated timestamp and activity type for a task
6419
6847
  * Considers task file modification time and related records timestamps
6848
+ * @param taskPayload - Task payload (not full record with headers)
6420
6849
  */
6421
- async calculateLastUpdated(task, relatedRecords) {
6850
+ async calculateLastUpdated(taskPayload, relatedRecords) {
6422
6851
  try {
6423
- let lastUpdated = this.getTimestampFromId(task.id) * 1e3;
6852
+ let lastUpdated = this.getTimestampFromId(taskPayload.id) * 1e3;
6424
6853
  let lastActivityType = "task_created";
6425
6854
  let recentActivity = "Task created";
6426
6855
  try {
@@ -6428,10 +6857,10 @@ var FileIndexerAdapter = class {
6428
6857
  while (!fs.existsSync(pathUtils.join(projectRoot2, ".gitgov")) && projectRoot2 !== "/") {
6429
6858
  projectRoot2 = pathUtils.dirname(projectRoot2);
6430
6859
  }
6431
- const taskFilePath = pathUtils.join(projectRoot2, ".gitgov", "tasks", `${task.id}.json`);
6860
+ const taskFilePath = pathUtils.join(projectRoot2, ".gitgov", "tasks", `${taskPayload.id}.json`);
6432
6861
  const stats = await promises.stat(taskFilePath);
6433
6862
  const fileModTime = stats.mtime.getTime();
6434
- const creationTime = this.getTimestampFromId(task.id) * 1e3;
6863
+ const creationTime = this.getTimestampFromId(taskPayload.id) * 1e3;
6435
6864
  const timeDifference = fileModTime - creationTime;
6436
6865
  if (timeDifference > 6e4 && fileModTime > lastUpdated) {
6437
6866
  lastUpdated = fileModTime;
@@ -6441,7 +6870,7 @@ var FileIndexerAdapter = class {
6441
6870
  } catch (error) {
6442
6871
  }
6443
6872
  const relatedFeedback = relatedRecords.feedback.filter(
6444
- (f) => f.payload.entityId === task.id || f.payload.content.includes(task.id)
6873
+ (f) => f.payload.entityId === taskPayload.id || f.payload.content && f.payload.content.includes(taskPayload.id)
6445
6874
  );
6446
6875
  for (const feedback of relatedFeedback) {
6447
6876
  const feedbackTime = this.getTimestampFromId(feedback.payload.id) * 1e3;
@@ -6451,7 +6880,7 @@ var FileIndexerAdapter = class {
6451
6880
  recentActivity = `${feedback.payload.type} feedback: ${feedback.payload.content.slice(0, 30)}...`;
6452
6881
  }
6453
6882
  }
6454
- const relatedExecutions = relatedRecords.executions.filter((e) => e.payload.taskId === task.id);
6883
+ const relatedExecutions = relatedRecords.executions.filter((e) => e.payload.taskId === taskPayload.id);
6455
6884
  for (const execution of relatedExecutions) {
6456
6885
  const executionTime = this.getTimestampFromId(execution.payload.id) * 1e3;
6457
6886
  if (executionTime > lastUpdated) {
@@ -6461,7 +6890,7 @@ var FileIndexerAdapter = class {
6461
6890
  }
6462
6891
  }
6463
6892
  const relatedChangelogs = relatedRecords.changelogs.filter(
6464
- (c) => c.payload.relatedTasks.includes(task.id) || c.payload.description?.includes(task.id)
6893
+ (c) => c.payload.relatedTasks && c.payload.relatedTasks.includes(taskPayload.id) || c.payload.description?.includes(taskPayload.id)
6465
6894
  );
6466
6895
  for (const changelog of relatedChangelogs) {
6467
6896
  const changelogTime = this.getTimestampFromId(changelog.payload.id) * 1e3;
@@ -6473,7 +6902,7 @@ var FileIndexerAdapter = class {
6473
6902
  }
6474
6903
  return { lastUpdated, lastActivityType, recentActivity };
6475
6904
  } catch (error) {
6476
- const fallbackTime = this.getTimestampFromId(task.id) * 1e3;
6905
+ const fallbackTime = this.getTimestampFromId(taskPayload.id) * 1e3;
6477
6906
  return {
6478
6907
  lastUpdated: fallbackTime,
6479
6908
  lastActivityType: "task_created",
@@ -6486,14 +6915,119 @@ var FileIndexerAdapter = class {
6486
6915
  * @param task - Full GitGovTaskRecord with header.signatures for author/lastModifier extraction
6487
6916
  * @param relatedRecords - All related records with full metadata
6488
6917
  */
6489
- async enrichTaskRecord(task, relatedRecords) {
6918
+ /**
6919
+ * Enriches a task with complete intelligence layer (EARS 25-48)
6920
+ *
6921
+ * 11-step algorithm:
6922
+ * 1. Activity metadata (lastUpdated, lastActivityType, recentActivity)
6923
+ * 2. Signatures (author, lastModifier with timestamps)
6924
+ * 3. Assignments (assignedTo from feedback)
6925
+ * 4. Dependencies (dependsOn, blockedBy with typed references)
6926
+ * 5. Cycles (all cycles as array with id+title)
6927
+ * 6. Metrics (executionCount, blockingFeedbackCount, openQuestionCount)
6928
+ * 7. Time to resolution (for done tasks)
6929
+ * 8. Release info (isReleased, lastReleaseVersion from changelogs)
6930
+ * 9. Derived states (EARS-43: reuses pre-calculated derivedStates with O(1) lookup)
6931
+ * 10. Health score (0-100 using multi-factor algorithm)
6932
+ * 11. Time in current stage (days)
6933
+ *
6934
+ * @param task - Full GitGovTaskRecord with header.signatures
6935
+ * @param relatedRecords - All records for cross-referencing
6936
+ * @param derivedStateSets - Pre-calculated system-wide derived states as Sets for O(1) lookup (EARS-43)
6937
+ * @returns Promise<EnrichedTaskRecord> - Task with complete intelligence layer
6938
+ */
6939
+ async enrichTaskRecord(task, relatedRecords, derivedStateSets) {
6490
6940
  const { lastUpdated, lastActivityType, recentActivity } = await this.calculateLastUpdated(task.payload, relatedRecords);
6491
- return {
6941
+ const author = extractAuthor(task);
6942
+ const lastModifier = extractLastModifier(task);
6943
+ const assignments = relatedRecords.feedback.filter((f) => f.payload.entityId === task.payload.id && f.payload.type === "assignment").map((f) => ({
6944
+ actorId: f.payload.assignee || "unknown",
6945
+ assignedAt: this.getTimestampFromId(f.payload.id) * 1e3
6946
+ // ms
6947
+ }));
6948
+ const completedStatuses = ["done", "archived", "discarded"];
6949
+ const dependsOn = (task.payload.references || []).filter((ref) => {
6950
+ const hasValidPrefix = ref.startsWith("task:") || ref.startsWith("pr:") || ref.startsWith("issue:") || ref.startsWith("file:") || ref.startsWith("url:");
6951
+ if (!hasValidPrefix) return false;
6952
+ if (ref.startsWith("task:")) {
6953
+ const refTaskId = ref.replace("task:", "");
6954
+ const refTask = relatedRecords.tasks.find((t) => t.payload.id === refTaskId);
6955
+ return !refTask || !refTask.payload.status || !completedStatuses.includes(refTask.payload.status);
6956
+ }
6957
+ return true;
6958
+ });
6959
+ const blockedBy = relatedRecords.tasks.filter((t) => !completedStatuses.includes(t.payload.status)).filter((t) => (t.payload.references || []).includes(`task:${task.payload.id}`)).map((t) => `task:${t.payload.id}`);
6960
+ const cycles = (task.payload.cycleIds || []).map((cycleId) => {
6961
+ const cycle = relatedRecords.cycles.find((c) => c.payload.id === cycleId);
6962
+ return cycle ? { id: cycleId, title: cycle.payload.title } : null;
6963
+ }).filter((c) => c !== null);
6964
+ const executionCount = relatedRecords.executions.filter((e) => e.payload.taskId === task.payload.id).length;
6965
+ const blockingFeedbackCount = relatedRecords.feedback.filter(
6966
+ (f) => f.payload.entityId === task.payload.id && f.payload.type === "blocking" && f.payload.status === "open"
6967
+ ).length;
6968
+ const openQuestionCount = relatedRecords.feedback.filter(
6969
+ (f) => f.payload.entityId === task.payload.id && f.payload.type === "question" && f.payload.status === "open"
6970
+ ).length;
6971
+ const timeToResolution = task.payload.status === "done" ? (lastUpdated - this.getTimestampFromId(task.payload.id) * 1e3) / (1e3 * 60 * 60) : void 0;
6972
+ const releaseChangelogs = relatedRecords.changelogs.filter(
6973
+ (cl) => cl.payload.relatedTasks.includes(task.payload.id)
6974
+ );
6975
+ const isReleased = releaseChangelogs.length > 0;
6976
+ const lastReleaseVersion = isReleased ? releaseChangelogs[releaseChangelogs.length - 1]?.payload.version || void 0 : void 0;
6977
+ const taskId = task.payload.id;
6978
+ const isStalled = derivedStateSets.stalledTasks.has(taskId);
6979
+ const isAtRisk = derivedStateSets.atRiskTasks.has(taskId);
6980
+ const needsClarification = derivedStateSets.needsClarificationTasks.has(taskId);
6981
+ const isBlockedByDependency = derivedStateSets.blockedByDependencyTasks.has(taskId);
6982
+ const daysSinceLastUpdate = (Date.now() - lastUpdated) / (1e3 * 60 * 60 * 24);
6983
+ let healthScore = 100;
6984
+ if (task.payload.status === "done") healthScore -= 0;
6985
+ else if (task.payload.status === "active") healthScore -= 5;
6986
+ else if (task.payload.status === "ready") healthScore -= 10;
6987
+ else if (task.payload.status === "review") healthScore -= 15;
6988
+ else if (task.payload.status === "paused") healthScore -= 25;
6989
+ else healthScore -= 30;
6990
+ healthScore -= Math.min(blockingFeedbackCount * 10, 30);
6991
+ if (executionCount === 0 && task.payload.status === "active") healthScore -= 20;
6992
+ else if (executionCount < 2) healthScore -= 10;
6993
+ if (daysSinceLastUpdate > 30) healthScore -= 20;
6994
+ else if (daysSinceLastUpdate > 14) healthScore -= 15;
6995
+ else if (daysSinceLastUpdate > 7) healthScore -= 10;
6996
+ healthScore = Math.max(0, Math.min(100, healthScore));
6997
+ const timeInCurrentStage = daysSinceLastUpdate;
6998
+ const enrichedRecord = {
6492
6999
  ...task.payload,
7000
+ derivedState: {
7001
+ isStalled,
7002
+ isAtRisk,
7003
+ needsClarification,
7004
+ isBlockedByDependency,
7005
+ healthScore,
7006
+ timeInCurrentStage
7007
+ },
7008
+ relationships: {
7009
+ ...author && { author },
7010
+ ...lastModifier && { lastModifier },
7011
+ assignedTo: assignments,
7012
+ dependsOn,
7013
+ blockedBy,
7014
+ cycles
7015
+ },
7016
+ metrics: {
7017
+ executionCount,
7018
+ blockingFeedbackCount,
7019
+ openQuestionCount,
7020
+ ...timeToResolution !== void 0 && { timeToResolution }
7021
+ },
7022
+ release: {
7023
+ isReleased,
7024
+ ...lastReleaseVersion !== void 0 && { lastReleaseVersion }
7025
+ },
6493
7026
  lastUpdated,
6494
7027
  lastActivityType,
6495
- recentActivity
7028
+ ...recentActivity && { recentActivity }
6496
7029
  };
7030
+ return enrichedRecord;
6497
7031
  }
6498
7032
  /**
6499
7033
  * Format timestamp as human-readable time ago
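The health score in `enrichTaskRecord` is a simple penalty model: start at 100, subtract for status, open blocking feedback, thin execution history, and staleness, then clamp to 0-100. The same arithmetic as a pure function, with a worked example:

```typescript
// Penalty model mirroring the health-score arithmetic in enrichTaskRecord above.
function healthScore(input: {
  status: string;
  blockingFeedbackCount: number;
  executionCount: number;
  daysSinceLastUpdate: number;
}): number {
  let score = 100;

  // Status penalty: done 0, active 5, ready 10, review 15, paused 25, anything else 30.
  const statusPenalty: Record<string, number> = { done: 0, active: 5, ready: 10, review: 15, paused: 25 };
  score -= statusPenalty[input.status] ?? 30;

  // Open blocking feedback: 10 points each, capped at 30.
  score -= Math.min(input.blockingFeedbackCount * 10, 30);

  // Execution activity: active tasks with no executions lose 20, thin history loses 10.
  if (input.executionCount === 0 && input.status === "active") score -= 20;
  else if (input.executionCount < 2) score -= 10;

  // Staleness: 10 / 15 / 20 points at the 7 / 14 / 30 day marks.
  if (input.daysSinceLastUpdate > 30) score -= 20;
  else if (input.daysSinceLastUpdate > 14) score -= 15;
  else if (input.daysSinceLastUpdate > 7) score -= 10;

  return Math.max(0, Math.min(100, score));
}

// Example: an active task, one open blocker, one execution, last touched 10 days ago
// scores 100 - 5 - 10 - 10 - 10 = 65.
console.log(healthScore({ status: "active", blockingFeedbackCount: 1, executionCount: 1, daysSinceLastUpdate: 10 }));
```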
@@ -6530,6 +7064,15 @@ var project_adapter_exports = {};
6530
7064
  __export(project_adapter_exports, {
6531
7065
  ProjectAdapter: () => ProjectAdapter
6532
7066
  });
7067
+
7068
+ // src/utils/esm_helper.ts
7069
+ function getImportMetaUrl() {
7070
+ try {
7071
+ return import.meta.url;
7072
+ } catch {
7073
+ return null;
7074
+ }
7075
+ }
6533
7076
  var ProjectAdapter = class {
6534
7077
  identityAdapter;
6535
7078
  backlogAdapter;
@@ -6638,8 +7181,8 @@ var ProjectAdapter = class {
6638
7181
  /**
6639
7182
  * [EARS-2] Validates environment for GitGovernance initialization
6640
7183
  */
6641
- async validateEnvironment(path5) {
6642
- const targetPath = path5 || process.env["GITGOV_ORIGINAL_DIR"] || process.cwd();
7184
+ async validateEnvironment(path6) {
7185
+ const targetPath = path6 || process.env["GITGOV_ORIGINAL_DIR"] || process.cwd();
6643
7186
  const warnings = [];
6644
7187
  const suggestions = [];
6645
7188
  try {
@@ -6822,14 +7365,6 @@ var ProjectAdapter = class {
6822
7365
  }
6823
7366
  }
6824
7367
  async copyAgentPrompt(gitgovPath) {
6825
- function getImportMetaUrl() {
6826
- try {
6827
- const getUrl = new Function("return import.meta.url");
6828
- return getUrl();
6829
- } catch {
6830
- return null;
6831
- }
6832
- }
6833
7368
  const targetPrompt = pathUtils.join(gitgovPath, "gitgov");
6834
7369
  const potentialSources = [];
6835
7370
  potentialSources.push(
@@ -6841,9 +7376,8 @@ var ProjectAdapter = class {
6841
7376
  const require2 = createRequire(metaUrl);
6842
7377
  const pkgJsonPath = require2.resolve("@gitgov/core/package.json");
6843
7378
  const pkgRoot = pathUtils.dirname(pkgJsonPath);
6844
- potentialSources.push(
6845
- pathUtils.join(pkgRoot, "prompts/gitgov_agent_prompt.md")
6846
- );
7379
+ const promptPath = pathUtils.join(pkgRoot, "prompts/gitgov_agent_prompt.md");
7380
+ potentialSources.push(promptPath);
6847
7381
  }
6848
7382
  } catch {
6849
7383
  }
@@ -6852,9 +7386,8 @@ var ProjectAdapter = class {
6852
7386
  if (metaUrl) {
6853
7387
  const __filename = fileURLToPath(metaUrl);
6854
7388
  const __dirname = pathUtils.dirname(__filename);
6855
- potentialSources.push(
6856
- pathUtils.resolve(__dirname, "../../prompts/gitgov_agent_prompt.md")
6857
- );
7389
+ const promptPath = pathUtils.resolve(__dirname, "../../prompts/gitgov_agent_prompt.md");
7390
+ potentialSources.push(promptPath);
6858
7391
  }
6859
7392
  } catch {
6860
7393
  }
@@ -7583,15 +8116,6 @@ var WorkflowMethodologyAdapter = class _WorkflowMethodologyAdapter {
7583
8116
  }
7584
8117
  };
7585
8118
 
7586
- // src/crypto/index.ts
7587
- var crypto_exports = {};
7588
- __export(crypto_exports, {
7589
- calculatePayloadChecksum: () => calculatePayloadChecksum,
7590
- generateKeys: () => generateKeys,
7591
- signPayload: () => signPayload,
7592
- verifySignatures: () => verifySignatures
7593
- });
7594
-
7595
8119
  // src/factories/index.ts
7596
8120
  var factories_exports = {};
7597
8121
  __export(factories_exports, {
@@ -7605,7 +8129,14 @@ __export(factories_exports, {
7605
8129
  createFeedbackRecord: () => createFeedbackRecord,
7606
8130
  createTaskRecord: () => createTaskRecord,
7607
8131
  createTestSignature: () => createTestSignature,
7608
- createWorkflowMethodologyConfig: () => createWorkflowMethodologyConfig
8132
+ createWorkflowMethodologyConfig: () => createWorkflowMethodologyConfig,
8133
+ loadActorRecord: () => loadActorRecord,
8134
+ loadAgentRecord: () => loadAgentRecord,
8135
+ loadChangelogRecord: () => loadChangelogRecord,
8136
+ loadCycleRecord: () => loadCycleRecord,
8137
+ loadExecutionRecord: () => loadExecutionRecord,
8138
+ loadFeedbackRecord: () => loadFeedbackRecord,
8139
+ loadTaskRecord: () => loadTaskRecord
7609
8140
  });
7610
8141
 
7611
8142
  // src/validation/workflow_methodology_validator.ts
@@ -7683,7 +8214,7 @@ function validateWorkflowMethodologyConfigBusinessRules(config) {
7683
8214
  }
7684
8215
 
7685
8216
  // src/factories/workflow_methodology_factory.ts
7686
- async function createWorkflowMethodologyConfig(payload) {
8217
+ function createWorkflowMethodologyConfig(payload) {
7687
8218
  const config = {
7688
8219
  version: payload.version || "1.0.0",
7689
8220
  name: payload.name || "Custom Methodology",
@@ -7851,8 +8382,8 @@ function createTestSignature(keyId = "human:test-user", role = "author", notes =
7851
8382
  keyId,
7852
8383
  role,
7853
8384
  notes,
7854
- signature: "dGVzdHNpZ25hdHVyZWJhc2U2NGVuY29kZWRkdW1teWZvcnRlc3RpbmdwdXJwb3Nlc29ubHlub3RyZWFsY3J5cHRvZ3JhcGh5PT0=",
7855
- // Dummy 88-char base64 for testing (matches Ed25519 signature length)
8385
+ signature: "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA==",
8386
+ // Dummy 88-char base64 for testing (86 chars + ==, matches Ed25519 signature format)
7856
8387
  timestamp
7857
8388
  };
7858
8389
  }
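The only functional change to createTestSignature is the dummy value: the new placeholder is 86 base64 characters plus '==' padding, exactly the 88-character base64 form of a 64-byte Ed25519 signature, which the previous placeholder string did not match. A small, hedged sketch of how the helper might be used in tests:

```typescript
import { Factories } from '@gitgov/core';

// Returns { keyId, role, notes, signature, timestamp } with a dummy, non-cryptographic signature.
const sig = Factories.createTestSignature('human:test-user', 'author');
console.assert(sig.signature.length === 88, 'matches Ed25519 base64 signature length');
```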
@@ -7866,7 +8397,7 @@ function inferTypeFromPayload(payload) {
7866
8397
  if ("displayName" in payload && "publicKey" in payload) return "actor";
7867
8398
  return "custom";
7868
8399
  }
7869
- async function createEmbeddedMetadataRecord(payload, options = {}) {
8400
+ function createEmbeddedMetadataRecord(payload, options = {}) {
7870
8401
  const inferredType = inferTypeFromPayload(payload);
7871
8402
  const type = options.header?.type || inferredType;
7872
8403
  const payloadChecksum = calculatePayloadChecksum(payload);
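As with createWorkflowMethodologyConfig in the earlier hunk, createEmbeddedMetadataRecord drops the async keyword and now returns its result directly instead of wrapped in a Promise. Existing await call sites keep working, since awaiting a plain value is a no-op, but the call can now be made synchronously. A minimal sketch (the payload fields are just the defaults visible in the hunks above):

```typescript
import { Factories } from '@gitgov/core';

// 1.6.3: const config = await Factories.createWorkflowMethodologyConfig({ name: 'Review Flow' });
// 1.7.0: the factory is synchronous; no await (or .then) is required.
const config = Factories.createWorkflowMethodologyConfig({ name: 'Review Flow' });
console.log(config.version); // "1.0.0" unless payload.version is provided
```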
@@ -7906,6 +8437,1449 @@ async function createEmbeddedMetadataRecord(payload, options = {}) {
7906
8437
  return embeddedRecord;
7907
8438
  }
7908
8439
 
8440
+ // src/lint/index.ts
8441
+ var lint_exports = {};
8442
+ __export(lint_exports, {
8443
+ LintModule: () => LintModule
8444
+ });
8445
+ function isTaskRecord2(payload) {
8446
+ return "title" in payload && "status" in payload && "priority" in payload && "description" in payload;
8447
+ }
8448
+ function isCycleRecord2(payload) {
8449
+ return "title" in payload && "status" in payload && !("priority" in payload);
8450
+ }
8451
+ function isExecutionRecord2(payload) {
8452
+ return "taskId" in payload && "type" in payload && "result" in payload;
8453
+ }
8454
+ var logger3 = createLogger("[LintModule]");
8455
+ var LintModule = class {
8456
+ recordStore;
8457
+ indexerAdapter;
8458
+ fileSystem;
8459
+ lastBackupPath = null;
8460
+ /**
8461
+ * Constructor for LintModule with graceful degradation.
8462
+ *
8463
+ * @param dependencies - Module dependencies (some optional)
8464
+ * @throws {Error} If recordStore is not present
8465
+ *
8466
+ * @example
8467
+ * ```typescript
8468
+ * const lintModule = new LintModule({
8469
+ * recordStore: taskStore, // REQUIRED
8470
+ * indexerAdapter: indexerAdapter, // optional
8471
+ * fileSystem: customFileSystem // optional (default: Node.js fs)
8472
+ * });
8473
+ * ```
8474
+ */
8475
+ constructor(dependencies) {
8476
+ if (!dependencies.recordStore) {
8477
+ throw new Error("recordStore is required for file access");
8478
+ }
8479
+ this.recordStore = dependencies.recordStore;
8480
+ this.indexerAdapter = dependencies.indexerAdapter ?? null;
8481
+ if (!this.indexerAdapter) {
8482
+ logger3.warn(
8483
+ "indexerAdapter not provided, reference validation will be limited"
8484
+ );
8485
+ }
8486
+ this.fileSystem = dependencies.fileSystem ?? {
8487
+ readFile: async (path6, encoding) => {
8488
+ return promises.readFile(path6, encoding);
8489
+ },
8490
+ writeFile: async (path6, content) => {
8491
+ await promises.writeFile(path6, content, "utf-8");
8492
+ },
8493
+ exists: async (path6) => {
8494
+ try {
8495
+ await promises.access(path6);
8496
+ return true;
8497
+ } catch {
8498
+ return false;
8499
+ }
8500
+ },
8501
+ unlink: async (path6) => {
8502
+ await promises.unlink(path6);
8503
+ }
8504
+ };
8505
+ }
8506
+ /**
8507
+ * Validates all records in the specified directory.
8508
+ *
8509
+ * Uses a delegation pattern: it calls recordStore.read(), which internally uses loaders
8510
+ * to validate the schema + embedded metadata. It then adds further validations
8511
+ * (conventions, references).
8512
+ *
8513
+ * @param options - Configuration options
8514
+ * @returns {Promise<LintReport>} Consolidated report with all results
8515
+ *
8516
+ * @example
8517
+ * ```typescript
8518
+ * const report = await lintModule.lint({
8519
+ * path: '.gitgov/',
8520
+ * validateReferences: true,
8521
+ * validateActors: true,
8522
+ * concurrent: true
8523
+ * });
8524
+ *
8525
+ * console.log(`Errors: ${report.summary.errors}`);
8526
+ * console.log(`Warnings: ${report.summary.warnings}`);
8527
+ * ```
8528
+ */
8529
+ async lint(options) {
8530
+ const startTime = Date.now();
8531
+ const opts = {
8532
+ path: options?.path ?? ".gitgov/",
8533
+ validateReferences: options?.validateReferences ?? false,
8534
+ validateActors: options?.validateActors ?? false,
8535
+ validateChecksums: options?.validateChecksums ?? true,
8536
+ validateSignatures: options?.validateSignatures ?? true,
8537
+ validateConventions: options?.validateConventions ?? true,
8538
+ failFast: options?.failFast ?? false,
8539
+ concurrent: options?.concurrent ?? true,
8540
+ concurrencyLimit: options?.concurrencyLimit ?? 10
8541
+ };
8542
+ const results = [];
8543
+ const originalWarn = console.warn;
8544
+ console.warn = () => {
8545
+ };
8546
+ try {
8547
+ const recordsWithTypes = await this.discoverAllRecordsWithTypes(opts.path);
8548
+ const recordIds = recordsWithTypes.map((r) => r.id);
8549
+ logger3.info(`Starting lint validation for ${recordIds.length} records`);
8550
+ const recordTypeMap = /* @__PURE__ */ new Map();
8551
+ for (const { id, type } of recordsWithTypes) {
8552
+ recordTypeMap.set(id, type);
8553
+ }
8554
+ if (opts.concurrent) {
8555
+ const batches = this.chunkArray(recordIds, opts.concurrencyLimit);
8556
+ for (const batch of batches) {
8557
+ const batchResults = await Promise.all(
8558
+ batch.map((recordId) => this.lintSingleRecord(recordId, opts, recordTypeMap.get(recordId)))
8559
+ );
8560
+ for (const batchResult of batchResults) {
8561
+ results.push(...batchResult);
8562
+ if (opts.failFast && batchResult.some((r) => r.level === "error")) {
8563
+ logger3.warn("Fail-fast mode: stopping after first error");
8564
+ break;
8565
+ }
8566
+ }
8567
+ if (opts.failFast && results.some((r) => r.level === "error")) {
8568
+ break;
8569
+ }
8570
+ }
8571
+ } else {
8572
+ for (const recordId of recordIds) {
8573
+ const recordResults = await this.lintSingleRecord(recordId, opts, recordTypeMap.get(recordId));
8574
+ results.push(...recordResults);
8575
+ if (opts.failFast && recordResults.some((r) => r.level === "error")) {
8576
+ logger3.warn("Fail-fast mode: stopping after first error");
8577
+ break;
8578
+ }
8579
+ }
8580
+ }
8581
+ const executionTime = Date.now() - startTime;
8582
+ const errors = results.filter((r) => r.level === "error").length;
8583
+ const warnings = results.filter((r) => r.level === "warning").length;
8584
+ const fixable = results.filter((r) => r.fixable).length;
8585
+ logger3.info(
8586
+ `Lint completed in ${executionTime}ms: ${recordIds.length} files, ${errors} errors, ${warnings} warnings`
8587
+ );
8588
+ return {
8589
+ summary: {
8590
+ filesChecked: recordIds.length,
8591
+ errors,
8592
+ warnings,
8593
+ fixable,
8594
+ executionTime
8595
+ },
8596
+ results,
8597
+ metadata: {
8598
+ timestamp: (/* @__PURE__ */ new Date()).toISOString(),
8599
+ options: opts,
8600
+ version: "1.0.0"
8601
+ }
8602
+ };
8603
+ } catch (error) {
8604
+ logger3.error("Lint operation failed:", error);
8605
+ throw error;
8606
+ } finally {
8607
+ console.warn = originalWarn;
8608
+ }
8609
+ }
8610
+ /**
8611
+ * Validates a specific file and returns its results.
8612
+ * Ultra-fast validation for single records (target: <50ms).
8613
+ *
8614
+ * @param filePath - Path to the file to validate
8615
+ * @param options - Configuration options
8616
+ * @returns {Promise<LintReport>} Lint report for this single file
8617
+ *
8618
+ * @example
8619
+ * ```typescript
8620
+ * const report = await lintModule.lintFile('.gitgov/tasks/1234567890-task-example.json', {
8621
+ * validateReferences: true
8622
+ * });
8623
+ * ```
8624
+ */
8625
+ async lintFile(filePath, options) {
8626
+ const startTime = Date.now();
8627
+ const recordId = this.extractRecordId(filePath);
8628
+ const pathParts = filePath.split("/");
8629
+ const typeDirIndex = pathParts.findIndex(
8630
+ (part) => ["tasks", "cycles", "executions", "changelogs", "feedback", "actors", "agents"].includes(part)
8631
+ );
8632
+ let entityType = this.getEntityType(recordId);
8633
+ if (typeDirIndex >= 0 && pathParts[typeDirIndex]) {
8634
+ const typeDir = pathParts[typeDirIndex];
8635
+ const typeMap = {
8636
+ "tasks": "task",
8637
+ "cycles": "cycle",
8638
+ "executions": "execution",
8639
+ "changelogs": "changelog",
8640
+ "feedback": "feedback",
8641
+ "actors": "actor",
8642
+ "agents": "agent"
8643
+ };
8644
+ entityType = typeMap[typeDir] || entityType;
8645
+ }
8646
+ const opts = {
8647
+ path: filePath,
8648
+ validateReferences: options?.validateReferences ?? false,
8649
+ validateActors: options?.validateActors ?? false,
8650
+ validateChecksums: options?.validateChecksums ?? true,
8651
+ validateSignatures: options?.validateSignatures ?? true,
8652
+ validateConventions: options?.validateConventions ?? true,
8653
+ failFast: options?.failFast ?? false,
8654
+ concurrent: false,
8655
+ // Single file, no concurrency needed
8656
+ concurrencyLimit: 1
8657
+ };
8658
+ const originalWarn = console.warn;
8659
+ console.warn = () => {
8660
+ };
8661
+ try {
8662
+ const results = await this.lintSingleRecord(recordId, opts, entityType);
8663
+ const executionTime = Date.now() - startTime;
8664
+ const errors = results.filter((r) => r.level === "error").length;
8665
+ const warnings = results.filter((r) => r.level === "warning").length;
8666
+ const fixable = results.filter((r) => r.fixable).length;
8667
+ return {
8668
+ summary: {
8669
+ filesChecked: 1,
8670
+ errors,
8671
+ warnings,
8672
+ fixable,
8673
+ executionTime
8674
+ },
8675
+ results,
8676
+ metadata: {
8677
+ timestamp: (/* @__PURE__ */ new Date()).toISOString(),
8678
+ options: opts,
8679
+ version: "1.0.0"
8680
+ }
8681
+ };
8682
+ } finally {
8683
+ console.warn = originalWarn;
8684
+ }
8685
+ }
8686
+ /**
8687
+ * Validates a single record and returns its results.
8688
+ *
8689
+ * @private
8690
+ * @param recordId - ID of the record to validate
8691
+ * @param options - Configuration options
8692
+ * @returns {Promise<LintResult[]>} Array of results for this record
8693
+ */
8694
+ async lintSingleRecord(recordId, options, entityTypeOverride) {
8695
+ const results = [];
8696
+ const entityType = entityTypeOverride || this.getEntityType(recordId);
8697
+ let filePath;
8698
+ if (options.path && options.path.endsWith(".json")) {
8699
+ const projectRoot2 = ConfigManager.findProjectRoot();
8700
+ if (options.path.startsWith("/")) {
8701
+ filePath = options.path;
8702
+ } else if (projectRoot2) {
8703
+ filePath = join(projectRoot2, options.path);
8704
+ } else {
8705
+ filePath = join(process.cwd(), options.path);
8706
+ }
8707
+ } else {
8708
+ filePath = this.getFilePath(recordId, entityType);
8709
+ }
8710
+ try {
8711
+ let record = null;
8712
+ try {
8713
+ const content = await this.fileSystem.readFile(filePath, "utf-8");
8714
+ const raw = JSON.parse(content);
8715
+ switch (entityType) {
8716
+ case "task":
8717
+ record = loadTaskRecord(raw);
8718
+ break;
8719
+ case "actor":
8720
+ record = loadActorRecord(raw);
8721
+ break;
8722
+ case "agent":
8723
+ record = loadAgentRecord(raw);
8724
+ break;
8725
+ case "cycle":
8726
+ record = loadCycleRecord(raw);
8727
+ break;
8728
+ case "execution":
8729
+ record = loadExecutionRecord(raw);
8730
+ break;
8731
+ case "changelog":
8732
+ record = loadChangelogRecord(raw);
8733
+ break;
8734
+ case "feedback":
8735
+ record = loadFeedbackRecord(raw);
8736
+ break;
8737
+ default:
8738
+ record = await this.recordStore.read(recordId);
8739
+ }
8740
+ } catch (validationError) {
8741
+ if (validationError instanceof DetailedValidationError) {
8742
+ const hasAdditionalProperties = validationError.errors.some(
8743
+ (e) => e.message.includes("must NOT have additional properties") || e.message.includes("must not have additional properties")
8744
+ );
8745
+ const filteredErrors = hasAdditionalProperties ? validationError.errors.filter(
8746
+ (e) => !e.message.includes("oneOf") && !e.message.includes("must match") && !e.message.includes("boolean schema is false")
8747
+ ) : validationError.errors;
8748
+ for (const err of filteredErrors) {
8749
+ const tempError = new DetailedValidationError("Record", [err]);
8750
+ tempError.message = `${err.field}: ${err.message}`;
8751
+ const validatorType = this.detectValidatorType(tempError);
8752
+ const isFixable = this.isFixable(tempError);
8753
+ results.push({
8754
+ level: "error",
8755
+ filePath,
8756
+ validator: validatorType,
8757
+ message: `${err.field}: ${err.message}`,
8758
+ entity: {
8759
+ type: entityType,
8760
+ id: recordId
8761
+ },
8762
+ fixable: isFixable,
8763
+ ...err && {
8764
+ context: {
8765
+ ...err.field && { field: err.field },
8766
+ ...err.value !== void 0 && { actual: err.value },
8767
+ ...err.message && { expected: err.message }
8768
+ }
8769
+ }
8770
+ });
8771
+ }
8772
+ return results;
8773
+ }
8774
+ const fsError = validationError;
8775
+ let errorMessage;
8776
+ if (fsError.code === "ENOENT") {
8777
+ errorMessage = `Record file not found: ${recordId}`;
8778
+ } else if (validationError instanceof SyntaxError) {
8779
+ errorMessage = `Invalid JSON in record file: ${recordId}`;
8780
+ } else {
8781
+ errorMessage = `Failed to read record file: ${validationError instanceof Error ? validationError.message : String(validationError)}`;
8782
+ }
8783
+ results.push({
8784
+ level: "error",
8785
+ filePath,
8786
+ validator: "SCHEMA_VALIDATION",
8787
+ message: errorMessage,
8788
+ entity: {
8789
+ type: entityType,
8790
+ id: recordId
8791
+ },
8792
+ fixable: false
8793
+ });
8794
+ return results;
8795
+ }
8796
+ if (!record) {
8797
+ results.push({
8798
+ level: "error",
8799
+ filePath,
8800
+ validator: "SCHEMA_VALIDATION",
8801
+ message: `Record validation failed: ${recordId}`,
8802
+ entity: {
8803
+ type: entityType,
8804
+ id: recordId
8805
+ },
8806
+ fixable: false
8807
+ });
8808
+ return results;
8809
+ }
8810
+ if (options.validateConventions) {
8811
+ const conventionResults = await this.validateConventions(record, recordId, filePath, entityType);
8812
+ results.push(...conventionResults);
8813
+ }
8814
+ if (options.validateReferences && this.indexerAdapter) {
8815
+ const refResults = await this.validateReferences(record, recordId, filePath, entityType);
8816
+ results.push(...refResults);
8817
+ }
8818
+ if (options.validateActors && this.indexerAdapter) {
8819
+ const actorResults = await this.validateActors(record, recordId, filePath, entityType);
8820
+ results.push(...actorResults);
8821
+ }
8822
+ } catch (error) {
8823
+ if (error instanceof DetailedValidationError) {
8824
+ const hasAdditionalProperties = error.errors.some(
8825
+ (e) => e.message.includes("must NOT have additional properties") || e.message.includes("must not have additional properties")
8826
+ );
8827
+ const filteredErrors = hasAdditionalProperties ? error.errors.filter(
8828
+ (e) => !e.message.includes("oneOf") && !e.message.includes("must match") && !e.message.includes("boolean schema is false")
8829
+ ) : error.errors;
8830
+ for (const err of filteredErrors) {
8831
+ const tempError = new DetailedValidationError("Record", [err]);
8832
+ tempError.message = `${err.field}: ${err.message}`;
8833
+ const validatorType = this.detectValidatorType(tempError);
8834
+ const isFixable = this.isFixable(tempError);
8835
+ results.push({
8836
+ level: "error",
8837
+ filePath,
8838
+ validator: validatorType,
8839
+ message: `${err.field}: ${err.message}`,
8840
+ entity: {
8841
+ type: entityType,
8842
+ id: recordId
8843
+ },
8844
+ fixable: isFixable,
8845
+ ...err && {
8846
+ context: {
8847
+ ...err.field && { field: err.field },
8848
+ ...err.value !== void 0 && { actual: err.value },
8849
+ ...err.message && { expected: err.message }
8850
+ }
8851
+ }
8852
+ });
8853
+ }
8854
+ } else {
8855
+ results.push({
8856
+ level: "error",
8857
+ filePath,
8858
+ validator: "SCHEMA_VALIDATION",
8859
+ message: error instanceof Error ? error.message : String(error),
8860
+ entity: {
8861
+ type: entityType,
8862
+ id: recordId
8863
+ },
8864
+ fixable: false
8865
+ });
8866
+ }
8867
+ }
8868
+ return results;
8869
+ }
8870
+ /**
8871
+ * Validates a specific file.
8872
+ *
8873
+ * Useful for live validation (e.g. dashboard, IDE integration).
8874
+ *
8875
+ * @param filePath - Path of the file to validate
8876
+ * @param options - Configuration options
8877
+ * @returns {Promise<LintResult[]>} Validation results for this file
8878
+ *
8879
+ * @example
8880
+ * ```typescript
8881
+ * const results = await lintModule.lintFile(
8882
+ * '.gitgov/tasks/task-123.json',
8883
+ * { validateReferences: true }
8884
+ * );
8885
+ * ```
8886
+ */
8887
+ /**
8888
+ * Applies automatic repairs to problems marked as fixable.
8889
+ *
8890
+ * @param lintReport - Lint report with detected problems
8891
+ * @param fixOptions - Options for the fix operation
8892
+ * @returns {Promise<FixReport>} Report of applied repairs
8893
+ *
8894
+ * @example
8895
+ * ```typescript
8896
+ * const lintReport = await lintModule.lint();
8897
+ * const fixReport = await lintModule.fix(lintReport, {
8898
+ * createBackups: true,
8899
+ * keyId: 'system:migrator'
8900
+ * });
8901
+ *
8902
+ * console.log(`Fixed: ${fixReport.summary.fixed}`);
8903
+ * ```
8904
+ */
8905
+ async fix(lintReport, fixOptions) {
8906
+ const opts = {
8907
+ ...fixOptions?.fixTypes && { fixTypes: fixOptions.fixTypes },
8908
+ createBackups: fixOptions?.createBackups ?? true,
8909
+ keyId: fixOptions?.keyId ?? "system:migrator",
8910
+ dryRun: fixOptions?.dryRun ?? false,
8911
+ ...fixOptions?.privateKey && { privateKey: fixOptions.privateKey }
8912
+ };
8913
+ const fixes = [];
8914
+ let fixableResults = lintReport.results.filter((r) => r.fixable);
8915
+ if (opts.fixTypes && opts.fixTypes.length > 0) {
8916
+ fixableResults = fixableResults.filter(
8917
+ (r) => opts.fixTypes.includes(r.validator)
8918
+ );
8919
+ }
8920
+ logger3.info(`Starting fix operation for ${fixableResults.length} fixable problems`);
8921
+ const resultsByFile = /* @__PURE__ */ new Map();
8922
+ for (const result of fixableResults) {
8923
+ if (!resultsByFile.has(result.filePath)) {
8924
+ resultsByFile.set(result.filePath, /* @__PURE__ */ new Map());
8925
+ }
8926
+ const fileResults = resultsByFile.get(result.filePath);
8927
+ if (!fileResults.has(result.validator)) {
8928
+ fileResults.set(result.validator, []);
8929
+ }
8930
+ fileResults.get(result.validator).push(result);
8931
+ }
8932
+ for (const [, validatorMap] of resultsByFile) {
8933
+ for (const [, results] of validatorMap) {
8934
+ const primaryResult = results[0];
8935
+ let backupPath;
8936
+ try {
8937
+ if (opts.dryRun) {
8938
+ fixes.push({
8939
+ filePath: primaryResult.filePath,
8940
+ validator: primaryResult.validator,
8941
+ action: `Would fix ${primaryResult.validator} (${results.length} error${results.length === 1 ? "" : "s"})`,
8942
+ success: true
8943
+ });
8944
+ continue;
8945
+ }
8946
+ if (opts.createBackups && !backupPath) {
8947
+ backupPath = await this.createBackup(primaryResult.filePath);
8948
+ }
8949
+ await this.applyFix(primaryResult, opts, results);
8950
+ fixes.push({
8951
+ filePath: primaryResult.filePath,
8952
+ validator: primaryResult.validator,
8953
+ action: `Fixed ${primaryResult.validator} (${results.length} error${results.length === 1 ? "" : "s"})`,
8954
+ success: true,
8955
+ ...backupPath && { backupPath }
8956
+ });
8957
+ logger3.debug(`Successfully fixed ${primaryResult.filePath} (${primaryResult.validator}, ${results.length} errors)`);
8958
+ } catch (error) {
8959
+ if (opts.createBackups && backupPath) {
8960
+ try {
8961
+ await this.restoreBackup(primaryResult.filePath);
8962
+ logger3.warn(`Restored backup for ${primaryResult.filePath} after fix failure`);
8963
+ } catch (restoreError) {
8964
+ logger3.error(`Failed to restore backup for ${primaryResult.filePath}:`, restoreError);
8965
+ }
8966
+ }
8967
+ fixes.push({
8968
+ filePath: primaryResult.filePath,
8969
+ validator: primaryResult.validator,
8970
+ action: `Failed to fix ${primaryResult.validator}`,
8971
+ success: false,
8972
+ error: error instanceof Error ? error.message : String(error),
8973
+ ...backupPath && { backupPath }
8974
+ // Include backup path even if fix failed
8975
+ });
8976
+ logger3.error(`Failed to fix ${primaryResult.filePath}:`, error);
8977
+ }
8978
+ }
8979
+ }
8980
+ const summary = {
8981
+ fixed: fixes.filter((f) => f.success).length,
8982
+ failed: fixes.filter((f) => !f.success).length,
8983
+ backupsCreated: opts.createBackups ? fixes.filter((f) => f.backupPath).length : 0
8984
+ };
8985
+ logger3.info(
8986
+ `Fix operation completed: ${summary.fixed} fixed, ${summary.failed} failed, ${summary.backupsCreated} backups created`
8987
+ );
8988
+ return {
8989
+ summary,
8990
+ fixes
8991
+ };
8992
+ }
8993
+ // ==================== Helper Methods ====================
8994
+ /**
8995
+ * Splits an array into chunks of the specified size.
8996
+ * @private
8997
+ */
8998
+ chunkArray(array, chunkSize) {
8999
+ const chunks = [];
9000
+ for (let i = 0; i < array.length; i += chunkSize) {
9001
+ chunks.push(array.slice(i, i + chunkSize));
9002
+ }
9003
+ return chunks;
9004
+ }
9005
+ /**
9006
+ * Discovers all records with their types by scanning the filesystem.
9007
+ * This ensures we know the correct type for each record based on its directory.
9008
+ * @private
9009
+ */
9010
+ async discoverAllRecordsWithTypes(path6) {
9011
+ const projectRoot2 = ConfigManager.findProjectRoot() || path6 || ".gitgov/";
9012
+ const recordTypes = [
9013
+ "actor",
9014
+ "agent",
9015
+ "cycle",
9016
+ "task",
9017
+ "execution",
9018
+ "changelog",
9019
+ "feedback"
9020
+ ];
9021
+ const allRecords = [];
9022
+ for (const recordType of recordTypes) {
9023
+ const dirNameMap = {
9024
+ "task": "tasks",
9025
+ "cycle": "cycles",
9026
+ "execution": "executions",
9027
+ "changelog": "changelogs",
9028
+ "feedback": "feedback",
9029
+ // feedback directory is singular, not plural
9030
+ "actor": "actors",
9031
+ "agent": "agents"
9032
+ };
9033
+ const dirName = dirNameMap[recordType];
9034
+ const dirPath = join(projectRoot2, ".gitgov", dirName);
9035
+ try {
9036
+ const files = await readdir(dirPath);
9037
+ const jsonFiles = files.filter((f) => f.endsWith(".json"));
9038
+ const records = jsonFiles.map((f) => ({
9039
+ id: f.replace(".json", ""),
9040
+ type: recordType
9041
+ }));
9042
+ allRecords.push(...records);
9043
+ } catch (error) {
9044
+ continue;
9045
+ }
9046
+ }
9047
+ return allRecords;
9048
+ }
9049
+ /**
9050
+ * Gets the file path for a given recordId.
9051
+ * Matches the format used by RecordStore.getRecordPath()
9052
+ * @private
9053
+ */
9054
+ getFilePath(recordId, entityTypeOverride) {
9055
+ const type = entityTypeOverride || this.getEntityType(recordId);
9056
+ const projectRoot2 = ConfigManager.findProjectRoot();
9057
+ const dirNameMap = {
9058
+ "task": "tasks",
9059
+ "cycle": "cycles",
9060
+ "execution": "executions",
9061
+ "changelog": "changelogs",
9062
+ "feedback": "feedback",
9063
+ // feedback directory is singular, not plural
9064
+ "actor": "actors",
9065
+ "agent": "agents"
9066
+ };
9067
+ const dirName = dirNameMap[type];
9068
+ if (!projectRoot2) {
9069
+ const safeId2 = recordId.replace(/:/g, "_");
9070
+ return join(".gitgov", dirName, `${safeId2}.json`);
9071
+ }
9072
+ const safeId = recordId.replace(/:/g, "_");
9073
+ return join(projectRoot2, ".gitgov", dirName, `${safeId}.json`);
9074
+ }
9075
+ /**
9076
+ * Extracts the recordId from a filePath.
9077
+ * @private
9078
+ */
9079
+ extractRecordId(filePath) {
9080
+ return basename(filePath, ".json");
9081
+ }
9082
+ /**
9083
+ * Detects the entity type from a recordId.
9084
+ * @private
9085
+ */
9086
+ getEntityType(recordId) {
9087
+ if (recordId.match(/^\d+-exec-/)) return "execution";
9088
+ if (recordId.match(/^\d+-changelog-/)) return "changelog";
9089
+ if (recordId.match(/^\d+-feedback-/)) return "feedback";
9090
+ if (recordId.match(/^\d+-cycle-/)) return "cycle";
9091
+ if (recordId.match(/^\d+-task-/)) return "task";
9092
+ if (recordId.startsWith("execution:") || recordId.includes("-execution-")) return "execution";
9093
+ if (recordId.startsWith("changelog:") || recordId.includes("-changelog-")) return "changelog";
9094
+ if (recordId.startsWith("feedback:") || recordId.includes("-feedback-")) return "feedback";
9095
+ if (recordId.startsWith("task:") || recordId.includes("-task-")) return "task";
9096
+ if (recordId.startsWith("cycle:") || recordId.includes("-cycle-")) return "cycle";
9097
+ if (recordId.startsWith("actor:") || recordId.startsWith("human:") || recordId.startsWith("agent:")) return "actor";
9098
+ if (recordId.startsWith("human_") || recordId.match(/^human-/)) return "actor";
9099
+ if (recordId.startsWith("agent_") || recordId.match(/^agent-/)) return "agent";
9100
+ if (recordId.startsWith("agent:")) return "agent";
9101
+ return "task";
9102
+ }
9103
+ /**
9104
+ * Detects the validator type based on the error.
9105
+ * @private
9106
+ */
9107
+ detectValidatorType(error) {
9108
+ const errorMessage = error.message.toLowerCase();
9109
+ const fieldPath = error.errors?.[0]?.field?.toLowerCase() || "";
9110
+ const allErrorMessages = error.errors?.map((e) => e.message?.toLowerCase() || "").join(" ") || "";
9111
+ const combinedText = `${errorMessage} ${fieldPath} ${allErrorMessages}`;
9112
+ if (combinedText.includes("checksum") || fieldPath.includes("payloadchecksum")) {
9113
+ return "CHECKSUM_VERIFICATION";
9114
+ }
9115
+ if (combinedText.includes("signature") || fieldPath.includes("/signatures/") || fieldPath.includes("signatures")) {
9116
+ return "SIGNATURE_STRUCTURE";
9117
+ }
9118
+ if (combinedText.includes("header") || combinedText.includes("payload") || fieldPath.includes("/header/")) {
9119
+ return "EMBEDDED_METADATA_STRUCTURE";
9120
+ }
9121
+ const versionMismatchIndicators = [
9122
+ "required in v",
9123
+ "field required in v",
9124
+ "deprecated",
9125
+ "obsolete",
9126
+ "schema version",
9127
+ "migration",
9128
+ "v1 to v2",
9129
+ "v2 to v3"
9130
+ ];
9131
+ const hasVersionMismatchIndicator = versionMismatchIndicators.some(
9132
+ (indicator) => combinedText.includes(indicator)
9133
+ );
9134
+ const hasVersionSpecificMessage = /v\d+|version\s+\d+/i.test(combinedText);
9135
+ if (hasVersionMismatchIndicator || hasVersionSpecificMessage) {
9136
+ return "SCHEMA_VERSION_MISMATCH";
9137
+ }
9138
+ return "SCHEMA_VALIDATION";
9139
+ }
9140
+ /**
9141
+ * Determines if an error is fixable.
9142
+ * @private
9143
+ */
9144
+ isFixable(error) {
9145
+ const errorMessage = error.message.toLowerCase();
9146
+ if (errorMessage.includes("header") || errorMessage.includes("metadata")) {
9147
+ return true;
9148
+ }
9149
+ if (errorMessage.includes("must not have additional properties") || errorMessage.includes("must NOT have additional properties")) {
9150
+ return true;
9151
+ }
9152
+ if (errorMessage.includes("checksum")) {
9153
+ return true;
9154
+ }
9155
+ if (errorMessage.includes("signature") && errorMessage.includes("format")) {
9156
+ return true;
9157
+ }
9158
+ return false;
9159
+ }
9160
+ /**
9161
+ * Validates conventions (file naming, timestamps, etc).
9162
+ * Implements EARS-13 through EARS-16.
9163
+ * @private
9164
+ */
9165
+ async validateConventions(record, recordId, filePath, entityType) {
9166
+ const results = [];
9167
+ const dirNameMap = {
9168
+ "task": "tasks",
9169
+ "cycle": "cycles",
9170
+ "execution": "executions",
9171
+ "changelog": "changelogs",
9172
+ "feedback": "feedback",
9173
+ // feedback directory is singular, not plural
9174
+ "actor": "actors",
9175
+ "agent": "agents"
9176
+ };
9177
+ const expectedDir = `.gitgov/${dirNameMap[entityType]}`;
9178
+ if (!filePath.includes(expectedDir)) {
9179
+ results.push({
9180
+ level: "error",
9181
+ filePath,
9182
+ validator: "FILE_NAMING_CONVENTION",
9183
+ message: `File should be in ${expectedDir}/ directory but found in ${filePath}`,
9184
+ entity: {
9185
+ type: entityType,
9186
+ id: recordId
9187
+ },
9188
+ fixable: false,
9189
+ context: {
9190
+ field: "directory",
9191
+ actual: dirname(filePath),
9192
+ expected: expectedDir
9193
+ }
9194
+ });
9195
+ }
9196
+ const expectedFilename = `${recordId}.json`;
9197
+ const actualFilename = basename(filePath);
9198
+ if (actualFilename !== expectedFilename) {
9199
+ results.push({
9200
+ level: "error",
9201
+ filePath,
9202
+ validator: "FILE_NAMING_CONVENTION",
9203
+ message: `Filename '${actualFilename}' does not match entity ID '${recordId}'`,
9204
+ entity: {
9205
+ type: entityType,
9206
+ id: recordId
9207
+ },
9208
+ fixable: false,
9209
+ context: {
9210
+ field: "filename",
9211
+ actual: actualFilename,
9212
+ expected: expectedFilename
9213
+ }
9214
+ });
9215
+ }
9216
+ const payload = record.payload;
9217
+ if (payload.createdAt && payload.updatedAt) {
9218
+ const created = new Date(payload.createdAt).getTime();
9219
+ const updated = new Date(payload.updatedAt).getTime();
9220
+ if (created > updated) {
9221
+ results.push({
9222
+ level: "error",
9223
+ filePath,
9224
+ validator: "TEMPORAL_CONSISTENCY",
9225
+ message: `createdAt (${String(payload.createdAt)}) is after updatedAt (${String(payload.updatedAt)})`,
9226
+ entity: {
9227
+ type: entityType,
9228
+ id: recordId
9229
+ },
9230
+ fixable: false,
9231
+ context: {
9232
+ field: "timestamps",
9233
+ actual: { createdAt: payload.createdAt, updatedAt: payload.updatedAt },
9234
+ expected: "createdAt <= updatedAt"
9235
+ }
9236
+ });
9237
+ }
9238
+ if (payload.completedAt) {
9239
+ const completed = new Date(payload.completedAt).getTime();
9240
+ if (completed < created) {
9241
+ results.push({
9242
+ level: "error",
9243
+ filePath,
9244
+ validator: "TEMPORAL_CONSISTENCY",
9245
+ message: `completedAt (${String(payload.completedAt)}) is before createdAt (${String(payload.createdAt)})`,
9246
+ entity: {
9247
+ type: entityType,
9248
+ id: recordId
9249
+ },
9250
+ fixable: false
9251
+ });
9252
+ }
9253
+ }
9254
+ if (payload.discardedAt) {
9255
+ const discarded = new Date(payload.discardedAt).getTime();
9256
+ if (discarded < created) {
9257
+ results.push({
9258
+ level: "error",
9259
+ filePath,
9260
+ validator: "TEMPORAL_CONSISTENCY",
9261
+ message: `discardedAt (${String(payload.discardedAt)}) is before createdAt (${String(payload.createdAt)})`,
9262
+ entity: {
9263
+ type: entityType,
9264
+ id: recordId
9265
+ },
9266
+ fixable: false
9267
+ });
9268
+ }
9269
+ }
9270
+ }
9271
+ return results;
9272
+ }
9273
+ /**
9274
+ * Validates references (typed references, bidirectional consistency).
9275
+ * Implements EARS-17 through EARS-22.
9276
+ * Requires indexerAdapter to be present.
9277
+ * @private
9278
+ */
9279
+ async validateReferences(record, recordId, filePath, entityType) {
9280
+ const results = [];
9281
+ const payload = record.payload;
9282
+ if (entityType === "execution" && isExecutionRecord2(payload) && payload.taskId) {
9283
+ try {
9284
+ const taskRecord = await this.recordStore.read(payload.taskId);
9285
+ if (!taskRecord) {
9286
+ results.push({
9287
+ level: "warning",
9288
+ filePath,
9289
+ validator: "REFERENTIAL_INTEGRITY",
9290
+ message: `Referenced taskId '${payload.taskId}' not found`,
9291
+ entity: {
9292
+ type: entityType,
9293
+ id: recordId
9294
+ },
9295
+ fixable: false,
9296
+ context: {
9297
+ field: "taskId",
9298
+ actual: payload.taskId,
9299
+ expected: "existing task record"
9300
+ }
9301
+ });
9302
+ } else {
9303
+ const taskPayload = taskRecord.payload;
9304
+ if (taskPayload.status === "discarded") {
9305
+ results.push({
9306
+ level: "warning",
9307
+ filePath,
9308
+ validator: "SOFT_DELETE_DETECTION",
9309
+ message: `Referenced task '${payload.taskId}' has status 'discarded'`,
9310
+ entity: {
9311
+ type: entityType,
9312
+ id: recordId
9313
+ },
9314
+ fixable: false
9315
+ });
9316
+ }
9317
+ }
9318
+ } catch (error) {
9319
+ results.push({
9320
+ level: "warning",
9321
+ filePath,
9322
+ validator: "REFERENTIAL_INTEGRITY",
9323
+ message: `Failed to validate taskId reference: ${error instanceof Error ? error.message : String(error)}`,
9324
+ entity: {
9325
+ type: entityType,
9326
+ id: recordId
9327
+ },
9328
+ fixable: false
9329
+ });
9330
+ }
9331
+ }
9332
+ if (isTaskRecord2(payload) || isExecutionRecord2(payload)) {
9333
+ const payloadWithRefs = payload;
9334
+ if (payloadWithRefs.references && Array.isArray(payloadWithRefs.references)) {
9335
+ for (const ref of payloadWithRefs.references) {
9336
+ const refStr = String(ref);
9337
+ if (!refStr.includes(":")) {
9338
+ results.push({
9339
+ level: "warning",
9340
+ filePath,
9341
+ validator: "TYPED_REFERENCE",
9342
+ message: `Reference '${refStr}' missing type prefix (expected: task:, cycle:, file:, etc.)`,
9343
+ entity: {
9344
+ type: entityType,
9345
+ id: recordId
9346
+ },
9347
+ fixable: false,
9348
+ context: {
9349
+ field: "references",
9350
+ actual: refStr,
9351
+ expected: "prefix:value format"
9352
+ }
9353
+ });
9354
+ continue;
9355
+ }
9356
+ const parts = refStr.split(":", 2);
9357
+ if (parts.length < 2) continue;
9358
+ const [prefix, value] = parts;
9359
+ if (!prefix || !value) continue;
9360
+ const knownPrefixes = ["task", "cycle", "execution", "changelog", "feedback", "actor", "agent", "file", "url", "commit", "pr", "adapter"];
9361
+ if (!knownPrefixes.includes(prefix)) {
9362
+ results.push({
9363
+ level: "warning",
9364
+ filePath,
9365
+ validator: "TYPED_REFERENCE",
9366
+ message: `Unknown reference prefix '${prefix}' (known: ${knownPrefixes.join(", ")})`,
9367
+ entity: {
9368
+ type: entityType,
9369
+ id: recordId
9370
+ },
9371
+ fixable: false,
9372
+ context: {
9373
+ field: "references",
9374
+ actual: prefix,
9375
+ expected: knownPrefixes.join(", ")
9376
+ }
9377
+ });
9378
+ }
9379
+ if (["task", "cycle", "execution", "changelog", "feedback"].includes(prefix)) {
9380
+ try {
9381
+ const referencedRecord = await this.recordStore.read(value);
9382
+ if (!referencedRecord) {
9383
+ results.push({
9384
+ level: "warning",
9385
+ filePath,
9386
+ validator: "REFERENTIAL_INTEGRITY",
9387
+ message: `Referenced ${prefix} '${value}' not found`,
9388
+ entity: {
9389
+ type: entityType,
9390
+ id: recordId
9391
+ },
9392
+ fixable: false,
9393
+ context: {
9394
+ field: "references",
9395
+ actual: refStr,
9396
+ expected: `existing ${prefix} record`
9397
+ }
9398
+ });
9399
+ } else {
9400
+ const refPayload = referencedRecord.payload;
9401
+ if ("status" in refPayload && refPayload.status === "discarded") {
9402
+ results.push({
9403
+ level: "warning",
9404
+ filePath,
9405
+ validator: "SOFT_DELETE_DETECTION",
9406
+ message: `Referenced ${prefix} '${value}' has status 'discarded'`,
9407
+ entity: {
9408
+ type: entityType,
9409
+ id: recordId
9410
+ },
9411
+ fixable: false
9412
+ });
9413
+ }
9414
+ }
9415
+ } catch (error) {
9416
+ }
9417
+ }
9418
+ }
9419
+ }
9420
+ }
9421
+ if (entityType === "task" && isTaskRecord2(payload) && payload.cycleIds && Array.isArray(payload.cycleIds)) {
9422
+ for (const cycleId of payload.cycleIds) {
9423
+ try {
9424
+ const cycleRecord = await this.recordStore.read(cycleId);
9425
+ if (cycleRecord) {
9426
+ const cyclePayload = cycleRecord.payload;
9427
+ if (cyclePayload.taskIds && Array.isArray(cyclePayload.taskIds)) {
9428
+ if (!cyclePayload.taskIds.includes(recordId)) {
9429
+ results.push({
9430
+ level: "warning",
9431
+ filePath,
9432
+ validator: "BIDIRECTIONAL_CONSISTENCY",
9433
+ message: `Task references cycle '${cycleId}' in cycleIds but cycle doesn't include this task in taskIds[]`,
9434
+ entity: {
9435
+ type: entityType,
9436
+ id: recordId
9437
+ },
9438
+ fixable: true,
9439
+ context: {
9440
+ field: "cycleIds",
9441
+ actual: cycleId,
9442
+ expected: `cycle should include task ${recordId} in taskIds[]`
9443
+ }
9444
+ });
9445
+ }
9446
+ }
9447
+ }
9448
+ } catch (error) {
9449
+ }
9450
+ }
9451
+ }
9452
+ if (entityType === "cycle" && isCycleRecord2(payload) && payload.taskIds && Array.isArray(payload.taskIds)) {
9453
+ for (const taskId of payload.taskIds) {
9454
+ try {
9455
+ const taskRecord = await this.recordStore.read(taskId);
9456
+ if (taskRecord) {
9457
+ const taskPayload = taskRecord.payload;
9458
+ if (!taskPayload.cycleIds || !taskPayload.cycleIds.includes(recordId)) {
9459
+ results.push({
9460
+ level: "warning",
9461
+ filePath,
9462
+ validator: "BIDIRECTIONAL_CONSISTENCY",
9463
+ message: `Cycle includes task '${taskId}' in taskIds[] but task doesn't include this cycle in cycleIds[]`,
9464
+ entity: {
9465
+ type: entityType,
9466
+ id: recordId
9467
+ },
9468
+ fixable: true,
9469
+ context: {
9470
+ field: "taskIds",
9471
+ actual: taskPayload.cycleIds || [],
9472
+ expected: `task should include cycle ${recordId} in cycleIds[]`
9473
+ }
9474
+ });
9475
+ }
9476
+ }
9477
+ } catch (error) {
9478
+ }
9479
+ }
9480
+ }
9481
+ return results;
9482
+ }
9483
+ /**
9484
+ * Validates actorIds (resolution in .gitgov/actors/).
9485
+ * Implements EARS-19.
9486
+ * @private
9487
+ */
9488
+ async validateActors(record, recordId, filePath, entityType) {
9489
+ const results = [];
9490
+ if (record.header && record.header.signatures && Array.isArray(record.header.signatures)) {
9491
+ for (const signature of record.header.signatures) {
9492
+ if (signature.keyId) {
9493
+ try {
9494
+ const actorRecord = await this.recordStore.read(signature.keyId);
9495
+ if (!actorRecord) {
9496
+ results.push({
9497
+ level: "warning",
9498
+ filePath,
9499
+ validator: "ACTOR_RESOLUTION",
9500
+ message: `Actor '${signature.keyId}' referenced in signature not found in .gitgov/actors/`,
9501
+ entity: {
9502
+ type: entityType,
9503
+ id: recordId
9504
+ },
9505
+ fixable: false,
9506
+ context: {
9507
+ field: "signatures.keyId",
9508
+ actual: signature.keyId,
9509
+ expected: "existing actor record"
9510
+ }
9511
+ });
9512
+ }
9513
+ } catch (error) {
9514
+ results.push({
9515
+ level: "warning",
9516
+ filePath,
9517
+ validator: "ACTOR_RESOLUTION",
9518
+ message: `Failed to validate actor '${signature.keyId}': ${error instanceof Error ? error.message : String(error)}`,
9519
+ entity: {
9520
+ type: entityType,
9521
+ id: recordId
9522
+ },
9523
+ fixable: false
9524
+ });
9525
+ }
9526
+ }
9527
+ }
9528
+ }
9529
+ const payload = record.payload;
9530
+ if ("actorId" in payload && payload.actorId) {
9531
+ const actorId = payload.actorId;
9532
+ if (actorId) {
9533
+ try {
9534
+ const actorRecord = await this.recordStore.read(actorId);
9535
+ if (!actorRecord) {
9536
+ results.push({
9537
+ level: "warning",
9538
+ filePath,
9539
+ validator: "ACTOR_RESOLUTION",
9540
+ message: `Actor '${actorId}' referenced in payload not found`,
9541
+ entity: {
9542
+ type: entityType,
9543
+ id: recordId
9544
+ },
9545
+ fixable: false,
9546
+ context: {
9547
+ field: "actorId",
9548
+ actual: actorId,
9549
+ expected: "existing actor record"
9550
+ }
9551
+ });
9552
+ }
9553
+ } catch (error) {
9554
+ }
9555
+ }
9556
+ }
9557
+ return results;
9558
+ }
9559
+ /**
9560
+ * Applies a specific repair based on the problem type.
9561
+ * @private
9562
+ */
9563
+ async applyFix(result, options, allErrors) {
9564
+ switch (result.validator) {
9565
+ case "EMBEDDED_METADATA_STRUCTURE":
9566
+ await this.fixLegacyRecord(result, options, allErrors);
9567
+ break;
9568
+ case "BIDIRECTIONAL_CONSISTENCY":
9569
+ await this.fixBidirectionalReference(result);
9570
+ break;
9571
+ case "CHECKSUM_VERIFICATION":
9572
+ await this.recalculateChecksum(result);
9573
+ break;
9574
+ case "SIGNATURE_STRUCTURE":
9575
+ await this.fixSignatureStructure(result, options, allErrors || [result]);
9576
+ break;
9577
+ default:
9578
+ throw new Error(`Fix not implemented for validator: ${result.validator}`);
9579
+ }
9580
+ }
9581
+ /**
9582
+ * Repairs a legacy record by wrapping it in embedded metadata.
9583
+ * Implements EARS-24: Normalize legacy records with signature.
9584
+ * @private
9585
+ */
9586
+ async fixLegacyRecord(result, options, allErrors) {
9587
+ if (!options.privateKey) {
9588
+ throw new Error("privateKey is required in FixOptions to sign legacy records");
9589
+ }
9590
+ const fileContent = await this.fileSystem.readFile(result.filePath, "utf-8");
9591
+ const rawData = JSON.parse(fileContent);
9592
+ if (typeof rawData !== "object" || rawData === null || !("header" in rawData) || !("payload" in rawData)) {
9593
+ throw new Error(`Record does not have EmbeddedMetadataRecord structure (missing header or payload): ${result.filePath}`);
9594
+ }
9595
+ const rawObj = rawData;
9596
+ if (!rawObj["header"] || !rawObj["payload"]) {
9597
+ throw new Error(`Record does not have EmbeddedMetadataRecord structure (missing header or payload): ${result.filePath}`);
9598
+ }
9599
+ const embeddedRecord = rawData;
9600
+ const hasAdditionalProperties = allErrors?.some(
9601
+ (e) => e.message.includes("must NOT have additional properties") && e.message.includes("/payload")
9602
+ );
9603
+ if (hasAdditionalProperties) {
9604
+ const entityType = this.getEntityType(result.entity.id);
9605
+ let cleanPayload;
9606
+ try {
9607
+ switch (entityType) {
9608
+ case "task":
9609
+ const taskPayload = embeddedRecord.payload;
9610
+ cleanPayload = {
9611
+ id: taskPayload.id,
9612
+ title: taskPayload.title,
9613
+ status: taskPayload.status,
9614
+ priority: taskPayload.priority,
9615
+ description: taskPayload.description,
9616
+ ...taskPayload.cycleIds && { cycleIds: taskPayload.cycleIds },
9617
+ ...taskPayload.tags && { tags: taskPayload.tags },
9618
+ ...taskPayload.references && { references: taskPayload.references },
9619
+ ...taskPayload.notes && { notes: taskPayload.notes }
9620
+ };
9621
+ break;
9622
+ case "cycle":
9623
+ const cyclePayload = embeddedRecord.payload;
9624
+ cleanPayload = {
9625
+ id: cyclePayload.id,
9626
+ title: cyclePayload.title,
9627
+ status: cyclePayload.status,
9628
+ ...cyclePayload.taskIds && { taskIds: cyclePayload.taskIds },
9629
+ ...cyclePayload.childCycleIds && { childCycleIds: cyclePayload.childCycleIds },
9630
+ ...cyclePayload.tags && { tags: cyclePayload.tags },
9631
+ ...cyclePayload.notes && { notes: cyclePayload.notes }
9632
+ };
9633
+ break;
9634
+ default:
9635
+ await this.recalculateChecksum(result);
9636
+ return;
9637
+ }
9638
+ const payloadChecksum = calculatePayloadChecksum(cleanPayload);
9639
+ const signature = signPayload(
9640
+ cleanPayload,
9641
+ options.privateKey,
9642
+ options.keyId || result.entity.id,
9643
+ "author",
9644
+ "Signature regenerated after removing additional properties"
9645
+ );
9646
+ const fixedRecord = {
9647
+ header: {
9648
+ ...embeddedRecord.header,
9649
+ payloadChecksum,
9650
+ signatures: [signature]
9651
+ },
9652
+ payload: cleanPayload
9653
+ };
9654
+ await this.fileSystem.writeFile(
9655
+ result.filePath,
9656
+ JSON.stringify(fixedRecord, null, 2)
9657
+ );
9658
+ logger3.info(`Removed additional properties from payload: ${result.filePath}`);
9659
+ return;
9660
+ } catch (error) {
9661
+ logger3.warn(`Could not clean additional properties, recalculating checksum only: ${error instanceof Error ? error.message : String(error)}`);
9662
+ }
9663
+ }
9664
+ await this.recalculateChecksum(result);
9665
+ }
9666
+ /**
9667
+ * Repairs bidirectional inconsistencies between Task and Cycle.
9668
+ * Implements EARS-25: Sync bidirectional references.
9669
+ * @private
9670
+ */
9671
+ async fixBidirectionalReference(result) {
9672
+ const entityType = result.entity.type;
9673
+ const recordId = result.entity.id;
9674
+ if (entityType === "task") {
9675
+ const taskRecord = await this.recordStore.read(recordId);
9676
+ if (!taskRecord) return;
9677
+ const taskPayload = taskRecord.payload;
9678
+ if (!taskPayload.cycleIds || taskPayload.cycleIds.length === 0) return;
9679
+ for (const cycleId of taskPayload.cycleIds) {
9680
+ const cycleRecord = await this.recordStore.read(cycleId);
9681
+ if (!cycleRecord) continue;
9682
+ const mutableCyclePayload = cycleRecord.payload;
9683
+ if (!mutableCyclePayload.taskIds) {
9684
+ mutableCyclePayload.taskIds = [];
9685
+ }
9686
+ if (!mutableCyclePayload.taskIds.includes(recordId)) {
9687
+ mutableCyclePayload.taskIds.push(recordId);
9688
+ const cycleFilePath = this.getFilePath(cycleId);
9689
+ await this.fileSystem.writeFile(
9690
+ cycleFilePath,
9691
+ JSON.stringify(cycleRecord, null, 2)
9692
+ );
9693
+ logger3.info(`Fixed bidirectional reference: Added task ${recordId} to cycle ${cycleId}`);
9694
+ }
9695
+ }
9696
+ } else if (entityType === "cycle") {
9697
+ const cycleRecord = await this.recordStore.read(recordId);
9698
+ if (!cycleRecord) return;
9699
+ const cyclePayload = cycleRecord.payload;
9700
+ if (!cyclePayload.taskIds || cyclePayload.taskIds.length === 0) return;
9701
+ const taskIdFromContext = result.context?.actual;
9702
+ const tasksToFix = taskIdFromContext ? [taskIdFromContext] : cyclePayload.taskIds;
9703
+ for (const taskId of tasksToFix) {
9704
+ const taskRecord = await this.recordStore.read(taskId);
9705
+ if (!taskRecord) continue;
9706
+ const taskPayload = taskRecord.payload;
9707
+ if (!taskPayload.cycleIds) {
9708
+ taskPayload.cycleIds = [];
9709
+ }
9710
+ if (!taskPayload.cycleIds.includes(recordId)) {
9711
+ taskPayload.cycleIds.push(recordId);
9712
+ const taskFilePath = this.getFilePath(taskId);
9713
+ await this.fileSystem.writeFile(
9714
+ taskFilePath,
9715
+ JSON.stringify(taskRecord, null, 2)
9716
+ );
9717
+ logger3.info(`Fixed bidirectional reference: Added cycle ${recordId} to task ${taskId} in cycleIds[]`);
9718
+ }
9719
+ }
9720
+ }
9721
+ }
9722
+ /**
9723
+ * Recalculates the checksum of a record.
9724
+ * Implements checksum repair for corrupted checksums.
9725
+ * @private
9726
+ */
9727
+ async recalculateChecksum(result) {
9728
+ const fileContent = await this.fileSystem.readFile(result.filePath, "utf-8");
9729
+ const record = JSON.parse(fileContent);
9730
+ if (!record.header || !record.payload) {
9731
+ throw new Error("Cannot recalculate checksum: invalid record structure");
9732
+ }
9733
+ const correctChecksum = calculatePayloadChecksum(record.payload);
9734
+ record.header.payloadChecksum = correctChecksum;
9735
+ await this.fileSystem.writeFile(
9736
+ result.filePath,
9737
+ JSON.stringify(record, null, 2)
9738
+ );
9739
+ logger3.info(`Recalculated checksum for: ${result.filePath}`);
9740
+ }
9741
+ /**
9742
+ * Creates a backup of a file.
9743
+ * @private
9744
+ */
9745
+ async createBackup(filePath) {
9746
+ const timestamp = Date.now();
9747
+ const backupPath = `${filePath}.backup-${timestamp}`;
9748
+ const content = await this.fileSystem.readFile(filePath, "utf-8");
9749
+ await this.fileSystem.writeFile(backupPath, content);
9750
+ this.lastBackupPath = backupPath;
9751
+ return backupPath;
9752
+ }
9753
+ /**
9754
+ * Fixes signature structure errors by analyzing specific errors and applying targeted fixes.
9755
+ * Reads the record directly (bypassing validation), extracts payload, and fixes signature issues:
9756
+ * - Adds missing 'notes' field with a valid value
9757
+ * - Removes additional properties not allowed
9758
+ * - Regenerates invalid signatures
9759
+ * Works even when signatures have invalid format (e.g., "placeholder" instead of base64).
9760
+ * @private
9761
+ */
9762
+ async fixSignatureStructure(result, options, allErrors) {
9763
+ if (!options.privateKey) {
9764
+ throw new Error("Private key required to fix signature structure errors");
9765
+ }
9766
+ const content = await this.fileSystem.readFile(result.filePath, "utf-8");
9767
+ let raw;
9768
+ try {
9769
+ raw = JSON.parse(content);
9770
+ } catch (parseError) {
9771
+ throw new Error(`Invalid JSON in file: ${result.filePath}`);
9772
+ }
9773
+ if (typeof raw !== "object" || raw === null || !("header" in raw) || !("payload" in raw)) {
9774
+ throw new Error(`Record does not have EmbeddedMetadataRecord structure (missing header or payload): ${result.filePath}`);
9775
+ }
9776
+ const rawObj = raw;
9777
+ if (!rawObj["header"] || !rawObj["payload"]) {
9778
+ throw new Error(`Record does not have EmbeddedMetadataRecord structure (missing header or payload): ${result.filePath}`);
9779
+ }
9780
+ const embeddedRecord = raw;
9781
+ const payload = embeddedRecord.payload;
9782
+ const existingHeader = embeddedRecord.header;
9783
+ const payloadChecksum = calculatePayloadChecksum(payload);
9784
+ const needsNotes = allErrors.some((e) => e.message.includes("must have required property 'notes'"));
9785
+ const hasAdditionalProperties = allErrors.some((e) => e.message.includes("must NOT have additional properties"));
9786
+ const hasInvalidSignature = allErrors.some((e) => e.message.includes("signature: must match pattern"));
9787
+ let keyId = options.keyId || result.entity.id;
9788
+ let role = "author";
9789
+ let notes = "Signature regenerated by lint --fix";
9790
+ if (existingHeader?.signatures?.[0]) {
9791
+ const existingSig = existingHeader.signatures[0];
9792
+ if (existingSig.keyId) {
9793
+ keyId = existingSig.keyId;
9794
+ }
9795
+ if (existingSig.role) {
9796
+ role = existingSig.role;
9797
+ }
9798
+ if (existingSig.notes && typeof existingSig.notes === "string" && existingSig.notes.length > 0) {
9799
+ notes = existingSig.notes;
9800
+ }
9801
+ }
9802
+ if (needsNotes && !notes) {
9803
+ notes = "Signature regenerated by lint --fix";
9804
+ }
9805
+ const needsRegeneration = hasInvalidSignature || hasAdditionalProperties || needsNotes && (hasInvalidSignature || hasAdditionalProperties);
9806
+ let fixedSignature;
9807
+ if (needsRegeneration) {
9808
+ fixedSignature = signPayload(
9809
+ payload,
9810
+ options.privateKey,
9811
+ keyId,
9812
+ role,
9813
+ notes
9814
+ );
9815
+ } else {
9816
+ const existingSig = existingHeader?.signatures?.[0];
9817
+ fixedSignature = {
9818
+ keyId: existingSig?.keyId || keyId,
9819
+ role: existingSig?.role || role,
9820
+ notes: needsNotes ? notes || "Signature regenerated by lint --fix" : existingSig?.notes || notes,
9821
+ signature: existingSig?.signature || "",
9822
+ timestamp: existingSig?.timestamp || Math.floor(Date.now() / 1e3)
9823
+ };
9824
+ }
9825
+ const entityType = this.getEntityType(result.entity.id);
9826
+ const fixedRecord = {
9827
+ header: {
9828
+ version: existingHeader?.version || "1.0",
9829
+ type: existingHeader?.type || entityType,
9830
+ payloadChecksum,
9831
+ signatures: [fixedSignature]
9832
+ // Replace all signatures with one valid signature
9833
+ },
9834
+ payload
9835
+ };
9836
+ await this.fileSystem.writeFile(
9837
+ result.filePath,
9838
+ JSON.stringify(fixedRecord, null, 2)
9839
+ );
9840
+ const action = needsRegeneration ? "regenerated" : "fixed structure";
9841
+ logger3.info(`Fixed signature structure: ${result.filePath} (${action} signature for ${keyId})`);
9842
+ }
9843
+ /**
9844
+ * Restores a file from its most recent backup.
9845
+ * Implements EARS-32: Restore backup if fix fails.
9846
+ * @private
9847
+ */
9848
+ async restoreBackup(filePath) {
9849
+ if (this.lastBackupPath) {
9850
+ try {
9851
+ const exists = await this.fileSystem.exists(this.lastBackupPath);
9852
+ if (exists) {
9853
+ const backupContent = await this.fileSystem.readFile(this.lastBackupPath, "utf-8");
9854
+ await this.fileSystem.writeFile(filePath, backupContent);
9855
+ logger3.info(`Restored ${filePath} from backup ${this.lastBackupPath}`);
9856
+ this.lastBackupPath = null;
9857
+ return;
9858
+ }
9859
+ } catch (error) {
9860
+ }
9861
+ }
9862
+ const now = Date.now();
9863
+ const timeWindows = [0, 1e3, 5e3, 1e4, 6e4];
9864
+ for (const delta of timeWindows) {
9865
+ const timestamp = now - delta;
9866
+ const backupPath = `${filePath}.backup-${timestamp}`;
9867
+ try {
9868
+ const exists = await this.fileSystem.exists(backupPath);
9869
+ if (exists) {
9870
+ const backupContent = await this.fileSystem.readFile(backupPath, "utf-8");
9871
+ await this.fileSystem.writeFile(filePath, backupContent);
9872
+ logger3.info(`Restored ${filePath} from backup ${backupPath}`);
9873
+ return;
9874
+ }
9875
+ } catch (error) {
9876
+ continue;
9877
+ }
9878
+ }
9879
+ throw new Error(`No backup found for ${filePath}`);
9880
+ }
9881
+ };
9882
+
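The block above is the entire new lint module: LintModule discovers records under .gitgov/, validates them through the load*Record loaders plus convention, reference, and actor checks, and can apply targeted fixes with optional backups. A hedged end-to-end sketch assembled from the JSDoc examples above; recordStore, indexerAdapter, and privateKey are assumed to be constructed elsewhere in the package:

```typescript
import { Lint } from '@gitgov/core';

// recordStore is required; indexerAdapter enables reference and actor validation.
const lintModule = new Lint.LintModule({ recordStore, indexerAdapter });

const report = await lintModule.lint({
  path: '.gitgov/',
  validateReferences: true,
  validateActors: true
});
console.log(`Errors: ${report.summary.errors}, warnings: ${report.summary.warnings}`);

// Apply automatic repairs to the fixable findings, creating backups first.
const fixReport = await lintModule.fix(report, {
  createBackups: true,
  keyId: 'system:migrator',
  privateKey
});
console.log(`Fixed: ${fixReport.summary.fixed}, failed: ${fixReport.summary.failed}`);
```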
7909
9883
  // src/validation/index.ts
7910
9884
  var validation_exports = {};
7911
9885
  __export(validation_exports, {
@@ -7927,7 +9901,6 @@ __export(validation_exports, {
7927
9901
  validateChangelogRecordSchema: () => validateChangelogRecordSchema,
7928
9902
  validateCycleRecordDetailed: () => validateCycleRecordDetailed,
7929
9903
  validateCycleRecordSchema: () => validateCycleRecordSchema,
7930
- validateEmbeddedMetadataBusinessRules: () => validateEmbeddedMetadataBusinessRules,
7931
9904
  validateEmbeddedMetadataDetailed: () => validateEmbeddedMetadataDetailed,
7932
9905
  validateEmbeddedMetadataSchema: () => validateEmbeddedMetadataSchema,
7933
9906
  validateExecutionRecordDetailed: () => validateExecutionRecordDetailed,
@@ -8385,10 +10358,10 @@ var RelationshipAnalyzer = class {
8385
10358
  }
8386
10359
  const visited = /* @__PURE__ */ new Set();
8387
10360
  const recursionStack = /* @__PURE__ */ new Set();
8388
- const path5 = [];
10361
+ const path6 = [];
8389
10362
  for (const node of graph.keys()) {
8390
10363
  if (!visited.has(node)) {
8391
- const cyclePath = this.findCycleDFS(node, graph, visited, recursionStack, path5);
10364
+ const cyclePath = this.findCycleDFS(node, graph, visited, recursionStack, path6);
8392
10365
  if (cyclePath.length > 0) {
8393
10366
  const cycleDescription = this.formatCycleError(cyclePath);
8394
10367
  throw new CircularDependencyError(cycleDescription);
@@ -8399,24 +10372,24 @@ var RelationshipAnalyzer = class {
8399
10372
  /**
8400
10373
  * DFS helper for circular dependency detection with path tracking
8401
10374
  */
8402
- findCycleDFS(node, graph, visited, recursionStack, path5) {
10375
+ findCycleDFS(node, graph, visited, recursionStack, path6) {
8403
10376
  visited.add(node);
8404
10377
  recursionStack.add(node);
8405
- path5.push(node);
10378
+ path6.push(node);
8406
10379
  const neighbors = graph.get(node) || [];
8407
10380
  for (const neighbor of neighbors) {
8408
10381
  if (!visited.has(neighbor)) {
8409
- const cyclePath = this.findCycleDFS(neighbor, graph, visited, recursionStack, path5);
10382
+ const cyclePath = this.findCycleDFS(neighbor, graph, visited, recursionStack, path6);
8410
10383
  if (cyclePath.length > 0) {
8411
10384
  return cyclePath;
8412
10385
  }
8413
10386
  } else if (recursionStack.has(neighbor)) {
8414
- const cycleStartIndex = path5.indexOf(neighbor);
8415
- return path5.slice(cycleStartIndex).concat([neighbor]);
10387
+ const cycleStartIndex = path6.indexOf(neighbor);
10388
+ return path6.slice(cycleStartIndex).concat([neighbor]);
8416
10389
  }
8417
10390
  }
8418
10391
  recursionStack.delete(node);
8419
- path5.pop();
10392
+ path6.pop();
8420
10393
  return [];
8421
10394
  }
8422
10395
  /**
@@ -9124,6 +11097,6 @@ var DiagramGenerator = class {
9124
11097
  }
9125
11098
  };
9126
11099
 
9127
- export { adapters_exports as Adapters, backlog_adapter_exports as BacklogAdapter, changelog_adapter_exports as ChangelogAdapter, config_manager_exports as Config, crypto_exports as Crypto, diagram_generator_exports as DiagramGenerator, event_bus_exports as EventBus, execution_adapter_exports as ExecutionAdapter, factories_exports as Factories, feedback_adapter_exports as FeedbackAdapter, identity_adapter_exports as IdentityAdapter, indexer_adapter_exports as IndexerAdapter, logger_exports as Logger, metrics_adapter_exports as MetricsAdapter, project_adapter_exports as ProjectAdapter, types_exports as Records, schemas_exports as Schemas, store_exports as Store, validation_exports as Validation, workflow_methodology_adapter_exports as WorkflowMethodologyAdapter };
11100
+ export { adapters_exports as Adapters, backlog_adapter_exports as BacklogAdapter, changelog_adapter_exports as ChangelogAdapter, config_manager_exports as Config, crypto_exports as Crypto, diagram_generator_exports as DiagramGenerator, event_bus_exports as EventBus, execution_adapter_exports as ExecutionAdapter, factories_exports as Factories, feedback_adapter_exports as FeedbackAdapter, identity_adapter_exports as IdentityAdapter, indexer_adapter_exports as IndexerAdapter, lint_exports as Lint, logger_exports as Logger, metrics_adapter_exports as MetricsAdapter, project_adapter_exports as ProjectAdapter, types_exports as Records, schemas_exports as Schemas, store_exports as Store, validation_exports as Validation, workflow_methodology_adapter_exports as WorkflowMethodologyAdapter };
9128
11101
  //# sourceMappingURL=index.js.map
9129
11102
  //# sourceMappingURL=index.js.map
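The only change to the bundle's export statement is the addition of `lint_exports as Lint`, so the 1.7.0 API surface is additive for existing consumers. A short sketch of the new import:

```typescript
// New in 1.7.0: the Lint namespace sits alongside the existing named exports.
import { Factories, Validation, Lint } from '@gitgov/core';

console.log(typeof Lint.LintModule); // "function"
```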