pumuki 6.3.142 → 6.3.144

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -6,6 +6,13 @@ This project follows [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
6
6
 
7
7
  ## [Unreleased]
8
8
 
9
+ ## [6.3.143] - 2026-05-05
10
+
11
+ ### Fixed
12
+
13
+ - **PUMUKI-INC-060 baseline TDD/BDD fresco:** los cambios in-scope se bloquean si la evidencia de baseline TDD/BDD está caducada, obligando a reejecutar los tests baseline del componente antes de editar código relacionado.
14
+ - **Ventana configurable de evidencia:** `PUMUKI_TDD_BDD_EVIDENCE_MAX_AGE_SECONDS` permite ajustar la frescura máxima; por defecto son 900 segundos y los valores inválidos recaen en ese valor por defecto.
15
+
9
16
  ## [6.3.142] - 2026-05-05
10
17
 
11
18
  ### Fixed
@@ -4,6 +4,18 @@ This file tracks the active deterministic framework line used in this repository
4
4
  Canonical release chronology lives in `CHANGELOG.md`.
5
5
  This file keeps only the operational highlights and rollout notes that matter while running the framework.
6
6
 
7
+ ### 2026-05-05 (v6.3.144)
8
+
9
+ - **RuralGo PUMUKI-INC-122:** `pumuki sdd evidence` serializa escrituras concurrentes del artefacto `.pumuki/artifacts/pumuki-evidence-v1.json` con lock local y rename atómico.
10
+ - **Sin pérdida de slices:** dos ejecuciones paralelas fusionan slices existentes/nuevas en lugar de pisar el JSON compartido; además el scaffold genera baseline TDD/BDD válido desde la evidencia PRE_WRITE.
11
+ - **Rollout:** publicar `pumuki@6.3.144`, repinear primero RuralGo y revalidar `status`, `doctor` y una ráfaga concurrente de `pumuki sdd evidence`.
12
+
13
+ ### 2026-05-05 (v6.3.143)
14
+
15
+ - **RuralGo PUMUKI-INC-060:** PRE_WRITE deja de aceptar evidencia TDD/BDD de baseline caducada para cambios in-scope.
16
+ - **Baseline antes de editar:** si el componente tocado requiere TDD/BDD, la evidencia debe ser reciente y estar aprobada; si no, Pumuki bloquea y pide reejecutar los baseline tests.
17
+ - **Rollout:** publicar `pumuki@6.3.143`, repinear primero RuralGo y revalidar `status`, `doctor` y el bloqueo por evidencia stale.
18
+
7
19
  ### 2026-05-05 (v6.3.142)
8
20
 
9
21
  - **RuralGo PUMUKI-INC-059:** PRE_WRITE vuelve a pedir hechos AST para iOS SOLID aunque el flag experimental de heurísticas esté apagado.
@@ -1,5 +1,14 @@
1
- import { createHash } from 'node:crypto';
2
- import { mkdirSync, writeFileSync } from 'node:fs';
1
+ import { createHash, randomUUID } from 'node:crypto';
2
+ import {
3
+ closeSync,
4
+ existsSync,
5
+ mkdirSync,
6
+ openSync,
7
+ readFileSync,
8
+ renameSync,
9
+ rmSync,
10
+ writeFileSync,
11
+ } from 'node:fs';
3
12
  import { basename, dirname, isAbsolute, relative, resolve } from 'node:path';
4
13
  import { readEvidenceResult, type EvidenceReadResult } from '../evidence/readEvidence';
5
14
 
@@ -27,6 +36,11 @@ export type SddEvidenceScaffoldResult = {
27
36
  slices: Array<{
28
37
  id: string;
29
38
  scenario_ref: string;
39
+ baseline: {
40
+ status: 'passed';
41
+ timestamp: string;
42
+ test_ref: string;
43
+ };
30
44
  red: {
31
45
  status: 'failed';
32
46
  timestamp: string;
@@ -62,9 +76,100 @@ export type SddEvidenceScaffoldResult = {
62
76
  };
63
77
  };
64
78
 
79
+ type SddEvidenceArtifact = SddEvidenceScaffoldResult['artifact'];
80
+
81
+ const LOCK_WAIT_BUFFER_BYTES = 4;
82
+ const EVIDENCE_ARTIFACT_LOCK_TIMEOUT_MS = 5000;
83
+ const EVIDENCE_ARTIFACT_LOCK_RETRY_DELAY_MS = 25;
84
+ const EVIDENCE_ARTIFACT_JSON_INDENT_SPACES = 2;
85
+
65
86
  const computeDigest = (value: string): string =>
66
87
  `sha256:${createHash('sha256').update(value, 'utf8').digest('hex')}`;
67
88
 
89
+ const sleepSync = (milliseconds: number): void => {
90
+ Atomics.wait(new Int32Array(new SharedArrayBuffer(LOCK_WAIT_BUFFER_BYTES)), 0, 0, milliseconds);
91
+ };
92
+
93
+ const acquireFileLock = (lockPath: string): (() => void) => {
94
+ const startedAt = Date.now();
95
+ mkdirSync(dirname(lockPath), { recursive: true });
96
+ while (true) {
97
+ try {
98
+ const fd = openSync(lockPath, 'wx');
99
+ writeFileSync(fd, `${process.pid}:${new Date().toISOString()}\n`, 'utf8');
100
+ return () => {
101
+ closeSync(fd);
102
+ rmSync(lockPath, { force: true });
103
+ };
104
+ } catch (error) {
105
+ const code = error instanceof Error && 'code' in error
106
+ ? String((error as NodeJS.ErrnoException).code)
107
+ : '';
108
+ if (code !== 'EEXIST') {
109
+ throw error;
110
+ }
111
+ if (Date.now() - startedAt > EVIDENCE_ARTIFACT_LOCK_TIMEOUT_MS) {
112
+ throw new Error(
113
+ `[pumuki][sdd] evidence artifact is locked by another process: ${lockPath}. Retry when the current evidence write finishes.`
114
+ );
115
+ }
116
+ sleepSync(EVIDENCE_ARTIFACT_LOCK_RETRY_DELAY_MS);
117
+ }
118
+ }
119
+ };
120
+
121
+ const readExistingSddEvidenceArtifact = (path: string): SddEvidenceArtifact | null => {
122
+ if (!existsSync(path)) {
123
+ return null;
124
+ }
125
+ const parsed = JSON.parse(readFileSync(path, 'utf8')) as Partial<SddEvidenceArtifact>;
126
+ if (parsed.version !== '1' || !Array.isArray(parsed.slices)) {
127
+ return null;
128
+ }
129
+ return parsed as SddEvidenceArtifact;
130
+ };
131
+
132
+ const mergeSddEvidenceArtifacts = (params: {
133
+ existing: SddEvidenceArtifact | null;
134
+ next: SddEvidenceArtifact;
135
+ }): SddEvidenceArtifact => {
136
+ if (!params.existing) {
137
+ return params.next;
138
+ }
139
+ const slices = params.existing.slices.filter(
140
+ (slice) => !params.next.slices.some((nextSlice) => nextSlice.id === slice.id)
141
+ );
142
+ return {
143
+ ...params.next,
144
+ slices: [...slices, ...params.next.slices],
145
+ };
146
+ };
147
+
148
+ const writeSddEvidenceArtifactAtomically = (params: {
149
+ outputPath: string;
150
+ artifact: SddEvidenceArtifact;
151
+ }): {
152
+ artifact: SddEvidenceArtifact;
153
+ serialized: string;
154
+ } => {
155
+ const lockPath = `${params.outputPath}.lock`;
156
+ const release = acquireFileLock(lockPath);
157
+ const tempPath = `${params.outputPath}.${process.pid}.${randomUUID()}.tmp`;
158
+ try {
159
+ const artifact = mergeSddEvidenceArtifacts({
160
+ existing: readExistingSddEvidenceArtifact(params.outputPath),
161
+ next: params.artifact,
162
+ });
163
+ const serialized = `${JSON.stringify(artifact, null, EVIDENCE_ARTIFACT_JSON_INDENT_SPACES)}\n`;
164
+ writeFileSync(tempPath, serialized, 'utf8');
165
+ renameSync(tempPath, params.outputPath);
166
+ return { artifact, serialized };
167
+ } finally {
168
+ rmSync(tempPath, { force: true });
169
+ release();
170
+ }
171
+ };
172
+
68
173
  const resolveRepoBoundPath = (params: {
69
174
  repoRoot: string;
70
175
  candidatePath: string;
@@ -91,7 +196,7 @@ const resolveRepoBoundPath = (params: {
91
196
  return resolved;
92
197
  };
93
198
 
94
- const isPlaceholderToken = (value: string): boolean => {
199
+ const isReservedInputValue = (value: string): boolean => {
95
200
  const normalized = value.trim().toLowerCase();
96
201
  return (
97
202
  normalized === 'todo' ||
@@ -110,7 +215,7 @@ const normalizeRequired = (value: string | undefined, flagName: string): string
110
215
  if (normalized.length === 0) {
111
216
  throw new Error(`[pumuki][sdd] evidence requires ${flagName}.`);
112
217
  }
113
- if (isPlaceholderToken(normalized)) {
218
+ if (isReservedInputValue(normalized)) {
114
219
  throw new Error(`[pumuki][sdd] evidence ${flagName} must not be a placeholder value.`);
115
220
  }
116
221
  return normalized;
@@ -214,6 +319,11 @@ export const runSddEvidenceScaffold = (params?: {
214
319
  {
215
320
  id: scenarioId,
216
321
  scenario_ref: resolveScenarioReference(scenarioId),
322
+ baseline: {
323
+ status: 'passed',
324
+ timestamp: validEvidence.source_descriptor.generated_at ?? generatedAt,
325
+ test_ref: 'pumuki sdd validate --stage=PRE_WRITE --json',
326
+ },
217
327
  red: {
218
328
  status: 'failed',
219
329
  timestamp: generatedAt,
@@ -249,13 +359,19 @@ export const runSddEvidenceScaffold = (params?: {
249
359
  status: 'valid',
250
360
  },
251
361
  };
252
- const serialized = `${JSON.stringify(artifact, null, 2)}\n`;
253
- const digest = computeDigest(serialized);
362
+ let finalArtifact = artifact;
363
+ let serialized = `${JSON.stringify(finalArtifact, null, 2)}\n`;
254
364
 
255
365
  if (!dryRun) {
256
366
  mkdirSync(dirname(outputAbsolutePath), { recursive: true });
257
- writeFileSync(outputAbsolutePath, serialized, 'utf8');
367
+ const writeResult = writeSddEvidenceArtifactAtomically({
368
+ outputPath: outputAbsolutePath,
369
+ artifact,
370
+ });
371
+ finalArtifact = writeResult.artifact;
372
+ serialized = writeResult.serialized;
258
373
  }
374
+ const digest = computeDigest(serialized);
259
375
 
260
376
  return {
261
377
  command: 'pumuki sdd evidence',
@@ -273,6 +389,6 @@ export const runSddEvidenceScaffold = (params?: {
273
389
  written: !dryRun,
274
390
  digest,
275
391
  },
276
- artifact,
392
+ artifact: finalArtifact,
277
393
  };
278
394
  };
@@ -76,10 +76,33 @@ const isTimelineOrdered = (timestamps: ReadonlyArray<string | undefined>): boole
76
76
  return true;
77
77
  };
78
78
 
79
+ const DEFAULT_EVIDENCE_MAX_AGE_SECONDS = 900;
80
+
81
+ const resolveEvidenceMaxAgeSeconds = (): number => {
82
+ const raw = process.env.PUMUKI_TDD_BDD_EVIDENCE_MAX_AGE_SECONDS?.trim();
83
+ if (!raw) {
84
+ return DEFAULT_EVIDENCE_MAX_AGE_SECONDS;
85
+ }
86
+ const parsed = Number.parseInt(raw, 10);
87
+ if (!Number.isFinite(parsed) || parsed <= 0) {
88
+ return DEFAULT_EVIDENCE_MAX_AGE_SECONDS;
89
+ }
90
+ return parsed;
91
+ };
92
+
93
+ const resolveEvidenceAgeSeconds = (generatedAt: string, nowMs: number): number | null => {
94
+ const generatedAtMs = new Date(generatedAt).getTime();
95
+ if (Number.isNaN(generatedAtMs)) {
96
+ return null;
97
+ }
98
+ return Math.max(0, Math.floor((nowMs - generatedAtMs) / 1000));
99
+ };
100
+
79
101
  export const enforceTddBddPolicy = (params: {
80
102
  facts: ReadonlyArray<Fact>;
81
103
  repoRoot: string;
82
104
  branch: string | null;
105
+ now?: () => number;
83
106
  }): TddBddEnforcementResult => {
84
107
  const scope = classifyTddBddScope(params.facts);
85
108
  const baseSnapshot: TddBddSnapshot = {
@@ -210,6 +233,46 @@ export const enforceTddBddPolicy = (params: {
210
233
  };
211
234
  }
212
235
 
236
+ const maxAgeSeconds = resolveEvidenceMaxAgeSeconds();
237
+ const ageSeconds = resolveEvidenceAgeSeconds(
238
+ evidenceRead.evidence.generated_at,
239
+ params.now?.() ?? Date.now()
240
+ );
241
+ if (ageSeconds === null || ageSeconds > maxAgeSeconds) {
242
+ const messageAge =
243
+ ageSeconds === null ? 'unknown' : `${ageSeconds}s`;
244
+ const finding = buildFinding({
245
+ ruleId: 'generic_tdd_baseline_required',
246
+ code: 'TDD_BDD_EVIDENCE_STALE',
247
+ message:
248
+ `TDD/BDD evidence is stale for this PRE_WRITE baseline: age=${messageAge}, max=${maxAgeSeconds}s. Re-run the baseline tests for the touched component and refresh evidence before editing related code.`,
249
+ filePath: evidenceRead.path,
250
+ });
251
+ return {
252
+ findings: [finding],
253
+ snapshot: {
254
+ ...baseSnapshot,
255
+ status: 'blocked',
256
+ evidence: {
257
+ ...baseSnapshot.evidence,
258
+ state: 'valid',
259
+ version: evidenceRead.evidence.version,
260
+ slices_total: evidenceRead.evidence.slices.length,
261
+ slices_valid: 0,
262
+ slices_invalid: evidenceRead.evidence.slices.length,
263
+ integrity_ok: evidenceRead.integrity.valid,
264
+ errors: ['TDD_BDD_EVIDENCE_STALE'],
265
+ baseline: {
266
+ required: true,
267
+ passed: 0,
268
+ missing: 0,
269
+ failed: 0,
270
+ },
271
+ },
272
+ },
273
+ };
274
+ }
275
+
213
276
  const sliceFindings: Finding[] = [];
214
277
  const seenSliceIds = new Set<string>();
215
278
  let validSlices = 0;
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "pumuki",
3
- "version": "6.3.142",
3
+ "version": "6.3.144",
4
4
  "description": "Enterprise-grade AST Intelligence System with multi-platform support (iOS, Android, Backend, Frontend) and Feature-First + DDD + Clean Architecture enforcement. Includes dynamic violations API for intelligent querying.",
5
5
  "main": "index.js",
6
6
  "bin": {