create-sdd-project 0.16.10 → 0.17.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/lib/doctor.js CHANGED
@@ -79,6 +79,9 @@ function runDoctor(cwd) {
79
79
  // 14. AGENTS.md Standards References (v0.16.10)
80
80
  results.push(checkAgentsMdStandardsRefs(cwd));
81
81
 
82
+ // 15. .sdd-meta.json structural integrity (v0.17.0)
83
+ results.push(checkMetaJson(cwd, aiTools, projectType));
84
+
82
85
  return results;
83
86
  }
84
87
 
@@ -1013,6 +1016,151 @@ function checkAgentsMdStandardsRefs(cwd) {
1013
1016
  };
1014
1017
  }
1015
1018
 
1019
+ /**
1020
+ * Check #15 (v0.17.0): .sdd-meta.json structural integrity.
1021
+ *
1022
+ * v0.17.0 introduces content-addressable hashing via .sdd-meta.json to
1023
+ * track "the last time the tool wrote this file". The upgrade path uses
1024
+ * the hashes to answer "did the user edit since last tool-write" without
1025
+ * comparing against the new template's adapted output (which would drift
1026
+ * across versions — the Codex P1 from v0.16.10 cross-model review).
1027
+ *
1028
+ * This doctor check validates the METADATA STRUCTURE ONLY:
1029
+ * - Valid JSON
1030
+ * - schemaVersion ≤ current
1031
+ * - hashes is a sensible object shape
1032
+ * - Every hash value matches sha256:<64 hex>
1033
+ * - Every key that's NOT in the expected set is flagged as an orphan
1034
+ *
1035
+ * It does NOT validate hashes against current on-disk content (Codex M3
1036
+ * from plan v1.0 review). Hash mismatches are the EXPECTED result of
1037
+ * legitimate user customization; reporting them here would generate
1038
+ * permanent noise and bury real integrity issues.
1039
+ *
1040
+ * Severity:
1041
+ * - File absent → PASS with informational message (pre-v0.17.0 project)
1042
+ * - Present and valid → PASS
1043
+ * - Parse/shape errors → WARN (not FAIL — upgrade still falls back safely)
1044
+ * - Orphan entries → WARN (non-fatal, upgrade prunes them next run)
1045
+ */
1046
+ function checkMetaJson(cwd, aiTools, projectType) {
1047
+ const metaPath = path.join(cwd, '.sdd-meta.json');
1048
+ if (!fs.existsSync(metaPath)) {
1049
+ return {
1050
+ status: PASS,
1051
+ message: 'Provenance metadata: not present (pre-v0.17.0 project or fresh install)',
1052
+ details: [],
1053
+ };
1054
+ }
1055
+
1056
+ let raw;
1057
+ try {
1058
+ raw = fs.readFileSync(metaPath, 'utf8');
1059
+ } catch (e) {
1060
+ return {
1061
+ status: WARN,
1062
+ message: '.sdd-meta.json: unreadable',
1063
+ details: [e.code || e.message],
1064
+ };
1065
+ }
1066
+
1067
+ let parsed;
1068
+ try {
1069
+ parsed = JSON.parse(raw);
1070
+ } catch (e) {
1071
+ return {
1072
+ status: WARN,
1073
+ message: '.sdd-meta.json: invalid JSON',
1074
+ details: [e.message, 'Next upgrade will regenerate it via the fallback path.'],
1075
+ };
1076
+ }
1077
+
1078
+ if (typeof parsed !== 'object' || parsed === null || Array.isArray(parsed)) {
1079
+ return {
1080
+ status: WARN,
1081
+ message: '.sdd-meta.json: root is not an object',
1082
+ details: ['Next upgrade will regenerate it.'],
1083
+ };
1084
+ }
1085
+
1086
+ // Validate schemaVersion. Absent = v1 (forward-compat). Newer than
1087
+ // supported = still WARN (fallback path handles it).
1088
+ const {
1089
+ CURRENT_SCHEMA_VERSION,
1090
+ expectedSmartDiffTrackedPaths,
1091
+ } = require('./meta');
1092
+ const schemaVersion = parsed.schemaVersion ?? 1;
1093
+ if (typeof schemaVersion !== 'number' || schemaVersion < 1) {
1094
+ return {
1095
+ status: WARN,
1096
+ message: `.sdd-meta.json: invalid schemaVersion ${schemaVersion}`,
1097
+ details: [],
1098
+ };
1099
+ }
1100
+ if (schemaVersion > CURRENT_SCHEMA_VERSION) {
1101
+ return {
1102
+ status: WARN,
1103
+ message: `.sdd-meta.json: schemaVersion ${schemaVersion} newer than supported ${CURRENT_SCHEMA_VERSION}`,
1104
+ details: ['Upgrade the sdd-devflow CLI to the latest version.'],
1105
+ };
1106
+ }
1107
+
1108
+ const hashes = parsed.hashes;
1109
+ if (typeof hashes !== 'object' || hashes === null || Array.isArray(hashes)) {
1110
+ return {
1111
+ status: WARN,
1112
+ message: '.sdd-meta.json: hashes field is not an object',
1113
+ details: ['Next upgrade will regenerate it.'],
1114
+ };
1115
+ }
1116
+
1117
+ // Validate each entry's shape.
1118
+ const HASH_RE = /^sha256:[0-9a-f]{64}$/;
1119
+ const issues = [];
1120
+ for (const [k, v] of Object.entries(hashes)) {
1121
+ if (typeof k !== 'string') {
1122
+ issues.push(`invalid key: ${typeof k}`);
1123
+ continue;
1124
+ }
1125
+ // Reject absolute paths and `..` traversal.
1126
+ if (k.startsWith('/') || k.includes('..')) {
1127
+ issues.push(`suspicious key: ${k}`);
1128
+ continue;
1129
+ }
1130
+ if (typeof v !== 'string' || !HASH_RE.test(v)) {
1131
+ issues.push(`invalid hash for ${k}`);
1132
+ }
1133
+ }
1134
+
1135
+ if (issues.length > 0) {
1136
+ return {
1137
+ status: WARN,
1138
+ message: `.sdd-meta.json: ${issues.length} shape issue${issues.length > 1 ? 's' : ''}`,
1139
+ details: [...issues.slice(0, 5), 'Next upgrade will regenerate affected entries.'],
1140
+ };
1141
+ }
1142
+
1143
+ // Detect orphan entries (keys not expected for the current tool/type).
1144
+ const expected = expectedSmartDiffTrackedPaths(aiTools, projectType);
1145
+ const orphans = Object.keys(hashes).filter((k) => !expected.has(k));
1146
+ if (orphans.length > 0) {
1147
+ return {
1148
+ status: WARN,
1149
+ message: `.sdd-meta.json: ${orphans.length} orphan entr${orphans.length > 1 ? 'ies' : 'y'}`,
1150
+ details: [
1151
+ ...orphans.slice(0, 5).map((o) => `Orphan: ${o}`),
1152
+ 'Non-fatal — next upgrade will prune these automatically.',
1153
+ ],
1154
+ };
1155
+ }
1156
+
1157
+ return {
1158
+ status: PASS,
1159
+ message: `Provenance metadata: valid (${Object.keys(hashes).length} tracked files)`,
1160
+ details: [],
1161
+ };
1162
+ }
1163
+
1016
1164
  module.exports = {
1017
1165
  runDoctor,
1018
1166
  printResults,
package/lib/generator.js CHANGED
@@ -10,6 +10,7 @@ const {
10
10
  BACKEND_AGENTS,
11
11
  } = require('./config');
12
12
  const { adaptAgentContentForProjectType } = require('./adapt-agents');
13
+ const { writeMeta, computeInstallHashes } = require('./meta');
13
14
 
14
15
  function generate(config) {
15
16
  const templateDir = path.join(__dirname, '..', 'template');
@@ -95,6 +96,13 @@ function generate(config) {
95
96
  const pkg = JSON.parse(fs.readFileSync(path.join(__dirname, '..', 'package.json'), 'utf8'));
96
97
  fs.writeFileSync(path.join(dest, '.sdd-version'), pkg.version + '\n', 'utf8');
97
98
 
99
+ // v0.17.0: write provenance hashes. Captures whatever generator.js's
100
+ // pipeline produced (lighter than init-generator — adaptAgentsForStack
101
+ // only, no stack-adaptations module). First upgrade re-adapts via the
102
+ // scanner-driven pipeline; the hash answers "did the user edit since
103
+ // install" precisely regardless of which pipeline wrote the content.
104
+ writeMeta(dest, computeInstallHashes(dest, config.aiTools, config.projectType));
105
+
98
106
  // Show notes
99
107
  const notes = collectNotes(config);
100
108
  if (notes.length > 0) {
@@ -8,6 +8,8 @@ const {
8
8
  BACKEND_AGENTS,
9
9
  } = require('./config');
10
10
  const { adaptAgentContentForProjectType } = require('./adapt-agents');
11
+ const { applyStackAdaptations } = require('./stack-adaptations');
12
+ const { writeMeta, computeInstallHashes } = require('./meta');
11
13
 
12
14
  /**
13
15
  * Install SDD DevFlow into an existing project.
@@ -252,6 +254,24 @@ function generateInit(config) {
252
254
  const pkg = JSON.parse(fs.readFileSync(path.join(__dirname, '..', 'package.json'), 'utf8'));
253
255
  fs.writeFileSync(path.join(dest, '.sdd-version'), pkg.version + '\n', 'utf8');
254
256
 
257
+ // v0.17.0: write provenance hashes. Computed AFTER all adaptations
258
+ // so the hashes reflect the exact post-install content on disk. The
259
+ // next `--upgrade` uses these hashes to answer "did the user edit
260
+ // this file?" precisely, avoiding cross-version false-positive
261
+ // preserve warnings (Codex P1 from v0.16.10 plan review).
262
+ //
263
+ // Codex round 2 P1 fix: exclude pre-existing files that --init skipped.
264
+ // Those belong to the user, not to SDD DevFlow, and must NOT be marked
265
+ // as tool-canonical — otherwise the next upgrade would hash-match the
266
+ // user's content and silently overwrite it. `skipped` already contains
267
+ // the relative paths of files that were skipped due to pre-existence;
268
+ // convert them to POSIX form and pass as the exclude set.
269
+ const skippedPosix = new Set(skipped.map((p) => p.split('\\').join('/')));
270
+ writeMeta(
271
+ dest,
272
+ computeInstallHashes(dest, config.aiTools, config.projectType, skippedPosix)
273
+ );
274
+
255
275
  // Done
256
276
  console.log(`\nDone! Next steps:`);
257
277
  console.log(` git add -A && git commit -m "chore: add SDD DevFlow to existing project"`);
@@ -343,163 +363,22 @@ function regexReplaceInFile(filePath, replacements) {
343
363
  }
344
364
 
345
365
  function adaptCopiedFiles(dest, scan, config) {
346
- const orm = scan.backend.orm || 'your ORM';
347
- const db = scan.backend.db || 'your database';
348
-
349
- // Common Zod generic validation replacements (all agents + skills)
350
- // Phase 1: Replace "Zod" with "validation" (most specific first)
351
- const zodReplacements = [
352
- ['Zod data schemas', 'validation schemas'],
353
- ['Zod schemas', 'validation schemas'],
354
- ];
355
- // Phase 2: Clean up shared/src/schemas/ path (Zod-specific convention)
356
- // Applied AFTER phase 1, so these match the post-replacement text
357
- const schemaPathReplacements = [
358
- ['validation schemas in `shared/src/schemas/` if applicable', 'validation schemas if applicable'],
359
- ['validation schemas in `shared/src/schemas/` (if shared workspace exists)', 'validation schemas (if shared workspace exists)'],
360
- ['validation schemas in `shared/src/schemas/`', 'validation schemas'],
361
- ['validation schemas (`shared/src/schemas/`)', 'validation schemas'],
362
- ['`shared/src/schemas/` (if exists) for current validation schemas', 'project validation schemas'],
363
- // Gemini spec-creator: no "Zod" prefix, standalone path reference
364
- ['and `shared/src/schemas/` (if exists)', ''],
365
- ['schemas vs `shared/src/schemas/`', 'validation schemas up to date'],
366
- ];
367
-
368
- // ORM/DB replacements for backend agents
369
- let ormReplacements = [];
370
- if (scan.backend.orm && scan.backend.orm !== 'Prisma') {
371
- ormReplacements = [
372
- ['Prisma ORM, and PostgreSQL', `${orm}${db !== 'your database' ? `, and ${db}` : ''}`],
373
- ['Repository implementations (Prisma)', `Repository implementations (${orm})`],
374
- ];
375
- } else if (!scan.backend.orm) {
376
- // No ORM detected — remove Prisma references with generic text
377
- const dbLabel = db !== 'your database' ? `, and ${db}` : '';
378
- ormReplacements = [
379
- ['Prisma ORM, and PostgreSQL', dbLabel ? dbLabel.slice(6) : 'your database'],
380
- ['Repository implementations (Prisma)', 'Repository implementations'],
381
- ];
382
- }
383
-
384
- // Apply to all AI tool directories
385
- const toolDirs = [];
386
- if (config.aiTools !== 'gemini') toolDirs.push('.claude');
387
- if (config.aiTools !== 'claude') toolDirs.push('.gemini');
388
-
389
- for (const dir of toolDirs) {
390
- // Backend agents: Zod + ORM replacements
391
- if (scan.backend.validation !== 'Zod') {
392
- const backendAgentReplacements = [...zodReplacements, ...ormReplacements];
393
- replaceInCopiedFile(dest, `${dir}/agents/backend-developer.md`, backendAgentReplacements);
394
- replaceInCopiedFile(dest, `${dir}/agents/backend-planner.md`, backendAgentReplacements);
395
- // Phase 2: clean up shared/src/schemas/ paths
396
- replaceInCopiedFile(dest, `${dir}/agents/backend-developer.md`, schemaPathReplacements);
397
- replaceInCopiedFile(dest, `${dir}/agents/backend-planner.md`, schemaPathReplacements);
398
- } else if (ormReplacements.length > 0) {
399
- // Zod detected but different ORM — only ORM replacements
400
- replaceInCopiedFile(dest, `${dir}/agents/backend-developer.md`, ormReplacements);
401
- replaceInCopiedFile(dest, `${dir}/agents/backend-planner.md`, ormReplacements);
402
- }
403
-
404
- // Multi-purpose agents: Zod replacements only
405
- if (scan.backend.validation !== 'Zod') {
406
- const allZodReplacements = [...zodReplacements, ...schemaPathReplacements];
407
- replaceInCopiedFile(dest, `${dir}/agents/spec-creator.md`, allZodReplacements);
408
- replaceInCopiedFile(dest, `${dir}/agents/production-code-validator.md`, allZodReplacements);
409
- replaceInCopiedFile(dest, `${dir}/agents/database-architect.md`, allZodReplacements);
410
- }
411
-
412
- // Skills: Zod + schema path replacements
413
- if (scan.backend.validation !== 'Zod') {
414
- const allZodReplacements = [...zodReplacements, ...schemaPathReplacements];
415
- replaceInCopiedFile(dest, `${dir}/skills/development-workflow/SKILL.md`, allZodReplacements);
416
- replaceInCopiedFile(dest, `${dir}/skills/development-workflow/references/ticket-template.md`, allZodReplacements);
417
- }
418
- }
419
-
420
- // Architecture adaptation: DDD-specific content in backend agents
421
- const arch = scan.srcStructure ? scan.srcStructure.pattern : 'ddd';
422
- if (arch !== 'ddd') {
423
- for (const dir of toolDirs) {
424
- // backend-planner: adapt header, exploration paths, implementation order, rules
425
- regexReplaceInFile(path.join(dest, dir, 'agents', 'backend-planner.md'), [
426
- // Header: remove DDD reference (Claude verbose format)
427
- ['specializing in Domain-Driven Design (DDD) layered architecture with deep knowledge of',
428
- 'specializing in layered architecture with deep knowledge of'],
429
- // Header: remove DDD reference (Gemini condensed format)
430
- ['(DDD architecture)', '(layered architecture)'],
431
- // Exploration: replace DDD-specific paths with generic (number-agnostic)
432
- [/\d+\. Read `shared\/src\/schemas\/` \(if exists\) for current .* (?:data )?schemas\n/, ''],
433
- [/\d+\. Explore existing domain entities, services, validators, repositories\n/,
434
- '5. Explore the codebase for existing patterns, layer structure, and reusable code\n'],
435
- [/\d+\. Explore `backend\/src\/infrastructure\/` for existing repositories\n/, ''],
436
- // Implementation Order (Claude format)
437
- ['following DDD layer order: Domain > Application > Infrastructure > Presentation > Tests',
438
- 'following the layer order defined in backend-standards.mdc'],
439
- // Implementation Order (Gemini format)
440
- ['Implementation Order (Domain > Application > Infrastructure > Presentation > Tests)',
441
- 'Implementation Order (see backend-standards.mdc for layer order)'],
442
- // Rules (Claude format)
443
- ['Follow DDD layer separation: Domain > Application > Infrastructure > Presentation',
444
- 'Follow the layer separation defined in backend-standards.mdc'],
445
- ]);
446
- // backend-developer: adapt frontmatter, header, exploration, implementation order, rules
447
- regexReplaceInFile(path.join(dest, dir, 'agents', 'backend-developer.md'), [
448
- // Frontmatter (Claude format)
449
- ['follows DDD layered architecture',
450
- 'follows layered architecture'],
451
- // Header (Claude format)
452
- ['specializing in Domain-Driven Design (DDD) with',
453
- 'specializing in layered architecture with'],
454
- // Header (Gemini condensed format)
455
- ['(DDD architecture)', '(layered architecture)'],
456
- // Exploration: remove shared/src/schemas reference (number-agnostic)
457
- [/\d+\. Read `shared\/src\/schemas\/` \(if exists\) for current .* (?:data )?schemas\n/, ''],
458
- // Implementation Order (Claude format): replace DDD layers
459
- ['Follow the DDD layer order from the plan:',
460
- 'Follow the layer order from the plan (see backend-standards.mdc for project layers):'],
461
- [/\d+\. \*\*Domain Layer\*\*: Entities, value objects, repository interfaces, domain errors\n/,
462
- '1. **Data Layer**: Models, database operations, data access\n'],
463
- [/\d+\. \*\*Application Layer\*\*: Services, validators, DTOs\n/,
464
- '2. **Business Logic Layer**: Controllers, services, external integrations\n'],
465
- [/\d+\. \*\*Infrastructure Layer\*\*: Repository implementations \([^)]*\), external integrations\n/,
466
- '3. **Presentation Layer**: Routes, handlers, middleware\n'],
467
- [/\d+\. \*\*Presentation Layer\*\*: Controllers, routes, middleware\n/,
468
- '4. **Integration Layer**: Wiring, configuration, server registration\n'],
469
- // Implementation Order (Gemini format)
470
- ['Follow DDD layer order: Domain > Application > Infrastructure > Presentation.',
471
- 'Follow the layer order defined in backend-standards.mdc.'],
472
- // Rules (Claude format)
473
- ['**ALWAYS** follow DDD layer separation',
474
- '**ALWAYS** follow the layer separation defined in backend-standards.mdc'],
475
- ['**ALWAYS** handle errors with custom domain error classes',
476
- '**ALWAYS** handle errors following the patterns in backend-standards.mdc'],
477
- // Rules (Gemini format)
478
- ['ALWAYS handle errors with domain error classes',
479
- 'ALWAYS handle errors following the patterns in backend-standards.mdc'],
480
- // Documentation: remove shared/src/schemas mandatory update
481
- [/- (?:\*\*MANDATORY\*\*: )?If modifying a DB schema → update .* schemas in `shared\/src\/schemas\/` BEFORE continuing\n/, ''],
482
- ]);
483
- }
484
- }
485
-
486
- // Agent/skill content: remove frontend/backend-specific references for single-stack projects
366
+ // v0.17.0: delegate stack-specific adaptations (Zod → validation,
367
+ // ORM swap, DDD layered, documentation-standards.mdc project-type
368
+ // pruning) to the shared module in lib/stack-adaptations.js. This
369
+ // makes the exact same transformation available to upgrade-generator.js
370
+ // for the hash-based smart-diff replacement path.
371
+ //
372
+ // The in-memory variant (applyStackAdaptationsToContent) is also used
373
+ // by the upgrade fallback path to construct the "what init would have
374
+ // written" comparison target when .sdd-meta.json is missing (Gemini M1
375
+ // fix from plan v1.0 cross-model review).
376
+ applyStackAdaptations(dest, scan, config, null);
377
+
378
+ // Remove frontend/backend-specific references for single-stack
379
+ // projects. Separate from stack adaptations: this is project-type
380
+ // pruning, not stack substitution.
487
381
  adaptAgentContentForProjectType(dest, config, regexReplaceInFile);
488
-
489
- // documentation-standards.mdc: remove irrelevant rows based on project type
490
- const docStdPath = path.join(dest, 'ai-specs', 'specs', 'documentation-standards.mdc');
491
- if (fs.existsSync(docStdPath)) {
492
- let content = fs.readFileSync(docStdPath, 'utf8');
493
- if (config.projectType === 'backend') {
494
- content = content.replace(/\| `ai-specs\/specs\/frontend-standards\.mdc` \|[^\n]*\n/, '');
495
- content = content.replace(/\| `docs\/specs\/ui-components\.md` \|[^\n]*\n/, '');
496
- content = content.replace(/ - UI component changes → `docs\/specs\/ui-components\.md`\n/, '');
497
- } else if (config.projectType === 'frontend') {
498
- content = content.replace(/\| `ai-specs\/specs\/backend-standards\.mdc` \|[^\n]*\n/, '');
499
- content = content.replace(/\| `docs\/specs\/api-spec\.yaml` \|[^\n]*\n/, '');
500
- }
501
- fs.writeFileSync(docStdPath, content, 'utf8');
502
- }
503
382
  }
504
383
 
505
384
  // --- Standards Adaptation ---
@@ -762,7 +641,14 @@ function adaptAgentsMd(template, config, scan) {
762
641
  return norm !== 'docs' && norm !== 'ai-specs';
763
642
  });
764
643
  if (meaningfulDirs.length > 0) {
765
- const tree = rootDirs.map((d) => `├── ${d.replace(/\/$/, '/')} `).join('\n');
644
+ // v0.17.0: use meaningfulDirs (not rootDirs) to build the tree. The
645
+ // SDD-infrastructure dirs (ai-specs/, docs/) get installed BY init
646
+ // itself, so at install time they're absent from the scan but at
647
+ // upgrade time they're present. Using rootDirs would produce
648
+ // different content in each scan → hash drift → false-positive
649
+ // preserve warnings on every upgrade. Using meaningfulDirs stabilizes
650
+ // the output across install and upgrade for the same project.
651
+ const tree = meaningfulDirs.map((d) => `├── ${d.replace(/\/$/, '/')} `).join('\n');
766
652
  const treeBlock = `\`\`\`\nproject/\n${tree}\n└── docs/ ← Documentation\n\`\`\``;
767
653
  content = content.replace(
768
654
  /<!-- CONFIG: Adjust directories[^>]*-->\n+```\nproject\/\n[\s\S]*?```/,
@@ -1069,9 +955,25 @@ function appendGitignore(dest, skipped) {
1069
955
  const sddEntries = '\n# SDD DevFlow\ndocs/tickets/*.md\n!docs/tickets/.gitkeep\n';
1070
956
 
1071
957
  if (fs.existsSync(gitignorePath)) {
1072
- const content = fs.readFileSync(gitignorePath, 'utf8');
958
+ let content = fs.readFileSync(gitignorePath, 'utf8');
959
+ let appended = false;
1073
960
  if (!content.includes('SDD DevFlow')) {
1074
- fs.appendFileSync(gitignorePath, sddEntries, 'utf8');
961
+ content = content.trimEnd() + sddEntries;
962
+ appended = true;
963
+ }
964
+ // v0.17.0: ensure .sdd-meta.json + .sdd-backup/ are ignored (local-only
965
+ // metadata). Codex round 2 P2 fix — --init must also append these
966
+ // entries idempotently, matching the upgrade path.
967
+ if (!/^\s*\/?\.sdd-backup\/?\s*$/m.test(content)) {
968
+ content = content.trimEnd() + '\n\n# sdd-devflow upgrade backups (ignored — kept locally for recovery only)\n.sdd-backup/\n';
969
+ appended = true;
970
+ }
971
+ if (!/^\s*\/?\.sdd-meta\.json\s*$/m.test(content)) {
972
+ content = content.trimEnd() + '\n\n# sdd-devflow provenance tracking (local-only, content-addressable hashes)\n.sdd-meta.json\n';
973
+ appended = true;
974
+ }
975
+ if (appended) {
976
+ fs.writeFileSync(gitignorePath, content, 'utf8');
1075
977
  step('Appended SDD entries to .gitignore');
1076
978
  }
1077
979
  } else {