@contractspec/module.workspace 1.44.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +50 -0
- package/dist/ai/code-generation.d.ts +28 -0
- package/dist/ai/code-generation.d.ts.map +1 -0
- package/dist/ai/code-generation.js +138 -0
- package/dist/ai/code-generation.js.map +1 -0
- package/dist/ai/spec-creation.d.ts +27 -0
- package/dist/ai/spec-creation.d.ts.map +1 -0
- package/dist/ai/spec-creation.js +102 -0
- package/dist/ai/spec-creation.js.map +1 -0
- package/dist/analysis/deps/graph.d.ts +34 -0
- package/dist/analysis/deps/graph.d.ts.map +1 -0
- package/dist/analysis/deps/graph.js +85 -0
- package/dist/analysis/deps/graph.js.map +1 -0
- package/dist/analysis/deps/parse-imports.d.ts +17 -0
- package/dist/analysis/deps/parse-imports.d.ts.map +1 -0
- package/dist/analysis/deps/parse-imports.js +31 -0
- package/dist/analysis/deps/parse-imports.js.map +1 -0
- package/dist/analysis/diff/deep-diff.d.ts +33 -0
- package/dist/analysis/diff/deep-diff.d.ts.map +1 -0
- package/dist/analysis/diff/deep-diff.js +114 -0
- package/dist/analysis/diff/deep-diff.js.map +1 -0
- package/dist/analysis/diff/semantic.d.ts +11 -0
- package/dist/analysis/diff/semantic.d.ts.map +1 -0
- package/dist/analysis/diff/semantic.js +97 -0
- package/dist/analysis/diff/semantic.js.map +1 -0
- package/dist/analysis/feature-scan.d.ts +15 -0
- package/dist/analysis/feature-scan.d.ts.map +1 -0
- package/dist/analysis/feature-scan.js +152 -0
- package/dist/analysis/feature-scan.js.map +1 -0
- package/dist/analysis/grouping.d.ts +79 -0
- package/dist/analysis/grouping.d.ts.map +1 -0
- package/dist/analysis/grouping.js +115 -0
- package/dist/analysis/grouping.js.map +1 -0
- package/dist/analysis/impact/classifier.d.ts +19 -0
- package/dist/analysis/impact/classifier.d.ts.map +1 -0
- package/dist/analysis/impact/classifier.js +135 -0
- package/dist/analysis/impact/classifier.js.map +1 -0
- package/dist/analysis/impact/index.js +2 -0
- package/dist/analysis/impact/rules.d.ts +35 -0
- package/dist/analysis/impact/rules.d.ts.map +1 -0
- package/dist/analysis/impact/rules.js +154 -0
- package/dist/analysis/impact/rules.js.map +1 -0
- package/dist/analysis/impact/types.d.ts +95 -0
- package/dist/analysis/impact/types.d.ts.map +1 -0
- package/dist/analysis/index.js +14 -0
- package/dist/analysis/snapshot/index.js +2 -0
- package/dist/analysis/snapshot/normalizer.d.ts +36 -0
- package/dist/analysis/snapshot/normalizer.d.ts.map +1 -0
- package/dist/analysis/snapshot/normalizer.js +66 -0
- package/dist/analysis/snapshot/normalizer.js.map +1 -0
- package/dist/analysis/snapshot/snapshot.d.ts +18 -0
- package/dist/analysis/snapshot/snapshot.d.ts.map +1 -0
- package/dist/analysis/snapshot/snapshot.js +163 -0
- package/dist/analysis/snapshot/snapshot.js.map +1 -0
- package/dist/analysis/snapshot/types.d.ts +80 -0
- package/dist/analysis/snapshot/types.d.ts.map +1 -0
- package/dist/analysis/spec-scan.d.ts +34 -0
- package/dist/analysis/spec-scan.d.ts.map +1 -0
- package/dist/analysis/spec-scan.js +349 -0
- package/dist/analysis/spec-scan.js.map +1 -0
- package/dist/analysis/validate/spec-structure.d.ts +29 -0
- package/dist/analysis/validate/spec-structure.d.ts.map +1 -0
- package/dist/analysis/validate/spec-structure.js +139 -0
- package/dist/analysis/validate/spec-structure.js.map +1 -0
- package/dist/formatter.d.ts +42 -0
- package/dist/formatter.d.ts.map +1 -0
- package/dist/formatter.js +163 -0
- package/dist/formatter.js.map +1 -0
- package/dist/index.d.ts +35 -0
- package/dist/index.js +33 -0
- package/dist/templates/app-config.d.ts +7 -0
- package/dist/templates/app-config.d.ts.map +1 -0
- package/dist/templates/app-config.js +106 -0
- package/dist/templates/app-config.js.map +1 -0
- package/dist/templates/data-view.d.ts +7 -0
- package/dist/templates/data-view.d.ts.map +1 -0
- package/dist/templates/data-view.js +69 -0
- package/dist/templates/data-view.js.map +1 -0
- package/dist/templates/event.d.ts +11 -0
- package/dist/templates/event.d.ts.map +1 -0
- package/dist/templates/event.js +41 -0
- package/dist/templates/event.js.map +1 -0
- package/dist/templates/experiment.d.ts +7 -0
- package/dist/templates/experiment.d.ts.map +1 -0
- package/dist/templates/experiment.js +88 -0
- package/dist/templates/experiment.js.map +1 -0
- package/dist/templates/handler.d.ts +20 -0
- package/dist/templates/handler.d.ts.map +1 -0
- package/dist/templates/handler.js +96 -0
- package/dist/templates/handler.js.map +1 -0
- package/dist/templates/integration-utils.js +105 -0
- package/dist/templates/integration-utils.js.map +1 -0
- package/dist/templates/integration.d.ts +7 -0
- package/dist/templates/integration.d.ts.map +1 -0
- package/dist/templates/integration.js +63 -0
- package/dist/templates/integration.js.map +1 -0
- package/dist/templates/knowledge.d.ts +7 -0
- package/dist/templates/knowledge.d.ts.map +1 -0
- package/dist/templates/knowledge.js +69 -0
- package/dist/templates/knowledge.js.map +1 -0
- package/dist/templates/migration.d.ts +7 -0
- package/dist/templates/migration.d.ts.map +1 -0
- package/dist/templates/migration.js +61 -0
- package/dist/templates/migration.js.map +1 -0
- package/dist/templates/operation.d.ts +11 -0
- package/dist/templates/operation.d.ts.map +1 -0
- package/dist/templates/operation.js +101 -0
- package/dist/templates/operation.js.map +1 -0
- package/dist/templates/presentation.d.ts +11 -0
- package/dist/templates/presentation.d.ts.map +1 -0
- package/dist/templates/presentation.js +79 -0
- package/dist/templates/presentation.js.map +1 -0
- package/dist/templates/telemetry.d.ts +7 -0
- package/dist/templates/telemetry.d.ts.map +1 -0
- package/dist/templates/telemetry.js +90 -0
- package/dist/templates/telemetry.js.map +1 -0
- package/dist/templates/utils.d.ts +27 -0
- package/dist/templates/utils.d.ts.map +1 -0
- package/dist/templates/utils.js +39 -0
- package/dist/templates/utils.js.map +1 -0
- package/dist/templates/workflow-runner.d.ts +16 -0
- package/dist/templates/workflow-runner.d.ts.map +1 -0
- package/dist/templates/workflow-runner.js +49 -0
- package/dist/templates/workflow-runner.js.map +1 -0
- package/dist/templates/workflow.d.ts +11 -0
- package/dist/templates/workflow.d.ts.map +1 -0
- package/dist/templates/workflow.js +68 -0
- package/dist/templates/workflow.js.map +1 -0
- package/dist/types/analysis-types.d.ts +126 -0
- package/dist/types/analysis-types.d.ts.map +1 -0
- package/dist/types/generation-types.d.ts +84 -0
- package/dist/types/generation-types.d.ts.map +1 -0
- package/dist/types/generation-types.js +21 -0
- package/dist/types/generation-types.js.map +1 -0
- package/dist/types/spec-types.d.ts +345 -0
- package/dist/types/spec-types.d.ts.map +1 -0
- package/package.json +55 -0
package/dist/analysis/diff/deep-diff.js
@@ -0,0 +1,114 @@
//#region src/analysis/diff/deep-diff.ts
/**
 * Compute deep differences between two IO schemas.
 */
function computeIoDiff(base, head, options = {}) {
  const diffs = [];
  diffs.push(...computeFieldsDiff(base.input, head.input, "io.input", options));
  diffs.push(...computeFieldsDiff(base.output, head.output, "io.output", options));
  return options.breakingOnly ? diffs.filter((d) => d.type === "breaking") : diffs;
}
/**
 * Compute differences between two field maps.
 */
function computeFieldsDiff(baseFields, headFields, pathPrefix, options = {}) {
  const diffs = [];
  const baseNames = new Set(Object.keys(baseFields));
  const headNames = new Set(Object.keys(headFields));
  for (const name of baseNames) if (!headNames.has(name)) {
    const baseField = baseFields[name];
    diffs.push({
      type: "breaking",
      path: `${pathPrefix}.${name}`,
      oldValue: baseField,
      newValue: void 0,
      description: `Field '${name}' was removed`
    });
  }
  for (const name of headNames) if (!baseNames.has(name)) {
    const headField = headFields[name];
    const isBreaking = headField?.required === true;
    diffs.push({
      type: isBreaking ? "breaking" : "added",
      path: `${pathPrefix}.${name}`,
      oldValue: void 0,
      newValue: headField,
      description: isBreaking ? `Required field '${name}' was added` : `Optional field '${name}' was added`
    });
  }
  for (const name of baseNames) if (headNames.has(name)) {
    const baseField = baseFields[name];
    const headField = headFields[name];
    if (baseField && headField) diffs.push(...computeFieldDiff(baseField, headField, `${pathPrefix}.${name}`, options));
  }
  return diffs;
}
/**
 * Compute differences between two field definitions.
 */
function computeFieldDiff(base, head, path, _options = {}) {
  const diffs = [];
  if (base.type !== head.type) diffs.push({
    type: "breaking",
    path: `${path}.type`,
    oldValue: base.type,
    newValue: head.type,
    description: `Field type changed from '${base.type}' to '${head.type}'`
  });
  if (base.required !== head.required) {
    const isBreaking = !base.required && head.required;
    diffs.push({
      type: isBreaking ? "breaking" : "changed",
      path: `${path}.required`,
      oldValue: base.required,
      newValue: head.required,
      description: isBreaking ? `Field '${base.name}' changed from optional to required` : `Field '${base.name}' changed from required to optional`
    });
  }
  if (base.nullable !== head.nullable) {
    const isBreaking = base.nullable && !head.nullable;
    diffs.push({
      type: isBreaking ? "breaking" : "changed",
      path: `${path}.nullable`,
      oldValue: base.nullable,
      newValue: head.nullable,
      description: isBreaking ? `Field '${base.name}' is no longer nullable` : `Field '${base.name}' is now nullable`
    });
  }
  if (base.type === "enum" && head.type === "enum") {
    const baseValues = new Set(base.enumValues ?? []);
    const headValues = new Set(head.enumValues ?? []);
    for (const value of baseValues) if (!headValues.has(value)) diffs.push({
      type: "breaking",
      path: `${path}.enumValues`,
      oldValue: base.enumValues,
      newValue: head.enumValues,
      description: `Enum value '${value}' was removed`
    });
    for (const value of headValues) if (!baseValues.has(value)) diffs.push({
      type: "added",
      path: `${path}.enumValues`,
      oldValue: base.enumValues,
      newValue: head.enumValues,
      description: `Enum value '${value}' was added`
    });
  }
  if (base.type === "object" && head.type === "object" && base.properties && head.properties) diffs.push(...computeFieldsDiff(base.properties, head.properties, path, _options));
  if (base.type === "array" && head.type === "array" && base.items && head.items) diffs.push(...computeFieldDiff(base.items, head.items, `${path}.items`, _options));
  return diffs;
}
/**
 * Classify a diff as breaking based on context.
 */
function isBreakingChange(diff, context) {
  if (context === "output") return diff.type === "breaking" || diff.type === "removed";
  if (context === "input") {
    if (diff.type === "added" && diff.description?.includes("Required field")) return true;
    return diff.type === "breaking";
  }
  return diff.type === "breaking";
}

//#endregion
export { computeFieldDiff, computeFieldsDiff, computeIoDiff, isBreakingChange };
//# sourceMappingURL=deep-diff.js.map
package/dist/analysis/diff/deep-diff.js.map
@@ -0,0 +1 @@
{"version":3,"file":"deep-diff.js","names":["diffs: SemanticDiffItem[]"],"sources":["../../../src/analysis/diff/deep-diff.ts"],"sourcesContent":["/**\n * Deep diff engine for IO schema comparison.\n *\n * Compares input/output schemas field-by-field to detect\n * breaking and non-breaking changes.\n */\n\nimport type { FieldSnapshot, IoSnapshot } from '../snapshot/types';\nimport type { SemanticDiffItem } from '../../types/analysis-types';\n\n/**\n * Deep diff options.\n */\nexport interface DeepDiffOptions {\n /** Only report breaking changes */\n breakingOnly?: boolean;\n /** Path prefix for nested diffs */\n pathPrefix?: string;\n}\n\n/**\n * Compute deep differences between two IO schemas.\n */\nexport function computeIoDiff(\n base: IoSnapshot,\n head: IoSnapshot,\n options: DeepDiffOptions = {}\n): SemanticDiffItem[] {\n const diffs: SemanticDiffItem[] = [];\n\n // Compare input schemas\n diffs.push(...computeFieldsDiff(base.input, head.input, 'io.input', options));\n\n // Compare output schemas\n diffs.push(\n ...computeFieldsDiff(base.output, head.output, 'io.output', options)\n );\n\n return options.breakingOnly\n ? diffs.filter((d) => d.type === 'breaking')\n : diffs;\n}\n\n/**\n * Compute differences between two field maps.\n */\nexport function computeFieldsDiff(\n baseFields: Record<string, FieldSnapshot>,\n headFields: Record<string, FieldSnapshot>,\n pathPrefix: string,\n options: DeepDiffOptions = {}\n): SemanticDiffItem[] {\n const diffs: SemanticDiffItem[] = [];\n const baseNames = new Set(Object.keys(baseFields));\n const headNames = new Set(Object.keys(headFields));\n\n // Check for removed fields\n for (const name of baseNames) {\n if (!headNames.has(name)) {\n const baseField = baseFields[name];\n diffs.push({\n type: 'breaking',\n path: `${pathPrefix}.${name}`,\n oldValue: baseField,\n newValue: undefined,\n description: `Field '${name}' was removed`,\n });\n }\n }\n\n // Check for added fields\n for (const name of headNames) {\n if (!baseNames.has(name)) {\n const headField = headFields[name];\n const isBreaking = headField?.required === true;\n diffs.push({\n type: isBreaking ? 'breaking' : 'added',\n path: `${pathPrefix}.${name}`,\n oldValue: undefined,\n newValue: headField,\n description: isBreaking\n ? `Required field '${name}' was added`\n : `Optional field '${name}' was added`,\n });\n }\n }\n\n // Check for changed fields\n for (const name of baseNames) {\n if (headNames.has(name)) {\n const baseField = baseFields[name];\n const headField = headFields[name];\n if (baseField && headField) {\n diffs.push(\n ...computeFieldDiff(\n baseField,\n headField,\n `${pathPrefix}.${name}`,\n options\n )\n );\n }\n }\n }\n\n return diffs;\n}\n\n/**\n * Compute differences between two field definitions.\n */\nexport function computeFieldDiff(\n base: FieldSnapshot,\n head: FieldSnapshot,\n path: string,\n _options: DeepDiffOptions = {}\n): SemanticDiffItem[] {\n const diffs: SemanticDiffItem[] = [];\n\n // Type change is always breaking\n if (base.type !== head.type) {\n diffs.push({\n type: 'breaking',\n path: `${path}.type`,\n oldValue: base.type,\n newValue: head.type,\n description: `Field type changed from '${base.type}' to '${head.type}'`,\n });\n }\n\n // Required change\n if (base.required !== head.required) {\n const isBreaking = !base.required && head.required; // Optional -> Required is breaking\n diffs.push({\n type: isBreaking ? 
'breaking' : 'changed',\n path: `${path}.required`,\n oldValue: base.required,\n newValue: head.required,\n description: isBreaking\n ? `Field '${base.name}' changed from optional to required`\n : `Field '${base.name}' changed from required to optional`,\n });\n }\n\n // Nullable change\n if (base.nullable !== head.nullable) {\n const isBreaking = base.nullable && !head.nullable; // Nullable -> Non-nullable is breaking\n diffs.push({\n type: isBreaking ? 'breaking' : 'changed',\n path: `${path}.nullable`,\n oldValue: base.nullable,\n newValue: head.nullable,\n description: isBreaking\n ? `Field '${base.name}' is no longer nullable`\n : `Field '${base.name}' is now nullable`,\n });\n }\n\n // Enum values change\n if (base.type === 'enum' && head.type === 'enum') {\n const baseValues = new Set(base.enumValues ?? []);\n const headValues = new Set(head.enumValues ?? []);\n\n // Removed enum values are breaking\n for (const value of baseValues) {\n if (!headValues.has(value)) {\n diffs.push({\n type: 'breaking',\n path: `${path}.enumValues`,\n oldValue: base.enumValues,\n newValue: head.enumValues,\n description: `Enum value '${value}' was removed`,\n });\n }\n }\n\n // Added enum values are non-breaking\n for (const value of headValues) {\n if (!baseValues.has(value)) {\n diffs.push({\n type: 'added',\n path: `${path}.enumValues`,\n oldValue: base.enumValues,\n newValue: head.enumValues,\n description: `Enum value '${value}' was added`,\n });\n }\n }\n }\n\n // Nested object fields\n if (\n base.type === 'object' &&\n head.type === 'object' &&\n base.properties &&\n head.properties\n ) {\n diffs.push(\n ...computeFieldsDiff(base.properties, head.properties, path, _options)\n );\n }\n\n // Array items\n if (\n base.type === 'array' &&\n head.type === 'array' &&\n base.items &&\n head.items\n ) {\n diffs.push(\n ...computeFieldDiff(base.items, head.items, `${path}.items`, _options)\n );\n }\n\n return diffs;\n}\n\n/**\n * Classify a diff as breaking based on context.\n */\nexport function isBreakingChange(\n diff: SemanticDiffItem,\n context: 'input' | 'output'\n): boolean {\n // In output context, removing/changing fields is always breaking\n if (context === 'output') {\n return diff.type === 'breaking' || diff.type === 'removed';\n }\n\n // In input context, adding required fields is breaking\n if (context === 'input') {\n if (diff.type === 'added' && diff.description?.includes('Required field')) {\n return true;\n }\n return diff.type === 'breaking';\n }\n\n return diff.type === 
'breaking';\n}\n"],"mappings":";;;;AAuBA,SAAgB,cACd,MACA,MACA,UAA2B,EAAE,EACT;CACpB,MAAMA,QAA4B,EAAE;AAGpC,OAAM,KAAK,GAAG,kBAAkB,KAAK,OAAO,KAAK,OAAO,YAAY,QAAQ,CAAC;AAG7E,OAAM,KACJ,GAAG,kBAAkB,KAAK,QAAQ,KAAK,QAAQ,aAAa,QAAQ,CACrE;AAED,QAAO,QAAQ,eACX,MAAM,QAAQ,MAAM,EAAE,SAAS,WAAW,GAC1C;;;;;AAMN,SAAgB,kBACd,YACA,YACA,YACA,UAA2B,EAAE,EACT;CACpB,MAAMA,QAA4B,EAAE;CACpC,MAAM,YAAY,IAAI,IAAI,OAAO,KAAK,WAAW,CAAC;CAClD,MAAM,YAAY,IAAI,IAAI,OAAO,KAAK,WAAW,CAAC;AAGlD,MAAK,MAAM,QAAQ,UACjB,KAAI,CAAC,UAAU,IAAI,KAAK,EAAE;EACxB,MAAM,YAAY,WAAW;AAC7B,QAAM,KAAK;GACT,MAAM;GACN,MAAM,GAAG,WAAW,GAAG;GACvB,UAAU;GACV,UAAU;GACV,aAAa,UAAU,KAAK;GAC7B,CAAC;;AAKN,MAAK,MAAM,QAAQ,UACjB,KAAI,CAAC,UAAU,IAAI,KAAK,EAAE;EACxB,MAAM,YAAY,WAAW;EAC7B,MAAM,aAAa,WAAW,aAAa;AAC3C,QAAM,KAAK;GACT,MAAM,aAAa,aAAa;GAChC,MAAM,GAAG,WAAW,GAAG;GACvB,UAAU;GACV,UAAU;GACV,aAAa,aACT,mBAAmB,KAAK,eACxB,mBAAmB,KAAK;GAC7B,CAAC;;AAKN,MAAK,MAAM,QAAQ,UACjB,KAAI,UAAU,IAAI,KAAK,EAAE;EACvB,MAAM,YAAY,WAAW;EAC7B,MAAM,YAAY,WAAW;AAC7B,MAAI,aAAa,UACf,OAAM,KACJ,GAAG,iBACD,WACA,WACA,GAAG,WAAW,GAAG,QACjB,QACD,CACF;;AAKP,QAAO;;;;;AAMT,SAAgB,iBACd,MACA,MACA,MACA,WAA4B,EAAE,EACV;CACpB,MAAMA,QAA4B,EAAE;AAGpC,KAAI,KAAK,SAAS,KAAK,KACrB,OAAM,KAAK;EACT,MAAM;EACN,MAAM,GAAG,KAAK;EACd,UAAU,KAAK;EACf,UAAU,KAAK;EACf,aAAa,4BAA4B,KAAK,KAAK,QAAQ,KAAK,KAAK;EACtE,CAAC;AAIJ,KAAI,KAAK,aAAa,KAAK,UAAU;EACnC,MAAM,aAAa,CAAC,KAAK,YAAY,KAAK;AAC1C,QAAM,KAAK;GACT,MAAM,aAAa,aAAa;GAChC,MAAM,GAAG,KAAK;GACd,UAAU,KAAK;GACf,UAAU,KAAK;GACf,aAAa,aACT,UAAU,KAAK,KAAK,uCACpB,UAAU,KAAK,KAAK;GACzB,CAAC;;AAIJ,KAAI,KAAK,aAAa,KAAK,UAAU;EACnC,MAAM,aAAa,KAAK,YAAY,CAAC,KAAK;AAC1C,QAAM,KAAK;GACT,MAAM,aAAa,aAAa;GAChC,MAAM,GAAG,KAAK;GACd,UAAU,KAAK;GACf,UAAU,KAAK;GACf,aAAa,aACT,UAAU,KAAK,KAAK,2BACpB,UAAU,KAAK,KAAK;GACzB,CAAC;;AAIJ,KAAI,KAAK,SAAS,UAAU,KAAK,SAAS,QAAQ;EAChD,MAAM,aAAa,IAAI,IAAI,KAAK,cAAc,EAAE,CAAC;EACjD,MAAM,aAAa,IAAI,IAAI,KAAK,cAAc,EAAE,CAAC;AAGjD,OAAK,MAAM,SAAS,WAClB,KAAI,CAAC,WAAW,IAAI,MAAM,CACxB,OAAM,KAAK;GACT,MAAM;GACN,MAAM,GAAG,KAAK;GACd,UAAU,KAAK;GACf,UAAU,KAAK;GACf,aAAa,eAAe,MAAM;GACnC,CAAC;AAKN,OAAK,MAAM,SAAS,WAClB,KAAI,CAAC,WAAW,IAAI,MAAM,CACxB,OAAM,KAAK;GACT,MAAM;GACN,MAAM,GAAG,KAAK;GACd,UAAU,KAAK;GACf,UAAU,KAAK;GACf,aAAa,eAAe,MAAM;GACnC,CAAC;;AAMR,KACE,KAAK,SAAS,YACd,KAAK,SAAS,YACd,KAAK,cACL,KAAK,WAEL,OAAM,KACJ,GAAG,kBAAkB,KAAK,YAAY,KAAK,YAAY,MAAM,SAAS,CACvE;AAIH,KACE,KAAK,SAAS,WACd,KAAK,SAAS,WACd,KAAK,SACL,KAAK,MAEL,OAAM,KACJ,GAAG,iBAAiB,KAAK,OAAO,KAAK,OAAO,GAAG,KAAK,SAAS,SAAS,CACvE;AAGH,QAAO;;;;;AAMT,SAAgB,iBACd,MACA,SACS;AAET,KAAI,YAAY,SACd,QAAO,KAAK,SAAS,cAAc,KAAK,SAAS;AAInD,KAAI,YAAY,SAAS;AACvB,MAAI,KAAK,SAAS,WAAW,KAAK,aAAa,SAAS,iBAAiB,CACvE,QAAO;AAET,SAAO,KAAK,SAAS;;AAGvB,QAAO,KAAK,SAAS"}
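For orientation, here is a brief usage sketch of the deep-diff API added above. It is not part of the package contents; the root import path and the exact IoSnapshot/FieldSnapshot field shapes are assumptions (the published types live in dist/analysis/snapshot/types.d.ts and dist/types/analysis-types.d.ts).

// Hypothetical usage sketch (ESM JavaScript); the import path is an assumption.
import { computeIoDiff, isBreakingChange } from "@contractspec/module.workspace";

// Two IO snapshots: `head` adds a required input field and a new enum value on the output.
const base = {
  input: { id: { name: "id", type: "string", required: true, nullable: false } },
  output: { status: { name: "status", type: "enum", required: true, nullable: false, enumValues: ["ok", "error"] } }
};
const head = {
  input: {
    id: { name: "id", type: "string", required: true, nullable: false },
    tenant: { name: "tenant", type: "string", required: true, nullable: false }
  },
  output: { status: { name: "status", type: "enum", required: true, nullable: false, enumValues: ["ok", "error", "retry"] } }
};

for (const diff of computeIoDiff(base, head)) {
  const context = diff.path.startsWith("io.input") ? "input" : "output";
  console.log(diff.type, diff.path, isBreakingChange(diff, context), diff.description);
}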
package/dist/analysis/diff/semantic.d.ts
@@ -0,0 +1,11 @@
import { SemanticDiffItem, SemanticDiffOptions } from "../../types/analysis-types.js";

//#region src/analysis/diff/semantic.d.ts

/**
 * Compute semantic differences between two spec sources.
 */
declare function computeSemanticDiff(aCode: string, aPath: string, bCode: string, bPath: string, options?: SemanticDiffOptions): SemanticDiffItem[];
//#endregion
export { computeSemanticDiff };
//# sourceMappingURL=semantic.d.ts.map
package/dist/analysis/diff/semantic.d.ts.map
@@ -0,0 +1 @@
{"version":3,"file":"semantic.d.ts","names":[],"sources":["../../../src/analysis/diff/semantic.ts"],"sourcesContent":[],"mappings":";;;;;;;iBAegB,mBAAA,uEAKL,sBACR"}
package/dist/analysis/diff/semantic.js
@@ -0,0 +1,97 @@
import { scanSpecSource } from "../spec-scan.js";

//#region src/analysis/diff/semantic.ts
/**
 * Compute semantic differences between two spec sources.
 */
function computeSemanticDiff(aCode, aPath, bCode, bPath, options = {}) {
  const a = scanSpecSource(aCode, aPath);
  const b = scanSpecSource(bCode, bPath);
  const diffs = [];
  compareScalar(diffs, "specType", a.specType, b.specType, {
    breaking: true,
    label: "Spec type"
  });
  compareScalar(diffs, "key", a.key, b.key, {
    breaking: true,
    label: "Key"
  });
  compareScalar(diffs, "version", a.version, b.version, {
    breaking: true,
    label: "Version"
  });
  compareScalar(diffs, "kind", a.kind, b.kind, {
    breaking: true,
    label: "Kind"
  });
  compareScalar(diffs, "stability", a.stability, b.stability, {
    breaking: isStabilityDowngrade(a, b),
    label: "Stability"
  });
  compareArray(diffs, "owners", a.owners ?? [], b.owners ?? [], { label: "Owners" });
  compareArray(diffs, "tags", a.tags ?? [], b.tags ?? [], { label: "Tags" });
  compareStructuralHints(diffs, a, b);
  return options.breakingOnly ? diffs.filter((d) => d.type === "breaking") : diffs;
}
function compareScalar(diffs, path, a, b, config) {
  if (a === b) return;
  diffs.push({
    type: config.breaking ? "breaking" : "changed",
    path,
    oldValue: a,
    newValue: b,
    description: `${config.label} changed`
  });
}
function compareArray(diffs, path, a, b, config) {
  const aSorted = [...a].sort();
  const bSorted = [...b].sort();
  if (JSON.stringify(aSorted) === JSON.stringify(bSorted)) return;
  diffs.push({
    type: "changed",
    path,
    oldValue: aSorted,
    newValue: bSorted,
    description: `${config.label} changed`
  });
}
function isStabilityDowngrade(a, b) {
  const order = {
    experimental: 0,
    beta: 1,
    stable: 2,
    deprecated: 3
  };
  const aValue = a.stability ? order[a.stability] ?? 0 : 0;
  return (b.stability ? order[b.stability] ?? 0 : 0) > aValue;
}
function compareStructuralHints(diffs, a, b) {
  compareScalar(diffs, "hasMeta", a.hasMeta, b.hasMeta, {
    breaking: a.specType === "operation" || b.specType === "operation",
    label: "meta section presence"
  });
  compareScalar(diffs, "hasIo", a.hasIo, b.hasIo, {
    breaking: a.specType === "operation" || b.specType === "operation",
    label: "io section presence"
  });
  compareScalar(diffs, "hasPolicy", a.hasPolicy, b.hasPolicy, {
    breaking: a.specType === "operation" || b.specType === "operation",
    label: "policy section presence"
  });
  compareScalar(diffs, "hasPayload", a.hasPayload, b.hasPayload, {
    breaking: a.specType === "event" || b.specType === "event",
    label: "payload section presence"
  });
  compareScalar(diffs, "hasContent", a.hasContent, b.hasContent, {
    breaking: a.specType === "presentation" || b.specType === "presentation",
    label: "content section presence"
  });
  compareScalar(diffs, "hasDefinition", a.hasDefinition, b.hasDefinition, {
    breaking: a.specType === "workflow" || b.specType === "workflow",
    label: "definition section presence"
  });
}

//#endregion
export { computeSemanticDiff };
//# sourceMappingURL=semantic.js.map
package/dist/analysis/diff/semantic.js.map
@@ -0,0 +1 @@
{"version":3,"file":"semantic.js","names":["diffs: SemanticDiffItem[]","order: Record<string, number>"],"sources":["../../../src/analysis/diff/semantic.ts"],"sourcesContent":["/**\n * Semantic diff computation for contract specs.\n * Extracted from cli-contractspec/src/commands/diff/semantic.ts\n */\n\nimport type {\n SemanticDiffItem,\n SemanticDiffOptions,\n SpecScanResult,\n} from '../../types/analysis-types';\nimport { scanSpecSource } from '../spec-scan';\n\n/**\n * Compute semantic differences between two spec sources.\n */\nexport function computeSemanticDiff(\n aCode: string,\n aPath: string,\n bCode: string,\n bPath: string,\n options: SemanticDiffOptions = {}\n): SemanticDiffItem[] {\n const a = scanSpecSource(aCode, aPath);\n const b = scanSpecSource(bCode, bPath);\n\n const diffs: SemanticDiffItem[] = [];\n\n compareScalar(diffs, 'specType', a.specType, b.specType, {\n breaking: true,\n label: 'Spec type',\n });\n\n compareScalar(diffs, 'key', a.key, b.key, {\n breaking: true,\n label: 'Key',\n });\n compareScalar(diffs, 'version', a.version, b.version, {\n breaking: true,\n label: 'Version',\n });\n compareScalar(diffs, 'kind', a.kind, b.kind, {\n breaking: true,\n label: 'Kind',\n });\n\n compareScalar(diffs, 'stability', a.stability, b.stability, {\n breaking: isStabilityDowngrade(a, b),\n label: 'Stability',\n });\n\n compareArray(diffs, 'owners', a.owners ?? [], b.owners ?? [], {\n label: 'Owners',\n });\n compareArray(diffs, 'tags', a.tags ?? [], b.tags ?? [], { label: 'Tags' });\n\n compareStructuralHints(diffs, a, b);\n\n const filtered = options.breakingOnly\n ? diffs.filter((d) => d.type === 'breaking')\n : diffs;\n\n return filtered;\n}\n\nfunction compareScalar(\n diffs: SemanticDiffItem[],\n path: string,\n a: unknown,\n b: unknown,\n config: { breaking: boolean; label: string }\n) {\n if (a === b) return;\n diffs.push({\n type: config.breaking ? 'breaking' : 'changed',\n path,\n oldValue: a,\n newValue: b,\n description: `${config.label} changed`,\n });\n}\n\nfunction compareArray(\n diffs: SemanticDiffItem[],\n path: string,\n a: string[],\n b: string[],\n config: { label: string }\n) {\n const aSorted = [...a].sort();\n const bSorted = [...b].sort();\n if (JSON.stringify(aSorted) === JSON.stringify(bSorted)) return;\n\n diffs.push({\n type: 'changed',\n path,\n oldValue: aSorted,\n newValue: bSorted,\n description: `${config.label} changed`,\n });\n}\n\nfunction isStabilityDowngrade(a: SpecScanResult, b: SpecScanResult): boolean {\n const order: Record<string, number> = {\n experimental: 0,\n beta: 1,\n stable: 2,\n deprecated: 3,\n };\n const aValue = a.stability ? (order[a.stability] ?? 0) : 0;\n const bValue = b.stability ? (order[b.stability] ?? 
0) : 0;\n // Moving toward deprecated is effectively a breaking signal for consumers.\n return bValue > aValue;\n}\n\nfunction compareStructuralHints(\n diffs: SemanticDiffItem[],\n a: SpecScanResult,\n b: SpecScanResult\n) {\n // For operations these sections are usually required; missing them is breaking.\n compareScalar(diffs, 'hasMeta', a.hasMeta, b.hasMeta, {\n breaking: a.specType === 'operation' || b.specType === 'operation',\n label: 'meta section presence',\n });\n compareScalar(diffs, 'hasIo', a.hasIo, b.hasIo, {\n breaking: a.specType === 'operation' || b.specType === 'operation',\n label: 'io section presence',\n });\n compareScalar(diffs, 'hasPolicy', a.hasPolicy, b.hasPolicy, {\n breaking: a.specType === 'operation' || b.specType === 'operation',\n label: 'policy section presence',\n });\n compareScalar(diffs, 'hasPayload', a.hasPayload, b.hasPayload, {\n breaking: a.specType === 'event' || b.specType === 'event',\n label: 'payload section presence',\n });\n compareScalar(diffs, 'hasContent', a.hasContent, b.hasContent, {\n breaking: a.specType === 'presentation' || b.specType === 'presentation',\n label: 'content section presence',\n });\n compareScalar(diffs, 'hasDefinition', a.hasDefinition, b.hasDefinition, {\n breaking: a.specType === 'workflow' || b.specType === 'workflow',\n label: 'definition section presence',\n });\n}\n"],"mappings":";;;;;;AAeA,SAAgB,oBACd,OACA,OACA,OACA,OACA,UAA+B,EAAE,EACb;CACpB,MAAM,IAAI,eAAe,OAAO,MAAM;CACtC,MAAM,IAAI,eAAe,OAAO,MAAM;CAEtC,MAAMA,QAA4B,EAAE;AAEpC,eAAc,OAAO,YAAY,EAAE,UAAU,EAAE,UAAU;EACvD,UAAU;EACV,OAAO;EACR,CAAC;AAEF,eAAc,OAAO,OAAO,EAAE,KAAK,EAAE,KAAK;EACxC,UAAU;EACV,OAAO;EACR,CAAC;AACF,eAAc,OAAO,WAAW,EAAE,SAAS,EAAE,SAAS;EACpD,UAAU;EACV,OAAO;EACR,CAAC;AACF,eAAc,OAAO,QAAQ,EAAE,MAAM,EAAE,MAAM;EAC3C,UAAU;EACV,OAAO;EACR,CAAC;AAEF,eAAc,OAAO,aAAa,EAAE,WAAW,EAAE,WAAW;EAC1D,UAAU,qBAAqB,GAAG,EAAE;EACpC,OAAO;EACR,CAAC;AAEF,cAAa,OAAO,UAAU,EAAE,UAAU,EAAE,EAAE,EAAE,UAAU,EAAE,EAAE,EAC5D,OAAO,UACR,CAAC;AACF,cAAa,OAAO,QAAQ,EAAE,QAAQ,EAAE,EAAE,EAAE,QAAQ,EAAE,EAAE,EAAE,OAAO,QAAQ,CAAC;AAE1E,wBAAuB,OAAO,GAAG,EAAE;AAMnC,QAJiB,QAAQ,eACrB,MAAM,QAAQ,MAAM,EAAE,SAAS,WAAW,GAC1C;;AAKN,SAAS,cACP,OACA,MACA,GACA,GACA,QACA;AACA,KAAI,MAAM,EAAG;AACb,OAAM,KAAK;EACT,MAAM,OAAO,WAAW,aAAa;EACrC;EACA,UAAU;EACV,UAAU;EACV,aAAa,GAAG,OAAO,MAAM;EAC9B,CAAC;;AAGJ,SAAS,aACP,OACA,MACA,GACA,GACA,QACA;CACA,MAAM,UAAU,CAAC,GAAG,EAAE,CAAC,MAAM;CAC7B,MAAM,UAAU,CAAC,GAAG,EAAE,CAAC,MAAM;AAC7B,KAAI,KAAK,UAAU,QAAQ,KAAK,KAAK,UAAU,QAAQ,CAAE;AAEzD,OAAM,KAAK;EACT,MAAM;EACN;EACA,UAAU;EACV,UAAU;EACV,aAAa,GAAG,OAAO,MAAM;EAC9B,CAAC;;AAGJ,SAAS,qBAAqB,GAAmB,GAA4B;CAC3E,MAAMC,QAAgC;EACpC,cAAc;EACd,MAAM;EACN,QAAQ;EACR,YAAY;EACb;CACD,MAAM,SAAS,EAAE,YAAa,MAAM,EAAE,cAAc,IAAK;AAGzD,SAFe,EAAE,YAAa,MAAM,EAAE,cAAc,IAAK,KAEzC;;AAGlB,SAAS,uBACP,OACA,GACA,GACA;AAEA,eAAc,OAAO,WAAW,EAAE,SAAS,EAAE,SAAS;EACpD,UAAU,EAAE,aAAa,eAAe,EAAE,aAAa;EACvD,OAAO;EACR,CAAC;AACF,eAAc,OAAO,SAAS,EAAE,OAAO,EAAE,OAAO;EAC9C,UAAU,EAAE,aAAa,eAAe,EAAE,aAAa;EACvD,OAAO;EACR,CAAC;AACF,eAAc,OAAO,aAAa,EAAE,WAAW,EAAE,WAAW;EAC1D,UAAU,EAAE,aAAa,eAAe,EAAE,aAAa;EACvD,OAAO;EACR,CAAC;AACF,eAAc,OAAO,cAAc,EAAE,YAAY,EAAE,YAAY;EAC7D,UAAU,EAAE,aAAa,WAAW,EAAE,aAAa;EACnD,OAAO;EACR,CAAC;AACF,eAAc,OAAO,cAAc,EAAE,YAAY,EAAE,YAAY;EAC7D,UAAU,EAAE,aAAa,kBAAkB,EAAE,aAAa;EAC1D,OAAO;EACR,CAAC;AACF,eAAc,OAAO,iBAAiB,EAAE,eAAe,EAAE,eAAe;EACtE,UAAU,EAAE,aAAa,cAAc,EAAE,aAAa;EACtD,OAAO;EACR,CAAC"}
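A usage sketch of computeSemanticDiff for orientation (not from the package): the root import path and the spec file paths are placeholders, and what can actually be extracted from each source is determined by scanSpecSource in spec-scan.js.

// Hypothetical usage sketch (ESM JavaScript); import path and file paths are assumptions.
import { readFileSync } from "node:fs";
import { computeSemanticDiff } from "@contractspec/module.workspace";

const basePath = "specs/create-invoice.operation.ts";
const headPath = "specs/create-invoice.operation.next.ts";

// Compare two revisions of the same spec source; keep only breaking findings.
const diffs = computeSemanticDiff(
  readFileSync(basePath, "utf8"), basePath,
  readFileSync(headPath, "utf8"), headPath,
  { breakingOnly: true }
);
for (const d of diffs) console.log(`${d.type} ${d.path}: ${d.description}`);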
package/dist/analysis/feature-scan.d.ts
@@ -0,0 +1,15 @@
import { FeatureScanResult } from "../types/analysis-types.js";

//#region src/analysis/feature-scan.d.ts

/**
 * Check if a file is a feature file based on naming conventions.
 */
declare function isFeatureFile(filePath: string): boolean;
/**
 * Scan a feature source file to extract metadata.
 */
declare function scanFeatureSource(code: string, filePath: string): FeatureScanResult;
//#endregion
export { isFeatureFile, scanFeatureSource };
//# sourceMappingURL=feature-scan.d.ts.map
package/dist/analysis/feature-scan.d.ts.map
@@ -0,0 +1 @@
{"version":3,"file":"feature-scan.d.ts","names":[],"sources":["../../src/analysis/feature-scan.ts"],"sourcesContent":[],"mappings":";;;;;;;iBAYgB,aAAA;;;;iBAOA,iBAAA,kCAGb"}
package/dist/analysis/feature-scan.js
@@ -0,0 +1,152 @@
//#region src/analysis/feature-scan.ts
/**
 * Check if a file is a feature file based on naming conventions.
 */
function isFeatureFile(filePath) {
  return filePath.includes(".feature.");
}
/**
 * Scan a feature source file to extract metadata.
 */
function scanFeatureSource(code, filePath) {
  const key = matchStringField(code, "key") ?? extractKeyFromFilePath(filePath);
  const title = matchStringField(code, "title") ?? void 0;
  const description = matchStringField(code, "description") ?? void 0;
  const domain = matchStringField(code, "domain") ?? void 0;
  const stabilityRaw = matchStringField(code, "stability");
  return {
    filePath,
    key,
    title,
    description,
    domain,
    stability: isStability(stabilityRaw) ? stabilityRaw : void 0,
    owners: matchStringArrayField(code, "owners"),
    tags: matchStringArrayField(code, "tags"),
    operations: extractRefsFromArray(code, "operations"),
    events: extractRefsFromArray(code, "events"),
    presentations: extractRefsFromArray(code, "presentations"),
    experiments: extractRefsFromArray(code, "experiments"),
    capabilities: extractCapabilities(code),
    opToPresentationLinks: extractOpToPresentationLinks(code)
  };
}
/**
 * Extract refs from a named array (e.g., operations, events, presentations).
 */
function extractRefsFromArray(code, fieldName) {
  const refs = [];
  const arrayPattern = new RegExp(`${escapeRegex(fieldName)}\\s*:\\s*\\[([\\s\\S]*?)\\]`, "m");
  const arrayMatch = code.match(arrayPattern);
  if (!arrayMatch?.[1]) return refs;
  const refPattern = /\{\s*key:\s*['"]([^'"]+)['"]\s*,\s*version:\s*(\d+)/g;
  let match;
  while ((match = refPattern.exec(arrayMatch[1])) !== null) if (match[1] && match[2]) refs.push({
    key: match[1],
    version: Number(match[2])
  });
  return refs;
}
/**
 * Extract capability bindings (provides and requires).
 */
function extractCapabilities(code) {
  const provides = [];
  const requires = [];
  const capabilitiesMatch = code.match(/capabilities\s*:\s*\{([\s\S]*?)\}/);
  if (!capabilitiesMatch?.[1]) return {
    provides,
    requires
  };
  const capabilitiesContent = capabilitiesMatch[1];
  const providesMatch = capabilitiesContent.match(/provides\s*:\s*\[([\s\S]*?)\]/);
  if (providesMatch?.[1]) {
    const refPattern = /\{\s*key:\s*['"]([^'"]+)['"]\s*,\s*version:\s*(\d+)/g;
    let match;
    while ((match = refPattern.exec(providesMatch[1])) !== null) if (match[1] && match[2]) provides.push({
      key: match[1],
      version: Number(match[2])
    });
  }
  const requiresMatch = capabilitiesContent.match(/requires\s*:\s*\[([\s\S]*?)\]/);
  if (requiresMatch?.[1]) {
    const refPatternWithVersion = /\{\s*key:\s*['"]([^'"]+)['"]\s*,\s*version:\s*(\d+)/g;
    const refPatternKeyOnly = /\{\s*key:\s*['"]([^'"]+)['"]\s*\}/g;
    let match = null;
    while ((match = refPatternWithVersion.exec(requiresMatch[1])) !== null) if (match[1] && match[2]) requires.push({
      key: match[1],
      version: Number(match[2])
    });
    while ((match = refPatternKeyOnly.exec(requiresMatch[1])) !== null) if (match && match[1]) {
      if (!requires.some((r) => r.key === match[1])) requires.push({
        key: match[1],
        version: 1
      });
    }
  }
  return {
    provides,
    requires
  };
}
/**
 * Extract opToPresentation links.
 */
function extractOpToPresentationLinks(code) {
  const links = [];
  const arrayMatch = code.match(/opToPresentation\s*:\s*\[([\s\S]*?)\]/);
  if (!arrayMatch?.[1]) return links;
  const linkPattern = /\{\s*op:\s*\{\s*key:\s*['"]([^'"]+)['"]\s*,\s*version:\s*(\d+)\s*\}\s*,\s*pres:\s*\{\s*key:\s*['"]([^'"]+)['"]\s*,\s*version:\s*(\d+)\s*\}/g;
  let match;
  while ((match = linkPattern.exec(arrayMatch[1])) !== null) if (match[1] && match[2] && match[3] && match[4]) links.push({
    op: {
      key: match[1],
      version: Number(match[2])
    },
    pres: {
      key: match[3],
      version: Number(match[4])
    }
  });
  return links;
}
/**
 * Extract key from file path as fallback.
 */
function extractKeyFromFilePath(filePath) {
  return (filePath.split("/").pop() ?? filePath).replace(/\.feature\.[jt]s$/, "").replace(/[^a-zA-Z0-9-]/g, "-");
}
/**
 * Match a string field in source code.
 */
function matchStringField(code, field) {
  const regex = /* @__PURE__ */ new RegExp(`${escapeRegex(field)}\\s*:\\s*['"]([^'"]+)['"]`);
  return code.match(regex)?.[1] ?? null;
}
/**
 * Match a string array field in source code.
 */
function matchStringArrayField(code, field) {
  const regex = /* @__PURE__ */ new RegExp(`${escapeRegex(field)}\\s*:\\s*\\[([\\s\\S]*?)\\]`);
  const match = code.match(regex);
  if (!match?.[1]) return void 0;
  const inner = match[1];
  const items = Array.from(inner.matchAll(/['"]([^'"]+)['"]/g)).map((m) => m[1]).filter((value) => typeof value === "string" && value.length > 0);
  return items.length > 0 ? items : void 0;
}
/**
 * Check if a value is a valid stability.
 */
function isStability(value) {
  return value === "experimental" || value === "beta" || value === "stable" || value === "deprecated";
}
/**
 * Escape regex special characters.
 */
function escapeRegex(value) {
  return value.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
}

//#endregion
export { isFeatureFile, scanFeatureSource };
//# sourceMappingURL=feature-scan.js.map
package/dist/analysis/feature-scan.js.map
@@ -0,0 +1 @@
{"version":3,"file":"feature-scan.js","names":["refs: RefInfo[]","provides: RefInfo[]","requires: RefInfo[]","match: RegExpExecArray | null","links: { op: RefInfo; pres: RefInfo }[]"],"sources":["../../src/analysis/feature-scan.ts"],"sourcesContent":["/**\n * Feature file scanning utilities.\n *\n * Extracts FeatureModuleSpec metadata from source code without execution.\n */\n\nimport type { FeatureScanResult, RefInfo } from '../types/analysis-types';\nimport type { Stability } from '../types/spec-types';\n\n/**\n * Check if a file is a feature file based on naming conventions.\n */\nexport function isFeatureFile(filePath: string): boolean {\n return filePath.includes('.feature.');\n}\n\n/**\n * Scan a feature source file to extract metadata.\n */\nexport function scanFeatureSource(\n code: string,\n filePath: string\n): FeatureScanResult {\n const key = matchStringField(code, 'key') ?? extractKeyFromFilePath(filePath);\n const title = matchStringField(code, 'title') ?? undefined;\n const description = matchStringField(code, 'description') ?? undefined;\n const domain = matchStringField(code, 'domain') ?? undefined;\n const stabilityRaw = matchStringField(code, 'stability');\n const stability = isStability(stabilityRaw) ? stabilityRaw : undefined;\n const owners = matchStringArrayField(code, 'owners');\n const tags = matchStringArrayField(code, 'tags');\n\n // Extract operations\n const operations = extractRefsFromArray(code, 'operations');\n\n // Extract events\n const events = extractRefsFromArray(code, 'events');\n\n // Extract presentations\n const presentations = extractRefsFromArray(code, 'presentations');\n\n // Extract experiments\n const experiments = extractRefsFromArray(code, 'experiments');\n\n // Extract capabilities\n const capabilities = extractCapabilities(code);\n\n // Extract op to presentation links\n const opToPresentationLinks = extractOpToPresentationLinks(code);\n\n return {\n filePath,\n key,\n title,\n description,\n domain,\n stability,\n owners,\n tags,\n operations,\n events,\n presentations,\n experiments,\n capabilities,\n opToPresentationLinks,\n };\n}\n\n/**\n * Extract refs from a named array (e.g., operations, events, presentations).\n */\nfunction extractRefsFromArray(code: string, fieldName: string): RefInfo[] {\n const refs: RefInfo[] = [];\n\n // Match the array section\n const arrayPattern = new RegExp(\n `${escapeRegex(fieldName)}\\\\s*:\\\\s*\\\\[([\\\\s\\\\S]*?)\\\\]`,\n 'm'\n );\n const arrayMatch = code.match(arrayPattern);\n\n if (!arrayMatch?.[1]) return refs;\n\n // Extract each { key: 'x', version: N } entry\n const refPattern = /\\{\\s*key:\\s*['\"]([^'\"]+)['\"]\\s*,\\s*version:\\s*(\\d+)/g;\n let match;\n while ((match = refPattern.exec(arrayMatch[1])) !== null) {\n if (match[1] && match[2]) {\n refs.push({\n key: match[1],\n version: Number(match[2]),\n });\n }\n }\n\n return refs;\n}\n\n/**\n * Extract capability bindings (provides and requires).\n */\nfunction extractCapabilities(code: string): {\n provides: RefInfo[];\n requires: RefInfo[];\n} {\n const provides: RefInfo[] = [];\n const requires: RefInfo[] = [];\n\n // Match the capabilities section\n const capabilitiesMatch = code.match(/capabilities\\s*:\\s*\\{([\\s\\S]*?)\\}/);\n if (!capabilitiesMatch?.[1]) {\n return { provides, requires };\n }\n\n const capabilitiesContent = capabilitiesMatch[1];\n\n // Extract provides\n const providesMatch = capabilitiesContent.match(\n /provides\\s*:\\s*\\[([\\s\\S]*?)\\]/\n );\n if (providesMatch?.[1]) {\n const refPattern = 
/\\{\\s*key:\\s*['\"]([^'\"]+)['\"]\\s*,\\s*version:\\s*(\\d+)/g;\n let match;\n while ((match = refPattern.exec(providesMatch[1])) !== null) {\n if (match[1] && match[2]) {\n provides.push({\n key: match[1],\n version: Number(match[2]),\n });\n }\n }\n }\n\n // Extract requires\n const requiresMatch = capabilitiesContent.match(\n /requires\\s*:\\s*\\[([\\s\\S]*?)\\]/\n );\n if (requiresMatch?.[1]) {\n // Requires can have key+version or just key\n const refPatternWithVersion =\n /\\{\\s*key:\\s*['\"]([^'\"]+)['\"]\\s*,\\s*version:\\s*(\\d+)/g;\n const refPatternKeyOnly = /\\{\\s*key:\\s*['\"]([^'\"]+)['\"]\\s*\\}/g;\n\n let match: RegExpExecArray | null = null;\n while ((match = refPatternWithVersion.exec(requiresMatch[1])) !== null) {\n if (match[1] && match[2]) {\n requires.push({\n key: match[1],\n version: Number(match[2]),\n });\n }\n }\n\n // Also match key-only requires (version defaults to 1)\n while ((match = refPatternKeyOnly.exec(requiresMatch[1])) !== null) {\n if (match && match[1]) {\n // Check if we already added this with a version\n // eslint-disable-next-line @typescript-eslint/no-non-null-assertion\n const alreadyExists = requires.some((r) => r.key === match![1]);\n if (!alreadyExists) {\n requires.push({\n key: match[1],\n version: 1, // Default version\n });\n }\n }\n }\n }\n\n return { provides, requires };\n}\n\n/**\n * Extract opToPresentation links.\n */\nfunction extractOpToPresentationLinks(\n code: string\n): { op: RefInfo; pres: RefInfo }[] {\n const links: { op: RefInfo; pres: RefInfo }[] = [];\n\n // Match the opToPresentation array\n const arrayMatch = code.match(/opToPresentation\\s*:\\s*\\[([\\s\\S]*?)\\]/);\n if (!arrayMatch?.[1]) return links;\n\n // Match each link entry\n // Pattern: { op: { key: 'x', version: N }, pres: { key: 'y', version: M } }\n const linkPattern =\n /\\{\\s*op:\\s*\\{\\s*key:\\s*['\"]([^'\"]+)['\"]\\s*,\\s*version:\\s*(\\d+)\\s*\\}\\s*,\\s*pres:\\s*\\{\\s*key:\\s*['\"]([^'\"]+)['\"]\\s*,\\s*version:\\s*(\\d+)\\s*\\}/g;\n\n let match;\n while ((match = linkPattern.exec(arrayMatch[1])) !== null) {\n if (match[1] && match[2] && match[3] && match[4]) {\n links.push({\n op: { key: match[1], version: Number(match[2]) },\n pres: { key: match[3], version: Number(match[4]) },\n });\n }\n }\n\n return links;\n}\n\n/**\n * Extract key from file path as fallback.\n */\nfunction extractKeyFromFilePath(filePath: string): string {\n const fileName = filePath.split('/').pop() ?? filePath;\n return fileName\n .replace(/\\.feature\\.[jt]s$/, '')\n .replace(/[^a-zA-Z0-9-]/g, '-');\n}\n\n/**\n * Match a string field in source code.\n */\nfunction matchStringField(code: string, field: string): string | null {\n const regex = new RegExp(`${escapeRegex(field)}\\\\s*:\\\\s*['\"]([^'\"]+)['\"]`);\n const match = code.match(regex);\n return match?.[1] ?? null;\n}\n\n/**\n * Match a string array field in source code.\n */\nfunction matchStringArrayField(\n code: string,\n field: string\n): string[] | undefined {\n const regex = new RegExp(`${escapeRegex(field)}\\\\s*:\\\\s*\\\\[([\\\\s\\\\S]*?)\\\\]`);\n const match = code.match(regex);\n if (!match?.[1]) return undefined;\n\n const inner = match[1];\n const items = Array.from(inner.matchAll(/['\"]([^'\"]+)['\"]/g))\n .map((m) => m[1])\n .filter(\n (value): value is string => typeof value === 'string' && value.length > 0\n );\n\n return items.length > 0 ? 
items : undefined;\n}\n\n/**\n * Check if a value is a valid stability.\n */\nfunction isStability(value: string | null): value is Stability {\n return (\n value === 'experimental' ||\n value === 'beta' ||\n value === 'stable' ||\n value === 'deprecated'\n );\n}\n\n/**\n * Escape regex special characters.\n */\nfunction escapeRegex(value: string): string {\n return value.replace(/[.*+?^${}()|[\\]\\\\]/g, '\\\\$&');\n}\n"],"mappings":";;;;AAYA,SAAgB,cAAc,UAA2B;AACvD,QAAO,SAAS,SAAS,YAAY;;;;;AAMvC,SAAgB,kBACd,MACA,UACmB;CACnB,MAAM,MAAM,iBAAiB,MAAM,MAAM,IAAI,uBAAuB,SAAS;CAC7E,MAAM,QAAQ,iBAAiB,MAAM,QAAQ,IAAI;CACjD,MAAM,cAAc,iBAAiB,MAAM,cAAc,IAAI;CAC7D,MAAM,SAAS,iBAAiB,MAAM,SAAS,IAAI;CACnD,MAAM,eAAe,iBAAiB,MAAM,YAAY;AAuBxD,QAAO;EACL;EACA;EACA;EACA;EACA;EACA,WA5BgB,YAAY,aAAa,GAAG,eAAe;EA6B3D,QA5Ba,sBAAsB,MAAM,SAAS;EA6BlD,MA5BW,sBAAsB,MAAM,OAAO;EA6B9C,YA1BiB,qBAAqB,MAAM,aAAa;EA2BzD,QAxBa,qBAAqB,MAAM,SAAS;EAyBjD,eAtBoB,qBAAqB,MAAM,gBAAgB;EAuB/D,aApBkB,qBAAqB,MAAM,cAAc;EAqB3D,cAlBmB,oBAAoB,KAAK;EAmB5C,uBAhB4B,6BAA6B,KAAK;EAiB/D;;;;;AAMH,SAAS,qBAAqB,MAAc,WAA8B;CACxE,MAAMA,OAAkB,EAAE;CAG1B,MAAM,eAAe,IAAI,OACvB,GAAG,YAAY,UAAU,CAAC,8BAC1B,IACD;CACD,MAAM,aAAa,KAAK,MAAM,aAAa;AAE3C,KAAI,CAAC,aAAa,GAAI,QAAO;CAG7B,MAAM,aAAa;CACnB,IAAI;AACJ,SAAQ,QAAQ,WAAW,KAAK,WAAW,GAAG,MAAM,KAClD,KAAI,MAAM,MAAM,MAAM,GACpB,MAAK,KAAK;EACR,KAAK,MAAM;EACX,SAAS,OAAO,MAAM,GAAG;EAC1B,CAAC;AAIN,QAAO;;;;;AAMT,SAAS,oBAAoB,MAG3B;CACA,MAAMC,WAAsB,EAAE;CAC9B,MAAMC,WAAsB,EAAE;CAG9B,MAAM,oBAAoB,KAAK,MAAM,oCAAoC;AACzE,KAAI,CAAC,oBAAoB,GACvB,QAAO;EAAE;EAAU;EAAU;CAG/B,MAAM,sBAAsB,kBAAkB;CAG9C,MAAM,gBAAgB,oBAAoB,MACxC,gCACD;AACD,KAAI,gBAAgB,IAAI;EACtB,MAAM,aAAa;EACnB,IAAI;AACJ,UAAQ,QAAQ,WAAW,KAAK,cAAc,GAAG,MAAM,KACrD,KAAI,MAAM,MAAM,MAAM,GACpB,UAAS,KAAK;GACZ,KAAK,MAAM;GACX,SAAS,OAAO,MAAM,GAAG;GAC1B,CAAC;;CAMR,MAAM,gBAAgB,oBAAoB,MACxC,gCACD;AACD,KAAI,gBAAgB,IAAI;EAEtB,MAAM,wBACJ;EACF,MAAM,oBAAoB;EAE1B,IAAIC,QAAgC;AACpC,UAAQ,QAAQ,sBAAsB,KAAK,cAAc,GAAG,MAAM,KAChE,KAAI,MAAM,MAAM,MAAM,GACpB,UAAS,KAAK;GACZ,KAAK,MAAM;GACX,SAAS,OAAO,MAAM,GAAG;GAC1B,CAAC;AAKN,UAAQ,QAAQ,kBAAkB,KAAK,cAAc,GAAG,MAAM,KAC5D,KAAI,SAAS,MAAM,IAIjB;OAAI,CADkB,SAAS,MAAM,MAAM,EAAE,QAAQ,MAAO,GAAG,CAE7D,UAAS,KAAK;IACZ,KAAK,MAAM;IACX,SAAS;IACV,CAAC;;;AAMV,QAAO;EAAE;EAAU;EAAU;;;;;AAM/B,SAAS,6BACP,MACkC;CAClC,MAAMC,QAA0C,EAAE;CAGlD,MAAM,aAAa,KAAK,MAAM,wCAAwC;AACtE,KAAI,CAAC,aAAa,GAAI,QAAO;CAI7B,MAAM,cACJ;CAEF,IAAI;AACJ,SAAQ,QAAQ,YAAY,KAAK,WAAW,GAAG,MAAM,KACnD,KAAI,MAAM,MAAM,MAAM,MAAM,MAAM,MAAM,MAAM,GAC5C,OAAM,KAAK;EACT,IAAI;GAAE,KAAK,MAAM;GAAI,SAAS,OAAO,MAAM,GAAG;GAAE;EAChD,MAAM;GAAE,KAAK,MAAM;GAAI,SAAS,OAAO,MAAM,GAAG;GAAE;EACnD,CAAC;AAIN,QAAO;;;;;AAMT,SAAS,uBAAuB,UAA0B;AAExD,SADiB,SAAS,MAAM,IAAI,CAAC,KAAK,IAAI,UAE3C,QAAQ,qBAAqB,GAAG,CAChC,QAAQ,kBAAkB,IAAI;;;;;AAMnC,SAAS,iBAAiB,MAAc,OAA8B;CACpE,MAAM,wBAAQ,IAAI,OAAO,GAAG,YAAY,MAAM,CAAC,2BAA2B;AAE1E,QADc,KAAK,MAAM,MAAM,GAChB,MAAM;;;;;AAMvB,SAAS,sBACP,MACA,OACsB;CACtB,MAAM,wBAAQ,IAAI,OAAO,GAAG,YAAY,MAAM,CAAC,6BAA6B;CAC5E,MAAM,QAAQ,KAAK,MAAM,MAAM;AAC/B,KAAI,CAAC,QAAQ,GAAI,QAAO;CAExB,MAAM,QAAQ,MAAM;CACpB,MAAM,QAAQ,MAAM,KAAK,MAAM,SAAS,oBAAoB,CAAC,CAC1D,KAAK,MAAM,EAAE,GAAG,CAChB,QACE,UAA2B,OAAO,UAAU,YAAY,MAAM,SAAS,EACzE;AAEH,QAAO,MAAM,SAAS,IAAI,QAAQ;;;;;AAMpC,SAAS,YAAY,OAA0C;AAC7D,QACE,UAAU,kBACV,UAAU,UACV,UAAU,YACV,UAAU;;;;;AAOd,SAAS,YAAY,OAAuB;AAC1C,QAAO,MAAM,QAAQ,uBAAuB,OAAO"}
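A usage sketch of the feature scanner above (illustrative only): the import path is an assumption, and the inline feature source is a minimal shape the regex-based extraction can pick up; real feature modules may differ.

// Hypothetical usage sketch (ESM JavaScript); path and source are placeholders.
import { isFeatureFile, scanFeatureSource } from "@contractspec/module.workspace";

const filePath = "src/billing/invoicing.feature.ts";
const source = `
  export const invoicing = {
    key: 'billing.invoicing',
    title: 'Invoicing',
    stability: 'beta',
    owners: ['team-billing'],
    tags: ['billing'],
    operations: [{ key: 'billing.invoice.create', version: 1 }]
  };
`;

if (isFeatureFile(filePath)) {
  const result = scanFeatureSource(source, filePath);
  // Key, stability, owners/tags, and operation refs are extracted via the patterns above.
  console.log(result.key, result.stability, result.owners, result.operations);
}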
package/dist/analysis/grouping.d.ts
@@ -0,0 +1,79 @@
import { Stability } from "../types/spec-types.js";
import { FeatureScanResult, SpecScanResult } from "../types/analysis-types.js";

//#region src/analysis/grouping.d.ts

/**
 * Filter criteria for spec scan results.
 */
interface SpecFilter {
  /** Filter by tags (item must have at least one matching tag) */
  tags?: string[];
  /** Filter by owners (item must have at least one matching owner) */
  owners?: string[];
  /** Filter by stability levels */
  stability?: Stability[];
  /** Filter by spec type */
  specType?: SpecScanResult['specType'][];
  /** Filter by name pattern (glob) */
  namePattern?: string;
}
/**
 * Grouping key function type.
 */
type GroupKeyFn<T> = (item: T) => string;
/**
 * Grouped items result.
 */
interface GroupedItems<T> {
  key: string;
  items: T[];
}
/**
 * Pre-built grouping strategies for spec scan results.
 */
declare const SpecGroupingStrategies: {
  /** Group by first tag. */
  byTag: (item: SpecScanResult) => string;
  /** Group by first owner. */
  byOwner: (item: SpecScanResult) => string;
  /** Group by domain (first segment of name). */
  byDomain: (item: SpecScanResult) => string;
  /** Group by stability. */
  byStability: (item: SpecScanResult) => string;
  /** Group by spec type. */
  bySpecType: (item: SpecScanResult) => string;
  /** Group by file directory. */
  byDirectory: (item: SpecScanResult) => string;
};
/**
 * Filter specs by criteria.
 */
declare function filterSpecs(specs: SpecScanResult[], filter: SpecFilter): SpecScanResult[];
/**
 * Group specs by key function.
 */
declare function groupSpecs<T>(items: T[], keyFn: GroupKeyFn<T>): Map<string, T[]>;
/**
 * Group specs and return as array.
 */
declare function groupSpecsToArray<T>(items: T[], keyFn: GroupKeyFn<T>): GroupedItems<T>[];
/**
 * Get unique tags from spec results.
 */
declare function getUniqueSpecTags(specs: SpecScanResult[]): string[];
/**
 * Get unique owners from spec results.
 */
declare function getUniqueSpecOwners(specs: SpecScanResult[]): string[];
/**
 * Get unique domains from spec results.
 */
declare function getUniqueSpecDomains(specs: SpecScanResult[]): string[];
/**
 * Filter features by criteria.
 */
declare function filterFeatures(features: FeatureScanResult[], filter: SpecFilter): FeatureScanResult[];
//#endregion
export { GroupKeyFn, GroupedItems, SpecFilter, SpecGroupingStrategies, filterFeatures, filterSpecs, getUniqueSpecDomains, getUniqueSpecOwners, getUniqueSpecTags, groupSpecs, groupSpecsToArray };
//# sourceMappingURL=grouping.d.ts.map
package/dist/analysis/grouping.d.ts.map
@@ -0,0 +1 @@
{"version":3,"file":"grouping.d.ts","names":[],"sources":["../../src/analysis/grouping.ts"],"sourcesContent":[],"mappings":";;;;;AAmCA;AAQA;;AAKkB,UAlCD,UAAA,CAkCC;EAGC;EASG,IAAA,CAAA,EAAA,MAAA,EAAA;EAGD;EAGC,MAAA,CAAA,EAAA,MAAA,EAAA;EAAc;EAUpB,SAAA,CAAA,EAxDF,SAwDa,EAAA;EAClB;EACC,QAAA,CAAA,EAxDG,cAwDH,CAAA,UAAA,CAAA,EAAA;EACP;EAAc,WAAA,CAAA,EAAA,MAAA;AAiDjB;;;;AAGe,KArGH,UAqGG,CAAA,CAAA,CAAA,GAAA,CAAA,IAAA,EArGoB,CAqGpB,EAAA,GAAA,MAAA;;;AAmBf;AACS,UApHQ,YAoHR,CAAA,CAAA,CAAA,CAAA;EACW,GAAA,EAAA,MAAA;EAAX,KAAA,EAnHA,CAmHA,EAAA;;;;AAWT;AAagB,cArIH,sBAqI8B,EAAA;EAa3B;EAWA,KAAA,EAAA,CAAA,IAAA,EA3JA,cA2Jc,EAAA,GAAA,MAAA;EAClB;EACF,OAAA,EAAA,CAAA,IAAA,EA1JQ,cA0JR,EAAA,GAAA,MAAA;EACP;EAAiB,QAAA,EAAA,CAAA,IAAA,EAxJD,cAwJC,EAAA,GAAA,MAAA;;sBA/IE;;qBAGD;;sBAGC;;;;;iBAUN,WAAA,QACP,0BACC,aACP;;;;iBAiDa,qBACP,YACA,WAAW,KACjB,YAAY;;;;iBAmBC,4BACP,YACA,WAAW,KACjB,aAAa;;;;iBAUA,iBAAA,QAAyB;;;;iBAazB,mBAAA,QAA2B;;;;iBAa3B,oBAAA,QAA4B;;;;iBAW5B,cAAA,WACJ,6BACF,aACP"}
package/dist/analysis/grouping.js
@@ -0,0 +1,115 @@
//#region src/analysis/grouping.ts
/**
 * Pre-built grouping strategies for spec scan results.
 */
const SpecGroupingStrategies = {
  byTag: (item) => item.tags?.[0] ?? "untagged",
  byOwner: (item) => item.owners?.[0] ?? "unowned",
  byDomain: (item) => {
    const key = item.key ?? "";
    if (key.includes(".")) return key.split(".")[0] ?? "default";
    return "default";
  },
  byStability: (item) => item.stability ?? "stable",
  bySpecType: (item) => item.specType,
  byDirectory: (item) => {
    return item.filePath.split("/").slice(0, -1).join("/") || ".";
  }
};
/**
 * Filter specs by criteria.
 */
function filterSpecs(specs, filter) {
  return specs.filter((spec) => {
    if (filter.tags?.length) {
      if (!filter.tags.some((tag) => spec.tags?.includes(tag))) return false;
    }
    if (filter.owners?.length) {
      if (!filter.owners.some((owner) => spec.owners?.includes(owner))) return false;
    }
    if (filter.stability?.length) {
      if (!filter.stability.includes(spec.stability ?? "stable")) return false;
    }
    if (filter.specType?.length) {
      if (!filter.specType.includes(spec.specType)) return false;
    }
    if (filter.namePattern) {
      const key = spec.key ?? "";
      const pattern = filter.namePattern.replace(/\*/g, ".*").replace(/\?/g, ".");
      if (!new RegExp(`^${pattern}$`, "i").test(key)) return false;
    }
    return true;
  });
}
/**
 * Group specs by key function.
 */
function groupSpecs(items, keyFn) {
  const groups = /* @__PURE__ */ new Map();
  for (const item of items) {
    const key = keyFn(item);
    const existing = groups.get(key);
    if (existing) existing.push(item);
    else groups.set(key, [item]);
  }
  return groups;
}
/**
 * Group specs and return as array.
 */
function groupSpecsToArray(items, keyFn) {
  const map = groupSpecs(items, keyFn);
  return Array.from(map.entries()).map(([key, items$1]) => ({
    key,
    items: items$1
  })).sort((a, b) => a.key.localeCompare(b.key));
}
/**
 * Get unique tags from spec results.
 */
function getUniqueSpecTags(specs) {
  const tags = /* @__PURE__ */ new Set();
  for (const spec of specs) for (const tag of spec.tags ?? []) tags.add(tag);
  return Array.from(tags).sort();
}
/**
 * Get unique owners from spec results.
 */
function getUniqueSpecOwners(specs) {
  const owners = /* @__PURE__ */ new Set();
  for (const spec of specs) for (const owner of spec.owners ?? []) owners.add(owner);
  return Array.from(owners).sort();
}
/**
 * Get unique domains from spec results.
 */
function getUniqueSpecDomains(specs) {
  const domains = /* @__PURE__ */ new Set();
  for (const spec of specs) domains.add(SpecGroupingStrategies.byDomain(spec));
  return Array.from(domains).sort();
}
/**
 * Filter features by criteria.
 */
function filterFeatures(features, filter) {
  return features.filter((feature) => {
    if (filter.tags?.length) {
      if (!filter.tags.some((tag) => feature.tags?.includes(tag))) return false;
    }
    if (filter.owners?.length) {
      if (!filter.owners.some((owner) => feature.owners?.includes(owner))) return false;
    }
    if (filter.stability?.length) {
      if (!filter.stability.includes(feature.stability ?? "stable")) return false;
    }
    if (filter.namePattern) {
      const pattern = filter.namePattern.replace(/\*/g, ".*").replace(/\?/g, ".");
      if (!new RegExp(`^${pattern}$`, "i").test(feature.key)) return false;
    }
    return true;
  });
}

//#endregion
export { SpecGroupingStrategies, filterFeatures, filterSpecs, getUniqueSpecDomains, getUniqueSpecOwners, getUniqueSpecTags, groupSpecs, groupSpecsToArray };
//# sourceMappingURL=grouping.js.map
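Finally, a usage sketch of the filtering and grouping helpers (not from the package): the root import path is an assumption, and the inline spec objects carry only the fields these helpers actually read (key, specType, stability, tags, owners, filePath).

// Hypothetical usage sketch (ESM JavaScript).
import { filterSpecs, groupSpecsToArray, SpecGroupingStrategies } from "@contractspec/module.workspace";

const specs = [
  { filePath: "specs/billing/create-invoice.operation.ts", specType: "operation", key: "billing.invoice.create", stability: "stable", tags: ["billing"], owners: ["team-billing"] },
  { filePath: "specs/auth/login.operation.ts", specType: "operation", key: "auth.login", stability: "beta", tags: ["auth"], owners: ["team-platform"] }
];

// Keep stable billing specs whose key matches a glob, then group by domain
// (byDomain takes the first dot-separated segment of the key).
const filtered = filterSpecs(specs, { tags: ["billing"], stability: ["stable"], namePattern: "billing.*" });
for (const group of groupSpecsToArray(filtered, SpecGroupingStrategies.byDomain)) {
  console.log(group.key, group.items.map((s) => s.key));
}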