@contractspec/module.workspace 1.46.2 → 1.48.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/analysis/deps/graph.js.map +1 -1
- package/dist/analysis/deps/parse-imports.js.map +1 -1
- package/dist/analysis/diff/deep-diff.js.map +1 -1
- package/dist/analysis/diff/semantic.js.map +1 -1
- package/dist/analysis/example-scan.d.ts.map +1 -1
- package/dist/analysis/example-scan.js +2 -37
- package/dist/analysis/example-scan.js.map +1 -1
- package/dist/analysis/feature-extractor.js +203 -0
- package/dist/analysis/feature-extractor.js.map +1 -0
- package/dist/analysis/feature-scan.d.ts.map +1 -1
- package/dist/analysis/feature-scan.js +20 -121
- package/dist/analysis/feature-scan.js.map +1 -1
- package/dist/analysis/impact/classifier.js.map +1 -1
- package/dist/analysis/impact/rules.js.map +1 -1
- package/dist/analysis/index.js +3 -1
- package/dist/analysis/snapshot/normalizer.js.map +1 -1
- package/dist/analysis/snapshot/snapshot.js.map +1 -1
- package/dist/analysis/spec-parsing-utils.d.ts +26 -0
- package/dist/analysis/spec-parsing-utils.d.ts.map +1 -0
- package/dist/analysis/spec-parsing-utils.js +98 -0
- package/dist/analysis/spec-parsing-utils.js.map +1 -0
- package/dist/analysis/spec-scan.d.ts +8 -22
- package/dist/analysis/spec-scan.d.ts.map +1 -1
- package/dist/analysis/spec-scan.js +105 -337
- package/dist/analysis/spec-scan.js.map +1 -1
- package/dist/analysis/utils/matchers.js +77 -0
- package/dist/analysis/utils/matchers.js.map +1 -0
- package/dist/analysis/utils/variables.js +45 -0
- package/dist/analysis/utils/variables.js.map +1 -0
- package/dist/analysis/validate/index.js +1 -0
- package/dist/analysis/validate/spec-structure.d.ts.map +1 -1
- package/dist/analysis/validate/spec-structure.js +401 -85
- package/dist/analysis/validate/spec-structure.js.map +1 -1
- package/dist/formatter.js.map +1 -1
- package/dist/formatters/index.js +2 -0
- package/dist/formatters/spec-markdown.d.ts +4 -1
- package/dist/formatters/spec-markdown.d.ts.map +1 -1
- package/dist/formatters/spec-markdown.js +12 -4
- package/dist/formatters/spec-markdown.js.map +1 -1
- package/dist/formatters/spec-to-docblock.d.ts +3 -1
- package/dist/formatters/spec-to-docblock.d.ts.map +1 -1
- package/dist/formatters/spec-to-docblock.js +2 -2
- package/dist/formatters/spec-to-docblock.js.map +1 -1
- package/dist/index.d.ts +4 -3
- package/dist/index.js +4 -2
- package/dist/templates/integration-utils.js.map +1 -1
- package/dist/templates/integration.js +3 -4
- package/dist/templates/integration.js.map +1 -1
- package/dist/templates/knowledge.js.map +1 -1
- package/dist/templates/workflow.js.map +1 -1
- package/dist/types/analysis-types.d.ts +24 -3
- package/dist/types/analysis-types.d.ts.map +1 -1
- package/dist/types/generation-types.js.map +1 -1
- package/dist/types/llm-types.d.ts +1 -1
- package/dist/types/llm-types.d.ts.map +1 -1
- package/package.json +9 -10
package/dist/analysis/feature-scan.js
CHANGED

@@ -1,156 +1,55 @@
+import { isStability, matchStringArrayField, matchStringField } from "./utils/matchers.js";
+import { extractFeatureRefs } from "./feature-extractor.js";
+
 //#region src/analysis/feature-scan.ts
 /**
  * Check if a file is a feature file based on naming conventions.
  */
 function isFeatureFile(filePath) {
-	return filePath.includes(".feature.");
+	return filePath.includes(".feature.") && filePath.endsWith(".ts");
 }
 /**
  * Scan a feature source file to extract metadata.
  */
 function scanFeatureSource(code, filePath) {
 	const key = matchStringField(code, "key") ?? extractKeyFromFilePath(filePath);
+	const version = matchStringField(code, "version") ?? "1.0.0";
 	const title = matchStringField(code, "title") ?? void 0;
 	const description = matchStringField(code, "description") ?? void 0;
 	const goal = matchStringField(code, "goal") ?? void 0;
 	const context = matchStringField(code, "context") ?? void 0;
-	const domain = matchStringField(code, "domain") ?? void 0;
 	const stabilityRaw = matchStringField(code, "stability");
+	const stability = isStability(stabilityRaw) ? stabilityRaw : void 0;
+	const owners = matchStringArrayField(code, "owners");
+	const tags = matchStringArrayField(code, "tags");
+	const refs = extractFeatureRefs(code);
 	return {
 		filePath,
 		key,
+		version,
 		title,
 		description,
 		goal,
 		context,
-
-
-
-
-
-
-
-
-
-
+		stability,
+		owners,
+		tags,
+		operations: refs.operations,
+		events: refs.events,
+		presentations: refs.presentations,
+		experiments: refs.experiments,
+		capabilities: refs.capabilities,
+		opToPresentationLinks: refs.opToPresentationLinks,
+		presentationsTargets: refs.presentationsTargets,
 		sourceBlock: code
 	};
 }
 /**
- * Extract refs from a named array (e.g., operations, events, presentations).
- */
-function extractRefsFromArray(code, fieldName) {
-	const refs = [];
-	const arrayPattern = new RegExp(`${escapeRegex(fieldName)}\\s*:\\s*\\[([\\s\\S]*?)\\]`, "m");
-	const arrayMatch = code.match(arrayPattern);
-	if (!arrayMatch?.[1]) return refs;
-	const refPattern = /\{\s*key:\s*['"]([^'"]+)['"]\s*,\s*version:\s*['"]([^'"]+)['"]/g;
-	let match;
-	while ((match = refPattern.exec(arrayMatch[1])) !== null) if (match[1] && match[2]) refs.push({
-		key: match[1],
-		version: match[2]
-	});
-	return refs;
-}
-/**
- * Extract capability bindings (provides and requires).
- */
-function extractCapabilities(code) {
-	const provides = [];
-	const requires = [];
-	const capabilitiesMatch = code.match(/capabilities\s*:\s*\{([\s\S]*?)\}\s*,?\s*(?:opToPresentation|$|\})/);
-	if (!capabilitiesMatch?.[1]) return {
-		provides,
-		requires
-	};
-	const capabilitiesContent = capabilitiesMatch[1];
-	const providesMatch = capabilitiesContent.match(/provides\s*:\s*\[([\s\S]*?)\]/);
-	if (providesMatch?.[1]) {
-		const refPattern = /\{\s*key:\s*['"]([^'"]+)['"]\s*,\s*version:\s*['"]([^'"]+)['"]/g;
-		let match;
-		while ((match = refPattern.exec(providesMatch[1])) !== null) if (match[1] && match[2]) provides.push({
-			key: match[1],
-			version: match[2]
-		});
-	}
-	const requiresMatch = capabilitiesContent.match(/requires\s*:\s*\[([\s\S]*?)\]/);
-	if (requiresMatch?.[1]) {
-		const refPatternWithVersion = /\{\s*key:\s*['"]([^'"]+)['"]\s*,\s*version:\s*['"]([^'"]+)['"]/g;
-		const refPatternKeyOnly = /\{\s*key:\s*['"]([^'"]+)['"]\s*\}/g;
-		let match = null;
-		while ((match = refPatternWithVersion.exec(requiresMatch[1])) !== null) if (match[1] && match[2]) requires.push({
-			key: match[1],
-			version: match[2]
-		});
-		while ((match = refPatternKeyOnly.exec(requiresMatch[1])) !== null) if (match && match[1]) {
-			if (!requires.some((r) => r.key === match[1])) requires.push({
-				key: match[1],
-				version: "1.0.0"
-			});
-		}
-	}
-	return {
-		provides,
-		requires
-	};
-}
-/**
- * Extract opToPresentation links.
- */
-function extractOpToPresentationLinks(code) {
-	const links = [];
-	const arrayMatch = code.match(/opToPresentation\s*:\s*\[([\s\S]*?)\]/);
-	if (!arrayMatch?.[1]) return links;
-	const linkPattern = /\{\s*op:\s*\{\s*key:\s*['"]([^'"]+)['"]\s*,\s*version:\s*['"]([^'"]+)['"]\s*\}\s*,\s*pres:\s*\{\s*key:\s*['"]([^'"]+)['"]\s*,\s*version:\s*['"]([^'"]+)['"]\s*\}/g;
-	let match;
-	while ((match = linkPattern.exec(arrayMatch[1])) !== null) if (match[1] && match[2] && match[3] && match[4]) links.push({
-		op: {
-			key: match[1],
-			version: match[2]
-		},
-		pres: {
-			key: match[3],
-			version: match[4]
-		}
-	});
-	return links;
-}
-/**
  * Extract key from file path as fallback.
  */
 function extractKeyFromFilePath(filePath) {
 	return (filePath.split("/").pop() ?? filePath).replace(/\.feature\.[jt]s$/, "").replace(/[^a-zA-Z0-9-]/g, "-");
 }
-/**
- * Match a string field in source code.
- */
-function matchStringField(code, field) {
-	const regex = /* @__PURE__ */ new RegExp(`${escapeRegex(field)}\\s*:\\s*['"]([^'"]+)['"]`);
-	return code.match(regex)?.[1] ?? null;
-}
-/**
- * Match a string array field in source code.
- */
-function matchStringArrayField(code, field) {
-	const regex = /* @__PURE__ */ new RegExp(`${escapeRegex(field)}\\s*:\\s*\\[([\\s\\S]*?)\\]`);
-	const match = code.match(regex);
-	if (!match?.[1]) return void 0;
-	const inner = match[1];
-	const items = Array.from(inner.matchAll(/['"]([^'"]+)['"]/g)).map((m) => m[1]).filter((value) => typeof value === "string" && value.length > 0);
-	return items.length > 0 ? items : void 0;
-}
-/**
- * Check if a value is a valid stability.
- */
-function isStability(value) {
-	return value === "experimental" || value === "beta" || value === "stable" || value === "deprecated";
-}
-/**
- * Escape regex special characters.
- */
-function escapeRegex(value) {
-	return value.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
-}

 //#endregion
 export { isFeatureFile, scanFeatureSource };
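For context on the rewritten scanner above, a minimal sketch of what 1.48.0 changes for callers. This is not taken from the package docs: the import path assumes the helpers are re-exported from the package root (the dist index above only imports them), and defineFeature plus the sample feature source are hypothetical.

// Hypothetical usage sketch; import path and defineFeature are assumptions.
import { isFeatureFile, scanFeatureSource } from "@contractspec/module.workspace";

// The scanner only pattern-matches string-literal fields, so any source text works.
const code = `
  export const checkoutFeature = defineFeature({
    key: "checkout",
    version: "2.1.0",
    title: "Checkout",
    stability: "beta",
    owners: ["payments-team"],
  });
`;

// New in 1.48.0: the file must also end in ".ts", so "checkout.feature.md" no longer matches.
if (isFeatureFile("src/checkout.feature.ts")) {
  const result = scanFeatureSource(code, "src/checkout.feature.ts");
  // Also new: version defaults to "1.0.0" when absent, and nested refs
  // (operations, events, presentations, ...) now come from extractFeatureRefs.
  console.log(result.key, result.version, result.stability, result.operations);
}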
package/dist/analysis/feature-scan.js.map
CHANGED

@@ -1 +1 @@
-
{"version":3,"file":"feature-scan.js","names":["refs: RefInfo[]","provides: RefInfo[]","requires: RefInfo[]","match: RegExpExecArray | null","links: { op: RefInfo; pres: RefInfo }[]"],"sources":["../../src/analysis/feature-scan.ts"],"sourcesContent":["/**\n * Feature file scanning utilities.\n *\n * Extracts FeatureModuleSpec metadata from source code without execution.\n */\n\nimport type { FeatureScanResult, RefInfo } from '../types/analysis-types';\nimport type { Stability } from '../types/spec-types';\n\n/**\n * Check if a file is a feature file based on naming conventions.\n */\nexport function isFeatureFile(filePath: string): boolean {\n return filePath.includes('.feature.');\n}\n\n/**\n * Scan a feature source file to extract metadata.\n */\nexport function scanFeatureSource(\n code: string,\n filePath: string\n): FeatureScanResult {\n const key = matchStringField(code, 'key') ?? extractKeyFromFilePath(filePath);\n const title = matchStringField(code, 'title') ?? undefined;\n const description = matchStringField(code, 'description') ?? undefined;\n const goal = matchStringField(code, 'goal') ?? undefined;\n const context = matchStringField(code, 'context') ?? undefined;\n const domain = matchStringField(code, 'domain') ?? undefined;\n const stabilityRaw = matchStringField(code, 'stability');\n const stability = isStability(stabilityRaw) ? stabilityRaw : undefined;\n const owners = matchStringArrayField(code, 'owners');\n const tags = matchStringArrayField(code, 'tags');\n\n // Extract operations\n const operations = extractRefsFromArray(code, 'operations');\n\n // Extract events\n const events = extractRefsFromArray(code, 'events');\n\n // Extract presentations\n const presentations = extractRefsFromArray(code, 'presentations');\n\n // Extract experiments\n const experiments = extractRefsFromArray(code, 'experiments');\n\n // Extract capabilities\n const capabilities = extractCapabilities(code);\n\n // Extract op to presentation links\n const opToPresentationLinks = extractOpToPresentationLinks(code);\n\n return {\n filePath,\n key,\n title,\n description,\n goal,\n context,\n domain,\n stability,\n owners,\n tags,\n operations,\n events,\n presentations,\n experiments,\n capabilities,\n opToPresentationLinks,\n sourceBlock: code,\n };\n}\n\n/**\n * Extract refs from a named array (e.g., operations, events, presentations).\n */\nfunction extractRefsFromArray(code: string, fieldName: string): RefInfo[] {\n const refs: RefInfo[] = [];\n\n // Match the array section\n const arrayPattern = new RegExp(\n `${escapeRegex(fieldName)}\\\\s*:\\\\s*\\\\[([\\\\s\\\\S]*?)\\\\]`,\n 'm'\n );\n const arrayMatch = code.match(arrayPattern);\n\n if (!arrayMatch?.[1]) return refs;\n\n // Extract each { key: 'x', version: 'x.y.z' } entry\n const refPattern =\n /\\{\\s*key:\\s*['\"]([^'\"]+)['\"]\\s*,\\s*version:\\s*['\"]([^'\"]+)['\"]/g;\n let match;\n while ((match = refPattern.exec(arrayMatch[1])) !== null) {\n if (match[1] && match[2]) {\n refs.push({\n key: match[1],\n version: match[2],\n });\n }\n }\n\n return refs;\n}\n\n/**\n * Extract capability bindings (provides and requires).\n */\nfunction extractCapabilities(code: string): {\n provides: RefInfo[];\n requires: RefInfo[];\n} {\n const provides: RefInfo[] = [];\n const requires: RefInfo[] = [];\n\n // Match the capabilities section - need to match nested braces properly\n const capabilitiesMatch = code.match(\n /capabilities\\s*:\\s*\\{([\\s\\S]*?)\\}\\s*,?\\s*(?:opToPresentation|$|\\})/\n );\n if (!capabilitiesMatch?.[1]) {\n return { 
provides, requires };\n }\n\n const capabilitiesContent = capabilitiesMatch[1];\n\n // Extract provides\n const providesMatch = capabilitiesContent.match(\n /provides\\s*:\\s*\\[([\\s\\S]*?)\\]/\n );\n if (providesMatch?.[1]) {\n const refPattern =\n /\\{\\s*key:\\s*['\"]([^'\"]+)['\"]\\s*,\\s*version:\\s*['\"]([^'\"]+)['\"]/g;\n let match;\n while ((match = refPattern.exec(providesMatch[1])) !== null) {\n if (match[1] && match[2]) {\n provides.push({\n key: match[1],\n version: match[2],\n });\n }\n }\n }\n\n // Extract requires\n const requiresMatch = capabilitiesContent.match(\n /requires\\s*:\\s*\\[([\\s\\S]*?)\\]/\n );\n if (requiresMatch?.[1]) {\n // Requires can have key+version or just key\n const refPatternWithVersion =\n /\\{\\s*key:\\s*['\"]([^'\"]+)['\"]\\s*,\\s*version:\\s*['\"]([^'\"]+)['\"]/g;\n const refPatternKeyOnly = /\\{\\s*key:\\s*['\"]([^'\"]+)['\"]\\s*\\}/g;\n\n let match: RegExpExecArray | null = null;\n while ((match = refPatternWithVersion.exec(requiresMatch[1])) !== null) {\n if (match[1] && match[2]) {\n requires.push({\n key: match[1],\n version: match[2],\n });\n }\n }\n\n // Also match key-only requires (version defaults to 1)\n while ((match = refPatternKeyOnly.exec(requiresMatch[1])) !== null) {\n if (match && match[1]) {\n // Check if we already added this with a version\n // eslint-disable-next-line @typescript-eslint/no-non-null-assertion\n const alreadyExists = requires.some((r) => r.key === match![1]);\n if (!alreadyExists) {\n requires.push({\n key: match[1],\n version: '1.0.0', // Default version\n });\n }\n }\n }\n }\n\n return { provides, requires };\n}\n\n/**\n * Extract opToPresentation links.\n */\nfunction extractOpToPresentationLinks(\n code: string\n): { op: RefInfo; pres: RefInfo }[] {\n const links: { op: RefInfo; pres: RefInfo }[] = [];\n\n // Match the opToPresentation array\n const arrayMatch = code.match(/opToPresentation\\s*:\\s*\\[([\\s\\S]*?)\\]/);\n if (!arrayMatch?.[1]) return links;\n\n // Match each link entry\n // Pattern: { op: { key: 'x', version: 'N' }, pres: { key: 'y', version: 'M' } }\n const linkPattern =\n /\\{\\s*op:\\s*\\{\\s*key:\\s*['\"]([^'\"]+)['\"]\\s*,\\s*version:\\s*['\"]([^'\"]+)['\"]\\s*\\}\\s*,\\s*pres:\\s*\\{\\s*key:\\s*['\"]([^'\"]+)['\"]\\s*,\\s*version:\\s*['\"]([^'\"]+)['\"]\\s*\\}/g;\n\n let match;\n while ((match = linkPattern.exec(arrayMatch[1])) !== null) {\n if (match[1] && match[2] && match[3] && match[4]) {\n links.push({\n op: { key: match[1], version: match[2] },\n pres: { key: match[3], version: match[4] },\n });\n }\n }\n\n return links;\n}\n\n/**\n * Extract key from file path as fallback.\n */\nfunction extractKeyFromFilePath(filePath: string): string {\n const fileName = filePath.split('/').pop() ?? filePath;\n return fileName\n .replace(/\\.feature\\.[jt]s$/, '')\n .replace(/[^a-zA-Z0-9-]/g, '-');\n}\n\n/**\n * Match a string field in source code.\n */\nfunction matchStringField(code: string, field: string): string | null {\n const regex = new RegExp(`${escapeRegex(field)}\\\\s*:\\\\s*['\"]([^'\"]+)['\"]`);\n const match = code.match(regex);\n return match?.[1] ?? 
null;\n}\n\n/**\n * Match a string array field in source code.\n */\nfunction matchStringArrayField(\n code: string,\n field: string\n): string[] | undefined {\n const regex = new RegExp(`${escapeRegex(field)}\\\\s*:\\\\s*\\\\[([\\\\s\\\\S]*?)\\\\]`);\n const match = code.match(regex);\n if (!match?.[1]) return undefined;\n\n const inner = match[1];\n const items = Array.from(inner.matchAll(/['\"]([^'\"]+)['\"]/g))\n .map((m) => m[1])\n .filter(\n (value): value is string => typeof value === 'string' && value.length > 0\n );\n\n return items.length > 0 ? items : undefined;\n}\n\n/**\n * Check if a value is a valid stability.\n */\nfunction isStability(value: string | null): value is Stability {\n return (\n value === 'experimental' ||\n value === 'beta' ||\n value === 'stable' ||\n value === 'deprecated'\n );\n}\n\n/**\n * Escape regex special characters.\n */\nfunction escapeRegex(value: string): string {\n return value.replace(/[.*+?^${}()|[\\]\\\\]/g, '\\\\$&');\n}\n"],"mappings":";;;;AAYA,SAAgB,cAAc,UAA2B;AACvD,QAAO,SAAS,SAAS,YAAY;;;;;AAMvC,SAAgB,kBACd,MACA,UACmB;CACnB,MAAM,MAAM,iBAAiB,MAAM,MAAM,IAAI,uBAAuB,SAAS;CAC7E,MAAM,QAAQ,iBAAiB,MAAM,QAAQ,IAAI;CACjD,MAAM,cAAc,iBAAiB,MAAM,cAAc,IAAI;CAC7D,MAAM,OAAO,iBAAiB,MAAM,OAAO,IAAI;CAC/C,MAAM,UAAU,iBAAiB,MAAM,UAAU,IAAI;CACrD,MAAM,SAAS,iBAAiB,MAAM,SAAS,IAAI;CACnD,MAAM,eAAe,iBAAiB,MAAM,YAAY;AAuBxD,QAAO;EACL;EACA;EACA;EACA;EACA;EACA;EACA;EACA,WA9BgB,YAAY,aAAa,GAAG,eAAe;EA+B3D,QA9Ba,sBAAsB,MAAM,SAAS;EA+BlD,MA9BW,sBAAsB,MAAM,OAAO;EA+B9C,YA5BiB,qBAAqB,MAAM,aAAa;EA6BzD,QA1Ba,qBAAqB,MAAM,SAAS;EA2BjD,eAxBoB,qBAAqB,MAAM,gBAAgB;EAyB/D,aAtBkB,qBAAqB,MAAM,cAAc;EAuB3D,cApBmB,oBAAoB,KAAK;EAqB5C,uBAlB4B,6BAA6B,KAAK;EAmB9D,aAAa;EACd;;;;;AAMH,SAAS,qBAAqB,MAAc,WAA8B;CACxE,MAAMA,OAAkB,EAAE;CAG1B,MAAM,eAAe,IAAI,OACvB,GAAG,YAAY,UAAU,CAAC,8BAC1B,IACD;CACD,MAAM,aAAa,KAAK,MAAM,aAAa;AAE3C,KAAI,CAAC,aAAa,GAAI,QAAO;CAG7B,MAAM,aACJ;CACF,IAAI;AACJ,SAAQ,QAAQ,WAAW,KAAK,WAAW,GAAG,MAAM,KAClD,KAAI,MAAM,MAAM,MAAM,GACpB,MAAK,KAAK;EACR,KAAK,MAAM;EACX,SAAS,MAAM;EAChB,CAAC;AAIN,QAAO;;;;;AAMT,SAAS,oBAAoB,MAG3B;CACA,MAAMC,WAAsB,EAAE;CAC9B,MAAMC,WAAsB,EAAE;CAG9B,MAAM,oBAAoB,KAAK,MAC7B,qEACD;AACD,KAAI,CAAC,oBAAoB,GACvB,QAAO;EAAE;EAAU;EAAU;CAG/B,MAAM,sBAAsB,kBAAkB;CAG9C,MAAM,gBAAgB,oBAAoB,MACxC,gCACD;AACD,KAAI,gBAAgB,IAAI;EACtB,MAAM,aACJ;EACF,IAAI;AACJ,UAAQ,QAAQ,WAAW,KAAK,cAAc,GAAG,MAAM,KACrD,KAAI,MAAM,MAAM,MAAM,GACpB,UAAS,KAAK;GACZ,KAAK,MAAM;GACX,SAAS,MAAM;GAChB,CAAC;;CAMR,MAAM,gBAAgB,oBAAoB,MACxC,gCACD;AACD,KAAI,gBAAgB,IAAI;EAEtB,MAAM,wBACJ;EACF,MAAM,oBAAoB;EAE1B,IAAIC,QAAgC;AACpC,UAAQ,QAAQ,sBAAsB,KAAK,cAAc,GAAG,MAAM,KAChE,KAAI,MAAM,MAAM,MAAM,GACpB,UAAS,KAAK;GACZ,KAAK,MAAM;GACX,SAAS,MAAM;GAChB,CAAC;AAKN,UAAQ,QAAQ,kBAAkB,KAAK,cAAc,GAAG,MAAM,KAC5D,KAAI,SAAS,MAAM,IAIjB;OAAI,CADkB,SAAS,MAAM,MAAM,EAAE,QAAQ,MAAO,GAAG,CAE7D,UAAS,KAAK;IACZ,KAAK,MAAM;IACX,SAAS;IACV,CAAC;;;AAMV,QAAO;EAAE;EAAU;EAAU;;;;;AAM/B,SAAS,6BACP,MACkC;CAClC,MAAMC,QAA0C,EAAE;CAGlD,MAAM,aAAa,KAAK,MAAM,wCAAwC;AACtE,KAAI,CAAC,aAAa,GAAI,QAAO;CAI7B,MAAM,cACJ;CAEF,IAAI;AACJ,SAAQ,QAAQ,YAAY,KAAK,WAAW,GAAG,MAAM,KACnD,KAAI,MAAM,MAAM,MAAM,MAAM,MAAM,MAAM,MAAM,GAC5C,OAAM,KAAK;EACT,IAAI;GAAE,KAAK,MAAM;GAAI,SAAS,MAAM;GAAI;EACxC,MAAM;GAAE,KAAK,MAAM;GAAI,SAAS,MAAM;GAAI;EAC3C,CAAC;AAIN,QAAO;;;;;AAMT,SAAS,uBAAuB,UAA0B;AAExD,SADiB,SAAS,MAAM,IAAI,CAAC,KAAK,IAAI,UAE3C,QAAQ,qBAAqB,GAAG,CAChC,QAAQ,kBAAkB,IAAI;;;;;AAMnC,SAAS,iBAAiB,MAAc,OAA8B;CACpE,MAAM,wBAAQ,IAAI,OAAO,GAAG,YAAY,MAAM,CAAC,2BAA2B;AAE1E,QADc,KAAK,MAAM,MAAM,GAChB,MAAM;;;;;AAMvB,SAAS,sBACP,MACA,OACsB;CACtB,MAAM,wBAAQ,IAAI,OAAO,GAAG,YAAY,MAAM,CAAC,6BAA6B;CAC5E,MAAM,QAAQ,KAAK,MAAM,MAAM;AA
C/B,KAAI,CAAC,QAAQ,GAAI,QAAO;CAExB,MAAM,QAAQ,MAAM;CACpB,MAAM,QAAQ,MAAM,KAAK,MAAM,SAAS,oBAAoB,CAAC,CAC1D,KAAK,MAAM,EAAE,GAAG,CAChB,QACE,UAA2B,OAAO,UAAU,YAAY,MAAM,SAAS,EACzE;AAEH,QAAO,MAAM,SAAS,IAAI,QAAQ;;;;;AAMpC,SAAS,YAAY,OAA0C;AAC7D,QACE,UAAU,kBACV,UAAU,UACV,UAAU,YACV,UAAU;;;;;AAOd,SAAS,YAAY,OAAuB;AAC1C,QAAO,MAAM,QAAQ,uBAAuB,OAAO"}
+
{"version":3,"file":"feature-scan.js","names":[],"sources":["../../src/analysis/feature-scan.ts"],"sourcesContent":["import {\n isStability,\n matchStringArrayField,\n matchStringField,\n} from './utils/matchers';\nimport type { FeatureScanResult } from '../types/analysis-types';\nimport { extractFeatureRefs } from './feature-extractor';\n\n/**\n * Check if a file is a feature file based on naming conventions.\n */\nexport function isFeatureFile(filePath: string): boolean {\n return filePath.includes('.feature.') && filePath.endsWith('.ts');\n}\n\n/**\n * Scan a feature source file to extract metadata.\n */\nexport function scanFeatureSource(\n code: string,\n filePath: string\n): FeatureScanResult {\n const key = matchStringField(code, 'key') ?? extractKeyFromFilePath(filePath);\n const versionRaw = matchStringField(code, 'version');\n const version = versionRaw ?? '1.0.0'; // Default version\n const title = matchStringField(code, 'title') ?? undefined;\n const description = matchStringField(code, 'description') ?? undefined;\n const goal = matchStringField(code, 'goal') ?? undefined;\n const context = matchStringField(code, 'context') ?? undefined;\n const stabilityRaw = matchStringField(code, 'stability');\n const stability = isStability(stabilityRaw) ? stabilityRaw : undefined;\n const owners = matchStringArrayField(code, 'owners');\n const tags = matchStringArrayField(code, 'tags');\n\n // Parse structure using ts-morph to extract nested refs\n const refs = extractFeatureRefs(code);\n\n return {\n filePath,\n key,\n version,\n title,\n description,\n goal,\n context,\n stability,\n owners,\n tags,\n operations: refs.operations,\n events: refs.events,\n presentations: refs.presentations,\n experiments: refs.experiments,\n capabilities: refs.capabilities,\n opToPresentationLinks: refs.opToPresentationLinks,\n presentationsTargets: refs.presentationsTargets,\n sourceBlock: code,\n };\n}\n\n/**\n * Extract key from file path as fallback.\n */\nfunction extractKeyFromFilePath(filePath: string): string {\n const fileName = filePath.split('/').pop() ?? filePath;\n return fileName\n .replace(/\\.feature\\.[jt]s$/, '')\n .replace(/[^a-zA-Z0-9-]/g, '-');\n}\n"],"mappings":";;;;;;;AAWA,SAAgB,cAAc,UAA2B;AACvD,QAAO,SAAS,SAAS,YAAY,IAAI,SAAS,SAAS,MAAM;;;;;AAMnE,SAAgB,kBACd,MACA,UACmB;CACnB,MAAM,MAAM,iBAAiB,MAAM,MAAM,IAAI,uBAAuB,SAAS;CAE7E,MAAM,UADa,iBAAiB,MAAM,UAAU,IACtB;CAC9B,MAAM,QAAQ,iBAAiB,MAAM,QAAQ,IAAI;CACjD,MAAM,cAAc,iBAAiB,MAAM,cAAc,IAAI;CAC7D,MAAM,OAAO,iBAAiB,MAAM,OAAO,IAAI;CAC/C,MAAM,UAAU,iBAAiB,MAAM,UAAU,IAAI;CACrD,MAAM,eAAe,iBAAiB,MAAM,YAAY;CACxD,MAAM,YAAY,YAAY,aAAa,GAAG,eAAe;CAC7D,MAAM,SAAS,sBAAsB,MAAM,SAAS;CACpD,MAAM,OAAO,sBAAsB,MAAM,OAAO;CAGhD,MAAM,OAAO,mBAAmB,KAAK;AAErC,QAAO;EACL;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA,YAAY,KAAK;EACjB,QAAQ,KAAK;EACb,eAAe,KAAK;EACpB,aAAa,KAAK;EAClB,cAAc,KAAK;EACnB,uBAAuB,KAAK;EAC5B,sBAAsB,KAAK;EAC3B,aAAa;EACd;;;;;AAMH,SAAS,uBAAuB,UAA0B;AAExD,SADiB,SAAS,MAAM,IAAI,CAAC,KAAK,IAAI,UAE3C,QAAQ,qBAAqB,GAAG,CAChC,QAAQ,kBAAkB,IAAI"}
package/dist/analysis/impact/classifier.js.map
CHANGED

@@ -1 +1 @@
-
{"version":3,"file":"classifier.js","names":[
+
{"version":3,"file":"classifier.js","names":[],"sources":["../../../src/analysis/impact/classifier.ts"],"sourcesContent":["/**\n * Impact classifier.\n *\n * Classifies contract changes as breaking, non-breaking, or info.\n */\n\nimport type { SemanticDiffItem } from '../../types/analysis-types';\nimport type { SpecSnapshot } from '../snapshot/types';\nimport { DEFAULT_RULES, findMatchingRule } from './rules';\nimport type {\n ClassifyOptions,\n ImpactDelta,\n ImpactResult,\n ImpactStatus,\n ImpactSummary,\n} from './types';\n\n/**\n * Classify the impact of changes between base and head snapshots.\n *\n * @param baseSpecs - Specs from the base (baseline) version\n * @param headSpecs - Specs from the head (current) version\n * @param diffs - Semantic diff items from comparison\n * @param options - Classification options\n * @returns Classified impact result\n */\nexport function classifyImpact(\n baseSpecs: SpecSnapshot[],\n headSpecs: SpecSnapshot[],\n diffs: SemanticDiffItem[],\n options: ClassifyOptions = {}\n): ImpactResult {\n const rules = options.customRules ?? DEFAULT_RULES;\n const deltas: ImpactDelta[] = [];\n\n // Create lookup maps\n const baseMap = new Map(baseSpecs.map((s) => [`${s.key}@${s.version}`, s]));\n const headMap = new Map(headSpecs.map((s) => [`${s.key}@${s.version}`, s]));\n\n // Detect added specs\n const addedSpecs: ImpactResult['addedSpecs'] = [];\n for (const spec of headSpecs) {\n const lookupKey = `${spec.key}@${spec.version}`;\n if (!baseMap.has(lookupKey)) {\n addedSpecs.push({\n key: spec.key,\n version: spec.version,\n type: spec.type,\n });\n }\n }\n\n // Detect removed specs\n const removedSpecs: ImpactResult['removedSpecs'] = [];\n for (const spec of baseSpecs) {\n const lookupKey = `${spec.key}@${spec.version}`;\n if (!headMap.has(lookupKey)) {\n removedSpecs.push({\n key: spec.key,\n version: spec.version,\n type: spec.type,\n });\n\n // Removed spec is always breaking\n deltas.push({\n specKey: spec.key,\n specVersion: spec.version,\n specType: spec.type,\n path: `spec.${spec.key}`,\n severity: 'breaking',\n rule: 'endpoint-removed',\n description: `${spec.type === 'operation' ? 'Operation' : 'Event'} '${spec.key}' was removed`,\n });\n }\n }\n\n // Classify diffs\n for (const diff of diffs) {\n const matchingRule = findMatchingRule(\n { path: diff.path, description: diff.description, type: diff.type },\n rules\n );\n\n // Extract spec key from path (heuristic)\n const specKey = extractSpecKey(diff.path, baseSpecs, headSpecs);\n const specInfo = findSpecInfo(specKey, baseSpecs, headSpecs);\n\n deltas.push({\n specKey: specInfo?.key ?? 'unknown',\n specVersion: specInfo?.version ?? '1.0.0',\n specType: specInfo?.type ?? 'operation',\n path: diff.path,\n severity: matchingRule?.severity ?? mapDiffTypeToSeverity(diff.type),\n rule: matchingRule?.id ?? 'unknown',\n description: diff.description,\n oldValue: diff.oldValue,\n newValue: diff.newValue,\n });\n }\n\n // Add added specs as non-breaking changes\n for (const spec of addedSpecs) {\n deltas.push({\n specKey: spec.key,\n specVersion: spec.version,\n specType: spec.type,\n path: `spec.${spec.key}`,\n severity: 'non_breaking',\n rule: 'endpoint-added',\n description: `${spec.type === 'operation' ? 
'Operation' : 'Event'} '${spec.key}' was added`,\n });\n }\n\n // Calculate summary\n const summary = calculateSummary(deltas, addedSpecs, removedSpecs);\n\n // Determine status\n const hasBreaking = summary.breaking > 0 || summary.removed > 0;\n const hasNonBreaking = summary.nonBreaking > 0 || summary.added > 0;\n const status = determineStatus(hasBreaking, hasNonBreaking);\n\n return {\n status,\n hasBreaking,\n hasNonBreaking,\n summary,\n deltas,\n addedSpecs,\n removedSpecs,\n timestamp: new Date().toISOString(),\n };\n}\n\n/**\n * Calculate summary counts from deltas.\n */\nfunction calculateSummary(\n deltas: ImpactDelta[],\n addedSpecs: ImpactResult['addedSpecs'],\n removedSpecs: ImpactResult['removedSpecs']\n): ImpactSummary {\n return {\n breaking: deltas.filter((d) => d.severity === 'breaking').length,\n nonBreaking: deltas.filter((d) => d.severity === 'non_breaking').length,\n info: deltas.filter((d) => d.severity === 'info').length,\n added: addedSpecs.length,\n removed: removedSpecs.length,\n };\n}\n\n/**\n * Determine overall status from flags.\n */\nfunction determineStatus(\n hasBreaking: boolean,\n hasNonBreaking: boolean\n): ImpactStatus {\n if (hasBreaking) return 'breaking';\n if (hasNonBreaking) return 'non-breaking';\n return 'no-impact';\n}\n\n/**\n * Map semantic diff type to impact severity.\n */\nfunction mapDiffTypeToSeverity(type: string): ImpactDelta['severity'] {\n switch (type) {\n case 'breaking':\n return 'breaking';\n case 'removed':\n return 'breaking';\n case 'added':\n return 'non_breaking';\n case 'changed':\n return 'info';\n default:\n return 'info';\n }\n}\n\n/**\n * Extract spec key from a diff path (heuristic).\n */\nfunction extractSpecKey(\n _path: string,\n _baseSpecs: SpecSnapshot[],\n _headSpecs: SpecSnapshot[]\n): string | undefined {\n // This is a simplified heuristic; in practice would need more context\n return undefined;\n}\n\n/**\n * Find spec info from key.\n */\nfunction findSpecInfo(\n key: string | undefined,\n baseSpecs: SpecSnapshot[],\n headSpecs: SpecSnapshot[]\n): SpecSnapshot | undefined {\n if (!key) return headSpecs[0] ?? baseSpecs[0];\n return (\n headSpecs.find((s) => s.key === key) ?? 
baseSpecs.find((s) => s.key === key)\n );\n}\n"],"mappings":";;;;;;;;;;;;AA0BA,SAAgB,eACd,WACA,WACA,OACA,UAA2B,EAAE,EACf;CACd,MAAM,QAAQ,QAAQ,eAAe;CACrC,MAAM,SAAwB,EAAE;CAGhC,MAAM,UAAU,IAAI,IAAI,UAAU,KAAK,MAAM,CAAC,GAAG,EAAE,IAAI,GAAG,EAAE,WAAW,EAAE,CAAC,CAAC;CAC3E,MAAM,UAAU,IAAI,IAAI,UAAU,KAAK,MAAM,CAAC,GAAG,EAAE,IAAI,GAAG,EAAE,WAAW,EAAE,CAAC,CAAC;CAG3E,MAAM,aAAyC,EAAE;AACjD,MAAK,MAAM,QAAQ,WAAW;EAC5B,MAAM,YAAY,GAAG,KAAK,IAAI,GAAG,KAAK;AACtC,MAAI,CAAC,QAAQ,IAAI,UAAU,CACzB,YAAW,KAAK;GACd,KAAK,KAAK;GACV,SAAS,KAAK;GACd,MAAM,KAAK;GACZ,CAAC;;CAKN,MAAM,eAA6C,EAAE;AACrD,MAAK,MAAM,QAAQ,WAAW;EAC5B,MAAM,YAAY,GAAG,KAAK,IAAI,GAAG,KAAK;AACtC,MAAI,CAAC,QAAQ,IAAI,UAAU,EAAE;AAC3B,gBAAa,KAAK;IAChB,KAAK,KAAK;IACV,SAAS,KAAK;IACd,MAAM,KAAK;IACZ,CAAC;AAGF,UAAO,KAAK;IACV,SAAS,KAAK;IACd,aAAa,KAAK;IAClB,UAAU,KAAK;IACf,MAAM,QAAQ,KAAK;IACnB,UAAU;IACV,MAAM;IACN,aAAa,GAAG,KAAK,SAAS,cAAc,cAAc,QAAQ,IAAI,KAAK,IAAI;IAChF,CAAC;;;AAKN,MAAK,MAAM,QAAQ,OAAO;EACxB,MAAM,eAAe,iBACnB;GAAE,MAAM,KAAK;GAAM,aAAa,KAAK;GAAa,MAAM,KAAK;GAAM,EACnE,MACD;EAID,MAAM,WAAW,aADD,eAAe,KAAK,MAAM,WAAW,UAAU,EACxB,WAAW,UAAU;AAE5D,SAAO,KAAK;GACV,SAAS,UAAU,OAAO;GAC1B,aAAa,UAAU,WAAW;GAClC,UAAU,UAAU,QAAQ;GAC5B,MAAM,KAAK;GACX,UAAU,cAAc,YAAY,sBAAsB,KAAK,KAAK;GACpE,MAAM,cAAc,MAAM;GAC1B,aAAa,KAAK;GAClB,UAAU,KAAK;GACf,UAAU,KAAK;GAChB,CAAC;;AAIJ,MAAK,MAAM,QAAQ,WACjB,QAAO,KAAK;EACV,SAAS,KAAK;EACd,aAAa,KAAK;EAClB,UAAU,KAAK;EACf,MAAM,QAAQ,KAAK;EACnB,UAAU;EACV,MAAM;EACN,aAAa,GAAG,KAAK,SAAS,cAAc,cAAc,QAAQ,IAAI,KAAK,IAAI;EAChF,CAAC;CAIJ,MAAM,UAAU,iBAAiB,QAAQ,YAAY,aAAa;CAGlE,MAAM,cAAc,QAAQ,WAAW,KAAK,QAAQ,UAAU;CAC9D,MAAM,iBAAiB,QAAQ,cAAc,KAAK,QAAQ,QAAQ;AAGlE,QAAO;EACL,QAHa,gBAAgB,aAAa,eAAe;EAIzD;EACA;EACA;EACA;EACA;EACA;EACA,4BAAW,IAAI,MAAM,EAAC,aAAa;EACpC;;;;;AAMH,SAAS,iBACP,QACA,YACA,cACe;AACf,QAAO;EACL,UAAU,OAAO,QAAQ,MAAM,EAAE,aAAa,WAAW,CAAC;EAC1D,aAAa,OAAO,QAAQ,MAAM,EAAE,aAAa,eAAe,CAAC;EACjE,MAAM,OAAO,QAAQ,MAAM,EAAE,aAAa,OAAO,CAAC;EAClD,OAAO,WAAW;EAClB,SAAS,aAAa;EACvB;;;;;AAMH,SAAS,gBACP,aACA,gBACc;AACd,KAAI,YAAa,QAAO;AACxB,KAAI,eAAgB,QAAO;AAC3B,QAAO;;;;;AAMT,SAAS,sBAAsB,MAAuC;AACpE,SAAQ,MAAR;EACE,KAAK,WACH,QAAO;EACT,KAAK,UACH,QAAO;EACT,KAAK,QACH,QAAO;EACT,KAAK,UACH,QAAO;EACT,QACE,QAAO;;;;;;AAOb,SAAS,eACP,OACA,YACA,YACoB;;;;AAQtB,SAAS,aACP,KACA,WACA,WAC0B;AAC1B,KAAI,CAAC,IAAK,QAAO,UAAU,MAAM,UAAU;AAC3C,QACE,UAAU,MAAM,MAAM,EAAE,QAAQ,IAAI,IAAI,UAAU,MAAM,MAAM,EAAE,QAAQ,IAAI"}
package/dist/analysis/impact/rules.js.map
CHANGED

@@ -1 +1 @@
-
{"version":3,"file":"rules.js","names":[
+
{"version":3,"file":"rules.js","names":[],"sources":["../../../src/analysis/impact/rules.ts"],"sourcesContent":["/**\n * Impact classification rules.\n *\n * Defines rules for classifying changes as breaking or non-breaking.\n */\n\nimport type { ImpactRule, ImpactSeverity } from './types';\n\n/**\n * Default breaking change rules.\n */\nexport const BREAKING_RULES: ImpactRule[] = [\n {\n id: 'endpoint-removed',\n description: 'Endpoint/operation was removed',\n severity: 'breaking',\n matches: (delta) =>\n delta.path.includes('spec.') && delta.type === 'removed',\n },\n {\n id: 'field-removed',\n description: 'Field was removed from response',\n severity: 'breaking',\n matches: (delta) =>\n delta.path.includes('.output.') && delta.description.includes('removed'),\n },\n {\n id: 'field-type-changed',\n description: 'Field type was changed',\n severity: 'breaking',\n matches: (delta) =>\n delta.path.includes('.type') &&\n delta.description.includes('type changed'),\n },\n {\n id: 'field-made-required',\n description: 'Optional field became required',\n severity: 'breaking',\n matches: (delta) =>\n delta.path.includes('.required') &&\n delta.description.includes('optional to required'),\n },\n {\n id: 'enum-value-removed',\n description: 'Enum value was removed',\n severity: 'breaking',\n matches: (delta) =>\n delta.path.includes('.enumValues') &&\n delta.description.includes('removed'),\n },\n {\n id: 'nullable-removed',\n description: 'Field is no longer nullable',\n severity: 'breaking',\n matches: (delta) =>\n delta.path.includes('.nullable') &&\n delta.description.includes('no longer nullable'),\n },\n {\n id: 'method-changed',\n description: 'HTTP method was changed',\n severity: 'breaking',\n matches: (delta) =>\n delta.path.includes('.http.method') || delta.path.includes('.method'),\n },\n {\n id: 'path-changed',\n description: 'HTTP path was changed',\n severity: 'breaking',\n matches: (delta) =>\n delta.path.includes('.http.path') || delta.path.includes('.path'),\n },\n {\n id: 'required-field-added-to-input',\n description: 'Required field was added to input',\n severity: 'breaking',\n matches: (delta) =>\n delta.path.includes('.input.') &&\n delta.description.includes('Required field'),\n },\n {\n id: 'event-payload-field-removed',\n description: 'Event payload field was removed',\n severity: 'breaking',\n matches: (delta) =>\n delta.path.includes('.payload.') && delta.description.includes('removed'),\n },\n];\n\n/**\n * Non-breaking change rules.\n */\nexport const NON_BREAKING_RULES: ImpactRule[] = [\n {\n id: 'optional-field-added',\n description: 'Optional field was added',\n severity: 'non_breaking',\n matches: (delta) =>\n delta.description.includes('Optional field') &&\n delta.description.includes('added'),\n },\n {\n id: 'endpoint-added',\n description: 'New endpoint/operation was added',\n severity: 'non_breaking',\n matches: (delta) => delta.path.includes('spec.') && delta.type === 'added',\n },\n {\n id: 'enum-value-added',\n description: 'Enum value was added',\n severity: 'non_breaking',\n matches: (delta) =>\n delta.path.includes('.enumValues') && delta.description.includes('added'),\n },\n {\n id: 'field-made-optional',\n description: 'Required field became optional',\n severity: 'non_breaking',\n matches: (delta) =>\n delta.path.includes('.required') &&\n delta.description.includes('required to optional'),\n },\n {\n id: 'nullable-added',\n description: 'Field is now nullable',\n severity: 'non_breaking',\n matches: (delta) =>\n delta.path.includes('.nullable') 
&&\n delta.description.includes('now nullable'),\n },\n];\n\n/**\n * Info-level change rules.\n */\nexport const INFO_RULES: ImpactRule[] = [\n {\n id: 'stability-changed',\n description: 'Stability level was changed',\n severity: 'info',\n matches: (delta) => delta.path.includes('.stability'),\n },\n {\n id: 'description-changed',\n description: 'Description was changed',\n severity: 'info',\n matches: (delta) => delta.path.includes('.description'),\n },\n {\n id: 'owners-changed',\n description: 'Owners were changed',\n severity: 'info',\n matches: (delta) => delta.path.includes('.owners'),\n },\n {\n id: 'tags-changed',\n description: 'Tags were changed',\n severity: 'info',\n matches: (delta) => delta.path.includes('.tags'),\n },\n];\n\n/**\n * All default rules in priority order (breaking > non_breaking > info).\n */\nexport const DEFAULT_RULES: ImpactRule[] = [\n ...BREAKING_RULES,\n ...NON_BREAKING_RULES,\n ...INFO_RULES,\n];\n\n/**\n * Get rules by severity.\n */\nexport function getRulesBySeverity(severity: ImpactSeverity): ImpactRule[] {\n return DEFAULT_RULES.filter((rule) => rule.severity === severity);\n}\n\n/**\n * Find matching rule for a delta.\n */\nexport function findMatchingRule(\n delta: { path: string; description: string; type: string },\n rules: ImpactRule[] = DEFAULT_RULES\n): ImpactRule | undefined {\n return rules.find((rule) => rule.matches(delta));\n}\n"],"mappings":";;;;AAWA,MAAa,iBAA+B;CAC1C;EACE,IAAI;EACJ,aAAa;EACb,UAAU;EACV,UAAU,UACR,MAAM,KAAK,SAAS,QAAQ,IAAI,MAAM,SAAS;EAClD;CACD;EACE,IAAI;EACJ,aAAa;EACb,UAAU;EACV,UAAU,UACR,MAAM,KAAK,SAAS,WAAW,IAAI,MAAM,YAAY,SAAS,UAAU;EAC3E;CACD;EACE,IAAI;EACJ,aAAa;EACb,UAAU;EACV,UAAU,UACR,MAAM,KAAK,SAAS,QAAQ,IAC5B,MAAM,YAAY,SAAS,eAAe;EAC7C;CACD;EACE,IAAI;EACJ,aAAa;EACb,UAAU;EACV,UAAU,UACR,MAAM,KAAK,SAAS,YAAY,IAChC,MAAM,YAAY,SAAS,uBAAuB;EACrD;CACD;EACE,IAAI;EACJ,aAAa;EACb,UAAU;EACV,UAAU,UACR,MAAM,KAAK,SAAS,cAAc,IAClC,MAAM,YAAY,SAAS,UAAU;EACxC;CACD;EACE,IAAI;EACJ,aAAa;EACb,UAAU;EACV,UAAU,UACR,MAAM,KAAK,SAAS,YAAY,IAChC,MAAM,YAAY,SAAS,qBAAqB;EACnD;CACD;EACE,IAAI;EACJ,aAAa;EACb,UAAU;EACV,UAAU,UACR,MAAM,KAAK,SAAS,eAAe,IAAI,MAAM,KAAK,SAAS,UAAU;EACxE;CACD;EACE,IAAI;EACJ,aAAa;EACb,UAAU;EACV,UAAU,UACR,MAAM,KAAK,SAAS,aAAa,IAAI,MAAM,KAAK,SAAS,QAAQ;EACpE;CACD;EACE,IAAI;EACJ,aAAa;EACb,UAAU;EACV,UAAU,UACR,MAAM,KAAK,SAAS,UAAU,IAC9B,MAAM,YAAY,SAAS,iBAAiB;EAC/C;CACD;EACE,IAAI;EACJ,aAAa;EACb,UAAU;EACV,UAAU,UACR,MAAM,KAAK,SAAS,YAAY,IAAI,MAAM,YAAY,SAAS,UAAU;EAC5E;CACF;;;;AAKD,MAAa,qBAAmC;CAC9C;EACE,IAAI;EACJ,aAAa;EACb,UAAU;EACV,UAAU,UACR,MAAM,YAAY,SAAS,iBAAiB,IAC5C,MAAM,YAAY,SAAS,QAAQ;EACtC;CACD;EACE,IAAI;EACJ,aAAa;EACb,UAAU;EACV,UAAU,UAAU,MAAM,KAAK,SAAS,QAAQ,IAAI,MAAM,SAAS;EACpE;CACD;EACE,IAAI;EACJ,aAAa;EACb,UAAU;EACV,UAAU,UACR,MAAM,KAAK,SAAS,cAAc,IAAI,MAAM,YAAY,SAAS,QAAQ;EAC5E;CACD;EACE,IAAI;EACJ,aAAa;EACb,UAAU;EACV,UAAU,UACR,MAAM,KAAK,SAAS,YAAY,IAChC,MAAM,YAAY,SAAS,uBAAuB;EACrD;CACD;EACE,IAAI;EACJ,aAAa;EACb,UAAU;EACV,UAAU,UACR,MAAM,KAAK,SAAS,YAAY,IAChC,MAAM,YAAY,SAAS,eAAe;EAC7C;CACF;;;;AAKD,MAAa,aAA2B;CACtC;EACE,IAAI;EACJ,aAAa;EACb,UAAU;EACV,UAAU,UAAU,MAAM,KAAK,SAAS,aAAa;EACtD;CACD;EACE,IAAI;EACJ,aAAa;EACb,UAAU;EACV,UAAU,UAAU,MAAM,KAAK,SAAS,eAAe;EACxD;CACD;EACE,IAAI;EACJ,aAAa;EACb,UAAU;EACV,UAAU,UAAU,MAAM,KAAK,SAAS,UAAU;EACnD;CACD;EACE,IAAI;EACJ,aAAa;EACb,UAAU;EACV,UAAU,UAAU,MAAM,KAAK,SAAS,QAAQ;EACjD;CACF;;;;AAKD,MAAa,gBAA8B;CACzC,GAAG;CACH,GAAG;CACH,GAAG;CACJ;;;;AAKD,SAAgB,mBAAmB,UAAwC;AACzE,QAAO,cAAc,QAAQ,SAAS,KAAK,aAAa,SAAS;;;;;AAMnE,SAAgB,iBACd,OACA,QAAsB,eACE;AACxB,QAAO,MAAM,MAAM,SAAS,KAAK,QAAQ,MAAM,CAAC"}
package/dist/analysis/index.js
CHANGED
@@ -1,4 +1,5 @@
-import {
+import { extractTestCoverage, extractTestTarget } from "./spec-parsing-utils.js";
+import { inferSpecTypeFromFilePath, scanAllSpecsFromSource, scanSpecSource } from "./spec-scan.js";
 import { isFeatureFile, scanFeatureSource } from "./feature-scan.js";
 import { isExampleFile, scanExampleSource } from "./example-scan.js";
 import { SpecGroupingStrategies, filterFeatures, filterSpecs, getUniqueSpecDomains, getUniqueSpecOwners, getUniqueSpecTags, groupSpecs, groupSpecsToArray } from "./grouping.js";
@@ -7,6 +8,7 @@ import { computeFieldDiff, computeFieldsDiff, computeIoDiff, isBreakingChange }
 import { addContractNode, buildReverseEdges, createContractGraph, detectCycles, findMissingDependencies, toDot } from "./deps/graph.js";
 import { parseImportedSpecNames } from "./deps/parse-imports.js";
 import { validateSpecStructure } from "./validate/spec-structure.js";
+import "./validate/index.js";
 import { computeHash, normalizeValue, sortFields, sortSpecs, toCanonicalJson } from "./snapshot/normalizer.js";
 import { generateSnapshot } from "./snapshot/snapshot.js";
 import "./snapshot/index.js";
package/dist/analysis/snapshot/normalizer.js.map
CHANGED

@@ -1 +1 @@
-
{"version":3,"file":"normalizer.js","names":[
+
{"version":3,"file":"normalizer.js","names":[],"sources":["../../../src/analysis/snapshot/normalizer.ts"],"sourcesContent":["/**\n * JSON normalization utilities for deterministic snapshots.\n *\n * Ensures that snapshots are stable across ordering, whitespace,\n * and other non-semantic differences.\n */\n\nimport { createHash } from 'crypto';\nimport { compareVersions } from 'compare-versions';\n\n/**\n * Normalize a value for deterministic JSON serialization.\n * - Sorts object keys alphabetically\n * - Removes undefined values\n * - Preserves null values\n */\nexport function normalizeValue(value: unknown): unknown {\n if (value === null || value === undefined) {\n return value === null ? null : undefined;\n }\n\n if (Array.isArray(value)) {\n return value.map(normalizeValue);\n }\n\n if (typeof value === 'object') {\n const obj = value as Record<string, unknown>;\n const sortedKeys = Object.keys(obj).sort();\n const normalized: Record<string, unknown> = {};\n\n for (const key of sortedKeys) {\n const normalizedValue = normalizeValue(obj[key]);\n // Only include defined values\n if (normalizedValue !== undefined) {\n normalized[key] = normalizedValue;\n }\n }\n\n return normalized;\n }\n\n return value;\n}\n\n/**\n * Serialize a value to deterministic JSON string.\n */\nexport function toCanonicalJson(value: unknown): string {\n return JSON.stringify(normalizeValue(value), null, 0);\n}\n\n/**\n * Compute a SHA-256 hash of canonical JSON representation.\n */\nexport function computeHash(value: unknown): string {\n const canonical = toCanonicalJson(value);\n return createHash('sha256').update(canonical).digest('hex').slice(0, 16);\n}\n\n/**\n * Sort specs by key and version for deterministic ordering.\n */\nexport function sortSpecs<T extends { key: string; version: string }>(\n specs: T[]\n): T[] {\n return [...specs].sort((a, b) => {\n const keyCompare = a.key.localeCompare(b.key);\n if (keyCompare !== 0) return keyCompare;\n return compareVersions(a.version, b.version);\n });\n}\n\n/**\n * Sort field snapshots by name for deterministic ordering.\n */\nexport function sortFields(\n fields: Record<string, unknown>\n): Record<string, unknown> {\n const sorted: Record<string, unknown> = {};\n const keys = Object.keys(fields).sort();\n for (const key of keys) {\n sorted[key] = fields[key];\n }\n return sorted;\n}\n"],"mappings":";;;;;;;;;;;;;;;;AAgBA,SAAgB,eAAe,OAAyB;AACtD,KAAI,UAAU,QAAQ,UAAU,OAC9B,QAAO,UAAU,OAAO,OAAO;AAGjC,KAAI,MAAM,QAAQ,MAAM,CACtB,QAAO,MAAM,IAAI,eAAe;AAGlC,KAAI,OAAO,UAAU,UAAU;EAC7B,MAAM,MAAM;EACZ,MAAM,aAAa,OAAO,KAAK,IAAI,CAAC,MAAM;EAC1C,MAAM,aAAsC,EAAE;AAE9C,OAAK,MAAM,OAAO,YAAY;GAC5B,MAAM,kBAAkB,eAAe,IAAI,KAAK;AAEhD,OAAI,oBAAoB,OACtB,YAAW,OAAO;;AAItB,SAAO;;AAGT,QAAO;;;;;AAMT,SAAgB,gBAAgB,OAAwB;AACtD,QAAO,KAAK,UAAU,eAAe,MAAM,EAAE,MAAM,EAAE;;;;;AAMvD,SAAgB,YAAY,OAAwB;CAClD,MAAM,YAAY,gBAAgB,MAAM;AACxC,QAAO,WAAW,SAAS,CAAC,OAAO,UAAU,CAAC,OAAO,MAAM,CAAC,MAAM,GAAG,GAAG;;;;;AAM1E,SAAgB,UACd,OACK;AACL,QAAO,CAAC,GAAG,MAAM,CAAC,MAAM,GAAG,MAAM;EAC/B,MAAM,aAAa,EAAE,IAAI,cAAc,EAAE,IAAI;AAC7C,MAAI,eAAe,EAAG,QAAO;AAC7B,SAAO,gBAAgB,EAAE,SAAS,EAAE,QAAQ;GAC5C;;;;;AAMJ,SAAgB,WACd,QACyB;CACzB,MAAM,SAAkC,EAAE;CAC1C,MAAM,OAAO,OAAO,KAAK,OAAO,CAAC,MAAM;AACvC,MAAK,MAAM,OAAO,KAChB,QAAO,OAAO,OAAO;AAEvB,QAAO"}
package/dist/analysis/snapshot/snapshot.js.map
CHANGED

@@ -1 +1 @@
-
{"version":3,"file":"snapshot.js","names":[
+
{"version":3,"file":"snapshot.js","names":[],"sources":["../../../src/analysis/snapshot/snapshot.ts"],"sourcesContent":["/**\n * Contract snapshot generation.\n *\n * Generates canonical, deterministic snapshots from spec source files\n * for comparison and impact detection.\n */\n\nimport { scanSpecSource } from '../spec-scan';\nimport { computeHash, sortSpecs, sortFields } from './normalizer';\nimport type {\n ContractSnapshot,\n EventSnapshot,\n FieldSnapshot,\n FieldType,\n IoSnapshot,\n OperationSnapshot,\n SnapshotOptions,\n SpecSnapshot,\n} from './types';\n\n/**\n * Generate a contract snapshot from spec source files.\n *\n * @param specs - Array of { path, content } for each spec file\n * @param options - Snapshot generation options\n * @returns Canonical contract snapshot\n */\nexport function generateSnapshot(\n specs: { path: string; content: string }[],\n options: SnapshotOptions = {}\n): ContractSnapshot {\n const snapshots: SpecSnapshot[] = [];\n\n for (const { path, content } of specs) {\n const scanned = scanSpecSource(content, path);\n\n // Filter by types if specified\n if (\n options.types &&\n !options.types.includes(scanned.specType as 'operation' | 'event')\n ) {\n continue;\n }\n\n if (\n scanned.specType === 'operation' &&\n scanned.key &&\n scanned.version !== undefined\n ) {\n const opSnapshot = createOperationSnapshot(scanned, content);\n if (opSnapshot) {\n snapshots.push(opSnapshot);\n }\n } else if (\n scanned.specType === 'event' &&\n scanned.key &&\n scanned.version !== undefined\n ) {\n const eventSnapshot = createEventSnapshot(scanned, content);\n if (eventSnapshot) {\n snapshots.push(eventSnapshot);\n }\n }\n }\n\n const sortedSpecs = sortSpecs(snapshots);\n const hash = computeHash({ specs: sortedSpecs });\n\n return {\n version: '1.0.0',\n generatedAt: new Date().toISOString(),\n specs: sortedSpecs,\n hash,\n };\n}\n\n/**\n * Create an operation snapshot from scanned spec data.\n */\nfunction createOperationSnapshot(\n scanned: ReturnType<typeof scanSpecSource>,\n content: string\n): OperationSnapshot | null {\n if (!scanned.key || scanned.version === undefined) {\n return null;\n }\n\n const io = extractIoFromSource(content);\n const http = extractHttpBinding(content);\n\n return {\n type: 'operation',\n key: scanned.key,\n version: scanned.version,\n kind:\n scanned.kind === 'command' || scanned.kind === 'query'\n ? scanned.kind\n : 'command',\n stability: scanned.stability ?? 'experimental',\n http: http ?? undefined,\n io,\n authLevel: extractAuthLevel(content),\n emittedEvents: scanned.emittedEvents,\n };\n}\n\n/**\n * Create an event snapshot from scanned spec data.\n */\nfunction createEventSnapshot(\n scanned: ReturnType<typeof scanSpecSource>,\n content: string\n): EventSnapshot | null {\n if (!scanned.key || scanned.version === undefined) {\n return null;\n }\n\n const payload = extractPayloadFromSource(content);\n\n return {\n type: 'event',\n key: scanned.key,\n version: scanned.version,\n stability: scanned.stability ?? 
'experimental',\n payload,\n };\n}\n\n/**\n * Extract IO schema from source code.\n * This is a heuristic extraction - not full Zod introspection.\n */\nfunction extractIoFromSource(content: string): IoSnapshot {\n const input = extractSchemaFields(content, 'input');\n const output = extractSchemaFields(content, 'output');\n\n return {\n input: sortFields(input) as Record<string, FieldSnapshot>,\n output: sortFields(output) as Record<string, FieldSnapshot>,\n };\n}\n\n/**\n * Extract payload schema from event source code.\n */\nfunction extractPayloadFromSource(\n content: string\n): Record<string, FieldSnapshot> {\n const fields = extractSchemaFields(content, 'payload');\n return sortFields(fields) as Record<string, FieldSnapshot>;\n}\n\n/**\n * Extract schema fields from a specific section of the source.\n */\nfunction extractSchemaFields(\n content: string,\n section: 'input' | 'output' | 'payload'\n): Record<string, FieldSnapshot> {\n const fields: Record<string, FieldSnapshot> = {};\n\n // Look for z.object({ ... }) patterns within the section\n const sectionPattern = new RegExp(\n `${section}\\\\s*:\\\\s*z\\\\.object\\\\(\\\\{([^}]+)\\\\}`,\n 's'\n );\n const sectionMatch = content.match(sectionPattern);\n\n if (!sectionMatch?.[1]) {\n return fields;\n }\n\n const sectionContent = sectionMatch[1];\n\n // Match field definitions: fieldName: z.string(), z.number(), etc.\n const fieldPattern = /(\\w+)\\s*:\\s*z\\.(\\w+)\\((.*?)\\)/g;\n let match;\n\n while ((match = fieldPattern.exec(sectionContent)) !== null) {\n const [, fieldName, zodType] = match;\n if (!fieldName || !zodType) continue;\n\n const isOptional =\n sectionContent.includes(`${fieldName}:`) &&\n sectionContent\n .slice(sectionContent.indexOf(`${fieldName}:`))\n .includes('.optional()');\n const isNullable =\n sectionContent.includes(`${fieldName}:`) &&\n sectionContent\n .slice(sectionContent.indexOf(`${fieldName}:`))\n .includes('.nullable()');\n\n fields[fieldName] = {\n name: fieldName,\n type: mapZodTypeToFieldType(zodType),\n required: !isOptional,\n nullable: isNullable,\n };\n }\n\n return fields;\n}\n\n/**\n * Map Zod type to FieldType.\n */\nfunction mapZodTypeToFieldType(zodType: string): FieldType {\n const mapping: Record<string, FieldType> = {\n string: 'string',\n number: 'number',\n boolean: 'boolean',\n object: 'object',\n array: 'array',\n enum: 'enum',\n union: 'union',\n literal: 'literal',\n date: 'date',\n coerce: 'unknown',\n };\n return mapping[zodType] ?? 
'unknown';\n}\n\n/**\n * Extract HTTP binding from source code.\n */\nfunction extractHttpBinding(content: string): {\n method: 'GET' | 'POST' | 'PUT' | 'PATCH' | 'DELETE';\n path: string;\n} | null {\n // Look for http: { method: 'X', path: 'Y' } pattern\n const methodMatch = content.match(/method\\s*:\\s*['\"](\\w+)['\"]/);\n const pathMatch = content.match(/path\\s*:\\s*['\"]([^'\"]+)['\"]/);\n\n if (methodMatch?.[1] && pathMatch?.[1]) {\n const method = methodMatch[1].toUpperCase();\n if (['GET', 'POST', 'PUT', 'PATCH', 'DELETE'].includes(method)) {\n return {\n method: method as 'GET' | 'POST' | 'PUT' | 'PATCH' | 'DELETE',\n path: pathMatch[1],\n };\n }\n }\n\n return null;\n}\n\n/**\n * Extract auth level from source code.\n */\nfunction extractAuthLevel(content: string): string | undefined {\n const authMatch = content.match(/auth\\s*:\\s*['\"](\\w+)['\"]/);\n return authMatch?.[1];\n}\n"],"mappings":";;;;;;;;;;;;;;;;;AA2BA,SAAgB,iBACd,OACA,UAA2B,EAAE,EACX;CAClB,MAAM,YAA4B,EAAE;AAEpC,MAAK,MAAM,EAAE,MAAM,aAAa,OAAO;EACrC,MAAM,UAAU,eAAe,SAAS,KAAK;AAG7C,MACE,QAAQ,SACR,CAAC,QAAQ,MAAM,SAAS,QAAQ,SAAkC,CAElE;AAGF,MACE,QAAQ,aAAa,eACrB,QAAQ,OACR,QAAQ,YAAY,QACpB;GACA,MAAM,aAAa,wBAAwB,SAAS,QAAQ;AAC5D,OAAI,WACF,WAAU,KAAK,WAAW;aAG5B,QAAQ,aAAa,WACrB,QAAQ,OACR,QAAQ,YAAY,QACpB;GACA,MAAM,gBAAgB,oBAAoB,SAAS,QAAQ;AAC3D,OAAI,cACF,WAAU,KAAK,cAAc;;;CAKnC,MAAM,cAAc,UAAU,UAAU;CACxC,MAAM,OAAO,YAAY,EAAE,OAAO,aAAa,CAAC;AAEhD,QAAO;EACL,SAAS;EACT,8BAAa,IAAI,MAAM,EAAC,aAAa;EACrC,OAAO;EACP;EACD;;;;;AAMH,SAAS,wBACP,SACA,SAC0B;AAC1B,KAAI,CAAC,QAAQ,OAAO,QAAQ,YAAY,OACtC,QAAO;CAGT,MAAM,KAAK,oBAAoB,QAAQ;CACvC,MAAM,OAAO,mBAAmB,QAAQ;AAExC,QAAO;EACL,MAAM;EACN,KAAK,QAAQ;EACb,SAAS,QAAQ;EACjB,MACE,QAAQ,SAAS,aAAa,QAAQ,SAAS,UAC3C,QAAQ,OACR;EACN,WAAW,QAAQ,aAAa;EAChC,MAAM,QAAQ;EACd;EACA,WAAW,iBAAiB,QAAQ;EACpC,eAAe,QAAQ;EACxB;;;;;AAMH,SAAS,oBACP,SACA,SACsB;AACtB,KAAI,CAAC,QAAQ,OAAO,QAAQ,YAAY,OACtC,QAAO;CAGT,MAAM,UAAU,yBAAyB,QAAQ;AAEjD,QAAO;EACL,MAAM;EACN,KAAK,QAAQ;EACb,SAAS,QAAQ;EACjB,WAAW,QAAQ,aAAa;EAChC;EACD;;;;;;AAOH,SAAS,oBAAoB,SAA6B;CACxD,MAAM,QAAQ,oBAAoB,SAAS,QAAQ;CACnD,MAAM,SAAS,oBAAoB,SAAS,SAAS;AAErD,QAAO;EACL,OAAO,WAAW,MAAM;EACxB,QAAQ,WAAW,OAAO;EAC3B;;;;;AAMH,SAAS,yBACP,SAC+B;AAE/B,QAAO,WADQ,oBAAoB,SAAS,UAAU,CAC7B;;;;;AAM3B,SAAS,oBACP,SACA,SAC+B;CAC/B,MAAM,SAAwC,EAAE;CAGhD,MAAM,iBAAiB,IAAI,OACzB,GAAG,QAAQ,sCACX,IACD;CACD,MAAM,eAAe,QAAQ,MAAM,eAAe;AAElD,KAAI,CAAC,eAAe,GAClB,QAAO;CAGT,MAAM,iBAAiB,aAAa;CAGpC,MAAM,eAAe;CACrB,IAAI;AAEJ,SAAQ,QAAQ,aAAa,KAAK,eAAe,MAAM,MAAM;EAC3D,MAAM,GAAG,WAAW,WAAW;AAC/B,MAAI,CAAC,aAAa,CAAC,QAAS;EAE5B,MAAM,aACJ,eAAe,SAAS,GAAG,UAAU,GAAG,IACxC,eACG,MAAM,eAAe,QAAQ,GAAG,UAAU,GAAG,CAAC,CAC9C,SAAS,cAAc;EAC5B,MAAM,aACJ,eAAe,SAAS,GAAG,UAAU,GAAG,IACxC,eACG,MAAM,eAAe,QAAQ,GAAG,UAAU,GAAG,CAAC,CAC9C,SAAS,cAAc;AAE5B,SAAO,aAAa;GAClB,MAAM;GACN,MAAM,sBAAsB,QAAQ;GACpC,UAAU,CAAC;GACX,UAAU;GACX;;AAGH,QAAO;;;;;AAMT,SAAS,sBAAsB,SAA4B;AAazD,QAZ2C;EACzC,QAAQ;EACR,QAAQ;EACR,SAAS;EACT,QAAQ;EACR,OAAO;EACP,MAAM;EACN,OAAO;EACP,SAAS;EACT,MAAM;EACN,QAAQ;EACT,CACc,YAAY;;;;;AAM7B,SAAS,mBAAmB,SAGnB;CAEP,MAAM,cAAc,QAAQ,MAAM,6BAA6B;CAC/D,MAAM,YAAY,QAAQ,MAAM,8BAA8B;AAE9D,KAAI,cAAc,MAAM,YAAY,IAAI;EACtC,MAAM,SAAS,YAAY,GAAG,aAAa;AAC3C,MAAI;GAAC;GAAO;GAAQ;GAAO;GAAS;GAAS,CAAC,SAAS,OAAO,CAC5D,QAAO;GACG;GACR,MAAM,UAAU;GACjB;;AAIL,QAAO;;;;;AAMT,SAAS,iBAAiB,SAAqC;AAE7D,QADkB,QAAQ,MAAM,2BAA2B,GACxC"}
package/dist/analysis/spec-parsing-utils.d.ts
ADDED

@@ -0,0 +1,26 @@
+//#region src/analysis/spec-parsing-utils.d.ts
+
+/**
+ * Extract test target from a TestSpec source.
+ * Parses the `target: { type: 'operation', key, version }` field OR
+ * the nested format `target: { type: 'operation', operation: { key, version } }`.
+ */
+declare function extractTestTarget(code: string): {
+  type: 'operation' | 'workflow';
+  key: string;
+  version: string | undefined;
+} | undefined;
+/**
+ * Extract test coverage info from a TestSpec source.
+ * Checks for presence of success (expectOutput) and failure (expectError) scenarios.
+ * Supports both formats:
+ * - New: `expectOutput: {}` and `expectError: {}`
+ * - Old: `{ type: 'expectOutput', ... }` and `{ type: 'expectError', ... }`
+ */
+declare function extractTestCoverage(code: string): {
+  hasSuccess: boolean;
+  hasError: boolean;
+};
+//#endregion
+export { extractTestCoverage, extractTestTarget };
+//# sourceMappingURL=spec-parsing-utils.d.ts.map
package/dist/analysis/spec-parsing-utils.d.ts.map
ADDED

@@ -0,0 +1 @@
+
{"version":3,"file":"spec-parsing-utils.d.ts","names":[],"sources":["../../src/analysis/spec-parsing-utils.ts"],"sourcesContent":[],"mappings":";;;;;;;iBA0EgB,iBAAA;;;;;;;;;;;;iBAiEA,mBAAA"}
|
|
@@ -0,0 +1,98 @@
+import { escapeRegex, findMatchingDelimiter, matchStringField, matchVersionField } from "./utils/matchers.js";
+
+//#region src/analysis/spec-parsing-utils.ts
+function parsePolicy(code) {
+  const policyBlock = code.match(/policy\s*:\s*\{([\s\S]*?)\}/);
+  if (!policyBlock?.[1]) return [];
+  return extractRefList(policyBlock[1], "policies") ?? [];
+}
+function extractRefList(code, field) {
+  const regex = /* @__PURE__ */ new RegExp(`${escapeRegex(field)}\\s*:\\s*\\[([\\s\\S]*?)\\]`);
+  const match = code.match(regex);
+  if (!match?.[1]) return void 0;
+  const inner = match[1];
+  const items = [];
+  const parts = inner.match(/\{[\s\S]*?\}/g);
+  if (parts) for (const part of parts) {
+    const k = matchStringField(part, "key");
+    const v = matchVersionField(part, "version");
+    if (k) items.push({
+      key: k,
+      version: v ?? "1.0.0"
+    });
+  }
+  return items.length > 0 ? items : void 0;
+}
+function extractTestRefs(code) {
+  const regex = /* @__PURE__ */ new RegExp(`testRefs\\s*:\\s*\\[([\\s\\S]*?)\\]`);
+  const match = code.match(regex);
+  if (!match?.[1]) return void 0;
+  const inner = match[1];
+  const items = [];
+  const parts = inner.match(/\{[\s\S]*?\}/g);
+  if (parts) for (const part of parts) {
+    const k = matchStringField(part, "key");
+    const v = matchVersionField(part, "version");
+    const t = matchStringField(part, "type");
+    if (k) items.push({
+      key: k,
+      version: v ?? "1.0.0",
+      type: t === "error" ? "error" : "success"
+    });
+  }
+  return items.length > 0 ? items : void 0;
+}
+/**
+ * Extract test target from a TestSpec source.
+ * Parses the `target: { type: 'operation', key, version }` field OR
+ * the nested format `target: { type: 'operation', operation: { key, version } }`.
+ */
+function extractTestTarget(code) {
+  const targetStartMatch = code.match(/target\s*:\s*\{/);
+  if (!targetStartMatch || targetStartMatch.index === void 0) return void 0;
+  const openBraceIndex = targetStartMatch.index + targetStartMatch[0].length - 1;
+  const closeBraceIndex = findMatchingDelimiter(code, openBraceIndex, "{", "}");
+  if (closeBraceIndex === -1) return void 0;
+  const targetBlock = code.substring(openBraceIndex + 1, closeBraceIndex);
+  const typeMatch = targetBlock.match(/type\s*:\s*['"](\w+)['"]/);
+  if (!typeMatch?.[1]) return void 0;
+  const type = typeMatch[1];
+  if (type !== "operation" && type !== "workflow") return void 0;
+  const flatKey = matchStringField(targetBlock, "key");
+  if (flatKey) return {
+    type,
+    key: flatKey,
+    version: matchVersionField(targetBlock, "version")
+  };
+  const refBlockMatch = targetBlock.match(/* @__PURE__ */ new RegExp(`${type}\\s*:\\s*\\{([\\s\\S]*?)\\}`));
+  if (!refBlockMatch?.[1]) return void 0;
+  const refBlock = refBlockMatch[1];
+  const key = matchStringField(refBlock, "key");
+  if (!key) return void 0;
+  return {
+    type,
+    key,
+    version: matchVersionField(refBlock, "version")
+  };
+}
+/**
+ * Extract test coverage info from a TestSpec source.
+ * Checks for presence of success (expectOutput) and failure (expectError) scenarios.
+ * Supports both formats:
+ * - New: `expectOutput: {}` and `expectError: {}`
+ * - Old: `{ type: 'expectOutput', ... }` and `{ type: 'expectError', ... }`
+ */
+function extractTestCoverage(code) {
+  const hasSuccessNew = /expectOutput\s*:/.test(code);
+  const hasErrorNew = /expectError\s*:/.test(code);
+  const hasSuccessOld = /(['"]?)type\1\s*:\s*['"]expectOutput['"]/.test(code);
+  const hasErrorOld = /(['"]?)type\1\s*:\s*['"]expectError['"]/.test(code);
+  return {
+    hasSuccess: hasSuccessNew || hasSuccessOld,
+    hasError: hasErrorNew || hasErrorOld
+  };
+}
+
+//#endregion
+export { extractRefList, extractTestCoverage, extractTestRefs, extractTestTarget, parsePolicy };
+//# sourceMappingURL=spec-parsing-utils.js.map
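
A sketch of how the target extraction handles both supported spellings. The spec strings below are invented examples, not package fixtures, and assume matchStringField/matchVersionField match `key: '...'` and `version: '...'` fields as used above:

```js
// Illustrative only; both the flat and the nested target forms described above.
import { extractTestTarget } from "./spec-parsing-utils.js";

const flat = "target: { type: 'operation', key: 'user.create', version: '1.2.0' }";
const nested =
  "target: { type: 'operation', operation: { key: 'user.create', version: '1.2.0' } }";

// Both spellings resolve to the same reference:
extractTestTarget(flat);   // => { type: 'operation', key: 'user.create', version: '1.2.0' }
extractTestTarget(nested); // => { type: 'operation', key: 'user.create', version: '1.2.0' }
```
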
@@ -0,0 +1 @@
{"version":3,"file":"spec-parsing-utils.js","names":[],"sources":["../../src/analysis/spec-parsing-utils.ts"],"sourcesContent":["import {\n escapeRegex,\n matchStringField,\n matchVersionField,\n findMatchingDelimiter,\n} from './utils/matchers';\n\nexport function parsePolicy(code: string): { key: string; version: string }[] {\n const policyBlock = code.match(/policy\\s*:\\s*\\{([\\s\\S]*?)\\}/);\n if (!policyBlock?.[1]) return [];\n\n return extractRefList(policyBlock[1], 'policies') ?? [];\n}\n\nexport function extractRefList(\n code: string,\n field: string\n): { key: string; version: string }[] | undefined {\n const regex = new RegExp(`${escapeRegex(field)}\\\\s*:\\\\s*\\\\[([\\\\s\\\\S]*?)\\\\]`);\n const match = code.match(regex);\n if (!match?.[1]) return undefined;\n\n const inner = match[1];\n const items: { key: string; version: string }[] = [];\n\n const parts = inner.match(/\\{[\\s\\S]*?\\}/g);\n if (parts) {\n for (const part of parts) {\n const k = matchStringField(part, 'key');\n const v = matchVersionField(part, 'version');\n if (k) {\n items.push({ key: k, version: v ?? '1.0.0' });\n }\n }\n }\n\n return items.length > 0 ? items : undefined;\n}\n\nexport function extractTestRefs(\n code: string\n): { key: string; version: string; type: 'success' | 'error' }[] | undefined {\n const regex = new RegExp(`testRefs\\\\s*:\\\\s*\\\\[([\\\\s\\\\S]*?)\\\\]`);\n const match = code.match(regex);\n if (!match?.[1]) return undefined;\n\n const inner = match[1];\n const items: { key: string; version: string; type: 'success' | 'error' }[] =\n [];\n\n const parts = inner.match(/\\{[\\s\\S]*?\\}/g);\n if (parts) {\n for (const part of parts) {\n const k = matchStringField(part, 'key');\n const v = matchVersionField(part, 'version');\n const t = matchStringField(part, 'type');\n if (k) {\n items.push({\n key: k,\n version: v ?? '1.0.0',\n type: t === 'error' ? 'error' : 'success',\n });\n }\n }\n }\n\n return items.length > 0 ? items : undefined;\n}\n\n/**\n * Extract test target from a TestSpec source.\n * Parses the `target: { type: 'operation', key, version }` field OR\n * the nested format `target: { type: 'operation', operation: { key, version } }`.\n */\nexport function extractTestTarget(\n code: string\n):\n | { type: 'operation' | 'workflow'; key: string; version: string | undefined }\n | undefined {\n // Find target block start\n const targetStartMatch = code.match(/target\\s*:\\s*\\{/);\n if (!targetStartMatch || targetStartMatch.index === undefined)\n return undefined;\n\n const openBraceIndex =\n targetStartMatch.index + targetStartMatch[0].length - 1;\n const closeBraceIndex = findMatchingDelimiter(code, openBraceIndex, '{', '}');\n\n if (closeBraceIndex === -1) return undefined;\n\n const targetBlock = code.substring(openBraceIndex + 1, closeBraceIndex);\n\n // Extract the type\n const typeMatch = targetBlock.match(/type\\s*:\\s*['\"](\\w+)['\"]/);\n if (!typeMatch?.[1]) return undefined;\n\n const type = typeMatch[1];\n if (type !== 'operation' && type !== 'workflow') return undefined;\n\n // Try flat format first: { type: 'operation', key: '...', version: '...' }\n const flatKey = matchStringField(targetBlock, 'key');\n if (flatKey) {\n const flatVersion = matchVersionField(targetBlock, 'version');\n return {\n type,\n key: flatKey,\n version: flatVersion,\n };\n }\n\n // Try nested format: { type: 'operation', operation: { key: '...', version: '...' 
} }\n const refBlockMatch = targetBlock.match(\n new RegExp(`${type}\\\\s*:\\\\s*\\\\{([\\\\s\\\\S]*?)\\\\}`)\n );\n\n if (!refBlockMatch?.[1]) return undefined;\n\n const refBlock = refBlockMatch[1];\n\n // Extract key and version from the ref block\n const key = matchStringField(refBlock, 'key');\n if (!key) return undefined;\n\n const version = matchVersionField(refBlock, 'version');\n\n return {\n type,\n key,\n version,\n };\n}\n\n/**\n * Extract test coverage info from a TestSpec source.\n * Checks for presence of success (expectOutput) and failure (expectError) scenarios.\n * Supports both formats:\n * - New: `expectOutput: {}` and `expectError: {}`\n * - Old: `{ type: 'expectOutput', ... }` and `{ type: 'expectError', ... }`\n */\nexport function extractTestCoverage(code: string): {\n hasSuccess: boolean;\n hasError: boolean;\n} {\n // Check new format: expectOutput: or expectError: as keys\n const hasSuccessNew = /expectOutput\\s*:/.test(code);\n const hasErrorNew = /expectError\\s*:/.test(code);\n\n // Check old format: { type: 'expectOutput' } or { type: 'expectError' }\n const hasSuccessOld = /(['\"]?)type\\1\\s*:\\s*['\"]expectOutput['\"]/.test(code);\n const hasErrorOld = /(['\"]?)type\\1\\s*:\\s*['\"]expectError['\"]/.test(code);\n\n return {\n hasSuccess: hasSuccessNew || hasSuccessOld,\n hasError: hasErrorNew || hasErrorOld,\n };\n}\n"],"mappings":";;;AAOA,SAAgB,YAAY,MAAkD;CAC5E,MAAM,cAAc,KAAK,MAAM,8BAA8B;AAC7D,KAAI,CAAC,cAAc,GAAI,QAAO,EAAE;AAEhC,QAAO,eAAe,YAAY,IAAI,WAAW,IAAI,EAAE;;AAGzD,SAAgB,eACd,MACA,OACgD;CAChD,MAAM,wBAAQ,IAAI,OAAO,GAAG,YAAY,MAAM,CAAC,6BAA6B;CAC5E,MAAM,QAAQ,KAAK,MAAM,MAAM;AAC/B,KAAI,CAAC,QAAQ,GAAI,QAAO;CAExB,MAAM,QAAQ,MAAM;CACpB,MAAM,QAA4C,EAAE;CAEpD,MAAM,QAAQ,MAAM,MAAM,gBAAgB;AAC1C,KAAI,MACF,MAAK,MAAM,QAAQ,OAAO;EACxB,MAAM,IAAI,iBAAiB,MAAM,MAAM;EACvC,MAAM,IAAI,kBAAkB,MAAM,UAAU;AAC5C,MAAI,EACF,OAAM,KAAK;GAAE,KAAK;GAAG,SAAS,KAAK;GAAS,CAAC;;AAKnD,QAAO,MAAM,SAAS,IAAI,QAAQ;;AAGpC,SAAgB,gBACd,MAC2E;CAC3E,MAAM,wBAAQ,IAAI,OAAO,sCAAsC;CAC/D,MAAM,QAAQ,KAAK,MAAM,MAAM;AAC/B,KAAI,CAAC,QAAQ,GAAI,QAAO;CAExB,MAAM,QAAQ,MAAM;CACpB,MAAM,QACJ,EAAE;CAEJ,MAAM,QAAQ,MAAM,MAAM,gBAAgB;AAC1C,KAAI,MACF,MAAK,MAAM,QAAQ,OAAO;EACxB,MAAM,IAAI,iBAAiB,MAAM,MAAM;EACvC,MAAM,IAAI,kBAAkB,MAAM,UAAU;EAC5C,MAAM,IAAI,iBAAiB,MAAM,OAAO;AACxC,MAAI,EACF,OAAM,KAAK;GACT,KAAK;GACL,SAAS,KAAK;GACd,MAAM,MAAM,UAAU,UAAU;GACjC,CAAC;;AAKR,QAAO,MAAM,SAAS,IAAI,QAAQ;;;;;;;AAQpC,SAAgB,kBACd,MAGY;CAEZ,MAAM,mBAAmB,KAAK,MAAM,kBAAkB;AACtD,KAAI,CAAC,oBAAoB,iBAAiB,UAAU,OAClD,QAAO;CAET,MAAM,iBACJ,iBAAiB,QAAQ,iBAAiB,GAAG,SAAS;CACxD,MAAM,kBAAkB,sBAAsB,MAAM,gBAAgB,KAAK,IAAI;AAE7E,KAAI,oBAAoB,GAAI,QAAO;CAEnC,MAAM,cAAc,KAAK,UAAU,iBAAiB,GAAG,gBAAgB;CAGvE,MAAM,YAAY,YAAY,MAAM,2BAA2B;AAC/D,KAAI,CAAC,YAAY,GAAI,QAAO;CAE5B,MAAM,OAAO,UAAU;AACvB,KAAI,SAAS,eAAe,SAAS,WAAY,QAAO;CAGxD,MAAM,UAAU,iBAAiB,aAAa,MAAM;AACpD,KAAI,QAEF,QAAO;EACL;EACA,KAAK;EACL,SAJkB,kBAAkB,aAAa,UAAU;EAK5D;CAIH,MAAM,gBAAgB,YAAY,sBAChC,IAAI,OAAO,GAAG,KAAK,6BAA6B,CACjD;AAED,KAAI,CAAC,gBAAgB,GAAI,QAAO;CAEhC,MAAM,WAAW,cAAc;CAG/B,MAAM,MAAM,iBAAiB,UAAU,MAAM;AAC7C,KAAI,CAAC,IAAK,QAAO;AAIjB,QAAO;EACL;EACA;EACA,SALc,kBAAkB,UAAU,UAAU;EAMrD;;;;;;;;;AAUH,SAAgB,oBAAoB,MAGlC;CAEA,MAAM,gBAAgB,mBAAmB,KAAK,KAAK;CACnD,MAAM,cAAc,kBAAkB,KAAK,KAAK;CAGhD,MAAM,gBAAgB,2CAA2C,KAAK,KAAK;CAC3E,MAAM,cAAc,0CAA0C,KAAK,KAAK;AAExE,QAAO;EACL,YAAY,iBAAiB;EAC7B,UAAU,eAAe;EAC1B"}
@@ -1,34 +1,20 @@
-import {
+import { SpecScanResult } from "../types/analysis-types.js";
+import { extractTestCoverage, extractTestTarget } from "./spec-parsing-utils.js";

 //#region src/analysis/spec-scan.d.ts

 /**
- *
- * Supports all contract types from @contractspec/lib.contracts.
+ * Scan all specs from a single source file.
  */
-declare function
+declare function scanAllSpecsFromSource(code: string, filePath: string): SpecScanResult[];
 /**
- * Scan spec source
+ * Scan a single spec source string.
  */
 declare function scanSpecSource(code: string, filePath: string): SpecScanResult;
 /**
- *
- * Looks for sideEffects.emits array entries.
- */
-declare function extractEmittedEvents(code: string): RefInfo[] | undefined;
-/**
- * Extract policy refs from operation spec source.
- */
-declare function extractPolicyRefs(code: string): RefInfo[] | undefined;
-/**
- * Extract test spec refs.
- */
-declare function extractTestRefs(code: string): RefInfo[] | undefined;
-/**
- * Scan spec source code to extract ALL specs from a file.
- * This function finds multiple spec definitions in a single file.
+ * Infer spec type from file path convention.
  */
-declare function
+declare function inferSpecTypeFromFilePath(filePath: string): SpecScanResult['specType'] | 'feature' | 'unknown';
 //#endregion
-export {
+export { inferSpecTypeFromFilePath, scanAllSpecsFromSource, scanSpecSource };
 //# sourceMappingURL=spec-scan.d.ts.map
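
The slimmed-down spec-scan surface can be exercised roughly as follows. The file path, naming convention, and use of a relative dist import are assumptions for illustration; the exact SpecScanResult shape is defined in ../types/analysis-types.js:

```js
// Sketch only: the path and the naming convention below are hypothetical.
import { readFile } from "node:fs/promises";
import { inferSpecTypeFromFilePath, scanAllSpecsFromSource } from "./spec-scan.js";

const filePath = "specs/user/create-user.operation.ts";
const source = await readFile(filePath, "utf8");

// Infers a spec type (or 'feature' / 'unknown') from the file path convention.
const specType = inferSpecTypeFromFilePath(filePath);

// Returns one SpecScanResult per spec definition found in the file.
const results = scanAllSpecsFromSource(source, filePath);
console.log(specType, results.length);
```
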
@@ -1 +1 @@
-{"version":3,"file":"spec-scan.d.ts","names":[],"sources":["../../src/analysis/spec-scan.ts"],"sourcesContent":[],"mappings":"
+{"version":3,"file":"spec-scan.d.ts","names":[],"sources":["../../src/analysis/spec-scan.ts"],"sourcesContent":[],"mappings":";;;;;;;;iBAiCgB,sBAAA,kCAGb;;;;iBAsDa,cAAA,kCAAgD;;;;iBAsGhD,yBAAA,oBEb"}