@aigne/ash 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (146) hide show
  1. package/DESIGN.md +41 -0
  2. package/dist/ai-dev-loop/ash-run-result.cjs +12 -0
  3. package/dist/ai-dev-loop/ash-run-result.d.cts +28 -0
  4. package/dist/ai-dev-loop/ash-run-result.d.cts.map +1 -0
  5. package/dist/ai-dev-loop/ash-run-result.d.mts +28 -0
  6. package/dist/ai-dev-loop/ash-run-result.d.mts.map +1 -0
  7. package/dist/ai-dev-loop/ash-run-result.mjs +11 -0
  8. package/dist/ai-dev-loop/ash-run-result.mjs.map +1 -0
  9. package/dist/ai-dev-loop/ash-typed-error.cjs +51 -0
  10. package/dist/ai-dev-loop/ash-typed-error.d.cts +54 -0
  11. package/dist/ai-dev-loop/ash-typed-error.d.cts.map +1 -0
  12. package/dist/ai-dev-loop/ash-typed-error.d.mts +54 -0
  13. package/dist/ai-dev-loop/ash-typed-error.d.mts.map +1 -0
  14. package/dist/ai-dev-loop/ash-typed-error.mjs +50 -0
  15. package/dist/ai-dev-loop/ash-typed-error.mjs.map +1 -0
  16. package/dist/ai-dev-loop/ash-validate.cjs +27 -0
  17. package/dist/ai-dev-loop/ash-validate.d.cts +7 -0
  18. package/dist/ai-dev-loop/ash-validate.d.cts.map +1 -0
  19. package/dist/ai-dev-loop/ash-validate.d.mts +7 -0
  20. package/dist/ai-dev-loop/ash-validate.d.mts.map +1 -0
  21. package/dist/ai-dev-loop/ash-validate.mjs +28 -0
  22. package/dist/ai-dev-loop/ash-validate.mjs.map +1 -0
  23. package/dist/ai-dev-loop/dev-loop.cjs +134 -0
  24. package/dist/ai-dev-loop/dev-loop.d.cts +28 -0
  25. package/dist/ai-dev-loop/dev-loop.d.cts.map +1 -0
  26. package/dist/ai-dev-loop/dev-loop.d.mts +28 -0
  27. package/dist/ai-dev-loop/dev-loop.d.mts.map +1 -0
  28. package/dist/ai-dev-loop/dev-loop.mjs +135 -0
  29. package/dist/ai-dev-loop/dev-loop.mjs.map +1 -0
  30. package/dist/ai-dev-loop/index.cjs +24 -0
  31. package/dist/ai-dev-loop/index.d.cts +9 -0
  32. package/dist/ai-dev-loop/index.d.mts +9 -0
  33. package/dist/ai-dev-loop/index.mjs +10 -0
  34. package/dist/ai-dev-loop/live-mode.cjs +17 -0
  35. package/dist/ai-dev-loop/live-mode.d.cts +24 -0
  36. package/dist/ai-dev-loop/live-mode.d.cts.map +1 -0
  37. package/dist/ai-dev-loop/live-mode.d.mts +24 -0
  38. package/dist/ai-dev-loop/live-mode.d.mts.map +1 -0
  39. package/dist/ai-dev-loop/live-mode.mjs +17 -0
  40. package/dist/ai-dev-loop/live-mode.mjs.map +1 -0
  41. package/dist/ai-dev-loop/meta-tools.cjs +123 -0
  42. package/dist/ai-dev-loop/meta-tools.d.cts +24 -0
  43. package/dist/ai-dev-loop/meta-tools.d.cts.map +1 -0
  44. package/dist/ai-dev-loop/meta-tools.d.mts +24 -0
  45. package/dist/ai-dev-loop/meta-tools.d.mts.map +1 -0
  46. package/dist/ai-dev-loop/meta-tools.mjs +120 -0
  47. package/dist/ai-dev-loop/meta-tools.mjs.map +1 -0
  48. package/dist/ai-dev-loop/structured-runner.cjs +154 -0
  49. package/dist/ai-dev-loop/structured-runner.d.cts +12 -0
  50. package/dist/ai-dev-loop/structured-runner.d.cts.map +1 -0
  51. package/dist/ai-dev-loop/structured-runner.d.mts +12 -0
  52. package/dist/ai-dev-loop/structured-runner.d.mts.map +1 -0
  53. package/dist/ai-dev-loop/structured-runner.mjs +155 -0
  54. package/dist/ai-dev-loop/structured-runner.mjs.map +1 -0
  55. package/dist/ai-dev-loop/system-prompt.cjs +55 -0
  56. package/dist/ai-dev-loop/system-prompt.d.cts +20 -0
  57. package/dist/ai-dev-loop/system-prompt.d.cts.map +1 -0
  58. package/dist/ai-dev-loop/system-prompt.d.mts +20 -0
  59. package/dist/ai-dev-loop/system-prompt.d.mts.map +1 -0
  60. package/dist/ai-dev-loop/system-prompt.mjs +54 -0
  61. package/dist/ai-dev-loop/system-prompt.mjs.map +1 -0
  62. package/dist/ast.d.cts +140 -0
  63. package/dist/ast.d.cts.map +1 -0
  64. package/dist/ast.d.mts +140 -0
  65. package/dist/ast.d.mts.map +1 -0
  66. package/dist/compiler.cjs +802 -0
  67. package/dist/compiler.d.cts +103 -0
  68. package/dist/compiler.d.cts.map +1 -0
  69. package/dist/compiler.d.mts +103 -0
  70. package/dist/compiler.d.mts.map +1 -0
  71. package/dist/compiler.mjs +802 -0
  72. package/dist/compiler.mjs.map +1 -0
  73. package/dist/index.cjs +14 -0
  74. package/dist/index.d.cts +7 -0
  75. package/dist/index.d.mts +7 -0
  76. package/dist/index.mjs +7 -0
  77. package/dist/lexer.cjs +451 -0
  78. package/dist/lexer.d.cts +14 -0
  79. package/dist/lexer.d.cts.map +1 -0
  80. package/dist/lexer.d.mts +14 -0
  81. package/dist/lexer.d.mts.map +1 -0
  82. package/dist/lexer.mjs +451 -0
  83. package/dist/lexer.mjs.map +1 -0
  84. package/dist/parser.cjs +734 -0
  85. package/dist/parser.d.cts +40 -0
  86. package/dist/parser.d.cts.map +1 -0
  87. package/dist/parser.d.mts +40 -0
  88. package/dist/parser.d.mts.map +1 -0
  89. package/dist/parser.mjs +734 -0
  90. package/dist/parser.mjs.map +1 -0
  91. package/dist/reference.cjs +130 -0
  92. package/dist/reference.d.cts +11 -0
  93. package/dist/reference.d.cts.map +1 -0
  94. package/dist/reference.d.mts +11 -0
  95. package/dist/reference.d.mts.map +1 -0
  96. package/dist/reference.mjs +130 -0
  97. package/dist/reference.mjs.map +1 -0
  98. package/dist/template.cjs +85 -0
  99. package/dist/template.mjs +84 -0
  100. package/dist/template.mjs.map +1 -0
  101. package/dist/type-checker.cjs +582 -0
  102. package/dist/type-checker.d.cts +31 -0
  103. package/dist/type-checker.d.cts.map +1 -0
  104. package/dist/type-checker.d.mts +31 -0
  105. package/dist/type-checker.d.mts.map +1 -0
  106. package/dist/type-checker.mjs +573 -0
  107. package/dist/type-checker.mjs.map +1 -0
  108. package/package.json +29 -0
  109. package/src/ai-dev-loop/ash-run-result.test.ts +113 -0
  110. package/src/ai-dev-loop/ash-run-result.ts +46 -0
  111. package/src/ai-dev-loop/ash-typed-error.test.ts +136 -0
  112. package/src/ai-dev-loop/ash-typed-error.ts +50 -0
  113. package/src/ai-dev-loop/ash-validate.test.ts +54 -0
  114. package/src/ai-dev-loop/ash-validate.ts +34 -0
  115. package/src/ai-dev-loop/dev-loop.test.ts +364 -0
  116. package/src/ai-dev-loop/dev-loop.ts +156 -0
  117. package/src/ai-dev-loop/dry-run.test.ts +107 -0
  118. package/src/ai-dev-loop/e2e-multi-fix.test.ts +473 -0
  119. package/src/ai-dev-loop/e2e.test.ts +324 -0
  120. package/src/ai-dev-loop/index.ts +15 -0
  121. package/src/ai-dev-loop/invariants.test.ts +253 -0
  122. package/src/ai-dev-loop/live-mode.test.ts +63 -0
  123. package/src/ai-dev-loop/live-mode.ts +33 -0
  124. package/src/ai-dev-loop/meta-tools.test.ts +120 -0
  125. package/src/ai-dev-loop/meta-tools.ts +142 -0
  126. package/src/ai-dev-loop/structured-runner.test.ts +159 -0
  127. package/src/ai-dev-loop/structured-runner.ts +209 -0
  128. package/src/ai-dev-loop/system-prompt.test.ts +102 -0
  129. package/src/ai-dev-loop/system-prompt.ts +81 -0
  130. package/src/ast.ts +186 -0
  131. package/src/compiler.test.ts +2933 -0
  132. package/src/compiler.ts +1103 -0
  133. package/src/e2e.test.ts +552 -0
  134. package/src/index.ts +16 -0
  135. package/src/lexer.test.ts +538 -0
  136. package/src/lexer.ts +222 -0
  137. package/src/parser.test.ts +1024 -0
  138. package/src/parser.ts +835 -0
  139. package/src/reference.test.ts +166 -0
  140. package/src/reference.ts +125 -0
  141. package/src/template.test.ts +210 -0
  142. package/src/template.ts +139 -0
  143. package/src/type-checker.test.ts +1494 -0
  144. package/src/type-checker.ts +785 -0
  145. package/tsconfig.json +9 -0
  146. package/tsdown.config.ts +12 -0
@@ -0,0 +1 @@
1
+ {"version":3,"file":"template.mjs","names":[],"sources":["../src/template.ts"],"sourcesContent":["/**\n * ASH Template Parameter Resolution\n *\n * Resolves ${field} references in action parameters and paths\n * against the current stream record.\n */\n\nconst MAX_RESOLVED_SIZE = 65536; // 64KB\n\n// Match ${identifier.path} but not \\${...}\nconst TEMPLATE_RE = /(?<!\\\\)\\$\\{([^}]+)\\}/g;\n// Match whole-value template: entire string is a single ${field}\nconst WHOLE_VALUE_RE = /^\\$\\{([^}]+)\\}$/;\n\n/**\n * Access a nested value via dot-separated path.\n * Supports object fields and array indices (numeric keys).\n *\n * getNestedValue({data: {id: 42}}, \"data.id\") → 42\n * getNestedValue({items: [{name: \"a\"}]}, \"items.0.name\") → \"a\"\n */\nexport function getNestedValue(obj: unknown, path: string): unknown {\n const keys = path.split(\".\");\n let current: unknown = obj;\n for (const key of keys) {\n if (current === null || current === undefined) return undefined;\n if (typeof current !== \"object\") return undefined;\n current = (current as Record<string, unknown>)[key];\n }\n return current;\n}\n\n/**\n * Resolve a single template value against a stream record.\n *\n * - Non-string values pass through unchanged\n * - Whole-value \"${field}\" preserves the field's type\n * - Mixed \"text ${field}\" stringifies substitutions (JSON.stringify for objects)\n * - Escaped \\${...} produces literal ${...}\n * - Missing fields resolve to \"\"\n * - Resolved strings > 64KB throw\n */\nexport function resolveTemplate(value: unknown, record: Record<string, unknown>): unknown {\n if (typeof value !== \"string\") return value;\n\n // Whole-value: single ${field} with nothing else → preserve type\n const wholeMatch = value.match(WHOLE_VALUE_RE);\n if (wholeMatch) {\n const resolved = getNestedValue(record, wholeMatch[1]);\n if (resolved === undefined || resolved === null) return \"\";\n // Size check for string values\n if (typeof resolved === \"string\" && 
resolved.length > MAX_RESOLVED_SIZE) {\n throw new Error(\n `Template resolution error: resolved value exceeds 64KB limit (${resolved.length} bytes)`,\n );\n }\n return resolved;\n }\n\n // Mixed template: replace ${...} with stringified values\n const result = value.replace(TEMPLATE_RE, (_, field: string) => {\n const val = getNestedValue(record, field);\n if (val === undefined || val === null) return \"\";\n if (typeof val === \"object\") return JSON.stringify(val);\n return String(val);\n });\n\n // Strip escape: \\${ → ${\n const final = result.replace(/\\\\\\$/g, \"$\");\n\n // Size check\n if (final.length > MAX_RESOLVED_SIZE) {\n throw new Error(\n `Template resolution error: resolved value exceeds 64KB limit (${final.length} bytes)`,\n );\n }\n\n return final;\n}\n\n/**\n * Resolve all template references in an action's parameters.\n */\nexport function resolveActionParams(\n params: Record<string, unknown>,\n record: Record<string, unknown>,\n): Record<string, unknown> {\n const resolved: Record<string, unknown> = {};\n for (const [key, value] of Object.entries(params)) {\n resolved[key] = resolveTemplate(value, record);\n }\n return resolved;\n}\n\n/**\n * Resolve template references in a path string.\n * Always returns a string. Validates the resolved path:\n * - No empty segments (//)\n * - No path traversal (..)\n */\nexport function resolveTemplatePath(\n path: string,\n record: Record<string, unknown>,\n): string {\n if (!path.includes(\"${\")) return path;\n\n const resolved = path.replace(TEMPLATE_RE, (_, field: string) => {\n const val = getNestedValue(record, field);\n if (val === undefined || val === null) return \"\";\n return String(val);\n });\n\n // Strip escape: \\${ → ${\n const final = resolved.replace(/\\\\\\$/g, \"$\");\n\n // Validate resolved path\n if (final.includes(\"//\")) {\n throw new Error(\n `Template path resolution error: resolved path '${final}' contains empty segment (//). 
Check that template fields are not empty.`,\n );\n }\n if (final.split(\"/\").some((seg) => seg === \"..\")) {\n throw new Error(\n `Template path resolution error: resolved path '${final}' contains path traversal (..)`,\n );\n }\n\n return final;\n}\n\n/**\n * Check if a value contains any unescaped ${...} template references.\n */\nexport function hasTemplates(value: unknown): boolean {\n if (typeof value !== \"string\") return false;\n // Reset regex lastIndex since it's global\n TEMPLATE_RE.lastIndex = 0;\n return TEMPLATE_RE.test(value);\n}\n"],"mappings":";;;;;;;AAOA,MAAM,oBAAoB;AAG1B,MAAM,cAAc;AAEpB,MAAM,iBAAiB;;;;;;;;AASvB,SAAgB,eAAe,KAAc,MAAuB;CAClE,MAAM,OAAO,KAAK,MAAM,IAAI;CAC5B,IAAI,UAAmB;AACvB,MAAK,MAAM,OAAO,MAAM;AACtB,MAAI,YAAY,QAAQ,YAAY,OAAW,QAAO;AACtD,MAAI,OAAO,YAAY,SAAU,QAAO;AACxC,YAAW,QAAoC;;AAEjD,QAAO;;;;;;;;;;;;AAaT,SAAgB,gBAAgB,OAAgB,QAA0C;AACxF,KAAI,OAAO,UAAU,SAAU,QAAO;CAGtC,MAAM,aAAa,MAAM,MAAM,eAAe;AAC9C,KAAI,YAAY;EACd,MAAM,WAAW,eAAe,QAAQ,WAAW,GAAG;AACtD,MAAI,aAAa,UAAa,aAAa,KAAM,QAAO;AAExD,MAAI,OAAO,aAAa,YAAY,SAAS,SAAS,kBACpD,OAAM,IAAI,MACR,iEAAiE,SAAS,OAAO,SAClF;AAEH,SAAO;;CAYT,MAAM,QARS,MAAM,QAAQ,cAAc,GAAG,UAAkB;EAC9D,MAAM,MAAM,eAAe,QAAQ,MAAM;AACzC,MAAI,QAAQ,UAAa,QAAQ,KAAM,QAAO;AAC9C,MAAI,OAAO,QAAQ,SAAU,QAAO,KAAK,UAAU,IAAI;AACvD,SAAO,OAAO,IAAI;GAClB,CAGmB,QAAQ,SAAS,IAAI;AAG1C,KAAI,MAAM,SAAS,kBACjB,OAAM,IAAI,MACR,iEAAiE,MAAM,OAAO,SAC/E;AAGH,QAAO;;;;;AAMT,SAAgB,oBACd,QACA,QACyB;CACzB,MAAM,WAAoC,EAAE;AAC5C,MAAK,MAAM,CAAC,KAAK,UAAU,OAAO,QAAQ,OAAO,CAC/C,UAAS,OAAO,gBAAgB,OAAO,OAAO;AAEhD,QAAO;;;;;;;;AAST,SAAgB,oBACd,MACA,QACQ;AACR,KAAI,CAAC,KAAK,SAAS,KAAK,CAAE,QAAO;CASjC,MAAM,QAPW,KAAK,QAAQ,cAAc,GAAG,UAAkB;EAC/D,MAAM,MAAM,eAAe,QAAQ,MAAM;AACzC,MAAI,QAAQ,UAAa,QAAQ,KAAM,QAAO;AAC9C,SAAO,OAAO,IAAI;GAClB,CAGqB,QAAQ,SAAS,IAAI;AAG5C,KAAI,MAAM,SAAS,KAAK,CACtB,OAAM,IAAI,MACR,kDAAkD,MAAM,0EACzD;AAEH,KAAI,MAAM,MAAM,IAAI,CAAC,MAAM,QAAQ,QAAQ,KAAK,CAC9C,OAAM,IAAI,MACR,kDAAkD,MAAM,gCACzD;AAGH,QAAO"}
@@ -0,0 +1,582 @@
1
+
2
//#region src/type-checker.ts
/**
 * I/O manifests for every built-in pipeline command. `stdin`/`stdout`
 * describe the stream type each command consumes/produces so the checker
 * can verify stage-to-stage compatibility. Built table-driven from
 * [name, stdin, stdout] triples to keep the declarations compact.
 */
const BUILTIN_MANIFESTS = Object.fromEntries([
  ["find", "none", "object_stream"],
  ["where", "object_stream", "object_stream"],
  ["map", "object_stream", "object_stream"],
  ["save", "object_stream", "none"],
  ["publish", "object_stream", "none"],
  ["tee", "object_stream", "object_stream"],
  ["fanout", "object_stream", "object_stream"],
  ["output", "object_stream", "object_stream"],
  ["input", "none", "object_stream"],
  ["count", "object_stream", "object_stream"],
  ["group-by", "object_stream", "object_stream"],
  ["action", "object_stream", "object_stream"],
  ["route", "object_stream", "none"],
  ["lookup", "object_stream", "object_stream"]
].map(([name, stdin, stdout]) => [name, { name, stdin, stdout }]));
75
/**
 * Verify that adjacent pipeline stages have compatible stream types:
 * a stage whose predecessor produces no output must not expect an
 * object stream on stdin. Returns a list of type errors (empty when
 * the pipeline is well-typed or has fewer than two stages).
 */
function checkPipelineTypes(stages) {
  const errors = [];
  for (let i = 1; i < stages.length; i++) {
    const prev = stages[i - 1];
    const curr = stages[i];
    const prevManifest = BUILTIN_MANIFESTS[prev.kind];
    const currManifest = BUILTIN_MANIFESTS[curr.kind];
    // A stage not present in the manifest table cannot be type-checked.
    if (!prevManifest || !currManifest) {
      errors.push({
        message: `Unknown command at stage ${i}`,
        stage: i,
        kind: "unknown_command"
      });
      continue;
    }
    // Only mismatch flagged: 'none' producer feeding a stream consumer.
    if (prevManifest.stdout === "none" && currManifest.stdin === "object_stream") {
      errors.push({
        message: `Type mismatch at stage ${i}: ${prev.kind} outputs 'none' but ${curr.kind} expects 'object_stream'`,
        stage: i,
        expected: currManifest.stdin,
        actual: prevManifest.stdout
      });
    }
  }
  return errors;
}
100
/**
 * Static "lint" pass over a whole program: enforces structural limits,
 * prohibited patterns, and safety rules beyond plain stream typing.
 * Accumulates and returns every error/warning found (kind "prohibited"
 * or "unknown_command"; entries with severity "warning" are advisory).
 */
function checkProhibitedPatterns(program) {
	const errors = [];
	const jobNames = new Set(program.jobs.map((j) => j.name));
	// --- Job-name rules: uniqueness and length cap ---
	const seenJobs = /* @__PURE__ */ new Set();
	for (const job of program.jobs) {
		if (seenJobs.has(job.name)) errors.push({
			message: `Duplicate job name '${job.name}' — each job must have a unique name`,
			kind: "prohibited"
		});
		if (job.name.length > 64) errors.push({
			message: `Job name '${job.name.slice(0, 20)}...' exceeds 64 characters — keep job names concise`,
			kind: "prohibited",
			severity: "warning"
		});
		seenJobs.add(job.name);
	}
	// --- Program/pipeline size limits (warnings only) ---
	if (program.jobs.length > 50) errors.push({
		message: `Program has ${program.jobs.length} jobs — programs with over 50 jobs may indicate over-complexity`,
		kind: "prohibited",
		severity: "warning"
	});
	for (const job of program.jobs) {
		if (job.pipeline.length === 0) errors.push({
			message: `Job '${job.name}' has empty pipeline — did you forget to add stages?`,
			kind: "prohibited",
			severity: "warning"
		});
		if (job.pipeline.length > 30) errors.push({
			message: `Job '${job.name}' has ${job.pipeline.length} stages — pipelines over 30 stages may indicate over-complexity`,
			kind: "prohibited",
			severity: "warning"
		});
		for (const stage of job.pipeline) {
			// hasOwn guards against prototype-chain keys masquerading as commands.
			if (!Object.hasOwn(BUILTIN_MANIFESTS, stage.kind)) errors.push({
				message: `Unknown command '${stage.kind}' — not registered in manifest`,
				kind: "unknown_command"
			});
			// Path hygiene: no traversal; '$' allowed only as ${identifier} templates
			// (the regex strips well-formed templates before looking for stray '$').
			if ("path" in stage && typeof stage.path === "string") {
				const path = stage.path;
				if (path.includes("..")) errors.push({
					message: `Path '${path}' contains '..' — path traversal is not allowed`,
					kind: "prohibited"
				});
				if (path.replace(/\$\{[a-zA-Z_][a-zA-Z0-9_.]*\}/g, "").includes("$")) errors.push({
					message: `Path '${path}' contains '$' — variable interpolation in paths is not allowed`,
					kind: "prohibited",
					severity: "warning"
				});
			}
			checkExpressionDivZero(stage, errors);
			// Route sanity: targets/fallback must exist; a missing default
			// silently drops unmatched items.
			if (stage.kind === "route") {
				for (const branch of stage.branches) if (!jobNames.has(branch.targetJob)) errors.push({
					message: `Route target job '${branch.targetJob}' not found in program`,
					kind: "prohibited"
				});
				if (stage.fallback && !jobNames.has(stage.fallback)) errors.push({
					message: `Route fallback job '${stage.fallback}' not found in program`,
					kind: "prohibited"
				});
				if (!stage.fallback) errors.push({
					message: `Job '${job.name}': route has no default branch — unmatched items are silently dropped, add '_ -> fallback_job' to handle unexpected values`,
					kind: "prohibited",
					severity: "warning"
				});
			}
		}
	}
	// --- Fanout limits and duplicate-write detection ---
	for (const job of program.jobs) {
		checkFanoutDepth(job.pipeline, 0, errors);
		checkFanoutDuplicateWrites(job, errors);
	}
	// --- Tee amplification cap ---
	for (const job of program.jobs) {
		const teeCount = job.pipeline.filter((s) => s.kind === "tee").length;
		if (teeCount > 5) errors.push({
			message: `Job '${job.name}' has ${teeCount} tee stages — more than 5 tee stages risks data amplification`,
			kind: "prohibited",
			severity: "warning"
		});
	}
	// --- Routing graph must be acyclic ---
	checkRouteDag(program, errors);
	// --- Trigger declarations: completeness and cron field count.
	// Note the dangling else: the `else` binds to `if (!job.trigger.expression)`.
	for (const job of program.jobs) if (job.trigger) {
		if (job.trigger.kind === "event") {
			if (!job.trigger.path || !job.trigger.event) errors.push({
				message: `Job '${job.name}' has incomplete trigger declaration`,
				kind: "prohibited"
			});
		} else if (job.trigger.kind === "cron") if (!job.trigger.expression) errors.push({
			message: `Job '${job.name}' has empty cron expression`,
			kind: "prohibited"
		});
		else {
			const fields = job.trigger.expression.trim().split(/\s+/);
			if (fields.length < 5 || fields.length > 7) errors.push({
				message: `Job '${job.name}' has invalid cron expression "${job.trigger.expression}" — expected 5-7 fields, got ${fields.length}`,
				kind: "prohibited"
			});
		}
	}
	// --- Event-loop prevention: a job must not write to its own trigger path ---
	for (const job of program.jobs) if (job.trigger?.kind === "event") {
		const triggerPath = job.trigger.path;
		for (const stage of job.pipeline) if ((stage.kind === "save" || stage.kind === "tee") && stage.path === triggerPath) errors.push({
			message: `Job '${job.name}' writes to its own trigger path '${triggerPath}' — this creates an event loop`,
			kind: "prohibited"
		});
	}
	// --- Self-execution guard: actions that would re-enter ASH itself ---
	const SELF_EXEC_BLOCKED = new Set(["/.actions/run", "/.actions/exec"]);
	for (const job of program.jobs) for (const stage of job.pipeline) if (stage.kind === "action" && SELF_EXEC_BLOCKED.has(stage.path)) errors.push({
		message: `Job '${job.name}' calls action '${stage.path}' — self-referencing ASH execution risks recursion and bypasses compile-time checks`,
		kind: "prohibited"
	});
	// --- Field-name hygiene: prototype-pollution vectors (hard errors) and
	// JS reserved identifiers (warnings) in map keys/fields/expressions ---
	const PROTO_POLLUTION_FIELDS = new Set([
		"constructor",
		"__proto__",
		"prototype",
		"__defineGetter__",
		"__defineSetter__"
	]);
	const JS_RESERVED_FIELDS = new Set([
		...PROTO_POLLUTION_FIELDS,
		"toString",
		"valueOf",
		"hasOwnProperty",
		"class",
		"return",
		"delete",
		"void",
		"typeof",
		"new"
	]);
	for (const job of program.jobs) for (const stage of job.pipeline) {
		const protoFields = [];
		const jsFields = [];
		if (stage.kind === "map") {
			if (stage.field) classifyField(stage.field, PROTO_POLLUTION_FIELDS, JS_RESERVED_FIELDS, protoFields, jsFields);
			if (stage.expression) collectJsReservedFields(stage.expression, JS_RESERVED_FIELDS, jsFields);
			if (stage.exprMappings) {
				for (const key of Object.keys(stage.exprMappings)) classifyField(key, PROTO_POLLUTION_FIELDS, JS_RESERVED_FIELDS, protoFields, jsFields);
				for (const expr of Object.values(stage.exprMappings)) collectJsReservedFields(expr, JS_RESERVED_FIELDS, jsFields);
			}
			if (stage.mappings) for (const key of Object.keys(stage.mappings)) classifyField(key, PROTO_POLLUTION_FIELDS, JS_RESERVED_FIELDS, protoFields, jsFields);
			if (stage.exprMappings) checkMapDuplicateKeys(stage.exprMappings, job.name, errors);
			if (stage.mappings) checkMapDuplicateKeys(stage.mappings, job.name, errors);
		}
		if (stage.kind === "where" && JS_RESERVED_FIELDS.has(stage.left)) classifyField(stage.left, PROTO_POLLUTION_FIELDS, JS_RESERVED_FIELDS, protoFields, jsFields);
		for (const field of protoFields) errors.push({
			message: `Job '${job.name}': field '${field}' is a prototype pollution vector — not allowed as map key or field name`,
			kind: "prohibited"
		});
		for (const field of jsFields) errors.push({
			message: `Job '${job.name}': field '${field}' is a JavaScript reserved identifier — may expose runtime internals`,
			kind: "prohibited",
			severity: "warning"
		});
	}
	// --- Division-by-field risk in map expressions ---
	for (const job of program.jobs) for (const stage of job.pipeline) if (stage.kind === "map") {
		if (stage.expression) checkDivisionRisk(stage.expression, job.name, errors);
		if (stage.exprMappings) for (const expr of Object.values(stage.exprMappings)) checkDivisionRisk(expr, job.name, errors);
	}
	return errors;
}
260
/**
 * Warn when an expression divides by a record field: a zero field value
 * would yield NaN/Infinity at runtime. Recurses through binary nodes;
 * all other node kinds are leaves for this check.
 */
function checkDivisionRisk(expr, jobName, errors) {
  if (expr.kind !== "binary") return;
  if (expr.op === "/" && expr.right.kind === "field_access") {
    errors.push({
      message: `Job '${jobName}': division by field '${expr.right.path}' may produce NaN/Infinity if zero — consider adding a where clause to filter`,
      kind: "prohibited",
      severity: "warning"
    });
  }
  checkDivisionRisk(expr.left, jobName, errors);
  checkDivisionRisk(expr.right, jobName, errors);
}
273
/**
 * Walk an expression tree and append to `out` every field-access path
 * that appears in the `reserved` identifier set.
 */
function collectJsReservedFields(expr, reserved, out) {
  switch (expr.kind) {
    case "field_access":
      if (reserved.has(expr.path)) out.push(expr.path);
      break;
    case "binary":
      collectJsReservedFields(expr.left, reserved, out);
      collectJsReservedFields(expr.right, reserved, out);
      break;
  }
}
280
/**
 * Route a field name into the prototype-pollution bucket, the JS-reserved
 * bucket, or neither. Pollution takes priority over reserved.
 */
function classifyField(field, proto, jsReserved, protoOut, jsOut) {
  const bucket = proto.has(field) ? protoOut : jsReserved.has(field) ? jsOut : null;
  if (bucket) bucket.push(field);
}
284
/**
 * Flag duplicate keys in a map-stage mapping object.
 * NOTE(review): Object.keys on a plain object never repeats a key, so this
 * warning can only fire if `obj` is an exotic object (e.g. a Proxy) —
 * confirm how the parser represents duplicate map keys upstream.
 */
function checkMapDuplicateKeys(obj, jobName, errors) {
  const seen = new Set();
  Object.keys(obj).forEach((key) => {
    if (seen.has(key)) {
      errors.push({
        message: `Job '${jobName}': map has duplicate key '${key}' — last value wins silently, potential data confusion`,
        kind: "prohibited",
        severity: "warning"
      });
    }
    seen.add(key);
  });
}
295
/**
 * Recursively check fanout stages: warn on more than 10 branches, and on
 * nesting deeper than 3 levels. On exceeding the depth limit the whole
 * walk stops (returns without visiting later stages), matching the
 * original short-circuit behavior.
 */
function checkFanoutDepth(stages, depth, errors) {
  for (const stage of stages) {
    if (stage.kind !== "fanout") continue;
    if (stage.branches.length > 10) {
      errors.push({
        message: `Fanout has ${stage.branches.length} branches — more than 10 branches may cause excessive parallelism`,
        kind: "prohibited",
        severity: "warning"
      });
    }
    if (depth + 1 > 3) {
      errors.push({
        message: `Fanout nesting depth exceeds 3 levels — deeply nested fanout may indicate over-complexity`,
        kind: "prohibited",
        severity: "warning"
      });
      return;
    }
    for (const branch of stage.branches) checkFanoutDepth(branch, depth + 1, errors);
  }
}
313
/**
 * Within each fanout stage of a job, count save/tee writes per path and
 * warn when more than one branch writes to the same path (last write
 * wins silently at runtime).
 */
function checkFanoutDuplicateWrites(job, errors) {
  for (const stage of job.pipeline) {
    if (stage.kind !== "fanout") continue;
    const writeCounts = new Map();
    for (const branch of stage.branches) {
      for (const s of branch) {
        if ((s.kind === "save" || s.kind === "tee") && "path" in s) {
          writeCounts.set(s.path, (writeCounts.get(s.path) ?? 0) + 1);
        }
      }
    }
    for (const [path, count] of writeCounts) {
      if (count > 1) {
        errors.push({
          message: `Job '${job.name}': fanout has ${count} branches writing to '${path}' — last write wins silently, audit trail lost`,
          kind: "prohibited",
          severity: "warning"
        });
      }
    }
  }
}
328
/**
 * For map stages only: flag expressions containing a literal division by
 * zero, checking both the single expression form and each expression in
 * exprMappings. One warning is pushed per offending expression.
 */
function checkExpressionDivZero(stage, errors) {
  if (stage.kind !== "map") return;
  const report = () => errors.push({
    message: "Expression contains literal division by zero",
    kind: "prohibited",
    severity: "warning"
  });
  for (const expr of Object.values(stage.exprMappings ?? {})) {
    if (hasLiteralDivByZero(expr)) report();
  }
  if (stage.expression && hasLiteralDivByZero(stage.expression)) report();
}
343
/**
 * True when the expression tree contains a division whose right operand
 * is the literal 0. Non-binary nodes are leaves and return false.
 */
function hasLiteralDivByZero(expr) {
  if (expr.kind !== "binary") return false;
  const { op, left, right } = expr;
  if (op === "/" && right.kind === "literal" && right.value === 0) return true;
  return hasLiteralDivByZero(left) || hasLiteralDivByZero(right);
}
348
/**
 * Detect cycles in the job-routing graph built from route branches and
 * fallbacks. DFS with an on-path set; when a cycle is found, one error
 * is reported for every edge along the detected cycle path as the result
 * propagates back up (matches the original behavior).
 */
function checkRouteDag(program, errors) {
  // Build adjacency: job name -> set of jobs it can route to.
  const graph = new Map();
  for (const job of program.jobs) {
    const targets = new Set();
    for (const stage of job.pipeline) {
      if (stage.kind !== "route") continue;
      stage.branches.forEach((b) => targets.add(b.targetJob));
      if (stage.fallback) targets.add(stage.fallback);
    }
    if (targets.size > 0) graph.set(job.name, targets);
  }
  const done = new Set();
  const onPath = new Set();
  const visit = (node) => {
    if (onPath.has(node)) return true;
    if (done.has(node)) return false;
    done.add(node);
    onPath.add(node);
    for (const next of graph.get(node) ?? []) {
      if (visit(next)) {
        errors.push({
          message: `Route cycle detected: job '${node}' → job '${next}' forms a cycle`,
          kind: "prohibited"
        });
        return true;
      }
    }
    onPath.delete(node);
    return false;
  };
  for (const node of graph.keys()) visit(node);
}
380
// Allowed value sets, hoisted so validators do not rebuild them per call.
const APPROVAL_MODES = new Set(["human", "auto"]);
const ON_ERROR_STRATEGIES = new Set(["skip", "fail", "save"]);
const BUDGET_DIMENSIONS = new Set([
  "actions",
  "writes",
  "records",
  "tokens",
  "cost"
]);
const CAP_OPERATIONS = new Set(["read", "write", "exec"]);

/**
 * Per-annotation argument validators. Each returns an error message
 * string, or null when the arguments are acceptable.
 */
const ANNOTATION_VALIDATORS = {
  approval: (args) => {
    if (args.length === 0) return null;
    return APPROVAL_MODES.has(args[0])
      ? null
      : `@approval accepts 'human' or 'auto', got '${args[0]}'`;
  },
  readonly: () => null,
  retry: (args) => {
    if (args.length === 0) return null;
    const n = Number(args[0]);
    // Number.isInteger rejects NaN, so no separate isNaN check is needed.
    return Number.isInteger(n) && n >= 0
      ? null
      : `@retry requires a non-negative integer, got '${args[0]}'`;
  },
  timeout: (args) => {
    if (args.length === 0) return null;
    const n = Number(args[0]);
    // !(n > 0) covers both NaN and non-positive values.
    if (!(n > 0)) return `@timeout requires a positive number, got '${args[0]}'`;
    if (n > 36e5) return `@timeout(${args[0]}) exceeds hard limit of 3600000ms (1 hour) — reduce timeout`;
    return null;
  },
  on_error: (args) => {
    if (args.length === 0) return `@on_error requires a strategy: skip, fail, or save <path>`;
    if (!ON_ERROR_STRATEGIES.has(args[0])) return `@on_error accepts 'skip', 'fail', or 'save', got '${args[0]}'`;
    return null;
  },
  budget: (args) => {
    if (args.length === 0) return `@budget requires at least one limit: @budget(actions 50, records 10000)`;
    if (args.length % 2 !== 0) return `@budget requires dimension/value pairs — got odd number of args`;
    // args alternate dimension, value, dimension, value, ...
    for (let i = 0; i < args.length; i += 2) {
      if (!BUDGET_DIMENSIONS.has(args[i])) return `@budget dimension must be one of: ${[...BUDGET_DIMENSIONS].join(", ")} — got '${args[i]}'`;
      const n = Number(args[i + 1]);
      if (!(n > 0)) return `@budget value must be a positive number, got '${args[i + 1]}'`;
    }
    return null;
  },
  caps: (args) => {
    if (args.length === 0) return `@caps requires at least one capability: @caps(read /path/*, write /out/*)`;
    if (args.length % 2 !== 0) return `@caps requires operation/path pairs — got odd number of args`;
    // args alternate operation, path-glob, operation, path-glob, ...
    for (let i = 0; i < args.length; i += 2) {
      if (!CAP_OPERATIONS.has(args[i])) return `@caps operation must be 'read', 'write', or 'exec', got '${args[i]}'`;
      if (!args[i + 1].startsWith("/")) return `@caps path must start with '/', got '${args[i + 1]}'`;
    }
    return null;
  }
};
441
// Names of all registered built-in commands; surfaced as suggestions in
// unknown-command diagnostics.
const BUILTIN_COMMAND_NAMES = Object.keys(BUILTIN_MANIFESTS);
442
/**
 * Convert raw pipeline type errors into diagnostic records with a fixed
 * ASH_TYPE_MISMATCH code, carrying the message and stage index through.
 */
function typeErrorsToDiagnostics(errors) {
  return errors.map(({ message, stage }) => ({
    code: "ASH_TYPE_MISMATCH",
    message,
    stage
  }));
}
449
/**
 * Convert compile-phase errors into diagnostics. Unknown commands get the
 * ASH_UNKNOWN_COMMAND code plus the list of valid command names as a
 * suggestion; everything else maps to ASH_ANNOTATION_INVALID.
 */
function compileErrorsToDiagnostics(errors) {
  return errors.map((e) => {
    const unknown = e.kind === "unknown_command";
    return {
      code: unknown ? "ASH_UNKNOWN_COMMAND" : "ASH_ANNOTATION_INVALID",
      severity: e.severity,
      message: e.message,
      suggestion: unknown ? BUILTIN_COMMAND_NAMES : void 0
    };
  });
}
457
/**
 * Convert annotation errors into diagnostics with a fixed
 * ASH_ANNOTATION_INVALID code, passing severity and message through.
 */
function annotationErrorsToDiagnostics(errors) {
  return errors.map(({ severity, message }) => ({
    code: "ASH_ANNOTATION_INVALID",
    severity,
    message
  }));
}
464
/**
 * Classify a raw parser error message into a structured diagnostic.
 * "duplicate variable" takes precedence over "unterminated"; anything
 * else falls through to ASH_SYNTAX_UNEXPECTED. Extracts optional
 * "line N" / "column N" positions and strips a leading "Error:" prefix.
 */
function parseSyntaxError(errorMessage) {
  let code = "ASH_SYNTAX_UNEXPECTED";
  if (/duplicate variable/i.test(errorMessage)) code = "ASH_DUPLICATE_VAR";
  else if (/unterminated/i.test(errorMessage)) code = "ASH_SYNTAX_UNTERMINATED";
  const lineMatch = errorMessage.match(/line\s+(\d+)/i);
  const colMatch = errorMessage.match(/column\s+(\d+)/i);
  return {
    code,
    message: errorMessage.replace(/^Error:\s*/i, ""),
    line: lineMatch ? Number(lineMatch[1]) : void 0,
    column: colMatch ? Number(colMatch[1]) : void 0
  };
}
477
+ function checkAnnotations(job) {
478
+ const errors = [];
479
+ for (const ann of job.annotations) {
480
+ const validator = Object.hasOwn(ANNOTATION_VALIDATORS, ann.name) ? ANNOTATION_VALIDATORS[ann.name] : void 0;
481
+ if (!validator) {
482
+ errors.push({
483
+ message: `Unknown annotation '@${ann.name}' — not in allowed set`,
484
+ kind: "prohibited"
485
+ });
486
+ continue;
487
+ }
488
+ const err = validator(ann.args);
489
+ if (err) errors.push({
490
+ message: err,
491
+ kind: "prohibited"
492
+ });
493
+ }
494
+ const annCounts = /* @__PURE__ */ new Map();
495
+ for (const ann of job.annotations) annCounts.set(ann.name, (annCounts.get(ann.name) ?? 0) + 1);
496
+ for (const [name, count] of annCounts) if (count > 1) errors.push({
497
+ message: `Job '${job.name}': @${name} appears ${count} times — last value wins silently, potential audit deception`,
498
+ kind: "prohibited"
499
+ });
500
+ const retryAnns = job.annotations.filter((a) => a.name === "retry");
501
+ for (const ann of retryAnns) if (ann.args.length > 0) {
502
+ const n = Number(ann.args[0]);
503
+ if (!isNaN(n) && n > 100) errors.push({
504
+ message: `@retry(${n}) exceeds hard limit of 100 — reduce retry count`,
505
+ kind: "prohibited"
506
+ });
507
+ }
508
+ const approvalAnns = job.annotations.filter((a) => a.name === "approval");
509
+ if (approvalAnns.length > 1) {
510
+ const values = approvalAnns.map((a) => a.args[0]).filter(Boolean);
511
+ if (values.includes("human") && values.includes("auto")) errors.push({
512
+ message: `@approval conflict: 'human' and 'auto' cannot both be set on job '${job.name}'`,
513
+ kind: "prohibited"
514
+ });
515
+ }
516
+ return errors;
517
+ }
518
+ function parseCaps(ann) {
519
+ const caps = [];
520
+ for (let i = 0; i < ann.args.length; i += 2) caps.push({
521
+ op: ann.args[i],
522
+ pathGlob: ann.args[i + 1]
523
+ });
524
+ return caps;
525
+ }
526
+ function capMatches(pathGlob, actual) {
527
+ if (pathGlob === actual) return true;
528
+ if (pathGlob.endsWith("/*")) {
529
+ const prefix = pathGlob.slice(0, -1);
530
+ return actual.startsWith(prefix) || actual === pathGlob.slice(0, -2);
531
+ }
532
+ if (pathGlob.endsWith("*")) return actual.startsWith(pathGlob.slice(0, -1));
533
+ return false;
534
+ }
535
+ function hasCapFor(caps, op, path) {
536
+ return caps.some((c) => c.op === op && capMatches(c.pathGlob, path));
537
+ }
538
+ const STAGE_OP_MAP = {
539
+ find: "read",
540
+ lookup: "read",
541
+ save: "write",
542
+ publish: "write",
543
+ tee: "write",
544
+ action: "exec"
545
+ };
546
+ function checkStagesCaps(stages, caps, diagnostics) {
547
+ for (const stage of stages) {
548
+ if (stage.kind === "action" && stage.relative) continue;
549
+ if ("path" in stage && typeof stage.path === "string" && stage.path.includes("${")) continue;
550
+ const op = Object.hasOwn(STAGE_OP_MAP, stage.kind) ? STAGE_OP_MAP[stage.kind] : void 0;
551
+ if (op && "path" in stage && typeof stage.path === "string") {
552
+ const path = stage.path;
553
+ if (!hasCapFor(caps, op, path)) diagnostics.push({
554
+ code: "ASH_CAP_DENIED",
555
+ message: `@caps denied: '${stage.kind} ${path}' requires '${op}' capability for '${path}'`
556
+ });
557
+ }
558
+ if (stage.kind === "fanout") for (const branch of stage.branches) checkStagesCaps(branch, caps, diagnostics);
559
+ }
560
+ }
561
+ function checkJobCaps(job) {
562
+ const capsAnns = job.annotations.filter((a) => a.name === "caps");
563
+ if (capsAnns.length === 0) return [];
564
+ const diagnostics = [];
565
+ const allCaps = [];
566
+ for (const ann of capsAnns) if (ann.args.length > 0 && ann.args.length % 2 === 0) allCaps.push(...parseCaps(ann));
567
+ if (allCaps.length === 0) return diagnostics;
568
+ checkStagesCaps(job.pipeline, allCaps, diagnostics);
569
+ return diagnostics;
570
+ }
571
+
572
+ //#endregion
573
+ exports.annotationErrorsToDiagnostics = annotationErrorsToDiagnostics;
574
+ exports.checkAnnotations = checkAnnotations;
575
+ exports.checkJobCaps = checkJobCaps;
576
+ exports.checkPipelineTypes = checkPipelineTypes;
577
+ exports.checkProhibitedPatterns = checkProhibitedPatterns;
578
+ exports.compileErrorsToDiagnostics = compileErrorsToDiagnostics;
579
+ exports.hasCapFor = hasCapFor;
580
+ exports.parseCaps = parseCaps;
581
+ exports.parseSyntaxError = parseSyntaxError;
582
+ exports.typeErrorsToDiagnostics = typeErrorsToDiagnostics;
@@ -0,0 +1,31 @@
1
+ import { JobDeclaration, PipelineStage, Program } from "./ast.cjs";
2
+
3
+ //#region src/type-checker.d.ts
4
+ type DiagnosticCode = "ASH_SYNTAX_UNTERMINATED" | "ASH_SYNTAX_UNEXPECTED" | "ASH_UNKNOWN_COMMAND" | "ASH_TYPE_MISMATCH" | "ASH_UNDEFINED_VAR" | "ASH_DUPLICATE_VAR" | "ASH_DUPLICATE_JOB" | "ASH_ANNOTATION_INVALID" | "ASH_ANNOTATION_CONFLICT" | "ASH_READONLY_VIOLATION" | "ASH_CAP_DENIED" | "ASH_PATH_TRAVERSAL" | "ASH_EMPTY_JOB" | "ASH_DIV_ZERO" | "ASH_EXCESSIVE_RETRY" | "ASH_EVENT_LOOP" | "ASH_SELF_RECURSION" | "ASH_DIVISION_RISK" | "ASH_MIXED_SECURITY" | "ASH_JS_GLOBAL_FIELD" | "ASH_LET_WRITE" | "ASH_LET_PRE_APPROVAL" | "ASH_PARAM_WRITE_GATE" | "ASH_UNCAPPED_ACTION" | "ASH_ACTION_AMPLIFICATION" | "ASH_CROSS_PROVIDER_ACTION" | "ASH_RELATIVE_ACTION_NO_FIND" | "ASH_UNCAPPED_WRITE" | "ASH_BUDGET_EXCESSIVE";
5
+ interface AshDiagnostic {
6
+ code: DiagnosticCode;
7
+ severity?: "error" | "warning";
8
+ message: string;
9
+ line?: number;
10
+ column?: number;
11
+ stage?: number;
12
+ suggestion?: string[];
13
+ }
14
+ type StreamType = "object_stream" | "single_object" | "none";
15
+ interface TypeError {
16
+ message: string;
17
+ stage: number;
18
+ expected?: StreamType;
19
+ actual?: StreamType;
20
+ }
21
+ interface CompileError {
22
+ message: string;
23
+ kind: "prohibited" | "unknown_command";
24
+ severity?: "error" | "warning";
25
+ }
26
+ declare function checkPipelineTypes(stages: PipelineStage[]): TypeError[];
27
+ declare function checkProhibitedPatterns(program: Program): CompileError[];
28
+ declare function checkAnnotations(job: JobDeclaration): CompileError[];
29
+ //#endregion
30
+ export { AshDiagnostic, DiagnosticCode, checkAnnotations, checkPipelineTypes, checkProhibitedPatterns };
31
+ //# sourceMappingURL=type-checker.d.cts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"type-checker.d.cts","names":[],"sources":["../src/type-checker.ts"],"mappings":";;;KAIY,cAAA;AAAA,UAgCK,aAAA;EACf,IAAA,EAAM,cAAA;EACN,QAAA;EACA,OAAA;EACA,IAAA;EACA,MAAA;EACA,KAAA;EACA,UAAA;AAAA;AAAA,KAGU,UAAA;AAAA,UAQK,SAAA;EACf,OAAA;EACA,KAAA;EACA,QAAA,GAAW,UAAA;EACX,MAAA,GAAS,UAAA;AAAA;AAAA,UAGM,YAAA;EACf,OAAA;EACA,IAAA;EACA,QAAA;AAAA;AAAA,iBAoBc,kBAAA,CAAmB,MAAA,EAAQ,aAAA,KAAkB,SAAA;AAAA,iBA4B7C,uBAAA,CAAwB,OAAA,EAAS,OAAA,GAAU,YAAA;AAAA,iBAwhB3C,gBAAA,CAAiB,GAAA,EAAK,cAAA,GAAiB,YAAA"}
@@ -0,0 +1,31 @@
1
+ import { JobDeclaration, PipelineStage, Program } from "./ast.mjs";
2
+
3
+ //#region src/type-checker.d.ts
4
+ type DiagnosticCode = "ASH_SYNTAX_UNTERMINATED" | "ASH_SYNTAX_UNEXPECTED" | "ASH_UNKNOWN_COMMAND" | "ASH_TYPE_MISMATCH" | "ASH_UNDEFINED_VAR" | "ASH_DUPLICATE_VAR" | "ASH_DUPLICATE_JOB" | "ASH_ANNOTATION_INVALID" | "ASH_ANNOTATION_CONFLICT" | "ASH_READONLY_VIOLATION" | "ASH_CAP_DENIED" | "ASH_PATH_TRAVERSAL" | "ASH_EMPTY_JOB" | "ASH_DIV_ZERO" | "ASH_EXCESSIVE_RETRY" | "ASH_EVENT_LOOP" | "ASH_SELF_RECURSION" | "ASH_DIVISION_RISK" | "ASH_MIXED_SECURITY" | "ASH_JS_GLOBAL_FIELD" | "ASH_LET_WRITE" | "ASH_LET_PRE_APPROVAL" | "ASH_PARAM_WRITE_GATE" | "ASH_UNCAPPED_ACTION" | "ASH_ACTION_AMPLIFICATION" | "ASH_CROSS_PROVIDER_ACTION" | "ASH_RELATIVE_ACTION_NO_FIND" | "ASH_UNCAPPED_WRITE" | "ASH_BUDGET_EXCESSIVE";
5
+ interface AshDiagnostic {
6
+ code: DiagnosticCode;
7
+ severity?: "error" | "warning";
8
+ message: string;
9
+ line?: number;
10
+ column?: number;
11
+ stage?: number;
12
+ suggestion?: string[];
13
+ }
14
+ type StreamType = "object_stream" | "single_object" | "none";
15
+ interface TypeError {
16
+ message: string;
17
+ stage: number;
18
+ expected?: StreamType;
19
+ actual?: StreamType;
20
+ }
21
+ interface CompileError {
22
+ message: string;
23
+ kind: "prohibited" | "unknown_command";
24
+ severity?: "error" | "warning";
25
+ }
26
+ declare function checkPipelineTypes(stages: PipelineStage[]): TypeError[];
27
+ declare function checkProhibitedPatterns(program: Program): CompileError[];
28
+ declare function checkAnnotations(job: JobDeclaration): CompileError[];
29
+ //#endregion
30
+ export { AshDiagnostic, DiagnosticCode, checkAnnotations, checkPipelineTypes, checkProhibitedPatterns };
31
+ //# sourceMappingURL=type-checker.d.mts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"type-checker.d.mts","names":[],"sources":["../src/type-checker.ts"],"mappings":";;;KAIY,cAAA;AAAA,UAgCK,aAAA;EACf,IAAA,EAAM,cAAA;EACN,QAAA;EACA,OAAA;EACA,IAAA;EACA,MAAA;EACA,KAAA;EACA,UAAA;AAAA;AAAA,KAGU,UAAA;AAAA,UAQK,SAAA;EACf,OAAA;EACA,KAAA;EACA,QAAA,GAAW,UAAA;EACX,MAAA,GAAS,UAAA;AAAA;AAAA,UAGM,YAAA;EACf,OAAA;EACA,IAAA;EACA,QAAA;AAAA;AAAA,iBAoBc,kBAAA,CAAmB,MAAA,EAAQ,aAAA,KAAkB,SAAA;AAAA,iBA4B7C,uBAAA,CAAwB,OAAA,EAAS,OAAA,GAAU,YAAA;AAAA,iBAwhB3C,gBAAA,CAAiB,GAAA,EAAK,cAAA,GAAiB,YAAA"}