flowlint 0.7.6 → 0.7.7

This diff compares the contents of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the packages exactly as they appear in their respective public registries.
package/dist/cli.js CHANGED
@@ -24870,7 +24870,14 @@ function createHardcodedStringRule({
  };
  return createNodeRule(ruleId, configKey, logic);
  }
- var r1Retry = createNodeRule("R1", "rate_limit_retry", (node, graph, ctx) => {
+ var metadata = {
+ id: "R1",
+ name: "rate_limit_retry",
+ severity: "must",
+ description: "Ensures that nodes making external API calls have a retry mechanism configured.",
+ details: "Critical for building resilient workflows that can handle transient network issues or temporary service unavailability."
+ };
+ var r1Retry = createNodeRule(metadata.id, metadata.name, (node, graph, ctx) => {
  if (!isApiNode(node.type)) return null;
  const params = node.params ?? {};
  const options = params.options ?? {};
@@ -24890,8 +24897,8 @@ var r1Retry = createNodeRule("R1", "rate_limit_retry", (node, graph, ctx) => {
  }
  }
  return {
- rule: "R1",
- severity: "must",
+ rule: metadata.id,
+ severity: metadata.severity,
  path: ctx.path,
  message: `Node ${node.name || node.id} is missing retry/backoff configuration`,
  raw_details: `In the node properties, enable "Retry on Fail" under Options.`,
@@ -24899,11 +24906,18 @@ var r1Retry = createNodeRule("R1", "rate_limit_retry", (node, graph, ctx) => {
  line: ctx.nodeLines?.[node.id]
  };
  });
- var r2ErrorHandling = createNodeRule("R2", "error_handling", (node, graph, ctx) => {
+ var metadata2 = {
+ id: "R2",
+ name: "error_handling",
+ severity: "must",
+ description: "Prevents the use of configurations that might hide errors.",
+ details: "Workflows should explicitly handle errors rather than ignoring them with continueOnFail: true."
+ };
+ var r2ErrorHandling = createNodeRule(metadata2.id, metadata2.name, (node, graph, ctx) => {
  if (ctx.cfg.rules.error_handling.forbid_continue_on_fail && node.flags?.continueOnFail) {
  return {
- rule: "R2",
- severity: "must",
+ rule: metadata2.id,
+ severity: metadata2.severity,
  path: ctx.path,
  message: `Node ${node.name || node.id} has continueOnFail enabled (disable it and route errors explicitly)`,
  nodeId: node.id,
@@ -24913,152 +24927,13 @@ var r2ErrorHandling = createNodeRule("R2", "error_handling", (node, graph, ctx)
  }
  return null;
  });
- var r4Secrets = createHardcodedStringRule({
- ruleId: "R4",
- severity: "must",
- configKey: "secrets",
- messageFn: (node) => `Node ${node.name || node.id} contains a hardcoded secret (move it to credentials/env vars)`,
- details: "Move API keys/tokens into Credentials or environment variables; the workflow should only reference {{$credentials.*}} expressions."
- });
- var r9ConfigLiterals = createHardcodedStringRule({
- ruleId: "R9",
+ var metadata3 = {
+ id: "R3",
+ name: "idempotency",
  severity: "should",
- configKey: "config_literals",
- messageFn: (node, value) => `Node ${node.name || node.id} contains env-specific literal "${value.substring(0, 40)}" (move to expression/credential)`,
- details: "Move environment-specific URLs/IDs into expressions or credentials (e.g., {{$env.API_BASE_URL}}) so the workflow is portable."
- });
- var r10NamingConvention = createNodeRule("R10", "naming_convention", (node, graph, ctx) => {
- const genericNames = new Set(ctx.cfg.rules.naming_convention.generic_names ?? []);
- if (!node.name || genericNames.has(node.name.toLowerCase())) {
- return {
- rule: "R10",
- severity: "nit",
- path: ctx.path,
- message: `Node ${node.id} uses a generic name "${node.name ?? ""}" (rename it to describe the action)`,
- nodeId: node.id,
- line: ctx.nodeLines?.[node.id],
- raw_details: 'Rename the node to describe its purpose (e.g., "Check subscription status" instead of "IF") for easier reviews and debugging.'
- };
- }
- return null;
- });
- var DEPRECATED_NODES = {
- "n8n-nodes-base.splitInBatches": "Use Loop over items instead",
- "n8n-nodes-base.executeWorkflow": "Use Execute Workflow (Sub-Workflow) instead"
+ description: "Guards against operations that are not idempotent with retries configured.",
+ details: "Detects patterns where a webhook trigger could lead to duplicate processing in databases or external services."
  };
- var r11DeprecatedNodes = createNodeRule("R11", "deprecated_nodes", (node, graph, ctx) => {
- if (DEPRECATED_NODES[node.type]) {
- return {
- rule: "R11",
- severity: "should",
- path: ctx.path,
- message: `Node ${node.name || node.id} uses deprecated type ${node.type} (replace with ${DEPRECATED_NODES[node.type]})`,
- nodeId: node.id,
- line: ctx.nodeLines?.[node.id],
- raw_details: `Replace this node with ${DEPRECATED_NODES[node.type]} so future n8n upgrades don\xE2\u20AC\u2122t break the workflow.`
- };
- }
- return null;
- });
- var r12UnhandledErrorPath = createNodeRule("R12", "unhandled_error_path", (node, graph, ctx) => {
- if (!isErrorProneNode(node.type)) return null;
- const hasErrorPath = graph.edges.some((edge) => {
- if (edge.from !== node.id) return false;
- if (edge.on === "error") return true;
- const targetNode = graph.nodes.find((candidate) => candidate.id === edge.to);
- return targetNode ? isErrorHandlerNode(targetNode.type, targetNode.name) : false;
- });
- if (!hasErrorPath) {
- return {
- rule: "R12",
- severity: "must",
- path: ctx.path,
- message: `Node ${node.name || node.id} has no error branch (add a red connector to handler)`,
- nodeId: node.id,
- line: ctx.nodeLines?.[node.id],
- raw_details: "Add an error (red) branch to a Stop and Error or logging/alert node so failures do not disappear silently."
- };
- }
- return null;
- });
- function r13WebhookAcknowledgment(graph, ctx) {
- const cfg = ctx.cfg.rules.webhook_acknowledgment;
- if (!cfg?.enabled) return [];
- const findings = [];
- const webhookNodes = graph.nodes.filter(
- (node) => node.type === "n8n-nodes-base.webhook" || node.type.includes("webhook") && !node.type.includes("respondToWebhook")
- );
- for (const webhookNode of webhookNodes) {
- const directDownstream = graph.edges.filter((edge) => edge.from === webhookNode.id).map((edge) => graph.nodes.find((n) => n.id === edge.to)).filter((n) => !!n);
- if (directDownstream.length === 0) continue;
- const hasImmediateResponse = directDownstream.some(
- (node) => node.type === "n8n-nodes-base.respondToWebhook" || /respond.*webhook/i.test(node.type) || /respond.*webhook/i.test(node.name || "")
- );
- if (hasImmediateResponse) continue;
- const heavyNodeTypes = cfg.heavy_node_types || [
- "n8n-nodes-base.httpRequest",
- "n8n-nodes-base.postgres",
- "n8n-nodes-base.mysql",
- "n8n-nodes-base.mongodb",
- "n8n-nodes-base.openAi",
- "n8n-nodes-base.anthropic"
- ];
- const hasHeavyProcessing = directDownstream.some(
- (node) => heavyNodeTypes.includes(node.type) || /loop|batch/i.test(node.type)
- );
- if (hasHeavyProcessing) {
- findings.push({
- rule: "R13",
- severity: "must",
- path: ctx.path,
- message: `Webhook "${webhookNode.name || webhookNode.id}" performs heavy processing before acknowledgment (risk of timeout/duplicates)`,
- nodeId: webhookNode.id,
- line: ctx.nodeLines?.[webhookNode.id],
- raw_details: `Add a "Respond to Webhook" node immediately after the webhook trigger (return 200/204), then perform heavy processing. This prevents webhook timeouts and duplicate events.`
- });
- }
- }
- return findings;
- }
- var r14RetryAfterCompliance = createNodeRule("R14", "retry_after_compliance", (node, graph, ctx) => {
- if (!isApiNode(node.type)) return null;
- const params = node.params ?? {};
- const options = params.options ?? {};
- const retryCandidates = [
- options.retryOnFail,
- params.retryOnFail,
- node.flags?.retryOnFail
- ];
- const retryOnFail = retryCandidates.find((value) => value !== void 0 && value !== null);
- if (!retryOnFail || retryOnFail === false) return null;
- if (typeof retryOnFail === "string") {
- const normalized = retryOnFail.trim().toLowerCase();
- if (retryOnFail.includes("{{") && normalized !== "true") {
- return null;
- }
- }
- const waitBetweenTries = node.flags?.waitBetweenTries;
- if (waitBetweenTries !== void 0 && waitBetweenTries !== null) {
- if (typeof waitBetweenTries === "number") return null;
- if (typeof waitBetweenTries === "string" && !isNaN(Number(waitBetweenTries)) && !waitBetweenTries.includes("{{")) {
- return null;
- }
- }
- const nodeStr = JSON.stringify(node);
- const hasRetryAfterLogic = /retry[-_]?after|retryafter/i.test(nodeStr);
- if (hasRetryAfterLogic) {
- return null;
- }
- return {
- rule: "R14",
- severity: "should",
- path: ctx.path,
- message: `Node ${node.name || node.id} has retry logic but ignores Retry-After headers (429/503 responses)`,
- raw_details: `Add expression to parse Retry-After header: const retryAfter = $json.headers['retry-after']; const delay = retryAfter ? (parseInt(retryAfter) || new Date(retryAfter) - Date.now()) : Math.min(1000 * Math.pow(2, $execution.retryCount), 60000); This prevents API bans and respects server rate limits.`,
- nodeId: node.id,
- line: ctx.nodeLines?.[node.id]
- };
- });
  function r3Idempotency(graph, ctx) {
  const cfg = ctx.cfg.rules.idempotency;
  if (!cfg?.enabled) return [];
@@ -25075,8 +24950,8 @@ function r3Idempotency(graph, ctx) {
  );
  if (!hasGuard) {
  findings.push({
- rule: "R3",
- severity: "must",
+ rule: metadata3.id,
+ severity: metadata3.severity,
  path: ctx.path,
  message: `The mutation path ending at "${mutationNode.name || mutationNode.id}" appears to be missing an idempotency guard.`,
  raw_details: `Ensure one of the upstream nodes or the mutation node itself uses an idempotency key, such as one of: ${(cfg.key_field_candidates ?? []).join(
@@ -25089,6 +24964,27 @@ function r3Idempotency(graph, ctx) {
  }
  return findings;
  }
+ var metadata4 = {
+ id: "R4",
+ name: "secrets",
+ severity: "must",
+ description: "Detects hardcoded secrets, API keys, or credentials within node parameters.",
+ details: "All secrets should be stored securely using credential management systems."
+ };
+ var r4Secrets = createHardcodedStringRule({
+ ruleId: metadata4.id,
+ severity: metadata4.severity,
+ configKey: "secrets",
+ messageFn: (node) => `Node ${node.name || node.id} contains a hardcoded secret (move it to credentials/env vars)`,
+ details: "Move API keys/tokens into Credentials or environment variables; the workflow should only reference {{$credentials.*}} expressions."
+ });
+ var metadata5 = {
+ id: "R5",
+ name: "dead_ends",
+ severity: "should",
+ description: "Finds nodes or workflow branches not connected to any other node.",
+ details: "Indicates incomplete or dead logic that should be reviewed or removed."
+ };
  function r5DeadEnds(graph, ctx) {
  const cfg = ctx.cfg.rules.dead_ends;
  if (!cfg?.enabled) return [];
@@ -25100,8 +24996,8 @@ function r5DeadEnds(graph, ctx) {
  for (const node of graph.nodes) {
  if ((outgoing.get(node.id) || 0) === 0 && !isTerminalNode(node.type, node.name)) {
  findings.push({
- rule: "R5",
- severity: "nit",
+ rule: metadata5.id,
+ severity: metadata5.severity,
  path: ctx.path,
  message: `Node ${node.name || node.id} has no outgoing connections (either wire it up or remove it)`,
  nodeId: node.id,
@@ -25112,6 +25008,13 @@ function r5DeadEnds(graph, ctx) {
  }
  return findings;
  }
+ var metadata6 = {
+ id: "R6",
+ name: "long_running",
+ severity: "should",
+ description: "Flags workflows with potential for excessive runtime.",
+ details: "Detects loops with high iteration counts or long timeouts that could cause performance issues."
+ };
  function r6LongRunning(graph, ctx) {
  const cfg = ctx.cfg.rules.long_running;
  if (!cfg?.enabled) return [];
@@ -25126,32 +25029,39 @@ function r6LongRunning(graph, ctx) {
  ]);
  if (!iterations || cfg.max_iterations && iterations > cfg.max_iterations) {
  findings.push({
- rule: "R6",
- severity: "should",
+ rule: metadata6.id,
+ severity: metadata6.severity,
  path: ctx.path,
  message: `Node ${node.name || node.id} allows ${iterations ?? "unbounded"} iterations (limit ${cfg.max_iterations}; set a lower cap)`,
  nodeId: node.id,
  line: ctx.nodeLines?.[node.id],
- raw_details: `Set Options > Max iterations to \xE2\u2030\xA4 ${cfg.max_iterations} or split the processing into smaller batches.`
+ raw_details: `Set Options > Max iterations to \u2264 ${cfg.max_iterations} or split the processing into smaller batches.`
  });
  }
  if (cfg.timeout_ms) {
  const timeout = readNumber(node.params, ["timeout", "timeoutMs", "options.timeout"]);
  if (timeout && timeout > cfg.timeout_ms) {
  findings.push({
- rule: "R6",
- severity: "should",
+ rule: metadata6.id,
+ severity: metadata6.severity,
  path: ctx.path,
  message: `Node ${node.name || node.id} uses timeout ${timeout}ms (limit ${cfg.timeout_ms}ms; shorten the timeout or break work apart)`,
  nodeId: node.id,
  line: ctx.nodeLines?.[node.id],
- raw_details: `Lower the timeout to \xE2\u2030\xA4 ${cfg.timeout_ms}ms or split the workflow so no single step blocks for too long.`
+ raw_details: `Lower the timeout to \u2264 ${cfg.timeout_ms}ms or split the workflow so no single step blocks for too long.`
  });
  }
  }
  }
  return findings;
  }
+ var metadata7 = {
+ id: "R7",
+ name: "alert_log_enforcement",
+ severity: "should",
+ description: "Ensures critical paths include logging or alerting steps.",
+ details: "For example, a failed payment processing branch should trigger an alert for monitoring."
+ };
  function r7AlertLogEnforcement(graph, ctx) {
  const cfg = ctx.cfg.rules.alert_log_enforcement;
  if (!cfg?.enabled) return [];
@@ -25183,8 +25093,8 @@ function r7AlertLogEnforcement(graph, ctx) {
  }
  if (!isHandled) {
  findings.push({
- rule: "R7",
- severity: "should",
+ rule: metadata7.id,
+ severity: metadata7.severity,
  path: ctx.path,
  message: `Error path from node ${fromNode.name || fromNode.id} has no log/alert before rejoining (add notification node)`,
  nodeId: fromNode.id,
@@ -25195,6 +25105,13 @@ function r7AlertLogEnforcement(graph, ctx) {
  }
  return findings;
  }
+ var metadata8 = {
+ id: "R8",
+ name: "unused_data",
+ severity: "nit",
+ description: "Detects when node output data is not consumed by subsequent nodes.",
+ details: "Identifies unnecessary data processing that could be optimized or removed."
+ };
  function r8UnusedData(graph, ctx) {
  const cfg = ctx.cfg.rules.unused_data;
  if (!cfg?.enabled) return [];
@@ -25211,18 +25128,199 @@ function r8UnusedData(graph, ctx) {
  });
  if (!leadsToConsumer) {
  findings.push({
- rule: "R8",
- severity: "nit",
+ rule: metadata8.id,
+ severity: metadata8.severity,
  path: ctx.path,
  message: `Node "${node.name || node.id}" produces data that never reaches any consumer`,
  nodeId: node.id,
  line: ctx.nodeLines?.[node.id],
- raw_details: "Wire this branch into a consumer (DB/API/response) or remove it\xE2\u20AC\u201Dotherwise the data produced here is never used."
+ raw_details: "Wire this branch into a consumer (DB/API/response) or remove it\u2014otherwise the data produced here is never used."
  });
  }
  }
  return findings;
  }
+ var metadata9 = {
+ id: "R9",
+ name: "config_literals",
+ severity: "should",
+ description: "Flags hardcoded literals (URLs, environment tags, tenant IDs) that should come from configuration.",
+ details: "Promotes externalized configuration and prevents hardcoded environment-specific values."
+ };
+ var r9ConfigLiterals = createHardcodedStringRule({
+ ruleId: metadata9.id,
+ severity: metadata9.severity,
+ configKey: "config_literals",
+ messageFn: (node, value) => `Node ${node.name || node.id} contains env-specific literal "${value.substring(0, 40)}" (move to expression/credential)`,
+ details: "Move environment-specific URLs/IDs into expressions or credentials (e.g., {{$env.API_BASE_URL}}) so the workflow is portable."
+ });
+ var metadata10 = {
+ id: "R10",
+ name: "naming_convention",
+ severity: "nit",
+ description: "Enforces consistent and descriptive naming for nodes.",
+ details: "Enforces consistent and descriptive naming for nodes. Improves workflow readability and maintainability (e.g., 'Fetch Customer Data from CRM' vs 'HTTP Request')."
+ };
+ var r10NamingConvention = createNodeRule(metadata10.id, metadata10.name, (node, graph, ctx) => {
+ const genericNames = new Set(ctx.cfg.rules.naming_convention.generic_names ?? []);
+ if (!node.name || genericNames.has(node.name.toLowerCase())) {
+ return {
+ rule: metadata10.id,
+ severity: metadata10.severity,
+ path: ctx.path,
+ message: `Node ${node.id} uses a generic name "${node.name ?? ""}" (rename it to describe the action)`,
+ nodeId: node.id,
+ line: ctx.nodeLines?.[node.id],
+ raw_details: 'Rename the node to describe its purpose (e.g., "Check subscription status" instead of "IF") for easier reviews and debugging.'
+ };
+ }
+ return null;
+ });
+ var metadata11 = {
+ id: "R11",
+ name: "deprecated_nodes",
+ severity: "should",
+ description: "Warns about deprecated node types and suggests alternatives.",
+ details: "Helps maintain workflows using current, supported node implementations."
+ };
+ var DEPRECATED_NODES = {
+ "n8n-nodes-base.splitInBatches": "Use Loop over items instead",
+ "n8n-nodes-base.executeWorkflow": "Use Execute Workflow (Sub-Workflow) instead"
+ };
+ var r11DeprecatedNodes = createNodeRule(metadata11.id, metadata11.name, (node, graph, ctx) => {
+ if (DEPRECATED_NODES[node.type]) {
+ return {
+ rule: metadata11.id,
+ severity: metadata11.severity,
+ path: ctx.path,
+ message: `Node ${node.name || node.id} uses deprecated type ${node.type} (replace with ${DEPRECATED_NODES[node.type]})`,
+ nodeId: node.id,
+ line: ctx.nodeLines?.[node.id],
+ raw_details: `Replace this node with ${DEPRECATED_NODES[node.type]} so future n8n upgrades don\u2019t break the workflow.`
+ };
+ }
+ return null;
+ });
+ var metadata12 = {
+ id: "R12",
+ name: "unhandled_error_path",
+ severity: "must",
+ description: "Ensures nodes with error outputs have connected error handling branches.",
+ details: "Prevents silent failures by requiring explicit error path handling."
+ };
+ var r12UnhandledErrorPath = createNodeRule(metadata12.id, metadata12.name, (node, graph, ctx) => {
+ if (!isErrorProneNode(node.type)) return null;
+ const hasErrorPath = graph.edges.some((edge) => {
+ if (edge.from !== node.id) return false;
+ if (edge.on === "error") return true;
+ const targetNode = graph.nodes.find((candidate) => candidate.id === edge.to);
+ return targetNode ? isErrorHandlerNode(targetNode.type, targetNode.name) : false;
+ });
+ if (!hasErrorPath) {
+ return {
+ rule: metadata12.id,
+ severity: metadata12.severity,
+ path: ctx.path,
+ message: `Node ${node.name || node.id} has no error branch (add a red connector to handler)`,
+ nodeId: node.id,
+ line: ctx.nodeLines?.[node.id],
+ raw_details: "Add an error (red) branch to a Stop and Error or logging/alert node so failures do not disappear silently."
+ };
+ }
+ return null;
+ });
+ var metadata13 = {
+ id: "R13",
+ name: "webhook_acknowledgment",
+ severity: "must",
+ description: "Detects webhooks performing heavy processing without immediate acknowledgment.",
+ details: "Prevents timeout and duplicate events by requiring 'Respond to Webhook' node before heavy operations (HTTP requests, database queries, AI/LLM calls)."
+ };
+ function r13WebhookAcknowledgment(graph, ctx) {
+ const cfg = ctx.cfg.rules.webhook_acknowledgment;
+ if (!cfg?.enabled) return [];
+ const findings = [];
+ const webhookNodes = graph.nodes.filter(
+ (node) => node.type === "n8n-nodes-base.webhook" || node.type.includes("webhook") && !node.type.includes("respondToWebhook")
+ );
+ for (const webhookNode of webhookNodes) {
+ const directDownstream = graph.edges.filter((edge) => edge.from === webhookNode.id).map((edge) => graph.nodes.find((n) => n.id === edge.to)).filter((n) => !!n);
+ if (directDownstream.length === 0) continue;
+ const hasImmediateResponse = directDownstream.some(
+ (node) => node.type === "n8n-nodes-base.respondToWebhook" || /respond.*webhook/i.test(node.type) || /respond.*webhook/i.test(node.name || "")
+ );
+ if (hasImmediateResponse) continue;
+ const heavyNodeTypes = cfg.heavy_node_types || [
+ "n8n-nodes-base.httpRequest",
+ "n8n-nodes-base.postgres",
+ "n8n-nodes-base.mysql",
+ "n8n-nodes-base.mongodb",
+ "n8n-nodes-base.openAi",
+ "n8n-nodes-base.anthropic"
+ ];
+ const hasHeavyProcessing = directDownstream.some(
+ (node) => heavyNodeTypes.includes(node.type) || /loop|batch/i.test(node.type)
+ );
+ if (hasHeavyProcessing) {
+ findings.push({
+ rule: metadata13.id,
+ severity: metadata13.severity,
+ path: ctx.path,
+ message: `Webhook "${webhookNode.name || webhookNode.id}" performs heavy processing before acknowledgment (risk of timeout/duplicates)`,
+ nodeId: webhookNode.id,
+ line: ctx.nodeLines?.[webhookNode.id],
+ raw_details: `Add a "Respond to Webhook" node immediately after the webhook trigger (return 200/204), then perform heavy processing. This prevents webhook timeouts and duplicate events.`
+ });
+ }
+ }
+ return findings;
+ }
+ var metadata14 = {
+ id: "R14",
+ name: "retry_after_compliance",
+ severity: "should",
+ description: "Detects HTTP nodes with retry logic that ignore Retry-After headers from 429/503 responses.",
+ details: "APIs return Retry-After headers (seconds or HTTP date) to indicate when to retry. Ignoring these causes aggressive retry storms, wasted attempts, and potential API bans. Respecting server guidance prevents IP blocking and extended backoffs."
+ };
+ var r14RetryAfterCompliance = createNodeRule(metadata14.id, metadata14.name, (node, graph, ctx) => {
+ if (!isApiNode(node.type)) return null;
+ const params = node.params ?? {};
+ const options = params.options ?? {};
+ const retryCandidates = [
+ options.retryOnFail,
+ params.retryOnFail,
+ node.flags?.retryOnFail
+ ];
+ const retryOnFail = retryCandidates.find((value) => value !== void 0 && value !== null);
+ if (!retryOnFail || retryOnFail === false) return null;
+ if (typeof retryOnFail === "string") {
+ const normalized = retryOnFail.trim().toLowerCase();
+ if (retryOnFail.includes("{{") && normalized !== "true") {
+ return null;
+ }
+ }
+ const waitBetweenTries = node.flags?.waitBetweenTries;
+ if (waitBetweenTries !== void 0 && waitBetweenTries !== null) {
+ if (typeof waitBetweenTries === "number") return null;
+ if (typeof waitBetweenTries === "string" && !isNaN(Number(waitBetweenTries)) && !waitBetweenTries.includes("{{")) {
+ return null;
+ }
+ }
+ const nodeStr = JSON.stringify(node);
+ const hasRetryAfterLogic = /retry[-_]?after|retryafter/i.test(nodeStr);
+ if (hasRetryAfterLogic) {
+ return null;
+ }
+ return {
+ rule: metadata14.id,
+ severity: metadata14.severity,
+ path: ctx.path,
+ message: `Node ${node.name || node.id} has retry logic but ignores Retry-After headers (429/503 responses)`,
+ raw_details: `Add expression to parse Retry-After header: const retryAfter = $json.headers['retry-after']; const delay = retryAfter ? (parseInt(retryAfter) || new Date(retryAfter) - Date.now()) : Math.min(1000 * Math.pow(2, $execution.retryCount), 60000); This prevents API bans and respects server rate limits.`,
+ nodeId: node.id,
+ line: ctx.nodeLines?.[node.id]
+ };
+ });
  var rules = [
  r1Retry,
  r2ErrorHandling,
@@ -25625,7 +25723,7 @@ var initCommand = new Command("init").description("Initialize FlowLint configura

  // src/cli.ts
  var program2 = new Command();
- program2.name("flowlint").description("Static analysis tool for n8n workflows").version("0.7.6");
+ program2.name("flowlint").description("Static analysis tool for n8n workflows").version("0.7.7");
  program2.addCommand(scanCommand);
  program2.addCommand(initCommand);
  program2.parse(process.argv);
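
Beyond the version bump, the recurring edit in this diff is a refactor of the rule definitions in dist/cli.js: each rule's id, name, and severity are hoisted into a single metadataN object, and both the rule registration (createNodeRule / createHardcodedStringRule) and the emitted findings read from that object instead of repeating string literals. The following is a minimal, self-contained TypeScript sketch of that pattern, not FlowLint's published API: the type names (RuleMetadata, Finding) and the findingFrom helper are illustrative assumptions, and only the field values are taken from the diff above.

// Sketch of the metadata-driven rule pattern introduced in 0.7.7.
// RuleMetadata, Finding, and findingFrom are hypothetical names used
// here for illustration; only the field values mirror the diff above.

type Severity = "must" | "should" | "nit";

interface RuleMetadata {
  id: string;        // e.g. "R1"
  name: string;      // config key, e.g. "rate_limit_retry"
  severity: Severity;
  description: string;
  details: string;
}

interface Finding {
  rule: string;
  severity: Severity;
  path: string;
  message: string;
  nodeId?: string;
  line?: number;
}

// In 0.7.6 the id and severity were repeated as literals inside each rule
// body and each finding; with one shared metadata object, the registered
// rule and the reported finding can no longer drift apart.
const r1Metadata: RuleMetadata = {
  id: "R1",
  name: "rate_limit_retry",
  severity: "must",
  description: "Ensures that nodes making external API calls have a retry mechanism configured.",
  details: "Critical for building resilient workflows that can handle transient network issues or temporary service unavailability.",
};

// Build a finding whose rule/severity come from the shared metadata object.
function findingFrom(meta: RuleMetadata, path: string, message: string): Finding {
  return { rule: meta.id, severity: meta.severity, path, message };
}

// Example (hypothetical workflow path and message):
console.log(
  findingFrom(r1Metadata, "workflows/sync.json", "Node HTTP Request is missing retry/backoff configuration"),
);

Under this reading, the R3-R14 definitions were regrouped so each rule sits next to its metadata object, which is why the diff shows large removed and re-added blocks whose detection logic is otherwise unchanged.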