@replikanti/flowlint-core 0.8.0 → 0.9.1

This diff shows the contents of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
package/dist/index.d.mts CHANGED
@@ -170,6 +170,15 @@ type RuleContext = {
  };
  declare function runAllRules(graph: Graph, ctx: RuleContext): Finding[];

+ interface RuleMetadata {
+ id: string;
+ name: string;
+ severity: 'must' | 'should' | 'nit';
+ description: string;
+ details: string;
+ }
+ declare const RULES_METADATA: RuleMetadata[];
+
  declare class ValidationError extends Error {
  errors: Array<{
  path: string;
@@ -242,4 +251,4 @@ declare function buildCheckOutput({ findings, cfg, summaryOverride, conclusionOv
  };
  declare function buildAnnotations(findings: Finding[]): any[];

- export { type Edge, type FilesConfig, type Finding, type FindingSeverity, type FlowLintConfig, type Graph, type NodeRef, type PRFile, type ReportConfig, type RuleConfig, type RuleContext$1 as RuleContext, type RuleRunner, ValidationError, buildAnnotations, buildCheckOutput, countFindingsBySeverity, defaultConfig, flattenConnections, getExampleLink, isApiNode, isErrorProneNode, isMutationNode, isNotificationNode, isTerminalNode, loadConfig, parseConfig, parseN8n, runAllRules, sortFindingsBySeverity, validateConfig, validateN8nWorkflow };
+ export { type Edge, type FilesConfig, type Finding, type FindingSeverity, type FlowLintConfig, type Graph, type NodeRef, type PRFile, RULES_METADATA, type ReportConfig, type RuleConfig, type RuleContext$1 as RuleContext, type RuleMetadata, type RuleRunner, ValidationError, buildAnnotations, buildCheckOutput, countFindingsBySeverity, defaultConfig, flattenConnections, getExampleLink, isApiNode, isErrorProneNode, isMutationNode, isNotificationNode, isTerminalNode, loadConfig, parseConfig, parseN8n, runAllRules, sortFindingsBySeverity, validateConfig, validateN8nWorkflow };
package/dist/index.d.ts CHANGED
@@ -170,6 +170,15 @@ type RuleContext = {
  };
  declare function runAllRules(graph: Graph, ctx: RuleContext): Finding[];

+ interface RuleMetadata {
+ id: string;
+ name: string;
+ severity: 'must' | 'should' | 'nit';
+ description: string;
+ details: string;
+ }
+ declare const RULES_METADATA: RuleMetadata[];
+
  declare class ValidationError extends Error {
  errors: Array<{
  path: string;
@@ -242,4 +251,4 @@ declare function buildCheckOutput({ findings, cfg, summaryOverride, conclusionOv
  };
  declare function buildAnnotations(findings: Finding[]): any[];

- export { type Edge, type FilesConfig, type Finding, type FindingSeverity, type FlowLintConfig, type Graph, type NodeRef, type PRFile, type ReportConfig, type RuleConfig, type RuleContext$1 as RuleContext, type RuleRunner, ValidationError, buildAnnotations, buildCheckOutput, countFindingsBySeverity, defaultConfig, flattenConnections, getExampleLink, isApiNode, isErrorProneNode, isMutationNode, isNotificationNode, isTerminalNode, loadConfig, parseConfig, parseN8n, runAllRules, sortFindingsBySeverity, validateConfig, validateN8nWorkflow };
+ export { type Edge, type FilesConfig, type Finding, type FindingSeverity, type FlowLintConfig, type Graph, type NodeRef, type PRFile, RULES_METADATA, type ReportConfig, type RuleConfig, type RuleContext$1 as RuleContext, type RuleMetadata, type RuleRunner, ValidationError, buildAnnotations, buildCheckOutput, countFindingsBySeverity, defaultConfig, flattenConnections, getExampleLink, isApiNode, isErrorProneNode, isMutationNode, isNotificationNode, isTerminalNode, loadConfig, parseConfig, parseN8n, runAllRules, sortFindingsBySeverity, validateConfig, validateN8nWorkflow };
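Both declaration files gain the same public surface: a RuleMetadata shape and a RULES_METADATA constant describing every built-in rule. A minimal consumer-side sketch of the new exports (the grouping logic is illustrative and not part of the package; only the import names come from the export list above):

// Sketch: group the published rule catalog by severity using the new 0.9.1 exports.
import { RULES_METADATA, type RuleMetadata } from "@replikanti/flowlint-core";

const bySeverity = new Map<RuleMetadata["severity"], RuleMetadata[]>();
for (const rule of RULES_METADATA) {
  const bucket = bySeverity.get(rule.severity) ?? [];
  bucket.push(rule);
  bySeverity.set(rule.severity, bucket);
}
for (const [severity, rules] of bySeverity) {
  // e.g. "must: R1 (rate_limit_retry), R2 (error_handling), ..."
  console.log(`${severity}: ${rules.map((r) => `${r.id} (${r.name})`).join(", ")}`);
}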
package/dist/index.js CHANGED
@@ -30,6 +30,7 @@ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: tru
  // src/index.ts
  var index_exports = {};
  __export(index_exports, {
+ RULES_METADATA: () => RULES_METADATA,
  ValidationError: () => ValidationError,
  buildAnnotations: () => buildAnnotations,
  buildCheckOutput: () => buildCheckOutput,
@@ -677,8 +678,15 @@ function createHardcodedStringRule({
  return createNodeRule(ruleId, configKey, logic);
  }

- // src/rules/index.ts
- var r1Retry = createNodeRule("R1", "rate_limit_retry", (node, graph, ctx) => {
+ // src/rules/lib/r1-retry.ts
+ var metadata = {
+ id: "R1",
+ name: "rate_limit_retry",
+ severity: "must",
+ description: "Ensures that nodes making external API calls have a retry mechanism configured.",
+ details: "Critical for building resilient workflows that can handle transient network issues or temporary service unavailability."
+ };
+ var r1Retry = createNodeRule(metadata.id, metadata.name, (node, graph, ctx) => {
  if (!isApiNode(node.type)) return null;
  const params = node.params ?? {};
  const options = params.options ?? {};
@@ -698,8 +706,8 @@ var r1Retry = createNodeRule("R1", "rate_limit_retry", (node, graph, ctx) => {
  }
  }
  return {
- rule: "R1",
- severity: "must",
+ rule: metadata.id,
+ severity: metadata.severity,
  path: ctx.path,
  message: `Node ${node.name || node.id} is missing retry/backoff configuration`,
  raw_details: `In the node properties, enable "Retry on Fail" under Options.`,
@@ -707,11 +715,20 @@ var r1Retry = createNodeRule("R1", "rate_limit_retry", (node, graph, ctx) => {
  line: ctx.nodeLines?.[node.id]
  };
  });
- var r2ErrorHandling = createNodeRule("R2", "error_handling", (node, graph, ctx) => {
+
+ // src/rules/lib/r2-error-handling.ts
+ var metadata2 = {
+ id: "R2",
+ name: "error_handling",
+ severity: "must",
+ description: "Prevents the use of configurations that might hide errors.",
+ details: "Workflows should explicitly handle errors rather than ignoring them with continueOnFail: true."
+ };
+ var r2ErrorHandling = createNodeRule(metadata2.id, metadata2.name, (node, graph, ctx) => {
  if (ctx.cfg.rules.error_handling.forbid_continue_on_fail && node.flags?.continueOnFail) {
  return {
- rule: "R2",
- severity: "must",
+ rule: metadata2.id,
+ severity: metadata2.severity,
  path: ctx.path,
  message: `Node ${node.name || node.id} has continueOnFail enabled (disable it and route errors explicitly)`,
  nodeId: node.id,
@@ -721,152 +738,15 @@ var r2ErrorHandling = createNodeRule("R2", "error_handling", (node, graph, ctx)
  }
  return null;
  });
- var r4Secrets = createHardcodedStringRule({
- ruleId: "R4",
- severity: "must",
- configKey: "secrets",
- messageFn: (node) => `Node ${node.name || node.id} contains a hardcoded secret (move it to credentials/env vars)`,
- details: "Move API keys/tokens into Credentials or environment variables; the workflow should only reference {{$credentials.*}} expressions."
- });
- var r9ConfigLiterals = createHardcodedStringRule({
- ruleId: "R9",
+
+ // src/rules/lib/r3-idempotency.ts
+ var metadata3 = {
+ id: "R3",
+ name: "idempotency",
  severity: "should",
- configKey: "config_literals",
- messageFn: (node, value) => `Node ${node.name || node.id} contains env-specific literal "${value.substring(0, 40)}" (move to expression/credential)`,
- details: "Move environment-specific URLs/IDs into expressions or credentials (e.g., {{$env.API_BASE_URL}}) so the workflow is portable."
- });
- var r10NamingConvention = createNodeRule("R10", "naming_convention", (node, graph, ctx) => {
- const genericNames = new Set(ctx.cfg.rules.naming_convention.generic_names ?? []);
- if (!node.name || genericNames.has(node.name.toLowerCase())) {
- return {
- rule: "R10",
- severity: "nit",
- path: ctx.path,
- message: `Node ${node.id} uses a generic name "${node.name ?? ""}" (rename it to describe the action)`,
- nodeId: node.id,
- line: ctx.nodeLines?.[node.id],
- raw_details: 'Rename the node to describe its purpose (e.g., "Check subscription status" instead of "IF") for easier reviews and debugging.'
- };
- }
- return null;
- });
- var DEPRECATED_NODES = {
- "n8n-nodes-base.splitInBatches": "Use Loop over items instead",
- "n8n-nodes-base.executeWorkflow": "Use Execute Workflow (Sub-Workflow) instead"
+ description: "Guards against operations that are not idempotent with retries configured.",
+ details: "Detects patterns where a webhook trigger could lead to duplicate processing in databases or external services."
  };
- var r11DeprecatedNodes = createNodeRule("R11", "deprecated_nodes", (node, graph, ctx) => {
- if (DEPRECATED_NODES[node.type]) {
- return {
- rule: "R11",
- severity: "should",
- path: ctx.path,
- message: `Node ${node.name || node.id} uses deprecated type ${node.type} (replace with ${DEPRECATED_NODES[node.type]})`,
- nodeId: node.id,
- line: ctx.nodeLines?.[node.id],
- raw_details: `Replace this node with ${DEPRECATED_NODES[node.type]} so future n8n upgrades don\xE2\u20AC\u2122t break the workflow.`
- };
- }
- return null;
- });
- var r12UnhandledErrorPath = createNodeRule("R12", "unhandled_error_path", (node, graph, ctx) => {
- if (!isErrorProneNode(node.type)) return null;
- const hasErrorPath = graph.edges.some((edge) => {
- if (edge.from !== node.id) return false;
- if (edge.on === "error") return true;
- const targetNode = graph.nodes.find((candidate) => candidate.id === edge.to);
- return targetNode ? isErrorHandlerNode(targetNode.type, targetNode.name) : false;
- });
- if (!hasErrorPath) {
- return {
- rule: "R12",
- severity: "must",
- path: ctx.path,
- message: `Node ${node.name || node.id} has no error branch (add a red connector to handler)`,
- nodeId: node.id,
- line: ctx.nodeLines?.[node.id],
- raw_details: "Add an error (red) branch to a Stop and Error or logging/alert node so failures do not disappear silently."
- };
- }
- return null;
- });
- function r13WebhookAcknowledgment(graph, ctx) {
- const cfg = ctx.cfg.rules.webhook_acknowledgment;
- if (!cfg?.enabled) return [];
- const findings = [];
- const webhookNodes = graph.nodes.filter(
- (node) => node.type === "n8n-nodes-base.webhook" || node.type.includes("webhook") && !node.type.includes("respondToWebhook")
- );
- for (const webhookNode of webhookNodes) {
- const directDownstream = graph.edges.filter((edge) => edge.from === webhookNode.id).map((edge) => graph.nodes.find((n) => n.id === edge.to)).filter((n) => !!n);
- if (directDownstream.length === 0) continue;
- const hasImmediateResponse = directDownstream.some(
- (node) => node.type === "n8n-nodes-base.respondToWebhook" || /respond.*webhook/i.test(node.type) || /respond.*webhook/i.test(node.name || "")
- );
- if (hasImmediateResponse) continue;
- const heavyNodeTypes = cfg.heavy_node_types || [
- "n8n-nodes-base.httpRequest",
- "n8n-nodes-base.postgres",
- "n8n-nodes-base.mysql",
- "n8n-nodes-base.mongodb",
- "n8n-nodes-base.openAi",
- "n8n-nodes-base.anthropic"
- ];
- const hasHeavyProcessing = directDownstream.some(
- (node) => heavyNodeTypes.includes(node.type) || /loop|batch/i.test(node.type)
- );
- if (hasHeavyProcessing) {
- findings.push({
- rule: "R13",
- severity: "must",
- path: ctx.path,
- message: `Webhook "${webhookNode.name || webhookNode.id}" performs heavy processing before acknowledgment (risk of timeout/duplicates)`,
- nodeId: webhookNode.id,
- line: ctx.nodeLines?.[webhookNode.id],
- raw_details: `Add a "Respond to Webhook" node immediately after the webhook trigger (return 200/204), then perform heavy processing. This prevents webhook timeouts and duplicate events.`
- });
- }
- }
- return findings;
- }
- var r14RetryAfterCompliance = createNodeRule("R14", "retry_after_compliance", (node, graph, ctx) => {
- if (!isApiNode(node.type)) return null;
- const params = node.params ?? {};
- const options = params.options ?? {};
- const retryCandidates = [
- options.retryOnFail,
- params.retryOnFail,
- node.flags?.retryOnFail
- ];
- const retryOnFail = retryCandidates.find((value) => value !== void 0 && value !== null);
- if (!retryOnFail || retryOnFail === false) return null;
- if (typeof retryOnFail === "string") {
- const normalized = retryOnFail.trim().toLowerCase();
- if (retryOnFail.includes("{{") && normalized !== "true") {
- return null;
- }
- }
- const waitBetweenTries = node.flags?.waitBetweenTries;
- if (waitBetweenTries !== void 0 && waitBetweenTries !== null) {
- if (typeof waitBetweenTries === "number") return null;
- if (typeof waitBetweenTries === "string" && !isNaN(Number(waitBetweenTries)) && !waitBetweenTries.includes("{{")) {
- return null;
- }
- }
- const nodeStr = JSON.stringify(node);
- const hasRetryAfterLogic = /retry[-_]?after|retryafter/i.test(nodeStr);
- if (hasRetryAfterLogic) {
- return null;
- }
- return {
- rule: "R14",
- severity: "should",
- path: ctx.path,
- message: `Node ${node.name || node.id} has retry logic but ignores Retry-After headers (429/503 responses)`,
- raw_details: `Add expression to parse Retry-After header: const retryAfter = $json.headers['retry-after']; const delay = retryAfter ? (parseInt(retryAfter) || new Date(retryAfter) - Date.now()) : Math.min(1000 * Math.pow(2, $execution.retryCount), 60000); This prevents API bans and respects server rate limits.`,
- nodeId: node.id,
- line: ctx.nodeLines?.[node.id]
- };
- });
  function r3Idempotency(graph, ctx) {
  const cfg = ctx.cfg.rules.idempotency;
  if (!cfg?.enabled) return [];
@@ -883,8 +763,8 @@ function r3Idempotency(graph, ctx) {
  );
  if (!hasGuard) {
  findings.push({
- rule: "R3",
- severity: "must",
+ rule: metadata3.id,
+ severity: metadata3.severity,
  path: ctx.path,
  message: `The mutation path ending at "${mutationNode.name || mutationNode.id}" appears to be missing an idempotency guard.`,
  raw_details: `Ensure one of the upstream nodes or the mutation node itself uses an idempotency key, such as one of: ${(cfg.key_field_candidates ?? []).join(
@@ -897,6 +777,31 @@ function r3Idempotency(graph, ctx) {
  }
  return findings;
  }
+
+ // src/rules/lib/r4-secrets.ts
+ var metadata4 = {
+ id: "R4",
+ name: "secrets",
+ severity: "must",
+ description: "Detects hardcoded secrets, API keys, or credentials within node parameters.",
+ details: "All secrets should be stored securely using credential management systems."
+ };
+ var r4Secrets = createHardcodedStringRule({
+ ruleId: metadata4.id,
+ severity: metadata4.severity,
+ configKey: "secrets",
+ messageFn: (node) => `Node ${node.name || node.id} contains a hardcoded secret (move it to credentials/env vars)`,
+ details: "Move API keys/tokens into Credentials or environment variables; the workflow should only reference {{$credentials.*}} expressions."
+ });
+
+ // src/rules/lib/r5-dead-ends.ts
+ var metadata5 = {
+ id: "R5",
+ name: "dead_ends",
+ severity: "should",
+ description: "Finds nodes or workflow branches not connected to any other node.",
+ details: "Indicates incomplete or dead logic that should be reviewed or removed."
+ };
  function r5DeadEnds(graph, ctx) {
  const cfg = ctx.cfg.rules.dead_ends;
  if (!cfg?.enabled) return [];
@@ -908,8 +813,8 @@ function r5DeadEnds(graph, ctx) {
  for (const node of graph.nodes) {
  if ((outgoing.get(node.id) || 0) === 0 && !isTerminalNode(node.type, node.name)) {
  findings.push({
- rule: "R5",
- severity: "nit",
+ rule: metadata5.id,
+ severity: metadata5.severity,
  path: ctx.path,
  message: `Node ${node.name || node.id} has no outgoing connections (either wire it up or remove it)`,
  nodeId: node.id,
@@ -920,6 +825,15 @@ function r5DeadEnds(graph, ctx) {
  }
  return findings;
  }
+
+ // src/rules/lib/r6-long-running.ts
+ var metadata6 = {
+ id: "R6",
+ name: "long_running",
+ severity: "should",
+ description: "Flags workflows with potential for excessive runtime.",
+ details: "Detects loops with high iteration counts or long timeouts that could cause performance issues."
+ };
  function r6LongRunning(graph, ctx) {
  const cfg = ctx.cfg.rules.long_running;
  if (!cfg?.enabled) return [];
@@ -934,32 +848,41 @@ function r6LongRunning(graph, ctx) {
  ]);
  if (!iterations || cfg.max_iterations && iterations > cfg.max_iterations) {
  findings.push({
- rule: "R6",
- severity: "should",
+ rule: metadata6.id,
+ severity: metadata6.severity,
  path: ctx.path,
  message: `Node ${node.name || node.id} allows ${iterations ?? "unbounded"} iterations (limit ${cfg.max_iterations}; set a lower cap)`,
  nodeId: node.id,
  line: ctx.nodeLines?.[node.id],
- raw_details: `Set Options > Max iterations to \xE2\u2030\xA4 ${cfg.max_iterations} or split the processing into smaller batches.`
+ raw_details: `Set Options > Max iterations to \u2264 ${cfg.max_iterations} or split the processing into smaller batches.`
  });
  }
  if (cfg.timeout_ms) {
  const timeout = readNumber(node.params, ["timeout", "timeoutMs", "options.timeout"]);
  if (timeout && timeout > cfg.timeout_ms) {
  findings.push({
- rule: "R6",
- severity: "should",
+ rule: metadata6.id,
+ severity: metadata6.severity,
  path: ctx.path,
  message: `Node ${node.name || node.id} uses timeout ${timeout}ms (limit ${cfg.timeout_ms}ms; shorten the timeout or break work apart)`,
  nodeId: node.id,
  line: ctx.nodeLines?.[node.id],
- raw_details: `Lower the timeout to \xE2\u2030\xA4 ${cfg.timeout_ms}ms or split the workflow so no single step blocks for too long.`
+ raw_details: `Lower the timeout to \u2264 ${cfg.timeout_ms}ms or split the workflow so no single step blocks for too long.`
  });
  }
  }
  }
  return findings;
  }
+
+ // src/rules/lib/r7-alert-log-enforcement.ts
+ var metadata7 = {
+ id: "R7",
+ name: "alert_log_enforcement",
+ severity: "should",
+ description: "Ensures critical paths include logging or alerting steps.",
+ details: "For example, a failed payment processing branch should trigger an alert for monitoring."
+ };
  function r7AlertLogEnforcement(graph, ctx) {
  const cfg = ctx.cfg.rules.alert_log_enforcement;
  if (!cfg?.enabled) return [];
@@ -991,8 +914,8 @@ function r7AlertLogEnforcement(graph, ctx) {
  }
  if (!isHandled) {
  findings.push({
- rule: "R7",
- severity: "should",
+ rule: metadata7.id,
+ severity: metadata7.severity,
  path: ctx.path,
  message: `Error path from node ${fromNode.name || fromNode.id} has no log/alert before rejoining (add notification node)`,
  nodeId: fromNode.id,
@@ -1003,6 +926,15 @@ function r7AlertLogEnforcement(graph, ctx) {
  }
  return findings;
  }
+
+ // src/rules/lib/r8-unused-data.ts
+ var metadata8 = {
+ id: "R8",
+ name: "unused_data",
+ severity: "nit",
+ description: "Detects when node output data is not consumed by subsequent nodes.",
+ details: "Identifies unnecessary data processing that could be optimized or removed."
+ };
  function r8UnusedData(graph, ctx) {
  const cfg = ctx.cfg.rules.unused_data;
  if (!cfg?.enabled) return [];
@@ -1019,18 +951,213 @@ function r8UnusedData(graph, ctx) {
  });
  if (!leadsToConsumer) {
  findings.push({
- rule: "R8",
- severity: "nit",
+ rule: metadata8.id,
+ severity: metadata8.severity,
  path: ctx.path,
  message: `Node "${node.name || node.id}" produces data that never reaches any consumer`,
  nodeId: node.id,
  line: ctx.nodeLines?.[node.id],
- raw_details: "Wire this branch into a consumer (DB/API/response) or remove it\xE2\u20AC\u201Dotherwise the data produced here is never used."
+ raw_details: "Wire this branch into a consumer (DB/API/response) or remove it\u2014otherwise the data produced here is never used."
+ });
+ }
+ }
+ return findings;
+ }
+
+ // src/rules/lib/r9-config-literals.ts
+ var metadata9 = {
+ id: "R9",
+ name: "config_literals",
+ severity: "should",
+ description: "Flags hardcoded literals (URLs, environment tags, tenant IDs) that should come from configuration.",
+ details: "Promotes externalized configuration and prevents hardcoded environment-specific values."
+ };
+ var r9ConfigLiterals = createHardcodedStringRule({
+ ruleId: metadata9.id,
+ severity: metadata9.severity,
+ configKey: "config_literals",
+ messageFn: (node, value) => `Node ${node.name || node.id} contains env-specific literal "${value.substring(0, 40)}" (move to expression/credential)`,
+ details: "Move environment-specific URLs/IDs into expressions or credentials (e.g., {{$env.API_BASE_URL}}) so the workflow is portable."
+ });
+
+ // src/rules/lib/r10-naming-convention.ts
+ var metadata10 = {
+ id: "R10",
+ name: "naming_convention",
+ severity: "nit",
+ description: "Enforces consistent and descriptive naming for nodes.",
+ details: "Enforces consistent and descriptive naming for nodes. Improves workflow readability and maintainability (e.g., 'Fetch Customer Data from CRM' vs 'HTTP Request')."
+ };
+ var r10NamingConvention = createNodeRule(metadata10.id, metadata10.name, (node, graph, ctx) => {
+ const genericNames = new Set(ctx.cfg.rules.naming_convention.generic_names ?? []);
+ if (!node.name || genericNames.has(node.name.toLowerCase())) {
+ return {
+ rule: metadata10.id,
+ severity: metadata10.severity,
+ path: ctx.path,
+ message: `Node ${node.id} uses a generic name "${node.name ?? ""}" (rename it to describe the action)`,
+ nodeId: node.id,
+ line: ctx.nodeLines?.[node.id],
+ raw_details: 'Rename the node to describe its purpose (e.g., "Check subscription status" instead of "IF") for easier reviews and debugging.'
+ };
+ }
+ return null;
+ });
+
+ // src/rules/lib/r11-deprecated-nodes.ts
+ var metadata11 = {
+ id: "R11",
+ name: "deprecated_nodes",
+ severity: "should",
+ description: "Warns about deprecated node types and suggests alternatives.",
+ details: "Helps maintain workflows using current, supported node implementations."
+ };
+ var DEPRECATED_NODES = {
+ "n8n-nodes-base.splitInBatches": "Use Loop over items instead",
+ "n8n-nodes-base.executeWorkflow": "Use Execute Workflow (Sub-Workflow) instead"
+ };
+ var r11DeprecatedNodes = createNodeRule(metadata11.id, metadata11.name, (node, graph, ctx) => {
+ if (DEPRECATED_NODES[node.type]) {
+ return {
+ rule: metadata11.id,
+ severity: metadata11.severity,
+ path: ctx.path,
+ message: `Node ${node.name || node.id} uses deprecated type ${node.type} (replace with ${DEPRECATED_NODES[node.type]})`,
+ nodeId: node.id,
+ line: ctx.nodeLines?.[node.id],
+ raw_details: `Replace this node with ${DEPRECATED_NODES[node.type]} so future n8n upgrades don\u2019t break the workflow.`
+ };
+ }
+ return null;
+ });
+
+ // src/rules/lib/r12-unhandled-error-path.ts
+ var metadata12 = {
+ id: "R12",
+ name: "unhandled_error_path",
+ severity: "must",
+ description: "Ensures nodes with error outputs have connected error handling branches.",
+ details: "Prevents silent failures by requiring explicit error path handling."
+ };
+ var r12UnhandledErrorPath = createNodeRule(metadata12.id, metadata12.name, (node, graph, ctx) => {
+ if (!isErrorProneNode(node.type)) return null;
+ const hasErrorPath = graph.edges.some((edge) => {
+ if (edge.from !== node.id) return false;
+ if (edge.on === "error") return true;
+ const targetNode = graph.nodes.find((candidate) => candidate.id === edge.to);
+ return targetNode ? isErrorHandlerNode(targetNode.type, targetNode.name) : false;
+ });
+ if (!hasErrorPath) {
+ return {
+ rule: metadata12.id,
+ severity: metadata12.severity,
+ path: ctx.path,
+ message: `Node ${node.name || node.id} has no error branch (add a red connector to handler)`,
+ nodeId: node.id,
+ line: ctx.nodeLines?.[node.id],
+ raw_details: "Add an error (red) branch to a Stop and Error or logging/alert node so failures do not disappear silently."
+ };
+ }
+ return null;
+ });
+
+ // src/rules/lib/r13-webhook-acknowledgment.ts
+ var metadata13 = {
+ id: "R13",
+ name: "webhook_acknowledgment",
+ severity: "must",
+ description: "Detects webhooks performing heavy processing without immediate acknowledgment.",
+ details: "Prevents timeout and duplicate events by requiring 'Respond to Webhook' node before heavy operations (HTTP requests, database queries, AI/LLM calls)."
+ };
+ function r13WebhookAcknowledgment(graph, ctx) {
+ const cfg = ctx.cfg.rules.webhook_acknowledgment;
+ if (!cfg?.enabled) return [];
+ const findings = [];
+ const webhookNodes = graph.nodes.filter(
+ (node) => node.type === "n8n-nodes-base.webhook" || node.type.includes("webhook") && !node.type.includes("respondToWebhook")
+ );
+ for (const webhookNode of webhookNodes) {
+ const directDownstream = graph.edges.filter((edge) => edge.from === webhookNode.id).map((edge) => graph.nodes.find((n) => n.id === edge.to)).filter((n) => !!n);
+ if (directDownstream.length === 0) continue;
+ const hasImmediateResponse = directDownstream.some(
+ (node) => node.type === "n8n-nodes-base.respondToWebhook" || /respond.*webhook/i.test(node.type) || /respond.*webhook/i.test(node.name || "")
+ );
+ if (hasImmediateResponse) continue;
+ const heavyNodeTypes = cfg.heavy_node_types || [
+ "n8n-nodes-base.httpRequest",
+ "n8n-nodes-base.postgres",
+ "n8n-nodes-base.mysql",
+ "n8n-nodes-base.mongodb",
+ "n8n-nodes-base.openAi",
+ "n8n-nodes-base.anthropic"
+ ];
+ const hasHeavyProcessing = directDownstream.some(
+ (node) => heavyNodeTypes.includes(node.type) || /loop|batch/i.test(node.type)
+ );
+ if (hasHeavyProcessing) {
+ findings.push({
+ rule: metadata13.id,
+ severity: metadata13.severity,
+ path: ctx.path,
+ message: `Webhook "${webhookNode.name || webhookNode.id}" performs heavy processing before acknowledgment (risk of timeout/duplicates)`,
+ nodeId: webhookNode.id,
+ line: ctx.nodeLines?.[webhookNode.id],
+ raw_details: `Add a "Respond to Webhook" node immediately after the webhook trigger (return 200/204), then perform heavy processing. This prevents webhook timeouts and duplicate events.`
  });
  }
  }
  return findings;
  }
+
+ // src/rules/lib/r14-retry-after-compliance.ts
+ var metadata14 = {
+ id: "R14",
+ name: "retry_after_compliance",
+ severity: "should",
+ description: "Detects HTTP nodes with retry logic that ignore Retry-After headers from 429/503 responses.",
+ details: "APIs return Retry-After headers (seconds or HTTP date) to indicate when to retry. Ignoring these causes aggressive retry storms, wasted attempts, and potential API bans. Respecting server guidance prevents IP blocking and extended backoffs."
+ };
+ var r14RetryAfterCompliance = createNodeRule(metadata14.id, metadata14.name, (node, graph, ctx) => {
+ if (!isApiNode(node.type)) return null;
+ const params = node.params ?? {};
+ const options = params.options ?? {};
+ const retryCandidates = [
+ options.retryOnFail,
+ params.retryOnFail,
+ node.flags?.retryOnFail
+ ];
+ const retryOnFail = retryCandidates.find((value) => value !== void 0 && value !== null);
+ if (!retryOnFail || retryOnFail === false) return null;
+ if (typeof retryOnFail === "string") {
+ const normalized = retryOnFail.trim().toLowerCase();
+ if (retryOnFail.includes("{{") && normalized !== "true") {
+ return null;
+ }
+ }
+ const waitBetweenTries = node.flags?.waitBetweenTries;
+ if (waitBetweenTries !== void 0 && waitBetweenTries !== null) {
+ if (typeof waitBetweenTries === "number") return null;
+ if (typeof waitBetweenTries === "string" && !isNaN(Number(waitBetweenTries)) && !waitBetweenTries.includes("{{")) {
+ return null;
+ }
+ }
+ const nodeStr = JSON.stringify(node);
+ const hasRetryAfterLogic = /retry[-_]?after|retryafter/i.test(nodeStr);
+ if (hasRetryAfterLogic) {
+ return null;
+ }
+ return {
+ rule: metadata14.id,
+ severity: metadata14.severity,
+ path: ctx.path,
+ message: `Node ${node.name || node.id} has retry logic but ignores Retry-After headers (429/503 responses)`,
+ raw_details: `Add expression to parse Retry-After header: const retryAfter = $json.headers['retry-after']; const delay = retryAfter ? (parseInt(retryAfter) || new Date(retryAfter) - Date.now()) : Math.min(1000 * Math.pow(2, $execution.retryCount), 60000); This prevents API bans and respects server rate limits.`,
+ nodeId: node.id,
+ line: ctx.nodeLines?.[node.id]
+ };
+ });
+
+ // src/rules/index.ts
  var rules = [
  r1Retry,
  r2ErrorHandling,
@@ -1051,6 +1178,24 @@ function runAllRules(graph, ctx) {
  return rules.flatMap((rule) => rule(graph, ctx));
  }

+ // src/rules/metadata.ts
+ var RULES_METADATA = [
+ metadata,
+ metadata2,
+ metadata3,
+ metadata4,
+ metadata5,
+ metadata6,
+ metadata7,
+ metadata8,
+ metadata9,
+ metadata10,
+ metadata11,
+ metadata12,
+ metadata13,
+ metadata14
+ ];
+
  // src/config/default-config.ts
  var defaultConfig = {
  files: {
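Because each rule in 0.9.1 now draws its id and severity from a per-rule metadata object, and src/rules/metadata.ts aggregates them into RULES_METADATA, findings can be joined back to the rule catalog. A rough consumer-side sketch (the describeFindings helper is hypothetical; RULES_METADATA, the Finding type, and the rule field set on finding objects come from the diff above):

// Sketch: annotate findings with the rule descriptions shipped in 0.9.1.
import { RULES_METADATA, type Finding } from "@replikanti/flowlint-core";

// Build a lookup from rule id ("R1".."R14") to its published metadata.
const metadataById = new Map(RULES_METADATA.map((meta) => [meta.id, meta] as const));

function describeFindings(findings: Finding[]) {
  return findings.map((finding) => ({
    ...finding,
    // Fall back to a placeholder if a finding references a rule not in the catalog.
    description: metadataById.get(finding.rule)?.description ?? "(unknown rule)",
  }));
}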