@eagleoutice/flowr 2.6.3 → 2.7.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (161)
  1. package/README.md +13 -13
  2. package/abstract-interpretation/data-frame/absint-visitor.d.ts +1 -1
  3. package/abstract-interpretation/data-frame/absint-visitor.js +3 -3
  4. package/abstract-interpretation/data-frame/dataframe-domain.d.ts +4 -7
  5. package/abstract-interpretation/data-frame/dataframe-domain.js +5 -11
  6. package/abstract-interpretation/data-frame/mappers/access-mapper.d.ts +3 -1
  7. package/abstract-interpretation/data-frame/mappers/access-mapper.js +3 -2
  8. package/abstract-interpretation/data-frame/mappers/arguments.js +2 -2
  9. package/abstract-interpretation/data-frame/mappers/assignment-mapper.d.ts +3 -1
  10. package/abstract-interpretation/data-frame/mappers/assignment-mapper.js +3 -2
  11. package/abstract-interpretation/data-frame/mappers/function-mapper.d.ts +1 -1
  12. package/abstract-interpretation/data-frame/mappers/function-mapper.js +8 -8
  13. package/abstract-interpretation/data-frame/mappers/replacement-mapper.d.ts +3 -1
  14. package/abstract-interpretation/data-frame/mappers/replacement-mapper.js +3 -2
  15. package/abstract-interpretation/data-frame/semantics.js +47 -42
  16. package/abstract-interpretation/data-frame/shape-inference.d.ts +1 -1
  17. package/abstract-interpretation/domains/abstract-domain.d.ts +1 -0
  18. package/abstract-interpretation/domains/abstract-domain.js +3 -2
  19. package/abstract-interpretation/domains/bounded-set-domain.js +1 -1
  20. package/abstract-interpretation/domains/interval-domain.d.ts +2 -2
  21. package/abstract-interpretation/domains/interval-domain.js +3 -6
  22. package/abstract-interpretation/domains/lattice.d.ts +2 -0
  23. package/abstract-interpretation/domains/lattice.js +3 -1
  24. package/abstract-interpretation/domains/positive-interval-domain.d.ts +1 -1
  25. package/abstract-interpretation/domains/positive-interval-domain.js +1 -1
  26. package/abstract-interpretation/domains/satisfiable-domain.d.ts +2 -2
  27. package/abstract-interpretation/domains/satisfiable-domain.js +2 -2
  28. package/abstract-interpretation/domains/set-range-domain.d.ts +98 -0
  29. package/abstract-interpretation/domains/set-range-domain.js +400 -0
  30. package/abstract-interpretation/domains/set-upper-bound-domain.js +2 -2
  31. package/abstract-interpretation/domains/singleton-domain.js +2 -2
  32. package/benchmark/slicer.d.ts +2 -1
  33. package/benchmark/slicer.js +37 -15
  34. package/benchmark/stats/print.js +8 -5
  35. package/benchmark/stats/stats.d.ts +3 -2
  36. package/benchmark/summarizer/data.d.ts +11 -8
  37. package/benchmark/summarizer/first-phase/process.js +11 -8
  38. package/benchmark/summarizer/second-phase/process.js +24 -18
  39. package/control-flow/cfg-dead-code.js +3 -2
  40. package/control-flow/useless-loop.js +4 -2
  41. package/core/steps/all/static-slicing/00-slice.d.ts +3 -0
  42. package/core/steps/all/static-slicing/00-slice.js +2 -1
  43. package/core/steps/pipeline/default-pipelines.d.ts +42 -42
  44. package/dataflow/cluster.js +2 -2
  45. package/dataflow/environments/append.d.ts +5 -0
  46. package/dataflow/environments/append.js +6 -20
  47. package/dataflow/environments/built-in.d.ts +2 -1
  48. package/dataflow/environments/clone.d.ts +1 -1
  49. package/dataflow/environments/clone.js +3 -27
  50. package/dataflow/environments/define.d.ts +7 -3
  51. package/dataflow/environments/define.js +9 -56
  52. package/dataflow/environments/diff.js +1 -1
  53. package/dataflow/environments/environment.d.ts +48 -28
  54. package/dataflow/environments/environment.js +187 -62
  55. package/dataflow/environments/overwrite.js +2 -45
  56. package/dataflow/environments/reference-to-maybe.d.ts +13 -0
  57. package/dataflow/environments/reference-to-maybe.js +54 -0
  58. package/dataflow/environments/resolve-by-name.d.ts +6 -1
  59. package/dataflow/environments/resolve-by-name.js +56 -4
  60. package/dataflow/environments/scoping.d.ts +2 -2
  61. package/dataflow/environments/scoping.js +7 -7
  62. package/dataflow/eval/resolve/alias-tracking.d.ts +10 -4
  63. package/dataflow/eval/resolve/alias-tracking.js +15 -13
  64. package/dataflow/eval/resolve/resolve-argument.d.ts +2 -1
  65. package/dataflow/eval/resolve/resolve-argument.js +8 -8
  66. package/dataflow/eval/resolve/resolve.d.ts +13 -11
  67. package/dataflow/eval/resolve/resolve.js +16 -15
  68. package/dataflow/extractor.js +1 -7
  69. package/dataflow/fn/higher-order-function.d.ts +2 -1
  70. package/dataflow/fn/higher-order-function.js +4 -4
  71. package/dataflow/graph/dataflowgraph-builder.d.ts +9 -5
  72. package/dataflow/graph/dataflowgraph-builder.js +21 -11
  73. package/dataflow/graph/diff-dataflow-graph.js +2 -2
  74. package/dataflow/graph/graph.d.ts +10 -2
  75. package/dataflow/graph/graph.js +41 -12
  76. package/dataflow/graph/invert-dfg.d.ts +3 -2
  77. package/dataflow/graph/invert-dfg.js +3 -3
  78. package/dataflow/graph/resolve-graph.d.ts +2 -1
  79. package/dataflow/graph/resolve-graph.js +2 -2
  80. package/dataflow/graph/vertex.d.ts +3 -3
  81. package/dataflow/graph/vertex.js +3 -3
  82. package/dataflow/info.d.ts +1 -1
  83. package/dataflow/internal/linker.js +3 -7
  84. package/dataflow/internal/process/functions/call/argument/unpack-argument.d.ts +7 -1
  85. package/dataflow/internal/process/functions/call/argument/unpack-argument.js +12 -3
  86. package/dataflow/internal/process/functions/call/built-in/built-in-access.js +3 -3
  87. package/dataflow/internal/process/functions/call/built-in/built-in-apply.js +2 -2
  88. package/dataflow/internal/process/functions/call/built-in/built-in-assignment.d.ts +3 -1
  89. package/dataflow/internal/process/functions/call/built-in/built-in-assignment.js +3 -3
  90. package/dataflow/internal/process/functions/call/built-in/built-in-eval.js +9 -9
  91. package/dataflow/internal/process/functions/call/built-in/built-in-expression-list.js +9 -7
  92. package/dataflow/internal/process/functions/call/built-in/built-in-for-loop.js +3 -3
  93. package/dataflow/internal/process/functions/call/built-in/built-in-function-definition.d.ts +2 -1
  94. package/dataflow/internal/process/functions/call/built-in/built-in-function-definition.js +9 -13
  95. package/dataflow/internal/process/functions/call/built-in/built-in-get.js +1 -1
  96. package/dataflow/internal/process/functions/call/built-in/built-in-if-then-else.d.ts +3 -1
  97. package/dataflow/internal/process/functions/call/built-in/built-in-if-then-else.js +8 -6
  98. package/dataflow/internal/process/functions/call/built-in/built-in-library.js +1 -1
  99. package/dataflow/internal/process/functions/call/built-in/built-in-pipe.js +1 -1
  100. package/dataflow/internal/process/functions/call/built-in/built-in-repeat-loop.js +1 -1
  101. package/dataflow/internal/process/functions/call/built-in/built-in-replacement.js +3 -3
  102. package/dataflow/internal/process/functions/call/built-in/built-in-rm.js +6 -4
  103. package/dataflow/internal/process/functions/call/built-in/built-in-source.js +1 -1
  104. package/dataflow/internal/process/functions/call/built-in/built-in-while-loop.js +5 -5
  105. package/dataflow/internal/process/functions/call/common.js +2 -3
  106. package/dataflow/internal/process/functions/call/known-call-handling.js +1 -1
  107. package/dataflow/internal/process/functions/call/unnamed-call-handling.js +1 -1
  108. package/dataflow/internal/process/functions/process-argument.js +1 -1
  109. package/dataflow/internal/process/process-symbol.js +1 -1
  110. package/dataflow/internal/process/process-value.d.ts +1 -1
  111. package/dataflow/internal/process/process-value.js +7 -7
  112. package/dataflow/processor.d.ts +1 -5
  113. package/documentation/doc-util/doc-dfg.js +3 -2
  114. package/documentation/doc-util/doc-normalized-ast.js +3 -2
  115. package/documentation/doc-util/doc-types.d.ts +1 -1
  116. package/documentation/doc-util/doc-types.js +2 -2
  117. package/documentation/wiki-analyzer.js +14 -1
  118. package/documentation/wiki-dataflow-graph.js +4 -5
  119. package/documentation/wiki-faq.js +0 -1
  120. package/documentation/wiki-linter.js +1 -1
  121. package/documentation/wiki-mk/doc-maker.js +2 -1
  122. package/linter/linter-rules.d.ts +2 -2
  123. package/linter/rules/absolute-path.js +4 -4
  124. package/linter/rules/dataframe-access-validation.d.ts +1 -1
  125. package/linter/rules/dataframe-access-validation.js +1 -1
  126. package/linter/rules/function-finder-util.d.ts +2 -2
  127. package/linter/rules/function-finder-util.js +1 -1
  128. package/linter/rules/network-functions.js +1 -1
  129. package/linter/rules/seeded-randomness.d.ts +1 -1
  130. package/linter/rules/seeded-randomness.js +5 -5
  131. package/package.json +1 -2
  132. package/project/context/flowr-analyzer-context.d.ts +7 -0
  133. package/project/context/flowr-analyzer-context.js +3 -0
  134. package/project/context/flowr-analyzer-environment-context.d.ts +47 -0
  135. package/project/context/flowr-analyzer-environment-context.js +50 -0
  136. package/queries/catalog/call-context-query/call-context-query-executor.js +1 -4
  137. package/queries/catalog/control-flow-query/control-flow-query-format.js +3 -2
  138. package/queries/catalog/dataflow-lens-query/dataflow-lens-query-executor.js +1 -1
  139. package/queries/catalog/dependencies-query/dependencies-query-executor.js +4 -4
  140. package/queries/catalog/df-shape-query/df-shape-query-executor.d.ts +1 -1
  141. package/queries/catalog/df-shape-query/df-shape-query-executor.js +1 -1
  142. package/queries/catalog/df-shape-query/df-shape-query-format.d.ts +4 -4
  143. package/queries/catalog/df-shape-query/df-shape-query-format.js +2 -2
  144. package/queries/catalog/inspect-higher-order-query/inspect-higher-order-query-executor.js +3 -3
  145. package/queries/catalog/resolve-value-query/resolve-value-query-executor.js +1 -1
  146. package/queries/catalog/static-slice-query/static-slice-query-executor.js +1 -1
  147. package/slicing/static/slice-call.d.ts +3 -2
  148. package/slicing/static/slice-call.js +4 -4
  149. package/slicing/static/static-slicer.d.ts +3 -1
  150. package/slicing/static/static-slicer.js +6 -7
  151. package/statistics/features/supported/control-flow/control-flow.js +1 -1
  152. package/statistics/features/supported/used-functions/used-functions.js +1 -1
  153. package/statistics/features/supported/variables/variables.js +2 -1
  154. package/util/containers.js +1 -1
  155. package/util/mermaid/dfg.d.ts +1 -0
  156. package/util/mermaid/dfg.js +3 -3
  157. package/util/simple-df/dfg-view.d.ts +2 -1
  158. package/util/simple-df/dfg-view.js +2 -2
  159. package/util/version.js +1 -1
  160. package/dataflow/environments/remove.d.ts +0 -12
  161. package/dataflow/environments/remove.js +0 -52
package/benchmark/summarizer/first-phase/process.js
@@ -266,26 +266,29 @@ function summarizeDfShapeStats({ perNodeStats, ...stats }) {
  numberOfEntriesPerNode: (0, summarizer_1.summarizeMeasurement)(nodeStats.map(s => s.numberOfEntries)),
  numberOfOperations: (0, arrays_1.arraySum)(nodeStats.map(s => s.mappedOperations?.length).filter(assert_1.isNotUndefined)),
  numberOfTotalValues: nodeStats.filter(s => isValue(s.inferredColNames) && isValue(s.inferredColCount) && isValue(s.inferredRowCount)).length,
- numberOfTotalTop: nodeStats.filter(s => isTop(s.inferredColNames) && isTop(s.inferredColCount) && isTop(s.inferredRowCount)).length,
  numberOfTotalBottom: nodeStats.filter(s => s.inferredColNames === 0 && isBottom(s.inferredColCount) && isBottom(s.inferredRowCount)).length,
+ numberOfTotalTop: nodeStats.filter(s => isTop(s.inferredColNames) && isTop(s.inferredColCount) && isTop(s.inferredRowCount)).length,
  inferredColNames: (0, summarizer_1.summarizeMeasurement)(nodeStats.map(s => s.inferredColNames).filter(isValue)),
+ approxRangeColNames: (0, summarizer_1.summarizeMeasurement)(nodeStats.map(s => s.approxRangeColNames).filter(assert_1.isNotUndefined).filter(isFinite)),
+ numberOfColNamesExact: nodeStats.map(s => s.approxRangeColNames).filter(range => range === 0).length,
  numberOfColNamesValues: nodeStats.map(s => s.inferredColNames).filter(isValue).length,
+ numberOfColNamesBottom: nodeStats.map(s => s.inferredColNames).filter(isBottom).length,
+ numberOfColNamesInfinite: nodeStats.map(s => s.inferredColNames).filter(isInfinite).length,
  numberOfColNamesTop: nodeStats.map(s => s.inferredColNames).filter(isTop).length,
- numberOfColNamesBottom: nodeStats.map(s => s.inferredColNames).filter(number => number === 0).length,
  inferredColCount: (0, summarizer_1.summarizeMeasurement)(nodeStats.map(s => s.inferredColCount).filter(isValue)),
+ approxRangeColCount: (0, summarizer_1.summarizeMeasurement)(nodeStats.map(s => s.approxRangeColCount).filter(assert_1.isNotUndefined).filter(isFinite)),
  numberOfColCountExact: nodeStats.map(s => s.approxRangeColCount).filter(range => range === 0).length,
  numberOfColCountValues: nodeStats.map(s => s.inferredColCount).filter(isValue).length,
- numberOfColCountTop: nodeStats.map(s => s.inferredColCount).filter(isTop).length,
- numberOfColCountInfinite: nodeStats.map(s => s.inferredColCount).filter(isInfinite).length,
  numberOfColCountBottom: nodeStats.map(s => s.inferredColCount).filter(isBottom).length,
- approxRangeColCount: (0, summarizer_1.summarizeMeasurement)(nodeStats.map(s => s.approxRangeColCount).filter(assert_1.isNotUndefined).filter(isFinite)),
+ numberOfColCountInfinite: nodeStats.map(s => s.inferredColCount).filter(isInfinite).length,
+ numberOfColCountTop: nodeStats.map(s => s.inferredColCount).filter(isTop).length,
  inferredRowCount: (0, summarizer_1.summarizeMeasurement)(nodeStats.map(s => s.inferredRowCount).filter(isValue)),
+ approxRangeRowCount: (0, summarizer_1.summarizeMeasurement)(nodeStats.map(s => s.approxRangeRowCount).filter(assert_1.isNotUndefined).filter(isFinite)),
  numberOfRowCountExact: nodeStats.map(s => s.approxRangeRowCount).filter(range => range === 0).length,
  numberOfRowCountValues: nodeStats.map(s => s.inferredRowCount).filter(isValue).length,
- numberOfRowCountTop: nodeStats.map(s => s.inferredRowCount).filter(isTop).length,
- numberOfRowCountInfinite: nodeStats.map(s => s.inferredRowCount).filter(isInfinite).length,
  numberOfRowCountBottom: nodeStats.map(s => s.inferredRowCount).filter(isBottom).length,
- approxRangeRowCount: (0, summarizer_1.summarizeMeasurement)(nodeStats.map(s => s.approxRangeRowCount).filter(assert_1.isNotUndefined).filter(isFinite)),
+ numberOfRowCountInfinite: nodeStats.map(s => s.inferredRowCount).filter(isInfinite).length,
+ numberOfRowCountTop: nodeStats.map(s => s.inferredRowCount).filter(isTop).length,
  perOperationNumber: summarizePerOperationStats(nodeStats),
  };
  }

package/benchmark/summarizer/second-phase/process.js
@@ -114,8 +114,8 @@ function summarizeAllSummarizedStats(stats) {
  numberOfNonDataFrameFiles: (0, arrays_1.arraySum)(stats.map(s => s.dataFrameShape?.numberOfNonDataFrameFiles).filter(assert_1.isNotUndefined)),
  numberOfResultConstraints: (0, summarizer_1.summarizeMeasurement)(stats.map(s => s.dataFrameShape?.numberOfResultConstraints).filter(assert_1.isNotUndefined)),
  numberOfResultingValues: (0, summarizer_1.summarizeMeasurement)(stats.map(s => s.dataFrameShape?.numberOfResultingValues).filter(assert_1.isNotUndefined)),
- numberOfResultingTop: (0, summarizer_1.summarizeMeasurement)(stats.map(s => s.dataFrameShape?.numberOfResultingTop).filter(assert_1.isNotUndefined)),
  numberOfResultingBottom: (0, summarizer_1.summarizeMeasurement)(stats.map(s => s.dataFrameShape?.numberOfResultingBottom).filter(assert_1.isNotUndefined)),
+ numberOfResultingTop: (0, summarizer_1.summarizeMeasurement)(stats.map(s => s.dataFrameShape?.numberOfResultingTop).filter(assert_1.isNotUndefined)),
  numberOfEmptyNodes: (0, summarizer_1.summarizeMeasurement)(stats.map(s => s.dataFrameShape?.numberOfEmptyNodes).filter(assert_1.isNotUndefined)),
  numberOfOperationNodes: (0, summarizer_1.summarizeMeasurement)(stats.map(s => s.dataFrameShape?.numberOfOperationNodes).filter(assert_1.isNotUndefined)),
  numberOfValueNodes: (0, summarizer_1.summarizeMeasurement)(stats.map(s => s.dataFrameShape?.numberOfValueNodes).filter(assert_1.isNotUndefined)),
@@ -123,26 +123,29 @@ function summarizeAllSummarizedStats(stats) {
  numberOfEntriesPerNode: (0, process_1.summarizeSummarizedMeasurement)(stats.map(s => s.dataFrameShape?.numberOfEntriesPerNode).filter(assert_1.isNotUndefined)),
  numberOfOperations: (0, summarizer_1.summarizeMeasurement)(stats.map(s => s.dataFrameShape?.numberOfOperations).filter(assert_1.isNotUndefined)),
  numberOfTotalValues: (0, summarizer_1.summarizeMeasurement)(stats.map(s => s.dataFrameShape?.numberOfTotalValues).filter(assert_1.isNotUndefined)),
- numberOfTotalTop: (0, summarizer_1.summarizeMeasurement)(stats.map(s => s.dataFrameShape?.numberOfTotalTop).filter(assert_1.isNotUndefined)),
  numberOfTotalBottom: (0, summarizer_1.summarizeMeasurement)(stats.map(s => s.dataFrameShape?.numberOfTotalBottom).filter(assert_1.isNotUndefined)),
+ numberOfTotalTop: (0, summarizer_1.summarizeMeasurement)(stats.map(s => s.dataFrameShape?.numberOfTotalTop).filter(assert_1.isNotUndefined)),
  inferredColNames: (0, process_1.summarizeSummarizedMeasurement)(stats.map(s => s.dataFrameShape?.inferredColNames).filter(assert_1.isNotUndefined)),
+ approxRangeColNames: (0, process_1.summarizeSummarizedMeasurement)(stats.map(s => s.dataFrameShape?.approxRangeColNames).filter(assert_1.isNotUndefined)),
+ numberOfColNamesExact: (0, summarizer_1.summarizeMeasurement)(stats.map(s => s.dataFrameShape?.numberOfColNamesExact).filter(assert_1.isNotUndefined)),
  numberOfColNamesValues: (0, summarizer_1.summarizeMeasurement)(stats.map(s => s.dataFrameShape?.numberOfColNamesValues).filter(assert_1.isNotUndefined)),
- numberOfColNamesTop: (0, summarizer_1.summarizeMeasurement)(stats.map(s => s.dataFrameShape?.numberOfColNamesTop).filter(assert_1.isNotUndefined)),
  numberOfColNamesBottom: (0, summarizer_1.summarizeMeasurement)(stats.map(s => s.dataFrameShape?.numberOfColNamesBottom).filter(assert_1.isNotUndefined)),
+ numberOfColNamesInfinite: (0, summarizer_1.summarizeMeasurement)(stats.map(s => s.dataFrameShape?.numberOfColNamesInfinite).filter(assert_1.isNotUndefined)),
+ numberOfColNamesTop: (0, summarizer_1.summarizeMeasurement)(stats.map(s => s.dataFrameShape?.numberOfColNamesTop).filter(assert_1.isNotUndefined)),
  inferredColCount: (0, process_1.summarizeSummarizedMeasurement)(stats.map(s => s.dataFrameShape?.inferredColCount).filter(assert_1.isNotUndefined)),
+ approxRangeColCount: (0, process_1.summarizeSummarizedMeasurement)(stats.map(s => s.dataFrameShape?.approxRangeColCount).filter(assert_1.isNotUndefined)),
  numberOfColCountExact: (0, summarizer_1.summarizeMeasurement)(stats.map(s => s.dataFrameShape?.numberOfColCountExact).filter(assert_1.isNotUndefined)),
  numberOfColCountValues: (0, summarizer_1.summarizeMeasurement)(stats.map(s => s.dataFrameShape?.numberOfColCountValues).filter(assert_1.isNotUndefined)),
- numberOfColCountTop: (0, summarizer_1.summarizeMeasurement)(stats.map(s => s.dataFrameShape?.numberOfColCountTop).filter(assert_1.isNotUndefined)),
- numberOfColCountInfinite: (0, summarizer_1.summarizeMeasurement)(stats.map(s => s.dataFrameShape?.numberOfColCountInfinite).filter(assert_1.isNotUndefined)),
  numberOfColCountBottom: (0, summarizer_1.summarizeMeasurement)(stats.map(s => s.dataFrameShape?.numberOfColCountBottom).filter(assert_1.isNotUndefined)),
- approxRangeColCount: (0, process_1.summarizeSummarizedMeasurement)(stats.map(s => s.dataFrameShape?.approxRangeColCount).filter(assert_1.isNotUndefined)),
+ numberOfColCountInfinite: (0, summarizer_1.summarizeMeasurement)(stats.map(s => s.dataFrameShape?.numberOfColCountInfinite).filter(assert_1.isNotUndefined)),
+ numberOfColCountTop: (0, summarizer_1.summarizeMeasurement)(stats.map(s => s.dataFrameShape?.numberOfColCountTop).filter(assert_1.isNotUndefined)),
  inferredRowCount: (0, process_1.summarizeSummarizedMeasurement)(stats.map(s => s.dataFrameShape?.inferredRowCount).filter(assert_1.isNotUndefined)),
+ approxRangeRowCount: (0, process_1.summarizeSummarizedMeasurement)(stats.map(s => s.dataFrameShape?.approxRangeRowCount).filter(assert_1.isNotUndefined)),
  numberOfRowCountExact: (0, summarizer_1.summarizeMeasurement)(stats.map(s => s.dataFrameShape?.numberOfRowCountExact).filter(assert_1.isNotUndefined)),
  numberOfRowCountValues: (0, summarizer_1.summarizeMeasurement)(stats.map(s => s.dataFrameShape?.numberOfRowCountValues).filter(assert_1.isNotUndefined)),
- numberOfRowCountTop: (0, summarizer_1.summarizeMeasurement)(stats.map(s => s.dataFrameShape?.numberOfRowCountTop).filter(assert_1.isNotUndefined)),
- numberOfRowCountInfinite: (0, summarizer_1.summarizeMeasurement)(stats.map(s => s.dataFrameShape?.numberOfRowCountInfinite).filter(assert_1.isNotUndefined)),
  numberOfRowCountBottom: (0, summarizer_1.summarizeMeasurement)(stats.map(s => s.dataFrameShape?.numberOfRowCountBottom).filter(assert_1.isNotUndefined)),
- approxRangeRowCount: (0, process_1.summarizeSummarizedMeasurement)(stats.map(s => s.dataFrameShape?.approxRangeRowCount).filter(assert_1.isNotUndefined)),
+ numberOfRowCountInfinite: (0, summarizer_1.summarizeMeasurement)(stats.map(s => s.dataFrameShape?.numberOfRowCountInfinite).filter(assert_1.isNotUndefined)),
+ numberOfRowCountTop: (0, summarizer_1.summarizeMeasurement)(stats.map(s => s.dataFrameShape?.numberOfRowCountTop).filter(assert_1.isNotUndefined)),
  perOperationNumber: new Map(semantics_1.DataFrameOperationNames.map(n => [n, (0, summarizer_1.summarizeMeasurement)(stats.map(s => s.dataFrameShape?.perOperationNumber.get(n) ?? 0))]))
  } : undefined
  };
@@ -198,8 +201,8 @@ function summarizeAllUltimateStats(stats) {
  numberOfNonDataFrameFiles: (0, arrays_1.arraySum)(stats.map(s => s.dataFrameShape?.numberOfNonDataFrameFiles).filter(assert_1.isNotUndefined)),
  numberOfResultConstraints: (0, process_1.summarizeSummarizedMeasurement)(stats.map(s => s.dataFrameShape?.numberOfResultConstraints).filter(assert_1.isNotUndefined)),
  numberOfResultingValues: (0, process_1.summarizeSummarizedMeasurement)(stats.map(s => s.dataFrameShape?.numberOfResultingValues).filter(assert_1.isNotUndefined)),
- numberOfResultingTop: (0, process_1.summarizeSummarizedMeasurement)(stats.map(s => s.dataFrameShape?.numberOfResultingTop).filter(assert_1.isNotUndefined)),
  numberOfResultingBottom: (0, process_1.summarizeSummarizedMeasurement)(stats.map(s => s.dataFrameShape?.numberOfResultingBottom).filter(assert_1.isNotUndefined)),
+ numberOfResultingTop: (0, process_1.summarizeSummarizedMeasurement)(stats.map(s => s.dataFrameShape?.numberOfResultingTop).filter(assert_1.isNotUndefined)),
  numberOfEmptyNodes: (0, process_1.summarizeSummarizedMeasurement)(stats.map(s => s.dataFrameShape?.numberOfEmptyNodes).filter(assert_1.isNotUndefined)),
  numberOfOperationNodes: (0, process_1.summarizeSummarizedMeasurement)(stats.map(s => s.dataFrameShape?.numberOfOperationNodes).filter(assert_1.isNotUndefined)),
  numberOfValueNodes: (0, process_1.summarizeSummarizedMeasurement)(stats.map(s => s.dataFrameShape?.numberOfValueNodes).filter(assert_1.isNotUndefined)),
@@ -207,26 +210,29 @@ function summarizeAllUltimateStats(stats) {
  numberOfEntriesPerNode: (0, process_1.summarizeSummarizedMeasurement)(stats.map(s => s.dataFrameShape?.numberOfEntriesPerNode).filter(assert_1.isNotUndefined)),
  numberOfOperations: (0, process_1.summarizeSummarizedMeasurement)(stats.map(s => s.dataFrameShape?.numberOfOperations).filter(assert_1.isNotUndefined)),
  numberOfTotalValues: (0, process_1.summarizeSummarizedMeasurement)(stats.map(s => s.dataFrameShape?.numberOfTotalValues).filter(assert_1.isNotUndefined)),
- numberOfTotalTop: (0, process_1.summarizeSummarizedMeasurement)(stats.map(s => s.dataFrameShape?.numberOfTotalTop).filter(assert_1.isNotUndefined)),
  numberOfTotalBottom: (0, process_1.summarizeSummarizedMeasurement)(stats.map(s => s.dataFrameShape?.numberOfTotalBottom).filter(assert_1.isNotUndefined)),
+ numberOfTotalTop: (0, process_1.summarizeSummarizedMeasurement)(stats.map(s => s.dataFrameShape?.numberOfTotalTop).filter(assert_1.isNotUndefined)),
  inferredColNames: (0, process_1.summarizeSummarizedMeasurement)(stats.map(s => s.dataFrameShape?.inferredColNames).filter(assert_1.isNotUndefined)),
+ approxRangeColNames: (0, process_1.summarizeSummarizedMeasurement)(stats.map(s => s.dataFrameShape?.approxRangeColNames).filter(assert_1.isNotUndefined)),
+ numberOfColNamesExact: (0, process_1.summarizeSummarizedMeasurement)(stats.map(s => s.dataFrameShape?.numberOfColNamesExact).filter(assert_1.isNotUndefined)),
  numberOfColNamesValues: (0, process_1.summarizeSummarizedMeasurement)(stats.map(s => s.dataFrameShape?.numberOfColNamesValues).filter(assert_1.isNotUndefined)),
- numberOfColNamesTop: (0, process_1.summarizeSummarizedMeasurement)(stats.map(s => s.dataFrameShape?.numberOfColNamesTop).filter(assert_1.isNotUndefined)),
  numberOfColNamesBottom: (0, process_1.summarizeSummarizedMeasurement)(stats.map(s => s.dataFrameShape?.numberOfColNamesBottom).filter(assert_1.isNotUndefined)),
+ numberOfColNamesInfinite: (0, process_1.summarizeSummarizedMeasurement)(stats.map(s => s.dataFrameShape?.numberOfColNamesInfinite).filter(assert_1.isNotUndefined)),
+ numberOfColNamesTop: (0, process_1.summarizeSummarizedMeasurement)(stats.map(s => s.dataFrameShape?.numberOfColNamesTop).filter(assert_1.isNotUndefined)),
  inferredColCount: (0, process_1.summarizeSummarizedMeasurement)(stats.map(s => s.dataFrameShape?.inferredColCount).filter(assert_1.isNotUndefined)),
+ approxRangeColCount: (0, process_1.summarizeSummarizedMeasurement)(stats.map(s => s.dataFrameShape?.approxRangeColCount).filter(assert_1.isNotUndefined)),
  numberOfColCountExact: (0, process_1.summarizeSummarizedMeasurement)(stats.map(s => s.dataFrameShape?.numberOfColCountExact).filter(assert_1.isNotUndefined)),
  numberOfColCountValues: (0, process_1.summarizeSummarizedMeasurement)(stats.map(s => s.dataFrameShape?.numberOfColCountValues).filter(assert_1.isNotUndefined)),
- numberOfColCountTop: (0, process_1.summarizeSummarizedMeasurement)(stats.map(s => s.dataFrameShape?.numberOfColCountTop).filter(assert_1.isNotUndefined)),
- numberOfColCountInfinite: (0, process_1.summarizeSummarizedMeasurement)(stats.map(s => s.dataFrameShape?.numberOfColCountInfinite).filter(assert_1.isNotUndefined)),
  numberOfColCountBottom: (0, process_1.summarizeSummarizedMeasurement)(stats.map(s => s.dataFrameShape?.numberOfColCountBottom).filter(assert_1.isNotUndefined)),
- approxRangeColCount: (0, process_1.summarizeSummarizedMeasurement)(stats.map(s => s.dataFrameShape?.approxRangeColCount).filter(assert_1.isNotUndefined)),
+ numberOfColCountInfinite: (0, process_1.summarizeSummarizedMeasurement)(stats.map(s => s.dataFrameShape?.numberOfColCountInfinite).filter(assert_1.isNotUndefined)),
+ numberOfColCountTop: (0, process_1.summarizeSummarizedMeasurement)(stats.map(s => s.dataFrameShape?.numberOfColCountTop).filter(assert_1.isNotUndefined)),
  inferredRowCount: (0, process_1.summarizeSummarizedMeasurement)(stats.map(s => s.dataFrameShape?.inferredRowCount).filter(assert_1.isNotUndefined)),
+ approxRangeRowCount: (0, process_1.summarizeSummarizedMeasurement)(stats.map(s => s.dataFrameShape?.approxRangeRowCount).filter(assert_1.isNotUndefined)),
  numberOfRowCountExact: (0, process_1.summarizeSummarizedMeasurement)(stats.map(s => s.dataFrameShape?.numberOfRowCountExact).filter(assert_1.isNotUndefined)),
  numberOfRowCountValues: (0, process_1.summarizeSummarizedMeasurement)(stats.map(s => s.dataFrameShape?.numberOfRowCountValues).filter(assert_1.isNotUndefined)),
- numberOfRowCountTop: (0, process_1.summarizeSummarizedMeasurement)(stats.map(s => s.dataFrameShape?.numberOfRowCountTop).filter(assert_1.isNotUndefined)),
- numberOfRowCountInfinite: (0, process_1.summarizeSummarizedMeasurement)(stats.map(s => s.dataFrameShape?.numberOfRowCountInfinite).filter(assert_1.isNotUndefined)),
  numberOfRowCountBottom: (0, process_1.summarizeSummarizedMeasurement)(stats.map(s => s.dataFrameShape?.numberOfRowCountBottom).filter(assert_1.isNotUndefined)),
- approxRangeRowCount: (0, process_1.summarizeSummarizedMeasurement)(stats.map(s => s.dataFrameShape?.approxRangeRowCount).filter(assert_1.isNotUndefined)),
+ numberOfRowCountInfinite: (0, process_1.summarizeSummarizedMeasurement)(stats.map(s => s.dataFrameShape?.numberOfRowCountInfinite).filter(assert_1.isNotUndefined)),
+ numberOfRowCountTop: (0, process_1.summarizeSummarizedMeasurement)(stats.map(s => s.dataFrameShape?.numberOfRowCountTop).filter(assert_1.isNotUndefined)),
  perOperationNumber: new Map(semantics_1.DataFrameOperationNames.map(n => [n, (0, process_1.summarizeSummarizedMeasurement)(stats.map(s => s.dataFrameShape?.perOperationNumber.get(n)).filter(assert_1.isNotUndefined))]))
  } : undefined
  };
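All three summarizer functions gain the same set of per-dimension counters: approxRange* summaries plus Exact and Infinite buckets next to the existing Values/Bottom/Top ones. As a hedged reading of the hunks above, the per-dimension bookkeeping boils down to the following sketch; isValue, isBottom, isInfinite, isTop and the nodeStats shape are taken from the code shown, everything else is illustrative:

    // Sketch only: the counters for one dimension (column count), mirroring the hunk above.
    function colCountCounters(nodeStats) {
        const counts = nodeStats.map(s => s.inferredColCount);
        return {
            values:   counts.filter(isValue).length,     // concrete inferred counts
            bottom:   counts.filter(isBottom).length,    // nothing could be inferred
            infinite: counts.filter(isInfinite).length,  // unbounded counts
            top:      counts.filter(isTop).length,       // over-approximated to "anything"
            // "exact" is measured on the approximation range instead: a range of 0 means no slack
            exact:    nodeStats.map(s => s.approxRangeColCount).filter(range => range === 0).length
        };
    }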
package/control-flow/cfg-dead-code.js
@@ -58,7 +58,8 @@ class CfgConditionalDeadCodeRemoval extends semantic_cfg_guided_visitor_1.Semant
  graph: this.config.dfg,
  full: true,
  idMap: this.config.normalizedAst.idMap,
- resolve: this.config.ctx.config.solver.variables
+ resolve: this.config.ctx.config.solver.variables,
+ ctx: this.config.ctx,
  }));
  if (values === undefined || values.elements.length !== 1 || values.elements[0].type != 'logical' || !(0, r_value_1.isValue)(values.elements[0].value)) {
  this.unableToCalculateValue(id);
@@ -83,7 +84,7 @@ class CfgConditionalDeadCodeRemoval extends semantic_cfg_guided_visitor_1.Semant
  graph: this.config.dfg,
  full: true,
  idMap: this.config.normalizedAst.idMap,
- resolve: this.config.ctx.config.solver.variables
+ ctx: this.config.ctx,
  }));
  if (values === undefined || values.elements.length !== 1 || values.elements[0].type != 'logical' || !(0, r_value_1.isValue)(values.elements[0].value)) {
  return undefined;

package/control-flow/useless-loop.js
@@ -39,7 +39,8 @@ function onlyLoopsOnce(loop, dataflow, controlflow, ast, ctx) {
  const values = (0, general_1.valueSetGuard)((0, alias_tracking_1.resolveIdToValue)(vectorOfLoop.nodeId, {
  graph: dataflow,
  idMap: dataflow.idMap,
- resolve: ctx.config.solver.variables
+ resolve: ctx.config.solver.variables,
+ ctx: ctx
  }));
  if (values === undefined || values.elements.length !== 1 || values.elements[0].type !== 'vector' || !(0, r_value_1.isValue)(values.elements[0].elements)) {
  return undefined;
@@ -75,7 +76,8 @@ class CfgSingleIterationLoopDetector extends semantic_cfg_guided_visitor_1.Seman
  graph: this.config.dfg,
  full: true,
  idMap: this.config.normalizedAst.idMap,
- resolve: this.config.ctx.config.solver.variables
+ resolve: this.config.ctx.config.solver.variables,
+ ctx: this.config.ctx
  }));
  if (values === undefined || values.elements.length !== 1 || values.elements[0].type != 'logical' || !(0, r_value_1.isValue)(values.elements[0].value)) {
  return undefined;
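The common thread in these control-flow hunks is that resolveIdToValue now receives the analyzer context in addition to (or, in one place, instead of) the plain resolve strategy. A hedged sketch of the new call shape, with dfg, ast, and ctx standing in for the caller's dataflow graph, normalized AST, and ReadOnlyFlowrAnalyzerContext:

    // Sketch only: the option object passed to resolveIdToValue in 2.7.0, derived from the hunks above.
    const values = valueSetGuard(resolveIdToValue(nodeId, {
        graph:   dfg,
        full:    true,
        idMap:   ast.idMap,
        resolve: ctx.config.solver.variables, // still accepted where shown above
        ctx:     ctx                          // new: the analyzer context
    }));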
package/core/steps/all/static-slicing/00-slice.d.ts
@@ -3,6 +3,7 @@ import { PipelineStepStage } from '../../pipeline-step';
  import type { DataflowInformation } from '../../../../dataflow/info';
  import type { SlicingCriteria } from '../../../../slicing/criterion/parse';
  import type { NormalizedAst } from '../../../../r-bridge/lang-4.x/ast/model/processing/decorate';
+ import type { ReadOnlyFlowrAnalyzerContext } from '../../../../project/context/flowr-analyzer-context';
  export interface SliceRequiredInput {
  /** The slicing criterion is only of interest if you actually want to slice the R code */
  readonly criterion: SlicingCriteria;
@@ -10,6 +11,8 @@ export interface SliceRequiredInput {
  readonly threshold?: number;
  /** The direction to slice in. Defaults to backward slicing if unset. */
  readonly direction?: SliceDirection;
+ /** The context of the analysis */
+ readonly context?: ReadOnlyFlowrAnalyzerContext;
  }
  export declare enum SliceDirection {
  Backward = "backward",

package/core/steps/all/static-slicing/00-slice.js
@@ -10,7 +10,8 @@ var SliceDirection;
  })(SliceDirection || (exports.SliceDirection = SliceDirection = {}));
  function processor(results, input) {
  const direction = input.direction ?? SliceDirection.Backward;
- return (0, static_slicer_1.staticSlice)(results.dataflow, results.normalize, input.criterion, direction, input.threshold);
+ const threshold = input.threshold ?? input.context?.config.solver.slicer?.threshold;
+ return (0, static_slicer_1.staticSlice)(input.context, results.dataflow, results.normalize, input.criterion, direction, threshold);
  }
  exports.STATIC_SLICE = {
  name: 'slice',
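The slice step can now pick up its threshold from the analyzer configuration: an explicit input.threshold still wins, otherwise config.solver.slicer?.threshold from the supplied context is used. A hedged sketch of the step input under the new shape (the criterion value and variable names are illustrative):

    // Sketch only: passing the new optional `context` to the slice step.
    const sliceInput = {
        criterion: ['2@x'],                 // slicing criteria, as before
        direction: SliceDirection.Backward,
        // threshold omitted on purpose: falls back to context.config.solver.slicer?.threshold
        context:   analyzerContext          // a ReadOnlyFlowrAnalyzerContext
    };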
package/core/steps/pipeline/default-pipelines.d.ts
@@ -22,6 +22,20 @@ export declare const DEFAULT_SLICING_PIPELINE: import("./pipeline").Pipeline<{
  };
  readonly dependencies: readonly [];
  readonly requiredInput: import("../../../r-bridge/parser").ParseRequiredInput<string>;
+ } | {
+ readonly name: "slice";
+ readonly humanReadableName: "static slice";
+ readonly description: "Calculate the actual static slice from the dataflow graph and the given slicing criteria";
+ readonly processor: (results: {
+ dataflow?: import("../../../dataflow/info").DataflowInformation;
+ normalize?: import("../../../r-bridge/lang-4.x/ast/model/processing/decorate").NormalizedAst;
+ }, input: Partial<import("../all/static-slicing/00-slice").SliceRequiredInput>) => Readonly<import("../../../slicing/static/slicer-types").SliceResult>;
+ readonly executed: import("../pipeline-step").PipelineStepStage.OncePerRequest;
+ readonly printer: {
+ readonly 0: typeof import("../../print/print").internalPrinter;
+ };
+ readonly dependencies: readonly ["dataflow"];
+ readonly requiredInput: import("../all/static-slicing/00-slice").SliceRequiredInput;
  } | {
  readonly name: "normalize";
  readonly humanReadableName: "normalize";
@@ -39,20 +53,6 @@ export declare const DEFAULT_SLICING_PIPELINE: import("./pipeline").Pipeline<{
  };
  readonly dependencies: readonly ["parse"];
  readonly requiredInput: import("../all/core/10-normalize").NormalizeRequiredInput;
- } | {
- readonly name: "slice";
- readonly humanReadableName: "static slice";
- readonly description: "Calculate the actual static slice from the dataflow graph and the given slicing criteria";
- readonly processor: (results: {
- dataflow?: import("../../../dataflow/info").DataflowInformation;
- normalize?: import("../../../r-bridge/lang-4.x/ast/model/processing/decorate").NormalizedAst;
- }, input: Partial<import("../all/static-slicing/00-slice").SliceRequiredInput>) => Readonly<import("../../../slicing/static/slicer-types").SliceResult>;
- readonly executed: import("../pipeline-step").PipelineStepStage.OncePerRequest;
- readonly printer: {
- readonly 0: typeof import("../../print/print").internalPrinter;
- };
- readonly dependencies: readonly ["dataflow"];
- readonly requiredInput: import("../all/static-slicing/00-slice").SliceRequiredInput;
  } | {
  readonly humanReadableName: "dataflow";
  readonly processor: (results: {
@@ -106,6 +106,20 @@ export declare const DEFAULT_SLICE_AND_RECONSTRUCT_PIPELINE: import("./pipeline"
  };
  readonly dependencies: readonly [];
  readonly requiredInput: import("../../../r-bridge/parser").ParseRequiredInput<string>;
+ } | {
+ readonly name: "slice";
+ readonly humanReadableName: "static slice";
+ readonly description: "Calculate the actual static slice from the dataflow graph and the given slicing criteria";
+ readonly processor: (results: {
+ dataflow?: import("../../../dataflow/info").DataflowInformation;
+ normalize?: import("../../../r-bridge/lang-4.x/ast/model/processing/decorate").NormalizedAst;
+ }, input: Partial<import("../all/static-slicing/00-slice").SliceRequiredInput>) => Readonly<import("../../../slicing/static/slicer-types").SliceResult>;
+ readonly executed: import("../pipeline-step").PipelineStepStage.OncePerRequest;
+ readonly printer: {
+ readonly 0: typeof import("../../print/print").internalPrinter;
+ };
+ readonly dependencies: readonly ["dataflow"];
+ readonly requiredInput: import("../all/static-slicing/00-slice").SliceRequiredInput;
  } | {
  readonly name: "normalize";
  readonly humanReadableName: "normalize";
@@ -123,20 +137,6 @@ export declare const DEFAULT_SLICE_AND_RECONSTRUCT_PIPELINE: import("./pipeline"
  };
  readonly dependencies: readonly ["parse"];
  readonly requiredInput: import("../all/core/10-normalize").NormalizeRequiredInput;
- } | {
- readonly name: "slice";
- readonly humanReadableName: "static slice";
- readonly description: "Calculate the actual static slice from the dataflow graph and the given slicing criteria";
- readonly processor: (results: {
- dataflow?: import("../../../dataflow/info").DataflowInformation;
- normalize?: import("../../../r-bridge/lang-4.x/ast/model/processing/decorate").NormalizedAst;
- }, input: Partial<import("../all/static-slicing/00-slice").SliceRequiredInput>) => Readonly<import("../../../slicing/static/slicer-types").SliceResult>;
- readonly executed: import("../pipeline-step").PipelineStepStage.OncePerRequest;
- readonly printer: {
- readonly 0: typeof import("../../print/print").internalPrinter;
- };
- readonly dependencies: readonly ["dataflow"];
- readonly requiredInput: import("../all/static-slicing/00-slice").SliceRequiredInput;
  } | {
  readonly humanReadableName: "dataflow";
  readonly processor: (results: {
@@ -190,6 +190,20 @@ export declare const DEFAULT_SLICE_WITHOUT_RECONSTRUCT_PIPELINE: import("./pipel
  };
  readonly dependencies: readonly [];
  readonly requiredInput: import("../../../r-bridge/parser").ParseRequiredInput<string>;
+ } | {
+ readonly name: "slice";
+ readonly humanReadableName: "static slice";
+ readonly description: "Calculate the actual static slice from the dataflow graph and the given slicing criteria";
+ readonly processor: (results: {
+ dataflow?: import("../../../dataflow/info").DataflowInformation;
+ normalize?: import("../../../r-bridge/lang-4.x/ast/model/processing/decorate").NormalizedAst;
+ }, input: Partial<import("../all/static-slicing/00-slice").SliceRequiredInput>) => Readonly<import("../../../slicing/static/slicer-types").SliceResult>;
+ readonly executed: import("../pipeline-step").PipelineStepStage.OncePerRequest;
+ readonly printer: {
+ readonly 0: typeof import("../../print/print").internalPrinter;
+ };
+ readonly dependencies: readonly ["dataflow"];
+ readonly requiredInput: import("../all/static-slicing/00-slice").SliceRequiredInput;
  } | {
  readonly name: "normalize";
  readonly humanReadableName: "normalize";
@@ -207,20 +221,6 @@ export declare const DEFAULT_SLICE_WITHOUT_RECONSTRUCT_PIPELINE: import("./pipel
  };
  readonly dependencies: readonly ["parse"];
  readonly requiredInput: import("../all/core/10-normalize").NormalizeRequiredInput;
- } | {
- readonly name: "slice";
- readonly humanReadableName: "static slice";
- readonly description: "Calculate the actual static slice from the dataflow graph and the given slicing criteria";
- readonly processor: (results: {
- dataflow?: import("../../../dataflow/info").DataflowInformation;
- normalize?: import("../../../r-bridge/lang-4.x/ast/model/processing/decorate").NormalizedAst;
- }, input: Partial<import("../all/static-slicing/00-slice").SliceRequiredInput>) => Readonly<import("../../../slicing/static/slicer-types").SliceResult>;
- readonly executed: import("../pipeline-step").PipelineStepStage.OncePerRequest;
- readonly printer: {
- readonly 0: typeof import("../../print/print").internalPrinter;
- };
- readonly dependencies: readonly ["dataflow"];
- readonly requiredInput: import("../all/static-slicing/00-slice").SliceRequiredInput;
  } | {
  readonly humanReadableName: "dataflow";
  readonly processor: (results: {

package/dataflow/cluster.js
@@ -10,13 +10,13 @@ const assert_1 = require("../util/assert");
  function findAllClusters(graph) {
  const clusters = [];
  // we reverse the vertices since dependencies usually point "backwards" from later nodes
- const notReached = new Set([...graph.vertices(true)].map(([id]) => id).reverse());
+ const notReached = new Set(graph.vertices(true).map(([id]) => id).toArray().reverse());
  while (notReached.size > 0) {
  const [startNode] = notReached;
  notReached.delete(startNode);
  clusters.push({
  startNode: startNode,
- members: [...makeCluster(graph, startNode, notReached)],
+ members: Array.from(makeCluster(graph, startNode, notReached)),
  hasUnknownSideEffects: graph.unknownSideEffects.has(startNode)
  });
  }
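The cluster change swaps the spread-into-array pattern for the ES iterator helpers (.map(...) and .toArray() on the iterator returned by graph.vertices(true)), so the vertex list is only materialized once. A hedged sketch of the equivalence, assuming a runtime with iterator helpers (Node 22+), with a plain generator standing in for the graph's vertex iterator:

    // Sketch only: iterator helpers vs. spreading, on a stand-in generator.
    function* vertices() { yield ['a', 1]; yield ['b', 2]; yield ['c', 3]; }

    const idsOld = [...vertices()].map(([id]) => id).reverse();       // spread, then map
    const idsNew = vertices().map(([id]) => id).toArray().reverse();  // map lazily, materialize once
    // both evaluate to ['c', 'b', 'a']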
package/dataflow/environments/append.d.ts
@@ -1,4 +1,9 @@
  import { type REnvironmentInformation } from './environment';
+ import type { IdentifierDefinition } from './identifier';
+ /**
+ * Merges two arrays of identifier definitions, ensuring uniqueness based on `nodeId` and `definedAt`.
+ */
+ export declare function uniqueMergeValuesInDefinitions(old: IdentifierDefinition[], value: readonly IdentifierDefinition[]): IdentifierDefinition[];
  /**
  * Adds all writes of `next` to `base` (i.e., the operations of `next` *might* happen).
  */

package/dataflow/environments/append.js
@@ -1,9 +1,12 @@
  "use strict";
  Object.defineProperty(exports, "__esModule", { value: true });
+ exports.uniqueMergeValuesInDefinitions = uniqueMergeValuesInDefinitions;
  exports.appendEnvironment = appendEnvironment;
  const assert_1 = require("../../util/assert");
- const environment_1 = require("./environment");
- function uniqueMergeValues(old, value) {
+ /**
+ * Merges two arrays of identifier definitions, ensuring uniqueness based on `nodeId` and `definedAt`.
+ */
+ function uniqueMergeValuesInDefinitions(old, value) {
  const result = old;
  for (const v of value) {
  const find = result.findIndex(o => o.nodeId === v.nodeId && o.definedAt === v.definedAt);
@@ -13,23 +16,6 @@ function uniqueMergeValues(old, value) {
  }
  return result;
  }
- function appendIEnvironmentWith(base, next) {
- (0, assert_1.guard)(base !== undefined && next !== undefined, 'can not append environments with undefined');
- const map = new Map(base.memory);
- for (const [key, value] of next.memory) {
- const old = map.get(key);
- if (old) {
- map.set(key, uniqueMergeValues(old, value));
- }
- else {
- map.set(key, value);
- }
- }
- const parent = base.parent.builtInEnv ? base.parent : appendIEnvironmentWith(base.parent, next.parent);
- const out = new environment_1.Environment(parent);
- out.memory = map;
- return out;
- }
  function appendEnvironment(base, next) {
  if (base === undefined) {
  return next;
@@ -39,7 +25,7 @@ function appendEnvironment(base, next) {
  }
  (0, assert_1.guard)(base.level === next.level, 'environments must have the same level to be handled, it is up to the caller to ensure that');
  return {
- current: appendIEnvironmentWith(base.current, next.current),
+ current: base.current.append(next.current),
  level: base.level,
  };
  }
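Since the merge helper is now exported, callers outside appendEnvironment can deduplicate identifier definitions the same way. A hedged usage sketch; the definition objects are stripped down for illustration and lack the fields a real IdentifierDefinition carries, and the import path simply mirrors the file location inside the package:

    // Sketch only: uniqueness is keyed on `nodeId` + `definedAt`; the first array is mutated and returned.
    const { uniqueMergeValuesInDefinitions } = require('@eagleoutice/flowr/dataflow/environments/append');

    const existing = [{ nodeId: 1, definedAt: 2 }];
    const incoming = [
        { nodeId: 1, definedAt: 2 },  // same key: not duplicated
        { nodeId: 3, definedAt: 4 }   // new key: merged in
    ];
    const merged = uniqueMergeValuesInDefinitions(existing, incoming);
    // merged now holds two definitions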
package/dataflow/environments/built-in.d.ts
@@ -33,6 +33,7 @@ import { resolveAsMinus, resolveAsPlus, resolveAsSeq, resolveAsVector } from '..
  import type { DataflowGraph } from '../graph/graph';
  import type { VariableResolve } from '../../config';
  import type { BuiltInConstantDefinition, BuiltInDefinition, BuiltInFunctionDefinition, BuiltInReplacementDefinition } from './built-in-config';
+ import type { ReadOnlyFlowrAnalyzerContext } from '../../project/context/flowr-analyzer-context';
  export type BuiltIn = `built-in:${string}`;
  /**
  * Generate a built-in id for the given name
@@ -69,7 +70,7 @@ export interface DefaultBuiltInProcessorConfiguration extends ForceArguments {
  */
  readonly useAsProcessor?: UseAsProcessors;
  }
- export type BuiltInEvalHandler = (resolve: VariableResolve, a: RNodeWithParent, env?: REnvironmentInformation, graph?: DataflowGraph, map?: AstIdMap) => Value;
+ export type BuiltInEvalHandler = (resolve: VariableResolve, a: RNodeWithParent, ctx: ReadOnlyFlowrAnalyzerContext, env?: REnvironmentInformation, graph?: DataflowGraph, map?: AstIdMap) => Value;
  declare function defaultBuiltInProcessor<OtherInfo>(name: RSymbol<OtherInfo & ParentInformation>, args: readonly RFunctionArgument<OtherInfo & ParentInformation>[], rootId: NodeId, data: DataflowProcessorInformation<OtherInfo & ParentInformation>, { returnsNthArgument, useAsProcessor, forceArgs, readAllArguments, cfg, hasUnknownSideEffects, treatAsFnCall }: DefaultBuiltInProcessorConfiguration): DataflowInformation;
  export declare const BuiltInProcessorMapper: {
  readonly 'builtin:default': typeof defaultBuiltInProcessor;
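For implementers of custom evaluation handlers, the breaking part is the new required ctx parameter in third position. A hedged TypeScript sketch of a conforming handler; the body is a placeholder, and the import path simply mirrors the declaration file shown above:

    // Sketch only: parameter order after 2.7.0 is (resolve, node, ctx, env?, graph?, map?).
    import type { BuiltInEvalHandler } from '@eagleoutice/flowr/dataflow/environments/built-in';

    const myHandler: BuiltInEvalHandler = (resolve, node, ctx, env, graph, map) => {
        // handlers written against 2.6.x must add `ctx` before the optional env/graph/map arguments
        throw new Error('placeholder body; a real handler returns a Value');
    };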
package/dataflow/environments/clone.d.ts
@@ -4,4 +4,4 @@ import { type REnvironmentInformation } from './environment';
  * @param environment - The environment information to clone.
  * @param recurseParents - Whether to clone the parent environments as well.
  */
- export declare function cloneEnvironmentInformation(environment: REnvironmentInformation, recurseParents?: boolean): REnvironmentInformation;
+ export declare function cloneEnvironmentInformation({ current, level }: REnvironmentInformation, recurseParents?: boolean): REnvironmentInformation;

package/dataflow/environments/clone.js
@@ -1,39 +1,15 @@
  "use strict";
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.cloneEnvironmentInformation = cloneEnvironmentInformation;
- const environment_1 = require("./environment");
- function cloneEnvironment(environment, recurseParents) {
- if (environment === undefined) {
- return undefined;
- }
- else if (environment.builtInEnv) {
- return environment; // do not clone the built-in environment
- }
- /* make sure the clone has the same id */
- const clone = new environment_1.Environment(recurseParents ? cloneEnvironment(environment.parent, recurseParents) : environment.parent, environment.builtInEnv);
- if (environment.memory.size < 10) {
- clone.memory = new Map(JSON.parse(JSON.stringify(environment.memory.entries().toArray())));
- }
- else {
- clone.memory = new Map(environment.memory.entries()
- .map(([k, v]) => [k,
- v.map(s => ({
- ...s,
- controlDependencies: s.controlDependencies?.slice()
- }))
- ]));
- }
- return clone;
- }
  /**
  * Produce a clone of the given environment information.
  * @param environment - The environment information to clone.
  * @param recurseParents - Whether to clone the parent environments as well.
  */
- function cloneEnvironmentInformation(environment, recurseParents = true) {
+ function cloneEnvironmentInformation({ current, level }, recurseParents = true) {
  return {
- current: cloneEnvironment(environment.current, recurseParents),
- level: environment.level
+ current: current.clone(recurseParents),
+ level
  };
  }
  //# sourceMappingURL=clone.js.map

package/dataflow/environments/define.d.ts
@@ -1,12 +1,16 @@
  import type { REnvironmentInformation } from './environment';
- import type { IdentifierDefinition, InGraphIdentifierDefinition } from './identifier';
+ import type { Identifier, IdentifierDefinition, InGraphIdentifierDefinition } from './identifier';
  import type { FlowrConfigOptions } from '../../config';
  /**
  * assumes: existing is not undefined, the overwrite has indices
  */
- export declare function mergeDefinitions(existing: readonly IdentifierDefinition[], definition: InGraphIdentifierDefinition): InGraphIdentifierDefinition[];
+ export declare function mergeDefinitionsForPointer(existing: readonly IdentifierDefinition[], definition: InGraphIdentifierDefinition): InGraphIdentifierDefinition[];
  /**
  * Insert the given `definition` --- defined within the given scope --- into the passed along `environments` will take care of propagation.
  * Does not modify the passed along `environments` in-place! It returns the new reference.
+ * @see {@link Environment#define} - for details on how (local) definitions are handled.
+ * @see {@link Environment#defineSuper} - for details on how (super) definitions are handled.
  */
- export declare function define(definition: IdentifierDefinition, superAssign: boolean | undefined, environment: REnvironmentInformation, config: FlowrConfigOptions): REnvironmentInformation;
+ export declare function define(definition: IdentifierDefinition & {
+ name: Identifier;
+ }, superAssign: boolean | undefined, { level, current }: REnvironmentInformation, config: FlowrConfigOptions): REnvironmentInformation;

package/dataflow/environments/define.js
@@ -1,39 +1,13 @@
  "use strict";
  Object.defineProperty(exports, "__esModule", { value: true });
- exports.mergeDefinitions = mergeDefinitions;
+ exports.mergeDefinitionsForPointer = mergeDefinitionsForPointer;
  exports.define = define;
  const assert_1 = require("../../util/assert");
- const clone_1 = require("./clone");
  const vertex_1 = require("../graph/vertex");
- function defInEnv(newEnvironments, name, definition, config) {
- const existing = newEnvironments.memory.get(name);
- // When there are defined indices, merge the definitions
- const inGraphDefinition = definition;
- if (config.solver.pointerTracking &&
- existing !== undefined &&
- inGraphDefinition.controlDependencies === undefined) {
- if (inGraphDefinition.indicesCollection !== undefined) {
- newEnvironments.memory.set(name, mergeDefinitions(existing, inGraphDefinition));
- return;
- }
- else if (existing?.flatMap(i => i.indicesCollection ?? []).length > 0) {
- // When indices couldn't be resolved, but indices where defined before, just add the definition
- existing.push(definition);
- return;
- }
- }
- // check if it is maybe or not
- if (existing === undefined || definition.controlDependencies === undefined) {
- newEnvironments.memory.set(name, [definition]);
- }
- else {
- existing.push(definition);
- }
- }
  /**
  * assumes: existing is not undefined, the overwrite has indices
  */
- function mergeDefinitions(existing, definition) {
+ function mergeDefinitionsForPointer(existing, definition) {
  // When new definition is not a single index, e.g., a list redefinition, then reset existing definition
  if (definition.indicesCollection?.some(indices => indices.isContainer)) {
  return [definition];
@@ -119,34 +93,13 @@ function overwriteContainerIndices(existingIndices, overwriteIndex) {
  /**
  * Insert the given `definition` --- defined within the given scope --- into the passed along `environments` will take care of propagation.
  * Does not modify the passed along `environments` in-place! It returns the new reference.
+ * @see {@link Environment#define} - for details on how (local) definitions are handled.
+ * @see {@link Environment#defineSuper} - for details on how (super) definitions are handled.
  */
- function define(definition, superAssign, environment, config) {
- const name = definition.name;
- (0, assert_1.guard)(name !== undefined, () => `Name must be defined, but isn't for ${JSON.stringify(definition)}`);
- let newEnvironment;
- if (superAssign) {
- newEnvironment = (0, clone_1.cloneEnvironmentInformation)(environment, true);
- let current = newEnvironment.current;
- let last = undefined;
- let found = false;
- do {
- if (current.memory.has(name)) {
- current.memory.set(name, [definition]);
- found = true;
- break;
- }
- last = current;
- current = current.parent;
- } while (!current.builtInEnv);
- if (!found) {
- (0, assert_1.guard)(last !== undefined, () => `Could not find global scope for ${name}`);
- last.memory.set(name, [definition]);
- }
- }
- else {
- newEnvironment = (0, clone_1.cloneEnvironmentInformation)(environment, false);
- defInEnv(newEnvironment.current, name, definition, config);
- }
- return newEnvironment;
+ function define(definition, superAssign, { level, current }, config) {
+ return {
+ level,
+ current: superAssign ? current.defineSuper(definition) : current.define(definition, config),
+ };
  }
  //# sourceMappingURL=define.js.map
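define is now a thin wrapper over the Environment class: local assignments go through Environment#define (which subsumes the removed defInEnv pointer-tracking logic), while super-assignments go through Environment#defineSuper (which, judging by the removed loop, walks the parent chain and falls back to the outermost non-built-in scope). A hedged usage sketch, where envInfo and config are assumed to be an REnvironmentInformation and FlowrConfigOptions the caller already has:

    // Sketch only: the definition object is trimmed down for illustration.
    const { define } = require('@eagleoutice/flowr/dataflow/environments/define');

    const updated = define(
        { name: 'x', nodeId: 0, definedAt: 1 },  // must now carry a `name` (see the new .d.ts signature)
        false,                                   // superAssign: true for `<<-`-style assignments
        envInfo,                                 // { current, level }
        config
    );
    // per the doc comment, `updated` is a fresh { level, current } pair; the input is not modified in place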
package/dataflow/environments/diff.js
@@ -69,7 +69,7 @@ function diffEnvironment(a, b, info, depth) {
  }
  if (a.memory.size !== b.memory.size) {
  info.report.addComment(`${info.position}[at level: ${depth}] Different number of definitions in environment. ${info.leftname}: ${a.memory.size} vs. ${info.rightname}: ${b.memory.size}`);
- (0, diff_1.setDifference)(new Set([...a.memory.keys()]), new Set([...b.memory.keys()]), {
+ (0, diff_1.setDifference)(new Set(a.memory.keys()), new Set(b.memory.keys()), {
  ...info,
  position: `${info.position}[at level: ${depth}] Key comparison. `
  });