@eagleoutice/flowr 2.2.15 → 2.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +226 -6
- package/abstract-interpretation/data-frame/absint-info.d.ts +109 -0
- package/abstract-interpretation/data-frame/absint-info.js +31 -0
- package/abstract-interpretation/data-frame/absint-visitor.d.ts +59 -0
- package/abstract-interpretation/data-frame/absint-visitor.js +173 -0
- package/abstract-interpretation/data-frame/domain.d.ts +107 -0
- package/abstract-interpretation/data-frame/domain.js +315 -0
- package/abstract-interpretation/data-frame/mappers/access-mapper.d.ts +17 -0
- package/abstract-interpretation/data-frame/mappers/access-mapper.js +166 -0
- package/abstract-interpretation/data-frame/mappers/arguments.d.ts +117 -0
- package/abstract-interpretation/data-frame/mappers/arguments.js +188 -0
- package/abstract-interpretation/data-frame/mappers/assignment-mapper.d.ts +20 -0
- package/abstract-interpretation/data-frame/mappers/assignment-mapper.js +34 -0
- package/abstract-interpretation/data-frame/mappers/function-mapper.d.ts +261 -0
- package/abstract-interpretation/data-frame/mappers/function-mapper.js +1219 -0
- package/abstract-interpretation/data-frame/mappers/replacement-mapper.d.ts +12 -0
- package/abstract-interpretation/data-frame/mappers/replacement-mapper.js +206 -0
- package/abstract-interpretation/data-frame/resolve-args.d.ts +42 -0
- package/abstract-interpretation/data-frame/resolve-args.js +118 -0
- package/abstract-interpretation/data-frame/semantics.d.ts +213 -0
- package/abstract-interpretation/data-frame/semantics.js +366 -0
- package/abstract-interpretation/data-frame/shape-inference.d.ts +38 -0
- package/abstract-interpretation/data-frame/shape-inference.js +117 -0
- package/benchmark/slicer.d.ts +18 -2
- package/benchmark/slicer.js +143 -5
- package/benchmark/stats/print.js +123 -45
- package/benchmark/stats/size-of.d.ts +7 -0
- package/benchmark/stats/size-of.js +1 -0
- package/benchmark/stats/stats.d.ts +30 -1
- package/benchmark/stats/stats.js +4 -2
- package/benchmark/summarizer/data.d.ts +33 -2
- package/benchmark/summarizer/first-phase/input.js +5 -1
- package/benchmark/summarizer/first-phase/process.d.ts +2 -1
- package/benchmark/summarizer/first-phase/process.js +49 -3
- package/benchmark/summarizer/second-phase/process.js +101 -3
- package/cli/benchmark-app.d.ts +2 -0
- package/cli/benchmark-app.js +5 -1
- package/cli/benchmark-helper-app.d.ts +2 -0
- package/cli/benchmark-helper-app.js +13 -8
- package/cli/common/options.js +4 -0
- package/cli/export-quads-app.js +2 -1
- package/cli/flowr.js +58 -57
- package/cli/repl/commands/repl-cfg.js +13 -13
- package/cli/repl/commands/repl-commands.js +2 -2
- package/cli/repl/commands/repl-dataflow.js +10 -10
- package/cli/repl/commands/repl-execute.d.ts +2 -3
- package/cli/repl/commands/repl-execute.js +4 -4
- package/cli/repl/commands/repl-lineage.js +4 -4
- package/cli/repl/commands/repl-main.d.ts +12 -1
- package/cli/repl/commands/repl-normalize.js +6 -6
- package/cli/repl/commands/repl-parse.js +2 -2
- package/cli/repl/commands/repl-query.js +9 -9
- package/cli/repl/commands/repl-version.js +1 -1
- package/cli/repl/core.d.ts +5 -2
- package/cli/repl/core.js +10 -8
- package/cli/repl/server/connection.d.ts +3 -1
- package/cli/repl/server/connection.js +7 -5
- package/cli/repl/server/server.d.ts +3 -2
- package/cli/repl/server/server.js +4 -2
- package/cli/script-core/statistics-core.d.ts +2 -1
- package/cli/script-core/statistics-core.js +2 -2
- package/cli/script-core/statistics-helper-core.d.ts +2 -1
- package/cli/script-core/statistics-helper-core.js +5 -4
- package/cli/slicer-app.js +4 -2
- package/cli/statistics-app.js +2 -1
- package/cli/statistics-helper-app.js +2 -1
- package/config.d.ts +43 -10
- package/config.js +47 -43
- package/control-flow/cfg-dead-code.js +45 -2
- package/control-flow/cfg-simplification.d.ts +2 -0
- package/control-flow/control-flow-graph.d.ts +2 -0
- package/control-flow/control-flow-graph.js +8 -0
- package/control-flow/dfg-cfg-guided-visitor.d.ts +5 -3
- package/control-flow/dfg-cfg-guided-visitor.js +15 -4
- package/control-flow/extract-cfg.d.ts +4 -2
- package/control-flow/extract-cfg.js +4 -3
- package/control-flow/semantic-cfg-guided-visitor.d.ts +20 -2
- package/control-flow/semantic-cfg-guided-visitor.js +24 -4
- package/core/pipeline-executor.d.ts +4 -1
- package/core/pipeline-executor.js +6 -5
- package/core/steps/all/core/10-normalize.d.ts +2 -0
- package/core/steps/all/core/10-normalize.js +1 -1
- package/core/steps/all/core/11-normalize-tree-sitter.d.ts +2 -1
- package/core/steps/all/core/11-normalize-tree-sitter.js +2 -2
- package/core/steps/all/core/20-dataflow.d.ts +2 -1
- package/core/steps/all/core/20-dataflow.js +2 -2
- package/core/steps/all/static-slicing/00-slice.d.ts +2 -1
- package/core/steps/all/static-slicing/00-slice.js +2 -2
- package/core/steps/pipeline/default-pipelines.d.ts +32 -31
- package/core/steps/pipeline/default-pipelines.js +8 -8
- package/core/steps/pipeline-step.d.ts +2 -1
- package/dataflow/environments/built-in-config.d.ts +3 -3
- package/dataflow/environments/built-in.d.ts +11 -3
- package/dataflow/environments/built-in.js +5 -3
- package/dataflow/environments/default-builtin-config.js +4 -2
- package/dataflow/environments/define.d.ts +2 -1
- package/dataflow/environments/define.js +4 -5
- package/dataflow/environments/remove.d.ts +6 -0
- package/dataflow/environments/remove.js +29 -0
- package/dataflow/eval/resolve/alias-tracking.d.ts +7 -2
- package/dataflow/eval/resolve/alias-tracking.js +11 -8
- package/dataflow/eval/resolve/resolve-argument.d.ts +8 -0
- package/dataflow/eval/resolve/resolve-argument.js +118 -0
- package/dataflow/eval/resolve/resolve.d.ts +65 -18
- package/dataflow/eval/resolve/resolve.js +144 -48
- package/dataflow/eval/values/string/string-constants.d.ts +1 -1
- package/dataflow/eval/values/string/string-constants.js +7 -2
- package/dataflow/extractor.d.ts +2 -1
- package/dataflow/extractor.js +2 -1
- package/dataflow/internal/process/functions/call/built-in/built-in-access.js +5 -6
- package/dataflow/internal/process/functions/call/built-in/built-in-apply.js +1 -1
- package/dataflow/internal/process/functions/call/built-in/built-in-assignment.d.ts +4 -2
- package/dataflow/internal/process/functions/call/built-in/built-in-assignment.js +11 -11
- package/dataflow/internal/process/functions/call/built-in/built-in-eval.js +10 -11
- package/dataflow/internal/process/functions/call/built-in/built-in-expression-list.js +7 -2
- package/dataflow/internal/process/functions/call/built-in/built-in-for-loop.js +2 -3
- package/dataflow/internal/process/functions/call/built-in/built-in-if-then-else.js +1 -1
- package/dataflow/internal/process/functions/call/built-in/built-in-list.js +2 -2
- package/dataflow/internal/process/functions/call/built-in/built-in-replacement.js +2 -3
- package/dataflow/internal/process/functions/call/built-in/built-in-source.d.ts +6 -3
- package/dataflow/internal/process/functions/call/built-in/built-in-source.js +19 -15
- package/dataflow/internal/process/functions/call/built-in/built-in-vector.js +2 -2
- package/dataflow/internal/process/functions/call/built-in/built-in-while-loop.js +1 -1
- package/dataflow/internal/process/functions/call/common.js +1 -1
- package/dataflow/internal/process/functions/process-parameter.js +1 -1
- package/dataflow/origin/dfg-get-symbol-refs.d.ts +21 -0
- package/dataflow/origin/dfg-get-symbol-refs.js +50 -0
- package/dataflow/processor.d.ts +5 -0
- package/documentation/doc-util/doc-cfg.js +4 -3
- package/documentation/doc-util/doc-code.d.ts +1 -1
- package/documentation/doc-util/doc-dfg.js +3 -2
- package/documentation/doc-util/doc-functions.d.ts +24 -0
- package/documentation/doc-util/doc-functions.js +65 -0
- package/documentation/doc-util/doc-normalized-ast.js +3 -2
- package/documentation/doc-util/doc-print.d.ts +5 -0
- package/documentation/doc-util/doc-print.js +36 -0
- package/documentation/doc-util/doc-query.js +13 -2
- package/documentation/doc-util/doc-repl.js +2 -1
- package/documentation/doc-util/doc-search.js +3 -2
- package/documentation/doc-util/doc-types.d.ts +28 -6
- package/documentation/doc-util/doc-types.js +89 -45
- package/documentation/print-cfg-wiki.js +6 -7
- package/documentation/print-core-wiki.js +5 -5
- package/documentation/print-dataflow-graph-wiki.js +10 -10
- package/documentation/print-engines-wiki.js +1 -2
- package/documentation/print-faq-wiki.js +8 -2
- package/documentation/print-interface-wiki.js +12 -2
- package/documentation/print-linter-issue.d.ts +1 -0
- package/documentation/print-linter-issue.js +71 -0
- package/documentation/print-linter-wiki.js +223 -34
- package/documentation/print-linting-and-testing-wiki.js +2 -4
- package/documentation/print-normalized-ast-wiki.js +3 -3
- package/documentation/print-query-wiki.js +18 -2
- package/documentation/print-readme.js +24 -1
- package/documentation/print-search-wiki.js +1 -2
- package/linter/linter-executor.d.ts +3 -1
- package/linter/linter-executor.js +3 -2
- package/linter/linter-format.d.ts +67 -7
- package/linter/linter-format.js +12 -1
- package/linter/linter-rules.d.ts +178 -16
- package/linter/linter-rules.js +14 -4
- package/linter/linter-tags.d.ts +80 -0
- package/linter/linter-tags.js +85 -0
- package/linter/rules/absolute-path.d.ts +71 -0
- package/linter/rules/absolute-path.js +177 -0
- package/linter/rules/dataframe-access-validation.d.ts +53 -0
- package/linter/rules/dataframe-access-validation.js +116 -0
- package/linter/rules/deprecated-functions.d.ts +43 -0
- package/linter/rules/deprecated-functions.js +58 -0
- package/linter/rules/{2-file-path-validity.d.ts → file-path-validity.d.ts} +16 -6
- package/linter/rules/{2-file-path-validity.js → file-path-validity.js} +21 -13
- package/linter/rules/naming-convention.d.ts +71 -0
- package/linter/rules/naming-convention.js +168 -0
- package/linter/rules/seeded-randomness.d.ts +65 -0
- package/linter/rules/seeded-randomness.js +122 -0
- package/linter/rules/unused-definition.d.ts +41 -0
- package/linter/rules/unused-definition.js +105 -0
- package/package.json +5 -2
- package/queries/base-query-format.d.ts +2 -0
- package/queries/catalog/call-context-query/call-context-query-executor.d.ts +1 -1
- package/queries/catalog/call-context-query/call-context-query-executor.js +2 -2
- package/queries/catalog/cluster-query/cluster-query-format.d.ts +1 -1
- package/queries/catalog/config-query/config-query-executor.d.ts +1 -1
- package/queries/catalog/config-query/config-query-executor.js +2 -3
- package/queries/catalog/control-flow-query/control-flow-query-executor.d.ts +1 -1
- package/queries/catalog/control-flow-query/control-flow-query-executor.js +2 -2
- package/queries/catalog/control-flow-query/control-flow-query-format.d.ts +1 -1
- package/queries/catalog/dataflow-lens-query/dataflow-lens-query-format.d.ts +1 -1
- package/queries/catalog/dataflow-query/dataflow-query-format.d.ts +1 -1
- package/queries/catalog/dependencies-query/dependencies-query-executor.js +4 -116
- package/queries/catalog/dependencies-query/dependencies-query-format.d.ts +1 -1
- package/queries/catalog/df-shape-query/df-shape-query-executor.d.ts +3 -0
- package/queries/catalog/df-shape-query/df-shape-query-executor.js +46 -0
- package/queries/catalog/df-shape-query/df-shape-query-format.d.ts +72 -0
- package/queries/catalog/df-shape-query/df-shape-query-format.js +31 -0
- package/queries/catalog/happens-before-query/happens-before-query-format.d.ts +1 -1
- package/queries/catalog/id-map-query/id-map-query-format.d.ts +1 -1
- package/queries/catalog/lineage-query/lineage-query-format.d.ts +1 -1
- package/queries/catalog/linter-query/linter-query-executor.d.ts +1 -1
- package/queries/catalog/linter-query/linter-query-executor.js +2 -2
- package/queries/catalog/linter-query/linter-query-format.d.ts +1 -1
- package/queries/catalog/linter-query/linter-query-format.js +16 -12
- package/queries/catalog/normalized-ast-query/normalized-ast-query-format.d.ts +1 -1
- package/queries/catalog/origin-query/origin-query-format.d.ts +1 -1
- package/queries/catalog/project-query/project-query-format.d.ts +1 -1
- package/queries/catalog/resolve-value-query/resolve-value-query-executor.d.ts +1 -1
- package/queries/catalog/resolve-value-query/resolve-value-query-executor.js +2 -2
- package/queries/catalog/resolve-value-query/resolve-value-query-format.d.ts +1 -1
- package/queries/catalog/search-query/search-query-executor.d.ts +1 -1
- package/queries/catalog/search-query/search-query-executor.js +2 -2
- package/queries/catalog/search-query/search-query-format.d.ts +1 -1
- package/queries/catalog/static-slice-query/static-slice-query-executor.d.ts +1 -1
- package/queries/catalog/static-slice-query/static-slice-query-executor.js +2 -2
- package/queries/catalog/static-slice-query/static-slice-query-format.d.ts +1 -1
- package/queries/query.d.ts +76 -16
- package/queries/query.js +2 -0
- package/r-bridge/lang-4.x/ast/parser/json/parser.d.ts +2 -1
- package/r-bridge/lang-4.x/ast/parser/json/parser.js +4 -2
- package/r-bridge/lang-4.x/convert-values.js +2 -1
- package/r-bridge/lang-4.x/tree-sitter/tree-sitter-executor.d.ts +3 -1
- package/r-bridge/lang-4.x/tree-sitter/tree-sitter-executor.js +4 -4
- package/r-bridge/lang-4.x/tree-sitter/tree-sitter-normalize.d.ts +1 -1
- package/r-bridge/lang-4.x/tree-sitter/tree-sitter-normalize.js +7 -5
- package/r-bridge/shell.d.ts +3 -2
- package/r-bridge/shell.js +4 -5
- package/search/flowr-search-builder.d.ts +6 -2
- package/search/flowr-search-builder.js +7 -0
- package/search/flowr-search-filters.d.ts +32 -8
- package/search/flowr-search-filters.js +42 -15
- package/search/flowr-search.d.ts +4 -0
- package/search/search-executor/search-enrichers.d.ts +7 -3
- package/search/search-executor/search-enrichers.js +29 -20
- package/search/search-executor/search-generators.js +1 -1
- package/search/search-executor/search-transformer.d.ts +2 -0
- package/search/search-executor/search-transformer.js +10 -1
- package/slicing/static/static-slicer.d.ts +1 -1
- package/slicing/static/static-slicer.js +2 -3
- package/statistics/statistics.d.ts +3 -1
- package/statistics/statistics.js +5 -4
- package/util/containers.d.ts +12 -9
- package/util/containers.js +12 -9
- package/util/files.d.ts +8 -2
- package/util/files.js +22 -4
- package/util/objects.d.ts +5 -4
- package/util/r-value.d.ts +23 -0
- package/util/r-value.js +113 -0
- package/util/range.d.ts +5 -1
- package/util/range.js +11 -3
- package/util/text/strings.d.ts +6 -0
- package/util/text/strings.js +35 -0
- package/util/version.js +1 -1
- package/linter/rules/1-deprecated-functions.d.ts +0 -34
- package/linter/rules/1-deprecated-functions.js +0 -54
- package/util/cfg/cfg.d.ts +0 -0
- package/util/cfg/cfg.js +0 -2
package/search/flowr-search-filters.js
CHANGED

@@ -1,22 +1,41 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.FlowrFilterCombinator = exports.FlowrFilters = exports.ValidFlowrFiltersReverse = exports.ValidFlowrFilters = exports.FlowrFilter = void 0;
+exports.testFunctionsIgnoringPackage = testFunctionsIgnoringPackage;
 exports.binaryTreeToString = binaryTreeToString;
 exports.isBinaryTree = isBinaryTree;
 exports.evalFilter = evalFilter;
 const type_1 = require("../r-bridge/lang-4.x/ast/model/type");
 const vertex_1 = require("../dataflow/graph/vertex");
+const search_enrichers_1 = require("./search-executor/search-enrichers");
 var FlowrFilter;
 (function (FlowrFilter) {
+    /**
+     * Drops search elements that represent empty arguments. Specifically, all nodes that are arguments and have an undefined name are skipped.
+     * This filter does not accept any arguments.
+     */
     FlowrFilter["DropEmptyArguments"] = "drop-empty-arguments";
+    /**
+     * Only returns search elements whose enrichments' JSON representations match a given test regular expression.
+     * This filter accepts {@link MatchesEnrichmentArgs}, which includes the enrichment to match for, as well as the regular expression to test the enrichment's (non-pretty-printed) JSON representation for.
+     * To test for included function names in an enrichment like {@link Enrichment.CallTargets}, the helper function {@link testFunctionsIgnoringPackage} can be used.
+     */
+    FlowrFilter["MatchesEnrichment"] = "matches-enrichment";
 })(FlowrFilter || (exports.FlowrFilter = FlowrFilter = {}));
 exports.ValidFlowrFilters = new Set(Object.values(FlowrFilter));
 exports.ValidFlowrFiltersReverse = Object.fromEntries(Object.entries(FlowrFilter).map(([k, v]) => [v, k]));
 exports.FlowrFilters = {
-    [FlowrFilter.DropEmptyArguments]: (
-        return
-    }
+    [FlowrFilter.DropEmptyArguments]: ((e, _args) => {
+        return e.node.type !== type_1.RType.Argument || e.node.name !== undefined;
+    }),
+    [FlowrFilter.MatchesEnrichment]: ((e, args) => {
+        const content = JSON.stringify((0, search_enrichers_1.enrichmentContent)(e, args.enrichment));
+        return content !== undefined && args.test.test(content);
+    })
 };
+function testFunctionsIgnoringPackage(functions) {
+    return new RegExp(`"(.+:::?)?(${functions.join('|')})"`);
+}
 /**
  * @see {@link FlowrFilterCombinator.is}
  * @see {@link evalFilter}

@@ -28,8 +47,17 @@ class FlowrFilterCombinator {
         this.tree = this.unpack(init);
     }
     static is(value) {
-        if (typeof value === '
-            return new this(value);
+        if (typeof value === 'string' && exports.ValidFlowrFilters.has(value)) {
+            return new this({ type: 'special', value: value });
+        }
+        else if (typeof value === 'object') {
+            const name = value?.name;
+            if (name && exports.ValidFlowrFilters.has(name)) {
+                return new this({ type: 'special', value: value });
+            }
+            else {
+                return new this(value);
+            }
         }
         else if (type_1.ValidRTypes.has(value)) {
             return new this({ type: 'r-type', value: value });

@@ -37,9 +65,6 @@
         else if (vertex_1.ValidVertexTypes.has(value)) {
             return new this({ type: 'vertex-type', value: value });
         }
-        else if (exports.ValidFlowrFilters.has(value)) {
-            return new this({ type: 'special', value: value });
-        }
         else {
             throw new Error(`Invalid filter value: ${value}`);
         }

@@ -97,7 +122,7 @@ const typeToSymbol = {
 };
 function treeToStringImpl(tree, depth) {
     if (tree.type === 'r-type' || tree.type === 'vertex-type' || tree.type === 'special') {
-        return tree.value
+        return typeof tree.value === 'string' ? tree.value : `${tree.value.name}@${JSON.stringify(tree.value.args)}`;
     }
     if (tree.type === 'not') {
         return `${typeToSymbol[tree.type]}${treeToStringImpl(tree.operand, depth)}`;

@@ -114,14 +139,16 @@ const evalVisit = {
     or: ({ left, right }, data) => evalTree(left, data) || evalTree(right, data),
     xor: ({ left, right }, data) => evalTree(left, data) !== evalTree(right, data),
     not: ({ operand }, data) => !evalTree(operand, data),
-    'r-type': ({ value }, {
-    'vertex-type': ({ value }, { dataflow: { graph },
-    'special': ({ value }, {
-        const
+    'r-type': ({ value }, { element }) => element.node.type === value,
+    'vertex-type': ({ value }, { dataflow: { graph }, element }) => graph.getVertex(element.node.info.id)?.tag === value,
+    'special': ({ value }, { element }) => {
+        const name = typeof value === 'string' ? value : value.name;
+        const args = typeof value === 'string' ? undefined : value.args;
+        const getHandler = exports.FlowrFilters[name];
         if (getHandler) {
-            return getHandler(
+            return getHandler(element, args);
         }
-        throw new Error(`
+        throw new Error(`Couldn't find special filter with name ${name}`);
     }
 };
 function evalTree(tree, data) {
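The new `matches-enrichment` filter takes an `{ enrichment, test }` argument object and pairs with the `testFunctionsIgnoringPackage` helper added above. A minimal sketch of how the pieces might be wired together; the deep-import paths and the `Enrichment.CallTargets` member are assumptions inferred from the file layout and the compiled code in this diff, not verified against the published typings:

```ts
import { FlowrFilter, FlowrFilterCombinator, testFunctionsIgnoringPackage } from '@eagleoutice/flowr/search/flowr-search-filters';
import { Enrichment } from '@eagleoutice/flowr/search/search-executor/search-enrichers';

// Keep only elements whose call-targets enrichment mentions read.csv or readRDS,
// with or without a package prefix (e.g. utils::read.csv).
const onlyCsvReaders = FlowrFilterCombinator.is({
    name: FlowrFilter.MatchesEnrichment,
    args: {
        enrichment: Enrichment.CallTargets,
        test:       testFunctionsIgnoringPackage(['read.csv', 'readRDS'])
    }
});
```

As the second hunk above shows, `FlowrFilterCombinator.is` now recognizes such `{ name, args }` objects and routes them to the registered special filter when the filter tree is evaluated.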
package/search/flowr-search.d.ts
CHANGED

@@ -5,6 +5,8 @@ import type { NodeId } from '../r-bridge/lang-4.x/ast/model/processing/node-id';
 import type { DataflowInformation } from '../dataflow/info';
 import type { BaseQueryResult } from '../queries/base-query-format';
 import type { Query } from '../queries/query';
+import type { FlowrConfigOptions } from '../config';
+import type { MarkOptional } from 'ts-essentials';
 /**
  * Yes, for now we do technically not need a wrapper around the RNode, but this allows us to attach caches etc.
  * just for the respective search.

@@ -16,6 +18,7 @@ export interface FlowrSearchElementFromQuery<Info> extends FlowrSearchElement<In
     readonly query: Query['type'];
     readonly queryResult: BaseQueryResult;
 }
+export type FlowrSearchElementMaybeFromQuery<Info> = MarkOptional<FlowrSearchElementFromQuery<Info>, 'query' | 'queryResult'>;
 export interface FlowrSearchNodeBase<Type extends string, Name extends string, Args extends Record<string, unknown> | undefined> {
     readonly type: Type;
     readonly name: Name;

@@ -49,6 +52,7 @@ export interface FlowrSearchGetFilter extends Record<string, unknown> {
 type MinimumInputForFlowrSearch<P extends Pipeline> = PipelineStepOutputWithName<P, 'normalize'> extends NormalizedAst ? (PipelineStepOutputWithName<P, 'dataflow'> extends DataflowInformation ? PipelineOutput<P> & {
     normalize: NormalizedAst;
     dataflow: DataflowInformation;
+    config: FlowrConfigOptions;
 } : never) : never;
 /** we allow any pipeline, which provides us with a 'normalize' and 'dataflow' step */
 export type FlowrSearchInput<P extends Pipeline> = MinimumInputForFlowrSearch<P>;
package/search/search-executor/search-enrichers.d.ts
CHANGED

@@ -17,7 +17,7 @@ export interface EnrichmentData<EnrichmentContent extends MergeableRecord, Enrich
     /**
      * A function that is applied to each element of the search to enrich it with additional data.
      */
-    readonly enrich: (e: FlowrSearchElement<ParentInformation>, data: FlowrSearchInput<Pipeline>, args: EnrichmentArguments | undefined) => EnrichmentContent;
+    readonly enrich: (e: FlowrSearchElement<ParentInformation>, data: FlowrSearchInput<Pipeline>, args: EnrichmentArguments | undefined, previousValue: EnrichmentContent | undefined) => EnrichmentContent;
     /**
      * The mapping function used by the {@link Mapper.Enrichment} mapper.
      */

@@ -52,14 +52,18 @@ export declare const Enrichments: {
         enrich: (e: FlowrSearchElement<ParentInformation>, data: import("../../core/steps/pipeline/pipeline").PipelineOutput<Pipeline<import("../../core/steps/pipeline-step").IPipelineStep<import("../../core/steps/pipeline-step").PipelineStepName, (...args: any[]) => any>>> & {
             normalize: import("../../r-bridge/lang-4.x/ast/model/processing/decorate").NormalizedAst;
             dataflow: import("../../dataflow/info").DataflowInformation;
-
+            config: import("../../config").FlowrConfigOptions;
+        }, args: {
+            onlyBuiltin?: boolean;
+        } | undefined, prev: CallTargetsContent | undefined) => CallTargetsContent;
         mapper: ({ targets }: CallTargetsContent) => FlowrSearchElement<ParentInformation>[];
     };
     readonly "last-call": {
         enrich: (e: FlowrSearchElement<ParentInformation>, data: import("../../core/steps/pipeline/pipeline").PipelineOutput<Pipeline<import("../../core/steps/pipeline-step").IPipelineStep<import("../../core/steps/pipeline-step").PipelineStepName, (...args: any[]) => any>>> & {
             normalize: import("../../r-bridge/lang-4.x/ast/model/processing/decorate").NormalizedAst;
             dataflow: import("../../dataflow/info").DataflowInformation;
-
+            config: import("../../config").FlowrConfigOptions;
+        }, args: Omit<LinkToLastCall<string | RegExp>, "type">[] | undefined, prev: LastCallContent | undefined) => LastCallContent;
         mapper: ({ linkedIds }: LastCallContent) => FlowrSearchElement<ParentInformation>[];
     };
 };
package/search/search-executor/search-enrichers.js
CHANGED

@@ -24,7 +24,7 @@ var Enrichment;
  */
 exports.Enrichments = {
     [Enrichment.CallTargets]: {
-        enrich: (e, data) => {
+        enrich: (e, data, args, prev) => {
             // we don't resolve aliases here yet!
             const content = { targets: [] };
             const callVertex = data.dataflow.graph.getVertex(e.node.info.id);

@@ -32,34 +32,42 @@ exports.Enrichments = {
             const origins = (0, dfg_get_origin_1.getOriginInDfg)(data.dataflow.graph, callVertex.id);
             if (!origins || origins.length === 0) {
                 content.targets = [(0, node_id_1.recoverName)(callVertex.id, data.normalize.idMap)];
-                return content;
             }
-
-
-
-
-
-
-
-
-
-
-
+            else {
+                // find call targets in user code (which have ids!)
+                content.targets = content.targets.concat(origins.map(o => {
+                    switch (o.type) {
+                        case 2 /* OriginType.FunctionCallOrigin */:
+                            return {
+                                node: data.normalize.idMap.get(o.id),
+                            };
+                        case 3 /* OriginType.BuiltInFunctionOrigin */:
+                            return o.fn.name;
+                        default:
+                            return undefined;
+                    }
+                }).filter(assert_1.isNotUndefined));
+                if (content.targets.length === 0) {
+                    content.targets = [(0, node_id_1.recoverName)(callVertex.id, data.normalize.idMap)];
                 }
-            }).filter(assert_1.isNotUndefined));
-            if (content.targets.length === 0) {
-                content.targets = [(0, node_id_1.recoverName)(callVertex.id, data.normalize.idMap)];
             }
             }
+            // if there is a call target that is not built-in (ie a custom function), we don't want to include it here
+            if (args?.onlyBuiltin && content.targets.some(t => typeof t !== 'string')) {
+                content.targets = [];
+            }
+            if (prev) {
+                content.targets.push(...prev.targets);
+            }
             return content;
         },
         // as built-in call target enrichments are not nodes, we don't return them as part of the mapper!
         mapper: ({ targets }) => targets.map(t => t).filter(t => t.node !== undefined)
     },
     [Enrichment.LastCall]: {
-        enrich: (e, data, args) => {
+        enrich: (e, data, args, prev) => {
             (0, assert_1.guard)(args && args.length, `${Enrichment.LastCall} enrichment requires at least one argument`);
-            const content = { linkedIds: [] };
+            const content = prev ?? { linkedIds: [] };
             const vertex = data.dataflow.graph.get(e.node.info.id);
             if (vertex !== undefined && vertex[0].tag === vertex_1.VertexType.FunctionCall) {
                 const cfg = (0, extract_cfg_1.extractSimpleCfg)(data.normalize);

@@ -90,11 +98,12 @@ function enrichmentContent(e, enrichment) {
 }
 function enrich(e, data, enrichment, args) {
     const enrichmentData = exports.Enrichments[enrichment];
+    const prev = e?.enrichments;
     return {
         ...e,
         enrichments: {
-            ...
-            [enrichment]: enrichmentData.enrich(e, data, args)
+            ...prev ?? {},
+            [enrichment]: enrichmentData.enrich(e, data, args, prev?.[enrichment])
         }
     };
 }
package/search/search-executor/search-generators.js
CHANGED

@@ -56,7 +56,7 @@ function generateFrom(data, args) {
 }
 function generateFromQuery(data, args) {
     const nodes = new Set();
-    const result = (0, query_1.executeQueries)({ ast: data.normalize, dataflow: data.dataflow }, args.from);
+    const result = (0, query_1.executeQueries)({ ast: data.normalize, dataflow: data.dataflow, config: data.config }, args.from);
     for (const [query, content] of Object.entries(result)) {
         if (query === '.meta') {
             continue;
package/search/search-executor/search-transformer.d.ts
CHANGED

@@ -29,6 +29,7 @@ export declare const transformers: {
     readonly skip: typeof getSkip;
     readonly filter: typeof getFilter;
     readonly merge: typeof getMerge;
+    readonly unique: typeof getUnique;
     readonly select: typeof getSelect;
     readonly with: typeof getWith;
     readonly map: typeof getMap;

@@ -66,4 +67,5 @@ declare function getMerge<Elements extends FlowrSearchElement<ParentInformation>
     search: unknown[];
     generator: FlowrSearchGeneratorNode;
 }): FlowrSearchElements<ParentInformation, FlowrSearchElement<ParentInformation>[]>;
+declare function getUnique<Elements extends FlowrSearchElement<ParentInformation>[], FSE extends FlowrSearchElements<ParentInformation, Elements>>(data: FlowrSearchInput<Pipeline>, elements: FSE): CascadeEmpty<Elements, Elements>;
 export {};
package/search/search-executor/search-transformer.js
CHANGED

@@ -19,6 +19,7 @@ exports.transformers = {
     skip: getSkip,
     filter: getFilter,
     merge: getMerge,
+    unique: getUnique,
     select: getSelect,
     with: getWith,
     map: getMap

@@ -92,7 +93,7 @@ function getSkip(data, elements, { count }) {
     return elements.mutate(e => sortFully(e).slice(count));
 }
 function getFilter(data, elements, { filter }) {
-    return elements.mutate(e => e.filter(
+    return elements.mutate(e => e.filter(e => (0, flowr_search_filters_1.evalFilter)(filter, { element: e, normalize: data.normalize, dataflow: data.dataflow })));
 }
 function getWith(data, elements, { info, args }) {
     return elements.mutate(elements => elements.map(e => (0, search_enrichers_1.enrich)(e, data, info, args)));

@@ -106,4 +107,12 @@ data, elements, other) {
     const resultOther = (0, flowr_search_executor_1.runSearch)(other, data);
     return elements.addAll(resultOther);
 }
+function getUnique(data, elements) {
+    return elements.mutate(e => e.reduce((acc, cur) => {
+        if (!acc.some(el => el.node.id === cur.node.id)) {
+            acc.push(cur);
+        }
+        return acc;
+    }, []));
+}
 //# sourceMappingURL=search-transformer.js.map
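The new `unique` transformer keeps the first occurrence of every node id. A rough standalone sketch of the same deduplication logic, with the element type narrowed to the single field the transformer compares; the real `FlowrSearchElement` carries more information, and the step is presumably also reachable from the search builder, whose files change in this release as well:

```ts
interface ElementLike {
    readonly node: { readonly id: string | number };
}

/** Keeps the first occurrence of every node id, preserving order (mirrors getUnique above). */
function uniqueByNodeId<E extends ElementLike>(elements: readonly E[]): E[] {
    return elements.reduce<E[]>((acc, cur) => {
        if (!acc.some(el => el.node.id === cur.node.id)) {
            acc.push(cur);
        }
        return acc;
    }, []);
}

// uniqueByNodeId([{ node: { id: 1 } }, { node: { id: 1 } }, { node: { id: 2 } }]) keeps two elements.
```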
package/slicing/static/static-slicer.d.ts
CHANGED

@@ -14,7 +14,7 @@ export declare const slicerLogger: import("tslog").Logger<import("tslog").ILogOb
 *
 * @param graph - The dataflow graph to conduct the slicing on.
 * @param ast - The normalized AST of the code (used to get static nesting information of the lexemes in case of control flow dependencies that may have no effect on the slicing scope).
-* @param criteria - The
+* @param criteria - The criteria to slice on.
 * @param threshold - The maximum number of nodes to visit in the graph. If the threshold is reached, the slice will side with inclusion and drop its minimal guarantee. The limit ensures that the algorithm halts.
 * @param cache - A cache to store the results of the slice. If provided, the slice may use this cache to speed up the slicing process.
 */
package/slicing/static/static-slicer.js
CHANGED

@@ -12,7 +12,6 @@ const parse_1 = require("../criterion/parse");
 const environment_1 = require("../../dataflow/environments/environment");
 const vertex_1 = require("../../dataflow/graph/vertex");
 const edge_1 = require("../../dataflow/graph/edge");
-const config_1 = require("../../config");
 exports.slicerLogger = log_1.log.getSubLogger({ name: 'slicer' });
 /**
  * This returns the ids to include in the static backward slice, when slicing with the given seed id's (must be at least one).

@@ -21,11 +20,11 @@ exports.slicerLogger = log_1.log.getSubLogger({ name: 'slicer' });
 *
 * @param graph - The dataflow graph to conduct the slicing on.
 * @param ast - The normalized AST of the code (used to get static nesting information of the lexemes in case of control flow dependencies that may have no effect on the slicing scope).
-* @param criteria - The
+* @param criteria - The criteria to slice on.
 * @param threshold - The maximum number of nodes to visit in the graph. If the threshold is reached, the slice will side with inclusion and drop its minimal guarantee. The limit ensures that the algorithm halts.
 * @param cache - A cache to store the results of the slice. If provided, the slice may use this cache to speed up the slicing process.
 */
-function staticSlicing(graph, { idMap }, criteria, threshold =
+function staticSlicing(graph, { idMap }, criteria, threshold = 75, cache) {
     (0, assert_1.guard)(criteria.length > 0, 'must have at least one seed id to calculate slice');
     const decodedCriteria = (0, parse_1.convertAllSlicingCriteriaToIds)(criteria, idMap);
     (0, log_1.expensiveTrace)(exports.slicerLogger, () => `calculating slice for ${decodedCriteria.length} seed criteria: ${decodedCriteria.map(s => JSON.stringify(s)).join(', ')}`);
package/statistics/statistics.d.ts
CHANGED

@@ -4,6 +4,7 @@ import type { PipelineOutput } from '../core/steps/pipeline/pipeline';
 import { DEFAULT_DATAFLOW_PIPELINE } from '../core/steps/pipeline/default-pipelines';
 import type { RShell } from '../r-bridge/shell';
 import type { FeatureSelection, FeatureStatistics } from './features/feature';
+import type { FlowrConfigOptions } from '../config';
 /**
 * By default, {@link extractUsageStatistics} requires a generator, but sometimes you already know all the files
 * that you want to process. This function simply reps your requests as a generator.

@@ -14,13 +15,14 @@ type DataflowResult = PipelineOutput<typeof DEFAULT_DATAFLOW_PIPELINE>;
 * Extract all wanted statistic information from a set of requests using the presented R session.
 *
 * @param shell - The R session to use
+* @param config - The flowr config
 * @param onRequest - A callback that is called at the beginning of each request, this may be used to debug the requests.
 * @param features - The features to extract (see {@link allFeatureNames}).
 * @param requests - The requests to extract the features from. May generate them on demand (e.g., by traversing a folder).
 * If your request is statically known, you can use {@link staticRequests} to create this generator.
 * @param rootPath - The root path to the project, this is used to relativize the file paths in the statistics.
 */
-export declare function extractUsageStatistics<T extends RParseRequestFromText | RParseRequestFromFile>(shell: RShell, onRequest: (request: T) => void, features: FeatureSelection, requests: AsyncGenerator<T>, rootPath?: string): Promise<{
+export declare function extractUsageStatistics<T extends RParseRequestFromText | RParseRequestFromFile>(shell: RShell, config: FlowrConfigOptions, onRequest: (request: T) => void, features: FeatureSelection, requests: AsyncGenerator<T>, rootPath?: string): Promise<{
     features: FeatureStatistics;
     meta: MetaStatistics;
     outputs: Map<T, DataflowResult>;
package/statistics/statistics.js
CHANGED

@@ -30,13 +30,14 @@ function staticRequests(...requests) {
 * Extract all wanted statistic information from a set of requests using the presented R session.
 *
 * @param shell - The R session to use
+* @param config - The flowr config
 * @param onRequest - A callback that is called at the beginning of each request, this may be used to debug the requests.
 * @param features - The features to extract (see {@link allFeatureNames}).
 * @param requests - The requests to extract the features from. May generate them on demand (e.g., by traversing a folder).
 * If your request is statically known, you can use {@link staticRequests} to create this generator.
 * @param rootPath - The root path to the project, this is used to relativize the file paths in the statistics.
 */
-async function extractUsageStatistics(shell, onRequest, features, requests, rootPath) {
+async function extractUsageStatistics(shell, config, onRequest, features, requests, rootPath) {
     let result = initializeFeatureStatistics();
     const meta = (0, meta_statistics_1.initialMetaStatistics)();
     const outputs = new Map();

@@ -46,7 +47,7 @@ async function extractUsageStatistics(shell, onRequest, features, requests, root
     const suffix = request.request === 'file' ? request.content.replace(new RegExp('^' + (rootPath ?? '')), '') : undefined;
     try {
         let output;
-        ({ stats: result, output } = await extractSingle(result, shell, request, features, suffix));
+        ({ stats: result, output } = await extractSingle(result, shell, request, features, suffix, config));
         outputs.set(request, output);
         processMetaOnSuccessful(meta, request);
         meta.numberOfNormalizedNodes.push(output.normalize.idMap.size);

@@ -80,10 +81,10 @@ function processMetaOnSuccessful(meta, request) {
     }
 }
 const parser = new xmldom_1.DOMParser();
-async function extractSingle(result, shell, request, features, suffixFilePath) {
+async function extractSingle(result, shell, request, features, suffixFilePath, config) {
     const slicerOutput = await new pipeline_executor_1.PipelineExecutor(default_pipelines_1.DEFAULT_DATAFLOW_PIPELINE, {
         request, parser: shell
-    }).allRemainingSteps();
+    }, config).allRemainingSteps();
     // retrieve parsed xml through (legacy) xmlparsedata
     const suffix = request.request === 'file' ? ', encoding="utf-8"' : '';
     shell.sendCommands(`try(flowr_parsed<-parse(${request.request}=${JSON.stringify(request.content)},keep.source=TRUE${suffix}),silent=FALSE)`, 'try(flowr_output<-xmlparsedata::xml_parse_data(flowr_parsed,includeText=TRUE,pretty=FALSE),silent=FALSE)');
package/util/containers.d.ts
CHANGED

@@ -13,7 +13,7 @@ export declare function getAccessOperands<OtherInfo>(args: readonly RFunctionArg
     accessArg: RArgument<OtherInfo & ParentInformation> | undefined;
 };
 /**
-* Resolves the passed name
+* Resolves the passed name within the passed environment and returns the indicesCollection of the resolved definitions.
 *
 * @param name - Name to resolve
 * @param environment - Environment in which name is resolved

@@ -25,9 +25,10 @@ export declare function resolveIndicesByName(name: Identifier, environment: REnv
 *
 * If no indices could be found that match the `accessArg`, the original indices are returned as overapproximation.
 *
-* @param accessedArg
-* @param accessArg
-* @param environment
+* @param accessedArg - The argument to resolve
+* @param accessArg - The argument which is used to filter the indices
+* @param environment - The environment in which {@link accessedArg} is resolved
+* @param isIndexBasedAccess - Whether the access is index-based (e.g. `x[1]`) or name-based (e.g. `x$name`)
 * @returns The filtered {@link ContainerIndicesCollection} of the resolved {@link accessedArg}
 */
 export declare function resolveSingleIndex(accessedArg: {

@@ -38,8 +39,9 @@ export declare function resolveSingleIndex(accessedArg: {
 /**
 * Filters the single indices of the {@link indicesCollection} according to the lexeme of the {@link accessArg}.
 *
-* @param indicesCollection
-* @param accessArg
+* @param indicesCollection - The {@link ContainerIndicesCollection} to filter
+* @param accessArg - The argument which is used to filter {@link indicesCollection}
+* @param isIndexBasedAccess - Whether the access is index-based (e.g. `x[1]`) or name-based (e.g. `x$name`)
 * @returns The filtered copy of {@link indicesCollection}
 */
 export declare function filterIndices(indicesCollection: ContainerIndicesCollection, accessArg: {

@@ -54,8 +56,9 @@ export declare function filterIndices(indicesCollection: ContainerIndicesCollect
 * ```
 * would result in a list with the index `credentials`, which has the subIndex `username`.
 *
-* @param accessedArg
-* @param leafIndices
+* @param accessedArg - The top level argument that is accessed
+* @param leafIndices - The index at the end of the nested access i.e. `c` in `a$b$c`.
+* @param constructIdentifier - A function that constructs the identifier for the index from the argument
 * @returns The constructed nested access
 */
 export declare function constructNestedAccess<OtherInfo>(accessedArg: RAccess<OtherInfo & ParentInformation>, leafIndices: ContainerIndices, constructIdentifier: (arg: RArgument<OtherInfo & ParentInformation>) => IndexIdentifier): ContainerIndices[];

@@ -63,6 +66,6 @@ export declare function constructNestedAccess<OtherInfo>(accessedArg: RAccess<Ot
 * Adds the passed list of {@link leafSubIndices} to the leaf (sub-)indices of {@link indicesCollection}.
 *
 * @param indicesCollection - Indices where to add the sub indices.
-* @param leafSubIndices
+* @param leafSubIndices - Indices that are added to the leaf indices.
 */
 export declare function addSubIndicesToLeafIndices(indicesCollection: ContainerIndices[], leafSubIndices: ContainerIndices[]): ContainerIndices[];
package/util/containers.js
CHANGED

@@ -20,7 +20,7 @@ function getAccessOperands(args) {
     return { accessedArg, accessArg };
 }
 /**
-* Resolves the passed name
+* Resolves the passed name within the passed environment and returns the indicesCollection of the resolved definitions.
 *
 * @param name - Name to resolve
 * @param environment - Environment in which name is resolved

@@ -35,9 +35,10 @@ function resolveIndicesByName(name, environment) {
 *
 * If no indices could be found that match the `accessArg`, the original indices are returned as overapproximation.
 *
-* @param accessedArg
-* @param accessArg
-* @param environment
+* @param accessedArg - The argument to resolve
+* @param accessArg - The argument which is used to filter the indices
+* @param environment - The environment in which {@link accessedArg} is resolved
+* @param isIndexBasedAccess - Whether the access is index-based (e.g. `x[1]`) or name-based (e.g. `x$name`)
 * @returns The filtered {@link ContainerIndicesCollection} of the resolved {@link accessedArg}
 */
 function resolveSingleIndex(accessedArg, accessArg, environment, isIndexBasedAccess) {

@@ -55,8 +56,9 @@ function resolveSingleIndex(accessedArg, accessArg, environment, isIndexBasedAcc
 /**
 * Filters the single indices of the {@link indicesCollection} according to the lexeme of the {@link accessArg}.
 *
-* @param indicesCollection
-* @param accessArg
+* @param indicesCollection - The {@link ContainerIndicesCollection} to filter
+* @param accessArg - The argument which is used to filter {@link indicesCollection}
+* @param isIndexBasedAccess - Whether the access is index-based (e.g. `x[1]`) or name-based (e.g. `x$name`)
 * @returns The filtered copy of {@link indicesCollection}
 */
 function filterIndices(indicesCollection, accessArg, isIndexBasedAccess) {

@@ -83,8 +85,9 @@ function filterIndices(indicesCollection, accessArg, isIndexBasedAccess) {
 * ```
 * would result in a list with the index `credentials`, which has the subIndex `username`.
 *
-* @param accessedArg
-* @param leafIndices
+* @param accessedArg - The top level argument that is accessed
+* @param leafIndices - The index at the end of the nested access i.e. `c` in `a$b$c`.
+* @param constructIdentifier - A function that constructs the identifier for the index from the argument
 * @returns The constructed nested access
 */
 function constructNestedAccess(accessedArg, leafIndices, constructIdentifier) {

@@ -116,7 +119,7 @@ function constructNestedAccess(accessedArg, leafIndices, constructIdentifier) {
 * Adds the passed list of {@link leafSubIndices} to the leaf (sub-)indices of {@link indicesCollection}.
 *
 * @param indicesCollection - Indices where to add the sub indices.
-* @param leafSubIndices
+* @param leafSubIndices - Indices that are added to the leaf indices.
 */
 function addSubIndicesToLeafIndices(indicesCollection, leafSubIndices) {
     const result = [];
package/util/files.d.ts
CHANGED

@@ -40,17 +40,23 @@ export declare function writeTableAsCsv(table: Table, file: string, sep?: string
 /**
 * Reads a file line by line and calls the given function for each line.
 * The `lineNumber` starts at `0`.
+* The `maxLines` option limits the maximum number of read lines and is `Infinity` by default.
+*
+* @returns Whether all lines have been successfully read (`false` if `maxLines` was reached)
 *
 * See {@link readLineByLineSync} for a synchronous version.
 */
-export declare function readLineByLine(filePath: string, onLine: (line: Buffer, lineNumber: number) => Promise<void
+export declare function readLineByLine(filePath: string, onLine: (line: Buffer, lineNumber: number) => Promise<void>, maxLines?: number): Promise<boolean>;
 /**
 * Reads a file line by line and calls the given function for each line.
 * The `lineNumber` starts at `0`.
+* The `maxLines` option limits the maximum number of read lines and is `Infinity` by default.
+*
+* @returns Whether the file exists and all lines have been successfully read (`false` if `maxLines` was reached)
 *
 * See {@link readLineByLine} for an asynchronous version.
 */
-export declare function readLineByLineSync(filePath: string, onLine: (line: Buffer, lineNumber: number) => void):
+export declare function readLineByLineSync(filePath: string, onLine: (line: Buffer, lineNumber: number) => void, maxLines?: number): boolean;
 /**
 * Chops off the last part of the given directory path after a path separator, essentially returning the path's parent directory.
 * If an absolute path is passed, the returned path is also absolute.
package/util/files.js
CHANGED

@@ -124,36 +124,54 @@ function writeTableAsCsv(table, file, sep = ',', newline = '\n') {
 /**
 * Reads a file line by line and calls the given function for each line.
 * The `lineNumber` starts at `0`.
+* The `maxLines` option limits the maximum number of read lines and is `Infinity` by default.
+*
+* @returns Whether all lines have been successfully read (`false` if `maxLines` was reached)
 *
 * See {@link readLineByLineSync} for a synchronous version.
 */
-async function readLineByLine(filePath, onLine) {
+async function readLineByLine(filePath, onLine, maxLines = Infinity) {
+    if (!(await fs_1.default.promises.stat(filePath).catch(() => { }))?.isFile()) {
+        log_1.log.warn(`File ${filePath} does not exist`);
+        return false;
+    }
     const reader = new n_readlines_1.default(filePath);
     let line;
     let counter = 0;
     // eslint-disable-next-line no-cond-assign
     while (line = reader.next()) {
+        if (counter >= maxLines) {
+            return false;
+        }
         await onLine(line, counter++);
     }
+    return true;
 }
 /**
 * Reads a file line by line and calls the given function for each line.
 * The `lineNumber` starts at `0`.
+* The `maxLines` option limits the maximum number of read lines and is `Infinity` by default.
+*
+* @returns Whether the file exists and all lines have been successfully read (`false` if `maxLines` was reached)
 *
 * See {@link readLineByLine} for an asynchronous version.
 */
-function readLineByLineSync(filePath, onLine) {
-    if (!fs_1.default.
+function readLineByLineSync(filePath, onLine, maxLines = Infinity) {
+    if (!fs_1.default.statSync(filePath, { throwIfNoEntry: false })?.isFile()) {
         log_1.log.warn(`File ${filePath} does not exist`);
-        return;
+        return false;
     }
     const reader = new n_readlines_1.default(filePath);
     let line;
     let counter = 0;
     // eslint-disable-next-line no-cond-assign
     while (line = reader.next()) {
+        if (counter >= maxLines) {
+            return false;
+        }
        onLine(line, counter++);
     }
+    return true;
 }
 /**
 * Chops off the last part of the given directory path after a path separator, essentially returning the path's parent directory.
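Both line readers now accept an optional `maxLines` cap and report success as a boolean instead of returning nothing. A small usage sketch against the new synchronous signature; the deep-import path is an assumption based on the file layout above:

```ts
import { readLineByLineSync } from '@eagleoutice/flowr/util/files';

// Collect at most the first 100 lines of a file.
const lines: string[] = [];
const readCompletely = readLineByLineSync('data.csv', (line, lineNumber) => {
    lines.push(`${lineNumber}: ${line.toString()}`);
}, 100);

if (!readCompletely) {
    // Either the file does not exist or it has more than 100 lines.
    console.warn('input was missing or truncated at 100 lines');
}
```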
package/util/objects.d.ts
CHANGED

@@ -1,4 +1,4 @@
-import type { DeepPartial, DeepRequired } from 'ts-essentials';
+import type { DeepPartial, DeepReadonly, DeepRequired } from 'ts-essentials';
 /**
 * checks if `item` is an object (it may be an array, ...)
 */

@@ -6,13 +6,14 @@ export declare function isObjectOrArray(item: unknown): boolean;
 export type MergeableRecord = Record<string, unknown>;
 export type MergeableArray = unknown[];
 export type Mergeable = MergeableRecord | MergeableArray;
+type OrReadonly<T> = T | Readonly<T> | DeepReadonly<T>;
 /**
 * Given two objects deeply merges them, if an object is an array it will merge the array values!
 * Guarantees some type safety by requiring objects to merge to be from the same type (allows undefined)
 */
-export declare function deepMergeObject<T extends Mergeable>(base: Required<T
-export declare function deepMergeObject<T extends Mergeable>(base: DeepRequired<T
-export declare function deepMergeObject<T extends Mergeable>(base: T
+export declare function deepMergeObject<T extends Mergeable>(base: Required<OrReadonly<T>>, addon?: T | DeepPartial<T> | Partial<T>): Required<T>;
+export declare function deepMergeObject<T extends Mergeable>(base: DeepRequired<OrReadonly<T>>, addon?: T | DeepPartial<T> | Partial<T>): DeepRequired<T>;
+export declare function deepMergeObject<T extends Mergeable>(base: OrReadonly<T>, addon?: DeepPartial<T> | Partial<T>): T;
 export declare function deepMergeObject(base: Mergeable, addon: Mergeable): Mergeable;
 export declare function deepMergeObject(base?: Mergeable, addon?: Mergeable): Mergeable | undefined;
 type Defined<T> = Exclude<T, undefined>;
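The widened `deepMergeObject` overloads accept `Readonly` and `DeepReadonly` bases. This mainly matters when the base contains arrays, since `DeepReadonly` turns them into readonly arrays that the previous `Required<T>`/`DeepRequired<T>`/`T` base types rejected. A hedged sketch under that assumption; the import path is inferred from the file layout and `Settings` is a made-up type for illustration:

```ts
import type { DeepReadonly } from 'ts-essentials';
import { deepMergeObject } from '@eagleoutice/flowr/util/objects';

interface Settings extends Record<string, unknown> {
    threshold: number;
    ignore:    string[];
}

// A frozen default: `ignore` becomes a readonly string[] here.
const defaults: DeepReadonly<Settings> = { threshold: 75, ignore: ['tests/'] };

// With 2.3.0 the readonly base is accepted directly; readonly arrays are not
// assignable to string[], so the old overloads required a cast first.
const merged: Settings = deepMergeObject<Settings>(defaults, { threshold: 100 });
```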