@eagleoutice/flowr 2.1.0 → 2.1.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +2 -1
- package/cli/repl/commands/repl-parse.js +7 -16
- package/cli/repl/commands/repl-query.js +72 -3
- package/core/print/parse-printer.js +1 -22
- package/core/steps/pipeline/default-pipelines.d.ts +64 -0
- package/core/steps/pipeline/default-pipelines.js +2 -1
- package/dataflow/cluster.d.ts +20 -0
- package/dataflow/cluster.js +46 -0
- package/dataflow/graph/edge.d.ts +2 -3
- package/dataflow/graph/graph.d.ts +1 -1
- package/documentation/data/server/doc-data-server-messages.d.ts +1 -1
- package/documentation/data/server/doc-data-server-messages.js +10 -4
- package/documentation/doc-util/doc-code.d.ts +1 -0
- package/documentation/doc-util/doc-code.js +9 -0
- package/documentation/doc-util/doc-dfg.d.ts +1 -0
- package/documentation/doc-util/doc-dfg.js +5 -4
- package/documentation/doc-util/doc-normalized-ast.js +2 -2
- package/documentation/doc-util/doc-query.js +5 -7
- package/documentation/doc-util/doc-server-message.js +3 -5
- package/documentation/doc-util/doc-types.js +19 -11
- package/documentation/print-dataflow-graph-wiki.js +10 -1
- package/documentation/print-interface-wiki.js +1 -1
- package/documentation/print-normalized-ast-wiki.js +2 -2
- package/documentation/print-query-wiki.js +164 -37
- package/package.json +1 -1
- package/queries/{call-context-query → catalog/call-context-query}/call-context-query-executor.d.ts +4 -3
- package/queries/{call-context-query → catalog/call-context-query}/call-context-query-executor.js +22 -16
- package/queries/{call-context-query → catalog/call-context-query}/call-context-query-format.d.ts +6 -2
- package/queries/catalog/cluster-query/cluster-query-executor.d.ts +3 -0
- package/queries/catalog/cluster-query/cluster-query-executor.js +19 -0
- package/queries/catalog/cluster-query/cluster-query-format.d.ts +12 -0
- package/queries/catalog/cluster-query/cluster-query-format.js +3 -0
- package/queries/catalog/dataflow-query/dataflow-query-executor.d.ts +3 -0
- package/queries/catalog/dataflow-query/dataflow-query-executor.js +17 -0
- package/queries/catalog/dataflow-query/dataflow-query-format.d.ts +12 -0
- package/queries/catalog/dataflow-query/dataflow-query-format.js +3 -0
- package/queries/catalog/id-map-query/id-map-query-executor.d.ts +3 -0
- package/queries/catalog/id-map-query/id-map-query-executor.js +17 -0
- package/queries/catalog/id-map-query/id-map-query-format.d.ts +8 -0
- package/{r-bridge/lang-4.x/ast/parser/xml/normalizer-data.js → queries/catalog/id-map-query/id-map-query-format.js} +1 -1
- package/queries/catalog/normalized-ast-query/normalized-ast-query-executor.d.ts +3 -0
- package/queries/catalog/normalized-ast-query/normalized-ast-query-executor.js +17 -0
- package/queries/catalog/normalized-ast-query/normalized-ast-query-format.d.ts +11 -0
- package/queries/catalog/normalized-ast-query/normalized-ast-query-format.js +3 -0
- package/queries/catalog/static-slice-query/static-slice-query-executor.d.ts +4 -0
- package/queries/catalog/static-slice-query/static-slice-query-executor.js +40 -0
- package/queries/catalog/static-slice-query/static-slice-query-format.d.ts +24 -0
- package/queries/catalog/static-slice-query/static-slice-query-format.js +3 -0
- package/queries/query-schema.d.ts +5 -0
- package/queries/query-schema.js +22 -3
- package/queries/query.d.ts +19 -4
- package/queries/query.js +12 -2
- package/r-bridge/lang-4.x/ast/parser/json/format.d.ts +37 -13
- package/r-bridge/lang-4.x/ast/parser/json/format.js +59 -6
- package/r-bridge/lang-4.x/ast/parser/json/parser.d.ts +0 -3
- package/r-bridge/lang-4.x/ast/parser/json/parser.js +2 -40
- package/r-bridge/lang-4.x/ast/parser/main/internal/control/normalize-if-then-else.d.ts +15 -0
- package/r-bridge/lang-4.x/ast/parser/{xml → main}/internal/control/normalize-if-then-else.js +5 -8
- package/r-bridge/lang-4.x/ast/parser/{xml → main}/internal/control/normalize-if-then.d.ts +7 -7
- package/r-bridge/lang-4.x/ast/parser/{xml → main}/internal/control/normalize-if-then.js +11 -12
- package/r-bridge/lang-4.x/ast/parser/{xml → main}/internal/expression/normalize-expression.d.ts +3 -3
- package/r-bridge/lang-4.x/ast/parser/{xml → main}/internal/expression/normalize-expression.js +4 -7
- package/r-bridge/lang-4.x/ast/parser/{xml → main}/internal/functions/normalize-argument.d.ts +2 -2
- package/r-bridge/lang-4.x/ast/parser/{xml → main}/internal/functions/normalize-argument.js +3 -3
- package/r-bridge/lang-4.x/ast/parser/{xml → main}/internal/functions/normalize-call.d.ts +3 -3
- package/r-bridge/lang-4.x/ast/parser/{xml → main}/internal/functions/normalize-call.js +6 -9
- package/r-bridge/lang-4.x/ast/parser/{xml → main}/internal/functions/normalize-definition.d.ts +3 -3
- package/r-bridge/lang-4.x/ast/parser/{xml → main}/internal/functions/normalize-definition.js +1 -3
- package/r-bridge/lang-4.x/ast/parser/{xml → main}/internal/functions/normalize-parameter.d.ts +2 -2
- package/r-bridge/lang-4.x/ast/parser/{xml → main}/internal/functions/normalize-parameter.js +6 -5
- package/r-bridge/lang-4.x/ast/parser/{xml → main}/internal/loops/normalize-break.d.ts +2 -2
- package/r-bridge/lang-4.x/ast/parser/{xml → main}/internal/loops/normalize-for.d.ts +2 -2
- package/r-bridge/lang-4.x/ast/parser/{xml → main}/internal/loops/normalize-for.js +5 -6
- package/r-bridge/lang-4.x/ast/parser/{xml → main}/internal/loops/normalize-next.d.ts +2 -2
- package/r-bridge/lang-4.x/ast/parser/{xml → main}/internal/loops/normalize-repeat.d.ts +2 -2
- package/r-bridge/lang-4.x/ast/parser/main/internal/loops/normalize-while.d.ts +4 -0
- package/r-bridge/lang-4.x/ast/parser/{xml → main}/internal/loops/normalize-while.js +4 -4
- package/r-bridge/lang-4.x/ast/parser/{xml → main}/internal/normalize-access.d.ts +3 -3
- package/r-bridge/lang-4.x/ast/parser/{xml → main}/internal/normalize-access.js +1 -2
- package/r-bridge/lang-4.x/ast/parser/{xml → main}/internal/operators/normalize-binary.d.ts +2 -2
- package/r-bridge/lang-4.x/ast/parser/{xml → main}/internal/operators/normalize-binary.js +2 -2
- package/r-bridge/lang-4.x/ast/parser/{xml → main}/internal/operators/normalize-unary.d.ts +2 -2
- package/r-bridge/lang-4.x/ast/parser/{xml → main}/internal/other/normalize-comment.d.ts +3 -3
- package/r-bridge/lang-4.x/ast/parser/{xml → main}/internal/other/normalize-comment.js +1 -1
- package/r-bridge/lang-4.x/ast/parser/{xml → main}/internal/other/normalize-line-directive.d.ts +3 -3
- package/r-bridge/lang-4.x/ast/parser/{xml → main}/internal/other/normalize-line-directive.js +1 -1
- package/r-bridge/lang-4.x/ast/parser/main/internal/structure/normalize-delimiter.d.ts +3 -0
- package/r-bridge/lang-4.x/ast/parser/main/internal/structure/normalize-expressions.d.ts +10 -0
- package/r-bridge/lang-4.x/ast/parser/{xml → main}/internal/structure/normalize-root.d.ts +2 -2
- package/r-bridge/lang-4.x/ast/parser/{xml → main}/internal/structure/normalize-root.js +3 -4
- package/r-bridge/lang-4.x/ast/parser/{xml → main}/internal/structure/normalize-single-node.d.ts +2 -2
- package/r-bridge/lang-4.x/ast/parser/{xml → main}/internal/structure/normalize-single-node.js +2 -2
- package/r-bridge/lang-4.x/ast/parser/{xml → main}/internal/values/normalize-number.d.ts +3 -3
- package/r-bridge/lang-4.x/ast/parser/{xml → main}/internal/values/normalize-number.js +1 -1
- package/r-bridge/lang-4.x/ast/parser/{xml → main}/internal/values/normalize-string.d.ts +3 -3
- package/r-bridge/lang-4.x/ast/parser/{xml → main}/internal/values/normalize-string.js +1 -1
- package/r-bridge/lang-4.x/ast/parser/{xml → main}/internal/values/normalize-symbol.d.ts +3 -3
- package/r-bridge/lang-4.x/ast/parser/{xml → main}/internal/values/normalize-symbol.js +1 -3
- package/r-bridge/lang-4.x/ast/parser/main/normalize-meta.d.ts +41 -0
- package/r-bridge/lang-4.x/ast/parser/main/normalize-meta.js +85 -0
- package/r-bridge/lang-4.x/ast/parser/{xml → main}/normalizer-data.d.ts +10 -4
- package/r-bridge/lang-4.x/ast/parser/main/normalizer-data.js +14 -0
- package/slicing/criterion/parse.d.ts +1 -1
- package/statistics/summarizer/post-process/clusterer.d.ts +1 -1
- package/util/ansi.js +9 -2
- package/util/time.d.ts +4 -0
- package/util/time.js +8 -0
- package/util/version.js +1 -1
- package/documentation/doc-util/doc-ms.d.ts +0 -1
- package/documentation/doc-util/doc-ms.js +0 -8
- package/r-bridge/lang-4.x/ast/parser/xml/input-format.d.ts +0 -39
- package/r-bridge/lang-4.x/ast/parser/xml/input-format.js +0 -38
- package/r-bridge/lang-4.x/ast/parser/xml/internal/control/normalize-if-then-else.d.ts +0 -15
- package/r-bridge/lang-4.x/ast/parser/xml/internal/loops/normalize-while.d.ts +0 -4
- package/r-bridge/lang-4.x/ast/parser/xml/internal/structure/normalize-delimiter.d.ts +0 -3
- package/r-bridge/lang-4.x/ast/parser/xml/internal/structure/normalize-expressions.d.ts +0 -10
- package/r-bridge/lang-4.x/ast/parser/xml/normalize-meta.d.ts +0 -47
- package/r-bridge/lang-4.x/ast/parser/xml/normalize-meta.js +0 -110
- /package/queries/{call-context-query → catalog/call-context-query}/call-context-query-format.js +0 -0
- /package/r-bridge/lang-4.x/ast/parser/{xml → main}/internal/loops/normalize-break.js +0 -0
- /package/r-bridge/lang-4.x/ast/parser/{xml → main}/internal/loops/normalize-next.js +0 -0
- /package/r-bridge/lang-4.x/ast/parser/{xml → main}/internal/loops/normalize-repeat.js +0 -0
- /package/r-bridge/lang-4.x/ast/parser/{xml → main}/internal/operators/normalize-unary.js +0 -0
- /package/r-bridge/lang-4.x/ast/parser/{xml → main}/internal/structure/normalize-delimiter.js +0 -0
- /package/r-bridge/lang-4.x/ast/parser/{xml → main}/internal/structure/normalize-expressions.js +0 -0
package/README.md
CHANGED
@@ -24,7 +24,8 @@ You can enter `:help` to gain more information on its capabilities.
 
 ## 📜 More Information
 
-For more details on how to use _flowR_ please refer to the
+For more details on how to use _flowR_ please refer to the [wiki pages](https://github.com/flowr-analysis/flowr/wiki),
+as well as the deployed [code documentation](https://flowr-analysis.github.io/flowr/doc/).
 
 ## 🚀 Contributing
 
package/cli/repl/commands/repl-parse.js
CHANGED
@@ -3,22 +3,18 @@ Object.defineProperty(exports, "__esModule", { value: true });
 exports.parseCommand = void 0;
 const pipeline_executor_1 = require("../../../core/pipeline-executor");
 const format_1 = require("../../../r-bridge/lang-4.x/ast/parser/json/format");
-const
-const input_format_1 = require("../../../r-bridge/lang-4.x/ast/parser/xml/input-format");
-const type_1 = require("../../../r-bridge/lang-4.x/ast/model/type");
-const normalize_meta_1 = require("../../../r-bridge/lang-4.x/ast/parser/xml/normalize-meta");
+const normalize_meta_1 = require("../../../r-bridge/lang-4.x/ast/parser/main/normalize-meta");
 const default_pipelines_1 = require("../../../core/steps/pipeline/default-pipelines");
 const retriever_1 = require("../../../r-bridge/retriever");
-function toDepthMap(
-    const
-    const visit = [{ depth: 0, node: root }];
+function toDepthMap(entry) {
+    const visit = [{ depth: 0, node: entry }];
     const result = [];
     while (visit.length > 0) {
         const current = visit.pop();
         if (current === undefined) {
             continue;
         }
-        const children = current.node
+        const children = current.node.children;
         result.push({ ...current, leaf: children.length === 0 });
         children.reverse();
         const nextDepth = current.depth + 1;
@@ -75,13 +71,8 @@ function depthListToTextTree(list, f) {
         deadDepths.delete(depth);
         result += initialIndentation(i, depth, deadDepths, nextDepth, list, f);
         result += f.reset();
-        const
-        const
-        const locationRaw = raw[input_format_1.attributesKey];
-        let location = '';
-        if (locationRaw !== undefined) {
-            location = retrieveLocationString(locationRaw);
-        }
+        const content = node.text;
+        const location = retrieveLocationString(node);
         const type = (0, normalize_meta_1.getTokenType)(node);
         if (leaf) {
             const suffix = `${f.format(content ? JSON.stringify(content) : '', { style: 1 /* FontStyles.Bold */ })}${f.format(location, { style: 3 /* FontStyles.Italic */ })}`;
@@ -104,7 +95,7 @@ exports.parseCommand = {
             shell,
             request: (0, retriever_1.requestFromInput)((0, retriever_1.removeRQuotes)(remainingLine.trim()))
         }).allRemainingSteps();
-        const object = (0,
+        const object = (0, format_1.convertPreparedParsedData)((0, format_1.prepareParsedData)(result.parse));
        output.stdout(depthListToTextTree(toDepthMap(object), output.formatter));
     }
 };
package/cli/repl/commands/repl-query.js
CHANGED
@@ -12,6 +12,10 @@ const query_1 = require("../../../queries/query");
 const json_1 = require("../../../util/json");
 const query_schema_1 = require("../../../queries/query-schema");
 const built_in_1 = require("../../../dataflow/environments/built-in");
+const dfg_1 = require("../../../util/mermaid/dfg");
+const ast_1 = require("../../../util/mermaid/ast");
+const time_1 = require("../../../util/time");
+const doc_hover_over_1 = require("../../../documentation/doc-util/doc-hover-over");
 async function getDataflow(shell, remainingLine) {
     return await new pipeline_executor_1.PipelineExecutor(default_pipelines_1.DEFAULT_DATAFLOW_PIPELINE, {
         shell,
@@ -99,6 +103,21 @@ function asciiCallContext(formatter, results, processed) {
     }
     return result.join('\n');
 }
+function summarizeIdsIfTooLong(ids) {
+    const naive = ids.join(', ');
+    if (naive.length <= 20) {
+        return naive;
+    }
+    let acc = '';
+    let i = 0;
+    while (acc.length <= 20) {
+        acc += ids[i++] + ', ';
+    }
+    if (i < ids.length) {
+        acc += '... (see JSON below)';
+    }
+    return (0, doc_hover_over_1.textWithTooltip)(acc, JSON.stringify(ids));
+}
 function asciiSummaryOfQueryResult(formatter, totalInMs, results, processed) {
     const result = [];
     for (const [query, queryResults] of Object.entries(results)) {
@@ -107,10 +126,60 @@ function asciiSummaryOfQueryResult(formatter, totalInMs, results, processed) {
         }
         if (query === 'call-context') {
             const out = queryResults;
-            result.push(`Query: ${(0, ansi_1.bold)(query, formatter)} (${out['.meta'].timing
+            result.push(`Query: ${(0, ansi_1.bold)(query, formatter)} (${(0, time_1.printAsMs)(out['.meta'].timing, 0)})`);
             result.push(asciiCallContext(formatter, out, processed));
             continue;
         }
+        else if (query === 'dataflow') {
+            const out = queryResults;
+            result.push(`Query: ${(0, ansi_1.bold)(query, formatter)} (${(0, time_1.printAsMs)(out['.meta'].timing, 0)})`);
+            result.push(` ╰ [Dataflow Graph](${(0, dfg_1.graphToMermaidUrl)(out.graph)})`);
+            continue;
+        }
+        else if (query === 'id-map') {
+            const out = queryResults;
+            result.push(`Query: ${(0, ansi_1.bold)(query, formatter)} (${(0, time_1.printAsMs)(out['.meta'].timing, 0)})`);
+            result.push(` ╰ Id List: {${summarizeIdsIfTooLong([...out.idMap.keys()])}}`);
+            continue;
+        }
+        else if (query === 'normalized-ast') {
+            const out = queryResults;
+            result.push(`Query: ${(0, ansi_1.bold)(query, formatter)} (${(0, time_1.printAsMs)(out['.meta'].timing, 0)})`);
+            result.push(` ╰ [Normalized AST](${(0, ast_1.normalizedAstToMermaidUrl)(out.normalized.ast)})`);
+            continue;
+        }
+        else if (query === 'static-slice') {
+            const out = queryResults;
+            result.push(`Query: ${(0, ansi_1.bold)(query, formatter)} (${(0, time_1.printAsMs)(out['.meta'].timing, 0)})`);
+            for (const [fingerprint, obj] of Object.entries(out.results)) {
+                const { criteria, noMagicComments, noReconstruction } = JSON.parse(fingerprint);
+                const addons = [];
+                if (noReconstruction) {
+                    addons.push('no reconstruction');
+                }
+                if (noMagicComments) {
+                    addons.push('no magic comments');
+                }
+                result.push(` ╰ Slice for {${criteria.join(', ')}} ${addons.join(', ')}`);
+                if ('reconstruct' in obj) {
+                    result.push(' ╰ Code (newline as <code>\n</code>): <code>' + obj.reconstruct.code.split('\n').join('\\n') + '</code>');
+                }
+                else {
+                    result.push(` ╰ Id List: {${summarizeIdsIfTooLong([...obj.slice.result])}}`);
+                }
+            }
+            continue;
+        }
+        else if (query === 'dataflow-cluster') {
+            const out = queryResults;
+            result.push(`Query: ${(0, ansi_1.bold)(query, formatter)} (${out['.meta'].timing.toFixed(0)}ms)`);
+            result.push(` ╰ Found ${out.clusters.length} cluster${out.clusters.length === 1 ? '' : 's'}`);
+            for (const cluster of out.clusters) {
+                const unknownSideEffects = cluster.hasUnknownSideEffects ? '(has unknown side effect)' : '';
+                result.push(` ╰ ${unknownSideEffects} {${summarizeIdsIfTooLong(cluster.members)}} ([marked](${(0, dfg_1.graphToMermaidUrl)(processed.dataflow.graph, false, new Set(cluster.members))}))`);
+            }
+            continue;
+        }
         result.push(`Query: ${(0, ansi_1.bold)(query, formatter)}`);
         let timing = -1;
         // eslint-disable-next-line @typescript-eslint/no-unsafe-argument
@@ -121,9 +190,9 @@ function asciiSummaryOfQueryResult(formatter, totalInMs, results, processed) {
             }
             result.push(` ╰ ${key}: ${JSON.stringify(value)}`);
         }
-        result.push(` - Took ${
+        result.push(` - Took ${(0, time_1.printAsMs)(timing, 0)}`);
     }
-    result.push((0, ansi_1.italic)(`All queries together required ≈${results['.meta'].timing
+    result.push((0, ansi_1.italic)(`All queries together required ≈${(0, time_1.printAsMs)(results['.meta'].timing, 0)} (1ms accuracy, total ${(0, time_1.printAsMs)(totalInMs, 0)})`, formatter));
     return formatter.format(result.join('\n'));
 }
 exports.queryCommand = {
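The new branches above render per-query results for the `dataflow`, `id-map`, `normalized-ast`, `static-slice`, and `dataflow-cluster` queries. A minimal sketch of a query set that would exercise them; the `static-slice` option names mirror the fingerprint fields parsed above, and the overall shapes are assumptions rather than the package's full schema:

```typescript
// Hypothetical query set for the REPL `:query` command / executeQueries;
// `criteria` / `noReconstruction` follow the fingerprint fields shown above.
const queries = [
    { type: 'dataflow' },
    { type: 'id-map' },
    { type: 'normalized-ast' },
    { type: 'dataflow-cluster' },
    { type: 'static-slice', criteria: ['2@x'], noReconstruction: false }
] as const;
```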
package/core/print/parse-printer.js
CHANGED
@@ -2,29 +2,8 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.parseToQuads = parseToQuads;
 const quads_1 = require("../../util/quads");
-const parser_1 = require("../../r-bridge/lang-4.x/ast/parser/json/parser");
 const format_1 = require("../../r-bridge/lang-4.x/ast/parser/json/format");
-const input_format_1 = require("../../r-bridge/lang-4.x/ast/parser/xml/input-format");
-function filterObject(obj, keys) {
-    if (typeof obj !== 'object') {
-        return obj;
-    }
-    else if (Array.isArray(obj)) {
-        return obj.map(e => filterObject(e, keys));
-    }
-    if (Object.keys(obj).some(k => keys.has(k))) {
-        return Object.fromEntries(Object.entries(obj)
-            .filter(([k]) => keys.has(k))
-            .map(([k, v]) => [k, filterObject(v, keys)]));
-    }
-    else {
-        return Object.fromEntries(Object.entries(obj)
-            .map(([k, v]) => [k, filterObject(v, keys)]));
-    }
-}
 function parseToQuads(code, config) {
-
-    // recursively filter so that if the object contains one of the keys 'a', 'b' or 'c', all other keys are ignored
-    return (0, quads_1.serialize2quads)(filterObject(obj, new Set([input_format_1.attributesKey, input_format_1.childrenKey, input_format_1.contentKey])), config);
+    return (0, quads_1.serialize2quads)((0, format_1.convertPreparedParsedData)((0, format_1.prepareParsedData)(code)), config);
 }
 //# sourceMappingURL=parse-printer.js.map
package/core/steps/pipeline/default-pipelines.d.ts
CHANGED
@@ -154,6 +154,70 @@ export declare const DEFAULT_SLICE_AND_RECONSTRUCT_PIPELINE: import("./pipeline"
     readonly dependencies: readonly ["slice"];
     readonly requiredInput: import("../all/static-slicing/10-reconstruct").ReconstructRequiredInput;
 }>;
+export declare const DEFAULT_SLICE_WITHOUT_RECONSTRUCT_PIPELINE: import("./pipeline").Pipeline<{
+    readonly name: "parse";
+    readonly humanReadableName: "parse with R shell";
+    readonly description: "Parse the given R code into an AST";
+    readonly processor: (_results: unknown, input: Partial<import("../all/core/00-parse").ParseRequiredInput>) => Promise<string>;
+    readonly executed: import("../pipeline-step").PipelineStepStage.OncePerFile;
+    readonly printer: {
+        readonly 0: typeof import("../../print/print").internalPrinter;
+        readonly 2: (text: string) => string;
+        readonly 5: typeof import("../../print/parse-printer").parseToQuads;
+    };
+    readonly dependencies: readonly [];
+    readonly requiredInput: import("../all/core/00-parse").ParseRequiredInput;
+} | {
+    readonly name: "normalize";
+    readonly humanReadableName: "normalize";
+    readonly description: "Normalize the AST to flowR's AST";
+    readonly processor: (results: {
+        parse?: string;
+    }, input: Partial<import("../all/core/10-normalize").NormalizeRequiredInput>) => import("../../../r-bridge/lang-4.x/ast/model/processing/decorate").NormalizedAst<import("../../../r-bridge/lang-4.x/ast/model/processing/decorate").ParentInformation, import("../../../r-bridge/lang-4.x/ast/model/model").RNode<import("../../../r-bridge/lang-4.x/ast/model/processing/decorate").ParentInformation>>;
+    readonly executed: import("../pipeline-step").PipelineStepStage.OncePerFile;
+    readonly printer: {
+        readonly 0: typeof import("../../print/print").internalPrinter;
+        readonly 2: typeof import("../../print/normalize-printer").normalizedAstToJson;
+        readonly 5: typeof import("../../print/normalize-printer").normalizedAstToQuads;
+        readonly 3: typeof import("../../print/normalize-printer").printNormalizedAstToMermaid;
+        readonly 4: typeof import("../../print/normalize-printer").printNormalizedAstToMermaidUrl;
+    };
+    readonly dependencies: readonly ["parse"];
+    readonly requiredInput: import("../all/core/10-normalize").NormalizeRequiredInput;
+} | {
+    readonly humanReadableName: "dataflow";
+    readonly processor: (results: {
+        normalize?: import("../../../r-bridge/lang-4.x/ast/model/processing/decorate").NormalizedAst;
+    }, input: {
+        request?: import("../../../r-bridge/retriever").RParseRequests;
+    }) => import("../../../dataflow/info").DataflowInformation;
+    readonly requiredInput: {};
+    readonly name: "dataflow";
+    readonly description: "Construct the dataflow graph";
+    readonly executed: import("../pipeline-step").PipelineStepStage.OncePerFile;
+    readonly printer: {
+        readonly 0: typeof import("../../print/print").internalPrinter;
+        readonly 2: typeof import("../../print/dataflow-printer").dataflowGraphToJson;
+        readonly 5: typeof import("../../print/dataflow-printer").dataflowGraphToQuads;
+        readonly 3: typeof import("../../print/dataflow-printer").dataflowGraphToMermaid;
+        readonly 4: typeof import("../../print/dataflow-printer").dataflowGraphToMermaidUrl;
+    };
+    readonly dependencies: readonly ["normalize"];
+} | {
+    readonly name: "slice";
+    readonly humanReadableName: "static slice";
+    readonly description: "Calculate the actual static slice from the dataflow graph and the given slicing criteria";
+    readonly processor: (results: {
+        dataflow?: import("../../../dataflow/info").DataflowInformation;
+        normalize?: import("../../../r-bridge/lang-4.x/ast/model/processing/decorate").NormalizedAst;
+    }, input: Partial<import("../all/static-slicing/00-slice").SliceRequiredInput>) => Readonly<import("../../../slicing/static/slicer-types").SliceResult>;
+    readonly executed: import("../pipeline-step").PipelineStepStage.OncePerRequest;
+    readonly printer: {
+        readonly 0: typeof import("../../print/print").internalPrinter;
+    };
+    readonly dependencies: readonly ["dataflow"];
+    readonly requiredInput: import("../all/static-slicing/00-slice").SliceRequiredInput;
+}>;
 export declare const DEFAULT_DATAFLOW_PIPELINE: import("./pipeline").Pipeline<{
     readonly name: "parse";
     readonly humanReadableName: "parse with R shell";
package/core/steps/pipeline/default-pipelines.js
CHANGED
@@ -1,6 +1,6 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.DEFAULT_PARSE_PIPELINE = exports.DEFAULT_NORMALIZE_PIPELINE = exports.DEFAULT_DATAFLOW_PIPELINE = exports.DEFAULT_SLICE_AND_RECONSTRUCT_PIPELINE = exports.DEFAULT_SLICING_PIPELINE = void 0;
+exports.DEFAULT_PARSE_PIPELINE = exports.DEFAULT_NORMALIZE_PIPELINE = exports.DEFAULT_DATAFLOW_PIPELINE = exports.DEFAULT_SLICE_WITHOUT_RECONSTRUCT_PIPELINE = exports.DEFAULT_SLICE_AND_RECONSTRUCT_PIPELINE = exports.DEFAULT_SLICING_PIPELINE = void 0;
 /**
  * Contains the default pipeline for working with flowr
  */
@@ -12,6 +12,7 @@ const _00_slice_1 = require("../all/static-slicing/00-slice");
 const _10_reconstruct_1 = require("../all/static-slicing/10-reconstruct");
 exports.DEFAULT_SLICING_PIPELINE = (0, pipeline_1.createPipeline)(_00_parse_1.PARSE_WITH_R_SHELL_STEP, _10_normalize_1.NORMALIZE, _20_dataflow_1.STATIC_DATAFLOW, _00_slice_1.STATIC_SLICE, _10_reconstruct_1.NAIVE_RECONSTRUCT);
 exports.DEFAULT_SLICE_AND_RECONSTRUCT_PIPELINE = exports.DEFAULT_SLICING_PIPELINE;
+exports.DEFAULT_SLICE_WITHOUT_RECONSTRUCT_PIPELINE = (0, pipeline_1.createPipeline)(_00_parse_1.PARSE_WITH_R_SHELL_STEP, _10_normalize_1.NORMALIZE, _20_dataflow_1.STATIC_DATAFLOW, _00_slice_1.STATIC_SLICE);
 exports.DEFAULT_DATAFLOW_PIPELINE = (0, pipeline_1.createPipeline)(_00_parse_1.PARSE_WITH_R_SHELL_STEP, _10_normalize_1.NORMALIZE, _20_dataflow_1.STATIC_DATAFLOW);
 exports.DEFAULT_NORMALIZE_PIPELINE = (0, pipeline_1.createPipeline)(_00_parse_1.PARSE_WITH_R_SHELL_STEP, _10_normalize_1.NORMALIZE);
 exports.DEFAULT_PARSE_PIPELINE = (0, pipeline_1.createPipeline)(_00_parse_1.PARSE_WITH_R_SHELL_STEP);
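A minimal sketch of driving the new `DEFAULT_SLICE_WITHOUT_RECONSTRUCT_PIPELINE`; the deep import paths and the `criterion` input name are assumptions based on the file layout and the `SliceRequiredInput` declaration in this diff, not a documented API:

```typescript
// Sketch only: slice without the reconstruct step (paths and field names assumed, see note above).
import { PipelineExecutor } from '@eagleoutice/flowr/core/pipeline-executor';
import { DEFAULT_SLICE_WITHOUT_RECONSTRUCT_PIPELINE } from '@eagleoutice/flowr/core/steps/pipeline/default-pipelines';
import { RShell } from '@eagleoutice/flowr/r-bridge/shell';
import { requestFromInput } from '@eagleoutice/flowr/r-bridge/retriever';

const shell = new RShell();
const result = await new PipelineExecutor(DEFAULT_SLICE_WITHOUT_RECONSTRUCT_PIPELINE, {
    shell,
    request:   requestFromInput('x <- 1\ny <- 2\nprint(x)'),
    criterion: ['3@x'] // slicing criterion (line 3, variable x); field name assumed from SliceRequiredInput
}).allRemainingSteps();
console.log([...result.slice.result]); // node ids of the slice; no reconstructed code is produced
shell.close();
```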
package/dataflow/cluster.d.ts
ADDED
@@ -0,0 +1,20 @@
+import type { DataflowGraph } from './graph/graph';
+import type { NodeId } from '../r-bridge/lang-4.x/ast/model/processing/node-id';
+export type DataflowGraphClusters = DataflowGraphCluster[];
+export interface DataflowGraphCluster {
+    /**
+     * The node which started the cluster,
+     * as this is theoretically picked random, there are just two guarantees you can rely on:
+     *
+     * 1. The node is part of the `members` as well
+     * 2. At one point during the clustering, the node wsa considered as a starting point
+     *
+     * In general, this is more of a debugging aid/representative of the cluster.
+     */
+    readonly startNode: NodeId;
+    /** All nodes that are part of this cluster */
+    readonly members: readonly NodeId[];
+    /** If the cluster contains unknown side effects */
+    readonly hasUnknownSideEffects: boolean;
+}
+export declare function findAllClusters(graph: DataflowGraph): DataflowGraphClusters;
package/dataflow/cluster.js
ADDED
@@ -0,0 +1,46 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.findAllClusters = findAllClusters;
+const edge_1 = require("./graph/edge");
+const vertex_1 = require("./graph/vertex");
+const assert_1 = require("../util/assert");
+function findAllClusters(graph) {
+    const clusters = [];
+    // we reverse the vertices since dependencies usually point "backwards" from later nodes
+    const notReached = new Set([...graph.vertices(true)].map(([id]) => id).reverse());
+    while (notReached.size > 0) {
+        const [startNode] = notReached;
+        notReached.delete(startNode);
+        clusters.push({
+            startNode: startNode,
+            members: [...makeCluster(graph, startNode, notReached)],
+            hasUnknownSideEffects: graph.unknownSideEffects.has(startNode)
+        });
+    }
+    return clusters;
+}
+function makeCluster(graph, from, notReached) {
+    const info = graph.getVertex(from);
+    (0, assert_1.guard)(info !== undefined, () => `Vertex ${from} not found in graph`);
+    const nodes = new Set([from]);
+    // cluster function def exit points
+    if (info.tag === vertex_1.VertexType.FunctionDefinition) {
+        for (const sub of info.exitPoints) {
+            if (notReached.delete(sub)) {
+                makeCluster(graph, sub, notReached).forEach(n => nodes.add(n));
+            }
+        }
+    }
+    // cluster adjacent edges
+    for (const [dest, { types }] of [...graph.outgoingEdges(from) ?? [], ...graph.ingoingEdges(from) ?? []]) {
+        // don't cluster for function content if it isn't returned
+        if ((0, edge_1.edgeDoesNotIncludeType)(types, edge_1.EdgeType.Returns) && info.onlyBuiltin && info.name == '{') {
+            continue;
+        }
+        if (notReached.delete(dest)) {
+            makeCluster(graph, dest, notReached).forEach(n => nodes.add(n));
+        }
+    }
+    return nodes;
+}
+//# sourceMappingURL=cluster.js.map
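`findAllClusters` is what the new `dataflow-cluster` query and the REPL output above build on. A short, hedged usage sketch; the deep import path is an assumption, and `analysis` stands for the result of a `DEFAULT_DATAFLOW_PIPELINE` run as seen in the repl-query.js diff above:

```typescript
// Sketch (import path assumed): cluster the dataflow graph of an analyzed program.
import { findAllClusters } from '@eagleoutice/flowr/dataflow/cluster';

// `analysis` is assumed to be the result of a DEFAULT_DATAFLOW_PIPELINE run (see the earlier sketch).
for (const cluster of findAllClusters(analysis.dataflow.graph)) {
    console.log(
        `cluster of ${cluster.members.length} node(s), representative ${cluster.startNode}` +
        (cluster.hasUnknownSideEffects ? ', has unknown side effects' : '')
    );
}
```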
package/dataflow/graph/edge.d.ts
CHANGED
@@ -1,7 +1,6 @@
 /**
- * An edge consist of
- *
- * - a type (if it is read or used in the context), and
+ * An edge consist of only of the type (source and target are encoded with the Dataflow Graph).
+ * Multiple edges are encoded by joining the respective type bits.
  */
 export interface DataflowGraphEdge {
     types: EdgeTypeBits;
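The new comment says that parallel relations between two vertices collapse into one edge whose `types` field carries the joined type bits. A tiny illustration of what that encoding implies; the concrete bit values are made up here, the real ones live in `EdgeType`:

```typescript
// Illustration only: edge types are bit flags, so one DataflowGraphEdge can carry several relations.
const Reads = 1 << 0;   // hypothetical bit assignments for illustration
const Calls = 1 << 2;
const types = Reads | Calls;                  // "joined" type bits stored on a single edge
const includesCalls = (types & Calls) !== 0;  // true — the kind of test helpers like edgeDoesNotIncludeType build on
```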
package/dataflow/graph/graph.d.ts
CHANGED
@@ -59,7 +59,7 @@ export interface DataflowGraphJson {
 export declare class DataflowGraph<Vertex extends DataflowGraphVertexInfo = DataflowGraphVertexInfo, Edge extends DataflowGraphEdge = DataflowGraphEdge> {
     private static DEFAULT_ENVIRONMENT;
     private _idMap;
-    private _unknownSideEffects;
+    private readonly _unknownSideEffects;
     constructor(idMap: AstIdMap | undefined);
     /** Contains the vertices of the root level graph (i.e., included those vertices from the complete graph, that are nested within function definitions) */
     protected rootVertices: Set<NodeId>;
package/documentation/data/server/doc-data-server-messages.d.ts
CHANGED
@@ -1 +1 @@
-export declare function
+export declare function documentAllServerMessages(): void;
package/documentation/data/server/doc-data-server-messages.js
CHANGED
@@ -1,6 +1,6 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.
+exports.documentAllServerMessages = documentAllServerMessages;
 const doc_server_message_1 = require("../../doc-util/doc-server-message");
 const message_hello_1 = require("../../../cli/repl/server/messages/message-hello");
 const shell_1 = require("../../../r-bridge/shell");
@@ -16,9 +16,10 @@ const message_slice_1 = require("../../../cli/repl/server/messages/message-slice
 const message_repl_1 = require("../../../cli/repl/server/messages/message-repl");
 const message_query_1 = require("../../../cli/repl/server/messages/message-query");
 const example_query_code_1 = require("../query/example-query-code");
-const call_context_query_format_1 = require("../../../queries/call-context-query/call-context-query-format");
+const call_context_query_format_1 = require("../../../queries/catalog/call-context-query/call-context-query-format");
 const message_lineage_1 = require("../../../cli/repl/server/messages/message-lineage");
-
+const doc_structure_1 = require("../../doc-util/doc-structure");
+function documentAllServerMessages() {
     (0, doc_server_message_1.documentServerMessage)({
         title: 'Hello',
         type: 'response',
@@ -217,9 +218,14 @@ While the context is derived from the \`filename\`, we currently offer no way to
 end
 deactivate Server
 `,
-        shortDescription:
+        shortDescription: `([DEPRECATED](${doc_files_1.FlowrWikiBaseRef}/Query%20API)) The server slices a file based on the given criteria.`,
         text: async (shell) => {
             return `
+${(0, doc_structure_1.block)({
+type: 'WARNING',
+content: `We deprecated the slice request in favor of the \`static-slice\` [Query](${doc_files_1.FlowrWikiBaseRef}/Query%20API).`
+})}
+
 To slice, you have to send a file analysis request first. The \`filetoken\` you assign is of use here as you can re-use it to repeatedly slice the same file.
 Besides that, you only need to add an array of slicing criteria, using one of the formats described on the [terminology wiki page](${doc_files_1.FlowrWikiBaseRef}/Terminology#slicing-criterion)
 (however, instead of using \`;\`, you can simply pass separate array elements).
package/documentation/doc-util/doc-code.js
CHANGED
@@ -1,7 +1,16 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.codeBlock = codeBlock;
+exports.jsonWithLimit = jsonWithLimit;
+const json_1 = require("../../util/json");
 function codeBlock(language, code) {
     return `\n\`\`\`${language}\n${code?.trim() ?? ''}\n\`\`\`\n`;
 }
+function jsonWithLimit(object, maxLength = 5_000, tooLongText = '_As the code is pretty long, we inhibit pretty printing and syntax highlighting (JSON):_') {
+    const prettyPrinted = JSON.stringify(object, json_1.jsonReplacer, 2);
+    return `
+${prettyPrinted.length > maxLength ? tooLongText : ''}
+${codeBlock(prettyPrinted.length > maxLength ? 'text' : 'json', prettyPrinted.length > 5_000 ? JSON.stringify(object, json_1.jsonReplacer) : prettyPrinted)}
+`;
+}
 //# sourceMappingURL=doc-code.js.map
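`jsonWithLimit` replaces several hand-rolled `JSON.stringify` blocks in the documentation generators further below. A hedged usage sketch (`hugeQueryResult` is a hypothetical placeholder):

```typescript
// Sketch: embed an object as a markdown code block, falling back to a compact
// representation once the pretty-printed JSON exceeds the length limit.
const small = jsonWithLimit({ hello: 'world' });      // pretty-printed ```json block
const large = jsonWithLimit(hugeQueryResult, 1_000);  // note prefix + compact output (hypothetical input)
```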
package/documentation/doc-util/doc-dfg.d.ts
CHANGED
@@ -10,6 +10,7 @@ export interface PrintDataflowGraphOptions {
     readonly codeOpen?: boolean;
     readonly exposeResult?: boolean;
     readonly switchCodeAndGraph?: boolean;
+    readonly hideEnvInMermaid?: boolean;
 }
 export declare function printDfGraphForCode(shell: RShell, code: string, options: PrintDataflowGraphOptions & {
     exposeResult: true;
package/documentation/doc-util/doc-dfg.js
CHANGED
@@ -11,8 +11,8 @@ const decorate_1 = require("../../r-bridge/lang-4.x/ast/model/processing/decorat
 const resolve_graph_1 = require("../../dataflow/graph/resolve-graph");
 const diff_1 = require("../../dataflow/graph/diff");
 const assert_1 = require("../../util/assert");
-const doc_ms_1 = require("./doc-ms");
 const json_1 = require("../../util/json");
+const time_1 = require("../../util/time");
 function printDfGraph(graph, mark) {
     return `
 \`\`\`mermaid
@@ -24,7 +24,7 @@ ${(0, dfg_1.graphToMermaid)({
 \`\`\`
 `;
 }
-async function printDfGraphForCode(shell, code, { mark, showCode = true, codeOpen = false, exposeResult, switchCodeAndGraph = false } = {}) {
+async function printDfGraphForCode(shell, code, { mark, showCode = true, codeOpen = false, exposeResult, switchCodeAndGraph = false, hideEnvInMermaid = false } = {}) {
     const now = performance.now();
     const result = await new pipeline_executor_1.PipelineExecutor(default_pipelines_1.DEFAULT_DATAFLOW_PIPELINE, {
         shell,
@@ -34,7 +34,7 @@ async function printDfGraphForCode(shell, code, { mark, showCode = true, codeOpe
     if (switchCodeAndGraph) {
         (0, assert_1.guard)(showCode, 'can not switch code and graph if code is not shown');
     }
-    const metaInfo = `The analysis required _${(0,
+    const metaInfo = `The analysis required _${(0, time_1.printAsMs)(duration)}_ (incl. parse and normalize) within the generation environment.`;
     const dfGraph = printDfGraph(result.dataflow.graph, mark);
     let resultText = '\n\n';
     if (showCode) {
@@ -59,7 +59,8 @@ ${switchCodeAndGraph ? dfGraph : codeText}
 \`\`\`
 ${(0, dfg_1.graphToMermaid)({
 graph: result.dataflow.graph,
-prefix: 'flowchart LR'
+prefix: 'flowchart LR',
+includeEnvironments: !hideEnvInMermaid
 }).string}
 \`\`\`
 
package/documentation/doc-util/doc-normalized-ast.js
CHANGED
@@ -10,8 +10,8 @@ const decorate_1 = require("../../r-bridge/lang-4.x/ast/model/processing/decorat
 const resolve_graph_1 = require("../../dataflow/graph/resolve-graph");
 const diff_1 = require("../../dataflow/graph/diff");
 const assert_1 = require("../../util/assert");
-const doc_ms_1 = require("./doc-ms");
 const ast_1 = require("../../util/mermaid/ast");
+const time_1 = require("../../util/time");
 function printNormalizedAst(ast, prefix = 'flowchart TD\n') {
     return `
 \`\`\`mermaid
@@ -26,7 +26,7 @@ async function printNormalizedAstForCode(shell, code, { showCode = true, prefix
         request: (0, retriever_1.requestFromInput)(code)
     }).allRemainingSteps();
     const duration = performance.now() - now;
-    const metaInfo = `The analysis required _${(0,
+    const metaInfo = `The analysis required _${(0, time_1.printAsMs)(duration)}_ (including parsing) within the generation environment.`;
     return '\n\n' + printNormalizedAst(result.normalize.ast, prefix) + (showCode ? `
 <details>
 
package/documentation/doc-util/doc-query.js
CHANGED
@@ -9,12 +9,13 @@ const query_1 = require("../../queries/query");
 const pipeline_executor_1 = require("../../core/pipeline-executor");
 const default_pipelines_1 = require("../../core/steps/pipeline/default-pipelines");
 const retriever_1 = require("../../r-bridge/retriever");
-const doc_ms_1 = require("./doc-ms");
 const json_1 = require("../../util/json");
 const ansi_1 = require("../../util/ansi");
 const repl_query_1 = require("../../cli/repl/commands/repl-query");
 const doc_files_1 = require("./doc-files");
 const doc_dfg_1 = require("./doc-dfg");
+const doc_code_1 = require("./doc-code");
+const time_1 = require("../../util/time");
 async function showQuery(shell, code, queries, { showCode, collapseResult } = {}) {
     const now = performance.now();
     const analysis = await new pipeline_executor_1.PipelineExecutor(default_pipelines_1.DEFAULT_DATAFLOW_PIPELINE, {
@@ -24,9 +25,8 @@ async function showQuery(shell, code, queries, { showCode, collapseResult } = {}
     const results = (0, query_1.executeQueries)({ graph: analysis.dataflow.graph, ast: analysis.normalize }, queries);
     const duration = performance.now() - now;
     const metaInfo = `
-The analysis required _${(0,
+The analysis required _${(0, time_1.printAsMs)(duration)}_ (including parsing and normalization and the query) within the generation environment.
 `.trim();
-    const resultAsString = JSON.stringify(results, json_1.jsonReplacer, 2);
     return `
 
 \`\`\`json
@@ -46,9 +46,7 @@ ${metaInfo}
 In general, the JSON contains the Ids of the nodes in question as they are present in the normalized AST or the dataflow graph of flowR.
 Please consult the [Interface](${doc_files_1.FlowrWikiBaseRef}/Interface) wiki page for more information on how to get those.
 
-
-${resultAsString}
-\`\`\`
+${(0, doc_code_1.jsonWithLimit)(results)}
 
 </details>
 
@@ -79,7 +77,7 @@ function linkify(name) {
     return name.toLowerCase().replace(/ /g, '-');
 }
 function tocForQueryType(type) {
-    const queries = exports.RegisteredQueries[type];
+    const queries = [...exports.RegisteredQueries[type].entries()].sort(([, { name: a }], [, { name: b }]) => a.localeCompare(b));
     const result = [];
     for (const [id, { name, shortDescription }] of queries) {
         result.push(`1. [${name}](#${linkify(name)}) (\`${id}\`):\\\n ${shortDescription}`);
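`executeQueries`, as used in `showQuery` above, is also the programmatic entry point to the query API this generator documents. A minimal sketch; the import path is an assumption, and `analysis` again stands for a `DEFAULT_DATAFLOW_PIPELINE` result as in the earlier sketches:

```typescript
// Sketch: run a query set against an already analyzed program (shapes as in the query array sketch above).
import { executeQueries } from '@eagleoutice/flowr/queries/query'; // import path assumed

const results = executeQueries(
    { graph: analysis.dataflow.graph, ast: analysis.normalize }, // `analysis`: assumed pipeline result
    [{ type: 'dataflow-cluster' }]
);
console.log(results['dataflow-cluster'].clusters.length, 'cluster(s) found');
```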
package/documentation/doc-util/doc-server-message.js
CHANGED
@@ -9,8 +9,8 @@ const schema_1 = require("../../util/schema");
 const ansi_1 = require("../../util/ansi");
 const net_1 = require("../../../test/functionality/_helper/net");
 const doc_code_1 = require("./doc-code");
-const doc_ms_1 = require("./doc-ms");
 const assert_1 = require("../../util/assert");
+const time_1 = require("../../util/time");
 const messages = [];
 function documentServerMessage(description) {
     messages.push(description);
@@ -29,7 +29,6 @@ async function inServerContext(shell, fn) {
 }
 function explainMsg(msg, type, desc = '', open = false) {
     const bold = open ? s => `<b>${s}</b>` : s => s;
-    const msgPrettyPrint = JSON.stringify(msg, null, 2);
     return `
 <li> ${bold('<code>' + msg.type + `</code> (${type})`)}
 <details${open ? ' open' : ''}>
@@ -38,8 +37,7 @@ function explainMsg(msg, type, desc = '', open = false) {
 
 ${desc}
 
-${
-${(0, doc_code_1.codeBlock)(msgPrettyPrint.length > 5_000 ? 'text' : 'json', msgPrettyPrint.length > 5_000 ? JSON.stringify(msg) : msgPrettyPrint)}
+${(0, doc_code_1.jsonWithLimit)(msg)}
 
 </details>
 </li>
@@ -102,7 +100,7 @@ The following lists all messages that were sent and received in case you want to
 
 ${explainPingPong(messages, response)}
 
-The complete round-trip took ${(0,
+The complete round-trip took ${(0, time_1.printAsMs)(end - start)} (including time required to validate the messages, start, and stop the internal mock server).
 
 </details>
 `;