@eagleoutice/flowr 2.2.5 → 2.2.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +8 -8
- package/cli/repl/commands/repl-cfg.js +4 -3
- package/cli/repl/commands/repl-dataflow.js +4 -3
- package/cli/repl/commands/repl-normalize.js +4 -3
- package/dataflow/environments/resolve-by-name.d.ts +24 -2
- package/dataflow/environments/resolve-by-name.js +85 -4
- package/documentation/print-query-wiki.js +6 -4
- package/documentation/print-readme.js +5 -5
- package/package.json +1 -1
- package/queries/catalog/call-context-query/call-context-query-executor.js +20 -6
- package/queries/catalog/call-context-query/call-context-query-format.d.ts +12 -6
- package/queries/catalog/call-context-query/call-context-query-format.js +8 -6
- package/queries/catalog/dependencies-query/dependencies-query-executor.js +157 -53
- package/queries/catalog/dependencies-query/dependencies-query-format.d.ts +16 -1
- package/queries/catalog/dependencies-query/dependencies-query-format.js +121 -112
- package/queries/catalog/resolve-value-query/resolve-value-query-executor.d.ts +1 -1
- package/queries/catalog/resolve-value-query/resolve-value-query-executor.js +3 -4
- package/queries/query-print.js +7 -4
- package/util/version.js +1 -1
package/README.md
CHANGED
@@ -48,7 +48,7 @@ It offers a wide variety of features, for example:
 
 ```shell
 $ docker run -it --rm eagleoutice/flowr # or npm run flowr
-flowR repl using flowR v2.2.
+flowR repl using flowR v2.2.5, R v4.4.0 (r-shell engine)
 R> :slicer test/testfiles/example.R --criterion "11@sum"
 ```
 
@@ -94,8 +94,8 @@ It offers a wide variety of features, for example:
 </details>
 
 
-* 🚀 **fast data and control-flow graphs**\
-Within just <i><span title="This measurement is automatically fetched from the latest
+* 🚀 **fast data- and control-flow graphs**\
+Within just <i><span title="This measurement is automatically fetched from the latest benchmark!">117.7 ms</i></span> (as of Feb 17, 2025),
 _flowR_ can analyze the data- and control-flow of the average real-world R script. See the [benchmarks](https://flowr-analysis.github.io/flowr/wiki/stats/benchmark) for more information,
 and consult the [wiki pages](https://github.com/flowr-analysis/flowr/wiki/Dataflow-Graph) for more details on the dataflow graph.
 
@@ -131,7 +131,7 @@ It offers a wide variety of features, for example:
 
 ```shell
 $ docker run -it --rm eagleoutice/flowr # or npm run flowr
-flowR repl using flowR v2.2.
+flowR repl using flowR v2.2.5, R v4.4.0 (r-shell engine)
 R> :dataflow* test/testfiles/example.R
 ```
 
@@ -375,7 +375,7 @@ It offers a wide variety of features, for example:
 52 -->|"argument"| 50
 ```
 
-(The analysis required
+(The analysis required _24.34 ms_ (including parse and normalize, using the [r-shell](https://github.com/flowr-analysis/flowr/wiki/Engines) engine) within the generation environment.)
 
 
 
@@ -390,11 +390,11 @@ It offers a wide variety of features, for example:
 
 If you want to use flowR and the features it provides, feel free to check out the:
 
-- [Visual Studio Code extension](https://marketplace.visualstudio.com/items?itemName=code-inspect.vscode-flowr): provides access to flowR
+- [Visual Studio Code extension](https://marketplace.visualstudio.com/items?itemName=code-inspect.vscode-flowr): provides access to flowR directly in VS Code (or [vscode.dev](https://vscode.dev/))
 - [RStudio Addin](https://github.com/flowr-analysis/rstudio-addin-flowr): integrates flowR into [RStudio](https://posit.co/downloads/)
-- [R package](https://github.com/flowr-analysis/flowr-r-adapter):
+- [R package](https://github.com/flowr-analysis/flowr-r-adapter): use flowR in your R scripts
 - [Docker image](https://hub.docker.com/r/eagleoutice/flowr): run flowR in a container, this also includes [flowR's server](https://github.com/flowr-analysis/flowr/wiki/Interface#communicating-with-the-server)
-- [NPM package](https://www.npmjs.com/package/@eagleoutice/flowr): include flowR in your TypeScript and JavaScript projects
+- [NPM package](https://www.npmjs.com/package/@eagleoutice/flowr): include flowR in your TypeScript and JavaScript projects
 
 ## ⭐ Getting Started
 
package/cli/repl/commands/repl-cfg.js
CHANGED
@@ -39,7 +39,6 @@ const default_pipelines_1 = require("../../../core/steps/pipeline/default-pipelines");
 const retriever_1 = require("../../../r-bridge/retriever");
 const cfg_2 = require("../../../util/mermaid/cfg");
 const ansi_1 = require("../../../util/ansi");
-const clipboard = Promise.resolve().then(() => __importStar(require('clipboardy')));
 async function controlflow(parser, remainingLine) {
 return await (0, default_pipelines_1.createDataflowPipeline)(parser, {
 request: (0, retriever_1.requestFromInput)(remainingLine.trim())
@@ -62,7 +61,8 @@ exports.controlflowCommand = {
 const mermaid = (0, cfg_2.cfgToMermaid)(cfg, result.normalize);
 output.stdout(mermaid);
 try {
-
+const clipboard = await Promise.resolve().then(() => __importStar(require('clipboardy')));
+clipboard.default.writeSync(mermaid);
 output.stdout(formatInfo(output, 'mermaid code'));
 }
 catch { /* do nothing this is a service thing */ }
@@ -79,7 +79,8 @@ exports.controlflowStarCommand = {
 const mermaid = (0, cfg_2.cfgToMermaidUrl)(cfg, result.normalize);
 output.stdout(mermaid);
 try {
-
+const clipboard = await Promise.resolve().then(() => __importStar(require('clipboardy')));
+clipboard.default.writeSync(mermaid);
 output.stdout(formatInfo(output, 'mermaid url'));
 }
 catch { /* do nothing this is a service thing */ }
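The only change in this file (mirrored below in repl-dataflow.js and repl-normalize.js) is that the eager top-level clipboardy import becomes a lazy import inside the try block, so the REPL command no longer touches the clipboard backend at module-load time. A minimal illustrative sketch of the pattern (not flowR code):

```typescript
// Minimal sketch of the lazy-clipboard pattern introduced above (illustrative, not flowR code).
async function copyBestEffort(text: string): Promise<void> {
    try {
        // Import clipboardy only when a copy is actually attempted, so environments
        // without a clipboard backend neither slow down nor break module loading.
        const clipboard = await import('clipboardy');
        clipboard.default.writeSync(text);
    } catch {
        // Clipboard support is a convenience; failures are ignored, as in the REPL commands.
    }
}
```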
package/cli/repl/commands/repl-dataflow.js
CHANGED
@@ -37,7 +37,6 @@ exports.dataflowStarCommand = exports.dataflowCommand = void 0;
 const default_pipelines_1 = require("../../../core/steps/pipeline/default-pipelines");
 const retriever_1 = require("../../../r-bridge/retriever");
 const dfg_1 = require("../../../util/mermaid/dfg");
-const clipboard = Promise.resolve().then(() => __importStar(require('clipboardy')));
 const ansi_1 = require("../../../util/ansi");
 /**
 * Obtain the dataflow graph using a known parser (such as the {@link RShell} or {@link TreeSitterExecutor}).
@@ -63,7 +62,8 @@ exports.dataflowCommand = {
 const mermaid = (0, dfg_1.graphToMermaid)({ graph: result.dataflow.graph, includeEnvironments: false }).string;
 output.stdout(mermaid);
 try {
-
+const clipboard = await Promise.resolve().then(() => __importStar(require('clipboardy')));
+clipboard.default.writeSync(mermaid);
 output.stdout(formatInfo(output, 'mermaid code', result.dataflow['.meta'].timing));
 }
 catch { /* do nothing this is a service thing */ }
@@ -79,7 +79,8 @@ exports.dataflowStarCommand = {
 const mermaid = (0, dfg_1.graphToMermaidUrl)(result.dataflow.graph, false);
 output.stdout(mermaid);
 try {
-
+const clipboard = await Promise.resolve().then(() => __importStar(require('clipboardy')));
+clipboard.default.writeSync(mermaid);
 output.stdout(formatInfo(output, 'mermaid url', result.dataflow['.meta'].timing));
 }
 catch { /* do nothing this is a service thing */ }
package/cli/repl/commands/repl-normalize.js
CHANGED
@@ -37,7 +37,6 @@ exports.normalizeStarCommand = exports.normalizeCommand = void 0;
 const default_pipelines_1 = require("../../../core/steps/pipeline/default-pipelines");
 const retriever_1 = require("../../../r-bridge/retriever");
 const ast_1 = require("../../../util/mermaid/ast");
-const clipboard = Promise.resolve().then(() => __importStar(require('clipboardy')));
 const ansi_1 = require("../../../util/ansi");
 async function normalize(parser, remainingLine) {
 return await (0, default_pipelines_1.createNormalizePipeline)(parser, {
@@ -60,7 +59,8 @@ exports.normalizeCommand = {
 const mermaid = (0, ast_1.normalizedAstToMermaid)(result.normalize.ast);
 output.stdout(mermaid);
 try {
-
+const clipboard = await Promise.resolve().then(() => __importStar(require('clipboardy')));
+clipboard.default.writeSync(mermaid);
 output.stdout(formatInfo(output, 'mermaid url', result.normalize['.meta'].timing));
 }
 catch { /* do nothing this is a service thing */ }
@@ -76,7 +76,8 @@ exports.normalizeStarCommand = {
 const mermaid = (0, ast_1.normalizedAstToMermaidUrl)(result.normalize.ast);
 output.stdout(mermaid);
 try {
-
+const clipboard = await Promise.resolve().then(() => __importStar(require('clipboardy')));
+clipboard.default.writeSync(mermaid);
 output.stdout(formatInfo(output, 'mermaid url', result.normalize['.meta'].timing));
 }
 catch { /* do nothing this is a service thing */ }
package/dataflow/environments/resolve-by-name.d.ts
CHANGED
@@ -4,7 +4,7 @@ import type { Identifier, IdentifierDefinition } from './identifier';
 import { ReferenceType } from './identifier';
 import type { NodeId } from '../../r-bridge/lang-4.x/ast/model/processing/node-id';
 import type { DataflowGraph } from '../graph/graph';
-import type { AstIdMap } from '../../r-bridge/lang-4.x/ast/model/processing/decorate';
+import type { AstIdMap, RNodeWithParent } from '../../r-bridge/lang-4.x/ast/model/processing/decorate';
 /**
 * Resolves a given identifier name to a list of its possible definition location using R scoping and resolving rules.
 *
@@ -21,9 +21,31 @@ export declare function resolvesToBuiltInConstant(name: Identifier | undefined,
 export declare function resolveToConstants(name: Identifier | undefined, environment: REnvironmentInformation): unknown[] | undefined;
 export declare function getAliases(sourceIds: readonly NodeId[], dataflow: DataflowGraph, environment: REnvironmentInformation): NodeId[] | undefined;
 /** Please use {@link resolveValueOfVariable} */
-export declare function
+export declare function trackAliasInEnvironments(identifier: Identifier | undefined, use: REnvironmentInformation, idMap?: AstIdMap): unknown[] | undefined;
+export declare function trackAliasesInGraph(id: NodeId, graph: DataflowGraph, idMap?: AstIdMap): unknown[] | undefined;
 /**
 * Convenience function using the variable resolver as specified within the configuration file
 * In the future we may want to have this set once at the start of the analysis
+*
+* @see {@link resolve} - for a more general approach which "evaluates" a node based on value resolve
 */
 export declare function resolveValueOfVariable(identifier: Identifier | undefined, environment: REnvironmentInformation, idMap?: AstIdMap): unknown[] | undefined;
+export interface ResolveInfo {
+/** The current environment used for name resolution */
+environment?: REnvironmentInformation;
+/** The id map to resolve the node if given as an id */
+idMap?: AstIdMap;
+/** The graph to resolve in */
+graph?: DataflowGraph;
+/** Whether to track variables */
+full?: boolean;
+}
+/**
+* Generalized {@link resolveValueOfVariable} function which evaluates a node based on the value resolve
+*
+* @param id - The node id or node to resolve
+* @param environment - The current environment used for name resolution
+* @param idMap - The id map to resolve the node if given as an id
+* @param full - Whether to track variables
+*/
+export declare function resolve(id: NodeId | RNodeWithParent, { environment, graph, idMap, full }: ResolveInfo): unknown[] | undefined;
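The new `resolve`/`ResolveInfo` API generalizes `resolveValueOfVariable` to arbitrary nodes. A hedged usage sketch follows; the deep import paths from the published package and the surrounding analysis setup are assumptions, not confirmed by this diff:

```typescript
// Hedged sketch: calling the new resolve() API on a node of an existing dataflow graph.
// The deep import paths below are assumptions about how the published package exposes these files.
import { resolve, type ResolveInfo } from '@eagleoutice/flowr/dataflow/environments/resolve-by-name';
import type { DataflowGraph } from '@eagleoutice/flowr/dataflow/graph/graph';
import type { NodeId } from '@eagleoutice/flowr/r-bridge/lang-4.x/ast/model/processing/node-id';

// Resolve the possible values of a node: literals (string/number/logical) resolve directly
// to their content, while symbols are traced through the graph when `full` is set and the
// configured variable resolver supports alias tracking.
function possibleValues(id: NodeId, graph: DataflowGraph): unknown[] | undefined {
    const info: ResolveInfo = { graph, idMap: graph.idMap, full: true };
    return resolve(id, info);
}
```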
package/dataflow/environments/resolve-by-name.js
CHANGED
@@ -4,8 +4,10 @@ exports.resolveByName = resolveByName;
 exports.resolvesToBuiltInConstant = resolvesToBuiltInConstant;
 exports.resolveToConstants = resolveToConstants;
 exports.getAliases = getAliases;
-exports.
+exports.trackAliasInEnvironments = trackAliasInEnvironments;
+exports.trackAliasesInGraph = trackAliasesInGraph;
 exports.resolveValueOfVariable = resolveValueOfVariable;
+exports.resolve = resolve;
 const environment_1 = require("./environment");
 const logic_1 = require("../../util/logic");
 const identifier_1 = require("./identifier");
@@ -14,6 +16,10 @@ const node_id_1 = require("../../r-bridge/lang-4.x/ast/model/processing/node-id");
 const vertex_1 = require("../graph/vertex");
 const config_1 = require("../../config");
 const assert_1 = require("../../util/assert");
+const type_1 = require("../../r-bridge/lang-4.x/ast/model/type");
+const visiting_queue_1 = require("../../slicing/static/visiting-queue");
+const fingerprint_1 = require("../../slicing/static/fingerprint");
+const edge_1 = require("../graph/edge");
 const FunctionTargetTypes = identifier_1.ReferenceType.Function | identifier_1.ReferenceType.BuiltInFunction | identifier_1.ReferenceType.Unknown | identifier_1.ReferenceType.Argument | identifier_1.ReferenceType.Parameter;
 const VariableTargetTypes = identifier_1.ReferenceType.Variable | identifier_1.ReferenceType.Parameter | identifier_1.ReferenceType.Argument | identifier_1.ReferenceType.Unknown;
 const ConstantTargetTypes = identifier_1.ReferenceType.Constant | identifier_1.ReferenceType.BuiltInConstant | identifier_1.ReferenceType.Unknown;
@@ -149,11 +155,11 @@ function getAliases(sourceIds, dataflow, environment) {
 return [...definitions];
 }
 /** Please use {@link resolveValueOfVariable} */
-function
+function trackAliasInEnvironments(identifier, use, idMap) {
 if (identifier === undefined) {
 return undefined;
 }
-const defs = resolveByName(identifier,
+const defs = resolveByName(identifier, use);
 if (defs === undefined) {
 return undefined;
 }
@@ -183,17 +189,92 @@ function resolveToValues(identifier, environment, idMap) {
 }
 return values;
 }
+function trackAliasesInGraph(id, graph, idMap) {
+idMap ??= graph.idMap;
+(0, assert_1.guard)(idMap !== undefined, 'The ID map is required to get the lineage of a node');
+const start = graph.getVertex(id);
+(0, assert_1.guard)(start !== undefined, 'Unable to find start for alias tracking');
+const queue = new visiting_queue_1.VisitingQueue(25);
+const clean = (0, environment_1.initializeCleanEnvironments)();
+const cleanFingerprint = (0, fingerprint_1.envFingerprint)(clean);
+queue.add(id, clean, cleanFingerprint, false);
+const resultIds = [];
+while (queue.nonEmpty()) {
+const { id, baseEnvironment } = queue.next();
+const res = graph.get(id);
+if (!res) {
+continue;
+}
+const [vertex, outgoingEdges] = res;
+if (vertex.tag === vertex_1.VertexType.Value) {
+resultIds.push(id);
+continue;
+}
+// travel all read and defined-by edges
+for (const [targetId, edge] of outgoingEdges) {
+if ((0, edge_1.edgeIncludesType)(edge.types, edge_1.EdgeType.Reads | edge_1.EdgeType.DefinedBy | edge_1.EdgeType.DefinedByOnCall)) {
+queue.add(targetId, baseEnvironment, cleanFingerprint, false);
+}
+}
+}
+if (resultIds.length === 0) {
+return undefined;
+}
+const values = [];
+for (const id of resultIds) {
+const node = idMap.get(id);
+if (node !== undefined) {
+values.push(node.content);
+}
+}
+return values;
+}
 /**
 * Convenience function using the variable resolver as specified within the configuration file
 * In the future we may want to have this set once at the start of the analysis
+*
+* @see {@link resolve} - for a more general approach which "evaluates" a node based on value resolve
 */
 function resolveValueOfVariable(identifier, environment, idMap) {
 const resolve = (0, config_1.getConfig)().solver.variables;
 switch (resolve) {
-case config_1.VariableResolve.Alias: return
+case config_1.VariableResolve.Alias: return trackAliasInEnvironments(identifier, environment, idMap);
 case config_1.VariableResolve.Builtin: return resolveToConstants(identifier, environment);
 case config_1.VariableResolve.Disabled: return [];
 default: (0, assert_1.assertUnreachable)(resolve);
 }
 }
+/**
+* Generalized {@link resolveValueOfVariable} function which evaluates a node based on the value resolve
+*
+* @param id - The node id or node to resolve
+* @param environment - The current environment used for name resolution
+* @param idMap - The id map to resolve the node if given as an id
+* @param full - Whether to track variables
+*/
+function resolve(id, { environment, graph, idMap, full }) {
+idMap ??= graph?.idMap;
+const node = typeof id === 'object' ? id : idMap?.get(id);
+if (node === undefined) {
+return undefined;
+}
+switch (node.type) {
+case type_1.RType.Symbol:
+if (environment) {
+return full ? resolveValueOfVariable(node.lexeme, environment, idMap) : undefined;
+}
+else if (graph && (0, config_1.getConfig)().solver.variables === config_1.VariableResolve.Alias) {
+return full ? trackAliasesInGraph(node.info.id, graph, idMap) : undefined;
+}
+else {
+return undefined;
+}
+case type_1.RType.String:
+case type_1.RType.Number:
+case type_1.RType.Logical:
+return [node.content];
+default:
+return undefined;
+}
+}
 //# sourceMappingURL=resolve-by-name.js.map
package/documentation/print-query-wiki.js
CHANGED
@@ -208,9 +208,10 @@ ${await (0, doc_query_1.showQuery)(shell, example_query_code_1.exampleQueryCode,
 buildExplanation: async (shell) => {
 const exampleCode = 'x <- 1\nprint(x)';
 return `
-With this query you can use flowR's value-tracking capabilities to resolve identifiers to all potential values they may have at runtime (if possible).
+With this query you can use flowR's value-tracking capabilities to resolve identifiers to all potential values they may have at runtime (if possible).
+The extent to which flowR traces values (e.g., built-ins vs. constants) can be configured in flowR's Configuration file (see the [Interface](${doc_files_1.FlowrWikiBaseRef}/Interface) wiki page for more information).
 
-Using the example code \`${exampleCode}
+Using the example code \`${exampleCode}\` (with the \`print(x)\` in the second line), the following query returns all values of \`x\` in the code:
 ${await (0, doc_query_1.showQuery)(shell, exampleCode, [{
 type: 'resolve-value',
 criteria: ['2@x']
@@ -419,7 +420,7 @@ ${await (0, doc_query_1.showQuery)(shell, longerCode, [{
 type: 'dependencies'
 }], { showCode: false, collapseQuery: true, collapseResult: true })}
 
-Currently the dependency extraction may fail as it is essentially a set of heuristics guessing the dependencies.
+Currently, the dependency extraction may fail as it is essentially a set of heuristics guessing the dependencies.
 We welcome any feedback on this (consider opening a [new issue](${doc_issue_1.NewIssueUrl})).
 
 In the meantime we offer several properties to overwrite the default behavior (e.g., function names that should be collected)
@@ -427,12 +428,13 @@ In the meantime we offer several properties to overwrite the default behavior (e.g., function names that should be collected)
 ${await (0, doc_query_1.showQuery)(shell, longerCode, [{
 type: 'dependencies',
 ignoreDefaultFunctions: true,
-libraryFunctions: [{ name: 'print', argIdx: 0, argName: 'library' }],
+libraryFunctions: [{ name: 'print', argIdx: 0, argName: 'library', resolveValue: true }],
 sourceFunctions: [],
 readFunctions: [],
 writeFunctions: []
 }], { showCode: false, collapseQuery: false, collapseResult: true })}
 
+Here, \`resolveValue\` tells the dependency query to resolve the value of this argument in case it is not a constant.
 `;
 }
 });
package/documentation/print-readme.js
CHANGED
@@ -64,8 +64,8 @@ The following showcases the dependency view of the [Visual Studio Code extension
 
 `), ' ')}
 
-* 🚀 **fast data and control-flow graphs**\\
-Within just ${'<i>' + (0, html_hover_over_1.textWithTooltip)((0, numbers_1.roundToDecimals)(await (0, doc_benchmarks_1.getLatestDfAnalysisTime)('"social-science" Benchmark Suite (tree-sitter)'), 1) + ' ms</i>', 'This measurement is automatically fetched from the latest
+* 🚀 **fast data- and control-flow graphs**\\
+Within just ${'<i>' + (0, html_hover_over_1.textWithTooltip)((0, numbers_1.roundToDecimals)(await (0, doc_benchmarks_1.getLatestDfAnalysisTime)('"social-science" Benchmark Suite (tree-sitter)'), 1) + ' ms</i>', 'This measurement is automatically fetched from the latest benchmark!')} (as of ${new Date(await (0, doc_benchmarks_1.getLastBenchmarkUpdate)()).toLocaleDateString('en-US', dateOptions)}),
 _flowR_ can analyze the data- and control-flow of the average real-world R script. See the [benchmarks](https://flowr-analysis.github.io/flowr/wiki/stats/benchmark) for more information,
 and consult the [wiki pages](${doc_files_1.FlowrWikiBaseRef}/Dataflow-Graph) for more details on the dataflow graph.
 
@@ -90,11 +90,11 @@ ${await (0, doc_dfg_1.printDfGraphForCode)(shell, (0, doc_files_1.getFileContent
 
 If you want to use flowR and the features it provides, feel free to check out the:
 
-- [Visual Studio Code extension](${doc_files_1.FlowrVsCode}): provides access to flowR
+- [Visual Studio Code extension](${doc_files_1.FlowrVsCode}): provides access to flowR directly in VS Code (or [vscode.dev](https://vscode.dev/))
 - [RStudio Addin](${doc_files_1.FlowrGithubBaseRef}/rstudio-addin-flowr): integrates flowR into [RStudio](https://posit.co/downloads/)
-- [R package](${doc_files_1.FlowrGithubBaseRef}/flowr-r-adapter):
+- [R package](${doc_files_1.FlowrGithubBaseRef}/flowr-r-adapter): use flowR in your R scripts
 - [Docker image](${doc_files_1.FlowrDockerRef}): run flowR in a container, this also includes [flowR's server](${doc_files_1.FlowrWikiBaseRef}/Interface#communicating-with-the-server)
-- [NPM package](${doc_files_1.FlowrNpmRef}): include flowR in your TypeScript and JavaScript projects
+- [NPM package](${doc_files_1.FlowrNpmRef}): include flowR in your TypeScript and JavaScript projects
 
 ## ⭐ Getting Started
 
package/package.json
CHANGED
package/queries/catalog/call-context-query/call-context-query-executor.js
CHANGED
@@ -55,7 +55,10 @@ function promoteQueryCallNames(queries) {
 ...q.fileFilter,
 filter: new RegExp(q.fileFilter.filter)
 },
-linkTo: {
+linkTo: Array.isArray(q.linkTo) ? q.linkTo.map(l => ({
+...l,
+callName: new RegExp(l.callName)
+})) : {
 ...q.linkTo,
 /* we have to add another promotion layer whenever we add something without this call name */
 callName: new RegExp(q.linkTo.callName)
@@ -208,13 +211,24 @@ function executeCallContextQueries({ dataflow: { graph }, ast }, queries) {
 }
 let linkedIds = undefined;
 if (cfg && isSubCallQuery(query)) {
-
-const
-
-
+const linked = Array.isArray(query.linkTo) ? query.linkTo : [query.linkTo];
+for (const link of linked) {
+/* if we have a linkTo query, we have to find the last call */
+const lastCall = (0, identify_link_to_last_call_relation_1.identifyLinkToLastCallRelation)(nodeId, cfg.graph, graph, link);
+if (lastCall) {
+linkedIds ??= new Set();
+for (const l of lastCall) {
+if (link.attachLinkInfo) {
+linkedIds.add({ id: l, info: link.attachLinkInfo });
+}
+else {
+linkedIds.add(l);
+}
+}
+}
 }
 }
-initialIdCollector.add(query.kind ?? '.', query.subkind ?? '.', (0, objects_1.compactRecord)({ id: nodeId, name: info.name, calls: targets, linkedIds }));
+initialIdCollector.add(query.kind ?? '.', query.subkind ?? '.', (0, objects_1.compactRecord)({ id: nodeId, name: info.name, calls: targets, linkedIds: linkedIds ? [...linkedIds] : undefined }));
 }
 }
 removeIdenticalDuplicates(initialIdCollector);
package/queries/catalog/call-context-query/call-context-query-format.d.ts
CHANGED
@@ -9,6 +9,7 @@ import { CallTargets } from './identify-link-to-last-call-relation';
 import type { DataflowGraph } from '../../../dataflow/graph/graph';
 import type { DataflowGraphVertexInfo } from '../../../dataflow/graph/vertex';
 import type { CascadeAction } from './cascade-action';
+import type { NoInfo } from '../../../r-bridge/lang-4.x/ast/model/model';
 export interface FileFilter<FilterType> {
 /**
 * Regex that a node's file attribute must match to be considered
@@ -66,9 +67,11 @@ interface LinkToLastCall<CallName extends RegExp | string = RegExp | string> ext
 */
 readonly cascadeIf?: (target: DataflowGraphVertexInfo, from: NodeId, graph: DataflowGraph) => CascadeAction;
 }
-export type LinkTo<CallName extends RegExp | string> = LinkToLastCall<CallName
-
-
+export type LinkTo<CallName extends RegExp | string = RegExp | string, AttachLinkInfo = NoInfo> = (LinkToLastCall<CallName>) & {
+attachLinkInfo?: AttachLinkInfo;
+};
+export interface SubCallContextQueryFormat<CallName extends RegExp | string = RegExp | string, AttachLinkInfo = NoInfo> extends DefaultCallContextQueryFormat<CallName> {
+readonly linkTo: LinkTo<CallName, AttachLinkInfo> | LinkTo<CallName, AttachLinkInfo>[];
 }
 export interface CallContextQuerySubKindResult {
 /** The id of the call vertex identified within the supplied dataflow graph */
@@ -81,8 +84,11 @@ export interface CallContextQuerySubKindResult {
 * An empty array means that the call targets only non-local functions.
 */
 readonly calls?: readonly NodeId[];
-/** ids attached by the linkTo query */
-readonly linkedIds?: readonly NodeId
+/** ids attached by the linkTo query, if you attached information with the `attachLinkInfo` field you can find it here */
+readonly linkedIds?: readonly (NodeId | {
+id: NodeId;
+info: object;
+})[];
 /**
 * (Direct) alias locations this call stems from
 */
@@ -95,7 +101,7 @@ export type CallContextQueryKindResult = Record<string, {
 export interface CallContextQueryResult extends BaseQueryResult {
 readonly kinds: CallContextQueryKindResult;
 }
-export type CallContextQuery<CallName extends RegExp | string = RegExp | string> = DefaultCallContextQueryFormat<CallName> | SubCallContextQueryFormat<CallName>;
+export type CallContextQuery<CallName extends RegExp | string = RegExp | string, AttachLinkInfo = NoInfo> = DefaultCallContextQueryFormat<CallName> | SubCallContextQueryFormat<CallName, AttachLinkInfo>;
 export declare const CallContextQueryDefinition: {
 readonly executor: typeof executeCallContextQueries;
 readonly asciiSummarizer: (formatter: OutputFormatter, processed: PipelineOutput<typeof DEFAULT_DATAFLOW_PIPELINE>, queryResults: BaseQueryResult, result: string[]) => boolean;
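The `linkTo` field of a sub call-context query now also accepts an array of links, each of which may carry arbitrary `attachLinkInfo` that is echoed back in `linkedIds`. A hedged sketch of such a query object; the `type` tag and the import path are assumptions based on flowR's query catalog, not part of this diff:

```typescript
// Hedged sketch: a call-context query using the extended linkTo shape (array + attachLinkInfo).
// The import path and the 'call-context' type tag are assumptions; the field names mirror the
// declarations in call-context-query-format.d.ts above.
import type { CallContextQuery } from '@eagleoutice/flowr/queries/catalog/call-context-query/call-context-query-format';

const query: CallContextQuery<string, { reason: string }> = {
    type: 'call-context',
    callName: '^points$',
    kind: 'graphics',
    subkind: 'add-to-plot',
    linkTo: [
        // Each linked call id returned in `linkedIds` carries the attached info object.
        { type: 'link-to-last-call', callName: '^plot$', attachLinkInfo: { reason: 'base plot' } },
        { type: 'link-to-last-call', callName: '^pdf$', attachLinkInfo: { reason: 'graphics device' } }
    ]
};
```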
package/queries/catalog/call-context-query/call-context-query-format.js
CHANGED
@@ -10,6 +10,13 @@ const time_1 = require("../../../util/time");
 const joi_1 = __importDefault(require("joi"));
 const query_print_1 = require("../../query-print");
 const identify_link_to_last_call_relation_1 = require("./identify-link-to-last-call-relation");
+const CallContextQueryLinkTo = joi_1.default.object({
+type: joi_1.default.string().valid('link-to-last-call').required().description('The type of the linkTo sub-query.'),
+callName: joi_1.default.string().required().description('Regex regarding the function name of the last call. Similar to `callName`, strings are interpreted as a regular expression.'),
+ignoreIf: joi_1.default.function().optional().description('Should we ignore this (source) call? Currently, there is no well working serialization for this.'),
+cascadeIf: joi_1.default.function().optional().description('Should we continue searching after the link was created? Currently, there is no well working serialization for this.'),
+attachLinkInfo: joi_1.default.object().optional().description('Additional information to attach to the link.')
+});
 exports.CallContextQueryDefinition = {
 executor: call_context_query_executor_1.executeCallContextQueries,
 asciiSummarizer: (formatter, processed, queryResults, result) => {
@@ -30,12 +37,7 @@ exports.CallContextQueryDefinition = {
 fileFilter: joi_1.default.string().required().description('Regex that a node\'s file attribute must match to be considered'),
 includeUndefinedFiles: joi_1.default.boolean().optional().description('If `fileFilter` is set, but a nodes `file` attribute is `undefined`, should we include it in the results? Defaults to `true`.')
 }).optional().description('Filter that, when set, a node\'s file attribute must match to be considered'),
-linkTo: joi_1.default.
-type: joi_1.default.string().valid('link-to-last-call').required().description('The type of the linkTo sub-query.'),
-callName: joi_1.default.string().required().description('Regex regarding the function name of the last call. Similar to `callName`, strings are interpreted as a regular expression.'),
-ignoreIf: joi_1.default.function().optional().description('Should we ignore this (source) call? Currently, there is no well working serialization for this.'),
-cascadeIf: joi_1.default.function().optional().description('Should we continue searching after the link was created? Currently, there is no well working serialization for this.')
-}).optional().description('Links the current call to the last call of the given kind. This way, you can link a call like `points` to the latest graphics plot etc.')
+linkTo: joi_1.default.alternatives(CallContextQueryLinkTo, joi_1.default.array().items(CallContextQueryLinkTo)).optional().description('Links the current call to the last call of the given kind. This way, you can link a call like `points` to the latest graphics plot etc.')
 }).description('Call context query used to find calls in the dataflow graph')
 };
 //# sourceMappingURL=call-context-query-format.js.map
package/queries/catalog/dependencies-query/dependencies-query-executor.js
CHANGED
@@ -3,15 +3,28 @@ Object.defineProperty(exports, "__esModule", { value: true });
 exports.executeDependenciesQuery = executeDependenciesQuery;
 const query_1 = require("../../query");
 const dependencies_query_format_1 = require("./dependencies-query-format");
+const vertex_1 = require("../../../dataflow/graph/vertex");
 const graph_1 = require("../../../dataflow/graph/graph");
 const log_1 = require("../../../util/log");
 const type_1 = require("../../../r-bridge/lang-4.x/ast/model/type");
-const retriever_1 = require("../../../r-bridge/retriever");
 const r_function_call_1 = require("../../../r-bridge/lang-4.x/ast/model/nodes/r-function-call");
 const visitor_1 = require("../../../r-bridge/lang-4.x/ast/model/processing/visitor");
 const assert_1 = require("../../../util/assert");
 const objects_1 = require("../../../util/objects");
-const
+const resolve_by_name_1 = require("../../../dataflow/environments/resolve-by-name");
+function collectNamespaceAccesses(data, libraries) {
+/* for libraries, we have to additionally track all uses of `::` and `:::`, for this we currently simply traverse all uses */
+(0, visitor_1.visitAst)(data.ast.ast, n => {
+if (n.type === type_1.RType.Symbol && n.namespace) {
+/* we should improve the identification of ':::' */
+libraries.push({
+nodeId: n.info.id,
+functionName: (n.info.fullLexeme ?? n.lexeme).includes(':::') ? ':::' : '::',
+libraryName: n.namespace
+});
+}
+});
+}
 function executeDependenciesQuery(data, queries) {
 if (queries.length !== 1) {
 log_1.log.warn('Dependencies query expects only up to one query, but got ', queries.length, 'only using the first query');
@@ -25,58 +38,46 @@ function executeDependenciesQuery(data, queries) {
 const writeFunctions = getFunctionsToCheck(query.writeFunctions, ignoreDefault, dependencies_query_format_1.WriteFunctions);
 const numberOfFunctions = libraryFunctions.length + sourceFunctions.length + readFunctions.length + writeFunctions.length;
 const results = numberOfFunctions === 0 ? { kinds: {}, '.meta': { timing: 0 } } : (0, query_1.executeQueriesOfSameType)(data, ...makeCallContextQuery(libraryFunctions, 'library'), ...makeCallContextQuery(sourceFunctions, 'source'), ...makeCallContextQuery(readFunctions, 'read'), ...makeCallContextQuery(writeFunctions, 'write'));
-
+function getLexeme(argument, id) {
 if ((argument && argument !== dependencies_query_format_1.Unknown) || !id) {
 return undefined;
 }
 let get = data.ast.idMap.get(id);
-if (
-return undefined;
-}
-if (get.type === type_1.RType.Argument) {
+if (get?.type === type_1.RType.Argument) {
 get = get.value;
 }
 return get?.info.fullLexeme ?? get?.lexeme;
-}
-const libraries = getResults(data, results, 'library', libraryFunctions, (id, vertex, argId,
+}
+const libraries = getResults(data, results, 'library', libraryFunctions, (id, vertex, argId, value, linkedIds) => ({
 nodeId: id,
 functionName: vertex.name,
-lexemeOfArgument: getLexeme(
-libraryName:
-
+lexemeOfArgument: getLexeme(value, argId),
+libraryName: value ?? dependencies_query_format_1.Unknown,
+linkedIds: linkedIds?.length ? linkedIds : undefined
+}));
 if (!ignoreDefault) {
-
-(0, visitor_1.visitAst)(data.ast.ast, n => {
-if (n.type === type_1.RType.Symbol && n.namespace) {
-/* we should improve the identification of ':::' */
-libraries.push({
-nodeId: n.info.id,
-functionName: (n.info.fullLexeme ?? n.lexeme).includes(':::') ? ':::' : '::',
-libraryName: n.namespace
-});
-}
-});
+collectNamespaceAccesses(data, libraries);
 }
-const sourcedFiles = getResults(data, results, 'source', sourceFunctions, (id, vertex, argId,
+const sourcedFiles = getResults(data, results, 'source', sourceFunctions, (id, vertex, argId, value, linkedIds) => ({
 nodeId: id,
 functionName: vertex.name,
-file:
-lexemeOfArgument: getLexeme(
+file: value ?? dependencies_query_format_1.Unknown,
+lexemeOfArgument: getLexeme(value, argId),
 linkedIds: linkedIds?.length ? linkedIds : undefined
 }));
-const readData = getResults(data, results, 'read', readFunctions, (id, vertex, argId,
+const readData = getResults(data, results, 'read', readFunctions, (id, vertex, argId, value, linkedIds) => ({
 nodeId: id,
 functionName: vertex.name,
-source:
-lexemeOfArgument: getLexeme(
+source: value ?? dependencies_query_format_1.Unknown,
+lexemeOfArgument: getLexeme(value, argId),
 linkedIds: linkedIds?.length ? linkedIds : undefined
 }));
-const writtenData = getResults(data, results, 'write', writeFunctions, (id, vertex, argId,
+const writtenData = getResults(data, results, 'write', writeFunctions, (id, vertex, argId, value, linkedIds) => ({
 nodeId: id,
 functionName: vertex.name,
 // write functions that don't have argIndex are assumed to write to stdout
-destination:
-lexemeOfArgument: getLexeme(
+destination: value ?? (linkedIds?.length ? dependencies_query_format_1.Unknown : 'stdout'),
+lexemeOfArgument: getLexeme(value, argId),
 linkedIds: linkedIds?.length ? linkedIds : undefined
 }));
 return {
@@ -93,31 +94,32 @@ function makeCallContextQuery(functions, kind) {
 includeAliases: false,
 callNameExact: true,
 subkind: f.name,
-linkTo: f.linkTo
+linkTo: f.linkTo,
 kind
 }));
 }
-function
+function dropInfoOnLinkedIds(linkedIds) {
+if (!linkedIds) {
+return undefined;
+}
+return linkedIds.map(id => typeof id === 'object' ? id.id : id);
+}
+function getResults(data, results, kind, functions, makeInfo) {
 const kindEntries = Object.entries(results?.kinds[kind]?.subkinds ?? {});
 return kindEntries.flatMap(([name, results]) => results.flatMap(({ id, linkedIds }) => {
 const vertex = data.dataflow.graph.getVertex(id);
 const info = functions.find(f => f.name === name);
-
-
-
-
-
-}
-}
-const args = index !== undefined ? getArgumentValue(data, vertex, index, additionalAllowedTypes) : undefined;
-if (!args) {
-const record = (0, objects_1.compactRecord)(makeInfo(id, vertex, undefined, undefined, linkedIds));
+const args = getArgumentValue(data, vertex, info.argIdx, info.argName, info.resolveValue);
+const linkedArgs = collectValuesFromLinks(args, data, linkedIds);
+const foundValues = linkedArgs ?? args;
+if (!foundValues) {
+const record = (0, objects_1.compactRecord)(makeInfo(id, vertex, undefined, undefined, dropInfoOnLinkedIds(linkedIds)));
 return record ? [record] : [];
 }
 const results = [];
-for (const [arg, values] of
+for (const [arg, values] of foundValues.entries()) {
 for (const value of values) {
-const result = (0, objects_1.compactRecord)(makeInfo(id, vertex, arg, value, linkedIds));
+const result = (0, objects_1.compactRecord)(makeInfo(id, vertex, arg, value, dropInfoOnLinkedIds(linkedIds)));
 if (result) {
 results.push(result);
 }
@@ -126,11 +128,110 @@ function getResults(data, results, kind, functions, makeInfo, additionalAllowedTypes) {
 return results;
 })) ?? [];
 }
+function collectValuesFromLinks(args, data, linkedIds) {
+if (!linkedIds || linkedIds.length === 0) {
+return undefined;
+}
+const hasAtLeastAValue = args !== undefined && [...args.values()].some(set => [...set].some(v => v !== dependencies_query_format_1.Unknown && v !== undefined));
+const map = new Map();
+for (const linkedId of linkedIds) {
+if (typeof linkedId !== 'object' || !linkedId.info) {
+continue;
+}
+const info = linkedId.info;
+// do not collect this one
+if (hasAtLeastAValue && info.when !== dependencies_query_format_1.DependencyInfoLinkConstraint.Always) {
+continue;
+}
+// collect this one!
+const vertex = data.dataflow.graph.getVertex(linkedId.id);
+if (vertex === undefined || vertex.tag !== vertex_1.VertexType.FunctionCall) {
+continue;
+}
+const args = getArgumentValue(data, vertex, info.argIdx, info.argName, info.resolveValue);
+if (args === undefined) {
+continue;
+}
+for (const [arg, values] of args.entries()) {
+const set = map.get(arg) ?? new Set();
+map.set(arg, set);
+for (const value of values) {
+set.add(value);
+}
+}
+}
+return map;
+}
+function hasCharacterOnly(data, vertex, idMap) {
+if (!vertex.args || vertex.args.length === 0 || !idMap) {
+return false;
+}
+const treatAsChar = getArgumentValue(data, vertex, 5, 'character.only', true);
+if (!treatAsChar) {
+return false;
+}
+const hasTrue = [...treatAsChar.values()].some(set => set?.has('TRUE'));
+const hasFalse = hasTrue ? [...treatAsChar.values()].some(set => set === undefined || set.has('FALSE')) : false;
+if (hasTrue && hasFalse) {
+return 'maybe';
+}
+else {
+return hasTrue;
+}
+}
+function resolveBasedOnConfig(data, vertex, argument, environment, idMap, resolveValue) {
+let full = true;
+if (!resolveValue) {
+full = false;
+}
+if (resolveValue === 'library') {
+const hasChar = hasCharacterOnly(data, vertex, idMap);
+if (hasChar === false) {
+if (argument.type === type_1.RType.Symbol) {
+return [argument.lexeme];
+}
+full = false;
+}
+}
+return (0, resolve_by_name_1.resolve)(argument, { environment, graph: data.dataflow.graph, full });
+}
+function unwrapRValue(value) {
+if (value === undefined) {
+return undefined;
+}
+switch (typeof value) {
+case 'string':
+return value;
+case 'number':
+return value.toString();
+case 'boolean':
+return value ? 'TRUE' : 'FALSE';
+}
+if (typeof value !== 'object' || value === null) {
+return JSON.stringify(value);
+}
+if ('str' in value) {
+return value.str;
+}
+else if ('num' in value) {
+return value.num.toString();
+}
+else {
+return JSON.stringify(value);
+}
+}
 /**
 * Get the values of all arguments matching the criteria.
 */
-function getArgumentValue(
-
+function getArgumentValue(data, vertex, argumentIndex, argumentName, resolveValue) {
+const graph = data.dataflow.graph;
+if (argumentName) {
+const arg = vertex?.args.findIndex(arg => arg !== r_function_call_1.EmptyArgument && arg.name === argumentName);
+if (arg >= 0) {
+argumentIndex = arg;
+}
+}
+if (!vertex || argumentIndex === undefined) {
 return undefined;
 }
 if (argumentIndex === 'unnamed') {
@@ -143,9 +244,11 @@ function getArgumentValue({ dataflow: { graph } }, vertex, argumentIndex, additionalAllowedTypes) {
 valueNode = valueNode.value;
 }
 if (valueNode) {
-
-
-
+// TDODO: extend vector support etc.
+// this should be evaluated in the callee-context
+const values = resolveBasedOnConfig(data, vertex, valueNode, vertex.environment, graph.idMap, resolveValue)
+?.map(unwrapRValue) ?? [dependencies_query_format_1.Unknown];
+map.set(ref, new Set(values));
 }
 }
 return map;
@@ -160,8 +263,9 @@ function getArgumentValue({ dataflow: { graph } }, vertex, argumentIndex, additionalAllowedTypes) {
 valueNode = valueNode.value;
 }
 if (valueNode) {
-const
-
+const values = resolveBasedOnConfig(data, vertex, valueNode, vertex.environment, graph.idMap, resolveValue)
+?.map(unwrapRValue) ?? [dependencies_query_format_1.Unknown];
+return new Map([[arg, new Set(values)]]);
 }
 }
 return undefined;
package/queries/catalog/dependencies-query/dependencies-query-format.d.ts
CHANGED
@@ -2,7 +2,21 @@ import type { BaseQueryFormat, BaseQueryResult } from '../../base-query-format';
 import type { NodeId } from '../../../r-bridge/lang-4.x/ast/model/processing/node-id';
 import Joi from 'joi';
 import { executeDependenciesQuery } from './dependencies-query-executor';
+import type { LinkTo } from '../call-context-query/call-context-query-format';
 export declare const Unknown = "unknown";
+/** when to read the argument value from a linked function */
+export declare enum DependencyInfoLinkConstraint {
+Always = "always",
+IfUnknown = "if-unknown"
+}
+/**
+* A dependency link may have attached information. If you pass it, we try to resolve the argument value from the linked function
+* if the `when` constraint is met.
+*/
+export type DependencyInfoLink = LinkTo<RegExp | string, Omit<FunctionInfo, 'name' | 'linkTo'> & {
+when: DependencyInfoLinkConstraint;
+} | undefined>;
+export type DependencyInfoLinkAttachedInfo = DependencyInfoLink['attachLinkInfo'];
 export declare const LibraryFunctions: FunctionInfo[];
 export declare const SourceFunctions: FunctionInfo[];
 export declare const ReadFunctions: FunctionInfo[];
@@ -11,7 +25,8 @@ export interface FunctionInfo {
 name: string;
 argIdx?: number | 'unnamed';
 argName?: string;
-linkTo?:
+linkTo?: DependencyInfoLink[];
+resolveValue?: boolean | 'library';
 }
 export interface DependenciesQuery extends BaseQueryFormat {
 readonly type: 'dependencies';
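`FunctionInfo` now carries an optional `resolveValue` flag (and `linkTo` links typed as `DependencyInfoLink[]`). A hedged sketch of a dependencies query that supplies a custom library function with value resolution enabled, mirroring the wiki example in print-query-wiki.js above; the import path is an assumption:

```typescript
// Hedged sketch: a dependencies query with the new `resolveValue` flag on a custom FunctionInfo.
// Field names mirror the declarations above and the wiki example; the import path is an assumption.
import type { DependenciesQuery, FunctionInfo } from '@eagleoutice/flowr/queries/catalog/dependencies-query/dependencies-query-format';

// Treat `print` as a library-loading function and resolve its first argument's value
// when it is not a constant (e.g. when the package name is held in a variable).
const libraryFunctions: FunctionInfo[] = [
    { name: 'print', argIdx: 0, argName: 'library', resolveValue: true }
];

const query: DependenciesQuery = {
    type: 'dependencies',
    ignoreDefaultFunctions: true,
    libraryFunctions,
    sourceFunctions: [],
    readFunctions: [],
    writeFunctions: []
};
```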
package/queries/catalog/dependencies-query/dependencies-query-format.js
CHANGED
@@ -3,146 +3,155 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
 return (mod && mod.__esModule) ? mod : { "default": mod };
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.DependenciesQueryDefinition = exports.WriteFunctions = exports.ReadFunctions = exports.SourceFunctions = exports.LibraryFunctions = exports.Unknown = void 0;
+exports.DependenciesQueryDefinition = exports.WriteFunctions = exports.ReadFunctions = exports.SourceFunctions = exports.LibraryFunctions = exports.DependencyInfoLinkConstraint = exports.Unknown = void 0;
 const ansi_1 = require("../../../util/ansi");
 const time_1 = require("../../../util/time");
 const joi_1 = __importDefault(require("joi"));
 const dependencies_query_executor_1 = require("./dependencies-query-executor");
 exports.Unknown = 'unknown';
+/** when to read the argument value from a linked function */
+var DependencyInfoLinkConstraint;
+(function (DependencyInfoLinkConstraint) {
+DependencyInfoLinkConstraint["Always"] = "always";
+DependencyInfoLinkConstraint["IfUnknown"] = "if-unknown";
+})(DependencyInfoLinkConstraint || (exports.DependencyInfoLinkConstraint = DependencyInfoLinkConstraint = {}));
 // these lists are originally based on https://github.com/duncantl/CodeDepends/blob/7fd96dfee16b252e5f642c77a7ababf48e9326f8/R/codeTypes.R
 exports.LibraryFunctions = [
-{ name: 'library', argIdx: 0, argName: 'package' },
-{ name: 'require', argIdx: 0, argName: 'package' },
-{ name: 'loadNamespace', argIdx: 0, argName: 'package' },
-{ name: 'attachNamespace', argIdx: 0, argName: 'ns' },
-{ name: 'attach', argIdx: 0, argName: 'what' },
-{ name: 'groundhog.library', argIdx: 0, argName: 'pkg' },
-{ name: 'p_load', argIdx: 'unnamed' }, // pacman
-{ name: 'p_load_gh', argIdx: 'unnamed' }, // pacman
-{ name: 'from_import', argIdx: 0, argName: 'package' }, // easypackages
-{ name: 'libraries', argIdx: 'unnamed' }, // easypackages
-{ name: 'shelf', argIdx: 'unnamed' } // librarian
+{ name: 'library', argIdx: 0, argName: 'package', resolveValue: 'library' },
+{ name: 'require', argIdx: 0, argName: 'package', resolveValue: true },
+{ name: 'loadNamespace', argIdx: 0, argName: 'package', resolveValue: true },
+{ name: 'attachNamespace', argIdx: 0, argName: 'ns', resolveValue: true },
+{ name: 'attach', argIdx: 0, argName: 'what', resolveValue: true },
+{ name: 'groundhog.library', argIdx: 0, argName: 'pkg', resolveValue: true },
+{ name: 'p_load', argIdx: 'unnamed', resolveValue: 'library' }, // pacman
+{ name: 'p_load_gh', argIdx: 'unnamed', resolveValue: 'library' }, // pacman
+{ name: 'from_import', argIdx: 0, argName: 'package', resolveValue: true }, // easypackages
+{ name: 'libraries', argIdx: 'unnamed', resolveValue: true }, // easypackages
+{ name: 'shelf', argIdx: 'unnamed', resolveValue: true } // librarian
 ];
 exports.SourceFunctions = [
-{ name: 'source', argIdx: 0, argName: 'file' },
-{ name: 'sys.source', argIdx: 0, argName: 'file' }
+{ name: 'source', argIdx: 0, argName: 'file', resolveValue: true },
+{ name: 'sys.source', argIdx: 0, argName: 'file', resolveValue: true }
 ];
 exports.ReadFunctions = [
-{ name: 'read.table', argIdx: 0, argName: 'file' },
-{ name: 'read.csv', argIdx: 0, argName: 'file' },
-{ name: 'read.csv2', argIdx: 0, argName: 'file' },
-{ name: 'read.delim', argIdx: 0, argName: 'file' },
-{ name: 'read.dcf', argIdx: 0, argName: 'file' },
-{ name: 'scan', argIdx: 0, argName: 'file' },
-{ name: 'read.fwf', argIdx: 0, argName: 'file' },
-{ name: 'file', argIdx: 1, argName: 'open' },
-{ name: 'url', argIdx: 1, argName: 'open' },
-{ name: 'load', argIdx: 0, argName: 'file' },
-{ name: 'gzfile', argIdx: 1, argName: 'open' },
-{ name: 'bzfile', argIdx: 1, argName: 'open' },
-{ name: 'download.file', argIdx: 0, argName: 'url' },
-{ name: 'pipe', argIdx: 1, argName: 'open' },
-{ name: 'fifo', argIdx: 1, argName: 'open' },
-{ name: 'unz', argIdx: 1, argName: 'open' },
-{ name: 'matrix', argIdx: 0, argName: 'data' },
-{ name: 'readRDS', argIdx: 0, argName: 'file' },
-{ name: 'readLines', argIdx: 0, argName: 'con' },
-{ name: 'readRenviron', argIdx: 0, argName: 'path' },
+{ name: 'read.table', argIdx: 0, argName: 'file', resolveValue: true },
+{ name: 'read.csv', argIdx: 0, argName: 'file', resolveValue: true },
+{ name: 'read.csv2', argIdx: 0, argName: 'file', resolveValue: true },
+{ name: 'read.delim', argIdx: 0, argName: 'file', resolveValue: true },
+{ name: 'read.dcf', argIdx: 0, argName: 'file', resolveValue: true },
+{ name: 'scan', argIdx: 0, argName: 'file', resolveValue: true },
+{ name: 'read.fwf', argIdx: 0, argName: 'file', resolveValue: true },
+{ name: 'file', argIdx: 1, argName: 'open', resolveValue: true },
+{ name: 'url', argIdx: 1, argName: 'open', resolveValue: true },
+{ name: 'load', argIdx: 0, argName: 'file', resolveValue: true },
+{ name: 'gzfile', argIdx: 1, argName: 'open', resolveValue: true },
+{ name: 'bzfile', argIdx: 1, argName: 'open', resolveValue: true },
+{ name: 'download.file', argIdx: 0, argName: 'url', resolveValue: true },
+{ name: 'pipe', argIdx: 1, argName: 'open', resolveValue: true },
+{ name: 'fifo', argIdx: 1, argName: 'open', resolveValue: true },
+{ name: 'unz', argIdx: 1, argName: 'open', resolveValue: true },
+{ name: 'matrix', argIdx: 0, argName: 'data', resolveValue: true },
+{ name: 'readRDS', argIdx: 0, argName: 'file', resolveValue: true },
+{ name: 'readLines', argIdx: 0, argName: 'con', resolveValue: true },
+{ name: 'readRenviron', argIdx: 0, argName: 'path', resolveValue: true },
 // readr
-{ name: 'read_csv', argIdx: 0, argName: 'file' },
-{ name: 'read_csv2', argIdx: 0, argName: 'file' },
-{ name: 'read_lines', argIdx: 0, argName: 'file' },
-{ name: 'read_delim', argIdx: 0, argName: 'file' },
-{ name: 'read_dsv', argIdx: 0, argName: 'file' },
-{ name: 'read_fwf', argIdx: 0, argName: 'file' },
-{ name: 'read_tsv', argIdx: 0, argName: 'file' },
-{ name: 'read_table', argIdx: 0, argName: 'file' },
-{ name: 'read_log', argIdx: 0, argName: 'file' },
-{ name: 'read_lines', argIdx: 0, argName: 'file' },
-{ name: 'read_lines_chunked', argIdx: 0, argName: 'file' },
+{ name: 'read_csv', argIdx: 0, argName: 'file', resolveValue: true },
+{ name: 'read_csv2', argIdx: 0, argName: 'file', resolveValue: true },
+{ name: 'read_lines', argIdx: 0, argName: 'file', resolveValue: true },
+{ name: 'read_delim', argIdx: 0, argName: 'file', resolveValue: true },
+{ name: 'read_dsv', argIdx: 0, argName: 'file', resolveValue: true },
+{ name: 'read_fwf', argIdx: 0, argName: 'file', resolveValue: true },
+{ name: 'read_tsv', argIdx: 0, argName: 'file', resolveValue: true },
+{ name: 'read_table', argIdx: 0, argName: 'file', resolveValue: true },
+{ name: 'read_log', argIdx: 0, argName: 'file', resolveValue: true },
+{ name: 'read_lines', argIdx: 0, argName: 'file', resolveValue: true },
+{ name: 'read_lines_chunked', argIdx: 0, argName: 'file', resolveValue: true },
 // xlsx
-{ name: 'read.xlsx', argIdx: 0, argName: 'file' },
-{ name: 'read.xlsx2', argIdx: 0, argName: 'file' },
+{ name: 'read.xlsx', argIdx: 0, argName: 'file', resolveValue: true },
+{ name: 'read.xlsx2', argIdx: 0, argName: 'file', resolveValue: true },
 // data.table
-{ name: 'fread', argIdx: 0, argName: 'file' },
+{ name: 'fread', argIdx: 0, argName: 'file', resolveValue: true },
 // haven
-{ name: 'read_sas', argIdx: 0, argName: 'file' },
-{ name: 'read_sav', argIdx: 0, argName: 'file' },
-{ name: 'read_por', argIdx: 0, argName: 'file' },
-{ name: 'read_dta', argIdx: 0, argName: 'file' },
-{ name: 'read_xpt', argIdx: 0, argName: 'file' },
+{ name: 'read_sas', argIdx: 0, argName: 'file', resolveValue: true },
+{ name: 'read_sav', argIdx: 0, argName: 'file', resolveValue: true },
+{ name: 'read_por', argIdx: 0, argName: 'file', resolveValue: true },
+{ name: 'read_dta', argIdx: 0, argName: 'file', resolveValue: true },
+{ name: 'read_xpt', argIdx: 0, argName: 'file', resolveValue: true },
 // feather
-{ name: 'read_feather', argIdx: 0, argName: 'file' },
+{ name: 'read_feather', argIdx: 0, argName: 'file', resolveValue: true },
 // foreign
-{ name: 'read.arff', argIdx: 0, argName: 'file' },
-{ name: 'read.dbf', argIdx: 0, argName: 'file' },
-{ name: 'read.dta', argIdx: 0, argName: 'file' },
-{ name: 'read.epiinfo', argIdx: 0, argName: 'file' },
-{ name: 'read.mtp', argIdx: 0, argName: 'file' },
-{ name: 'read.octave', argIdx: 0, argName: 'file' },
-{ name: 'read.spss', argIdx: 0, argName: 'file' },
-{ name: 'read.ssd', argIdx: 0, argName: 'file' },
-{ name: 'read.systat', argIdx: 0, argName: 'file' },
-{ name: 'read.xport', argIdx: 0, argName: 'file' },
+{ name: 'read.arff', argIdx: 0, argName: 'file', resolveValue: true },
+{ name: 'read.dbf', argIdx: 0, argName: 'file', resolveValue: true },
+{ name: 'read.dta', argIdx: 0, argName: 'file', resolveValue: true },
+{ name: 'read.epiinfo', argIdx: 0, argName: 'file', resolveValue: true },
+{ name: 'read.mtp', argIdx: 0, argName: 'file', resolveValue: true },
+{ name: 'read.octave', argIdx: 0, argName: 'file', resolveValue: true },
+{ name: 'read.spss', argIdx: 0, argName: 'file', resolveValue: true },
+{ name: 'read.ssd', argIdx: 0, argName: 'file', resolveValue: true },
+{ name: 'read.systat', argIdx: 0, argName: 'file', resolveValue: true },
+{ name: 'read.xport', argIdx: 0, argName: 'file', resolveValue: true },
 // car
-{ name: 'Import', argIdx: 0, argName: 'file' },
+{ name: 'Import', argIdx: 0, argName: 'file', resolveValue: true },
+];
+const OutputRedirects = [
+{ type: 'link-to-last-call', callName: 'sink', attachLinkInfo: { argIdx: 0, argName: 'file', when: DependencyInfoLinkConstraint.IfUnknown, resolveValue: true } }
 ];
 exports.WriteFunctions = [
-{ name: 'save',
|
|
92
|
-
{ name: 'save.image', argIdx:
|
|
93
|
-
{ name: 'write', argIdx: 1, argName: 'file' },
|
|
94
|
-
{ name: 'dput', argIdx: 1, argName: 'file' },
|
|
95
|
-
{ name: 'dump', argIdx: 1, argName: 'file' },
|
|
96
|
-
{ name: 'write.table', argIdx: 1, argName: 'file' },
|
|
97
|
-
{ name: 'write.csv', argIdx: 1, argName: 'file' },
|
|
98
|
-
{ name: 'saveRDS', argIdx: 1, argName: 'file' },
|
|
100
|
+
{ name: 'save', argName: 'file', resolveValue: true },
|
|
101
|
+
{ name: 'save.image', argIdx: 1, argName: 'file', resolveValue: true },
|
|
102
|
+
{ name: 'write', argIdx: 1, argName: 'file', resolveValue: true },
|
|
103
|
+
{ name: 'dput', argIdx: 1, argName: 'file', resolveValue: true },
|
|
104
|
+
{ name: 'dump', argIdx: 1, argName: 'file', resolveValue: true },
|
|
105
|
+
{ name: 'write.table', argIdx: 1, argName: 'file', resolveValue: true },
|
|
106
|
+
{ name: 'write.csv', argIdx: 1, argName: 'file', resolveValue: true },
|
|
107
|
+
{ name: 'saveRDS', argIdx: 1, argName: 'file', resolveValue: true },
|
|
99
108
|
// write functions that don't have argIndex are assumed to write to stdout
|
|
100
|
-
{ name: 'print', linkTo:
|
|
101
|
-
{ name: 'cat', linkTo:
|
|
102
|
-
{ name: 'message', linkTo:
|
|
103
|
-
{ name: 'warning', linkTo:
|
|
109
|
+
{ name: 'print', linkTo: OutputRedirects, resolveValue: true },
|
|
110
|
+
{ name: 'cat', linkTo: OutputRedirects, argName: 'file', resolveValue: true },
|
|
111
|
+
{ name: 'message', linkTo: OutputRedirects, resolveValue: true },
|
|
112
|
+
{ name: 'warning', linkTo: OutputRedirects, resolveValue: true },
|
|
104
113
|
// readr
|
|
105
|
-
{ name: 'write_csv', argIdx: 1, argName: 'file' },
|
|
106
|
-
{ name: 'write_csv2', argIdx: 1, argName: 'file' },
|
|
107
|
-
{ name: 'write_delim', argIdx: 1, argName: 'file' },
|
|
108
|
-
{ name: 'write_dsv', argIdx: 1, argName: 'file' },
|
|
109
|
-
{ name: 'write_fwf', argIdx: 1, argName: 'file' },
|
|
110
|
-
{ name: 'write_tsv', argIdx: 1, argName: 'file' },
|
|
111
|
-
{ name: 'write_table', argIdx: 1, argName: 'file' },
|
|
112
|
-
{ name: 'write_log', argIdx: 1, argName: 'file' },
|
|
114
|
+
{ name: 'write_csv', argIdx: 1, argName: 'file', resolveValue: true },
|
|
115
|
+
{ name: 'write_csv2', argIdx: 1, argName: 'file', resolveValue: true },
|
|
116
|
+
{ name: 'write_delim', argIdx: 1, argName: 'file', resolveValue: true },
|
|
117
|
+
{ name: 'write_dsv', argIdx: 1, argName: 'file', resolveValue: true },
|
|
118
|
+
{ name: 'write_fwf', argIdx: 1, argName: 'file', resolveValue: true },
|
|
119
|
+
{ name: 'write_tsv', argIdx: 1, argName: 'file', resolveValue: true },
|
|
120
|
+
{ name: 'write_table', argIdx: 1, argName: 'file', resolveValue: true },
|
|
121
|
+
{ name: 'write_log', argIdx: 1, argName: 'file', resolveValue: true },
|
|
113
122
|
// heaven
|
|
114
|
-
{ name: 'write_sas', argIdx: 1, argName: 'file' },
|
|
115
|
-
{ name: 'write_sav', argIdx: 1, argName: 'file' },
|
|
116
|
-
{ name: 'write_por', argIdx: 1, argName: 'file' },
|
|
117
|
-
{ name: 'write_dta', argIdx: 1, argName: 'file' },
|
|
118
|
-
{ name: 'write_xpt', argIdx: 1, argName: 'file' },
|
|
123
|
+
{ name: 'write_sas', argIdx: 1, argName: 'file', resolveValue: true },
|
|
124
|
+
{ name: 'write_sav', argIdx: 1, argName: 'file', resolveValue: true },
|
|
125
|
+
{ name: 'write_por', argIdx: 1, argName: 'file', resolveValue: true },
|
|
126
|
+
{ name: 'write_dta', argIdx: 1, argName: 'file', resolveValue: true },
|
|
127
|
+
{ name: 'write_xpt', argIdx: 1, argName: 'file', resolveValue: true },
|
|
119
128
|
// feather
|
|
120
|
-
{ name: 'write_feather', argIdx: 1, argName: 'file' },
|
|
129
|
+
{ name: 'write_feather', argIdx: 1, argName: 'file', resolveValue: true },
|
|
121
130
|
// foreign
|
|
122
|
-
{ name: 'write.arff', argIdx: 1, argName: 'file' },
|
|
123
|
-
{ name: 'write.dbf', argIdx: 1, argName: 'file' },
|
|
124
|
-
{ name: 'write.dta', argIdx: 1, argName: 'file' },
|
|
125
|
-
{ name: 'write.foreign', argIdx: 1, argName: 'file' },
|
|
131
|
+
{ name: 'write.arff', argIdx: 1, argName: 'file', resolveValue: true },
|
|
132
|
+
{ name: 'write.dbf', argIdx: 1, argName: 'file', resolveValue: true },
|
|
133
|
+
{ name: 'write.dta', argIdx: 1, argName: 'file', resolveValue: true },
|
|
134
|
+
{ name: 'write.foreign', argIdx: 1, argName: 'file', resolveValue: true },
|
|
126
135
|
// xlsx
|
|
127
|
-
{ name: 'write.xlsx', argIdx: 1, argName: 'file' },
|
|
128
|
-
{ name: 'write.xlsx2', argIdx: 1, argName: 'file' },
|
|
136
|
+
{ name: 'write.xlsx', argIdx: 1, argName: 'file', resolveValue: true },
|
|
137
|
+
{ name: 'write.xlsx2', argIdx: 1, argName: 'file', resolveValue: true },
|
|
129
138
|
// graphics
|
|
130
|
-
{ name: 'pdf', argIdx: 0, argName: 'file' },
|
|
131
|
-
{ name: 'jpeg', argIdx: 0, argName: 'file' },
|
|
132
|
-
{ name: 'png', argIdx: 0, argName: 'file' },
|
|
133
|
-
{ name: 'windows', argIdx: 0, argName: 'file' },
|
|
134
|
-
{ name: 'postscript', argIdx: 0, argName: 'file' },
|
|
135
|
-
{ name: 'xfix', argIdx: 0, argName: 'file' },
|
|
136
|
-
{ name: 'bitmap', argIdx: 0, argName: 'file' },
|
|
137
|
-
{ name: 'pictex', argIdx: 0, argName: 'file' },
|
|
138
|
-
{ name: 'cairo_pdf', argIdx: 0, argName: 'file' },
|
|
139
|
-
{ name: 'svg', argIdx: 0, argName: 'file' },
|
|
140
|
-
{ name: 'bmp', argIdx: 0, argName: 'file' },
|
|
141
|
-
{ name: 'tiff', argIdx: 0, argName: 'file' },
|
|
142
|
-
{ name: 'X11', argIdx: 0, argName: 'file' },
|
|
143
|
-
{ name: 'quartz', argIdx: 0, argName: 'file' },
|
|
139
|
+
{ name: 'pdf', argIdx: 0, argName: 'file', resolveValue: true },
|
|
140
|
+
{ name: 'jpeg', argIdx: 0, argName: 'file', resolveValue: true },
|
|
141
|
+
{ name: 'png', argIdx: 0, argName: 'file', resolveValue: true },
|
|
142
|
+
{ name: 'windows', argIdx: 0, argName: 'file', resolveValue: true },
|
|
143
|
+
{ name: 'postscript', argIdx: 0, argName: 'file', resolveValue: true },
|
|
144
|
+
{ name: 'xfix', argIdx: 0, argName: 'file', resolveValue: true },
|
|
145
|
+
{ name: 'bitmap', argIdx: 0, argName: 'file', resolveValue: true },
|
|
146
|
+
{ name: 'pictex', argIdx: 0, argName: 'file', resolveValue: true },
|
|
147
|
+
{ name: 'cairo_pdf', argIdx: 0, argName: 'file', resolveValue: true },
|
|
148
|
+
{ name: 'svg', argIdx: 0, argName: 'file', resolveValue: true },
|
|
149
|
+
{ name: 'bmp', argIdx: 0, argName: 'file', resolveValue: true },
|
|
150
|
+
{ name: 'tiff', argIdx: 0, argName: 'file', resolveValue: true },
|
|
151
|
+
{ name: 'X11', argIdx: 0, argName: 'file', resolveValue: true },
|
|
152
|
+
{ name: 'quartz', argIdx: 0, argName: 'file', resolveValue: true },
|
|
144
153
|
// car
|
|
145
|
-
{ name: 'Export', argIdx: 0, argName: 'file' },
|
|
154
|
+
{ name: 'Export', argIdx: 0, argName: 'file', resolveValue: true },
|
|
146
155
|
];
|
|
147
156
|
function printResultSection(title, infos, result, sectionSpecifics) {
|
|
148
157
|
if (infos.length <= 0) {
|
|
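The hunk above is pure data: every entry in the dependencies query's `ReadFunctions`/`WriteFunctions` tables gains a `resolveValue: true` flag, and a new `OutputRedirects` table lets stdout writers such as `print`, `cat`, `message`, and `warning` be linked to the most recent `sink()` call. The TypeScript sketch below shows how such a declarative table can be consumed; the interface name and helper are illustrative assumptions, not flowr's actual exports, and only the fields visible in the diff are modeled.

```ts
// Illustrative sketch only: the interface and helper below are assumptions that
// mirror the fields visible in the diff, not flowr's real types.
interface DependencyFunctionInfo {
    name: string;           // R function to match, e.g. 'read.csv' or 'saveRDS'
    argIdx?: number;        // positional index of the file/connection argument
    argName?: string;       // named-argument fallback, e.g. 'file' or 'open'
    resolveValue?: boolean; // new in 2.2.6: try to statically resolve the argument's value
    linkTo?: unknown[];     // e.g. OutputRedirects for stdout writers like print/cat
}

const readFunctions: DependencyFunctionInfo[] = [
    { name: 'read.csv', argIdx: 0, argName: 'file', resolveValue: true },
    { name: 'readRDS',  argIdx: 0, argName: 'file', resolveValue: true }
];

// Entries whose file argument the dependencies query should try to resolve:
const resolvable = readFunctions.filter(f => f.resolveValue === true);
console.log(resolvable.map(f => f.name)); // -> [ 'read.csv', 'readRDS' ]
```

Keeping the per-function metadata declarative like this means supporting another reader or writer is a one-line table change rather than new executor logic.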
package/queries/catalog/resolve-value-query/resolve-value-query-executor.d.ts
CHANGED

@@ -1,4 +1,4 @@
 import type { ResolveValueQuery, ResolveValueQueryResult } from './resolve-value-query-format';
 import type { BasicQueryData } from '../../base-query-format';
 export declare function fingerPrintOfQuery(query: ResolveValueQuery): string;
-export declare function executeResolveValueQuery({ dataflow: { graph
+export declare function executeResolveValueQuery({ dataflow: { graph }, ast }: BasicQueryData, queries: readonly ResolveValueQuery[]): ResolveValueQueryResult;

package/queries/catalog/resolve-value-query/resolve-value-query-executor.js
CHANGED

@@ -5,11 +5,10 @@ exports.executeResolveValueQuery = executeResolveValueQuery;
 const log_1 = require("../../../util/log");
 const parse_1 = require("../../../slicing/criterion/parse");
 const resolve_by_name_1 = require("../../../dataflow/environments/resolve-by-name");
-const node_id_1 = require("../../../r-bridge/lang-4.x/ast/model/processing/node-id");
 function fingerPrintOfQuery(query) {
 return JSON.stringify(query);
 }
-function executeResolveValueQuery({ dataflow: { graph
+function executeResolveValueQuery({ dataflow: { graph }, ast }, queries) {
 const start = Date.now();
 const results = {};
 for (const query of queries) {
@@ -18,8 +17,8 @@ function executeResolveValueQuery({ dataflow: { graph, environment }, ast }, que
 log_1.log.warn(`Duplicate Key for resolve-value-query: ${key}, skipping...`);
 }
 const values = query.criteria
-.map(criteria => (0,
-.flatMap(ident => (0, resolve_by_name_1.
+.map(criteria => (0, parse_1.slicingCriterionToId)(criteria, ast.idMap))
+.flatMap(ident => (0, resolve_by_name_1.resolve)(ident, { graph, full: true, idMap: ast.idMap }));
 results[key] = {
 values: [...new Set(values)]
 };
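In the executor above, the environment-based lookup (and the now-unused `node_id` import) is replaced by a single call to the new `resolve` helper from `resolve-by-name`, driven by the dataflow graph and the normalized AST's `idMap`, with duplicates removed through a `Set`. The sketch below mirrors that pipeline; the stub helpers are assumptions that only mimic the call shapes visible in the diff and are not flowr's real implementations.

```ts
// Minimal sketch of the new resolution pipeline; stubs stand in for
// parse_1.slicingCriterionToId and resolve_by_name_1.resolve.
type NodeId = string;

// stand-in for slicingCriterionToId(criterion, ast.idMap)
function slicingCriterionToId(criterion: string, _idMap: Map<NodeId, string>): NodeId {
    return criterion; // pretend the criterion already is the node id
}

// stand-in for resolve(id, { graph, full: true, idMap })
function resolve(id: NodeId, ctx: { graph: unknown; full: boolean; idMap: Map<NodeId, string> }): string[] {
    const value = ctx.idMap.get(id);
    return value === undefined ? [] : [value];
}

function valuesForQuery(criteria: readonly string[], graph: unknown, idMap: Map<NodeId, string>): string[] {
    const values = criteria
        .map(criterion => slicingCriterionToId(criterion, idMap))
        .flatMap(id => resolve(id, { graph, full: true, idMap }));
    return [...new Set(values)]; // deduplicate, just as the executor does
}

// usage: two criteria resolving to the same value are reported once
const idMap = new Map<NodeId, string>([['11@sum', '"42"'], ['12@sum', '"42"']]);
console.log(valuesForQuery(['11@sum', '12@sum'], undefined, idMap)); // -> [ '"42"' ]
```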
package/queries/query-print.js
CHANGED

@@ -8,15 +8,18 @@ const query_1 = require("./query");
 const html_hover_over_1 = require("../util/html-hover-over");
 const time_1 = require("../util/time");
 const built_in_1 = require("../dataflow/environments/built-in");
-function nodeString(
+function nodeString(nodeId, formatter, processed) {
+const isObj = typeof nodeId === 'object' && nodeId !== null && 'id' in nodeId;
+const id = isObj ? nodeId.id : nodeId;
+const info = isObj ? nodeId.info : undefined;
 if (id === built_in_1.BuiltIn) {
-return (0, ansi_1.italic)('built-in', formatter);
+return (0, ansi_1.italic)('built-in', formatter) + (info ? ` (${JSON.stringify(info)})` : '');
 }
 const node = processed.normalize.idMap.get(id);
 if (node === undefined) {
-return `UNKNOWN: ${id}`;
+return `UNKNOWN: ${id} (info: ${JSON.stringify(info)})`;
 }
-return `${(0, ansi_1.italic)('`' + (node.lexeme ?? node.info.fullLexeme ?? 'UNKNOWN') + '`', formatter)} (L.${node.location?.[0]})`;
+return `${(0, ansi_1.italic)('`' + (node.lexeme ?? node.info.fullLexeme ?? 'UNKNOWN') + '`', formatter)} (L.${node.location?.[0]}${info ? ', ' + JSON.stringify(info) : ''})`;
 }
 function asciiCallContextSubHit(formatter, results, processed) {
 const result = [];
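The `nodeString` change above widens the accepted argument: callers may now pass either a plain node id or an object of the shape `{ id, info }`, and any attached `info` payload is appended to the rendered label. A simplified TypeScript sketch of that branching follows; the type and function names are illustrative, not flowr's internals, and the AST lookup is omitted.

```ts
// Illustrative only: shows how a plain id and an { id, info } object are both handled.
type NodeId = string | number;
type NodeRef = NodeId | { id: NodeId; info?: Record<string, unknown> };

function describeNode(ref: NodeRef): string {
    if (typeof ref === 'object' && ref !== null) {
        const { id, info } = ref;
        // append the optional info payload, mirroring the query printer's output
        return `node ${id}${info ? ` (${JSON.stringify(info)})` : ''}`;
    }
    return `node ${ref}`;
}

console.log(describeNode(7));                                 // -> "node 7"
console.log(describeNode({ id: 7, info: { kind: 'call' } })); // -> 'node 7 ({"kind":"call"})'
```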
package/util/version.js
CHANGED

@@ -3,7 +3,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
 exports.flowrVersion = flowrVersion;
 const semver_1 = require("semver");
 // this is automatically replaced with the current version by release-it
-const version = '2.2.5';
+const version = '2.2.6';
 function flowrVersion() {
 return new semver_1.SemVer(version);
 }
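The release bump itself is mechanical: the constant that release-it rewrites moves from 2.2.5 to 2.2.6 and is wrapped in a `SemVer` instance by `flowrVersion()`. A small sketch comparing the two versions with the same `semver` dependency follows, purely as illustration.

```ts
// Comparing the old and new flowR versions with the semver package.
import { SemVer } from 'semver';

const previous = new SemVer('2.2.5');
const current = new SemVer('2.2.6');
console.log(current.compare(previous)); // -> 1, i.e. 2.2.6 is the newer release
```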