@eagleoutice/flowr 2.2.4 → 2.2.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +9 -9
- package/cli/repl/commands/repl-cfg.js +4 -3
- package/cli/repl/commands/repl-dataflow.js +4 -3
- package/cli/repl/commands/repl-normalize.js +4 -3
- package/cli/repl/core.d.ts +1 -1
- package/cli/repl/core.js +23 -13
- package/config.d.ts +2 -2
- package/config.js +7 -1
- package/dataflow/environments/resolve-by-name.d.ts +24 -2
- package/dataflow/environments/resolve-by-name.js +85 -4
- package/documentation/print-query-wiki.js +6 -4
- package/documentation/print-readme.js +6 -6
- package/package.json +1 -1
- package/queries/catalog/call-context-query/call-context-query-executor.js +20 -6
- package/queries/catalog/call-context-query/call-context-query-format.d.ts +12 -6
- package/queries/catalog/call-context-query/call-context-query-format.js +8 -6
- package/queries/catalog/dependencies-query/dependencies-query-executor.js +157 -53
- package/queries/catalog/dependencies-query/dependencies-query-format.d.ts +16 -1
- package/queries/catalog/dependencies-query/dependencies-query-format.js +121 -112
- package/queries/catalog/resolve-value-query/resolve-value-query-executor.d.ts +1 -1
- package/queries/catalog/resolve-value-query/resolve-value-query-executor.js +3 -4
- package/queries/query-print.js +7 -4
- package/util/version.js +1 -1

package/queries/catalog/dependencies-query/dependencies-query-executor.js

@@ -3,15 +3,28 @@ Object.defineProperty(exports, "__esModule", { value: true });
 exports.executeDependenciesQuery = executeDependenciesQuery;
 const query_1 = require("../../query");
 const dependencies_query_format_1 = require("./dependencies-query-format");
+const vertex_1 = require("../../../dataflow/graph/vertex");
 const graph_1 = require("../../../dataflow/graph/graph");
 const log_1 = require("../../../util/log");
 const type_1 = require("../../../r-bridge/lang-4.x/ast/model/type");
-const retriever_1 = require("../../../r-bridge/retriever");
 const r_function_call_1 = require("../../../r-bridge/lang-4.x/ast/model/nodes/r-function-call");
 const visitor_1 = require("../../../r-bridge/lang-4.x/ast/model/processing/visitor");
 const assert_1 = require("../../../util/assert");
 const objects_1 = require("../../../util/objects");
-const
+const resolve_by_name_1 = require("../../../dataflow/environments/resolve-by-name");
+function collectNamespaceAccesses(data, libraries) {
+    /* for libraries, we have to additionally track all uses of `::` and `:::`, for this we currently simply traverse all uses */
+    (0, visitor_1.visitAst)(data.ast.ast, n => {
+        if (n.type === type_1.RType.Symbol && n.namespace) {
+            /* we should improve the identification of ':::' */
+            libraries.push({
+                nodeId: n.info.id,
+                functionName: (n.info.fullLexeme ?? n.lexeme).includes(':::') ? ':::' : '::',
+                libraryName: n.namespace
+            });
+        }
+    });
+}
 function executeDependenciesQuery(data, queries) {
     if (queries.length !== 1) {
         log_1.log.warn('Dependencies query expects only up to one query, but got ', queries.length, 'only using the first query');
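The `collectNamespaceAccesses` helper added above records every `pkg::fn` / `pkg:::fn` use as an additional library dependency, classifying the access operator by whether the (full) lexeme contains `:::`. A minimal TypeScript sketch of that classification; the symbol shape here is a simplified stand-in, not flowR's real AST node type:

```typescript
// Simplified stand-in for an R symbol node carrying namespace information.
interface SymbolNode {
    id:          number;
    lexeme:      string;
    fullLexeme?: string;
    namespace?:  string;
}

interface LibraryDependency {
    nodeId:       number;
    functionName: '::' | ':::';
    libraryName:  string;
}

function classifyNamespaceAccess(n: SymbolNode): LibraryDependency | undefined {
    if(n.namespace === undefined) {
        return undefined;
    }
    // same heuristic as the diff above: inspect the (full) lexeme for ':::'
    return {
        nodeId:       n.id,
        functionName: (n.fullLexeme ?? n.lexeme).includes(':::') ? ':::' : '::',
        libraryName:  n.namespace
    };
}

// e.g. `dplyr::filter(x)` → { functionName: '::', libraryName: 'dplyr' }
console.log(classifyNamespaceAccess({ id: 1, lexeme: 'filter', fullLexeme: 'dplyr::filter', namespace: 'dplyr' }));
```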

@@ -25,58 +38,46 @@ function executeDependenciesQuery(data, queries) {
     const writeFunctions = getFunctionsToCheck(query.writeFunctions, ignoreDefault, dependencies_query_format_1.WriteFunctions);
     const numberOfFunctions = libraryFunctions.length + sourceFunctions.length + readFunctions.length + writeFunctions.length;
     const results = numberOfFunctions === 0 ? { kinds: {}, '.meta': { timing: 0 } } : (0, query_1.executeQueriesOfSameType)(data, ...makeCallContextQuery(libraryFunctions, 'library'), ...makeCallContextQuery(sourceFunctions, 'source'), ...makeCallContextQuery(readFunctions, 'read'), ...makeCallContextQuery(writeFunctions, 'write'));
-
+    function getLexeme(argument, id) {
         if ((argument && argument !== dependencies_query_format_1.Unknown) || !id) {
             return undefined;
         }
         let get = data.ast.idMap.get(id);
-        if (
-            return undefined;
-        }
-        if (get.type === type_1.RType.Argument) {
+        if (get?.type === type_1.RType.Argument) {
             get = get.value;
         }
         return get?.info.fullLexeme ?? get?.lexeme;
-    }
-    const libraries = getResults(data, results, 'library', libraryFunctions, (id, vertex, argId,
+    }
+    const libraries = getResults(data, results, 'library', libraryFunctions, (id, vertex, argId, value, linkedIds) => ({
         nodeId: id,
         functionName: vertex.name,
-        lexemeOfArgument: getLexeme(
-        libraryName:
-
+        lexemeOfArgument: getLexeme(value, argId),
+        libraryName: value ?? dependencies_query_format_1.Unknown,
+        linkedIds: linkedIds?.length ? linkedIds : undefined
+    }));
     if (!ignoreDefault) {
-
-        (0, visitor_1.visitAst)(data.ast.ast, n => {
-            if (n.type === type_1.RType.Symbol && n.namespace) {
-                /* we should improve the identification of ':::' */
-                libraries.push({
-                    nodeId: n.info.id,
-                    functionName: (n.info.fullLexeme ?? n.lexeme).includes(':::') ? ':::' : '::',
-                    libraryName: n.namespace
-                });
-            }
-        });
+        collectNamespaceAccesses(data, libraries);
     }
-    const sourcedFiles = getResults(data, results, 'source', sourceFunctions, (id, vertex, argId,
+    const sourcedFiles = getResults(data, results, 'source', sourceFunctions, (id, vertex, argId, value, linkedIds) => ({
         nodeId: id,
         functionName: vertex.name,
-        file:
-        lexemeOfArgument: getLexeme(
+        file: value ?? dependencies_query_format_1.Unknown,
+        lexemeOfArgument: getLexeme(value, argId),
         linkedIds: linkedIds?.length ? linkedIds : undefined
     }));
-    const readData = getResults(data, results, 'read', readFunctions, (id, vertex, argId,
+    const readData = getResults(data, results, 'read', readFunctions, (id, vertex, argId, value, linkedIds) => ({
         nodeId: id,
         functionName: vertex.name,
-        source:
-        lexemeOfArgument: getLexeme(
+        source: value ?? dependencies_query_format_1.Unknown,
+        lexemeOfArgument: getLexeme(value, argId),
         linkedIds: linkedIds?.length ? linkedIds : undefined
     }));
-    const writtenData = getResults(data, results, 'write', writeFunctions, (id, vertex, argId,
+    const writtenData = getResults(data, results, 'write', writeFunctions, (id, vertex, argId, value, linkedIds) => ({
         nodeId: id,
         functionName: vertex.name,
         // write functions that don't have argIndex are assumed to write to stdout
-        destination:
-        lexemeOfArgument: getLexeme(
+        destination: value ?? (linkedIds?.length ? dependencies_query_format_1.Unknown : 'stdout'),
+        lexemeOfArgument: getLexeme(value, argId),
         linkedIds: linkedIds?.length ? linkedIds : undefined
     }));
     return {
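For write dependencies, the hunk above now derives the reported `destination` from the resolved argument value, falling back to `unknown` when the call is linked to another call (such as `sink`) and to `stdout` otherwise. A small sketch of that fallback rule, with illustrative names:

```typescript
// Sketch of the write-branch fallback shown above:
// value ?? (linkedIds?.length ? Unknown : 'stdout')
const Unknown = 'unknown';

function destinationOf(value: string | undefined, linkedIds: readonly unknown[] | undefined): string {
    // a resolved argument value wins; otherwise a linked call (e.g. sink) makes it Unknown,
    // and a write function with neither is assumed to write to stdout
    return value ?? (linkedIds?.length ? Unknown : 'stdout');
}

console.log(destinationOf('out.csv', undefined)); // 'out.csv'
console.log(destinationOf(undefined, [42]));      // 'unknown'
console.log(destinationOf(undefined, undefined)); // 'stdout'
```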

@@ -93,31 +94,32 @@ function makeCallContextQuery(functions, kind) {
         includeAliases: false,
         callNameExact: true,
         subkind: f.name,
-        linkTo: f.linkTo
+        linkTo: f.linkTo,
         kind
     }));
 }
-function
+function dropInfoOnLinkedIds(linkedIds) {
+    if (!linkedIds) {
+        return undefined;
+    }
+    return linkedIds.map(id => typeof id === 'object' ? id.id : id);
+}
+function getResults(data, results, kind, functions, makeInfo) {
     const kindEntries = Object.entries(results?.kinds[kind]?.subkinds ?? {});
     return kindEntries.flatMap(([name, results]) => results.flatMap(({ id, linkedIds }) => {
         const vertex = data.dataflow.graph.getVertex(id);
         const info = functions.find(f => f.name === name);
-
-
-
-
-
-        }
-        }
-        const args = index !== undefined ? getArgumentValue(data, vertex, index, additionalAllowedTypes) : undefined;
-        if (!args) {
-            const record = (0, objects_1.compactRecord)(makeInfo(id, vertex, undefined, undefined, linkedIds));
+        const args = getArgumentValue(data, vertex, info.argIdx, info.argName, info.resolveValue);
+        const linkedArgs = collectValuesFromLinks(args, data, linkedIds);
+        const foundValues = linkedArgs ?? args;
+        if (!foundValues) {
+            const record = (0, objects_1.compactRecord)(makeInfo(id, vertex, undefined, undefined, dropInfoOnLinkedIds(linkedIds)));
             return record ? [record] : [];
         }
         const results = [];
-        for (const [arg, values] of
+        for (const [arg, values] of foundValues.entries()) {
            for (const value of values) {
-                const result = (0, objects_1.compactRecord)(makeInfo(id, vertex, arg, value, linkedIds));
+                const result = (0, objects_1.compactRecord)(makeInfo(id, vertex, arg, value, dropInfoOnLinkedIds(linkedIds)));
                if (result) {
                    results.push(result);
                }
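The new `dropInfoOnLinkedIds` helper above normalizes linked ids before they are handed to `makeInfo`, since a linked id may now carry attached link info instead of being a bare node id. A standalone sketch with simplified types:

```typescript
// Simplified stand-in types: a linked id may be a plain id or an object with attached info.
type NodeId = string | number;
type LinkedId = NodeId | { id: NodeId, info: unknown };

function dropInfoOnLinkedIds(linkedIds: readonly LinkedId[] | undefined): NodeId[] | undefined {
    if(!linkedIds) {
        return undefined;
    }
    // strip the attached info, keeping only the node ids
    return linkedIds.map(id => typeof id === 'object' ? id.id : id);
}

console.log(dropInfoOnLinkedIds([3, { id: 7, info: { when: 'if-unknown' } }])); // [3, 7]
```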

@@ -126,11 +128,110 @@ function getResults(data, results, kind, functions, makeInfo, additionalAllowedT
         return results;
     })) ?? [];
 }
+function collectValuesFromLinks(args, data, linkedIds) {
+    if (!linkedIds || linkedIds.length === 0) {
+        return undefined;
+    }
+    const hasAtLeastAValue = args !== undefined && [...args.values()].some(set => [...set].some(v => v !== dependencies_query_format_1.Unknown && v !== undefined));
+    const map = new Map();
+    for (const linkedId of linkedIds) {
+        if (typeof linkedId !== 'object' || !linkedId.info) {
+            continue;
+        }
+        const info = linkedId.info;
+        // do not collect this one
+        if (hasAtLeastAValue && info.when !== dependencies_query_format_1.DependencyInfoLinkConstraint.Always) {
+            continue;
+        }
+        // collect this one!
+        const vertex = data.dataflow.graph.getVertex(linkedId.id);
+        if (vertex === undefined || vertex.tag !== vertex_1.VertexType.FunctionCall) {
+            continue;
+        }
+        const args = getArgumentValue(data, vertex, info.argIdx, info.argName, info.resolveValue);
+        if (args === undefined) {
+            continue;
+        }
+        for (const [arg, values] of args.entries()) {
+            const set = map.get(arg) ?? new Set();
+            map.set(arg, set);
+            for (const value of values) {
+                set.add(value);
+            }
+        }
+    }
+    return map;
+}
+function hasCharacterOnly(data, vertex, idMap) {
+    if (!vertex.args || vertex.args.length === 0 || !idMap) {
+        return false;
+    }
+    const treatAsChar = getArgumentValue(data, vertex, 5, 'character.only', true);
+    if (!treatAsChar) {
+        return false;
+    }
+    const hasTrue = [...treatAsChar.values()].some(set => set?.has('TRUE'));
+    const hasFalse = hasTrue ? [...treatAsChar.values()].some(set => set === undefined || set.has('FALSE')) : false;
+    if (hasTrue && hasFalse) {
+        return 'maybe';
+    }
+    else {
+        return hasTrue;
+    }
+}
+function resolveBasedOnConfig(data, vertex, argument, environment, idMap, resolveValue) {
+    let full = true;
+    if (!resolveValue) {
+        full = false;
+    }
+    if (resolveValue === 'library') {
+        const hasChar = hasCharacterOnly(data, vertex, idMap);
+        if (hasChar === false) {
+            if (argument.type === type_1.RType.Symbol) {
+                return [argument.lexeme];
+            }
+            full = false;
+        }
+    }
+    return (0, resolve_by_name_1.resolve)(argument, { environment, graph: data.dataflow.graph, full });
+}
+function unwrapRValue(value) {
+    if (value === undefined) {
+        return undefined;
+    }
+    switch (typeof value) {
+        case 'string':
+            return value;
+        case 'number':
+            return value.toString();
+        case 'boolean':
+            return value ? 'TRUE' : 'FALSE';
+    }
+    if (typeof value !== 'object' || value === null) {
+        return JSON.stringify(value);
+    }
+    if ('str' in value) {
+        return value.str;
+    }
+    else if ('num' in value) {
+        return value.num.toString();
+    }
+    else {
+        return JSON.stringify(value);
+    }
+}
 /**
  * Get the values of all arguments matching the criteria.
  */
-function getArgumentValue(
-
+function getArgumentValue(data, vertex, argumentIndex, argumentName, resolveValue) {
+    const graph = data.dataflow.graph;
+    if (argumentName) {
+        const arg = vertex?.args.findIndex(arg => arg !== r_function_call_1.EmptyArgument && arg.name === argumentName);
+        if (arg >= 0) {
+            argumentIndex = arg;
+        }
+    }
+    if (!vertex || argumentIndex === undefined) {
         return undefined;
     }
     if (argumentIndex === 'unnamed') {
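Among the helpers added above, `unwrapRValue` normalizes whatever the resolver returns (primitives, or wrapper objects carrying `str`/`num` fields) into the string form stored in the query result. A typed sketch of the same mapping, with the value union re-stated locally as an assumption:

```typescript
// Assumed value shapes, mirroring what unwrapRValue above handles.
type ResolvedValue = string | number | boolean | { str: string } | { num: number } | object | undefined;

function unwrapRValue(value: ResolvedValue): string | undefined {
    if(value === undefined) {
        return undefined;
    }
    switch(typeof value) {
        case 'string':  return value;
        case 'number':  return value.toString();
        case 'boolean': return value ? 'TRUE' : 'FALSE';
    }
    // wrapper objects produced by the resolver
    if('str' in value) {
        return (value as { str: string }).str;
    } else if('num' in value) {
        return (value as { num: number }).num.toString();
    }
    return JSON.stringify(value);
}

console.log(unwrapRValue(true));             // 'TRUE'
console.log(unwrapRValue({ str: 'dplyr' })); // 'dplyr'
```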

@@ -143,9 +244,11 @@ function getArgumentValue({ dataflow: { graph } }, vertex, argumentIndex, additi
                 valueNode = valueNode.value;
             }
             if (valueNode) {
-
-
-
+                // TDODO: extend vector support etc.
+                // this should be evaluated in the callee-context
+                const values = resolveBasedOnConfig(data, vertex, valueNode, vertex.environment, graph.idMap, resolveValue)
+                    ?.map(unwrapRValue) ?? [dependencies_query_format_1.Unknown];
+                map.set(ref, new Set(values));
             }
         }
         return map;

@@ -160,8 +263,9 @@ function getArgumentValue({ dataflow: { graph } }, vertex, argumentIndex, additi
             valueNode = valueNode.value;
         }
         if (valueNode) {
-            const
-
+            const values = resolveBasedOnConfig(data, vertex, valueNode, vertex.environment, graph.idMap, resolveValue)
+                ?.map(unwrapRValue) ?? [dependencies_query_format_1.Unknown];
+            return new Map([[arg, new Set(values)]]);
         }
     }
     return undefined;
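`getArgumentValue` now takes both a positional index and an argument name (see the hunks above): when a named argument matches, its position overrides the configured index. A reduced sketch of that lookup; the argument shape and the `EmptyArgument` sentinel are simplified stand-ins for flowR's real call-argument types:

```typescript
// Simplified sentinel and argument shape, for illustration only.
const EmptyArgument = '<>' as const;
type Arg = typeof EmptyArgument | { name?: string, value: string };

function effectiveArgumentIndex(args: readonly Arg[], argumentIndex: number | undefined, argumentName?: string): number | undefined {
    if(argumentName) {
        // prefer an argument explicitly passed by name
        const byName = args.findIndex(arg => arg !== EmptyArgument && arg.name === argumentName);
        if(byName >= 0) {
            return byName;
        }
    }
    // otherwise fall back to the configured positional index
    return argumentIndex;
}

// `write.csv(d, file = "out.csv")`: the named `file` argument at position 1 wins
console.log(effectiveArgumentIndex([{ value: 'd' }, { name: 'file', value: '"out.csv"' }], 1, 'file')); // 1
// `read.csv("data.csv")`: no named match, fall back to the configured index 0
console.log(effectiveArgumentIndex([{ value: '"data.csv"' }], 0, 'file'));                              // 0
```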

package/queries/catalog/dependencies-query/dependencies-query-format.d.ts

@@ -2,7 +2,21 @@ import type { BaseQueryFormat, BaseQueryResult } from '../../base-query-format';
 import type { NodeId } from '../../../r-bridge/lang-4.x/ast/model/processing/node-id';
 import Joi from 'joi';
 import { executeDependenciesQuery } from './dependencies-query-executor';
+import type { LinkTo } from '../call-context-query/call-context-query-format';
 export declare const Unknown = "unknown";
+/** when to read the argument value from a linked function */
+export declare enum DependencyInfoLinkConstraint {
+    Always = "always",
+    IfUnknown = "if-unknown"
+}
+/**
+ * A dependency link may have attached information. If you pass it, we try to resolve the argument value from the linked function
+ * if the `when` constraint is met.
+ */
+export type DependencyInfoLink = LinkTo<RegExp | string, Omit<FunctionInfo, 'name' | 'linkTo'> & {
+    when: DependencyInfoLinkConstraint;
+} | undefined>;
+export type DependencyInfoLinkAttachedInfo = DependencyInfoLink['attachLinkInfo'];
 export declare const LibraryFunctions: FunctionInfo[];
 export declare const SourceFunctions: FunctionInfo[];
 export declare const ReadFunctions: FunctionInfo[];

@@ -11,7 +25,8 @@ export interface FunctionInfo {
     name: string;
     argIdx?: number | 'unnamed';
     argName?: string;
-    linkTo?:
+    linkTo?: DependencyInfoLink[];
+    resolveValue?: boolean | 'library';
 }
 export interface DependenciesQuery extends BaseQueryFormat {
     readonly type: 'dependencies';
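With the extended `FunctionInfo` declaration above, each tracked function may now request value resolution (`resolveValue`) and link to other calls (`linkTo`). The sketch below re-states the types locally so it stands alone and mirrors two of the built-in entries from this release (`read.csv` and `cat`); it is illustrative, not the package's actual module:

```typescript
// Local re-statement of the extended shapes; the authoritative declarations live in
// dependencies-query-format.d.ts and call-context-query-format.d.ts.
enum DependencyInfoLinkConstraint { Always = 'always', IfUnknown = 'if-unknown' }

interface FunctionInfo {
    name:          string;
    argIdx?:       number | 'unnamed';
    argName?:      string;
    linkTo?:       unknown[];           // DependencyInfoLink[] in the real declaration
    resolveValue?: boolean | 'library';
}

// mirrors the built-in read entry: resolve the `file` argument to its value
const readCsv: FunctionInfo = { name: 'read.csv', argIdx: 0, argName: 'file', resolveValue: true };

// mirrors the built-in `cat` entry: if no `file` argument is known, consult the last `sink` call
const cat: FunctionInfo = {
    name:         'cat',
    argName:      'file',
    resolveValue: true,
    linkTo:       [{
        type:           'link-to-last-call',
        callName:       'sink',
        attachLinkInfo: { argIdx: 0, argName: 'file', when: DependencyInfoLinkConstraint.IfUnknown, resolveValue: true }
    }]
};
console.log(readCsv, cat);
```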

package/queries/catalog/dependencies-query/dependencies-query-format.js

@@ -3,146 +3,155 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
     return (mod && mod.__esModule) ? mod : { "default": mod };
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.DependenciesQueryDefinition = exports.WriteFunctions = exports.ReadFunctions = exports.SourceFunctions = exports.LibraryFunctions = exports.Unknown = void 0;
+exports.DependenciesQueryDefinition = exports.WriteFunctions = exports.ReadFunctions = exports.SourceFunctions = exports.LibraryFunctions = exports.DependencyInfoLinkConstraint = exports.Unknown = void 0;
 const ansi_1 = require("../../../util/ansi");
 const time_1 = require("../../../util/time");
 const joi_1 = __importDefault(require("joi"));
 const dependencies_query_executor_1 = require("./dependencies-query-executor");
 exports.Unknown = 'unknown';
+/** when to read the argument value from a linked function */
+var DependencyInfoLinkConstraint;
+(function (DependencyInfoLinkConstraint) {
+    DependencyInfoLinkConstraint["Always"] = "always";
+    DependencyInfoLinkConstraint["IfUnknown"] = "if-unknown";
+})(DependencyInfoLinkConstraint || (exports.DependencyInfoLinkConstraint = DependencyInfoLinkConstraint = {}));
 // these lists are originally based on https://github.com/duncantl/CodeDepends/blob/7fd96dfee16b252e5f642c77a7ababf48e9326f8/R/codeTypes.R
 exports.LibraryFunctions = [
-    { name: 'library', argIdx: 0, argName: 'package' },
-    { name: 'require', argIdx: 0, argName: 'package' },
-    { name: 'loadNamespace', argIdx: 0, argName: 'package' },
-    { name: 'attachNamespace', argIdx: 0, argName: 'ns' },
-    { name: 'attach', argIdx: 0, argName: 'what' },
-    { name: 'groundhog.library', argIdx: 0, argName: 'pkg' },
-    { name: 'p_load', argIdx: 'unnamed' }, // pacman
-    { name: 'p_load_gh', argIdx: 'unnamed' }, // pacman
-    { name: 'from_import', argIdx: 0, argName: 'package' }, // easypackages
-    { name: 'libraries', argIdx: 'unnamed' }, // easypackages
-    { name: 'shelf', argIdx: 'unnamed' } // librarian
+    { name: 'library', argIdx: 0, argName: 'package', resolveValue: 'library' },
+    { name: 'require', argIdx: 0, argName: 'package', resolveValue: true },
+    { name: 'loadNamespace', argIdx: 0, argName: 'package', resolveValue: true },
+    { name: 'attachNamespace', argIdx: 0, argName: 'ns', resolveValue: true },
+    { name: 'attach', argIdx: 0, argName: 'what', resolveValue: true },
+    { name: 'groundhog.library', argIdx: 0, argName: 'pkg', resolveValue: true },
+    { name: 'p_load', argIdx: 'unnamed', resolveValue: 'library' }, // pacman
+    { name: 'p_load_gh', argIdx: 'unnamed', resolveValue: 'library' }, // pacman
+    { name: 'from_import', argIdx: 0, argName: 'package', resolveValue: true }, // easypackages
+    { name: 'libraries', argIdx: 'unnamed', resolveValue: true }, // easypackages
+    { name: 'shelf', argIdx: 'unnamed', resolveValue: true } // librarian
 ];
 exports.SourceFunctions = [
-    { name: 'source', argIdx: 0, argName: 'file' },
-    { name: 'sys.source', argIdx: 0, argName: 'file' }
+    { name: 'source', argIdx: 0, argName: 'file', resolveValue: true },
+    { name: 'sys.source', argIdx: 0, argName: 'file', resolveValue: true }
 ];
 exports.ReadFunctions = [
-    { name: 'read.table', argIdx: 0, argName: 'file' },
-    { name: 'read.csv', argIdx: 0, argName: 'file' },
-    { name: 'read.csv2', argIdx: 0, argName: 'file' },
-    { name: 'read.delim', argIdx: 0, argName: 'file' },
-    { name: 'read.dcf', argIdx: 0, argName: 'file' },
-    { name: 'scan', argIdx: 0, argName: 'file' },
-    { name: 'read.fwf', argIdx: 0, argName: 'file' },
-    { name: 'file', argIdx: 1, argName: 'open' },
-    { name: 'url', argIdx: 1, argName: 'open' },
-    { name: 'load', argIdx: 0, argName: 'file' },
-    { name: 'gzfile', argIdx: 1, argName: 'open' },
-    { name: 'bzfile', argIdx: 1, argName: 'open' },
-    { name: 'download.file', argIdx: 0, argName: 'url' },
-    { name: 'pipe', argIdx: 1, argName: 'open' },
-    { name: 'fifo', argIdx: 1, argName: 'open' },
-    { name: 'unz', argIdx: 1, argName: 'open' },
-    { name: 'matrix', argIdx: 0, argName: 'data' },
-    { name: 'readRDS', argIdx: 0, argName: 'file' },
-    { name: 'readLines', argIdx: 0, argName: 'con' },
-    { name: 'readRenviron', argIdx: 0, argName: 'path' },
+    { name: 'read.table', argIdx: 0, argName: 'file', resolveValue: true },
+    { name: 'read.csv', argIdx: 0, argName: 'file', resolveValue: true },
+    { name: 'read.csv2', argIdx: 0, argName: 'file', resolveValue: true },
+    { name: 'read.delim', argIdx: 0, argName: 'file', resolveValue: true },
+    { name: 'read.dcf', argIdx: 0, argName: 'file', resolveValue: true },
+    { name: 'scan', argIdx: 0, argName: 'file', resolveValue: true },
+    { name: 'read.fwf', argIdx: 0, argName: 'file', resolveValue: true },
+    { name: 'file', argIdx: 1, argName: 'open', resolveValue: true },
+    { name: 'url', argIdx: 1, argName: 'open', resolveValue: true },
+    { name: 'load', argIdx: 0, argName: 'file', resolveValue: true },
+    { name: 'gzfile', argIdx: 1, argName: 'open', resolveValue: true },
+    { name: 'bzfile', argIdx: 1, argName: 'open', resolveValue: true },
+    { name: 'download.file', argIdx: 0, argName: 'url', resolveValue: true },
+    { name: 'pipe', argIdx: 1, argName: 'open', resolveValue: true },
+    { name: 'fifo', argIdx: 1, argName: 'open', resolveValue: true },
+    { name: 'unz', argIdx: 1, argName: 'open', resolveValue: true },
+    { name: 'matrix', argIdx: 0, argName: 'data', resolveValue: true },
+    { name: 'readRDS', argIdx: 0, argName: 'file', resolveValue: true },
+    { name: 'readLines', argIdx: 0, argName: 'con', resolveValue: true },
+    { name: 'readRenviron', argIdx: 0, argName: 'path', resolveValue: true },
     // readr
-    { name: 'read_csv', argIdx: 0, argName: 'file' },
-    { name: 'read_csv2', argIdx: 0, argName: 'file' },
-    { name: 'read_lines', argIdx: 0, argName: 'file' },
-    { name: 'read_delim', argIdx: 0, argName: 'file' },
-    { name: 'read_dsv', argIdx: 0, argName: 'file' },
-    { name: 'read_fwf', argIdx: 0, argName: 'file' },
-    { name: 'read_tsv', argIdx: 0, argName: 'file' },
-    { name: 'read_table', argIdx: 0, argName: 'file' },
-    { name: 'read_log', argIdx: 0, argName: 'file' },
-    { name: 'read_lines', argIdx: 0, argName: 'file' },
-    { name: 'read_lines_chunked', argIdx: 0, argName: 'file' },
+    { name: 'read_csv', argIdx: 0, argName: 'file', resolveValue: true },
+    { name: 'read_csv2', argIdx: 0, argName: 'file', resolveValue: true },
+    { name: 'read_lines', argIdx: 0, argName: 'file', resolveValue: true },
+    { name: 'read_delim', argIdx: 0, argName: 'file', resolveValue: true },
+    { name: 'read_dsv', argIdx: 0, argName: 'file', resolveValue: true },
+    { name: 'read_fwf', argIdx: 0, argName: 'file', resolveValue: true },
+    { name: 'read_tsv', argIdx: 0, argName: 'file', resolveValue: true },
+    { name: 'read_table', argIdx: 0, argName: 'file', resolveValue: true },
+    { name: 'read_log', argIdx: 0, argName: 'file', resolveValue: true },
+    { name: 'read_lines', argIdx: 0, argName: 'file', resolveValue: true },
+    { name: 'read_lines_chunked', argIdx: 0, argName: 'file', resolveValue: true },
     // xlsx
-    { name: 'read.xlsx', argIdx: 0, argName: 'file' },
-    { name: 'read.xlsx2', argIdx: 0, argName: 'file' },
+    { name: 'read.xlsx', argIdx: 0, argName: 'file', resolveValue: true },
+    { name: 'read.xlsx2', argIdx: 0, argName: 'file', resolveValue: true },
     // data.table
-    { name: 'fread', argIdx: 0, argName: 'file' },
+    { name: 'fread', argIdx: 0, argName: 'file', resolveValue: true },
     // haven
-    { name: 'read_sas', argIdx: 0, argName: 'file' },
-    { name: 'read_sav', argIdx: 0, argName: 'file' },
-    { name: 'read_por', argIdx: 0, argName: 'file' },
-    { name: 'read_dta', argIdx: 0, argName: 'file' },
-    { name: 'read_xpt', argIdx: 0, argName: 'file' },
+    { name: 'read_sas', argIdx: 0, argName: 'file', resolveValue: true },
+    { name: 'read_sav', argIdx: 0, argName: 'file', resolveValue: true },
+    { name: 'read_por', argIdx: 0, argName: 'file', resolveValue: true },
+    { name: 'read_dta', argIdx: 0, argName: 'file', resolveValue: true },
+    { name: 'read_xpt', argIdx: 0, argName: 'file', resolveValue: true },
     // feather
-    { name: 'read_feather', argIdx: 0, argName: 'file' },
+    { name: 'read_feather', argIdx: 0, argName: 'file', resolveValue: true },
     // foreign
-    { name: 'read.arff', argIdx: 0, argName: 'file' },
-    { name: 'read.dbf', argIdx: 0, argName: 'file' },
-    { name: 'read.dta', argIdx: 0, argName: 'file' },
-    { name: 'read.epiinfo', argIdx: 0, argName: 'file' },
-    { name: 'read.mtp', argIdx: 0, argName: 'file' },
-    { name: 'read.octave', argIdx: 0, argName: 'file' },
-    { name: 'read.spss', argIdx: 0, argName: 'file' },
-    { name: 'read.ssd', argIdx: 0, argName: 'file' },
-    { name: 'read.systat', argIdx: 0, argName: 'file' },
-    { name: 'read.xport', argIdx: 0, argName: 'file' },
+    { name: 'read.arff', argIdx: 0, argName: 'file', resolveValue: true },
+    { name: 'read.dbf', argIdx: 0, argName: 'file', resolveValue: true },
+    { name: 'read.dta', argIdx: 0, argName: 'file', resolveValue: true },
+    { name: 'read.epiinfo', argIdx: 0, argName: 'file', resolveValue: true },
+    { name: 'read.mtp', argIdx: 0, argName: 'file', resolveValue: true },
+    { name: 'read.octave', argIdx: 0, argName: 'file', resolveValue: true },
+    { name: 'read.spss', argIdx: 0, argName: 'file', resolveValue: true },
+    { name: 'read.ssd', argIdx: 0, argName: 'file', resolveValue: true },
+    { name: 'read.systat', argIdx: 0, argName: 'file', resolveValue: true },
+    { name: 'read.xport', argIdx: 0, argName: 'file', resolveValue: true },
     // car
-    { name: 'Import', argIdx: 0, argName: 'file' },
+    { name: 'Import', argIdx: 0, argName: 'file', resolveValue: true },
+];
+const OutputRedirects = [
+    { type: 'link-to-last-call', callName: 'sink', attachLinkInfo: { argIdx: 0, argName: 'file', when: DependencyInfoLinkConstraint.IfUnknown, resolveValue: true } }
 ];
 exports.WriteFunctions = [
-    { name: 'save',
-    { name: 'save.image', argIdx:
-    { name: 'write', argIdx: 1, argName: 'file' },
-    { name: 'dput', argIdx: 1, argName: 'file' },
-    { name: 'dump', argIdx: 1, argName: 'file' },
-    { name: 'write.table', argIdx: 1, argName: 'file' },
-    { name: 'write.csv', argIdx: 1, argName: 'file' },
-    { name: 'saveRDS', argIdx: 1, argName: 'file' },
+    { name: 'save', argName: 'file', resolveValue: true },
+    { name: 'save.image', argIdx: 1, argName: 'file', resolveValue: true },
+    { name: 'write', argIdx: 1, argName: 'file', resolveValue: true },
+    { name: 'dput', argIdx: 1, argName: 'file', resolveValue: true },
+    { name: 'dump', argIdx: 1, argName: 'file', resolveValue: true },
+    { name: 'write.table', argIdx: 1, argName: 'file', resolveValue: true },
+    { name: 'write.csv', argIdx: 1, argName: 'file', resolveValue: true },
+    { name: 'saveRDS', argIdx: 1, argName: 'file', resolveValue: true },
     // write functions that don't have argIndex are assumed to write to stdout
-    { name: 'print', linkTo:
-    { name: 'cat', linkTo:
-    { name: 'message', linkTo:
-    { name: 'warning', linkTo:
+    { name: 'print', linkTo: OutputRedirects, resolveValue: true },
+    { name: 'cat', linkTo: OutputRedirects, argName: 'file', resolveValue: true },
+    { name: 'message', linkTo: OutputRedirects, resolveValue: true },
+    { name: 'warning', linkTo: OutputRedirects, resolveValue: true },
     // readr
-    { name: 'write_csv', argIdx: 1, argName: 'file' },
-    { name: 'write_csv2', argIdx: 1, argName: 'file' },
-    { name: 'write_delim', argIdx: 1, argName: 'file' },
-    { name: 'write_dsv', argIdx: 1, argName: 'file' },
-    { name: 'write_fwf', argIdx: 1, argName: 'file' },
-    { name: 'write_tsv', argIdx: 1, argName: 'file' },
-    { name: 'write_table', argIdx: 1, argName: 'file' },
-    { name: 'write_log', argIdx: 1, argName: 'file' },
+    { name: 'write_csv', argIdx: 1, argName: 'file', resolveValue: true },
+    { name: 'write_csv2', argIdx: 1, argName: 'file', resolveValue: true },
+    { name: 'write_delim', argIdx: 1, argName: 'file', resolveValue: true },
+    { name: 'write_dsv', argIdx: 1, argName: 'file', resolveValue: true },
+    { name: 'write_fwf', argIdx: 1, argName: 'file', resolveValue: true },
+    { name: 'write_tsv', argIdx: 1, argName: 'file', resolveValue: true },
+    { name: 'write_table', argIdx: 1, argName: 'file', resolveValue: true },
+    { name: 'write_log', argIdx: 1, argName: 'file', resolveValue: true },
     // heaven
-    { name: 'write_sas', argIdx: 1, argName: 'file' },
-    { name: 'write_sav', argIdx: 1, argName: 'file' },
-    { name: 'write_por', argIdx: 1, argName: 'file' },
-    { name: 'write_dta', argIdx: 1, argName: 'file' },
-    { name: 'write_xpt', argIdx: 1, argName: 'file' },
+    { name: 'write_sas', argIdx: 1, argName: 'file', resolveValue: true },
+    { name: 'write_sav', argIdx: 1, argName: 'file', resolveValue: true },
+    { name: 'write_por', argIdx: 1, argName: 'file', resolveValue: true },
+    { name: 'write_dta', argIdx: 1, argName: 'file', resolveValue: true },
+    { name: 'write_xpt', argIdx: 1, argName: 'file', resolveValue: true },
     // feather
-    { name: 'write_feather', argIdx: 1, argName: 'file' },
+    { name: 'write_feather', argIdx: 1, argName: 'file', resolveValue: true },
     // foreign
-    { name: 'write.arff', argIdx: 1, argName: 'file' },
-    { name: 'write.dbf', argIdx: 1, argName: 'file' },
-    { name: 'write.dta', argIdx: 1, argName: 'file' },
-    { name: 'write.foreign', argIdx: 1, argName: 'file' },
+    { name: 'write.arff', argIdx: 1, argName: 'file', resolveValue: true },
+    { name: 'write.dbf', argIdx: 1, argName: 'file', resolveValue: true },
+    { name: 'write.dta', argIdx: 1, argName: 'file', resolveValue: true },
+    { name: 'write.foreign', argIdx: 1, argName: 'file', resolveValue: true },
     // xlsx
-    { name: 'write.xlsx', argIdx: 1, argName: 'file' },
-    { name: 'write.xlsx2', argIdx: 1, argName: 'file' },
+    { name: 'write.xlsx', argIdx: 1, argName: 'file', resolveValue: true },
+    { name: 'write.xlsx2', argIdx: 1, argName: 'file', resolveValue: true },
     // graphics
-    { name: 'pdf', argIdx: 0, argName: 'file' },
-    { name: 'jpeg', argIdx: 0, argName: 'file' },
-    { name: 'png', argIdx: 0, argName: 'file' },
-    { name: 'windows', argIdx: 0, argName: 'file' },
-    { name: 'postscript', argIdx: 0, argName: 'file' },
-    { name: 'xfix', argIdx: 0, argName: 'file' },
-    { name: 'bitmap', argIdx: 0, argName: 'file' },
-    { name: 'pictex', argIdx: 0, argName: 'file' },
-    { name: 'cairo_pdf', argIdx: 0, argName: 'file' },
-    { name: 'svg', argIdx: 0, argName: 'file' },
-    { name: 'bmp', argIdx: 0, argName: 'file' },
-    { name: 'tiff', argIdx: 0, argName: 'file' },
-    { name: 'X11', argIdx: 0, argName: 'file' },
-    { name: 'quartz', argIdx: 0, argName: 'file' },
+    { name: 'pdf', argIdx: 0, argName: 'file', resolveValue: true },
+    { name: 'jpeg', argIdx: 0, argName: 'file', resolveValue: true },
+    { name: 'png', argIdx: 0, argName: 'file', resolveValue: true },
+    { name: 'windows', argIdx: 0, argName: 'file', resolveValue: true },
+    { name: 'postscript', argIdx: 0, argName: 'file', resolveValue: true },
+    { name: 'xfix', argIdx: 0, argName: 'file', resolveValue: true },
+    { name: 'bitmap', argIdx: 0, argName: 'file', resolveValue: true },
+    { name: 'pictex', argIdx: 0, argName: 'file', resolveValue: true },
+    { name: 'cairo_pdf', argIdx: 0, argName: 'file', resolveValue: true },
+    { name: 'svg', argIdx: 0, argName: 'file', resolveValue: true },
+    { name: 'bmp', argIdx: 0, argName: 'file', resolveValue: true },
+    { name: 'tiff', argIdx: 0, argName: 'file', resolveValue: true },
+    { name: 'X11', argIdx: 0, argName: 'file', resolveValue: true },
+    { name: 'quartz', argIdx: 0, argName: 'file', resolveValue: true },
     // car
-    { name: 'Export', argIdx: 0, argName: 'file' },
+    { name: 'Export', argIdx: 0, argName: 'file', resolveValue: true },
 ];
 function printResultSection(title, infos, result, sectionSpecifics) {
     if (infos.length <= 0) {
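The `OutputRedirects` link above is what makes the stdout writers (`print`, `cat`, `message`, `warning`) follow a preceding `sink()` call: in the executor's `collectValuesFromLinks`, a linked call's argument is only consulted when the write call itself yields no value, unless the link is marked `Always`. A tiny sketch of that selection rule, with illustrative names:

```typescript
// Mirrors the skip condition in collectValuesFromLinks:
// skip when a value is already known and the link is not marked Always.
enum DependencyInfoLinkConstraint { Always = 'always', IfUnknown = 'if-unknown' }

function shouldConsultLink(hasValueAlready: boolean, when: DependencyInfoLinkConstraint): boolean {
    return !hasValueAlready || when === DependencyInfoLinkConstraint.Always;
}

// cat("hi") after sink("log.txt"): no destination on cat itself → consult the sink link
console.log(shouldConsultLink(false, DependencyInfoLinkConstraint.IfUnknown)); // true
// cat("hi", file = "out.txt"): destination already known → the IfUnknown link is ignored
console.log(shouldConsultLink(true, DependencyInfoLinkConstraint.IfUnknown));  // false
```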

package/queries/catalog/resolve-value-query/resolve-value-query-executor.d.ts

@@ -1,4 +1,4 @@
 import type { ResolveValueQuery, ResolveValueQueryResult } from './resolve-value-query-format';
 import type { BasicQueryData } from '../../base-query-format';
 export declare function fingerPrintOfQuery(query: ResolveValueQuery): string;
-export declare function executeResolveValueQuery({ dataflow: { graph
+export declare function executeResolveValueQuery({ dataflow: { graph }, ast }: BasicQueryData, queries: readonly ResolveValueQuery[]): ResolveValueQueryResult;

package/queries/catalog/resolve-value-query/resolve-value-query-executor.js

@@ -5,11 +5,10 @@ exports.executeResolveValueQuery = executeResolveValueQuery;
 const log_1 = require("../../../util/log");
 const parse_1 = require("../../../slicing/criterion/parse");
 const resolve_by_name_1 = require("../../../dataflow/environments/resolve-by-name");
-const node_id_1 = require("../../../r-bridge/lang-4.x/ast/model/processing/node-id");
 function fingerPrintOfQuery(query) {
     return JSON.stringify(query);
 }
-function executeResolveValueQuery({ dataflow: { graph
+function executeResolveValueQuery({ dataflow: { graph }, ast }, queries) {
     const start = Date.now();
     const results = {};
     for (const query of queries) {

@@ -18,8 +17,8 @@ function executeResolveValueQuery({ dataflow: { graph, environment }, ast }, que
             log_1.log.warn(`Duplicate Key for resolve-value-query: ${key}, skipping...`);
         }
         const values = query.criteria
-            .map(criteria => (0,
-            .flatMap(ident => (0, resolve_by_name_1.
+            .map(criteria => (0, parse_1.slicingCriterionToId)(criteria, ast.idMap))
+            .flatMap(ident => (0, resolve_by_name_1.resolve)(ident, { graph, full: true, idMap: ast.idMap }));
         results[key] = {
             values: [...new Set(values)]
         };
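Both executors now go through the new `resolve` export of `dataflow/environments/resolve-by-name` (its declaration changed by +24/-2 in this release but is not included in this diff). The sketch below only records the option fields visible in the two call sites above; treat it as an observation of those call shapes, not the authoritative signature:

```typescript
// Option fields observed in this diff only; the real declaration lives in
// dataflow/environments/resolve-by-name.d.ts and may differ.
interface ResolveOptionsSketch {
    graph:        unknown;  // the dataflow graph (present in both call sites)
    environment?: unknown;  // used by the dependencies query (callee environment)
    idMap?:       unknown;  // used by the resolve-value query
    full?:        boolean;  // false disables full value resolution (see resolveBasedOnConfig)
}

// call shapes as they appear in this diff:
//   resolve(argument, { environment, graph, full })              – dependencies query
//   resolve(ident,    { graph, full: true, idMap: ast.idMap })   – resolve-value query
const exampleOptions: ResolveOptionsSketch = { graph: {}, full: true, idMap: new Map() };
console.log(exampleOptions);
```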