@eagleoutice/flowr 2.1.10 → 2.1.12
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -0
- package/cli/flowr.js +8 -0
- package/cli/repl/commands/repl-query.js +14 -3
- package/cli/repl/server/connection.js +1 -1
- package/core/steps/pipeline/default-pipelines.d.ts +6 -0
- package/core/steps/pipeline/default-pipelines.js +6 -0
- package/dataflow/environments/resolve-by-name.d.ts +2 -1
- package/dataflow/environments/resolve-by-name.js +2 -1
- package/dataflow/graph/vertex.d.ts +4 -0
- package/dataflow/graph/vertex.js +3 -1
- package/dataflow/internal/process/functions/call/built-in/built-in-access.js +29 -26
- package/dataflow/internal/process/functions/call/built-in/built-in-assignment.d.ts +1 -2
- package/dataflow/internal/process/functions/call/built-in/built-in-assignment.js +28 -24
- package/dataflow/internal/process/functions/call/built-in/built-in-replacement.js +2 -1
- package/documentation/doc-util/doc-dfg.js +1 -1
- package/documentation/doc-util/doc-query.js +1 -1
- package/documentation/doc-util/doc-search.d.ts +25 -0
- package/documentation/doc-util/doc-search.js +121 -0
- package/documentation/doc-util/doc-types.d.ts +10 -2
- package/documentation/doc-util/doc-types.js +81 -3
- package/documentation/print-dataflow-graph-wiki.js +1 -1
- package/documentation/print-interface-wiki.js +31 -15
- package/documentation/print-normalized-ast-wiki.js +4 -4
- package/documentation/print-query-wiki.js +35 -0
- package/documentation/print-search-wiki.d.ts +1 -0
- package/documentation/print-search-wiki.js +74 -0
- package/package.json +2 -1
- package/queries/base-query-format.d.ts +2 -2
- package/queries/catalog/call-context-query/call-context-query-executor.d.ts +1 -1
- package/queries/catalog/call-context-query/call-context-query-executor.js +1 -1
- package/queries/catalog/cluster-query/cluster-query-executor.d.ts +1 -1
- package/queries/catalog/cluster-query/cluster-query-executor.js +1 -1
- package/queries/catalog/config-query/config-query-executor.d.ts +3 -0
- package/queries/catalog/config-query/config-query-executor.js +18 -0
- package/queries/catalog/config-query/config-query-format.d.ts +16 -0
- package/queries/catalog/config-query/config-query-format.js +24 -0
- package/queries/catalog/dataflow-query/dataflow-query-executor.d.ts +1 -1
- package/queries/catalog/dataflow-query/dataflow-query-executor.js +1 -1
- package/queries/catalog/dependencies-query/dependencies-query-executor.js +2 -2
- package/queries/catalog/lineage-query/lineage-query-executor.d.ts +1 -1
- package/queries/catalog/lineage-query/lineage-query-executor.js +1 -1
- package/queries/catalog/location-map-query/location-map-query-format.js +1 -1
- package/queries/catalog/search-query/search-query-executor.d.ts +3 -0
- package/queries/catalog/search-query/search-query-executor.js +27 -0
- package/queries/catalog/search-query/search-query-format.d.ts +72 -0
- package/queries/catalog/search-query/search-query-format.js +29 -0
- package/queries/catalog/static-slice-query/static-slice-query-executor.d.ts +1 -1
- package/queries/catalog/static-slice-query/static-slice-query-executor.js +1 -1
- package/queries/query.d.ts +65 -1
- package/queries/query.js +5 -1
- package/r-bridge/lang-4.x/ast/model/type.d.ts +4 -0
- package/r-bridge/lang-4.x/ast/model/type.js +3 -1
- package/search/flowr-search-builder.d.ts +193 -0
- package/search/flowr-search-builder.js +192 -0
- package/search/flowr-search-executor.d.ts +9 -0
- package/search/flowr-search-executor.js +16 -0
- package/search/flowr-search-filters.d.ts +74 -0
- package/search/flowr-search-filters.js +136 -0
- package/search/flowr-search-printer.d.ts +10 -0
- package/search/flowr-search-printer.js +85 -0
- package/search/flowr-search-traverse.d.ts +7 -0
- package/search/flowr-search-traverse.js +12 -0
- package/search/flowr-search.d.ts +58 -0
- package/search/flowr-search.js +29 -0
- package/search/search-executor/search-generators.d.ts +37 -0
- package/search/search-executor/search-generators.js +64 -0
- package/search/search-executor/search-transformer.d.ts +57 -0
- package/search/search-executor/search-transformer.js +99 -0
- package/search/search-optimizer/search-optimizer.d.ts +9 -0
- package/search/search-optimizer/search-optimizer.js +89 -0
- package/util/arrays.d.ts +13 -0
- package/util/assert.d.ts +1 -1
- package/util/mermaid/mermaid.js +17 -0
- package/util/version.js +1 -1
package/README.md
CHANGED
package/cli/flowr.js
CHANGED
@@ -21,6 +21,7 @@ const core_1 = require("./repl/core");
 const repl_version_1 = require("./repl/commands/repl-version");
 const print_version_1 = require("./repl/print-version");
 const flowr_main_options_1 = require("./flowr-main-options");
+const fs_1 = __importDefault(require("fs"));
 exports.toolName = 'flowr';
 exports.optionHelp = [
     {
@@ -59,6 +60,13 @@ if (options['config-json']) {
     }
 }
 if (!usedConfig) {
+    if (options['config-file']) {
+        // validate it exists
+        if (!fs_1.default.existsSync(options['config-file'])) {
+            log_1.log.error(`Config file '${options['config-file']}' does not exist`);
+            process.exit(1);
+        }
+    }
     (0, config_1.setConfigFile)(options['config-file'] ?? flowr_main_options_1.defaultConfigFile, undefined, true);
 }
 function retrieveShell() {
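Note: the new guard above only checks that the user-supplied path exists before it is handed to `setConfigFile`. A minimal standalone sketch of the same check (hypothetical helper name, plain Node `fs`):

```ts
import fs from 'fs';

// Hypothetical helper mirroring the guard added in cli/flowr.js:
// bail out early if the user-supplied config path does not exist.
function ensureConfigFileExists(path: string): void {
    if(!fs.existsSync(path)) {
        console.error(`Config file '${path}' does not exist`);
        process.exit(1);
    }
}
```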
package/cli/repl/commands/repl-query.js
CHANGED
@@ -21,8 +21,10 @@ function printHelp(output) {
     output.stdout('The query is an array of query objects to represent multiple queries. Each query object may have the following properties:');
     output.stdout((0, schema_1.describeSchema)((0, query_1.AnyQuerySchema)(), output.formatter));
     output.stdout(`\n\nThe example ${(0, ansi_1.italic)(':query "[{\\"type\\": \\"call-context\\", \\"callName\\": \\"mean\\" }]" mean(1:10)', output.formatter)} would return the call context of the mean function.`);
-    output.stdout('As a convenience, we interpret any (non-help) string not starting with \'[\' as a regex for the simple call-context query.');
+    output.stdout('As a convenience, we interpret any (non-help, non-@) string not starting with \'[\' as a regex for the simple call-context query.');
     output.stdout(`Hence, ${(0, ansi_1.italic)(':query "mean" mean(1:10)', output.formatter)} is equivalent to the above example.`);
+    output.stdout(`Similarly, '@<type>' is interpreted as a query of the given type.`);
+    output.stdout(`With this, ${(0, ansi_1.italic)(':query @config', output.formatter)} prints the result of the config query.`);
 }
 async function processQueryArgs(line, shell, output) {
     const args = (0, args_1.splitAtEscapeSensitive)(line);
@@ -36,7 +38,16 @@ async function processQueryArgs(line, shell, output) {
         return;
     }
     let parsedQuery = [];
-    if (query.startsWith('[')) {
+    if (query.startsWith('@')) {
+        parsedQuery = [{ type: query.slice(1) }];
+        const validationResult = (0, query_1.QueriesSchema)().validate(parsedQuery);
+        if (validationResult.error) {
+            output.stderr(`Invalid query: ${validationResult.error.message}`);
+            printHelp(output);
+            return;
+        }
+    }
+    else if (query.startsWith('[')) {
         parsedQuery = JSON.parse(query);
         const validationResult = (0, query_1.QueriesSchema)().validate(parsedQuery);
         if (validationResult.error) {
@@ -50,7 +61,7 @@ async function processQueryArgs(line, shell, output) {
     }
     const processed = await getDataflow(shell, args.join(' '));
     return {
-        query: (0, query_1.executeQueries)({
+        query: (0, query_1.executeQueries)({ dataflow: processed.dataflow, ast: processed.normalize }, parsedQuery),
         processed
     };
 }
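Note: the `@<type>` shorthand is expanded into a one-element query array and then validated like any other query. A self-contained sketch of that expansion (hypothetical helper; the `call-context` fallback mirrors the REPL convenience described in the help text above):

```ts
type QueryLike = { type: string; [key: string]: unknown };

// Expand REPL input into the query-array form the executor expects:
// ':query @config'  -> [{ type: 'config' }]
// a JSON array      -> parsed as-is
// any other string  -> a call-context query using the string as a call-name regex
function expandQueryShorthand(input: string): QueryLike[] {
    if(input.startsWith('@')) {
        return [{ type: input.slice(1) }];
    } else if(input.startsWith('[')) {
        return JSON.parse(input) as QueryLike[];
    }
    return [{ type: 'call-context', callName: input }];
}
```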
package/cli/repl/server/connection.js
CHANGED
@@ -326,7 +326,7 @@ class FlowRServerConnection {
         const { dataflow: dfg, normalize: ast } = fileInformation.pipeline.getResults(true);
         (0, assert_1.guard)(dfg !== undefined, `Dataflow graph must be present (request: ${request.filetoken})`);
         (0, assert_1.guard)(ast !== undefined, `AST must be present (request: ${request.filetoken})`);
-        const results = (0, query_1.executeQueries)({
+        const results = (0, query_1.executeQueries)({ dataflow: dfg, ast }, request.query);
         (0, send_1.sendMessage)(this.socket, {
             type: 'response-query',
             id: request.id,
package/core/steps/pipeline/default-pipelines.d.ts
CHANGED
@@ -227,6 +227,11 @@ export declare const DEFAULT_SLICE_WITHOUT_RECONSTRUCT_PIPELINE: import("./pipel
     readonly dependencies: readonly ["dataflow"];
     readonly requiredInput: import("../all/static-slicing/00-slice").SliceRequiredInput;
 }>;
+/**
+ * The default pipeline for working with flowr, including the dataflow step,
+ * see the {@link DEFAULT_NORMALIZE_PIPELINE} for the pipeline without the dataflow step,
+ * and the {@link DEFAULT_SLICE_AND_RECONSTRUCT_PIPELINE} for the pipeline with slicing and reconstructing steps
+ */
 export declare const DEFAULT_DATAFLOW_PIPELINE: import("./pipeline").Pipeline<{
     readonly name: "parse";
     readonly humanReadableName: "parse with R shell";
@@ -280,6 +285,7 @@ export declare const DEFAULT_DATAFLOW_PIPELINE: import("./pipeline").Pipeline<{
     };
     readonly dependencies: readonly ["normalize"];
 }>;
+/** The pipeline to use when you want to parse and normalize your R file, see {@link DEFAULT_DATAFLOW_PIPELINE} for the additional `dataflow` step */
 export declare const DEFAULT_NORMALIZE_PIPELINE: import("./pipeline").Pipeline<{
     readonly name: "parse";
     readonly humanReadableName: "parse with R shell";
package/core/steps/pipeline/default-pipelines.js
CHANGED
@@ -13,7 +13,13 @@ const _10_reconstruct_1 = require("../all/static-slicing/10-reconstruct");
 exports.DEFAULT_SLICING_PIPELINE = (0, pipeline_1.createPipeline)(_00_parse_1.PARSE_WITH_R_SHELL_STEP, _10_normalize_1.NORMALIZE, _20_dataflow_1.STATIC_DATAFLOW, _00_slice_1.STATIC_SLICE, _10_reconstruct_1.NAIVE_RECONSTRUCT);
 exports.DEFAULT_SLICE_AND_RECONSTRUCT_PIPELINE = exports.DEFAULT_SLICING_PIPELINE;
 exports.DEFAULT_SLICE_WITHOUT_RECONSTRUCT_PIPELINE = (0, pipeline_1.createPipeline)(_00_parse_1.PARSE_WITH_R_SHELL_STEP, _10_normalize_1.NORMALIZE, _20_dataflow_1.STATIC_DATAFLOW, _00_slice_1.STATIC_SLICE);
+/**
+ * The default pipeline for working with flowr, including the dataflow step,
+ * see the {@link DEFAULT_NORMALIZE_PIPELINE} for the pipeline without the dataflow step,
+ * and the {@link DEFAULT_SLICE_AND_RECONSTRUCT_PIPELINE} for the pipeline with slicing and reconstructing steps
+ */
 exports.DEFAULT_DATAFLOW_PIPELINE = (0, pipeline_1.createPipeline)(_00_parse_1.PARSE_WITH_R_SHELL_STEP, _10_normalize_1.NORMALIZE, _20_dataflow_1.STATIC_DATAFLOW);
+/** The pipeline to use when you want to parse and normalize your R file, see {@link DEFAULT_DATAFLOW_PIPELINE} for the additional `dataflow` step */
 exports.DEFAULT_NORMALIZE_PIPELINE = (0, pipeline_1.createPipeline)(_00_parse_1.PARSE_WITH_R_SHELL_STEP, _10_normalize_1.NORMALIZE);
 exports.DEFAULT_PARSE_PIPELINE = (0, pipeline_1.createPipeline)(_00_parse_1.PARSE_WITH_R_SHELL_STEP);
 //# sourceMappingURL=default-pipelines.js.map
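Note: the newly documented `DEFAULT_DATAFLOW_PIPELINE` is consumed via `PipelineExecutor`, exactly as the new `doc-search.js` below does. A usage sketch under that assumption (module specifiers are illustrative deep imports into the package):

```ts
import { PipelineExecutor } from '@eagleoutice/flowr/core/pipeline-executor';
import { DEFAULT_DATAFLOW_PIPELINE } from '@eagleoutice/flowr/core/steps/pipeline/default-pipelines';
import { requestFromInput } from '@eagleoutice/flowr/r-bridge/retriever';
import { RShell } from '@eagleoutice/flowr/r-bridge/shell';

// Run parse -> normalize -> dataflow on a small R snippet.
async function analyze(code: string) {
    const shell = new RShell();
    try {
        const analysis = await new PipelineExecutor(DEFAULT_DATAFLOW_PIPELINE, {
            shell,
            request: requestFromInput(code)
        }).allRemainingSteps();
        return analysis; // exposes .normalize (the AST) and .dataflow (the graph)
    } finally {
        shell.close();
    }
}
```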
package/dataflow/environments/resolve-by-name.d.ts
CHANGED
@@ -11,7 +11,8 @@ import type { DataflowGraph } from '../graph/graph';
  * @param environment - The current environment used for name resolution
  * @param target - The target (meta) type of the identifier to resolve
  *
- * @returns A list of possible definitions
+ * @returns A list of possible identifier definitions (one if the definition location is exactly and always known), or `undefined`
+ * if the identifier is undefined in the current scope/with the current environment information.
  */
 export declare function resolveByName(name: Identifier, environment: REnvironmentInformation, target?: ReferenceType): IdentifierDefinition[] | undefined;
 export declare function resolvesToBuiltInConstant(name: Identifier | undefined, environment: REnvironmentInformation, wantedValue: unknown): Ternary;
package/dataflow/environments/resolve-by-name.js
CHANGED
@@ -35,7 +35,8 @@ const TargetTypePredicate = {
  * @param environment - The current environment used for name resolution
  * @param target - The target (meta) type of the identifier to resolve
  *
- * @returns A list of possible definitions
+ * @returns A list of possible identifier definitions (one if the definition location is exactly and always known), or `undefined`
+ * if the identifier is undefined in the current scope/with the current environment information.
  */
 function resolveByName(name, environment, target = identifier_1.ReferenceType.Unknown) {
     let current = environment.current;
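Note: the clarified contract distinguishes `undefined` (identifier unknown in the current environment) from a list of candidate definitions. A caller sketch honoring that contract (hypothetical helper; import paths are illustrative):

```ts
import { resolveByName } from '@eagleoutice/flowr/dataflow/environments/resolve-by-name';
import type { Identifier, IdentifierDefinition } from '@eagleoutice/flowr/dataflow/environments/identifier';
import type { REnvironmentInformation } from '@eagleoutice/flowr/dataflow/environments/environment';

// Return the single known definition, or undefined if the name is unknown
// in this environment or cannot be pinned to exactly one definition site.
function resolveUniquely(name: Identifier, env: REnvironmentInformation): IdentifierDefinition | undefined {
    const defs = resolveByName(name, env);
    if(defs === undefined) {
        return undefined; // identifier is undefined in the current scope
    }
    return defs.length === 1 ? defs[0] : undefined;
}
```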
package/dataflow/graph/vertex.d.ts
CHANGED
@@ -10,6 +10,10 @@ export declare enum VertexType {
     VariableDefinition = "variable-definition",
     FunctionDefinition = "function-definition"
 }
+export declare const ValidVertexTypes: Set<string>;
+export declare const ValidVertexTypeReverse: {
+    [k: string]: string;
+};
 /**
  * A single index of a container, which is not a container itself.
  *
package/dataflow/graph/vertex.js
CHANGED
@@ -1,6 +1,6 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.VertexType = void 0;
+exports.ValidVertexTypeReverse = exports.ValidVertexTypes = exports.VertexType = void 0;
 exports.isParentContainerIndex = isParentContainerIndex;
 exports.isValueVertex = isValueVertex;
 exports.isUseVertex = isUseVertex;
@@ -15,6 +15,8 @@ var VertexType;
     VertexType["VariableDefinition"] = "variable-definition";
     VertexType["FunctionDefinition"] = "function-definition";
 })(VertexType || (exports.VertexType = VertexType = {}));
+exports.ValidVertexTypes = new Set(Object.values(VertexType));
+exports.ValidVertexTypeReverse = Object.fromEntries(Object.entries(VertexType).map(([k, v]) => [v, k]));
 function isParentContainerIndex(index) {
     return 'subIndices' in index;
 }
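Note: `ValidVertexTypes` and `ValidVertexTypeReverse` are derived mechanically from the string enum. The same pattern in plain TypeScript (a sketch; the enum members shown are illustrative, not a copy of flowR's full `VertexType`):

```ts
enum VertexType {
    Value = 'value',
    Use = 'use',
    FunctionCall = 'function-call',
    VariableDefinition = 'variable-definition',
    FunctionDefinition = 'function-definition'
}

// Set of all valid enum values, e.g. for quick membership checks on user input.
const ValidVertexTypes = new Set<string>(Object.values(VertexType));

// Reverse lookup from enum value ('use') back to enum key ('Use').
const ValidVertexTypeReverse: Record<string, string> =
    Object.fromEntries(Object.entries(VertexType).map(([k, v]) => [v, k]));

console.log(ValidVertexTypes.has('use'));              // true
console.log(ValidVertexTypeReverse['function-call']);  // 'FunctionCall'
```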
package/dataflow/internal/process/functions/call/built-in/built-in-access.js
CHANGED
@@ -13,6 +13,7 @@ const built_in_assignment_1 = require("./built-in-assignment");
 const identifier_1 = require("../../../../../environments/identifier");
 const vertex_1 = require("../../../../../graph/vertex");
 const list_access_1 = require("../../../../../../util/list-access");
+const config_1 = require("../../../../../../config");
 function tableAssignmentProcessor(name, args, rootId, data, outInfo) {
     outInfo.definitionRootNodes.push(rootId);
     return (0, known_call_handling_1.processKnownFunctionCall)({ name, args, rootId, data }).information;
@@ -147,34 +148,36 @@ function processStringBasedAccess(args, data, name, rootId, config) {
     if (accessedArg === undefined || accessArg === undefined) {
         return fnCall;
     }
-
-
-
-
-
-    else {
-        // Higher access call
-        const underlyingAccessId = accessedArg.value?.info.id ?? -1;
-        const vertex = fnCall.information.graph.getVertex(underlyingAccessId);
-        const subIndices = vertex?.indicesCollection
-            ?.flatMap(indices => indices.indices)
-            ?.flatMap(index => index?.subIndices ?? []);
-        if (subIndices) {
-            accessedIndicesCollection = (0, list_access_1.filterIndices)(subIndices, accessArg);
+    if ((0, config_1.getConfig)().solver.pointerTracking) {
+        let accessedIndicesCollection;
+        // If the accessedArg is a symbol, it's either a simple access or the base case of a nested access
+        if (accessedArg.value?.type === type_1.RType.Symbol) {
+            accessedIndicesCollection = (0, list_access_1.resolveSingleIndex)(accessedArg, accessArg, data.environment);
         }
-
-
-
-
-
-
+        else {
+            // Higher access call
+            const underlyingAccessId = accessedArg.value?.info.id ?? -1;
+            const vertex = fnCall.information.graph.getVertex(underlyingAccessId);
+            const subIndices = vertex?.indicesCollection
+                ?.flatMap(indices => indices.indices)
+                ?.flatMap(index => index?.subIndices ?? []);
+            if (subIndices) {
+                accessedIndicesCollection = (0, list_access_1.filterIndices)(subIndices, accessArg);
+            }
         }
-        //
-
-
-
-
-
+        // Add indices to vertex afterward
+        if (accessedIndicesCollection) {
+            const vertex = fnCall.information.graph.getVertex(rootId);
+            if (vertex) {
+                vertex.indicesCollection = accessedIndicesCollection;
+            }
+            // When access has no access as parent, it's the top most
+            const rootNode = data.completeAst.idMap.get(rootId);
+            const parentNode = data.completeAst.idMap.get(rootNode?.info.parent ?? -1);
+            if (parentNode?.type !== type_1.RType.Access) {
+                // Only reference indices in top most access
+                referenceIndices(accessedIndicesCollection, fnCall, name.info.id);
+            }
         }
     }
     return fnCall;
package/dataflow/internal/process/functions/call/built-in/built-in-assignment.d.ts
CHANGED
@@ -42,8 +42,7 @@ export interface AssignmentToSymbolParameters<OtherInfo> extends AssignmentConfi
  * @param nodeToDefine - `x`
  * @param sourceIds - `v`
  * @param rootIdOfAssignment - `<-`
- * @param
- * @param superAssignment - whether this is a super assignment (i.e., `<<-`)
+ * @param config - configuration for the assignment processing
  */
 export declare function markAsAssignment(information: {
     environment: REnvironmentInformation;
package/dataflow/internal/process/functions/call/built-in/built-in-assignment.js
CHANGED
@@ -18,6 +18,7 @@ const define_1 = require("../../../../../environments/define");
 const edge_1 = require("../../../../../graph/edge");
 const resolve_by_name_1 = require("../../../../../environments/resolve-by-name");
 const list_access_1 = require("../../../../../../util/list-access");
+const config_1 = require("../../../../../../config");
 function toReplacementSymbol(target, prefix, superAssignment) {
     return {
         type: type_1.RType.Symbol,
@@ -184,29 +185,30 @@ function checkTargetReferenceType(source, sourceInfo) {
  * @param nodeToDefine - `x`
  * @param sourceIds - `v`
  * @param rootIdOfAssignment - `<-`
- * @param
- * @param superAssignment - whether this is a super assignment (i.e., `<<-`)
+ * @param config - configuration for the assignment processing
  */
 function markAsAssignment(information, nodeToDefine, sourceIds, rootIdOfAssignment, config) {
-
-
-
-
-
-
-    // Indices defined by replacement operation e.g. $<-
-    if (config?.indicesCollection !== undefined) {
-        // If there were indices stored in the vertex, then a container was defined
-        // and assigned to the index of another container e.g. a$b <- list(c = 1)
-        if (indicesCollection) {
-            indicesCollection = (0, list_access_1.addSubIndicesToLeafIndices)(config.indicesCollection, indicesCollection);
+    if ((0, config_1.getConfig)().solver.pointerTracking) {
+        let indicesCollection = undefined;
+        if (sourceIds.length === 1) {
+            // support for tracking indices
+            // Indices were defined for the vertex e.g. a <- list(c = 1) or a$b <- list(c = 1)
+            indicesCollection = information.graph.getVertex(sourceIds[0])?.indicesCollection;
         }
-
-
-
+        // Indices defined by replacement operation e.g. $<-
+        if (config?.indicesCollection !== undefined) {
+            // If there were indices stored in the vertex, then a container was defined
+            // and assigned to the index of another container e.g. a$b <- list(c = 1)
+            if (indicesCollection) {
+                indicesCollection = (0, list_access_1.addSubIndicesToLeafIndices)(config.indicesCollection, indicesCollection);
+            }
+            else {
+                // No indices were defined for the vertex e.g. a$b <- 2
+                indicesCollection = config.indicesCollection;
+            }
         }
+        nodeToDefine.indicesCollection ??= indicesCollection;
     }
-    nodeToDefine.indicesCollection ??= indicesCollection;
     information.environment = (0, define_1.define)(nodeToDefine, config?.superAssignment, information.environment);
     information.graph.setDefinitionOfVertex(nodeToDefine);
     if (!config?.quoteSource) {
@@ -215,12 +217,14 @@ function markAsAssignment(information, nodeToDefine, sourceIds, rootIdOfAssignme
         }
     }
     information.graph.addEdge(nodeToDefine, rootIdOfAssignment, edge_1.EdgeType.DefinedBy);
-
-
-
-    edge
-
-
+    if ((0, config_1.getConfig)().solver.pointerTracking) {
+        // kinda dirty, but we have to remove existing read edges for the symbol, added by the child
+        const out = information.graph.outgoingEdges(nodeToDefine.nodeId);
+        for (const [id, edge] of (out ?? [])) {
+            edge.types &= ~edge_1.EdgeType.Reads;
+            if (edge.types == 0) {
+                out?.delete(id);
+            }
         }
     }
 }
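Note: the cleanup above clears the `Reads` bit from each outgoing edge's type bitmask and deletes edges left with no types. The same bit-flag idiom in isolation (a sketch with a hypothetical flag enum, not flowR's actual `EdgeType` values):

```ts
// Hypothetical bit-flag enum; flowR's EdgeType is combined the same way.
enum EdgeFlag {
    Reads     = 1 << 0,
    DefinedBy = 1 << 1,
    Calls     = 1 << 2
}

interface Edge { types: number }

// Remove the Reads flag from every edge; delete edges left with no flags at all.
function dropReadEdges(out: Map<string, Edge>): void {
    for(const [id, edge] of out) {
        edge.types &= ~EdgeFlag.Reads;
        if(edge.types === 0) {
            out.delete(id);
        }
    }
}
```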
package/dataflow/internal/process/functions/call/built-in/built-in-replacement.js
CHANGED
@@ -15,6 +15,7 @@ const edge_1 = require("../../../../../graph/edge");
 const dfg_1 = require("../../../../../../util/mermaid/dfg");
 const type_1 = require("../../../../../../r-bridge/lang-4.x/ast/model/type");
 const list_access_1 = require("../../../../../../util/list-access");
+const config_1 = require("../../../../../../config");
 function processReplacementFunction(name,
 /** The last one has to be the value */
 args, rootId, data, config) {
@@ -25,7 +26,7 @@ args, rootId, data, config) {
     /* we only get here if <-, <<-, ... or whatever is part of the replacement is not overwritten */
     (0, log_1.expensiveTrace)(logger_1.dataflowLogger, () => `Replacement ${name.content} with ${JSON.stringify(args)}, processing`);
     let indices = undefined;
-    if (name.content === '$<-') {
+    if (name.content === '$<-' && (0, config_1.getConfig)().solver.pointerTracking) {
         const nonEmptyArgs = args.filter(arg => arg !== r_function_call_1.EmptyArgument);
         const accessedArg = nonEmptyArgs.find(arg => arg.info.role === "accessed" /* RoleInParent.Accessed */);
         const accessArg = nonEmptyArgs.find(arg => arg.info.role === "index-access" /* RoleInParent.IndexAccess */);
package/documentation/doc-util/doc-dfg.js
CHANGED
@@ -42,7 +42,7 @@ async function printDfGraphForCode(shell, code, { mark, showCode = true, codeOpe
     if (switchCodeAndGraph) {
         (0, assert_1.guard)(showCode, 'can not switch code and graph if code is not shown');
     }
-    const metaInfo = `The analysis required _${(0, time_1.printAsMs)(duration)}_ (
+    const metaInfo = `The analysis required _${(0, time_1.printAsMs)(duration)}_ (including parse and normalize) within the generation environment.`;
     const dfGraph = printDfGraph(result.dataflow.graph, mark);
     let resultText = '\n\n';
     if (showCode) {
package/documentation/doc-util/doc-query.js
CHANGED
@@ -23,7 +23,7 @@ async function showQuery(shell, code, queries, { showCode, collapseResult, colla
         shell,
         request: (0, retriever_1.requestFromInput)(code)
     }).allRemainingSteps();
-    const results = (0, query_1.executeQueries)({
+    const results = (0, query_1.executeQueries)({ dataflow: analysis.dataflow, ast: analysis.normalize }, queries);
     const duration = performance.now() - now;
     const metaInfo = `
 The analysis required _${(0, time_1.printAsMs)(duration)}_ (including parsing and normalization and the query) within the generation environment.
package/documentation/doc-util/doc-search.d.ts
ADDED
@@ -0,0 +1,25 @@
+import type { RShell } from '../../r-bridge/shell';
+import type { SupportedQueryTypes } from '../../queries/query';
+import type { SupportedVirtualQueryTypes } from '../../queries/virtual-query/virtual-queries';
+import type { FlowrSearchLike } from '../../search/flowr-search-builder';
+export interface ShowSearchOptions {
+    readonly showCode?: boolean;
+    readonly collapseResult?: boolean;
+}
+export declare function showSearch(shell: RShell, code: string, search: FlowrSearchLike, { collapseResult }?: ShowSearchOptions): Promise<string>;
+export interface QueryDocumentation {
+    readonly name: string;
+    readonly type: 'virtual' | 'active';
+    readonly shortDescription: string;
+    readonly functionName: string;
+    readonly functionFile: string;
+    readonly buildExplanation: (shell: RShell) => Promise<string>;
+}
+export declare const RegisteredQueries: {
+    active: Map<string, QueryDocumentation>;
+    virtual: Map<string, QueryDocumentation>;
+};
+export declare function registerQueryDocumentation(query: SupportedQueryTypes | SupportedVirtualQueryTypes, doc: QueryDocumentation): void;
+export declare function linkToQueryOfName(id: SupportedQueryTypes | SupportedVirtualQueryTypes): string;
+export declare function tocForQueryType(type: 'active' | 'virtual'): string;
+export declare function explainQueries(shell: RShell, type: 'active' | 'virtual'): Promise<string>;
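Note: the new registry maps each query id to a `QueryDocumentation` record. A registration sketch against the declared API (the id `'config'` is taken from the new config query in this release; all other field values below are made up for illustration):

```ts
import { registerQueryDocumentation } from '@eagleoutice/flowr/documentation/doc-util/doc-search';

// Register a documentation entry for the (new) config query.
registerQueryDocumentation('config', {
    name:             'Config Query',
    type:             'active',
    shortDescription: 'Retrieve the current flowR configuration.',
    functionName:     'executeConfigQuery',                                        // hypothetical name
    functionFile:     '../queries/catalog/config-query/config-query-executor.ts',  // hypothetical path
    buildExplanation: async() => 'Returns the active configuration as JSON.'
});
```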
package/documentation/doc-util/doc-search.js
ADDED
@@ -0,0 +1,121 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.RegisteredQueries = void 0;
+exports.showSearch = showSearch;
+exports.registerQueryDocumentation = registerQueryDocumentation;
+exports.linkToQueryOfName = linkToQueryOfName;
+exports.tocForQueryType = tocForQueryType;
+exports.explainQueries = explainQueries;
+const pipeline_executor_1 = require("../../core/pipeline-executor");
+const default_pipelines_1 = require("../../core/steps/pipeline/default-pipelines");
+const retriever_1 = require("../../r-bridge/retriever");
+const doc_files_1 = require("./doc-files");
+const doc_dfg_1 = require("./doc-dfg");
+const doc_code_1 = require("./doc-code");
+const time_1 = require("../../util/time");
+const flowr_search_executor_1 = require("../../search/flowr-search-executor");
+const flowr_search_printer_1 = require("../../search/flowr-search-printer");
+const node_id_1 = require("../../r-bridge/lang-4.x/ast/model/processing/node-id");
+const dfg_1 = require("../../util/mermaid/dfg");
+async function showSearch(shell, code, search, { collapseResult = true } = {}) {
+    const now = performance.now();
+    const analysis = await new pipeline_executor_1.PipelineExecutor(default_pipelines_1.DEFAULT_DATAFLOW_PIPELINE, {
+        shell,
+        request: (0, retriever_1.requestFromInput)(code)
+    }).allRemainingSteps();
+    const result = (0, flowr_search_executor_1.runSearch)(search, analysis);
+    const duration = performance.now() - now;
+    const metaInfo = `
+The search required _${(0, time_1.printAsMs)(duration)}_ (including parsing and normalization and the query) within the generation environment.
+    `.trim();
+    return `
+
+${(0, doc_code_1.codeBlock)('ts', (0, flowr_search_printer_1.flowrSearchToCode)(search))}
+
+<details style="color:gray"> <summary>Search Visualization</summary>
+
+${(0, doc_code_1.codeBlock)('mermaid', (0, flowr_search_printer_1.flowrSearchToMermaid)(search))}
+
+In the code:
+
+${(0, doc_code_1.codeBlock)('r', code)}
+
+<details style="color:gray"> <summary>JSON Representation</summary>
+
+${(0, doc_code_1.codeBlock)('json', JSON.stringify(search, null, 2))}
+
+</details>
+
+</details>
+
+
+${collapseResult ? ' <details> <summary style="color:gray">Show Results</summary>' : ''}
+
+The query returns the following vetices (all references to \`x\` in the code):
+${result.map(({ node }) => `<b>${node.info.id} ('${(0, node_id_1.recoverContent)(node.info.id, analysis.dataflow.graph)}')</b> at L${(0, dfg_1.formatRange)(node.location)}`).join(', ')}
+
+${metaInfo}
+
+The returned results are highlighted thick and blue within the dataflow graph:
+
+${await (0, doc_dfg_1.printDfGraphForCode)(shell, code, { showCode: false, switchCodeAndGraph: false, mark: new Set(result.map(({ node }) => node.info.id)) })}
+
+
+${collapseResult ? '</details>' : ''}
+
+`;
+}
+exports.RegisteredQueries = {
+    'active': new Map(),
+    'virtual': new Map()
+};
+function registerQueryDocumentation(query, doc) {
+    const map = exports.RegisteredQueries[doc.type];
+    if (map.has(query)) {
+        throw new Error(`Query ${query} already registered`);
+    }
+    map.set(query, doc);
+}
+function linkify(name) {
+    return name.toLowerCase().replace(/ /g, '-');
+}
+function linkToQueryOfName(id) {
+    const query = exports.RegisteredQueries.active.get(id) ?? exports.RegisteredQueries.virtual.get(id);
+    if (!query) {
+        throw new Error(`Query ${id} not found`);
+    }
+    return `[${query.name}](#${linkify(query.name)})`;
+}
+function tocForQueryType(type) {
+    const queries = [...exports.RegisteredQueries[type].entries()].sort(([, { name: a }], [, { name: b }]) => a.localeCompare(b));
+    const result = [];
+    for (const [id, { name, shortDescription }] of queries) {
+        result.push(`1. [${name}](#${linkify(name)}) (\`${id}\`):\\\n    ${shortDescription}`);
+    }
+    return result.join('\n');
+}
+async function explainQuery(shell, { name, functionName, functionFile, buildExplanation }) {
+    return `
+### ${name}
+
+${await buildExplanation(shell)}
+
+<details>
+
+<summary style="color:gray">Implementation Details</summary>
+
+Responsible for the execution of the ${name} query is \`${functionName}\` in ${(0, doc_files_1.getFilePathMd)(functionFile)}.
+
+</details>
+
+`;
+}
+async function explainQueries(shell, type) {
+    const queries = [...exports.RegisteredQueries[type].entries()].sort(([, { name: a }], [, { name: b }]) => a.localeCompare(b));
+    const result = [];
+    for (const [, doc] of queries) {
+        result.push(await explainQuery(shell, doc));
+    }
+    return result.join(`\n${'-'.repeat(5)}\n\n`);
+}
+//# sourceMappingURL=doc-search.js.map
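Note: `showSearch` ties the new search API into the wiki generator. A usage sketch against its declared signature (the R snippet is illustrative; the search itself would be built with the new `search/flowr-search-builder`, whose builder API is added by this diff but not shown here):

```ts
import { RShell } from '@eagleoutice/flowr/r-bridge/shell';
import type { FlowrSearchLike } from '@eagleoutice/flowr/search/flowr-search-builder';
import { showSearch } from '@eagleoutice/flowr/documentation/doc-util/doc-search';

// Render the markdown documentation block for an arbitrary search
// over a small R snippet; the search is constructed elsewhere.
async function renderSearchDoc(search: FlowrSearchLike): Promise<string> {
    const shell = new RShell();
    try {
        return await showSearch(shell, 'x <- 2\nprint(x)', search, { collapseResult: false });
    } finally {
        shell.close();
    }
}
```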
package/documentation/doc-util/doc-types.d.ts
CHANGED
@@ -2,7 +2,7 @@ import ts from 'typescript';
 export interface TypeElementInSource {
     name: string;
     node: ts.Node;
-    kind: 'interface' | 'type' | 'enum' | 'class';
+    kind: 'interface' | 'type' | 'enum' | 'class' | 'variable';
     extends: string[];
     generics: string[];
     filePath: string;
@@ -25,7 +25,7 @@ export interface MermaidTypeReport {
     program: ts.Program;
 }
 export declare function getTypesFromFolderAsMermaid(options: GetTypesAsMermaidOption): MermaidTypeReport;
-export declare function implSnippet(node: TypeElementInSource | undefined, program: ts.Program, nesting?: number): string;
+export declare function implSnippet(node: TypeElementInSource | undefined, program: ts.Program, showName?: boolean, nesting?: number): string;
 export interface PrintHierarchyArguments {
     readonly program: ts.Program;
     readonly hierarchy: TypeElementInSource[];
@@ -36,3 +36,11 @@ export interface PrintHierarchyArguments {
 }
 export declare const mermaidHide: string[];
 export declare function printHierarchy({ program, hierarchy, root, collapseFromNesting, initialNesting, maxDepth }: PrintHierarchyArguments): string;
+/**
+ * Create a short link to a type in the documentation
+ * @param name - The name of the type, e.g. `MyType`, may include a container, e.g. `MyContainer::MyType` (this works with function nestings too)
+ * @param hierarchy - The hierarchy of types to search in
+ * @param codeStyle - Whether to use code style for the link
+ */
+export declare function shortLink(name: string, hierarchy: TypeElementInSource[], codeStyle?: boolean): string;
+export declare function getDocumentationForType(name: string, hierarchy: TypeElementInSource[]): string;
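Note: `shortLink` resolves a type name against a previously collected type hierarchy and renders a documentation link. A usage sketch against the declared signature (`DataflowGraph` is a real flowR type, but whether it appears in the hierarchy depends on the folders scanned via `getTypesFromFolderAsMermaid`):

```ts
import type { TypeElementInSource } from '@eagleoutice/flowr/documentation/doc-util/doc-types';
import { shortLink } from '@eagleoutice/flowr/documentation/doc-util/doc-types';

// Produce a markdown link for use in the generated wiki, in code style,
// given a previously collected type hierarchy.
function linkType(hierarchy: TypeElementInSource[]): string {
    return shortLink('DataflowGraph', hierarchy, true);
}
```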