@eagleoutice/flowr 2.1.3 → 2.1.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/benchmark/slicer.js +1 -1
- package/cli/repl/commands/repl-parse.js +1 -1
- package/cli/repl/commands/repl-query.js +4 -5
- package/cli/repl/server/connection.js +6 -1
- package/cli/repl/server/messages/message-query.js +2 -2
- package/cli/repl/server/net.js +1 -1
- package/cli/repl/server/send.js +3 -6
- package/cli/repl/server/server.d.ts +2 -2
- package/cli/repl/server/server.js +1 -1
- package/config.js +1 -1
- package/core/pipeline-executor.js +2 -1
- package/core/steps/all/core/00-parse.d.ts +11 -4
- package/core/steps/all/core/00-parse.js +5 -5
- package/core/steps/all/core/10-normalize.d.ts +2 -1
- package/core/steps/all/core/20-dataflow.d.ts +2 -2
- package/core/steps/all/core/20-dataflow.js +2 -2
- package/core/steps/pipeline/default-pipelines.d.ts +41 -23
- package/core/steps/pipeline/pipeline.d.ts +15 -3
- package/core/steps/pipeline/pipeline.js +2 -2
- package/dataflow/environments/built-in.d.ts +8 -6
- package/dataflow/environments/built-in.js +6 -1
- package/dataflow/environments/default-builtin-config.js +21 -5
- package/dataflow/environments/environment.d.ts +1 -0
- package/dataflow/environments/environment.js +5 -5
- package/dataflow/extractor.js +23 -0
- package/dataflow/graph/dataflowgraph-builder.d.ts +2 -0
- package/dataflow/graph/dataflowgraph-builder.js +9 -0
- package/dataflow/graph/diff.js +1 -1
- package/dataflow/graph/graph.d.ts +7 -2
- package/dataflow/graph/graph.js +10 -2
- package/dataflow/internal/process/functions/call/argument/unpack-argument.d.ts +1 -1
- package/dataflow/internal/process/functions/call/argument/unpack-argument.js +2 -2
- package/dataflow/internal/process/functions/call/built-in/built-in-assignment.js +2 -2
- package/dataflow/internal/process/functions/call/built-in/built-in-expression-list.js +1 -1
- package/dataflow/internal/process/functions/call/built-in/built-in-for-loop.js +1 -1
- package/dataflow/internal/process/functions/call/built-in/built-in-if-then-else.js +1 -1
- package/dataflow/internal/process/functions/call/built-in/built-in-pipe.js +1 -1
- package/dataflow/internal/process/functions/call/built-in/built-in-source.js +13 -1
- package/dataflow/internal/process/functions/call/built-in/built-in-while-loop.js +1 -1
- package/dataflow/internal/process/functions/call/named-call-handling.js +1 -1
- package/dataflow/processor.d.ts +3 -3
- package/documentation/data/server/doc-data-server-messages.js +8 -14
- package/documentation/doc-util/doc-cli-option.js +4 -4
- package/documentation/doc-util/doc-query.d.ts +4 -6
- package/documentation/doc-util/doc-query.js +16 -156
- package/documentation/doc-util/doc-repl.js +2 -2
- package/documentation/print-dataflow-graph-wiki.js +2 -1
- package/documentation/print-interface-wiki.js +8 -3
- package/documentation/print-query-wiki.js +107 -16
- package/package.json +1 -1
- package/queries/base-query-format.d.ts +6 -0
- package/queries/catalog/call-context-query/call-context-query-executor.d.ts +1 -1
- package/queries/catalog/call-context-query/call-context-query-executor.js +26 -80
- package/queries/catalog/call-context-query/call-context-query-format.d.ts +14 -13
- package/queries/catalog/call-context-query/call-context-query-format.js +32 -14
- package/queries/catalog/call-context-query/identify-link-to-last-call-relation.d.ts +17 -0
- package/queries/catalog/call-context-query/identify-link-to-last-call-relation.js +99 -0
- package/queries/catalog/cluster-query/cluster-query-executor.d.ts +1 -1
- package/queries/catalog/cluster-query/cluster-query-format.d.ts +59 -0
- package/queries/catalog/cluster-query/cluster-query-format.js +29 -0
- package/queries/catalog/dataflow-query/dataflow-query-executor.d.ts +1 -1
- package/queries/catalog/dataflow-query/dataflow-query-format.d.ts +59 -0
- package/queries/catalog/dataflow-query/dataflow-query-format.js +21 -0
- package/queries/catalog/dependencies-query/dependencies-query-executor.d.ts +3 -0
- package/queries/catalog/dependencies-query/dependencies-query-executor.js +144 -0
- package/queries/catalog/dependencies-query/dependencies-query-format.d.ts +102 -0
- package/queries/catalog/dependencies-query/dependencies-query-format.js +187 -0
- package/queries/catalog/id-map-query/id-map-query-executor.d.ts +1 -1
- package/queries/catalog/id-map-query/id-map-query-format.d.ts +59 -0
- package/queries/catalog/id-map-query/id-map-query-format.js +21 -0
- package/queries/catalog/lineage-query/lineage-query-executor.d.ts +1 -1
- package/queries/catalog/lineage-query/lineage-query-format.d.ts +59 -0
- package/queries/catalog/lineage-query/lineage-query-format.js +24 -0
- package/queries/catalog/location-map-query/location-map-query-executor.d.ts +3 -0
- package/queries/catalog/location-map-query/location-map-query-executor.js +21 -0
- package/queries/catalog/location-map-query/location-map-query-format.d.ts +17 -0
- package/queries/catalog/location-map-query/location-map-query-format.js +24 -0
- package/queries/catalog/normalized-ast-query/normalized-ast-query-executor.d.ts +1 -1
- package/queries/catalog/normalized-ast-query/normalized-ast-query-format.d.ts +59 -0
- package/queries/catalog/normalized-ast-query/normalized-ast-query-format.js +21 -0
- package/queries/catalog/static-slice-query/static-slice-query-executor.d.ts +1 -1
- package/queries/catalog/static-slice-query/static-slice-query-executor.js +8 -3
- package/queries/catalog/static-slice-query/static-slice-query-format.d.ts +59 -0
- package/queries/catalog/static-slice-query/static-slice-query-format.js +40 -0
- package/queries/query-print.d.ts +8 -0
- package/queries/query-print.js +94 -0
- package/queries/query.d.ts +431 -26
- package/queries/query.js +36 -18
- package/r-bridge/lang-4.x/ast/parser/json/parser.d.ts +2 -1
- package/r-bridge/lang-4.x/ast/parser/json/parser.js +2 -2
- package/r-bridge/lang-4.x/ast/parser/main/internal/functions/normalize-argument.js +2 -1
- package/r-bridge/retriever.js +1 -1
- package/r-bridge/shell-executor.js +1 -1
- package/r-bridge/shell.d.ts +1 -2
- package/r-bridge/shell.js +22 -18
- package/slicing/static/static-slicer.js +3 -1
- package/statistics/features/supported/used-functions/used-functions.js +1 -1
- package/{documentation/doc-util/doc-hover-over.js → util/html-hover-over.js} +1 -1
- package/util/json.d.ts +2 -1
- package/util/json.js +101 -3
- package/util/objects.d.ts +2 -1
- package/util/objects.js +3 -0
- package/util/version.js +1 -1
- package/queries/query-schema.d.ts +0 -13
- package/queries/query-schema.js +0 -54
- package/{documentation/doc-util/doc-hover-over.d.ts → util/html-hover-over.d.ts} +0 -0
package/queries/catalog/dependencies-query/dependencies-query-executor.js
@@ -0,0 +1,144 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.executeDependenciesQuery = executeDependenciesQuery;
+const query_1 = require("../../query");
+const dependencies_query_format_1 = require("./dependencies-query-format");
+const graph_1 = require("../../../dataflow/graph/graph");
+const log_1 = require("../../../util/log");
+const type_1 = require("../../../r-bridge/lang-4.x/ast/model/type");
+const retriever_1 = require("../../../r-bridge/retriever");
+const r_function_call_1 = require("../../../r-bridge/lang-4.x/ast/model/nodes/r-function-call");
+const visitor_1 = require("../../../r-bridge/lang-4.x/ast/model/processing/visitor");
+const assert_1 = require("../../../util/assert");
+const objects_1 = require("../../../util/objects");
+const SupportedVertexTypes = [type_1.RType.String, type_1.RType.Logical, type_1.RType.Number];
+const Unknown = 'unknown';
+function executeDependenciesQuery(data, queries) {
+    if (queries.length !== 1) {
+        log_1.log.warn('Dependencies query expects only up to one query, but got ', queries.length, 'only using the first query');
+    }
+    const now = Date.now();
+    const [query] = queries;
+    const ignoreDefault = query.ignoreDefaultFunctions ?? false;
+    const libraryFunctions = getFunctionsToCheck(query.libraryFunctions, ignoreDefault, dependencies_query_format_1.LibraryFunctions);
+    const sourceFunctions = getFunctionsToCheck(query.sourceFunctions, ignoreDefault, dependencies_query_format_1.SourceFunctions);
+    const readFunctions = getFunctionsToCheck(query.readFunctions, ignoreDefault, dependencies_query_format_1.ReadFunctions);
+    const writeFunctions = getFunctionsToCheck(query.writeFunctions, ignoreDefault, dependencies_query_format_1.WriteFunctions);
+    const numberOfFunctions = libraryFunctions.length + sourceFunctions.length + readFunctions.length + writeFunctions.length;
+    const results = numberOfFunctions === 0 ? { kinds: {}, '.meta': { timing: 0 } } : (0, query_1.executeQueriesOfSameType)(data, ...makeCallContextQuery(libraryFunctions, 'library'), ...makeCallContextQuery(sourceFunctions, 'source'), ...makeCallContextQuery(readFunctions, 'read'), ...makeCallContextQuery(writeFunctions, 'write'));
+    const libraries = getResults(data, results, 'library', libraryFunctions, (id, vertex, argument) => ({
+        nodeId: id,
+        functionName: vertex.name,
+        libraryName: argument ?? Unknown
+    }), [type_1.RType.Symbol]);
+    if (!ignoreDefault) {
+        /* for libraries, we have to additionally track all uses of `::` and `:::`, for this we currently simply traverse all uses */
+        (0, visitor_1.visitAst)(data.ast.ast, n => {
+            if (n.type === type_1.RType.Symbol && n.namespace) {
+                /* we should improve the identification of ':::' */
+                libraries.push({
+                    nodeId: n.info.id,
+                    functionName: (n.info.fullLexeme ?? n.lexeme).includes(':::') ? ':::' : '::',
+                    libraryName: n.namespace
+                });
+            }
+        });
+    }
+    const sourcedFiles = getResults(data, results, 'source', sourceFunctions, (id, vertex, argument, linkedIds) => ({
+        nodeId: id,
+        functionName: vertex.name,
+        file: argument ?? Unknown,
+        linkedIds: (linkedIds?.length ?? 0) > 0 ? linkedIds : undefined
+    }));
+    const readData = getResults(data, results, 'read', readFunctions, (id, vertex, argument, linkedIds) => ({
+        nodeId: id,
+        functionName: vertex.name,
+        source: argument ?? Unknown,
+        linkedIds: (linkedIds?.length ?? 0) > 0 ? linkedIds : undefined
+    }));
+    const writtenData = getResults(data, results, 'write', writeFunctions, (id, vertex, argument, linkedIds) => ({
+        nodeId: id,
+        functionName: vertex.name,
+        // write functions that don't have argIndex are assumed to write to stdout
+        destination: argument ?? ((linkedIds?.length ?? 0) > 0 ? Unknown : 'stdout'),
+        linkedIds: (linkedIds?.length ?? 0) > 0 ? linkedIds : undefined
+    }));
+    return {
+        '.meta': {
+            timing: Date.now() - now
+        },
+        libraries, sourcedFiles, readData, writtenData
+    };
+}
+function makeCallContextQuery(functions, kind) {
+    return functions.map(f => ({
+        type: 'call-context',
+        callName: f.name,
+        includeAliases: false,
+        callNameExact: true,
+        subkind: f.name,
+        linkTo: f.linkTo ? { type: 'link-to-last-call', callName: f.linkTo } : undefined,
+        kind
+    }));
+}
+function getResults(data, results, kind, functions, makeInfo, additionalAllowedTypes) {
+    return Object.entries(results?.kinds[kind]?.subkinds ?? {}).flatMap(([name, results]) => results.flatMap(({ id, linkedIds }) => {
+        const vertex = data.graph.getVertex(id);
+        const info = functions.find(f => f.name === name);
+        let index = info.argIdx;
+        if (info.argName) {
+            const arg = vertex?.args.findIndex(arg => arg !== r_function_call_1.EmptyArgument && arg.name === info.argName);
+            if (arg >= 0) {
+                index = arg;
+            }
+        }
+        const args = index !== undefined ? getArgumentValue(data, vertex, index, additionalAllowedTypes) : undefined;
+        if (!args) {
+            return (0, objects_1.compactRecord)(makeInfo(id, vertex, undefined, linkedIds));
+        }
+        return args.flatMap(a => (0, objects_1.compactRecord)(makeInfo(id, vertex, a, linkedIds)));
+    })).filter(assert_1.isNotUndefined) ?? [];
+}
+function getArgumentValue({ graph }, vertex, argumentIndex, additionalAllowedTypes) {
+    if (vertex) {
+        if (argumentIndex === 'unnamed') {
+            // return all unnamed arguments
+            const references = vertex.args.filter(arg => arg !== r_function_call_1.EmptyArgument && !arg.name).map(graph_1.getReferenceOfArgument);
+            return references.map(ref => {
+                if (!ref) {
+                    return undefined;
+                }
+                let valueNode = graph.idMap?.get(ref);
+                if (valueNode?.type === type_1.RType.Argument) {
+                    valueNode = valueNode.value;
+                }
+                if (valueNode) {
+                    const allowedTypes = [...SupportedVertexTypes, ...additionalAllowedTypes ?? []];
+                    return allowedTypes.includes(valueNode.type) ? (0, retriever_1.removeRQuotes)(valueNode.lexeme) : Unknown;
+                }
+            });
+        }
+        if (vertex.args.length > argumentIndex) {
+            const arg = (0, graph_1.getReferenceOfArgument)(vertex.args[argumentIndex]);
+            if (arg) {
+                let valueNode = graph.idMap?.get(arg);
+                if (valueNode?.type === type_1.RType.Argument) {
+                    valueNode = valueNode.value;
+                }
+                if (valueNode) {
+                    const allowedTypes = [...SupportedVertexTypes, ...additionalAllowedTypes ?? []];
+                    return [allowedTypes.includes(valueNode.type) ? (0, retriever_1.removeRQuotes)(valueNode.lexeme) : Unknown];
+                }
+            }
+        }
+    }
+    return undefined;
+}
+function getFunctionsToCheck(customFunctions, ignoreDefaultFunctions, defaultFunctions) {
+    const functions = ignoreDefaultFunctions ? [] : [...defaultFunctions];
+    if (customFunctions) {
+        functions.push(...customFunctions);
+    }
+    return functions;
+}
+//# sourceMappingURL=dependencies-query-executor.js.map
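For orientation (illustration only, not part of the package): the executor above expands every configured `FunctionInfo` entry into one call-context query and later reads the relevant argument off each matched call vertex. The following standalone TypeScript sketch mirrors the shape produced by `makeCallContextQuery` in the diff.

```typescript
// Sketch of the call-context query derived from a single FunctionInfo entry.
// Field values mirror makeCallContextQuery in the compiled code above.
interface FunctionInfo {
    name: string;
    argIdx?: number | 'unnamed';
    argName?: string;
    linkTo?: string;
}

type DependencyKind = 'library' | 'source' | 'read' | 'write';

function toCallContextQuery(f: FunctionInfo, kind: DependencyKind) {
    return {
        type: 'call-context' as const,
        callName: f.name,      // match the function name ...
        callNameExact: true,   // ... literally, not as a regular expression
        includeAliases: false,
        subkind: f.name,       // results are grouped per function name
        // e.g. `print` is linked to the last `sink` call to recover the real write target
        linkTo: f.linkTo ? { type: 'link-to-last-call' as const, callName: f.linkTo } : undefined,
        kind
    };
}

// Example: the default `source` entry becomes a call-context query of kind 'source'.
console.log(toCallContextQuery({ name: 'source', argIdx: 0, argName: 'file' }, 'source'));
```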
package/queries/catalog/dependencies-query/dependencies-query-format.d.ts
@@ -0,0 +1,102 @@
+import type { BaseQueryFormat, BaseQueryResult } from '../../base-query-format';
+import type { NodeId } from '../../../r-bridge/lang-4.x/ast/model/processing/node-id';
+import Joi from 'joi';
+import { executeDependenciesQuery } from './dependencies-query-executor';
+export declare const LibraryFunctions: FunctionInfo[];
+export declare const SourceFunctions: FunctionInfo[];
+export declare const ReadFunctions: FunctionInfo[];
+export declare const WriteFunctions: FunctionInfo[];
+export interface FunctionInfo {
+    name: string;
+    argIdx?: number | 'unnamed';
+    argName?: string;
+    linkTo?: string;
+}
+export interface DependenciesQuery extends BaseQueryFormat {
+    readonly type: 'dependencies';
+    readonly ignoreDefaultFunctions?: boolean;
+    readonly libraryFunctions?: FunctionInfo[];
+    readonly sourceFunctions?: FunctionInfo[];
+    readonly readFunctions?: FunctionInfo[];
+    readonly writeFunctions?: FunctionInfo[];
+}
+export interface DependenciesQueryResult extends BaseQueryResult {
+    libraries: LibraryInfo[];
+    sourcedFiles: SourceInfo[];
+    readData: ReadInfo[];
+    writtenData: WriteInfo[];
+}
+export interface DependencyInfo extends Record<string, unknown> {
+    nodeId: NodeId;
+    functionName: string;
+    linkedIds?: readonly NodeId[];
+}
+export type LibraryInfo = (DependencyInfo & {
+    libraryName: 'unknown' | string;
+});
+export type SourceInfo = (DependencyInfo & {
+    file: string;
+});
+export type ReadInfo = (DependencyInfo & {
+    source: string;
+});
+export type WriteInfo = (DependencyInfo & {
+    destination: 'stdout' | string;
+});
+export declare const DependenciesQueryDefinition: {
+    readonly executor: typeof executeDependenciesQuery;
+    readonly asciiSummarizer: (formatter: import("../../../util/ansi").OutputFormatter, _processed: import("../../../core/steps/pipeline/pipeline").PipelineOutput<import("../../../core/steps/pipeline/pipeline").Pipeline<{
+        readonly name: "parse";
+        readonly humanReadableName: "parse with R shell";
+        readonly description: "Parse the given R code into an AST";
+        readonly processor: (_results: unknown, input: Partial<import("../../../core/steps/all/core/00-parse").ParseRequiredInput>) => Promise<import("../../../core/steps/all/core/00-parse").ParseStepOutput>;
+        readonly executed: import("../../../core/steps/pipeline-step").PipelineStepStage.OncePerFile;
+        readonly printer: {
+            readonly 0: typeof import("../../../core/print/print").internalPrinter;
+            readonly 2: {
+                (value: any, replacer?: (this: any, key: string, value: any) => any, space?: string | number): string;
+                (value: any, replacer?: (number | string)[] | null, space?: string | number): string;
+            };
+            readonly 5: ({ parsed }: import("../../../core/steps/all/core/00-parse").ParseStepOutput, config: import("../../../util/quads").QuadSerializationConfiguration) => string;
+        };
+        readonly dependencies: readonly [];
+        readonly requiredInput: import("../../../core/steps/all/core/00-parse").ParseRequiredInput;
+    } | {
+        readonly name: "normalize";
+        readonly humanReadableName: "normalize";
+        readonly description: "Normalize the AST to flowR's AST";
+        readonly processor: (results: {
+            parse?: import("../../../core/steps/all/core/00-parse").ParseStepOutput;
+        }, input: Partial<import("../../../core/steps/all/core/10-normalize").NormalizeRequiredInput>) => import("../../../r-bridge/lang-4.x/ast/model/processing/decorate").NormalizedAst<import("../../../r-bridge/lang-4.x/ast/model/processing/decorate").ParentInformation, import("../../../r-bridge/lang-4.x/ast/model/model").RNode<import("../../../r-bridge/lang-4.x/ast/model/processing/decorate").ParentInformation>>;
+        readonly executed: import("../../../core/steps/pipeline-step").PipelineStepStage.OncePerFile;
+        readonly printer: {
+            readonly 0: typeof import("../../../core/print/print").internalPrinter;
+            readonly 2: typeof import("../../../core/print/normalize-printer").normalizedAstToJson;
+            readonly 5: typeof import("../../../core/print/normalize-printer").normalizedAstToQuads;
+            readonly 3: typeof import("../../../core/print/normalize-printer").printNormalizedAstToMermaid;
+            readonly 4: typeof import("../../../core/print/normalize-printer").printNormalizedAstToMermaidUrl;
+        };
+        readonly dependencies: readonly ["parse"];
+        readonly requiredInput: import("../../../core/steps/all/core/10-normalize").NormalizeRequiredInput;
+    } | {
+        readonly humanReadableName: "dataflow";
+        readonly processor: (results: {
+            normalize?: import("../../../r-bridge/lang-4.x/ast/model/processing/decorate").NormalizedAst;
+        }, input: {
+            request?: import("../../../r-bridge/retriever").RParseRequests;
+        }) => import("../../../dataflow/info").DataflowInformation;
+        readonly requiredInput: {};
+        readonly name: "dataflow";
+        readonly description: "Construct the dataflow graph";
+        readonly executed: import("../../../core/steps/pipeline-step").PipelineStepStage.OncePerFile;
+        readonly printer: {
+            readonly 0: typeof import("../../../core/print/print").internalPrinter;
+            readonly 2: typeof import("../../../core/print/dataflow-printer").dataflowGraphToJson;
+            readonly 5: typeof import("../../../core/print/dataflow-printer").dataflowGraphToQuads;
+            readonly 3: typeof import("../../../core/print/dataflow-printer").dataflowGraphToMermaid;
+            readonly 4: typeof import("../../../core/print/dataflow-printer").dataflowGraphToMermaidUrl;
+        };
+        readonly dependencies: readonly ["normalize"];
+    }>>, queryResults: BaseQueryResult, result: string[]) => true;
+    readonly schema: Joi.ObjectSchema<any>;
+};
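A minimal sketch of a query object matching the `DependenciesQuery` interface above. The deep-import path is an assumption derived from the file layout in this diff, and `my_read` is a hypothetical user-defined function.

```typescript
import type { DependenciesQuery } from '@eagleoutice/flowr/queries/catalog/dependencies-query/dependencies-query-format';

const query: DependenciesQuery = {
    type: 'dependencies',
    // keep the built-in defaults and additionally treat `my_read(path)` as a read
    readFunctions: [{ name: 'my_read', argIdx: 0, argName: 'path' }]
};
```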
package/queries/catalog/dependencies-query/dependencies-query-format.js
@@ -0,0 +1,187 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.DependenciesQueryDefinition = exports.WriteFunctions = exports.ReadFunctions = exports.SourceFunctions = exports.LibraryFunctions = void 0;
+const ansi_1 = require("../../../util/ansi");
+const time_1 = require("../../../util/time");
+const joi_1 = __importDefault(require("joi"));
+const dependencies_query_executor_1 = require("./dependencies-query-executor");
+// these lists are originally based on https://github.com/duncantl/CodeDepends/blob/7fd96dfee16b252e5f642c77a7ababf48e9326f8/R/codeTypes.R
+exports.LibraryFunctions = [
+    { name: 'library', argIdx: 0, argName: 'package' },
+    { name: 'require', argIdx: 0, argName: 'package' },
+    { name: 'loadNamespace', argIdx: 0, argName: 'package' },
+    { name: 'attachNamespace', argIdx: 0, argName: 'ns' },
+    { name: 'attach', argIdx: 0, argName: 'what' },
+    { name: 'groundhog.library', argIdx: 0, argName: 'pkg' },
+    { name: 'p_load', argIdx: 'unnamed' }, // pacman
+    { name: 'p_load_gh', argIdx: 'unnamed' }, // pacman
+    { name: 'from_import', argIdx: 0, argName: 'package' }, // easypackages
+    { name: 'libraries', argIdx: 'unnamed' }, // easypackages
+    { name: 'shelf', argIdx: 'unnamed' } // librarian
+];
+exports.SourceFunctions = [
+    { name: 'source', argIdx: 0, argName: 'file' },
+    { name: 'sys.source', argIdx: 0, argName: 'file' }
+];
+exports.ReadFunctions = [
+    { name: 'read.table', argIdx: 0, argName: 'file' },
+    { name: 'read.csv', argIdx: 0, argName: 'file' },
+    { name: 'read.csv2', argIdx: 0, argName: 'file' },
+    { name: 'read.delim', argIdx: 0, argName: 'file' },
+    { name: 'read.dcf', argIdx: 0, argName: 'file' },
+    { name: 'scan', argIdx: 0, argName: 'file' },
+    { name: 'read.fwf', argIdx: 0, argName: 'file' },
+    { name: 'file', argIdx: 1, argName: 'open' },
+    { name: 'url', argIdx: 1, argName: 'open' },
+    { name: 'load', argIdx: 0, argName: 'file' },
+    { name: 'gzfile', argIdx: 1, argName: 'open' },
+    { name: 'bzfile', argIdx: 1, argName: 'open' },
+    { name: 'download.file', argIdx: 0, argName: 'url' },
+    { name: 'pipe', argIdx: 1, argName: 'open' },
+    { name: 'fifo', argIdx: 1, argName: 'open' },
+    { name: 'unz', argIdx: 1, argName: 'open' },
+    { name: 'matrix', argIdx: 0, argName: 'data' },
+    { name: 'readRDS', argIdx: 0, argName: 'file' },
+    { name: 'readLines', argIdx: 0, argName: 'con' },
+    { name: 'readRenviron', argIdx: 0, argName: 'path' },
+    // readr
+    { name: 'read_csv', argIdx: 0, argName: 'file' },
+    { name: 'read_csv2', argIdx: 0, argName: 'file' },
+    { name: 'read_lines', argIdx: 0, argName: 'file' },
+    { name: 'read_delim', argIdx: 0, argName: 'file' },
+    { name: 'read_dsv', argIdx: 0, argName: 'file' },
+    { name: 'read_fwf', argIdx: 0, argName: 'file' },
+    { name: 'read_tsv', argIdx: 0, argName: 'file' },
+    { name: 'read_table', argIdx: 0, argName: 'file' },
+    { name: 'read_log', argIdx: 0, argName: 'file' },
+    { name: 'read_lines', argIdx: 0, argName: 'file' },
+    { name: 'read_lines_chunked', argIdx: 0, argName: 'file' },
+    // xlsx
+    { name: 'read.xlsx', argIdx: 0, argName: 'file' },
+    { name: 'read.xlsx2', argIdx: 0, argName: 'file' },
+    // data.table
+    { name: 'fread', argIdx: 0, argName: 'file' },
+    // haven
+    { name: 'read_sas', argIdx: 0, argName: 'file' },
+    { name: 'read_sav', argIdx: 0, argName: 'file' },
+    { name: 'read_por', argIdx: 0, argName: 'file' },
+    { name: 'read_dta', argIdx: 0, argName: 'file' },
+    { name: 'read_xpt', argIdx: 0, argName: 'file' },
+    // feather
+    { name: 'read_feather', argIdx: 0, argName: 'file' },
+    // foreign
+    { name: 'read.arff', argIdx: 0, argName: 'file' },
+    { name: 'read.dbf', argIdx: 0, argName: 'file' },
+    { name: 'read.dta', argIdx: 0, argName: 'file' },
+    { name: 'read.epiinfo', argIdx: 0, argName: 'file' },
+    { name: 'read.mtp', argIdx: 0, argName: 'file' },
+    { name: 'read.octave', argIdx: 0, argName: 'file' },
+    { name: 'read.spss', argIdx: 0, argName: 'file' },
+    { name: 'read.ssd', argIdx: 0, argName: 'file' },
+    { name: 'read.systat', argIdx: 0, argName: 'file' },
+    { name: 'read.xport', argIdx: 0, argName: 'file' },
+];
+exports.WriteFunctions = [
+    { name: 'save', argIdx: 0, argName: '...' },
+    { name: 'save.image', argIdx: 0, argName: 'file' },
+    { name: 'write', argIdx: 1, argName: 'file' },
+    { name: 'dput', argIdx: 1, argName: 'file' },
+    { name: 'dump', argIdx: 1, argName: 'file' },
+    { name: 'write.table', argIdx: 1, argName: 'file' },
+    { name: 'write.csv', argIdx: 1, argName: 'file' },
+    { name: 'saveRDS', argIdx: 1, argName: 'file' },
+    // write functions that don't have argIndex are assumed to write to stdout
+    { name: 'print', linkTo: 'sink' },
+    { name: 'cat', linkTo: 'sink', argIdx: 1, argName: 'file' },
+    { name: 'message', linkTo: 'sink' },
+    { name: 'warning', linkTo: 'sink' },
+    // readr
+    { name: 'write_csv', argIdx: 1, argName: 'file' },
+    { name: 'write_csv2', argIdx: 1, argName: 'file' },
+    { name: 'write_delim', argIdx: 1, argName: 'file' },
+    { name: 'write_dsv', argIdx: 1, argName: 'file' },
+    { name: 'write_fwf', argIdx: 1, argName: 'file' },
+    { name: 'write_tsv', argIdx: 1, argName: 'file' },
+    { name: 'write_table', argIdx: 1, argName: 'file' },
+    { name: 'write_log', argIdx: 1, argName: 'file' },
+    // heaven
+    { name: 'write_sas', argIdx: 1, argName: 'file' },
+    { name: 'write_sav', argIdx: 1, argName: 'file' },
+    { name: 'write_por', argIdx: 1, argName: 'file' },
+    { name: 'write_dta', argIdx: 1, argName: 'file' },
+    { name: 'write_xpt', argIdx: 1, argName: 'file' },
+    // feather
+    { name: 'write_feather', argIdx: 1, argName: 'file' },
+    // foreign
+    { name: 'write.arff', argIdx: 1, argName: 'file' },
+    { name: 'write.dbf', argIdx: 1, argName: 'file' },
+    { name: 'write.dta', argIdx: 1, argName: 'file' },
+    { name: 'write.foreign', argIdx: 1, argName: 'file' },
+    // xlsx
+    { name: 'write.xlsx', argIdx: 1, argName: 'file' },
+    { name: 'write.xlsx2', argIdx: 1, argName: 'file' },
+    // graphics
+    { name: 'pdf', argIdx: 0, argName: 'file' },
+    { name: 'jpeg', argIdx: 0, argName: 'file' },
+    { name: 'png', argIdx: 0, argName: 'file' },
+    { name: 'windows', argIdx: 0, argName: 'file' },
+    { name: 'postscript', argIdx: 0, argName: 'file' },
+    { name: 'xfix', argIdx: 0, argName: 'file' },
+    { name: 'bitmap', argIdx: 0, argName: 'file' },
+    { name: 'pictex', argIdx: 0, argName: 'file' },
+    { name: 'cairo_pdf', argIdx: 0, argName: 'file' },
+    { name: 'svg', argIdx: 0, argName: 'file' },
+    { name: 'bmp', argIdx: 0, argName: 'file' },
+    { name: 'tiff', argIdx: 0, argName: 'file' },
+    { name: 'X11', argIdx: 0, argName: 'file' },
+    { name: 'quartz', argIdx: 0, argName: 'file' },
+];
+function printResultSection(title, infos, result, sectionSpecifics) {
+    if (infos.length <= 0) {
+        return;
+    }
+    result.push(` ╰ ${title}`);
+    const grouped = infos.reduce(function (groups, i) {
+        const array = groups.get(i.functionName);
+        if (array) {
+            array.push(i);
+        }
+        else {
+            groups.set(i.functionName, [i]);
+        }
+        return groups;
+    }, new Map());
+    for (const [functionName, infos] of grouped) {
+        result.push(` ╰ \`${functionName}\``);
+        result.push(infos.map(i => ` ╰ Node Id: ${i.nodeId}, ${sectionSpecifics(i)}`).join('\n'));
+    }
+}
+const functionInfoSchema = joi_1.default.array().items(joi_1.default.object({
+    name: joi_1.default.string().required().description('The name of the library function.'),
+    argIdx: joi_1.default.number().optional().description('The index of the argument that contains the library name.'),
+    argName: joi_1.default.string().optional().description('The name of the argument that contains the library name.'),
+})).optional();
+exports.DependenciesQueryDefinition = {
+    executor: dependencies_query_executor_1.executeDependenciesQuery,
+    asciiSummarizer: (formatter, _processed, queryResults, result) => {
+        const out = queryResults;
+        result.push(`Query: ${(0, ansi_1.bold)('dependencies', formatter)} (${(0, time_1.printAsMs)(out['.meta'].timing, 0)})`);
+        printResultSection('Libraries', out.libraries, result, l => `\`${l.libraryName}\``);
+        printResultSection('Sourced Files', out.sourcedFiles, result, s => `\`${s.file}\``);
+        printResultSection('Read Data', out.readData, result, r => `\`${r.source}\``);
+        printResultSection('Written Data', out.writtenData, result, w => `\`${w.destination}\``);
+        return true;
+    },
+    schema: joi_1.default.object({
+        type: joi_1.default.string().valid('dependencies').required().description('The type of the query.'),
+        ignoreDefaultFunctions: joi_1.default.boolean().optional().description('Should the set of functions that are detected by default be ignored/skipped?'),
+        libraryFunctions: functionInfoSchema.description('The set of library functions to search for.'),
+        sourceFunctions: functionInfoSchema.description('The set of source functions to search for.'),
+        readFunctions: functionInfoSchema.description('The set of data reading functions to search for.'),
+        writeFunctions: functionInfoSchema.description('The set of data writing functions to search for.'),
+    }).description('The dependencies query retrieves and returns the set of all dependencies in the dataflow graph, which includes libraries, sourced files, read data, and written data.')
+};
+//# sourceMappingURL=dependencies-query-format.js.map
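Illustration only: checking a query against the Joi schema exported above. The deep-import path is an assumption based on this diff's file layout, and `load_script` is a hypothetical custom source function.

```typescript
import { DependenciesQueryDefinition } from '@eagleoutice/flowr/queries/catalog/dependencies-query/dependencies-query-format';

const { error } = DependenciesQueryDefinition.schema.validate({
    type: 'dependencies',
    ignoreDefaultFunctions: true,
    // with ignoreDefaultFunctions set, only the custom entries below are checked
    sourceFunctions: [{ name: 'load_script', argIdx: 0, argName: 'file' }]
});
if (error) {
    console.error(error.message);
}
```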
package/queries/catalog/id-map-query/id-map-query-executor.d.ts
@@ -1,3 +1,3 @@
-import type { BasicQueryData } from '../../query';
 import type { IdMapQuery, IdMapQueryResult } from './id-map-query-format';
+import type { BasicQueryData } from '../../base-query-format';
 export declare function executeIdMapQuery({ ast }: BasicQueryData, queries: readonly IdMapQuery[]): IdMapQueryResult;
package/queries/catalog/id-map-query/id-map-query-format.d.ts
@@ -1,8 +1,67 @@
 import type { BaseQueryFormat, BaseQueryResult } from '../../base-query-format';
 import type { AstIdMap } from '../../../r-bridge/lang-4.x/ast/model/processing/decorate';
+import { executeIdMapQuery } from './id-map-query-executor';
+import Joi from 'joi';
 export interface IdMapQuery extends BaseQueryFormat {
     readonly type: 'id-map';
 }
 export interface IdMapQueryResult extends BaseQueryResult {
     readonly idMap: AstIdMap;
 }
+export declare const IdMapQueryDefinition: {
+    readonly executor: typeof executeIdMapQuery;
+    readonly asciiSummarizer: (formatter: import("../../../util/ansi").OutputFormatter, _processed: import("../../../core/steps/pipeline/pipeline").PipelineOutput<import("../../../core/steps/pipeline/pipeline").Pipeline<{
+        readonly name: "parse";
+        readonly humanReadableName: "parse with R shell";
+        readonly description: "Parse the given R code into an AST";
+        readonly processor: (_results: unknown, input: Partial<import("../../../core/steps/all/core/00-parse").ParseRequiredInput>) => Promise<import("../../../core/steps/all/core/00-parse").ParseStepOutput>;
+        readonly executed: import("../../../core/steps/pipeline-step").PipelineStepStage.OncePerFile;
+        readonly printer: {
+            readonly 0: typeof import("../../../core/print/print").internalPrinter;
+            readonly 2: {
+                (value: any, replacer?: (this: any, key: string, value: any) => any, space?: string | number): string;
+                (value: any, replacer?: (number | string)[] | null, space?: string | number): string;
+            };
+            readonly 5: ({ parsed }: import("../../../core/steps/all/core/00-parse").ParseStepOutput, config: import("../../../util/quads").QuadSerializationConfiguration) => string;
+        };
+        readonly dependencies: readonly [];
+        readonly requiredInput: import("../../../core/steps/all/core/00-parse").ParseRequiredInput;
+    } | {
+        readonly name: "normalize";
+        readonly humanReadableName: "normalize";
+        readonly description: "Normalize the AST to flowR's AST";
+        readonly processor: (results: {
+            parse?: import("../../../core/steps/all/core/00-parse").ParseStepOutput;
+        }, input: Partial<import("../../../core/steps/all/core/10-normalize").NormalizeRequiredInput>) => import("../../../r-bridge/lang-4.x/ast/model/processing/decorate").NormalizedAst<import("../../../r-bridge/lang-4.x/ast/model/processing/decorate").ParentInformation, import("../../../r-bridge/lang-4.x/ast/model/model").RNode<import("../../../r-bridge/lang-4.x/ast/model/processing/decorate").ParentInformation>>;
+        readonly executed: import("../../../core/steps/pipeline-step").PipelineStepStage.OncePerFile;
+        readonly printer: {
+            readonly 0: typeof import("../../../core/print/print").internalPrinter;
+            readonly 2: typeof import("../../../core/print/normalize-printer").normalizedAstToJson;
+            readonly 5: typeof import("../../../core/print/normalize-printer").normalizedAstToQuads;
+            readonly 3: typeof import("../../../core/print/normalize-printer").printNormalizedAstToMermaid;
+            readonly 4: typeof import("../../../core/print/normalize-printer").printNormalizedAstToMermaidUrl;
+        };
+        readonly dependencies: readonly ["parse"];
+        readonly requiredInput: import("../../../core/steps/all/core/10-normalize").NormalizeRequiredInput;
+    } | {
+        readonly humanReadableName: "dataflow";
+        readonly processor: (results: {
+            normalize?: import("../../../r-bridge/lang-4.x/ast/model/processing/decorate").NormalizedAst;
+        }, input: {
+            request?: import("../../../r-bridge/retriever").RParseRequests;
+        }) => import("../../../dataflow/info").DataflowInformation;
+        readonly requiredInput: {};
+        readonly name: "dataflow";
+        readonly description: "Construct the dataflow graph";
+        readonly executed: import("../../../core/steps/pipeline-step").PipelineStepStage.OncePerFile;
+        readonly printer: {
+            readonly 0: typeof import("../../../core/print/print").internalPrinter;
+            readonly 2: typeof import("../../../core/print/dataflow-printer").dataflowGraphToJson;
+            readonly 5: typeof import("../../../core/print/dataflow-printer").dataflowGraphToQuads;
+            readonly 3: typeof import("../../../core/print/dataflow-printer").dataflowGraphToMermaid;
+            readonly 4: typeof import("../../../core/print/dataflow-printer").dataflowGraphToMermaidUrl;
+        };
+        readonly dependencies: readonly ["normalize"];
+    }>>, queryResults: BaseQueryResult, result: string[]) => true;
+    readonly schema: Joi.ObjectSchema<any>;
+};
package/queries/catalog/id-map-query/id-map-query-format.js
@@ -1,3 +1,24 @@
 "use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
 Object.defineProperty(exports, "__esModule", { value: true });
+exports.IdMapQueryDefinition = void 0;
+const id_map_query_executor_1 = require("./id-map-query-executor");
+const ansi_1 = require("../../../util/ansi");
+const time_1 = require("../../../util/time");
+const joi_1 = __importDefault(require("joi"));
+const query_print_1 = require("../../query-print");
+exports.IdMapQueryDefinition = {
+    executor: id_map_query_executor_1.executeIdMapQuery,
+    asciiSummarizer: (formatter, _processed, queryResults, result) => {
+        const out = queryResults;
+        result.push(`Query: ${(0, ansi_1.bold)('id-map', formatter)} (${(0, time_1.printAsMs)(out['.meta'].timing, 0)})`);
+        result.push(` ╰ Id List: {${(0, query_print_1.summarizeIdsIfTooLong)(formatter, [...out.idMap.keys()])}}`);
+        return true;
+    },
+    schema: joi_1.default.object({
+        type: joi_1.default.string().valid('id-map').required().description('The type of the query.'),
+    }).description('The id map query retrieves the id map from the normalized AST.')
+};
 //# sourceMappingURL=id-map-query-format.js.map
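Sketch (not from the package): per the Joi schema above, an id-map query takes no options beyond its type. The deep-import path is an assumption based on this diff's file layout.

```typescript
import type { IdMapQuery } from '@eagleoutice/flowr/queries/catalog/id-map-query/id-map-query-format';

// the query object itself is trivial; the result carries the AST id map
const idMapQuery: IdMapQuery = { type: 'id-map' };
```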
package/queries/catalog/lineage-query/lineage-query-executor.d.ts
@@ -1,3 +1,3 @@
-import type { BasicQueryData } from '../../query';
 import type { LineageQuery, LineageQueryResult } from './lineage-query-format';
+import type { BasicQueryData } from '../../base-query-format';
 export declare function executeLineageQuery({ graph, ast }: BasicQueryData, queries: readonly LineageQuery[]): LineageQueryResult;
package/queries/catalog/lineage-query/lineage-query-format.d.ts
@@ -1,6 +1,8 @@
 import type { BaseQueryFormat, BaseQueryResult } from '../../base-query-format';
 import type { SingleSlicingCriterion } from '../../../slicing/criterion/parse';
 import type { NodeId } from '../../../r-bridge/lang-4.x/ast/model/processing/node-id';
+import Joi from 'joi';
+import { executeLineageQuery } from './lineage-query-executor';
 /**
  * Calculates the lineage of the given criterion.
  */
@@ -12,3 +14,60 @@ export interface LineageQueryResult extends BaseQueryResult {
     /** Maps each criterion to the found lineage, duplicates are ignored. */
     readonly lineages: Record<SingleSlicingCriterion, Set<NodeId>>;
 }
+export declare const LineageQueryDefinition: {
+    readonly executor: typeof executeLineageQuery;
+    readonly asciiSummarizer: (formatter: import("../../../util/ansi").OutputFormatter, _processed: import("../../../core/steps/pipeline/pipeline").PipelineOutput<import("../../../core/steps/pipeline/pipeline").Pipeline<{
+        readonly name: "parse";
+        readonly humanReadableName: "parse with R shell";
+        readonly description: "Parse the given R code into an AST";
+        readonly processor: (_results: unknown, input: Partial<import("../../../core/steps/all/core/00-parse").ParseRequiredInput>) => Promise<import("../../../core/steps/all/core/00-parse").ParseStepOutput>;
+        readonly executed: import("../../../core/steps/pipeline-step").PipelineStepStage.OncePerFile;
+        readonly printer: {
+            readonly 0: typeof import("../../../core/print/print").internalPrinter;
+            readonly 2: {
+                (value: any, replacer?: (this: any, key: string, value: any) => any, space?: string | number): string;
+                (value: any, replacer?: (number | string)[] | null, space?: string | number): string;
+            };
+            readonly 5: ({ parsed }: import("../../../core/steps/all/core/00-parse").ParseStepOutput, config: import("../../../util/quads").QuadSerializationConfiguration) => string;
+        };
+        readonly dependencies: readonly [];
+        readonly requiredInput: import("../../../core/steps/all/core/00-parse").ParseRequiredInput;
+    } | {
+        readonly name: "normalize";
+        readonly humanReadableName: "normalize";
+        readonly description: "Normalize the AST to flowR's AST";
+        readonly processor: (results: {
+            parse?: import("../../../core/steps/all/core/00-parse").ParseStepOutput;
+        }, input: Partial<import("../../../core/steps/all/core/10-normalize").NormalizeRequiredInput>) => import("../../../r-bridge/lang-4.x/ast/model/processing/decorate").NormalizedAst<import("../../../r-bridge/lang-4.x/ast/model/processing/decorate").ParentInformation, import("../../../r-bridge/lang-4.x/ast/model/model").RNode<import("../../../r-bridge/lang-4.x/ast/model/processing/decorate").ParentInformation>>;
+        readonly executed: import("../../../core/steps/pipeline-step").PipelineStepStage.OncePerFile;
+        readonly printer: {
+            readonly 0: typeof import("../../../core/print/print").internalPrinter;
+            readonly 2: typeof import("../../../core/print/normalize-printer").normalizedAstToJson;
+            readonly 5: typeof import("../../../core/print/normalize-printer").normalizedAstToQuads;
+            readonly 3: typeof import("../../../core/print/normalize-printer").printNormalizedAstToMermaid;
+            readonly 4: typeof import("../../../core/print/normalize-printer").printNormalizedAstToMermaidUrl;
+        };
+        readonly dependencies: readonly ["parse"];
+        readonly requiredInput: import("../../../core/steps/all/core/10-normalize").NormalizeRequiredInput;
+    } | {
+        readonly humanReadableName: "dataflow";
+        readonly processor: (results: {
+            normalize?: import("../../../r-bridge/lang-4.x/ast/model/processing/decorate").NormalizedAst;
+        }, input: {
+            request?: import("../../../r-bridge/retriever").RParseRequests;
+        }) => import("../../../dataflow/info").DataflowInformation;
+        readonly requiredInput: {};
+        readonly name: "dataflow";
+        readonly description: "Construct the dataflow graph";
+        readonly executed: import("../../../core/steps/pipeline-step").PipelineStepStage.OncePerFile;
+        readonly printer: {
+            readonly 0: typeof import("../../../core/print/print").internalPrinter;
+            readonly 2: typeof import("../../../core/print/dataflow-printer").dataflowGraphToJson;
+            readonly 5: typeof import("../../../core/print/dataflow-printer").dataflowGraphToQuads;
+            readonly 3: typeof import("../../../core/print/dataflow-printer").dataflowGraphToMermaid;
+            readonly 4: typeof import("../../../core/print/dataflow-printer").dataflowGraphToMermaidUrl;
+        };
+        readonly dependencies: readonly ["normalize"];
+    }>>, queryResults: BaseQueryResult, result: string[]) => true;
+    readonly schema: Joi.ObjectSchema<any>;
+};
package/queries/catalog/lineage-query/lineage-query-format.js
@@ -1,3 +1,27 @@
 "use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
 Object.defineProperty(exports, "__esModule", { value: true });
+exports.LineageQueryDefinition = void 0;
+const ansi_1 = require("../../../util/ansi");
+const time_1 = require("../../../util/time");
+const joi_1 = __importDefault(require("joi"));
+const lineage_query_executor_1 = require("./lineage-query-executor");
+const query_print_1 = require("../../query-print");
+exports.LineageQueryDefinition = {
+    executor: lineage_query_executor_1.executeLineageQuery,
+    asciiSummarizer: (formatter, _processed, queryResults, result) => {
+        const out = queryResults;
+        result.push(`Query: ${(0, ansi_1.bold)('lineage', formatter)} (${(0, time_1.printAsMs)(out['.meta'].timing, 0)})`);
+        for (const [criteria, lineage] of Object.entries(out.lineages)) {
+            result.push(` ╰ ${criteria}: {${(0, query_print_1.summarizeIdsIfTooLong)(formatter, [...lineage])}}`);
+        }
+        return true;
+    },
+    schema: joi_1.default.object({
+        type: joi_1.default.string().valid('lineage').required().description('The type of the query.'),
+        criterion: joi_1.default.string().required().description('The slicing criterion of the node to get the lineage of.')
+    }).description('Lineage query used to find the lineage of a node in the dataflow graph')
+};
 //# sourceMappingURL=lineage-query-format.js.map
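Sketch (not from the package): a lineage query as accepted by the Joi schema above. The criterion string is assumed to follow flowR's slicing-criterion syntax; "2@x" is a hypothetical criterion referring to a variable `x` in line 2 of the analyzed script.

```typescript
const lineageQuery = {
    type: 'lineage',
    criterion: '2@x' // hypothetical slicing criterion: variable `x` in line 2
} as const;
```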