@eagleoutice/flowr 2.0.11 → 2.0.12

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (39) hide show
  1. package/README.md +41 -8
  2. package/abstract-interpretation/domain.d.ts +33 -3
  3. package/abstract-interpretation/domain.js +31 -1
  4. package/benchmark/slicer.d.ts +3 -0
  5. package/benchmark/slicer.js +3 -0
  6. package/benchmark/stats/print.js +0 -1
  7. package/benchmark/summarizer/second-phase/graph.js +4 -4
  8. package/cli/flowr.d.ts +9 -8
  9. package/cli/flowr.js +22 -14
  10. package/cli/repl/core.d.ts +28 -12
  11. package/cli/repl/core.js +18 -18
  12. package/cli/repl/server/connection.d.ts +2 -1
  13. package/cli/repl/server/connection.js +61 -18
  14. package/cli/repl/server/messages/analysis.d.ts +11 -5
  15. package/cli/repl/server/messages/analysis.js +1 -1
  16. package/cli/repl/server/server.d.ts +2 -1
  17. package/cli/repl/server/server.js +4 -2
  18. package/core/steps/all/core/00-parse.d.ts +3 -3
  19. package/core/steps/all/core/00-parse.js +7 -1
  20. package/core/steps/all/core/20-dataflow.d.ts +2 -2
  21. package/core/steps/pipeline/default-pipelines.d.ts +8 -8
  22. package/dataflow/environments/built-in.js +1 -1
  23. package/dataflow/extractor.d.ts +2 -2
  24. package/dataflow/extractor.js +20 -4
  25. package/dataflow/info.d.ts +9 -3
  26. package/dataflow/internal/process/functions/call/built-in/built-in-source.d.ts +8 -2
  27. package/dataflow/internal/process/functions/call/built-in/built-in-source.js +25 -4
  28. package/dataflow/processor.d.ts +5 -4
  29. package/dataflow/processor.js +3 -2
  30. package/package.json +7 -5
  31. package/r-bridge/data/types.d.ts +2 -2
  32. package/r-bridge/lang-4.x/ast/model/processing/decorate.d.ts +1 -0
  33. package/r-bridge/lang-4.x/ast/model/processing/decorate.js +5 -1
  34. package/r-bridge/lang-4.x/ast/parser/xml/internal/loops/normalize-repeat.d.ts +2 -2
  35. package/r-bridge/lang-4.x/ast/parser/xml/internal/loops/normalize-repeat.js +2 -2
  36. package/r-bridge/retriever.d.ts +24 -16
  37. package/r-bridge/retriever.js +19 -8
  38. package/slicing/criterion/parse.js +4 -4
  39. package/util/version.js +1 -1
@@ -1,4 +1,30 @@
1
1
  "use strict";
2
+ var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
3
+ if (k2 === undefined) k2 = k;
4
+ var desc = Object.getOwnPropertyDescriptor(m, k);
5
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
6
+ desc = { enumerable: true, get: function() { return m[k]; } };
7
+ }
8
+ Object.defineProperty(o, k2, desc);
9
+ }) : (function(o, m, k, k2) {
10
+ if (k2 === undefined) k2 = k;
11
+ o[k2] = m[k];
12
+ }));
13
+ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
14
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
15
+ }) : function(o, v) {
16
+ o["default"] = v;
17
+ });
18
+ var __importStar = (this && this.__importStar) || function (mod) {
19
+ if (mod && mod.__esModule) return mod;
20
+ var result = {};
21
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
22
+ __setModuleDefault(result, mod);
23
+ return result;
24
+ };
25
+ var __importDefault = (this && this.__importDefault) || function (mod) {
26
+ return (mod && mod.__esModule) ? mod : { "default": mod };
27
+ };
2
28
  Object.defineProperty(exports, "__esModule", { value: true });
3
29
  exports.sanitizeAnalysisResults = exports.FlowRServerConnection = void 0;
4
30
  const send_1 = require("./send");
@@ -17,6 +43,9 @@ const _10_normalize_1 = require("../../../core/steps/all/core/10-normalize");
17
43
  const _20_dataflow_1 = require("../../../core/steps/all/core/20-dataflow");
18
44
  const ansi_1 = require("../../../util/ansi");
19
45
  const default_pipelines_1 = require("../../../core/steps/pipeline/default-pipelines");
46
+ const graph_1 = require("../../../dataflow/graph/graph");
47
+ const tmp = __importStar(require("tmp"));
48
+ const fs_1 = __importDefault(require("fs"));
20
49
  /**
21
50
  * Each connection handles a single client, answering to its requests.
22
51
  * There is no need to construct this class manually, {@link FlowRServer} will do it for you.
@@ -26,16 +55,18 @@ class FlowRServerConnection {
26
55
  shell;
27
56
  name;
28
57
  logger;
58
+ allowRSessionAccess;
29
59
  // maps token to information
30
60
  fileMap = new Map();
31
61
  // we do not have to ensure synchronized shell-access as we are always running synchronized
32
- constructor(socket, name, shell) {
62
+ constructor(socket, name, shell, allowRSessionAccess) {
33
63
  this.socket = socket;
34
64
  this.shell = shell;
35
65
  this.name = name;
36
66
  this.logger = server_1.serverLog.getSubLogger({ name });
37
67
  this.socket.on('data', data => this.handleData(String(data)));
38
68
  this.socket.on('error', e => this.logger.error(`[${this.name}] Error while handling connection: ${String(e)}`));
69
+ this.allowRSessionAccess = allowRSessionAccess;
39
70
  }
40
71
  currentMessageBuffer = '';
41
72
  handleData(message) {
@@ -56,7 +87,7 @@ class FlowRServerConnection {
56
87
  }
57
88
  switch (request.message.type) {
58
89
  case 'request-file-analysis':
59
- this.handleFileAnalysisRequest(request.message);
90
+ void this.handleFileAnalysisRequest(request.message);
60
91
  break;
61
92
  case 'request-slice':
62
93
  this.handleSliceRequest(request.message);
@@ -74,7 +105,7 @@ class FlowRServerConnection {
74
105
  this.socket.end();
75
106
  }
76
107
  }
77
- handleFileAnalysisRequest(base) {
108
+ async handleFileAnalysisRequest(base) {
78
109
  const requestResult = (0, validate_1.validateMessage)(base, analysis_1.requestAnalysisMessage);
79
110
  if (requestResult.type === 'error') {
80
111
  (0, validate_1.answerForValidationError)(this.socket, requestResult, base.id);
@@ -85,8 +116,9 @@ class FlowRServerConnection {
85
116
  if (message.filetoken && this.fileMap.has(message.filetoken)) {
86
117
  this.logger.warn(`File token ${message.filetoken} already exists. Overwriting.`);
87
118
  }
88
- const slicer = this.createPipelineExecutorForRequest(message);
89
- void slicer.allRemainingSteps(false).then(async (results) => await this.sendFileAnalysisResponse(results, message))
119
+ const tempFile = tmp.fileSync({ postfix: '.R' });
120
+ const slicer = this.createPipelineExecutorForRequest(message, tempFile.name);
121
+ await slicer.allRemainingSteps(false).then(async (results) => await this.sendFileAnalysisResponse(results, message))
90
122
  .catch(e => {
91
123
  this.logger.error(`[${this.name}] Error while analyzing file ${message.filename ?? 'unknown file'}: ${String(e)}`);
92
124
  (0, send_1.sendMessage)(this.socket, {
@@ -96,6 +128,8 @@ class FlowRServerConnection {
96
128
  reason: `Error while analyzing file ${message.filename ?? 'unknown file'}: ${String(e)}`
97
129
  });
98
130
  });
131
+ // despite its name, removeCallback() is the cleanup handle returned by tmp.fileSync - calling it deletes the temporary file
132
+ tempFile.removeCallback();
99
133
  }
100
134
  async sendFileAnalysisResponse(results, message) {
101
135
  let cfg = undefined;
@@ -127,14 +161,27 @@ class FlowRServerConnection {
127
161
  });
128
162
  }
129
163
  }
130
- createPipelineExecutorForRequest(message) {
164
+ createPipelineExecutorForRequest(message, tempFile) {
165
+ let request;
166
+ if (message.content !== undefined) {
167
+ // we store the code in a temporary file in case it's too big for the shell to handle
168
+ fs_1.default.writeFileSync(tempFile, message.content ?? '');
169
+ request = { request: 'file', content: tempFile };
170
+ }
171
+ else if (message.filepath !== undefined) {
172
+ if (typeof message.filepath === 'string') {
173
+ request = { request: 'file', content: message.filepath };
174
+ }
175
+ else {
176
+ request = message.filepath.map(fp => ({ request: 'file', content: fp }));
177
+ }
178
+ }
179
+ else {
180
+ throw new Error('Either content or filepath must be defined.');
181
+ }
131
182
  const slicer = new pipeline_executor_1.PipelineExecutor(default_pipelines_1.DEFAULT_SLICING_PIPELINE, {
132
183
  shell: this.shell,
133
- // we have to make sure that the content is not interpreted as a file path if it starts with 'file://' therefore, we do it manually
134
- request: {
135
- request: message.content === undefined ? 'file' : 'text',
136
- content: message.content ?? message.filepath
137
- },
184
+ request,
138
185
  criterion: [] // currently unknown
139
186
  });
140
187
  if (message.filetoken) {
@@ -201,7 +248,7 @@ class FlowRServerConnection {
201
248
  formatter: request.ansi ? ansi_1.ansiFormatter : ansi_1.voidFormatter,
202
249
  stdout: msg => out('stdout', msg),
203
250
  stderr: msg => out('stderr', msg)
204
- }, request.expression, this.shell).then(() => {
251
+ }, request.expression, this.shell, this.allowRSessionAccess).then(() => {
205
252
  (0, send_1.sendMessage)(this.socket, {
206
253
  type: 'end-repl-execution',
207
254
  id: request.id
@@ -219,12 +266,8 @@ function sanitizeAnalysisResults(results) {
219
266
  },
220
267
  dataflow: {
221
268
  ...results.dataflow,
222
- graph: {
223
- ...results.dataflow?.graph,
224
- functionCache: undefined,
225
- // @ts-expect-error this is private, but we want to sanitize it for the purpose of json serialization
226
- _idMap: undefined
227
- }
269
+ // we want to keep the DataflowGraph type information, but not the idMap
270
+ graph: new graph_1.DataflowGraph(undefined).mergeWith(results.dataflow?.graph)
228
271
  }
229
272
  };
230
273
  }
@@ -12,7 +12,7 @@ export interface FileAnalysisRequestMessage extends IdMessageBase {
12
12
  * This is a unique token that you assign to subsequently slice the respective files.
13
13
  * If you pass the same token multiple times, previous results will be overwritten.
14
14
  *
15
- * If you do not pass a file token, the server will _not_ store the results!
15
+ * If you do _not_ pass a file token, the server will _not_ store the results!
16
16
  */
17
17
  filetoken?: string;
18
18
  /**
@@ -20,10 +20,16 @@ export interface FileAnalysisRequestMessage extends IdMessageBase {
20
20
  * However, the name is only for debugging and bears no semantic meaning.
21
21
  */
22
22
  filename?: string;
23
- /** The contents of the file, or an R expression itself (like `1 + 1`), give either this or the `filepath`. */
23
+ /**
24
+ * The contents of the file, or an R expression itself (like `1 + 1`), give either this or the `filepath`.
25
+ * If you want to load multiple R files as one, either use `filepath` or concatenate the file-contents for this field.
26
+ */
24
27
  content?: string;
25
- /** The filepath on the local machine, accessible to flowR, or simply. Give either this or the `content` */
26
- filepath?: string;
28
+ /**
29
+ * The filepath on the local machine, accessible to flowR. Give either this or the `content`.
30
+ * If you want to load multiple R files as one, either use this or concatenate the file-contents for the `content`.
31
+ */
32
+ filepath?: string | readonly string[];
27
33
  /** Can be used to additionally extract the {@link ControlFlowInformation} of the file, which is not exposed (and not fully calculated) by default. */
28
34
  cfg?: boolean;
29
35
  /** Controls the serialization of the `results` (and the {@link ControlFlowGraph} if the corresponding flag is set). If missing, we assume _json_. */
@@ -35,7 +41,7 @@ export declare const requestAnalysisMessage: MessageDefinition<FileAnalysisReque
35
41
  * It contains the results of the analysis in JSON format (guided by {@link FileAnalysisRequestMessage#format}).
36
42
  *
37
43
  * The `idMap` of the normalization step (see {@link NormalizedAst}) is not serialized as it would essentially
38
- * repeat the complete normalized AST.
44
+ * repeat the complete normalized AST, you have to re-create it yourself if you require it.
39
45
  *
40
46
  * @note The serialization of maps and sets is controlled by the {@link jsonReplacer} as part of {@link sendMessage}.
41
47
  *
@@ -13,7 +13,7 @@ exports.requestAnalysisMessage = {
13
13
  filetoken: joi_1.default.string().optional(),
14
14
  filename: joi_1.default.string().optional(),
15
15
  content: joi_1.default.string().optional(),
16
- filepath: joi_1.default.string().optional(),
16
+ filepath: joi_1.default.alternatives(joi_1.default.string(), joi_1.default.array().items(joi_1.default.string())).optional(),
17
17
  cfg: joi_1.default.boolean().optional(),
18
18
  format: joi_1.default.string().valid('json', 'n-quads').optional()
19
19
  }).xor('content', 'filepath')
@@ -11,10 +11,11 @@ export declare class FlowRServer {
11
11
  private readonly server;
12
12
  private readonly shell;
13
13
  private versionInformation;
14
+ private allowRSessionAccess;
14
15
  /** maps names to the respective connection */
15
16
  private connections;
16
17
  private nameCounter;
17
- constructor(shell: RShell, server?: Server);
18
+ constructor(shell: RShell, allowRSessionAccess: boolean, server?: Server);
18
19
  start(port: number): Promise<void>;
19
20
  private onConnect;
20
21
  }
@@ -17,13 +17,15 @@ class FlowRServer {
17
17
  server;
18
18
  shell;
19
19
  versionInformation;
20
+ allowRSessionAccess = false;
20
21
  /** maps names to the respective connection */
21
22
  connections = new Map();
22
23
  nameCounter = 0;
23
- constructor(shell, server = new net_1.NetServer()) {
24
+ constructor(shell, allowRSessionAccess, server = new net_1.NetServer()) {
24
25
  this.server = server;
25
26
  this.server.onConnect(c => this.onConnect(c));
26
27
  this.shell = shell;
28
+ this.allowRSessionAccess = allowRSessionAccess;
27
29
  }
28
30
  async start(port) {
29
31
  this.versionInformation = await (0, version_1.retrieveVersionInformation)(this.shell);
@@ -37,7 +39,7 @@ class FlowRServer {
37
39
  }
38
40
  const name = `client-${this.nameCounter++}`;
39
41
  exports.serverLog.info(`Client connected: ${(0, send_1.getUnnamedSocketName)(c)} as "${name}"`);
40
- this.connections.set(name, new connection_1.FlowRServerConnection(c, name, this.shell));
42
+ this.connections.set(name, new connection_1.FlowRServerConnection(c, name, this.shell, this.allowRSessionAccess));
41
43
  helloClient(c, name, this.versionInformation);
42
44
  c.on('close', () => {
43
45
  this.connections.delete(name);
@@ -3,14 +3,14 @@ import { parseToQuads } from '../../../print/parse-printer';
3
3
  import { PipelineStepStage } from '../../pipeline-step';
4
4
  import type { RShellExecutor } from '../../../../r-bridge/shell-executor';
5
5
  import type { RShell } from '../../../../r-bridge/shell';
6
- import type { RParseRequest } from '../../../../r-bridge/retriever';
6
+ import type { RParseRequests } from '../../../../r-bridge/retriever';
7
7
  export interface ParseRequiredInput {
8
8
  /** This is the {@link RShell} or {@link RShellExecutor} connection to be used to obtain the original parses AST of the R code */
9
9
  readonly shell: RShell | RShellExecutor;
10
10
  /** The request which essentially indicates the input to extract the AST from */
11
- readonly request: RParseRequest;
11
+ readonly request: RParseRequests;
12
12
  }
13
- declare function processor(_results: unknown, input: Partial<ParseRequiredInput>): import("ts-essentials").AsyncOrSync<string>;
13
+ declare function processor(_results: unknown, input: Partial<ParseRequiredInput>): Promise<string>;
14
14
  export declare const PARSE_WITH_R_SHELL_STEP: {
15
15
  readonly name: "parse";
16
16
  readonly humanReadableName: "parse with R shell";
@@ -5,7 +5,13 @@ const print_1 = require("../../../print/print");
5
5
  const parse_printer_1 = require("../../../print/parse-printer");
6
6
  const retriever_1 = require("../../../../r-bridge/retriever");
7
7
  function processor(_results, input) {
8
- return (0, retriever_1.retrieveParseDataFromRCode)(input.request, input.shell);
8
+ /* in the future, we want to expose all cases */
9
+ if (Array.isArray(input.request)) {
10
+ return (0, retriever_1.retrieveParseDataFromRCode)(input.request[0], input.shell);
11
+ }
12
+ else {
13
+ return (0, retriever_1.retrieveParseDataFromRCode)(input.request, input.shell);
14
+ }
9
15
  }
10
16
  exports.PARSE_WITH_R_SHELL_STEP = {
11
17
  name: 'parse',
@@ -2,11 +2,11 @@ import { internalPrinter } from '../../../print/print';
2
2
  import { PipelineStepStage } from '../../pipeline-step';
3
3
  import { dataflowGraphToJson, dataflowGraphToMermaid, dataflowGraphToMermaidUrl, dataflowGraphToQuads } from '../../../print/dataflow-printer';
4
4
  import type { NormalizedAst } from '../../../../r-bridge/lang-4.x/ast/model/processing/decorate';
5
- import type { RParseRequest } from '../../../../r-bridge/retriever';
5
+ import type { RParseRequests } from '../../../../r-bridge/retriever';
6
6
  declare function legacyProcessor(results: {
7
7
  normalize?: NormalizedAst;
8
8
  }, input: {
9
- request?: RParseRequest;
9
+ request?: RParseRequests;
10
10
  }): import("../../../../dataflow/info").DataflowInformation;
11
11
  export declare const STATIC_DATAFLOW: {
12
12
  readonly humanReadableName: "dataflow";
@@ -2,7 +2,7 @@ export declare const DEFAULT_SLICING_PIPELINE: import("./pipeline").Pipeline<{
2
2
  readonly name: "parse";
3
3
  readonly humanReadableName: "parse with R shell";
4
4
  readonly description: "Parse the given R code into an AST";
5
- readonly processor: (_results: unknown, input: Partial<import("../all/core/00-parse").ParseRequiredInput>) => import("ts-essentials").AsyncOrSync<string>;
5
+ readonly processor: (_results: unknown, input: Partial<import("../all/core/00-parse").ParseRequiredInput>) => Promise<string>;
6
6
  readonly executed: import("../pipeline-step").PipelineStepStage.OncePerFile;
7
7
  readonly printer: {
8
8
  readonly 0: typeof import("../../print/print").internalPrinter;
@@ -33,7 +33,7 @@ export declare const DEFAULT_SLICING_PIPELINE: import("./pipeline").Pipeline<{
33
33
  readonly processor: (results: {
34
34
  normalize?: import("../../../r-bridge/lang-4.x/ast/model/processing/decorate").NormalizedAst<import("../../../r-bridge/lang-4.x/ast/model/processing/decorate").ParentInformation, import("../../../r-bridge/lang-4.x/ast/model/model").RNode<import("../../../r-bridge/lang-4.x/ast/model/processing/decorate").ParentInformation>> | undefined;
35
35
  }, input: {
36
- request?: import("../../../r-bridge/retriever").RParseRequest | undefined;
36
+ request?: import("../../../r-bridge/retriever").RParseRequests | undefined;
37
37
  }) => import("../../../dataflow/info").DataflowInformation;
38
38
  readonly requiredInput: {};
39
39
  readonly name: "dataflow";
@@ -80,7 +80,7 @@ export declare const DEFAULT_RECONSTRUCT_PIPELINE: import("./pipeline").Pipeline
80
80
  readonly name: "parse";
81
81
  readonly humanReadableName: "parse with R shell";
82
82
  readonly description: "Parse the given R code into an AST";
83
- readonly processor: (_results: unknown, input: Partial<import("../all/core/00-parse").ParseRequiredInput>) => import("ts-essentials").AsyncOrSync<string>;
83
+ readonly processor: (_results: unknown, input: Partial<import("../all/core/00-parse").ParseRequiredInput>) => Promise<string>;
84
84
  readonly executed: import("../pipeline-step").PipelineStepStage.OncePerFile;
85
85
  readonly printer: {
86
86
  readonly 0: typeof import("../../print/print").internalPrinter;
@@ -111,7 +111,7 @@ export declare const DEFAULT_RECONSTRUCT_PIPELINE: import("./pipeline").Pipeline
111
111
  readonly processor: (results: {
112
112
  normalize?: import("../../../r-bridge/lang-4.x/ast/model/processing/decorate").NormalizedAst<import("../../../r-bridge/lang-4.x/ast/model/processing/decorate").ParentInformation, import("../../../r-bridge/lang-4.x/ast/model/model").RNode<import("../../../r-bridge/lang-4.x/ast/model/processing/decorate").ParentInformation>> | undefined;
113
113
  }, input: {
114
- request?: import("../../../r-bridge/retriever").RParseRequest | undefined;
114
+ request?: import("../../../r-bridge/retriever").RParseRequests | undefined;
115
115
  }) => import("../../../dataflow/info").DataflowInformation;
116
116
  readonly requiredInput: {};
117
117
  readonly name: "dataflow";
@@ -158,7 +158,7 @@ export declare const DEFAULT_DATAFLOW_PIPELINE: import("./pipeline").Pipeline<{
158
158
  readonly name: "parse";
159
159
  readonly humanReadableName: "parse with R shell";
160
160
  readonly description: "Parse the given R code into an AST";
161
- readonly processor: (_results: unknown, input: Partial<import("../all/core/00-parse").ParseRequiredInput>) => import("ts-essentials").AsyncOrSync<string>;
161
+ readonly processor: (_results: unknown, input: Partial<import("../all/core/00-parse").ParseRequiredInput>) => Promise<string>;
162
162
  readonly executed: import("../pipeline-step").PipelineStepStage.OncePerFile;
163
163
  readonly printer: {
164
164
  readonly 0: typeof import("../../print/print").internalPrinter;
@@ -189,7 +189,7 @@ export declare const DEFAULT_DATAFLOW_PIPELINE: import("./pipeline").Pipeline<{
189
189
  readonly processor: (results: {
190
190
  normalize?: import("../../../r-bridge/lang-4.x/ast/model/processing/decorate").NormalizedAst<import("../../../r-bridge/lang-4.x/ast/model/processing/decorate").ParentInformation, import("../../../r-bridge/lang-4.x/ast/model/model").RNode<import("../../../r-bridge/lang-4.x/ast/model/processing/decorate").ParentInformation>> | undefined;
191
191
  }, input: {
192
- request?: import("../../../r-bridge/retriever").RParseRequest | undefined;
192
+ request?: import("../../../r-bridge/retriever").RParseRequests | undefined;
193
193
  }) => import("../../../dataflow/info").DataflowInformation;
194
194
  readonly requiredInput: {};
195
195
  readonly name: "dataflow";
@@ -208,7 +208,7 @@ export declare const DEFAULT_NORMALIZE_PIPELINE: import("./pipeline").Pipeline<{
208
208
  readonly name: "parse";
209
209
  readonly humanReadableName: "parse with R shell";
210
210
  readonly description: "Parse the given R code into an AST";
211
- readonly processor: (_results: unknown, input: Partial<import("../all/core/00-parse").ParseRequiredInput>) => import("ts-essentials").AsyncOrSync<string>;
211
+ readonly processor: (_results: unknown, input: Partial<import("../all/core/00-parse").ParseRequiredInput>) => Promise<string>;
212
212
  readonly executed: import("../pipeline-step").PipelineStepStage.OncePerFile;
213
213
  readonly printer: {
214
214
  readonly 0: typeof import("../../print/print").internalPrinter;
@@ -239,7 +239,7 @@ export declare const DEFAULT_PARSE_PIPELINE: import("./pipeline").Pipeline<{
239
239
  readonly name: "parse";
240
240
  readonly humanReadableName: "parse with R shell";
241
241
  readonly description: "Parse the given R code into an AST";
242
- readonly processor: (_results: unknown, input: Partial<import("../all/core/00-parse").ParseRequiredInput>) => import("ts-essentials").AsyncOrSync<string>;
242
+ readonly processor: (_results: unknown, input: Partial<import("../all/core/00-parse").ParseRequiredInput>) => Promise<string>;
243
243
  readonly executed: import("../pipeline-step").PipelineStepStage.OncePerFile;
244
244
  readonly printer: {
245
245
  readonly 0: typeof import("../../print/print").internalPrinter;
@@ -108,7 +108,7 @@ registerBuiltInFunctions(true, ['return'], defaultBuiltInProcessor, { returnsNth
108
108
  registerBuiltInFunctions(true, ['break'], defaultBuiltInProcessor, { cfg: 2 /* ExitPointType.Break */ });
109
109
  registerBuiltInFunctions(true, ['next'], defaultBuiltInProcessor, { cfg: 3 /* ExitPointType.Next */ });
110
110
  registerBuiltInFunctions(true, ['{'], built_in_expression_list_1.processExpressionList, {});
111
- registerBuiltInFunctions(true, ['source'], built_in_source_1.processSourceCall, {});
111
+ registerBuiltInFunctions(true, ['source'], built_in_source_1.processSourceCall, { includeFunctionCall: true, forceFollow: false });
112
112
  registerBuiltInFunctions(true, ['[', '[['], built_in_access_1.processAccess, { treatIndicesAsString: false });
113
113
  registerBuiltInFunctions(true, ['$', '@'], built_in_access_1.processAccess, { treatIndicesAsString: true });
114
114
  registerBuiltInFunctions(true, ['if', 'ifelse'], built_in_if_then_else_1.processIfThenElse, {});
@@ -1,6 +1,6 @@
1
1
  import type { DataflowInformation } from './info';
2
2
  import type { DataflowProcessors } from './processor';
3
3
  import type { NormalizedAst, ParentInformation } from '../r-bridge/lang-4.x/ast/model/processing/decorate';
4
- import type { RParseRequest } from '../r-bridge/retriever';
4
+ import type { RParseRequests } from '../r-bridge/retriever';
5
5
  export declare const processors: DataflowProcessors<ParentInformation>;
6
- export declare function produceDataFlowGraph<OtherInfo>(request: RParseRequest, ast: NormalizedAst<OtherInfo & ParentInformation>): DataflowInformation;
6
+ export declare function produceDataFlowGraph<OtherInfo>(request: RParseRequests, ast: NormalizedAst<OtherInfo & ParentInformation>): DataflowInformation;
@@ -14,6 +14,7 @@ const make_argument_1 = require("./internal/process/functions/call/argument/make
14
14
  const range_1 = require("../util/range");
15
15
  const retriever_1 = require("../r-bridge/retriever");
16
16
  const environment_1 = require("./environments/environment");
17
+ const built_in_source_1 = require("./internal/process/functions/call/built-in/built-in-source");
17
18
  exports.processors = {
18
19
  ["RNumber" /* RType.Number */]: process_value_1.processValue,
19
20
  ["RString" /* RType.String */]: process_value_1.processValue,
@@ -45,14 +46,29 @@ exports.processors = {
45
46
  }, (0, make_argument_1.wrapArgumentsUnnamed)(n.children, d.completeAst.idMap), n.info.id, d)
46
47
  };
47
48
  function produceDataFlowGraph(request, ast) {
48
- return (0, processor_1.processDataflowFor)(ast.ast, {
49
+ const multifile = Array.isArray(request);
50
+ let firstRequest;
51
+ if (multifile) {
52
+ firstRequest = request[0];
53
+ }
54
+ else {
55
+ firstRequest = request;
56
+ }
57
+ const dfData = {
49
58
  completeAst: ast,
50
59
  environment: (0, environment_1.initializeCleanEnvironments)(),
51
60
  processors: exports.processors,
52
- currentRequest: request,
61
+ currentRequest: firstRequest,
53
62
  controlDependencies: undefined,
54
- referenceChain: [(0, retriever_1.requestFingerprint)(request)]
55
- });
63
+ referenceChain: [(0, retriever_1.requestFingerprint)(firstRequest)]
64
+ };
65
+ let df = (0, processor_1.processDataflowFor)(ast.ast, dfData);
66
+ if (multifile) {
67
+ for (let i = 1; i < request.length; i++) {
68
+ df = (0, built_in_source_1.standaloneSourceFile)(request[i], dfData, `root-${i}`, df);
69
+ }
70
+ }
71
+ return df;
56
72
  }
57
73
  exports.produceDataFlowGraph = produceDataFlowGraph;
58
74
  //# sourceMappingURL=extractor.js.map
@@ -5,25 +5,31 @@ import type { REnvironmentInformation } from './environments/environment';
5
5
  import { DataflowGraph } from './graph/graph';
6
6
  import type { GenericDifferenceInformation, WriteableDifferenceReport } from '../util/diff';
7
7
  export declare const enum ExitPointType {
8
+ /** The exit point is the implicit (last executed expression of a function/block) */
8
9
  Default = 0,
10
+ /** The exit point is an explicit `return` call (or an alias of it) */
9
11
  Return = 1,
12
+ /** The exit point is an explicit `break` call (or an alias of it) */
10
13
  Break = 2,
14
+ /** The exit point is an explicit `next` call (or an alias of it) */
11
15
  Next = 3
12
16
  }
13
17
  export interface ControlDependency {
18
+ /** The id of the node that causes the control dependency to be active (e.g., the condition of an if) */
14
19
  readonly id: NodeId;
15
20
  /** when does this control dependency trigger (if the condition is true or false)? */
16
21
  readonly when?: boolean;
17
22
  }
18
23
  export interface ExitPoint {
24
+ /** What kind of exit point is this one? May be used to filter for exit points of specific causes. */
19
25
  readonly type: ExitPointType;
26
+ /** The id of the node which causes the exit point! */
20
27
  readonly nodeId: NodeId;
28
+ /** Control dependencies which influence if the exit point triggers (e.g., if the `return` is contained within an `if` statement) */
21
29
  readonly controlDependencies: ControlDependency[] | undefined;
22
30
  }
23
31
  export declare function addNonDefaultExitPoints(existing: ExitPoint[], add: readonly ExitPoint[]): void;
24
- /**
25
- * The control flow information for the current {@link DataflowInformation}.
26
- */
32
+ /** The control flow information for the current {@link DataflowInformation}. */
27
33
  export interface DataflowCfgInformation {
28
34
  /**
29
35
  * The entry node into the subgraph
@@ -1,11 +1,17 @@
1
1
  import { type DataflowProcessorInformation } from '../../../../../processor';
2
2
  import type { DataflowInformation } from '../../../../../info';
3
- import type { RParseRequest, RParseRequestProvider } from '../../../../../../r-bridge/retriever';
3
+ import type { RParseRequestProvider, RParseRequest } from '../../../../../../r-bridge/retriever';
4
4
  import type { IdGenerator, ParentInformation } from '../../../../../../r-bridge/lang-4.x/ast/model/processing/decorate';
5
5
  import type { RFunctionArgument } from '../../../../../../r-bridge/lang-4.x/ast/model/nodes/r-function-call';
6
6
  import type { RSymbol } from '../../../../../../r-bridge/lang-4.x/ast/model/nodes/r-symbol';
7
7
  import type { NodeId } from '../../../../../../r-bridge/lang-4.x/ast/model/processing/node-id';
8
8
  import type { NoInfo } from '../../../../../../r-bridge/lang-4.x/ast/model/model';
9
9
  export declare function setSourceProvider(provider: RParseRequestProvider): void;
10
- export declare function processSourceCall<OtherInfo>(name: RSymbol<OtherInfo & ParentInformation>, args: readonly RFunctionArgument<OtherInfo & ParentInformation>[], rootId: NodeId, data: DataflowProcessorInformation<OtherInfo & ParentInformation>): DataflowInformation;
10
+ export declare function processSourceCall<OtherInfo>(name: RSymbol<OtherInfo & ParentInformation>, args: readonly RFunctionArgument<OtherInfo & ParentInformation>[], rootId: NodeId, data: DataflowProcessorInformation<OtherInfo & ParentInformation>, config: {
11
+ /** should this produce an explicit source function call in the graph? */
12
+ includeFunctionCall?: boolean;
13
+ /** should this function call be followed, even when the configuration disables it? */
14
+ forceFollow?: boolean;
15
+ }): DataflowInformation;
11
16
  export declare function sourceRequest<OtherInfo>(request: RParseRequest, data: DataflowProcessorInformation<OtherInfo & ParentInformation>, information: DataflowInformation, getId: IdGenerator<NoInfo>): DataflowInformation;
17
+ export declare function standaloneSourceFile<OtherInfo>(inputRequest: RParseRequest, data: DataflowProcessorInformation<OtherInfo & ParentInformation>, uniqueSourceId: string, information: DataflowInformation): DataflowInformation;
@@ -1,8 +1,9 @@
1
1
  "use strict";
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.sourceRequest = exports.processSourceCall = exports.setSourceProvider = void 0;
3
+ exports.standaloneSourceFile = exports.sourceRequest = exports.processSourceCall = exports.setSourceProvider = void 0;
4
4
  const shell_executor_1 = require("../../../../../../r-bridge/shell-executor");
5
5
  const processor_1 = require("../../../../../processor");
6
+ const info_1 = require("../../../../../info");
6
7
  const config_1 = require("../../../../../../config");
7
8
  const parser_1 = require("../../../../../../r-bridge/lang-4.x/ast/parser/json/parser");
8
9
  const known_call_handling_1 = require("../known-call-handling");
@@ -16,10 +17,12 @@ function setSourceProvider(provider) {
16
17
  sourceProvider = provider;
17
18
  }
18
19
  exports.setSourceProvider = setSourceProvider;
19
- function processSourceCall(name, args, rootId, data) {
20
- const information = (0, known_call_handling_1.processKnownFunctionCall)({ name, args, rootId, data }).information;
20
+ function processSourceCall(name, args, rootId, data, config) {
21
+ const information = config.includeFunctionCall ?
22
+ (0, known_call_handling_1.processKnownFunctionCall)({ name, args, rootId, data }).information
23
+ : (0, info_1.initializeCleanDataflowInformation)(rootId, data);
21
24
  const sourceFile = args[0];
22
- if ((0, config_1.getConfig)().ignoreSourceCalls) {
25
+ if (!config.forceFollow && (0, config_1.getConfig)().ignoreSourceCalls) {
23
26
  logger_1.dataflowLogger.info(`Skipping source call ${JSON.stringify(sourceFile)} (disabled in config file)`);
24
27
  return information;
25
28
  }
@@ -69,4 +72,22 @@ function sourceRequest(request, data, information, getId) {
69
72
  return newInformation;
70
73
  }
71
74
  exports.sourceRequest = sourceRequest;
75
+ function standaloneSourceFile(inputRequest, data, uniqueSourceId, information) {
76
+ const path = inputRequest.request === 'file' ? inputRequest.content : '-inline-';
77
+ /* this way we can still pass content */
78
+ const request = inputRequest.request === 'file' ? sourceProvider.createRequest(inputRequest.content) : inputRequest;
79
+ const fingerprint = (0, retriever_1.requestFingerprint)(request);
80
+ // check if the sourced file has already been dataflow analyzed, and if so, skip it
81
+ if (data.referenceChain.includes(fingerprint)) {
82
+ logger_1.dataflowLogger.info(`Found loop in dataflow analysis for ${JSON.stringify(request)}: ${JSON.stringify(data.referenceChain)}, skipping further dataflow analysis`);
83
+ return information;
84
+ }
85
+ return sourceRequest(request, {
86
+ ...data,
87
+ currentRequest: request,
88
+ environment: information.environment,
89
+ referenceChain: [...data.referenceChain, fingerprint]
90
+ }, information, (0, decorate_1.deterministicPrefixIdGenerator)(path + '@' + uniqueSourceId));
91
+ }
92
+ exports.standaloneSourceFile = standaloneSourceFile;
72
93
  //# sourceMappingURL=built-in-source.js.map
@@ -21,11 +21,11 @@ export interface DataflowProcessorInformation<OtherInfo> {
21
21
  */
22
22
  readonly processors: DataflowProcessors<OtherInfo>;
23
23
  /**
24
- * The {@link RParseRequest} that is currently being parsed
24
+ * The {@link RParseRequests} that is currently being parsed
25
25
  */
26
26
  readonly currentRequest: RParseRequest;
27
27
  /**
28
- * The chain of {@link RParseRequest} fingerprints ({@link requestFingerprint}) that lead to the {@link currentRequest}.
28
+ * The chain of {@link RParseRequests} fingerprints ({@link requestFingerprint}) that lead to the {@link currentRequest}.
29
29
  * The most recent (last) entry is expected to always be the {@link currentRequest}.
30
30
  */
31
31
  readonly referenceChain: string[];
@@ -47,11 +47,12 @@ export type DataflowProcessors<OtherInfo> = {
47
47
  /**
48
48
  * Originally, dataflow processor was written as a two-way fold, but this produced problems when trying to resolve function calls
49
49
  * which require information regarding the calling *and* definition context. While this only is a problem for late bindings as they happen
50
- * with functions (and probably quote'd R-expressions) it is still a problem that must be dealt with.
50
+ * with functions (and probably quote'd R-expressions), it is still a problem that must be dealt with.
51
51
  * Therefore, the dataflow processor has no complete control over the traversal and merge strategy of the graph, with each processor being in
52
52
  * the position to call the other processors as needed for its children.
53
53
  * <p>
54
- * Now this method can be called recursively within the other processors to parse the dataflow for nodes that you can not narrow down.
54
+ * Now this method can be called recursively within the other processors to parse the dataflow for nodes that you cannot narrow down
55
+ * in type or context.
55
56
  *
56
57
  * @param current - The current node to start processing from
57
58
  * @param data - The initial (/current) information to be passed down
@@ -4,11 +4,12 @@ exports.processDataflowFor = void 0;
4
4
  /**
5
5
  * Originally, dataflow processor was written as a two-way fold, but this produced problems when trying to resolve function calls
6
6
  * which require information regarding the calling *and* definition context. While this only is a problem for late bindings as they happen
7
- * with functions (and probably quote'd R-expressions) it is still a problem that must be dealt with.
7
+ * with functions (and probably quote'd R-expressions), it is still a problem that must be dealt with.
8
8
  * Therefore, the dataflow processor has no complete control over the traversal and merge strategy of the graph, with each processor being in
9
9
  * the position to call the other processors as needed for its children.
10
10
  * <p>
11
- * Now this method can be called recursively within the other processors to parse the dataflow for nodes that you can not narrow down.
11
+ * Now this method can be called recursively within the other processors to parse the dataflow for nodes that you cannot narrow down
12
+ * in type or context.
12
13
  *
13
14
  * @param current - The current node to start processing from
14
15
  * @param data - The initial (/current) information to be passed down
package/package.json CHANGED
@@ -1,18 +1,20 @@
1
1
  {
2
2
  "name": "@eagleoutice/flowr",
3
- "version": "2.0.11",
3
+ "version": "2.0.12",
4
4
  "description": "Static Dataflow Analyzer and Program Slicer for the R Programming Language",
5
5
  "types": "dist/src/index.d.ts",
6
6
  "repository": {
7
7
  "type": "git",
8
- "url": "git+https://github.com/Code-Inspect/flowr.git"
8
+ "url": "git+https://github.com/flowr-analysis/flowr.git"
9
9
  },
10
- "homepage": "https://github.com/Code-Inspect/flowr",
10
+ "homepage": "https://github.com/flowr-analysis/flowr",
11
11
  "bugs": {
12
- "url": "https://github.com/Code-Inspect/flowr/issues"
12
+ "url": "https://github.com/flowr-analysis/flowr/issues"
13
13
  },
14
14
  "scripts": {
15
15
  "main": "npm run build:bundle-flowr && node dist/src/cli/flowr.min.js",
16
+ "flowr": "npm run main --",
17
+ "main-dev": "ts-node src/cli/flowr.ts",
16
18
  "publish-library": "cp .npmignore package.json README.md LICENSE dist/src/ && cd dist/src && npm publish --access public",
17
19
  "release": "npx release-it --ci",
18
20
  "stats": "ts-node src/cli/statistics-app.ts",
@@ -94,7 +96,7 @@
94
96
  ],
95
97
  "entryPoints": [
96
98
  "src",
97
- "test/helper"
99
+ "test/functionality/_helper"
98
100
  ],
99
101
  "entryPointStrategy": "expand",
100
102
  "exclude": [
@@ -1,7 +1,7 @@
1
1
  declare const enum RequiredFeature {
2
- /** https://github.com/Code-Inspect/flowr/labels/typing */
2
+ /** https://github.com/flowr-analysis/flowr/labels/typing */
3
3
  Typing = 0,
4
- /** https://github.com/Code-Inspect/flowr/labels/abstract%20interpretation */
4
+ /** https://github.com/flowr-analysis/flowr/labels/abstract%20interpretation */
5
5
  AbstractInterpretation = 1
6
6
  }
7
7
  export interface FlowrCapability {