@eagleoutice/flowr 2.1.4 → 2.1.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -4,9 +4,11 @@ import { PipelineStepStage } from '../../pipeline-step';
  import type { IdGenerator } from '../../../../r-bridge/lang-4.x/ast/model/processing/decorate';
  import type { NoInfo } from '../../../../r-bridge/lang-4.x/ast/model/model';
  import type { ParseStepOutput } from './00-parse';
+ import type { RParseRequests } from '../../../../r-bridge/retriever';
  export interface NormalizeRequiredInput {
  /** This id generator is only necessary if you want to retrieve a dataflow from the parsed R AST, it determines the id generator to use and by default uses the {@link deterministicCountingIdGenerator}*/
  readonly getId?: IdGenerator<NoInfo>;
+ readonly request: RParseRequests;
  }
  declare function processor(results: {
  parse?: ParseStepOutput;
@@ -4,8 +4,19 @@ exports.NORMALIZE = void 0;
  const print_1 = require("../../../print/print");
  const normalize_printer_1 = require("../../../print/normalize-printer");
  const parser_1 = require("../../../../r-bridge/lang-4.x/ast/parser/json/parser");
+ function getCurrentRequestFile(request) {
+ if (request === undefined) {
+ return undefined;
+ }
+ else if (Array.isArray(request)) {
+ return request[0].request === 'file' ? request[0].content : undefined;
+ }
+ else {
+ return request.request === 'file' ? request.content : undefined;
+ }
+ }
  function processor(results, input) {
- return (0, parser_1.normalize)(results.parse, input.getId);
+ return (0, parser_1.normalize)(results.parse, input.getId, getCurrentRequestFile(input.request));
  }
  exports.NORMALIZE = {
  name: 'normalize',
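
Note: the normalize step now forwards the originating file of the current request into normalization. A typed sketch of the helper above, assuming `RParseRequest` is exported from the retriever module alongside `RParseRequests`; everything else mirrors the compiled JavaScript in this hunk.

```typescript
import type { RParseRequest, RParseRequests } from '../../../../r-bridge/retriever';

// returns the file path of the (first) request if it refers to a file, otherwise undefined
function getCurrentRequestFile(request?: RParseRequests): string | undefined {
	if(request === undefined) {
		return undefined;
	}
	const first = (Array.isArray(request) ? request[0] : request) as RParseRequest;
	return first.request === 'file' ? first.content : undefined;
}
```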
@@ -6,7 +6,7 @@ export type Identifier = string & {
  };
  /**
  * Each reference only has exactly one reference type, stored as the respective number.
- * However, wenn checking we may want to allow for one of several types,
+ * However, when checking we may want to allow for one of several types,
  * allowing the combination of the respective bitmasks.
  */
  export declare enum ReferenceType {
@@ -4,7 +4,7 @@ exports.ReferenceTypeReverseMapping = exports.ReferenceType = void 0;
  exports.isReferenceType = isReferenceType;
  /**
  * Each reference only has exactly one reference type, stored as the respective number.
- * However, wenn checking we may want to allow for one of several types,
+ * However, when checking we may want to allow for one of several types,
  * allowing the combination of the respective bitmasks.
  */
  var ReferenceType;
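
Note: the corrected comment describes testing a reference against several allowed types at once by OR-ing their bitmasks. A self-contained sketch of that pattern; the enum members and bit values are illustrative assumptions, not the package's actual `ReferenceType` values.

```typescript
// illustrative only: member names and bit values are assumptions
enum RefKind { Function = 1, Variable = 2, Constant = 4 }

// several acceptable kinds are OR-ed into one bitmask; membership is a bitwise AND test
const allowed = RefKind.Function | RefKind.Variable;
const isAllowed = (kind: RefKind): boolean => (kind & allowed) !== 0;

console.log(isAllowed(RefKind.Variable)); // true
console.log(isAllowed(RefKind.Constant)); // false
```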
@@ -67,7 +67,7 @@ function sourceRequest(rootId, request, data, information, getId) {
  let dataflow;
  try {
  const parsed = (0, retriever_1.retrieveParseDataFromRCode)(request, executor);
- normalized = (0, parser_1.normalize)({ parsed }, getId);
+ normalized = (0, parser_1.normalize)({ parsed }, getId, request.request === 'file' ? request.content : undefined);
  dataflow = (0, processor_1.processDataflowFor)(normalized.ast, {
  ...data,
  currentRequest: request,
@@ -81,7 +81,9 @@ function sourceRequest(rootId, request, data, information, getId) {
  return information;
  }
  // take the entry point as well as all the written references, and give them a control dependency to the source call to show that they are conditional
- dataflow.graph.addControlDependency(dataflow.entryPoint, rootId);
+ if (dataflow.graph.hasVertex(dataflow.entryPoint)) {
+ dataflow.graph.addControlDependency(dataflow.entryPoint, rootId);
+ }
  for (const out of dataflow.out) {
  dataflow.graph.addControlDependency(out.nodeId, rootId);
  }
@@ -1,6 +1,9 @@
  export declare const FlowrGithubBaseRef = "https://github.com/flowr-analysis";
+ export declare const FlowrSiteBaseRef = "https://flowr-analysis.github.io/flowr";
  export declare const RemoteFlowrFilePathBaseRef = "https://github.com/flowr-analysis/flowr/tree/main/";
  export declare const FlowrWikiBaseRef = "https://github.com/flowr-analysis/flowr/wiki/";
  export declare const FlowrNpmRef = "https://www.npmjs.com/package/@eagleoutice/flowr";
+ export declare const FlowrDockerRef = "https://hub.docker.com/r/eagleoutice/flowr";
+ export declare const FlowrCodecovRef = "https://app.codecov.io/gh/flowr-analysis/flowr";
  export declare function getFilePathMd(path: string): string;
  export declare function getFileContentFromRoot(path: string): string;
@@ -3,18 +3,23 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
  return (mod && mod.__esModule) ? mod : { "default": mod };
  };
  Object.defineProperty(exports, "__esModule", { value: true });
- exports.FlowrNpmRef = exports.FlowrWikiBaseRef = exports.RemoteFlowrFilePathBaseRef = exports.FlowrGithubBaseRef = void 0;
+ exports.FlowrCodecovRef = exports.FlowrDockerRef = exports.FlowrNpmRef = exports.FlowrWikiBaseRef = exports.RemoteFlowrFilePathBaseRef = exports.FlowrSiteBaseRef = exports.FlowrGithubBaseRef = void 0;
  exports.getFilePathMd = getFilePathMd;
  exports.getFileContentFromRoot = getFileContentFromRoot;
  const fs_1 = __importDefault(require("fs"));
  exports.FlowrGithubBaseRef = 'https://github.com/flowr-analysis';
+ exports.FlowrSiteBaseRef = 'https://flowr-analysis.github.io/flowr';
  exports.RemoteFlowrFilePathBaseRef = `${exports.FlowrGithubBaseRef}/flowr/tree/main/`;
  exports.FlowrWikiBaseRef = `${exports.FlowrGithubBaseRef}/flowr/wiki/`;
  exports.FlowrNpmRef = 'https://www.npmjs.com/package/@eagleoutice/flowr';
+ exports.FlowrDockerRef = 'https://hub.docker.com/r/eagleoutice/flowr';
+ exports.FlowrCodecovRef = 'https://app.codecov.io/gh/flowr-analysis/flowr';
  function getFilePathMd(path) {
  // we go one up as we are in doc-util now :D #convenience
  const fullpath = require.resolve('../' + path);
- const relative = fullpath.replace(process.cwd(), '.');
+ // normalize path separators so that this is consistent when testing on windows
+ const cwd = process.cwd().replaceAll('\\', '/');
+ const relative = fullpath.replaceAll('\\', '/').replace(cwd, '.');
  /* remove project prefix */
  return `[\`${relative}\`](${exports.RemoteFlowrFilePathBaseRef}${relative})`;
  }
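
Note: the change above normalizes path separators so the generated Markdown link stays stable on Windows. A small standalone sketch of the same idea; `toProjectRelative` is a hypothetical name, and `String.prototype.replaceAll` needs Node 15+.

```typescript
// turn an absolute, possibly backslash-separated path into a './'-prefixed project-relative path
function toProjectRelative(fullpath: string, cwd: string = process.cwd()): string {
	const normalizedCwd = cwd.replaceAll('\\', '/');
	return fullpath.replaceAll('\\', '/').replace(normalizedCwd, '.');
}

// e.g. toProjectRelative('C:\\repo\\flowr\\src\\index.ts', 'C:\\repo\\flowr') === './src/index.ts'
```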
@@ -0,0 +1 @@
+ export {};
@@ -0,0 +1,195 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ const log_1 = require("../../test/functionality/_helper/log");
+ const doc_code_1 = require("./doc-util/doc-code");
+ const doc_files_1 = require("./doc-util/doc-files");
+ function getText() {
+ return `
+ For the latest code-coverage information, see [codecov.io](${doc_files_1.FlowrCodecovRef}),
+ for the latest benchmark results, see the [benchmark results](${doc_files_1.FlowrSiteBaseRef}/wiki/stats/benchmark) wiki page.
+
+ - [Testing Suites](#testing-suites)
+ - [Functionality Tests](#functionality-tests)
+ - [Test Structure](#test-structure)
+ - [Writing a Test](#writing-a-test)
+ - [Running Only Some Tests](#running-only-some-tests)
+ - [Performance Tests](#performance-tests)
+ - [Oh no, the tests are slow](#oh-no-the-tests-are-slow)
+ - [Testing Within Your IDE](#testing-within-your-ide)
+ - [Using Visual Studio Code](#vs-code)
+ - [Using WebStorm](#webstorm)
+ - [CI Pipeline](#ci-pipeline)
+ - [Linting](#linting)
+ - [Oh no, the linter fails](#oh-no-the-linter-fails)
+ - [License Checker](#license-checker)
+
+ ## Testing Suites
+
+ Currently, flowR contains two testing suites: one for [functionality](#functionality-tests) and one for [performance](#performance-tests). We explain each of them in the following.
+ In addition to running those tests, you can use the more generalized \`npm run checkup\`. This will include the construction of the docker image, the generation of the wiki pages, and the linter.
+
+ ### Functionality Tests
+
+ The functionality tests represent conventional unit (and depending on your terminology component/api) tests.
+ We use [vitest](https://vitest.dev/) as our testing framework.
+ You can run the tests by issuing:
+
+ ${(0, doc_code_1.codeBlock)('shell', 'npm run test')}
+
+ Within the commandline,
+ this should automatically drop you into a watch mode which will automatically re-run the tests if you change the code.
+ If, at any time there are too many errors, you can use \`--bail=<value>\` to stop the tests after a certain number of errors.
+ For example:
+
+ ${(0, doc_code_1.codeBlock)('shell', 'npm run test -- --bail=1')}
+
+ If you want to run the tests without the watch mode, you can use:
+
+ ${(0, doc_code_1.codeBlock)('shell', 'npm run test -- --no-watch')}
+
+ To run all tests, including a coverage report and label summary, run:
+
+ ${(0, doc_code_1.codeBlock)('shell', 'npm run test-full')}
+
+ However, depending on your local R version, your network connection and potentially other factors, some tests may be skipped automatically as they don't apply to your current system setup
+ (or can't be tested with the current prerequisites).
+ Each test can specify such requirements as part of the \`TestConfiguration\`, which is then used in the \`test.skipIf\` function of _vitest_.
+ It is up to the [ci](#ci-pipeline) to run the tests on different systems to ensure that those tests are ensured to run.
+
+ #### Test Structure
+
+ All functionality tests are to be located under [test/functionality](${doc_files_1.RemoteFlowrFilePathBaseRef}test/functionality).
+
+ This folder contains three special and important elements:
+
+ - \`test-setup\` which is the entry point if *all* tests are run. It should automatically disable logging statements and configure global variables (e.g., if installation tests should run).
+ - \`_helper\` which contains helper functions to be used by other tests.
+ - \`test-summary\` which may produce a summary of the covered capabilities.
+
+ We name all tests using the \`.test.ts\` suffix and try to run them in parallel.
+ Whenever this is not possible (e.g., when using \`withShell\`), please use \`describe.sequential\` to disable parallel execution for the respective test.
+
+ #### Writing a Test
+
+ Currently, this is heavily dependent on what you want to test (normalization, dataflow, quad-export, ...)
+ and it is probably best to have a look at existing tests in that area to get an idea of what comfort functionality is available.
+
+ Generally, tests should be [labeled](${doc_files_1.RemoteFlowrFilePathBaseRef}test/functionality/_helper/label.ts) according to the *flowR* capabilities they test. The set of currently supported capabilities and their IDs can be found in ${(0, doc_files_1.getFilePathMd)('../r-bridge/data/data.ts')}. The resulting labels are used in the test report that is generated as part of the test output. They group tests by the capabilities they test and allow the report to display how many tests ensure that any given capability is properly supported.
+
+ Various helper functions are available to ease in writing tests with common behaviors, like testing for dataflow, slicing or query results. These can be found in [the \`_helper\` subdirectory](${doc_files_1.RemoteFlowrFilePathBaseRef}test/functionality/_helper).
+
+ For example, an [existing test](${doc_files_1.RemoteFlowrFilePathBaseRef}test/functionality/dataflow/processing-of-elements/atomic/dataflow-atomic.test.ts) that tests the dataflow graph of a simple variable looks like this:
+ ${(0, doc_code_1.codeBlock)('typescript', `
+ assertDataflow(label('simple variable', ['name-normal']), shell,
+ 'x', emptyGraph().use('0', 'x')
+ );
+ `)}
+
+ When writing dataflow tests, additional settings can be used to reduce the amount of graph data that needs to be pre-written. Notably:
+ - \`expectIsSubgraph\` indicates that the expected graph is a subgraph, rather than the full graph that the test should generate. The test will then only check if the supplied graph is contained in the result graph, rather than an exact match.
+ - \`resolveIdsAsCriterion\` indicates that the ids given in the expected (sub)graph should be resolved as [slicing criteria](${doc_files_1.FlowrWikiBaseRef}/Terminology#slicing-criterion) rather than actual ids. For example, passing \`12@a\` as an id in the expected (sub)graph will cause it to be resolved as the corresponding id.
+
+ The following example shows both in use.
+ ${(0, doc_code_1.codeBlock)('typescript', `
+ assertDataflow(label('without distractors', [...OperatorDatabase['<-'].capabilities, 'numbers', 'name-normal', 'newlines', 'name-escaped']),
+ shell, '\`a\` <- 2\\na',
+ emptyGraph()
+ .use('2@a')
+ .reads('2@a', '1@\`a\`'),
+ {
+ expectIsSubgraph: true,
+ resolveIdsAsCriterion: true
+ }
+ );
+ `)}
+
+ #### Running Only Some Tests
+
+ To run only some tests, vitest allows you to [filter](https://vitest.dev/guide/filtering.html) tests.
+ Besides, you can use the watch mode (with \`npm run test\`) to only run tests that are affected by your changes.
+
+ ### Performance Tests
+
+ The performance test suite of *flowR* uses several suites to check for variations in the required times for certain steps.
+ Although we measure wall time in the CI (which is subject to rather large variations), it should give a rough idea of the performance of *flowR*.
+ Furthermore, the respective scripts can be used locally as well.
+ To run them, issue:
+
+ ${(0, doc_code_1.codeBlock)('shell', 'npm run performance-test')}
+
+ See [test/performance](${doc_files_1.RemoteFlowrFilePathBaseRef}test/performance) for more information on the suites, how to run them, and their results. If you are interested in the results of the benchmarks, see [here](${doc_files_1.FlowrSiteBaseRef}/wiki/stats/benchmark).
+
+
+ ### Testing Within Your IDE
+
+ #### VS Code
+
+ Using the vitest Extension for Visual Studio Code, you can start tests directly from the definition and explore your suite in the Testing tab.
+ To get started, install the [vitest Extension](https://marketplace.visualstudio.com/items?itemName=vitest.explorer).
+
+ ![vscode market place](img/vs-code-vitest.png)
+
+ | Testing Tab | In Code |
+ |:---------------------------------------:|:-------------------------------------:|
+ | ![testing tab](img/testing-vs-code.png) | ![in code](img/testing-vs-code-2.png) |
+
+ - Left-clicking the <img style="vertical-align: middle" src='img/circle-check-regular.svg' height='16pt'> or <img style="vertical-align: middle" src='img/circle-xmark-regular.svg' height='16pt'> Icon next to the code will rerun the test. Right-clicking will open a context menu, allowing you to debug the test.
+ - In the Testing tab, you can run (and debug) all tests, individual suites or individual tests.
+
+ #### Webstorm
+
+ Please follow the official guide [here](https://www.jetbrains.com/help/webstorm/vitest.html).
+
+
+ ## CI Pipeline
+
+ We have several workflows defined in [.github/workflows](../.github/workflows/).
+ We explain the most important workflows in the following:
+
+ - [qa.yaml](../.github/workflows/qa.yaml) is the main workflow that will run different steps depending on several factors. It is responsible for:
+ - running the [functionality](#functionality-tests) and [performance tests](#performance-tests)
+ - uploading the results to the [benchmark page](${doc_files_1.FlowrSiteBaseRef}/wiki/stats/benchmark) for releases
+ - running the [functionality tests](#functionality-tests) on different operating systems (Windows, macOS, Linux) and with different versions of R
+ - reporting code coverage
+ - running the [linter](#linting) and reporting its results
+ - deploying the documentation to [GitHub Pages](${doc_files_1.FlowrSiteBaseRef}/doc/)
+ - [release.yaml](../.github/workflows/release.yaml) is responsible for creating a new release, only to be run by repository owners. Furthermore, it adds the new docker image to [docker hub](${doc_files_1.FlowrDockerRef}).
+ - [broken-links-and-wiki.yaml](../.github/workflows/broken-links-and-wiki.yaml) repeatedly tests that all links are not dead!
+
+ ## Linting
+
+ There are two linting scripts.
+ The main one:
+
+ ${(0, doc_code_1.codeBlock)('shell', 'npm run lint')}
+
+ And a weaker version of the first (allowing for *todo* comments) which is run automatically in the [pre-push githook](../.githooks/pre-push) as explained in the [CONTRIBUTING.md](../.github/CONTRIBUTING.md):
+
+ ${(0, doc_code_1.codeBlock)('shell', 'npm run lint-local')}
+
+ Besides checking coding style (as defined in the [package.json](../package.json)), the *full* linter runs the [license checker](#license-checker).
+
+ In case you are unaware,
+ eslint can automatically fix several linting problems[](https://eslint.org/docs/latest/use/command-line-interface#fix-problems).
+ So you may be fine by just running:
+
+ ${(0, doc_code_1.codeBlock)('shell', 'npm run lint-local -- --fix')}
+
+ ### Oh no, the linter fails
+
+ By now, the rules should be rather stable and so, if the linter fails,
+ it is usually best if you (if necessary) read the respective description and fix the respective problem.
+ Rules in this project cover general JavaScript issues [using regular ESLint](https://eslint.org/docs/latest/rules), TypeScript-specific issues [using typescript-eslint](https://typescript-eslint.io/rules/), and code formatting [with ESLint Stylistic](https://eslint.style/packages/default#rules).
+
+ However, in case you think that the linter is wrong, please do not hesitate to open a [new issue](${doc_files_1.FlowrGithubBaseRef}/flowr/issues/new/choose).
+
+ ### License Checker
+
+ *flowR* is licensed under the [GPLv3 License](${doc_files_1.FlowrGithubBaseRef}/flowr/blob/main/LICENSE) requiring us to only rely on [compatible licenses](https://www.gnu.org/licenses/license-list.en.html). For now, this list is hardcoded as part of the npm [\`license-compat\`](../package.json) script so it can very well be that a new dependency you add causes the checker to fail &mdash; *even though it is compatible*. In that case, please either open a [new issue](${doc_files_1.FlowrGithubBaseRef}/flowr/issues/new/choose) or directly add the license to the list (including a reference to why it is compatible).
+ `;
+ }
+ if (require.main === module) {
+ (0, log_1.setMinLevelOfAllLogs)(6 /* LogLevel.Fatal */);
+ console.log(getText());
+ }
+ //# sourceMappingURL=print-linting-and-testing-wiki.js.map
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@eagleoutice/flowr",
- "version": "2.1.4",
+ "version": "2.1.6",
  "description": "Static Dataflow Analyzer and Program Slicer for the R Programming Language",
  "types": "dist/src/index.d.ts",
  "repository": {
@@ -28,16 +28,18 @@
  "wiki:df-graph": "ts-node src/documentation/print-dataflow-graph-wiki.ts",
  "wiki:normalized-ast": "ts-node src/documentation/print-normalized-ast-wiki.ts",
  "wiki:query-api": "ts-node src/documentation/print-query-wiki.ts",
+ "wiki:linting-and-testing": "ts-node src/documentation/print-linting-and-testing-wiki.ts",
  "wiki:interface": "ts-node src/documentation/print-interface-wiki.ts",
  "build": "tsc --project .",
- "build:bundle-flowr": "npm run build && esbuild --bundle dist/src/cli/flowr.js --platform=node --bundle --minify --target=node18 --outfile=dist/src/cli/flowr.min.js",
+ "build:bundle-flowr": "npm run build && esbuild --bundle dist/src/cli/flowr.js --platform=node --bundle --minify --target=node22 --outfile=dist/src/cli/flowr.min.js",
  "lint-local": "npx eslint --version && npx eslint src/ test/ --rule \"no-warning-comments: off\"",
  "lint": "npm run license-compat -- --summary && npx eslint --version && npx eslint src/ test/",
  "license-compat": "license-checker --onlyAllow 'MIT;MIT OR X11;GPLv2;LGPL;GNUGPL;ISC;Apache-2.0;FreeBSD;BSD-2-Clause;clearbsd;ModifiedBSD;BSD-3-Clause;Python-2.0;Unlicense;WTFPL;BlueOak-1.0.0;CC-BY-4.0;CC-BY-3.0;CC0-1.0;0BSD'",
  "doc": "typedoc",
- "test": "nyc --no-clean mocha",
+ "test": "vitest --config test/vitest.config.mts",
+ "test:coverage": "npm run test -- --coverage",
  "performance-test": "func() { cd test/performance/ && bash run-all-suites.sh $1 $2 $3; cd ../../; }; func",
- "test-full": "npm run test -- --test-installation",
+ "test-full": "npm run test:coverage -- --no-watch -- --make-summary --test-installation",
  "detect-circular-deps": "npx madge --extensions ts,tsx --circular src/",
  "checkup": "npm run flowr -- --execute \":version\" && npm run lint && npm run test-full -- --forbid-only && docker build -t test-flowr -f scripts/Dockerfile . && npm run doc && npm-run-all wiki:*"
  },
@@ -51,33 +53,6 @@
  ],
  "author": "Florian Sihler",
  "license": "ISC",
- "mocha": {
- "require": "ts-node/register",
- "timeout": 60000,
- "spec": "test/**/*.spec.ts",
- "source-map": true,
- "recursive": true,
- "exit": true,
- "produce-source-map": true
- },
- "nyc": {
- "all": true,
- "per-file": true,
- "check-coverage": false,
- "skip-full": true,
- "lines": 70,
- "extension": [
- ".ts"
- ],
- "include": [
- "src/**/*.ts"
- ],
- "reporter": [
- "text",
- "lcov",
- "cobertura"
- ]
- },
  "eslintConfig": {
  "settings": {
  "import/resolver": {
@@ -192,11 +167,8 @@
  "@commitlint/config-angular": "^19.3.0",
  "@eagleoutice/eslint-config-flowr": "^1.0.14",
  "@j-ulrich/release-it-regex-bumper": "^5.1.0",
- "@types/chai": "^4.3.16",
- "@types/chai-as-promised": "^7.1.8",
  "@types/command-line-args": "^5.2.3",
  "@types/command-line-usage": "^5.0.4",
- "@types/mocha": "^10.0.6",
  "@types/n-readlines": "^1.0.6",
  "@types/n3": "^1.16.4",
  "@types/object-hash": "^3.0.6",
@@ -204,21 +176,19 @@
  "@types/tmp": "^0.2.6",
  "@types/ws": "^8.5.10",
  "@typescript-eslint/eslint-plugin": "^7.8.0",
- "chai": "^4.3.16",
- "chai-as-promised": "^7.1.1",
+ "@vitest/coverage-v8": "^2.1.4",
  "esbuild": "^0.23.1",
  "eslint": "^8.57.1",
  "license-checker": "^25.0.1",
- "mocha": "^10.7.3",
- "mocha-multi-reporters": "^1.5.1",
- "nyc": "^17.0.0",
+ "npm-run-all": "^4.1.5",
  "release-it": "^17.6.0",
  "ts-node": "^10.9.2",
  "typedoc": "^0.26.7",
  "typedoc-plugin-missing-exports": "^3.0.0",
  "typedoc-theme-hierarchy": "^5.0.3",
  "typedoc-umlclass": "^0.10.0",
- "typescript": "^5.6.2"
+ "typescript": "^5.6.2",
+ "vitest": "^2.1.4"
  },
  "dependencies": {
  "@xmldom/xmldom": "^0.9.2",
@@ -74,7 +74,7 @@ function makeCallContextQuery(functions, kind) {
  return functions.map(f => ({
  type: 'call-context',
  callName: f.name,
- includeAliases: true,
+ includeAliases: false,
  callNameExact: true,
  subkind: f.name,
  linkTo: f.linkTo ? { type: 'link-to-last-call', callName: f.linkTo } : undefined,
@@ -48,6 +48,10 @@ interface Source {
  * This allows inline-comments!
  */
  additionalTokens?: OtherInfoNode[];
+ /**
+ * The file in which the respective node is located
+ */
+ file?: string;
  }
  /**
  * Provides the common base of all {@link RNode|RNodes}.
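
Note: decorated nodes can now carry the file they originate from. A minimal consumer sketch, assuming the `NormalizedAst` produced by decoration exposes the `idMap` populated in `decorateAst` (see the hunks below); the import path is a guess.

```typescript
import type { NormalizedAst } from './lang-4.x/ast/model/processing/decorate'; // path assumed

// look up the originating file of a node by id; undefined for ASTs normalized from inline text
function fileOfNode(ast: NormalizedAst, id: string | number): string | undefined {
	return ast.idMap.get(id)?.info.file;
}
```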
@@ -75,14 +75,21 @@ export interface NormalizedAst<OtherInfo = ParentInformation, Node = RNode<Other
  /** The root of the AST with parent information */
  ast: Node;
  }
+ export interface NormalizedAstDecorationConfiguration<OtherInfo> {
+ /** The id generator: must generate a unique id für each passed node */
+ getId?: IdGenerator<OtherInfo>;
+ /** the path to the file this AST was extracted from will be added to the nodes */
+ file?: string;
+ }
  /**
  * Covert the given AST into a doubly linked tree while assigning ids (so it stays serializable).
  *
  * @param ast - The root of the AST to convert
  * @param getId - The id generator: must generate a unique id für each passed node
+ * @param file - the path to the file this AST was extracted from will be added to the nodes
  *
  * @typeParam OtherInfo - The original decoration of the ast nodes (probably is nothing as the id decoration is most likely the first step to be performed after extraction)
  *
  * @returns A decorated AST based on the input and the id provider.
  */
- export declare function decorateAst<OtherInfo = NoInfo>(ast: RNode<OtherInfo>, getId?: IdGenerator<OtherInfo>): NormalizedAst<OtherInfo & ParentInformation>;
+ export declare function decorateAst<OtherInfo = NoInfo>(ast: RNode<OtherInfo>, { getId, file }: NormalizedAstDecorationConfiguration<OtherInfo>): NormalizedAst<OtherInfo & ParentInformation>;
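
Note: `decorateAst` now takes a configuration object instead of a bare id generator. A hedged migration sketch of a call site; the import paths are assumptions, and the file path is only an example.

```typescript
import { decorateAst, deterministicCountingIdGenerator } from './lang-4.x/ast/model/processing/decorate'; // path assumed
import type { RNode } from './lang-4.x/ast/model/model'; // path assumed

function decorateWithFile(ast: RNode, file?: string) {
	// 2.1.4 call shape: decorateAst(ast, deterministicCountingIdGenerator(0))
	// 2.1.6 call shape: a configuration object, optionally tagging every node with its source file
	return decorateAst(ast, { getId: deterministicCountingIdGenerator(0), file });
}
```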
@@ -69,14 +69,15 @@ const nestForElement = new Set([
  *
  * @param ast - The root of the AST to convert
  * @param getId - The id generator: must generate a unique id für each passed node
+ * @param file - the path to the file this AST was extracted from will be added to the nodes
  *
  * @typeParam OtherInfo - The original decoration of the ast nodes (probably is nothing as the id decoration is most likely the first step to be performed after extraction)
  *
  * @returns A decorated AST based on the input and the id provider.
  */
- function decorateAst(ast, getId = deterministicCountingIdGenerator(0)) {
+ function decorateAst(ast, { getId = deterministicCountingIdGenerator(0), file }) {
  const idMap = new bimap_1.BiMap();
- const info = { idMap, getId };
+ const info = { idMap, getId, file };
  /* Please note, that all fold processors do not re-create copies in higher-folding steps so that the idMap stays intact. */
  const foldLeaf = createFoldForLeaf(info);
  const foldBinaryOp = createFoldForBinaryOp(info);
@@ -139,6 +140,7 @@ function createFoldForLeaf(info) {
  nesting
  }
  };
+ decorated.info.file = info.file;
  info.idMap.set(id, decorated);
  return decorated;
  };
@@ -161,6 +163,7 @@ function createFoldForBinaryOp(info) {
  lhsInfo.role = "binop-lhs" /* RoleInParent.BinaryOperationLhs */;
  rhsInfo.role = "binop-rhs" /* RoleInParent.BinaryOperationRhs */;
  }
+ decorated.info.file = info.file;
  return decorated;
  };
  }
@@ -172,6 +175,7 @@ function createFoldForUnaryOp(info) {
  const opInfo = operand.info;
  opInfo.parent = id;
  opInfo.role = "unary-operand" /* RoleInParent.UnaryOperand */;
+ decorated.info.file = info.file;
  return decorated;
  };
  }
@@ -195,6 +199,7 @@ function createFoldForAccess(info) {
  }
  }
  }
+ decorated.info.file = info.file;
  return decorated;
  };
  }
@@ -214,6 +219,7 @@ function createFoldForForLoop(info) {
  bodyInfo.parent = id;
  bodyInfo.index = 2;
  bodyInfo.role = "for-body" /* RoleInParent.ForBody */;
+ decorated.info.file = info.file;
  return decorated;
  };
  }
@@ -225,6 +231,7 @@ function createFoldForRepeatLoop(info) {
  const bodyInfo = body.info;
  bodyInfo.parent = id;
  bodyInfo.role = "repeat-body" /* RoleInParent.RepeatBody */;
+ decorated.info.file = info.file;
  return decorated;
  };
  }
@@ -240,6 +247,7 @@ function createFoldForWhileLoop(info) {
  bodyInfo.parent = id;
  bodyInfo.index = 1;
  bodyInfo.role = "while-body" /* RoleInParent.WhileBody */;
+ decorated.info.file = info.file;
  return decorated;
  };
  }
@@ -261,6 +269,7 @@ function createFoldForIfThenElse(info) {
  otherwiseInfo.index = 2;
  otherwiseInfo.role = "if-otherwise" /* RoleInParent.IfOtherwise */;
  }
+ decorated.info.file = info.file;
  return decorated;
  };
  }
@@ -276,6 +285,7 @@ function createFoldForExprList(info) {
  childInfo.index = i++;
  childInfo.role = "expr-list-child" /* RoleInParent.ExpressionListChild */;
  }
+ decorated.info.file = info.file;
  return decorated;
  };
  }
@@ -303,6 +313,7 @@ function createFoldForFunctionCall(info) {
  argInfo.role = "call-argument" /* RoleInParent.FunctionCallArgument */;
  }
  }
+ decorated.info.file = info.file;
  return decorated;
  };
  }
@@ -322,6 +333,7 @@ function createFoldForFunctionDefinition(info) {
  bodyInfo.parent = id;
  bodyInfo.index = idx;
  bodyInfo.role = "function-def-body" /* RoleInParent.FunctionDefinitionBody */;
+ decorated.info.file = info.file;
  return decorated;
  };
  }
@@ -339,6 +351,7 @@ function createFoldForFunctionParameter(info) {
  defaultInfo.index = 1;
  defaultInfo.role = "param-value" /* RoleInParent.ParameterDefaultValue */;
  }
+ decorated.info.file = info.file;
  return decorated;
  };
  }
@@ -360,6 +373,7 @@ function createFoldForFunctionArgument(info) {
  valueInfo.index = idx;
  valueInfo.role = "arg-value" /* RoleInParent.ArgumentValue */;
  }
+ decorated.info.file = info.file;
  return decorated;
  };
  }
@@ -2,4 +2,7 @@ import type { IdGenerator, NormalizedAst } from '../../model/processing/decorate
  import type { NoInfo } from '../../model/model';
  import type { ParseStepOutput } from '../../../../../core/steps/all/core/00-parse';
  export declare const parseLog: import("tslog").Logger<import("tslog").ILogObj>;
- export declare function normalize({ parsed }: ParseStepOutput, getId?: IdGenerator<NoInfo>): NormalizedAst;
+ /**
+ * Take the output as produced by the parse step and normalize the AST from the R parser.
+ */
+ export declare function normalize({ parsed }: ParseStepOutput, getId?: IdGenerator<NoInfo>, file?: string): NormalizedAst;
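
Note: `normalize` gained an optional third parameter carrying the source file path. A hedged usage sketch; the import paths are assumptions and the parse output comes from the preceding parse step.

```typescript
import { normalize } from './lang-4.x/ast/parser/json/parser'; // path assumed
import type { ParseStepOutput } from './core/steps/all/core/00-parse'; // path assumed

function normalizeWithFile(parse: ParseStepOutput, file?: string) {
	// the third argument is new in 2.1.6; omitting it keeps the 2.1.4 behaviour (no info.file on nodes)
	return normalize(parse, undefined, file);
}
```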
@@ -7,9 +7,12 @@ const log_1 = require("../../../../../util/log");
  const decorate_1 = require("../../model/processing/decorate");
  const normalize_root_1 = require("../main/internal/structure/normalize-root");
  exports.parseLog = log_1.log.getSubLogger({ name: 'ast-parser' });
- function normalize({ parsed }, getId = (0, decorate_1.deterministicCountingIdGenerator)(0)) {
+ /**
+ * Take the output as produced by the parse step and normalize the AST from the R parser.
+ */
+ function normalize({ parsed }, getId = (0, decorate_1.deterministicCountingIdGenerator)(0), file) {
  const data = { currentRange: undefined, currentLexeme: undefined };
  const object = (0, format_1.convertPreparedParsedData)((0, format_1.prepareParsedData)(parsed));
- return (0, decorate_1.decorateAst)((0, normalize_root_1.normalizeRootObjToAst)(data, object), getId);
+ return (0, decorate_1.decorateAst)((0, normalize_root_1.normalizeRootObjToAst)(data, object), { getId, file });
  }
  //# sourceMappingURL=parser.js.map
@@ -35,7 +35,7 @@ export type RParseRequests = RParseRequest | ReadonlyArray<RParseRequest>;
  export declare function requestFromInput(input: `${typeof fileProtocol}${string}`): RParseRequestFromFile;
  export declare function requestFromInput(input: `${typeof fileProtocol}${string}`[]): RParseRequestFromFile[];
  export declare function requestFromInput(input: string): RParseRequestFromText;
- export declare function requestFromInput(input: readonly string[]): RParseRequests;
+ export declare function requestFromInput(input: readonly string[] | string): RParseRequests;
  export declare function requestProviderFromFile(): RParseRequestProvider;
  export declare function requestProviderFromText(text: Readonly<{
  [path: string]: string;
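
Note: the last overload now also accepts a single plain string. A hedged sketch of how the overloads resolve, based on the `fileProtocol` constant (`'file://'`) shown in the retriever below; the import path and file path are example assumptions.

```typescript
import { requestFromInput } from './r-bridge/retriever'; // path assumed

const text = requestFromInput('x <- 1');                  // single string → RParseRequestFromText
const file = requestFromInput('file:///tmp/analysis.R');  // fileProtocol-prefixed → RParseRequestFromFile (example path)
const many = requestFromInput(['x <- 1', 'y <- 2']);      // readonly string[] → RParseRequests
```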
@@ -20,6 +20,7 @@ const object_hash_1 = __importDefault(require("object-hash"));
  const parser_1 = require("./lang-4.x/ast/parser/json/parser");
  const init_1 = require("./init");
  const convert_values_1 = require("./lang-4.x/convert-values");
+ const decorate_1 = require("./lang-4.x/ast/model/processing/decorate");
  const type_1 = require("./lang-4.x/ast/model/type");
  exports.fileProtocol = 'file://';
  /**
@@ -92,7 +93,7 @@ function retrieveParseDataFromRCode(request, shell) {
  */
  async function retrieveNormalizedAstFromRCode(request, shell) {
  const data = await retrieveParseDataFromRCode(request, shell);
- return (0, parser_1.normalize)({ parsed: data });
+ return (0, parser_1.normalize)({ parsed: data }, (0, decorate_1.deterministicCountingIdGenerator)(0), request.request === 'file' ? request.content : undefined);
  }
  /**
  * If the string has (R-)quotes around it, they will be removed; otherwise the string is returned unchanged.
package/util/version.js CHANGED
@@ -3,7 +3,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
  exports.flowrVersion = flowrVersion;
  const semver_1 = require("semver");
  // this is automatically replaced with the current version by release-it
- const version = '2.1.4';
+ const version = '2.1.6';
  function flowrVersion() {
  return new semver_1.SemVer(version);
  }