@eagleoutice/flowr 2.2.2 → 2.2.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (68)
  1. package/cli/repl/commands/repl-dataflow.js +7 -4
  2. package/cli/repl/commands/repl-parse.js +43 -2
  3. package/cli/repl/print-version.d.ts +1 -0
  4. package/cli/repl/print-version.js +7 -2
  5. package/cli/repl/server/connection.js +10 -8
  6. package/core/pipeline-executor.d.ts +6 -0
  7. package/core/pipeline-executor.js +8 -0
  8. package/core/print/dataflow-printer.js +3 -0
  9. package/core/steps/all/core/01-parse-tree-sitter.d.ts +7 -0
  10. package/core/steps/pipeline/default-pipelines.d.ts +57 -47
  11. package/core/steps/pipeline/default-pipelines.js +23 -2
  12. package/core/steps/pipeline/pipeline.d.ts +1 -1
  13. package/core/steps/pipeline/pipeline.js +1 -1
  14. package/core/steps/pipeline-step.d.ts +1 -3
  15. package/dataflow/environments/resolve-by-name.d.ts +3 -2
  16. package/dataflow/environments/resolve-by-name.js +4 -4
  17. package/dataflow/extractor.d.ts +10 -0
  18. package/dataflow/extractor.js +10 -0
  19. package/dataflow/internal/process/functions/call/built-in/built-in-if-then-else.js +1 -1
  20. package/dataflow/internal/process/functions/call/built-in/built-in-source.js +20 -4
  21. package/documentation/doc-util/doc-dfg.d.ts +5 -3
  22. package/documentation/doc-util/doc-dfg.js +10 -8
  23. package/documentation/doc-util/doc-files.d.ts +1 -1
  24. package/documentation/doc-util/doc-files.js +1 -1
  25. package/documentation/doc-util/doc-normalized-ast.d.ts +2 -1
  26. package/documentation/doc-util/doc-normalized-ast.js +4 -5
  27. package/documentation/doc-util/doc-repl.d.ts +6 -2
  28. package/documentation/doc-util/doc-repl.js +10 -6
  29. package/documentation/doc-util/doc-structure.d.ts +1 -1
  30. package/documentation/doc-util/doc-types.d.ts +7 -5
  31. package/documentation/doc-util/doc-types.js +15 -10
  32. package/documentation/index.d.ts +9 -0
  33. package/documentation/index.js +26 -0
  34. package/documentation/print-capabilities-markdown.js +105 -19
  35. package/documentation/print-core-wiki.d.ts +1 -0
  36. package/documentation/print-core-wiki.js +406 -0
  37. package/documentation/print-dataflow-graph-wiki.js +27 -27
  38. package/documentation/print-interface-wiki.js +1 -1
  39. package/documentation/print-linting-and-testing-wiki.js +26 -8
  40. package/documentation/print-normalized-ast-wiki.js +22 -17
  41. package/documentation/print-query-wiki.js +7 -7
  42. package/documentation/print-search-wiki.js +2 -1
  43. package/package.json +3 -2
  44. package/queries/catalog/happens-before-query/happens-before-query-format.js +1 -1
  45. package/queries/catalog/resolve-value-query/resolve-value-query-executor.js +1 -1
  46. package/queries/catalog/resolve-value-query/resolve-value-query-format.js +1 -1
  47. package/queries/catalog/search-query/search-query-format.js +1 -1
  48. package/r-bridge/data/data.d.ts +48 -7
  49. package/r-bridge/data/data.js +62 -8
  50. package/r-bridge/data/types.d.ts +7 -1
  51. package/r-bridge/lang-4.x/ast/model/processing/decorate.d.ts +2 -0
  52. package/r-bridge/lang-4.x/ast/model/processing/node-id.js +2 -5
  53. package/r-bridge/lang-4.x/ast/parser/json/format.d.ts +6 -0
  54. package/r-bridge/lang-4.x/ast/parser/json/format.js +6 -0
  55. package/r-bridge/lang-4.x/ast/parser/json/parser.d.ts +13 -2
  56. package/r-bridge/lang-4.x/ast/parser/json/parser.js +19 -3
  57. package/r-bridge/lang-4.x/ast/parser/main/internal/structure/normalize-root.d.ts +3 -0
  58. package/r-bridge/lang-4.x/ast/parser/main/internal/structure/normalize-root.js +3 -0
  59. package/r-bridge/lang-4.x/tree-sitter/tree-sitter-normalize.js +6 -1
  60. package/r-bridge/parser.d.ts +10 -0
  61. package/r-bridge/parser.js +26 -2
  62. package/search/flowr-search-builder.d.ts +1 -2
  63. package/search/flowr-search-builder.js +1 -3
  64. package/util/mermaid/dfg.d.ts +3 -0
  65. package/util/mermaid/dfg.js +24 -8
  66. package/util/strings.d.ts +9 -0
  67. package/util/strings.js +14 -0
  68. package/util/version.js +1 -1
package/r-bridge/lang-4.x/ast/parser/json/format.d.ts CHANGED
@@ -36,6 +36,12 @@ export interface NamedJsonEntry {
  name: RawRType;
  content: JsonEntry;
  }
+ /**
+ * Takes the raw {@link RShell} output and extracts the csv information contained
+ */
  export declare function prepareParsedData(data: string): CsvEntry[];
+ /**
+ * Takes the CSV-Entries and maps them to the old json format for compatibility.
+ */
  export declare function convertPreparedParsedData(roots: readonly CsvEntry[]): JsonEntry;
  export {};
package/r-bridge/lang-4.x/ast/parser/json/format.js CHANGED
@@ -7,6 +7,9 @@ const retriever_1 = require("../../../../retriever");
  const assert_1 = require("../../../../../util/assert");
  const type_1 = require("../../model/type");
  exports.RootId = 0;
+ /**
+ * Takes the raw {@link RShell} output and extracts the csv information contained
+ */
  function prepareParsedData(data) {
  let json;
  try {
@@ -36,6 +36,9 @@ function prepareParsedData(data) {
  }
  return roots;
  }
+ /**
+ * Takes the CSV-Entries and maps them to the old json format for compatibility.
+ */
  function convertPreparedParsedData(roots) {
  const partialEntry = {
  token: type_1.RawRType.ExpressionList,
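For orientation, the two helpers documented in these hunks chain together roughly as in the sketch below; the deep import path simply mirrors the file location inside the published package and is an assumption, not a documented entry point.

```ts
// Sketch: import path mirrors the compiled file layout inside the package (assumption).
import { prepareParsedData, convertPreparedParsedData } from '@eagleoutice/flowr/r-bridge/lang-4.x/ast/parser/json/format';

declare const rawRShellOutput: string; // placeholder for the raw RShell parse output

const entries = prepareParsedData(rawRShellOutput);    // raw output -> CsvEntry[]
const legacyJson = convertPreparedParsedData(entries); // CsvEntry[] -> JsonEntry (old json format)
```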
package/r-bridge/lang-4.x/ast/parser/json/parser.d.ts CHANGED
@@ -1,10 +1,21 @@
  import type { IdGenerator, NormalizedAst } from '../../model/processing/decorate';
- import type { NoInfo } from '../../model/model';
+ import type { NoInfo, RNode } from '../../model/model';
  import type { ParseStepOutputTS } from '../../../../../core/steps/all/core/01-parse-tree-sitter';
  import type { ParseStepOutput } from '../../../../parser';
  export declare const parseLog: import("tslog").Logger<import("tslog").ILogObj>;
  /**
  * Take the output as produced by the parse step and normalize the AST from the R parser.
+ *
+ * @see {@link normalizeButNotDecorated} for a version that does not decorate the AST
+ * @see {@link normalizeTreeSitter} for a version that normalizes the AST from the TreeSitter parser
+ */
+ export declare function normalize(parsed: ParseStepOutput<string>, getId?: IdGenerator<NoInfo>, file?: string): NormalizedAst;
+ /**
+ * Take the output as produced by the parse step and normalize the AST from the R parser.
+ * For additional decoration with ${@link decorateAst} use {@link normalize}.
+ */
+ export declare function normalizeButNotDecorated({ parsed }: ParseStepOutput<string>): RNode;
+ /**
+ * Tree-Sitter pendant to {@link normalize}.
  */
- export declare function normalize({ parsed }: ParseStepOutput<string>, getId?: IdGenerator<NoInfo>, file?: string): NormalizedAst;
  export declare function normalizeTreeSitter({ parsed }: ParseStepOutputTS, getId?: IdGenerator<NoInfo>, file?: string): NormalizedAst;
package/r-bridge/lang-4.x/ast/parser/json/parser.js CHANGED
@@ -2,6 +2,7 @@
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.parseLog = void 0;
  exports.normalize = normalize;
+ exports.normalizeButNotDecorated = normalizeButNotDecorated;
  exports.normalizeTreeSitter = normalizeTreeSitter;
  const format_1 = require("./format");
  const log_1 = require("../../../../../util/log");
@@ -11,13 +12,28 @@ const tree_sitter_normalize_1 = require("../../../tree-sitter/tree-sitter-normal
  exports.parseLog = log_1.log.getSubLogger({ name: 'ast-parser' });
  /**
  * Take the output as produced by the parse step and normalize the AST from the R parser.
+ *
+ * @see {@link normalizeButNotDecorated} for a version that does not decorate the AST
+ * @see {@link normalizeTreeSitter} for a version that normalizes the AST from the TreeSitter parser
  */
- function normalize({ parsed }, getId = (0, decorate_1.deterministicCountingIdGenerator)(0), file) {
+ function normalize(parsed, getId = (0, decorate_1.deterministicCountingIdGenerator)(0), file) {
+ return (0, decorate_1.decorateAst)(normalizeButNotDecorated(parsed), { getId, file });
+ }
+ /**
+ * Take the output as produced by the parse step and normalize the AST from the R parser.
+ * For additional decoration with ${@link decorateAst} use {@link normalize}.
+ */
+ function normalizeButNotDecorated({ parsed }) {
  const data = { currentRange: undefined, currentLexeme: undefined };
  const object = (0, format_1.convertPreparedParsedData)((0, format_1.prepareParsedData)(parsed));
- return (0, decorate_1.decorateAst)((0, normalize_root_1.normalizeRootObjToAst)(data, object), { getId, file });
+ return (0, normalize_root_1.normalizeRootObjToAst)(data, object);
  }
+ /**
+ * Tree-Sitter pendant to {@link normalize}.
+ */
  function normalizeTreeSitter({ parsed }, getId = (0, decorate_1.deterministicCountingIdGenerator)(0), file) {
- return (0, decorate_1.decorateAst)((0, tree_sitter_normalize_1.normalizeTreeSitterTreeToAst)(parsed), { getId, file });
+ const result = (0, decorate_1.decorateAst)((0, tree_sitter_normalize_1.normalizeTreeSitterTreeToAst)(parsed), { getId, file });
+ result.hasError = parsed.rootNode.hasError;
+ return result;
  }
  //# sourceMappingURL=parser.js.map
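A rough usage sketch of the split introduced here; the deep import paths are assumptions based on the package file layout, and `parseOutput` is a placeholder for the R parse step result.

```ts
// Sketch: deep import paths are assumptions based on the package file layout.
import { normalize, normalizeButNotDecorated } from '@eagleoutice/flowr/r-bridge/lang-4.x/ast/parser/json/parser';
import type { ParseStepOutput } from '@eagleoutice/flowr/r-bridge/parser';

declare const parseOutput: ParseStepOutput<string>; // placeholder: result of the R parse step

const decorated = normalize(parseOutput);            // decorated, id-annotated NormalizedAst (as before)
const plain = normalizeButNotDecorated(parseOutput); // new in 2.2.3: undecorated RNode, skips decorateAst
```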
package/r-bridge/lang-4.x/ast/parser/main/internal/structure/normalize-root.d.ts CHANGED
@@ -1,4 +1,7 @@
  import type { NormalizerData } from '../../normalizer-data';
  import type { RExpressionList } from '../../../../model/nodes/r-expression-list';
  import type { JsonEntry } from '../../../json/format';
+ /**
+ * Takes the parse dta as object and produces an undecorated, normalized AST.
+ */
  export declare function normalizeRootObjToAst(data: NormalizerData, obj: JsonEntry): RExpressionList;
package/r-bridge/lang-4.x/ast/parser/main/internal/structure/normalize-root.js CHANGED
@@ -6,6 +6,9 @@ const normalize_expressions_1 = require("./normalize-expressions");
  const log_1 = require("../../../../../../../util/log");
  const arrays_1 = require("../../../../../../../util/arrays");
  const type_1 = require("../../../../model/type");
+ /**
+ * Takes the parse dta as object and produces an undecorated, normalized AST.
+ */
  function normalizeRootObjToAst(data, obj) {
  const exprContent = obj.token;
  (0, normalize_meta_1.assureTokenType)(exprContent, type_1.RawRType.ExpressionList);
package/r-bridge/lang-4.x/tree-sitter/tree-sitter-normalize.js CHANGED
@@ -18,7 +18,12 @@ function normalizeTreeSitterTreeToAst(tree) {
  return root;
  }
  function nonErrorChildren(node) {
- return node.children.filter(n => n.type !== tree_sitter_types_1.TreeSitterType.Error);
+ if (node.hasError) {
+ return [];
+ }
+ else {
+ return node.children;
+ }
  }
  function convertTreeNode(node) {
  // generally, the grammar source file dictates what children a node has in what order:
package/r-bridge/parser.d.ts CHANGED
@@ -27,6 +27,16 @@ export interface ParseRequiredInput<T> {
  export interface ParseStepOutput<T> {
  /** The parsed AST of the R code as given by the R parse side */
  readonly parsed: T;
+ /** Additional meta information about the parse */
+ readonly '.parse-meta'?: {
+ /** The number of tokens in the AST */
+ readonly tokenCount: number;
+ };
  }
+ /**
+ * Takes an input program and parses it using the given parser.
+ * @param _results - just a proxy for the pipeline, signifies that this function does not need prior knowledge of the pipeline
+ * @param input - the input to the parse step
+ */
  export declare function parseRequests<T extends KnownParserType>(_results: unknown, input: Partial<ParseRequiredInput<T>>): Promise<ParseStepOutput<T>>;
  export {};
package/r-bridge/parser.js CHANGED
@@ -1,14 +1,38 @@
  "use strict";
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.parseRequests = parseRequests;
+ function countChildren(node) {
+ let ret = 1;
+ for (const child of node.children) {
+ ret += countChildren(child);
+ }
+ return ret;
+ }
+ /**
+ * Takes an input program and parses it using the given parser.
+ * @param _results - just a proxy for the pipeline, signifies that this function does not need prior knowledge of the pipeline
+ * @param input - the input to the parse step
+ */
  async function parseRequests(_results, input) {
  /* in the future, we want to expose all cases */
  const request = (Array.isArray(input.request) ? input.request[0] : input.request);
  if (input.parser?.async) {
- return { parsed: await input.parser.parse(request) };
+ const parsed = await input.parser.parse(request);
+ return {
+ parsed,
+ '.parse-meta': typeof parsed === 'object' && 'rootNode' in parsed ? {
+ tokenCount: countChildren(parsed.rootNode),
+ } : undefined
+ };
  }
  else {
- return { parsed: input.parser.parse(request) };
+ const parsed = input.parser.parse(request);
+ return {
+ parsed,
+ '.parse-meta': typeof parsed === 'object' && 'rootNode' in parsed ? {
+ tokenCount: countChildren(parsed.rootNode),
+ } : undefined
+ };
  }
  }
  //# sourceMappingURL=parser.js.map
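The new optional `.parse-meta` field can be read from the parse step output as in the sketch below; it is only populated when the parser result exposes a `rootNode` (i.e. the tree-sitter backend), and the import path is an assumption.

```ts
// Sketch: ParseStepOutput is declared in package/r-bridge/parser.d.ts; the import path is an assumption.
import type { ParseStepOutput } from '@eagleoutice/flowr/r-bridge/parser';

declare const output: ParseStepOutput<unknown>; // placeholder: result of parseRequests / the parse step

// '.parse-meta' is optional, so guard the access:
const tokenCount = output['.parse-meta']?.tokenCount;
console.log(tokenCount !== undefined ? `AST contains ${tokenCount} tokens` : 'no parse meta available');
```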
package/search/flowr-search-builder.d.ts CHANGED
@@ -111,7 +111,6 @@ export declare const Q: {
  readonly id: (id: NodeId) => FlowrSearchBuilder<"get">;
  };
  export type FlowrSearchBuilderType<Generator extends GeneratorNames = GeneratorNames, Transformers extends TransformerNames[] = TransformerNames[], Info = ParentInformation, ElementType = FlowrSearchElements<Info, FlowrSearchElement<Info>[]>> = FlowrSearchBuilder<Generator, Transformers, Info, ElementType>;
- type GetElements<F> = F extends FlowrSearchElements<infer Info, infer Elements> ? Elements extends FlowrSearchElement<Info>[] ? Elements : never : never;
  /**
  * The search query is a combination of a generator and a list of transformers
  * and allows this view to pass such queries in a serialized form.
@@ -172,7 +171,7 @@ export declare class FlowrSearchBuilder<Generator extends GeneratorNames, Transf
  /**
  * merge combines the search results with those of another search.
  */
- merge<Generator2 extends GeneratorNames, Transformers2 extends TransformerNames[], OtherElementType extends FlowrSearchElements<Info, FlowrSearchElement<Info>[]>>(other: FlowrSearchBuilder<Generator2, Transformers2, Info, OtherElementType>): FlowrSearchBuilder<Generator, Transformers, Info, FlowrSearchElements<Info, [...GetElements<ElementType>, ...GetElements<OtherElementType>]>>;
+ merge<Generator2 extends GeneratorNames, Transformers2 extends TransformerNames[], OtherElementType extends FlowrSearchElements<Info, FlowrSearchElement<Info>[]>>(other: FlowrSearchBuilder<Generator2, Transformers2, Info, OtherElementType>): FlowrSearchBuilder<Generator, Transformers, Info>;
  /**
  * Construct the final search (this may happen automatically with most search handlers).
  *
package/search/flowr-search-builder.js CHANGED
@@ -161,9 +161,7 @@ class FlowrSearchBuilder {
  /**
  * merge combines the search results with those of another search.
  */
- merge(other /* | FlowrSearch<Info, Generator2, Transformers2, OtherElementType> */
- // @ts-expect-error -- this works when merging, there is no info disparity
- ) {
+ merge(other /* | FlowrSearch<Info, Generator2, Transformers2, OtherElementType> */) {
  this.search.push({ type: 'transformer', name: 'merge', args: { generator: other.generator, search: other.search } });
  return this;
  }
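At runtime, `merge` still just records a `'merge'` transformer and returns the builder; only the declared element-tuple type was dropped from the return type. A minimal sketch, assuming the import path and that the numeric NodeId values shown here are acceptable:

```ts
// Sketch: import path and the concrete NodeId values are assumptions for illustration.
import { Q } from '@eagleoutice/flowr/search/flowr-search-builder';

// As of 2.2.3, the return type no longer tracks the combined element tuple,
// but merging two searches behaves as before.
const combined = Q.id(0).merge(Q.id(1));
```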
package/util/mermaid/dfg.d.ts CHANGED
@@ -18,6 +18,8 @@ interface MermaidGraph {
  /** in the form of from-\>to because I am lazy, see {@link encodeEdge} */
  presentEdges: Set<string>;
  rootGraph: DataflowGraph;
+ /** if given, the dataflow graph will only focus on the "important" parts */
+ simplified?: boolean;
  }
  /**
  * Prints a {@link SourceRange|range} as a human readable string.
@@ -33,6 +35,7 @@ interface MermaidGraphConfiguration {
  markStyle?: MermaidMarkStyle;
  rootGraph?: DataflowGraph;
  presentEdges?: Set<string>;
+ simplified?: boolean;
  }
  export declare function graphToMermaid(config: MermaidGraphConfiguration): {
  string: string;
package/util/mermaid/dfg.js CHANGED
@@ -37,14 +37,24 @@ function subflowToMermaid(nodeId, exitPoints, subflow, mermaid, idPrefix = '') {
  return;
  }
  const subflowId = `${idPrefix}flow-${nodeId}`;
- mermaid.nodeLines.push(`\nsubgraph "${subflowId}" [function ${nodeId}]`);
+ if (mermaid.simplified) {
+ // get parent
+ const idMap = mermaid.rootGraph.idMap;
+ const node = idMap?.get(nodeId);
+ const nodeLexeme = node?.info.fullLexeme ?? node?.lexeme ?? '??';
+ mermaid.nodeLines.push(`\nsubgraph "${subflowId}" ["${(0, mermaid_1.escapeMarkdown)(nodeLexeme ?? 'function')}"]`);
+ }
+ else {
+ mermaid.nodeLines.push(`\nsubgraph "${subflowId}" [function ${nodeId}]`);
+ }
  const subgraph = graphToMermaidGraph(subflow.graph, {
  graph: mermaid.rootGraph,
  rootGraph: mermaid.rootGraph,
  idPrefix,
  includeEnvironments: mermaid.includeEnvironments,
  mark: mermaid.mark,
- prefix: null
+ prefix: null,
+ simplified: mermaid.simplified
  });
  mermaid.nodeLines.push(...subgraph.nodeLines);
  mermaid.edgeLines.push(...subgraph.edgeLines);
@@ -143,10 +153,16 @@ function vertexToMermaid(info, mermaid, id, idPrefix, mark) {
  }
  const node = mermaid.rootGraph.idMap?.get(info.id);
  const lexeme = node?.lexeme ?? (node?.type === type_1.RType.ExpressionList ? node?.grouping?.[0]?.lexeme : '') ?? '??';
- const escapedName = (0, mermaid_1.escapeMarkdown)(node ? `[${node.type}] ${lexeme}` : '??');
- const deps = info.controlDependencies ? ', :may:' + info.controlDependencies.map(c => c.id + (c.when ? '+' : '-')).join(',') : '';
- const n = node?.info.fullRange ?? node?.location ?? (node?.type === type_1.RType.ExpressionList ? node?.grouping?.[0].location : undefined);
- mermaid.nodeLines.push(` ${idPrefix}${id}${open}"\`${escapedName}${escapedName.length > 10 ? '\n ' : ' '}(${id}${deps})\n *${formatRange(n)}*${fCall ? displayFunctionArgMapping(info.args) : ''}\`"${close}`);
+ if (mermaid.simplified) {
+ const escapedName = '**' + (0, mermaid_1.escapeMarkdown)(node ? `${lexeme}` : '??') + '**' + (node ? `\n*${node.type}*` : '');
+ mermaid.nodeLines.push(` ${idPrefix}${id}${open}"\`${escapedName}\`"${close}`);
+ }
+ else {
+ const escapedName = (0, mermaid_1.escapeMarkdown)(node ? `[${node.type}] ${lexeme}` : '??');
+ const deps = info.controlDependencies ? ', :may:' + info.controlDependencies.map(c => c.id + (c.when ? '+' : '-')).join(',') : '';
+ const n = node?.info.fullRange ?? node?.location ?? (node?.type === type_1.RType.ExpressionList ? node?.grouping?.[0].location : undefined);
+ mermaid.nodeLines.push(` ${idPrefix}${id}${open}"\`${escapedName}${escapedName.length > 10 ? '\n ' : ' '}(${id}${deps})\n *${formatRange(n)}*${fCall ? displayFunctionArgMapping(info.args) : ''}\`"${close}`);
+ }
  if (mark?.has(id)) {
  mermaid.nodeLines.push(` style ${idPrefix}${id} ${mermaid.markStyle.vertex} `);
  }
@@ -176,8 +192,8 @@ function vertexToMermaid(info, mermaid, id, idPrefix, mark) {
  }
  }
  // make the passing of root ids more performant again
- function graphToMermaidGraph(rootIds, { graph, prefix = 'flowchart TD', idPrefix = '', includeEnvironments = true, mark, rootGraph, presentEdges = new Set(), markStyle = { vertex: 'stroke:teal,stroke-width:7px,stroke-opacity:.8;', edge: 'stroke:teal,stroke-width:4.2px,stroke-opacity:.8' } }) {
- const mermaid = { nodeLines: prefix === null ? [] : [prefix], edgeLines: [], presentEdges, mark, rootGraph: rootGraph ?? graph, includeEnvironments, markStyle };
+ function graphToMermaidGraph(rootIds, { simplified, graph, prefix = 'flowchart TD', idPrefix = '', includeEnvironments = !simplified, mark, rootGraph, presentEdges = new Set(), markStyle = { vertex: 'stroke:teal,stroke-width:7px,stroke-opacity:.8;', edge: 'stroke:teal,stroke-width:4.2px,stroke-opacity:.8' } }) {
+ const mermaid = { nodeLines: prefix === null ? [] : [prefix], edgeLines: [], presentEdges, mark, rootGraph: rootGraph ?? graph, includeEnvironments, markStyle, simplified };
  for (const [id, info] of graph.vertices(true)) {
  if (rootIds.has(id)) {
  vertexToMermaid(info, mermaid, id, idPrefix, mark);
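A sketch of the new `simplified` rendering mode; the deep import paths and the way the `DataflowGraph` is obtained here are assumptions for illustration.

```ts
// Sketch: import paths are assumptions based on the package layout.
import { graphToMermaid } from '@eagleoutice/flowr/util/mermaid/dfg';
import type { DataflowGraph } from '@eagleoutice/flowr/dataflow/graph/graph';

declare const graph: DataflowGraph; // placeholder: e.g. taken from a dataflow pipeline result

// simplified: true labels vertices with their lexeme, titles function subgraphs by name,
// and leaves out environment information by default (includeEnvironments = !simplified).
const { string: mermaid } = graphToMermaid({ graph, simplified: true });
console.log(mermaid);
```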
package/util/strings.d.ts CHANGED
@@ -10,3 +10,12 @@ export declare function withoutWhitespace(output: string): string;
  * Find the longest common prefix in an array of strings
  */
  export declare function longestCommonPrefix(strings: string[]): string;
+ /**
+ * Join a list of strings, but with special handling for the last element/scenarios in which the array contains exactly two elements.
+ * The goal is to create (partial) sentences like `a, b, and c` or `a and b`.
+ */
+ export declare function joinWithLast(strs: readonly string[], { join, last, joinTwo }?: {
+ join?: string;
+ last?: string;
+ joinTwo?: string;
+ }): string;
package/util/strings.js CHANGED
@@ -3,6 +3,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
  exports.startAndEndsWith = startAndEndsWith;
  exports.withoutWhitespace = withoutWhitespace;
  exports.longestCommonPrefix = longestCommonPrefix;
+ exports.joinWithLast = joinWithLast;
  /**
  * Check if the given string starts and ends with the given letter
  */
@@ -37,4 +38,17 @@ function longestCommonPrefix(strings) {
  }
  return prefix;
  }
+ /**
+ * Join a list of strings, but with special handling for the last element/scenarios in which the array contains exactly two elements.
+ * The goal is to create (partial) sentences like `a, b, and c` or `a and b`.
+ */
+ function joinWithLast(strs, { join = ', ', last = ', and ', joinTwo = ' and ' } = {}) {
+ if (strs.length <= 1) {
+ return strs.join('');
+ }
+ else if (strs.length === 2) {
+ return strs.join(joinTwo);
+ }
+ return strs.slice(0, -1).join(join) + last + strs[strs.length - 1];
+ }
  //# sourceMappingURL=strings.js.map
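Expected behavior of the new helper, traced directly from the implementation above; only the import path is an assumption.

```ts
// Sketch: the import path mirrors the compiled file layout inside the package (assumption).
import { joinWithLast } from '@eagleoutice/flowr/util/strings';

joinWithLast([]);                                // ''
joinWithLast(['a']);                             // 'a'
joinWithLast(['a', 'b']);                        // 'a and b'      (joinTwo)
joinWithLast(['a', 'b', 'c']);                   // 'a, b, and c'  (join + last)
joinWithLast(['a', 'b', 'c'], { last: ' or ' }); // 'a, b or c'
```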
package/util/version.js CHANGED
@@ -3,7 +3,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
  exports.flowrVersion = flowrVersion;
  const semver_1 = require("semver");
  // this is automatically replaced with the current version by release-it
- const version = '2.2.2';
+ const version = '2.2.3';
  function flowrVersion() {
  return new semver_1.SemVer(version);
  }