csv-sql-engine 0.0.2 → 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/csv/csv-text.js +1 -2
- package/dist/engine/define-ast-handler.d.ts +5 -2
- package/dist/engine/engine.d.ts +1 -1
- package/dist/engine/handlers/row-delete.handler.js +14 -10
- package/dist/engine/handlers/row-insert.handler.js +18 -14
- package/dist/engine/handlers/row-select.handler.js +12 -13
- package/dist/engine/handlers/row-update.handler.js +16 -11
- package/dist/engine/sort-values.d.ts +12 -3
- package/dist/engine/sort-values.js +21 -15
- package/dist/errors/sql.error.js +1 -2
- package/dist/index.d.ts +0 -1
- package/dist/index.js +0 -1
- package/package.json +6 -6
- package/dist/augments/trim-lines.d.ts +0 -6
- package/dist/augments/trim-lines.js +0 -12
package/dist/csv/csv-text.js
CHANGED
@@ -1,6 +1,5 @@
-import { wrapString } from '@augment-vir/common';
+import { trimLines, wrapString } from '@augment-vir/common';
 import { csvParseRows } from 'd3-dsv';
-import { trimLines } from '../augments/trim-lines.js';
 /**
  * Converts multiple rows of values into a CSV file string.
  *
package/dist/engine/define-ast-handler.d.ts
CHANGED
@@ -1,11 +1,14 @@
 import { type MaybePromise } from '@augment-vir/common';
 import { type AstHandlerParams } from './params.js';
+import { type SortValuesOutput } from './sort-values.js';
 /**
  * Output from a handler that handled a SQL query.
  *
  * @category Internal
  */
-export type AstHandlerResult =
+export type AstHandlerResult = SortValuesOutput & {
+    numberOfRowsAffected: number;
+};
 /**
  * An AST / SQL handler.
  *
@@ -17,7 +20,7 @@ export type AstHandler = {
      * Return `undefined` to mark this AST as not-handled. That means that other handlers should be
      * used instead.
      */
-    handler: (params: Readonly<AstHandlerParams>) => MaybePromise<AstHandlerResult | undefined>;
+    handler: (params: Readonly<AstHandlerParams>) => MaybePromise<AstHandlerResult[] | undefined>;
 };
 /**
  * Used to define new handlers.
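Taken together, these declarations mean a handler now resolves to an array of result objects, each pairing the sorted values with an affected-row count. A minimal sketch of the combined shape, with the two types copied from the declarations above and purely invented example data:

```ts
type SortValuesOutput = {
    values: string[][];
    columnNames: string[];
};

type AstHandlerResult = SortValuesOutput & {
    numberOfRowsAffected: number;
};

// What a handler might resolve to for a single-table `DELETE ... RETURNING id, name`.
// The row contents and count are invented for illustration.
const exampleHandlerOutput: AstHandlerResult[] = [
    {
        columnNames: ['id', 'name'],
        values: [['2', 'bob']],
        numberOfRowsAffected: 1,
    },
];
```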
package/dist/engine/engine.d.ts
CHANGED
@@ -13,4 +13,4 @@ export declare const allAstHandlers: ReadonlyArray<Readonly<AstHandler>>;
  *
  * @category Main
  */
-export declare function executeSql(sqlInput: Sql | string, params: Readonly<ExecuteSqlParams>): Promise<AstHandlerResult[]>;
+export declare function executeSql(sqlInput: Sql | string, params: Readonly<ExecuteSqlParams>): Promise<AstHandlerResult[][]>;
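Callers of `executeSql` therefore receive one extra level of nesting. A hedged usage sketch: only the `AstHandlerResult[][]` return shape comes from this diff, while the package-entry import and the `csvDirPath` parameter are assumptions made for illustration:

```ts
import {executeSql} from 'csv-sql-engine';

async function listUsers() {
    // assumed params shape; the diff only shows Readonly<ExecuteSqlParams>
    const resultGroups = await executeSql('SELECT id, name FROM users', {
        csvDirPath: './csv-tables',
    });

    // outer array: one group per handled statement (assumption);
    // inner array: the AstHandlerResult entries returned by that handler
    for (const group of resultGroups) {
        for (const result of group) {
            console.info(result.columnNames, result.values, result.numberOfRowsAffected);
        }
    }
}
```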
package/dist/engine/handlers/row-delete.handler.js
CHANGED
@@ -15,7 +15,7 @@ export const rowDeleteHandler = defineAstHandler({
     async handler({ ast, csvDirPath, sql }) {
         if (ast.type === AstType.Delete) {
             const tableNames = ast.table.map((table) => table.table);
-            const
+            const results = await awaitedBlockingMap(tableNames, async (tableName) => {
                 const { tableFilePath, sanitizedTableName } = nameCsvTableFile({
                     csvDirPath,
                     tableName,
@@ -27,25 +27,29 @@ export const rowDeleteHandler = defineAstHandler({
                 });
                 const rowIndexesToDelete = findWhereMatches(ast.where, csvContents, tableFilePath);
                 const returningRequirement = ast.returning;
-                const
-                    ?
-                        .filter((row, index) => rowIndexesToDelete.includes(index))
-                        .map((row) => sortValues({
+                const result = returningRequirement
+                    ? sortValues({
                         csvFileHeaderOrder: csvHeaders,
                         sqlQueryHeaderOrder: returningRequirement.columns.map((column) => column.expr.column),
                         from: {
-                            csvFile: row,
+                            csvFile: csvContents.filter((row, index) => rowIndexesToDelete.includes(index)),
                         },
                         unconsumedInterpolationValues: sql.unconsumedValues,
-                    })
-                    :
+                    })
+                    : {
+                        columnNames: [],
+                        values: [],
+                    };
                 rowIndexesToDelete.forEach((rowIndexToDelete) => {
                     csvContents.splice(rowIndexToDelete, 1);
                 });
                 await writeCsvFile(tableFilePath, csvContents);
-                return
+                return {
+                    ...result,
+                    numberOfRowsAffected: rowIndexesToDelete.length,
+                };
             });
-            return
+            return results.flat().filter(check.isTruthy);
         }
         return undefined;
     },
package/dist/engine/handlers/row-insert.handler.js
CHANGED
@@ -1,4 +1,4 @@
-import { check } from '@augment-vir/assert';
+import { assertWrap, check } from '@augment-vir/assert';
 import { awaitedBlockingMap } from '@augment-vir/common';
 import { appendCsvRow, nameCsvTableFile, readCsvHeaders } from '../../csv/csv-file.js';
 import { AstType } from '../../sql/ast.js';
@@ -14,7 +14,7 @@ export const rowInsertHandler = defineAstHandler({
     async handler({ ast, csvDirPath, sql }) {
         if (ast.type === AstType.Insert) {
             const tableNames = ast.table.map((table) => table.table);
-            const
+            const results = await awaitedBlockingMap(tableNames, async (tableName) => {
                 const { tableFilePath, sanitizedTableName } = nameCsvTableFile({
                     csvDirPath,
                     tableName,
@@ -24,30 +24,34 @@ export const rowInsertHandler = defineAstHandler({
                     csvFilePath: tableFilePath,
                     sanitizedTableName,
                 });
-                const newRow = sortValues({
+                const newRow = assertWrap.isDefined(sortValues({
                     csvFileHeaderOrder,
                     sqlQueryHeaderOrder: ast.columns || csvFileHeaderOrder,
                     from: {
-                        sqlQuery: rawValues,
+                        sqlQuery: [rawValues],
                     },
                     unconsumedInterpolationValues: sql.unconsumedValues,
-                });
+                }).values[0], 'No sorted row retrieved.');
                 await appendCsvRow(newRow, tableFilePath);
-
-
+                const readResult = ast.returning
+                    ? sortValues({
                         csvFileHeaderOrder,
                         sqlQueryHeaderOrder: ast.returning.columns.map((column) => column.expr.column),
                         from: {
-                            csvFile: newRow,
+                            csvFile: [newRow],
                         },
                         unconsumedInterpolationValues: undefined,
-                    })
-
-
-
-
+                    })
+                    : {
+                        columnNames: [],
+                        values: [],
+                    };
+                return {
+                    ...readResult,
+                    numberOfRowsAffected: 1,
+                };
             });
-            return
+            return results.filter(check.isTruthy);
         }
         return undefined;
     },
package/dist/engine/handlers/row-select.handler.js
CHANGED
@@ -1,6 +1,6 @@
 import { check } from '@augment-vir/assert';
 import { awaitedBlockingMap } from '@augment-vir/common';
-import { nameCsvTableFile, readCsvFile, readCsvHeaders
+import { nameCsvTableFile, readCsvFile, readCsvHeaders } from '../../csv/csv-file.js';
 import { AstType } from '../../sql/ast.js';
 import { defineAstHandler } from '../define-ast-handler.js';
 import { sortValues } from '../sort-values.js';
@@ -27,18 +27,17 @@ export const rowSelectHandler = defineAstHandler({
             });
             const rowIndexesToSelect = findWhereMatches(ast.where, csvContents, tableFilePath);
             const columnNames = ast.columns.map((column) => column.expr.column);
-
-
-
-
-
-
-
-
-
-
-
-            return selection;
+            return {
+                ...sortValues({
+                    csvFileHeaderOrder: csvHeaders,
+                    sqlQueryHeaderOrder: columnNames,
+                    from: {
+                        csvFile: csvContents.filter((row, index) => rowIndexesToSelect.includes(index)),
+                    },
+                    unconsumedInterpolationValues: sql.unconsumedValues,
+                }),
+                numberOfRowsAffected: 0,
+            };
         });
         return allSelections.flat().filter(check.isTruthy);
     }
package/dist/engine/handlers/row-update.handler.js
CHANGED
@@ -16,7 +16,7 @@ export const rowUpdateHandler = defineAstHandler({
     async handler({ ast, csvDirPath, sql }) {
         if (ast.type === AstType.Update) {
             const tableNames = ast.table.map((table) => table.table);
-            const
+            const results = await awaitedBlockingMap(tableNames, async (tableName) => {
                 const { tableFilePath, sanitizedTableName } = nameCsvTableFile({
                     csvDirPath,
                     tableName,
@@ -40,22 +40,27 @@ export const rowUpdateHandler = defineAstHandler({
                         row[headerIndex] = set.value.value;
                     });
                 });
-                const
-
-
-                    .map((row) => sortValues({
+                const sqlHeaders = returningRequirement?.columns.map((column) => column.expr.column) || [];
+                const result = returningRequirement
+                    ? sortValues({
                         csvFileHeaderOrder: csvHeaders,
-                        sqlQueryHeaderOrder:
+                        sqlQueryHeaderOrder: sqlHeaders,
                         from: {
-                            csvFile: row,
+                            csvFile: csvContents.filter((row, index) => rowIndexesToUpdate.includes(index)),
                         },
                         unconsumedInterpolationValues: sql.unconsumedValues,
-                    })
-                    :
+                    })
+                    : {
+                        columnNames: [],
+                        values: [],
+                    };
                 await writeCsvFile(tableFilePath, csvContents);
-                return
+                return {
+                    ...result,
+                    numberOfRowsAffected: rowIndexesToUpdate.length,
+                };
             });
-            return
+            return results.flat().filter(check.isTruthy);
         }
         return undefined;
     },
package/dist/engine/sort-values.d.ts
CHANGED
@@ -1,5 +1,14 @@
 import { type RequireExactlyOne } from 'type-fest';
 import { type ConsumableValue } from '../sql/sql.js';
+/**
+ * Output from {@link sortValues}.
+ *
+ * @category Internal
+ */
+export type SortValuesOutput = {
+    values: string[][];
+    columnNames: string[];
+};
 /**
  * Sorts values for CSV insertion or reading and handle interpolated values.
  *
@@ -10,9 +19,9 @@ export declare function sortValues({ csvFileHeaderOrder, sqlQueryHeaderOrder, fr
     sqlQueryHeaderOrder: ReadonlyArray<string>;
     from: RequireExactlyOne<{
         /** When a CSV value array is provided, they are sorted to the SQL header order. */
-        csvFile: ReadonlyArray<string
+        csvFile: ReadonlyArray<ReadonlyArray<string>>;
         /** When a SQL value array is provided, they are sorted to the CSV header order. */
-        sqlQuery: ReadonlyArray<string
+        sqlQuery: ReadonlyArray<ReadonlyArray<string>>;
     }>;
     unconsumedInterpolationValues: undefined | ConsumableValue[];
-}>):
+}>): SortValuesOutput;
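Both input variants are now two-dimensional (an array of rows rather than a single row), and the function returns a `SortValuesOutput` instead of a bare array. A sketch of the new call shape, assuming a straightforward column mapping; the header names and row values are invented, and `sortValues` is the internal function declared above:

```ts
// Hypothetical call against the new declaration: rows in, rows out.
const sorted = sortValues({
    csvFileHeaderOrder: ['id', 'name', 'email'],
    sqlQueryHeaderOrder: ['name', 'id'],
    from: {
        // CSV rows get re-ordered into the SQL header order
        csvFile: [
            ['1', 'alice', 'alice@example.com'],
            ['2', 'bob', 'bob@example.com'],
        ],
    },
    unconsumedInterpolationValues: undefined,
});

// Expected result shape (assuming plain column mapping):
// sorted.columnNames -> ['name', 'id']
// sorted.values      -> [['alice', '1'], ['bob', '2']]
```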
package/dist/engine/sort-values.js
CHANGED
@@ -13,26 +13,32 @@ export function sortValues({ csvFileHeaderOrder, sqlQueryHeaderOrder, from, unco
             return header;
         }
     });
-    const values = (from.csvFile || from.sqlQuery).map((
-
-        if (
-            if (unconsumedInterpolationValues
-
+    const values = (from.csvFile || from.sqlQuery).map((valueRow) => {
+        const mappedValueRow = valueRow.map((value) => {
+            if (value === '?') {
+                if (unconsumedInterpolationValues) {
+                    if (unconsumedInterpolationValues.length) {
+                        return unconsumedInterpolationValues.shift() || '';
+                    }
+                    else {
+                        throw new Error('Encountered ? but all interpolation values have already been used.');
+                    }
                 }
                 else {
-                    throw new Error('Encountered ? but
+                    throw new Error('Encountered ? but received no interpolation values.');
                 }
             }
             else {
-
+                return value;
             }
-        }
-
-
-
-
-    return toOrder.map((header) => {
-        const sourceIndex = fromOrder.indexOf(header);
-        return values[sourceIndex] ?? '';
+        });
+        return toOrder.map((header) => {
+            const sourceIndex = fromOrder.indexOf(header);
+            return mappedValueRow[sourceIndex] ?? '';
+        });
     });
+    return {
+        columnNames: toOrder,
+        values,
+    };
 }
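The reworked loop consumes one entry from `unconsumedInterpolationValues` for every `?` placeholder it meets and throws once the list is exhausted or was never provided. A standalone re-implementation of just that shift-and-throw pattern (not the package's own code, and simplified to plain strings instead of `ConsumableValue`):

```ts
// Illustrative only: mirrors the placeholder handling shown in the diff above.
function fillPlaceholders(
    row: ReadonlyArray<string>,
    interpolationValues: string[] | undefined,
): string[] {
    return row.map((value) => {
        if (value !== '?') {
            return value;
        }
        if (!interpolationValues) {
            throw new Error('Encountered ? but received no interpolation values.');
        }
        if (!interpolationValues.length) {
            throw new Error('Encountered ? but all interpolation values have already been used.');
        }
        // shift() consumes the value so the next ? gets the next interpolation value
        return interpolationValues.shift() || '';
    });
}

// Usage: two placeholders consume the two provided values in order.
fillPlaceholders(['?', 'static', '?'], ['a', 'b']); // ['a', 'static', 'b']
```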
package/dist/errors/sql.error.js
CHANGED
@@ -1,5 +1,4 @@
-import { addSuffix, extractErrorMessage, indent } from '@augment-vir/common';
-import { trimLines } from '../augments/trim-lines.js';
+import { addSuffix, extractErrorMessage, indent, trimLines } from '@augment-vir/common';
 import { CsvSqlEngineError } from './csv-sql-engine.error.js';
 /**
  * Generic SQL related error thrown by the csv-sql-engine package. All SQL related errors from this
package/dist/index.d.ts
CHANGED
package/dist/index.js
CHANGED
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
     "name": "csv-sql-engine",
-    "version": "0.0.2",
+    "version": "0.2.0",
     "description": "API for executing SQL statements on CSV files.",
     "keywords": [
         "CSV",
@@ -43,9 +43,9 @@
         "test:update": "npm run test update"
     },
     "dependencies": {
-        "@augment-vir/assert": "^31.
-        "@augment-vir/common": "^31.
-        "@augment-vir/node": "^31.
+        "@augment-vir/assert": "^31.56.0",
+        "@augment-vir/common": "^31.56.0",
+        "@augment-vir/node": "^31.56.0",
         "@sinclair/typebox": "^0.34.45",
         "d3-dsv": "^3.0.1",
         "node-sql-parser": "^5.3.13",
@@ -53,7 +53,7 @@
         "sql-template-tag": "^5.2.1"
     },
     "devDependencies": {
-        "@augment-vir/test": "^31.
+        "@augment-vir/test": "^31.56.0",
         "@eslint/eslintrc": "^3.3.3",
         "@eslint/js": "^9.39.2",
         "@stylistic/eslint-plugin": "^5.6.1",
@@ -93,7 +93,7 @@
         "typedoc": "^0.28.15",
         "typescript": "^5.9.3",
         "typescript-eslint": "^8.50.1",
-        "virmator": "^14.
+        "virmator": "^14.4.0"
     },
     "engines": {
         "node": ">=22"
package/dist/augments/trim-lines.js
DELETED
@@ -1,12 +0,0 @@
-import { check } from '@augment-vir/assert';
-import { filterMap } from '@augment-vir/common';
-/**
- * Trims every line in the given string.
- *
- * @category Internal
- */
-export function trimLines(value) {
-    return filterMap(value.trim().split('\n'), (line) => line.trim(), check.isTruthy)
-        .join('\n')
-        .trim();
-}