csv-sql-engine 0.0.1 → 0.0.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/csv/csv-text.d.ts +0 -16
- package/dist/csv/csv-text.js +0 -22
- package/dist/engine/handlers/row-delete.handler.js +3 -2
- package/dist/engine/handlers/row-insert.handler.js +13 -13
- package/dist/engine/handlers/row-select.handler.js +3 -2
- package/dist/engine/handlers/row-update.handler.js +5 -4
- package/dist/engine/sort-values.d.ts +18 -0
- package/dist/engine/sort-values.js +38 -0
- package/dist/index.d.ts +1 -0
- package/dist/index.js +1 -0
- package/dist/sql/sql.d.ts +11 -2
- package/dist/sql/sql.js +14 -1
- package/package.json +1 -1
package/dist/csv/csv-text.d.ts
CHANGED

@@ -1,4 +1,3 @@
-import { type RequireExactlyOne } from 'type-fest';
 /**
  * Converts multiple rows of values into a CSV file string.
  *
@@ -11,21 +10,6 @@ export declare function convertRowsToCsv(rows: ReadonlyArray<ReadonlyArray<strin
  * @category CSV
  */
 export declare function convertRowToCsv(row: ReadonlyArray<string>): string;
-/**
- * Sorts values for CSV insertion or reading.
- *
- * @category CSV
- */
-export declare function sortValues({ csvFileHeaderOrder, sqlQueryHeaderOrder, from, }: Readonly<{
-    csvFileHeaderOrder: ReadonlyArray<string>;
-    sqlQueryHeaderOrder: ReadonlyArray<string>;
-    from: RequireExactlyOne<{
-        /** When a CSV value array is provided, they are sorted to the SQL header order. */
-        csvFile: ReadonlyArray<string>;
-        /** When a SQL value array is provided, they are sorted to the CSV header order. */
-        sqlQuery: ReadonlyArray<string>;
-    }>;
-}>): string[];
 /**
  * Reads a CSV file contents string and converts it into multiple rows of strings.
  *
package/dist/csv/csv-text.js
CHANGED

@@ -26,28 +26,6 @@ export function convertRowToCsv(row) {
         })
         .join(',');
 }
-/**
- * Sorts values for CSV insertion or reading.
- *
- * @category CSV
- */
-export function sortValues({ csvFileHeaderOrder, sqlQueryHeaderOrder, from, }) {
-    const fromOrder = from.sqlQuery ? sqlQueryHeaderOrder : csvFileHeaderOrder;
-    const toOrder = (from.sqlQuery ? csvFileHeaderOrder : sqlQueryHeaderOrder).flatMap((header) => {
-        if (header === '*') {
-            return csvFileHeaderOrder;
-        }
-        else {
-            return header;
-        }
-    });
-    const values = from.csvFile || from.sqlQuery;
-    return toOrder.map((header) => {
-        const sourceIndex = fromOrder.indexOf(header);
-        // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
-        return values[sourceIndex];
-    });
-}
 /**
  * Reads a CSV file contents string and converts it into multiple rows of strings.
  *
package/dist/engine/handlers/row-delete.handler.js
CHANGED

@@ -1,9 +1,9 @@
 import { check } from '@augment-vir/assert';
 import { awaitedBlockingMap } from '@augment-vir/common';
 import { nameCsvTableFile, readCsvFile, readCsvHeaders, writeCsvFile } from '../../csv/csv-file.js';
-import { sortValues } from '../../csv/csv-text.js';
 import { AstType } from '../../sql/ast.js';
 import { defineAstHandler } from '../define-ast-handler.js';
+import { sortValues } from '../sort-values.js';
 import { findWhereMatches } from '../where-matcher.js';
 /**
  * Handles deleting rows.
@@ -12,7 +12,7 @@ import { findWhereMatches } from '../where-matcher.js';
  */
 export const rowDeleteHandler = defineAstHandler({
     name: 'row-delete',
-    async handler({ ast, csvDirPath }) {
+    async handler({ ast, csvDirPath, sql }) {
         if (ast.type === AstType.Delete) {
             const tableNames = ast.table.map((table) => table.table);
             const returning = await awaitedBlockingMap(tableNames, async (tableName) => {
@@ -36,6 +36,7 @@ export const rowDeleteHandler = defineAstHandler({
                         from: {
                             csvFile: row,
                         },
+                        unconsumedInterpolationValues: sql.unconsumedValues,
                     }))
                     : undefined;
                 rowIndexesToDelete.forEach((rowIndexToDelete) => {
package/dist/engine/handlers/row-insert.handler.js
CHANGED

@@ -1,9 +1,9 @@
 import { check } from '@augment-vir/assert';
 import { awaitedBlockingMap } from '@augment-vir/common';
 import { appendCsvRow, nameCsvTableFile, readCsvHeaders } from '../../csv/csv-file.js';
-import { sortValues } from '../../csv/csv-text.js';
 import { AstType } from '../../sql/ast.js';
 import { defineAstHandler } from '../define-ast-handler.js';
+import { sortValues } from '../sort-values.js';
 /**
  * Handles inserting rows.
  *
@@ -11,7 +11,7 @@ import { defineAstHandler } from '../define-ast-handler.js';
  */
 export const rowInsertHandler = defineAstHandler({
     name: 'row-insert',
-    async handler({ ast, csvDirPath }) {
+    async handler({ ast, csvDirPath, sql }) {
         if (ast.type === AstType.Insert) {
             const tableNames = ast.table.map((table) => table.table);
             const returning = await awaitedBlockingMap(tableNames, async (tableName) => {
@@ -24,23 +24,23 @@ export const rowInsertHandler = defineAstHandler({
                    csvFilePath: tableFilePath,
                    sanitizedTableName,
                });
-                const
-                await appendCsvRow(values, tableFilePath);
+                const newRow = sortValues({
+                    csvFileHeaderOrder,
+                    sqlQueryHeaderOrder: ast.columns || csvFileHeaderOrder,
+                    from: {
+                        sqlQuery: rawValues,
+                    },
+                    unconsumedInterpolationValues: sql.unconsumedValues,
+                });
+                await appendCsvRow(newRow, tableFilePath);
                 if (ast.returning) {
                     return sortValues({
                         csvFileHeaderOrder,
                         sqlQueryHeaderOrder: ast.returning.columns.map((column) => column.expr.column),
                         from: {
-                            csvFile:
+                            csvFile: newRow,
                         },
+                        unconsumedInterpolationValues: undefined,
                     });
                 }
                 else {
package/dist/engine/handlers/row-select.handler.js
CHANGED

@@ -1,9 +1,9 @@
 import { check } from '@augment-vir/assert';
 import { awaitedBlockingMap } from '@augment-vir/common';
 import { nameCsvTableFile, readCsvFile, readCsvHeaders, writeCsvFile } from '../../csv/csv-file.js';
-import { sortValues } from '../../csv/csv-text.js';
 import { AstType } from '../../sql/ast.js';
 import { defineAstHandler } from '../define-ast-handler.js';
+import { sortValues } from '../sort-values.js';
 import { findWhereMatches } from '../where-matcher.js';
 /**
  * Handles SQL selection.
@@ -12,7 +12,7 @@ import { findWhereMatches } from '../where-matcher.js';
  */
 export const rowSelectHandler = defineAstHandler({
     name: 'row-select',
-    async handler({ ast, csvDirPath }) {
+    async handler({ ast, csvDirPath, sql }) {
         if (ast.type === AstType.Select) {
             const tableNames = ast.from.map((table) => table.table);
             const allSelections = await awaitedBlockingMap(tableNames, async (tableName) => {
@@ -35,6 +35,7 @@ export const rowSelectHandler = defineAstHandler({
                     from: {
                         csvFile: row,
                     },
+                    unconsumedInterpolationValues: sql.unconsumedValues,
                 }));
                 await writeCsvFile(tableFilePath, csvContents);
                 return selection;
package/dist/engine/handlers/row-update.handler.js
CHANGED

@@ -1,10 +1,10 @@
 import { assertWrap, check } from '@augment-vir/assert';
 import { awaitedBlockingMap } from '@augment-vir/common';
 import { createCsvHeaderMaps, nameCsvTableFile, readCsvFile, readCsvHeaders, writeCsvFile, } from '../../csv/csv-file.js';
-import { sortValues } from '../../csv/csv-text.js';
 import { CsvColumnDoesNotExistError } from '../../errors/csv.error.js';
 import { AstType } from '../../sql/ast.js';
 import { defineAstHandler } from '../define-ast-handler.js';
+import { sortValues } from '../sort-values.js';
 import { findWhereMatches } from '../where-matcher.js';
 /**
  * Handles updating rows.
@@ -13,7 +13,7 @@ import { findWhereMatches } from '../where-matcher.js';
  */
 export const rowUpdateHandler = defineAstHandler({
     name: 'row-update',
-    async handler({ ast, csvDirPath }) {
+    async handler({ ast, csvDirPath, sql }) {
         if (ast.type === AstType.Update) {
             const tableNames = ast.table.map((table) => table.table);
             const returning = await awaitedBlockingMap(tableNames, async (tableName) => {
@@ -40,7 +40,7 @@ export const rowUpdateHandler = defineAstHandler({
                         row[headerIndex] = set.value.value;
                     });
                 });
-                const
+                const updatedRow = returningRequirement
                     ? csvContents
                         .filter((row, index) => rowIndexesToUpdate.includes(index))
                         .map((row) => sortValues({
@@ -49,10 +49,11 @@ export const rowUpdateHandler = defineAstHandler({
                             from: {
                                 csvFile: row,
                             },
+                            unconsumedInterpolationValues: sql.unconsumedValues,
                         }))
                     : undefined;
                 await writeCsvFile(tableFilePath, csvContents);
-                return
+                return updatedRow;
             });
             return returning.flat().filter(check.isTruthy);
         }
package/dist/engine/sort-values.d.ts
ADDED

@@ -0,0 +1,18 @@
+import { type RequireExactlyOne } from 'type-fest';
+import { type ConsumableValue } from '../sql/sql.js';
+/**
+ * Sorts values for CSV insertion or reading and handle interpolated values.
+ *
+ * @category Internal
+ */
+export declare function sortValues({ csvFileHeaderOrder, sqlQueryHeaderOrder, from, unconsumedInterpolationValues, }: Readonly<{
+    csvFileHeaderOrder: ReadonlyArray<string>;
+    sqlQueryHeaderOrder: ReadonlyArray<string>;
+    from: RequireExactlyOne<{
+        /** When a CSV value array is provided, they are sorted to the SQL header order. */
+        csvFile: ReadonlyArray<string>;
+        /** When a SQL value array is provided, they are sorted to the CSV header order. */
+        sqlQuery: ReadonlyArray<string>;
+    }>;
+    unconsumedInterpolationValues: undefined | ConsumableValue[];
+}>): string[];
package/dist/engine/sort-values.js
ADDED

@@ -0,0 +1,38 @@
+/**
+ * Sorts values for CSV insertion or reading and handle interpolated values.
+ *
+ * @category Internal
+ */
+export function sortValues({ csvFileHeaderOrder, sqlQueryHeaderOrder, from, unconsumedInterpolationValues, }) {
+    const fromOrder = from.sqlQuery ? sqlQueryHeaderOrder : csvFileHeaderOrder;
+    const toOrder = (from.sqlQuery ? csvFileHeaderOrder : sqlQueryHeaderOrder).flatMap((header) => {
+        if (header === '*') {
+            return csvFileHeaderOrder;
+        }
+        else {
+            return header;
+        }
+    });
+    const values = (from.csvFile || from.sqlQuery).map((value) => {
+        if (value === '?') {
+            if (unconsumedInterpolationValues) {
+                if (unconsumedInterpolationValues.length) {
+                    return unconsumedInterpolationValues.shift() || '';
+                }
+                else {
+                    throw new Error('Encountered ? but all interpolation values have already been used.');
+                }
+            }
+            else {
+                throw new Error('Encountered ? but received no interpolation values.');
+            }
+        }
+        else {
+            return value;
+        }
+    });
+    return toOrder.map((header) => {
+        const sourceIndex = fromOrder.indexOf(header);
+        return values[sourceIndex] ?? '';
+    });
+}
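Taken together, these changes move `sortValues` out of the CSV layer and teach it to resolve `?` interpolation placeholders. As a rough, hedged illustration of the reordering behavior (the column names and row below are made up, and the import assumes the package root re-export added in `package/dist/index.d.ts` further down):

```ts
import { sortValues } from 'csv-sql-engine';

// Hypothetical SELECT-style reordering: a CSV row stored as id,name,email is
// reordered into the column order a SQL query asked for. A '*' entry in
// sqlQueryHeaderOrder would expand to every CSV column instead.
const selected = sortValues({
    csvFileHeaderOrder: ['id', 'name', 'email'],
    sqlQueryHeaderOrder: ['email', 'id'],
    from: {
        csvFile: ['42', 'Ada', 'ada@example.com'],
    },
    // A plain CSV row contains no '?' placeholders, so nothing needs consuming;
    // the row-insert handler's RETURNING branch above passes undefined the same way.
    unconsumedInterpolationValues: undefined,
});

console.log(selected); // ['ada@example.com', '42']
```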
package/dist/index.d.ts
CHANGED

@@ -11,6 +11,7 @@ export * from './engine/handlers/table-alter.handler.js';
 export * from './engine/handlers/table-create.handler.js';
 export * from './engine/handlers/table-drop.handler.js';
 export * from './engine/params.js';
+export * from './engine/sort-values.js';
 export * from './engine/where-matcher.js';
 export * from './errors/csv-sql-engine.error.js';
 export * from './errors/csv.error.js';
package/dist/index.js
CHANGED

@@ -11,6 +11,7 @@ export * from './engine/handlers/table-alter.handler.js';
 export * from './engine/handlers/table-create.handler.js';
 export * from './engine/handlers/table-drop.handler.js';
 export * from './engine/params.js';
+export * from './engine/sort-values.js';
 export * from './engine/where-matcher.js';
 export * from './errors/csv-sql-engine.error.js';
 export * from './errors/csv.error.js';
package/dist/sql/sql.d.ts
CHANGED

@@ -1,5 +1,11 @@
-import { type
+import { type Branded } from '@augment-vir/common';
 import { Sql as OriginalSql } from 'sql-template-tag';
+/**
+ * Used to mark consumable values so we don't accidentally assign the wrong array for mutation.
+ *
+ * @category Internal
+ */
+export type ConsumableValue = Branded<string, 'consumable-values'>;
 /**
  * A SQL command's strings and values.
  *
@@ -7,6 +13,9 @@ import { Sql as OriginalSql } from 'sql-template-tag';
  */
 export declare class Sql extends OriginalSql {
     values: string[];
+    /** This will be mutated by whatever is reading this SQL. */
+    unconsumedValues: ConsumableValue[];
+    constructor(rawStrings: readonly string[], rawValues: readonly (Sql | string)[]);
 }
 /**
  * Parses a SQL string with interpolations extracted into values so that they can be properly
@@ -14,7 +23,7 @@ export declare class Sql extends OriginalSql {
  *
  * @category SQL
  */
-export declare function sql(strings: ReadonlyArray<string>, ...values: Array<
+export declare function sql(strings: ReadonlyArray<string>, ...values: Array<string | number | Sql>): Sql;
 /**
  * Creates a raw, _unsafe_, {@link Sql} command. Prefer {@link sql} whenever possible.
  *
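The `ConsumableValue` brand above exists purely so the mutable `unconsumedValues` copy cannot be mixed up with the original `values` array. A small sketch of the idea, assuming only the `Branded` helper imported above; the local alias and variable names are illustrative:

```ts
import { type Branded } from '@augment-vir/common';

// Mirrors the exported ConsumableValue type: a string that carries a
// compile-time-only brand.
type ConsumableValue = Branded<string, 'consumable-values'>;

declare const plainValues: string[];
declare const consumableCopy: ConsumableValue[];

// Only the branded copy may be consumed (mutated via shift) by sortValues-like code.
function consumeNext(values: ConsumableValue[]): ConsumableValue | undefined {
    return values.shift();
}

consumeNext(consumableCopy); // OK
// consumeNext(plainValues); // Type error: a plain string[] is not branded.
```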
package/dist/sql/sql.js
CHANGED

@@ -5,6 +5,12 @@ import { Sql as OriginalSql } from 'sql-template-tag';
  * @category Internal
  */
 export class Sql extends OriginalSql {
+    /** This will be mutated by whatever is reading this SQL. */
+    unconsumedValues;
+    constructor(rawStrings, rawValues) {
+        super(rawStrings, rawValues);
+        this.unconsumedValues = [...this.values];
+    }
 }
 /**
  * Parses a SQL string with interpolations extracted into values so that they can be properly
@@ -13,7 +19,14 @@ export class Sql extends OriginalSql {
  * @category SQL
  */
 export function sql(strings, ...values) {
-    return new Sql(strings, values)
+    return new Sql(strings, values.map((value) => {
+        if (value instanceof Sql) {
+            return value;
+        }
+        else {
+            return String(value);
+        }
+    }));
 }
 /**
  * Creates a raw, _unsafe_, {@link Sql} command. Prefer {@link sql} whenever possible.
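End to end, the new plumbing looks roughly like this. A minimal sketch, assuming the package root also re-exports `sql` (only the new `sortValues` re-export is visible in this diff) and using a made-up users table:

```ts
import { sql, sortValues } from 'csv-sql-engine';

// Tagged-template usage: the updated sql() stringifies non-Sql interpolations,
// and the new Sql constructor copies them onto unconsumedValues.
const query = sql`INSERT INTO users (name, email) VALUES (${'Ada'}, ${'ada@example.com'})`;
// query.values:           ['Ada', 'ada@example.com']
// query.unconsumedValues: ['Ada', 'ada@example.com'] (a separate, mutable copy)

// The row handlers pass that copy into sortValues, which shifts one value off it
// for each '?' placeholder it meets among the parsed statement's values.
const newRow = sortValues({
    csvFileHeaderOrder: ['id', 'name', 'email'],
    sqlQueryHeaderOrder: ['name', 'email'],
    from: {
        sqlQuery: ['?', '?'],
    },
    unconsumedInterpolationValues: query.unconsumedValues,
});

console.log(newRow); // ['', 'Ada', 'ada@example.com'] (CSV order; no id was provided)
```

Because `sortValues` consumes the array in place, each placeholder is only ever filled once, which is exactly the mix-up the `ConsumableValue` brand guards against.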