@drax/crud-back 3.9.0 → 3.11.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/builders/CrudSchemaBuilder.js +3 -0
- package/dist/controllers/AbstractFastifyController.js +110 -5
- package/dist/imports/AbstractImport.js +44 -0
- package/dist/imports/ImportCsv.js +79 -0
- package/dist/imports/ImportCsvReport.js +54 -0
- package/dist/imports/ImportJson.js +14 -0
- package/dist/regexs/QueryFilterRegex.js +1 -1
- package/dist/schemas/FindSchema.js +1 -1
- package/dist/schemas/GroupBySchema.js +1 -1
- package/dist/schemas/PaginateSchema.js +1 -1
- package/dist/services/AbstractService.js +39 -0
- package/package.json +4 -4
- package/src/builders/CrudSchemaBuilder.ts +4 -0
- package/src/controllers/AbstractFastifyController.ts +125 -6
- package/src/imports/AbstractImport.ts +73 -0
- package/src/imports/ImportCsv.ts +102 -0
- package/src/imports/ImportCsvReport.ts +83 -0
- package/src/imports/ImportJson.ts +20 -0
- package/src/regexs/QueryFilterRegex.ts +1 -1
- package/src/schemas/FindSchema.ts +1 -1
- package/src/schemas/GroupBySchema.ts +1 -1
- package/src/schemas/PaginateSchema.ts +1 -1
- package/src/services/AbstractService.ts +53 -1
- package/test/controllers/PersonController.test.ts +64 -0
- package/test/services/AbstractService.test.ts +21 -10
- package/tsconfig.tsbuildinfo +1 -1
- package/types/builders/CrudSchemaBuilder.d.ts.map +1 -1
- package/types/controllers/AbstractFastifyController.d.ts +4 -1
- package/types/controllers/AbstractFastifyController.d.ts.map +1 -1
- package/types/imports/AbstractImport.d.ts +20 -0
- package/types/imports/AbstractImport.d.ts.map +1 -0
- package/types/imports/ImportCsv.d.ts +13 -0
- package/types/imports/ImportCsv.d.ts.map +1 -0
- package/types/imports/ImportCsvReport.d.ts +22 -0
- package/types/imports/ImportCsvReport.d.ts.map +1 -0
- package/types/imports/ImportJson.d.ts +7 -0
- package/types/imports/ImportJson.d.ts.map +1 -0
- package/types/regexs/QueryFilterRegex.d.ts.map +1 -1
- package/types/services/AbstractService.d.ts +3 -1
- package/types/services/AbstractService.d.ts.map +1 -1
|
@@ -0,0 +1,73 @@
|
|
|
1
|
+
import {setNestedValue} from "@drax/common-back";
|
|
2
|
+
|
|
3
|
+
interface ImportOptions {
|
|
4
|
+
content: string
|
|
5
|
+
separator?: string
|
|
6
|
+
}
|
|
7
|
+
|
|
8
|
+
interface ParsedImportRow<T = any> {
|
|
9
|
+
rawValues: string[]
|
|
10
|
+
item: T
|
|
11
|
+
}
|
|
12
|
+
|
|
13
|
+
class AbstractImport {
|
|
14
|
+
|
|
15
|
+
protected content: string
|
|
16
|
+
protected separator: string
|
|
17
|
+
|
|
18
|
+
constructor(options: ImportOptions) {
|
|
19
|
+
this.content = options.content;
|
|
20
|
+
this.separator = options.separator || ';';
|
|
21
|
+
}
|
|
22
|
+
|
|
23
|
+
parseValue(value: string): any {
|
|
24
|
+
const trimmedValue = value.trim();
|
|
25
|
+
|
|
26
|
+
if (trimmedValue === '') {
|
|
27
|
+
return '';
|
|
28
|
+
}
|
|
29
|
+
|
|
30
|
+
if (trimmedValue === 'null') {
|
|
31
|
+
return null;
|
|
32
|
+
}
|
|
33
|
+
|
|
34
|
+
if (trimmedValue === 'true') {
|
|
35
|
+
return true;
|
|
36
|
+
}
|
|
37
|
+
|
|
38
|
+
if (trimmedValue === 'false') {
|
|
39
|
+
return false;
|
|
40
|
+
}
|
|
41
|
+
|
|
42
|
+
if (/^-?\d+(\.\d+)?$/.test(trimmedValue)) {
|
|
43
|
+
return Number(trimmedValue);
|
|
44
|
+
}
|
|
45
|
+
|
|
46
|
+
if (
|
|
47
|
+
(trimmedValue.startsWith('{') && trimmedValue.endsWith('}')) ||
|
|
48
|
+
(trimmedValue.startsWith('[') && trimmedValue.endsWith(']'))
|
|
49
|
+
) {
|
|
50
|
+
try {
|
|
51
|
+
return JSON.parse(trimmedValue);
|
|
52
|
+
} catch {
|
|
53
|
+
return value;
|
|
54
|
+
}
|
|
55
|
+
}
|
|
56
|
+
|
|
57
|
+
return value;
|
|
58
|
+
}
|
|
59
|
+
|
|
60
|
+
assignNestedValue(record: Record<string, any>, key: string, value: any) {
|
|
61
|
+
if (key.includes('.')) {
|
|
62
|
+
setNestedValue(record, key, value);
|
|
63
|
+
return;
|
|
64
|
+
}
|
|
65
|
+
|
|
66
|
+
record[key] = value;
|
|
67
|
+
}
|
|
68
|
+
}
|
|
69
|
+
|
|
70
|
+
export type {ImportOptions}
|
|
71
|
+
export type {ParsedImportRow}
|
|
72
|
+
export {AbstractImport}
|
|
73
|
+
export default AbstractImport;
|
|
@@ -0,0 +1,102 @@
|
|
|
1
|
+
import AbstractImport, {ImportOptions, ParsedImportRow} from "./AbstractImport.js";
|
|
2
|
+
|
|
3
|
+
interface ParsedCsvImport<T = any> {
|
|
4
|
+
headers: string[]
|
|
5
|
+
rows: ParsedImportRow<T>[]
|
|
6
|
+
}
|
|
7
|
+
|
|
8
|
+
class ImportCsv extends AbstractImport {
|
|
9
|
+
|
|
10
|
+
constructor(options: ImportOptions) {
|
|
11
|
+
super(options);
|
|
12
|
+
}
|
|
13
|
+
|
|
14
|
+
process(): any[] {
|
|
15
|
+
return this.processDetailed().rows.map(row => row.item);
|
|
16
|
+
}
|
|
17
|
+
|
|
18
|
+
processDetailed(): ParsedCsvImport {
|
|
19
|
+
const rows = this.parseRows(this.content);
|
|
20
|
+
if (rows.length === 0) {
|
|
21
|
+
return {headers: [], rows: []};
|
|
22
|
+
}
|
|
23
|
+
|
|
24
|
+
const [headers, ...dataRows] = rows;
|
|
25
|
+
|
|
26
|
+
return {
|
|
27
|
+
headers,
|
|
28
|
+
rows: dataRows
|
|
29
|
+
.filter(row => row.some(value => value.trim() !== ''))
|
|
30
|
+
.map((row): ParsedImportRow => {
|
|
31
|
+
const item: Record<string, any> = {};
|
|
32
|
+
|
|
33
|
+
headers.forEach((header, index) => {
|
|
34
|
+
const normalizedHeader = header.trim();
|
|
35
|
+
if (!normalizedHeader) {
|
|
36
|
+
return;
|
|
37
|
+
}
|
|
38
|
+
|
|
39
|
+
this.assignNestedValue(item, normalizedHeader, this.parseValue(row[index] ?? ''));
|
|
40
|
+
});
|
|
41
|
+
|
|
42
|
+
return {
|
|
43
|
+
rawValues: row,
|
|
44
|
+
item
|
|
45
|
+
};
|
|
46
|
+
})
|
|
47
|
+
};
|
|
48
|
+
}
|
|
49
|
+
|
|
50
|
+
protected parseRows(content: string): string[][] {
|
|
51
|
+
const rows: string[][] = [];
|
|
52
|
+
let currentRow: string[] = [];
|
|
53
|
+
let currentValue = '';
|
|
54
|
+
let inQuotes = false;
|
|
55
|
+
|
|
56
|
+
for (let index = 0; index < content.length; index++) {
|
|
57
|
+
const char = content[index];
|
|
58
|
+
const nextChar = content[index + 1];
|
|
59
|
+
|
|
60
|
+
if (char === '"') {
|
|
61
|
+
if (inQuotes && nextChar === '"') {
|
|
62
|
+
currentValue += '"';
|
|
63
|
+
index++;
|
|
64
|
+
} else {
|
|
65
|
+
inQuotes = !inQuotes;
|
|
66
|
+
}
|
|
67
|
+
continue;
|
|
68
|
+
}
|
|
69
|
+
|
|
70
|
+
if (char === this.separator && !inQuotes) {
|
|
71
|
+
currentRow.push(currentValue);
|
|
72
|
+
currentValue = '';
|
|
73
|
+
continue;
|
|
74
|
+
}
|
|
75
|
+
|
|
76
|
+
if ((char === '\n' || char === '\r') && !inQuotes) {
|
|
77
|
+
if (char === '\r' && nextChar === '\n') {
|
|
78
|
+
index++;
|
|
79
|
+
}
|
|
80
|
+
|
|
81
|
+
currentRow.push(currentValue);
|
|
82
|
+
if (currentRow.some(value => value !== '')) {
|
|
83
|
+
rows.push(currentRow);
|
|
84
|
+
}
|
|
85
|
+
currentRow = [];
|
|
86
|
+
currentValue = '';
|
|
87
|
+
continue;
|
|
88
|
+
}
|
|
89
|
+
|
|
90
|
+
currentValue += char;
|
|
91
|
+
}
|
|
92
|
+
|
|
93
|
+
currentRow.push(currentValue);
|
|
94
|
+
if (currentRow.some(value => value !== '')) {
|
|
95
|
+
rows.push(currentRow);
|
|
96
|
+
}
|
|
97
|
+
|
|
98
|
+
return rows;
|
|
99
|
+
}
|
|
100
|
+
}
|
|
101
|
+
|
|
102
|
+
export default ImportCsv;
|
|
@@ -0,0 +1,83 @@
|
|
|
1
|
+
import * as fs from 'fs';
import AbstractExport from "../exports/AbstractExport.js";

// One reported row: the original cell values plus the import outcome.
interface ImportCsvReportRow {
    rawValues: string[]
    status: 'success' | 'error'
    error?: string
}

interface ImportCsvReportOptions {
    destinationPath: string
    fileName?: string
    headers: string[]
    separator?: string
    rows: ImportCsvReportRow[]
}

/**
 * Writes a CSV report of an import run: the original columns followed by
 * an 'import_status' column and an 'error' column per row.
 *
 * NOTE(review): relies on AbstractExport providing `generateFilePath`,
 * `relativeFilePath`, `destinationPath`, `fileName` and `headers` — not
 * visible here; confirm against ../exports/AbstractExport.
 */
class ImportCsvReport extends AbstractExport {

    protected separator: string
    protected rows: ImportCsvReportRow[]

    constructor(options: ImportCsvReportOptions) {
        super({
            // No source cursor: the report rows are supplied up front.
            cursor: [],
            destinationPath: options.destinationPath,
            headers: options.headers,
            fileName: options.fileName || 'import_report',
        });
        // '||' on purpose: an empty-string separator also falls back to ';'.
        this.separator = options.separator || ';';
        this.rows = options.rows;
    }

    /**
     * Write the report file and resolve with a summary (paths, row count,
     * elapsed time). Rejects on stream errors or synchronous failures.
     */
    process(): Promise<any> {
        return new Promise((resolve, reject) => {
            try {
                // Presumably sets this.relativeFilePath with a .csv
                // extension — TODO confirm in AbstractExport.
                this.generateFilePath('csv')
                const start = Date.now();
                const writableStream = fs.createWriteStream(this.relativeFilePath);

                writableStream.on('error', reject);
                writableStream.on('finish', () => resolve({
                    status: 'success',
                    destinationPath: this.destinationPath,
                    fileName: this.fileName,
                    filePath: this.destinationPath + '/' + this.fileName,
                    relativeFilePath: this.relativeFilePath,
                    rowCount: this.rows.length,
                    time: Date.now() - start,
                    message: 'Import report generated',
                }))

                // Header line: original headers plus the two report columns.
                writableStream.write([...this.headers, 'import_status', 'error'].join(this.separator) + '\n');

                for (const row of this.rows) {
                    const values = [...row.rawValues, row.status, row.error || ''].map(value => this.escapeCsvValue(value));
                    writableStream.write(values.join(this.separator) + '\n');
                }

                writableStream.end();
            } catch (e) {
                reject(e);
            }
        })
    }

    /**
     * CSV-escape a single value: null/undefined become '', and values
     * containing the separator, quotes or newlines are wrapped in double
     * quotes with inner quotes doubled.
     */
    protected escapeCsvValue(value: any): string {
        let formattedValue = value === null || value === undefined ? '' : String(value);

        if (
            formattedValue.includes(this.separator) ||
            formattedValue.includes('"') ||
            formattedValue.includes('\n') ||
            formattedValue.includes('\r')
        ) {
            formattedValue = '"' + formattedValue.replace(/"/g, '""') + '"';
        }

        return formattedValue;
    }
}

export default ImportCsvReport;
|
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
import AbstractImport, {ImportOptions} from "./AbstractImport.js";
|
|
2
|
+
|
|
3
|
+
class ImportJson extends AbstractImport {
|
|
4
|
+
|
|
5
|
+
constructor(options: ImportOptions) {
|
|
6
|
+
super(options);
|
|
7
|
+
}
|
|
8
|
+
|
|
9
|
+
process(): any[] {
|
|
10
|
+
const parsedContent = JSON.parse(this.content);
|
|
11
|
+
|
|
12
|
+
if (!Array.isArray(parsedContent)) {
|
|
13
|
+
throw new Error('Invalid JSON import format. Expected an array of objects');
|
|
14
|
+
}
|
|
15
|
+
|
|
16
|
+
return parsedContent;
|
|
17
|
+
}
|
|
18
|
+
}
|
|
19
|
+
|
|
20
|
+
export default ImportJson;
|
|
@@ -1,4 +1,4 @@
|
|
|
1
|
-
const QueryFilterRegex = /^(?:[a-zA-Z0-9_.\-]+;(?:eq|like|ne|in|nin|gt|gte|lt|lte|empty);[a-zA-Z0-9_.\-:\., áéíóúÁÉÍÓÚ]*)(?:\|[a-zA-Z0-9_.\-]+;(?:eq|like|ne|in|nin|gt|gte|lt|lte|empty);[a-zA-Z0-9_.\-:\., áéíóúÁÉÍÓÚ]*)*$/
|
|
1
|
+
const QueryFilterRegex = /^(?:[a-zA-Z0-9_.\-]+;(?:eq|like|ne|in|nin|gt|gte|lt|lte|empty);[a-zA-Z0-9_.\-:\., áéíóúÁÉÍÓÚ]*(?:;[a-zA-Z0-9_.\-]+)?)(?:\|[a-zA-Z0-9_.\-]+;(?:eq|like|ne|in|nin|gt|gte|lt|lte|empty);[a-zA-Z0-9_.\-:\., áéíóúÁÉÍÓÚ]*(?:;[a-zA-Z0-9_.\-]+)?)*$/
|
|
2
2
|
|
|
3
3
|
export default QueryFilterRegex
|
|
4
4
|
export {QueryFilterRegex}
|
|
@@ -6,7 +6,7 @@ const FindQuerySchema = z.object({
|
|
|
6
6
|
orderBy: z.string().optional(),
|
|
7
7
|
order: z.enum(["asc", "desc"]).optional(),
|
|
8
8
|
search: z.string().optional(),
|
|
9
|
-
filters: z.string().regex(QueryFilterRegex).optional().describe("Format: field;operator;value|field;operator;value|..."),
|
|
9
|
+
filters: z.string().regex(QueryFilterRegex).optional().describe("Format: field;operator;value[;orGroup]|field;operator;value[;orGroup]|..."),
|
|
10
10
|
});
|
|
11
11
|
|
|
12
12
|
|
|
@@ -3,7 +3,7 @@ import QueryFilterRegex from "../regexs/QueryFilterRegex.js";
|
|
|
3
3
|
|
|
4
4
|
// Query payload for groupBy endpoints: which fields to bucket on plus an
// optional serialized filter string validated by QueryFilterRegex.
const GroupByQuerySchema = z.object({
    // Between 1 and 10 field paths to group by.
    fields: z.array(z.string()).min(1).max(10),
    // Conditions are field;operator;value with an optional trailing
    // ;orGroup tag, joined by '|'.
    filters: z.string().regex(QueryFilterRegex).optional().describe("Format: field;operator;value[;orGroup]|field;operator;value[;orGroup]|..."),
});
|
|
8
8
|
|
|
9
9
|
|
|
@@ -7,7 +7,7 @@ const PaginateQuerySchema = z.object({
|
|
|
7
7
|
orderBy: z.string().optional(),
|
|
8
8
|
order: z.enum(["asc", "desc"]).optional(),
|
|
9
9
|
search: z.string().optional(),
|
|
10
|
-
filters: z.string().regex(QueryFilterRegex).optional().describe("Format: field;operator;value|field;operator;value|..."),
|
|
10
|
+
filters: z.string().regex(QueryFilterRegex).optional().describe("Format: field;operator;value[;orGroup]|field;operator;value[;orGroup]|..."),
|
|
11
11
|
});
|
|
12
12
|
|
|
13
13
|
|
|
@@ -6,13 +6,15 @@ import type {
|
|
|
6
6
|
IDraxPaginateResult,
|
|
7
7
|
IDraxFindOptions,
|
|
8
8
|
IDraxExportOptions,
|
|
9
|
-
IDraxCrudRepository, IDraxFieldFilter, IDraxGroupByOptions
|
|
9
|
+
IDraxCrudRepository, IDraxFieldFilter, IDraxGroupByOptions, IDraxImportOptions, IDraxImportResult
|
|
10
10
|
} from "@drax/crud-share";
|
|
11
11
|
import {IDraxCrudService} from "@drax/crud-share";
|
|
12
12
|
import ExportCsv from "../exports/ExportCsv.js";
|
|
13
13
|
import ExportJson from "../exports/ExportJson.js";
|
|
14
14
|
import {IDraxExportResult} from "@drax/crud-share";
|
|
15
15
|
import {IDraxFindOneOptions} from "@drax/crud-share/types/interfaces/IDraxFindOneOptions";
|
|
16
|
+
import ImportCsv from "../imports/ImportCsv.js";
|
|
17
|
+
import ImportJson from "../imports/ImportJson.js";
|
|
16
18
|
|
|
17
19
|
abstract class AbstractService<T, C, U> implements IDraxCrudService<T, C, U> {
|
|
18
20
|
|
|
@@ -476,6 +478,56 @@ abstract class AbstractService<T, C, U> implements IDraxCrudService<T, C, U> {
|
|
|
476
478
|
|
|
477
479
|
}
|
|
478
480
|
|
|
481
|
+
/**
 * Parse raw import content into candidate creation DTOs.
 *
 * @param format 'JSON' (array of objects) or 'CSV'; defaults to 'JSON'.
 * @param content raw file content to parse.
 * @param separator CSV column separator; defaults to ';'.
 * @throws Error when the format is not supported.
 */
parseImport({
    format = 'JSON',
    content,
    separator = ';'
}: IDraxImportOptions): C[] {
    // Dispatch table instead of a switch: one entry per supported format.
    const importerFactories: Record<string, () => ImportCsv | ImportJson> = {
        JSON: () => new ImportJson({content, separator}),
        CSV: () => new ImportCsv({content, separator})
    };

    const createImporter = importerFactories[format];
    if (!createImporter) {
        throw new Error(`Unsupported import format: ${format}`);
    }

    return createImporter().process() as C[];
}
|
|
501
|
+
|
|
502
|
+
async import({
|
|
503
|
+
format = 'JSON',
|
|
504
|
+
content,
|
|
505
|
+
separator = ';'
|
|
506
|
+
}: IDraxImportOptions): Promise<IDraxImportResult> {
|
|
507
|
+
try {
|
|
508
|
+
const start = Date.now();
|
|
509
|
+
const items = this.parseImport({format, content, separator});
|
|
510
|
+
|
|
511
|
+
for (const item of items) {
|
|
512
|
+
await this.create(item as C);
|
|
513
|
+
}
|
|
514
|
+
|
|
515
|
+
return {
|
|
516
|
+
status: 'success',
|
|
517
|
+
rowCount: items.length,
|
|
518
|
+
time: Date.now() - start,
|
|
519
|
+
message: 'Import successful',
|
|
520
|
+
};
|
|
521
|
+
} catch (e) {
|
|
522
|
+
console.error("Error import", {
|
|
523
|
+
name: e?.name,
|
|
524
|
+
message: e?.message,
|
|
525
|
+
stack: e?.stack,
|
|
526
|
+
});
|
|
527
|
+
throw e;
|
|
528
|
+
}
|
|
529
|
+
}
|
|
530
|
+
|
|
479
531
|
}
|
|
480
532
|
|
|
481
533
|
export default AbstractService
|
|
@@ -466,6 +466,70 @@ describe("Person Controller Test", function () {
|
|
|
466
466
|
expect(findByResult[0].fullname).toBe("Active Person")
|
|
467
467
|
})
|
|
468
468
|
|
|
469
|
+
// Verifies the new ;orGroup filter syntax: the two 'group1' conditions
// are OR-ed together while the ungrouped 'live' condition stays AND-ed.
it("should create and find people with filters grouped by orGroup", async () => {
    const { accessToken } = await testSetup.rootUserLogin()
    await testSetup.dropCollection('Person')

    // Two live people (one matching each group1 condition) and two
    // non-live people that must be excluded by the AND-ed live filter.
    const entityData = [
        { fullname: "Hero Person", race: "human", live: true, address: defaultAddress },
        { fullname: "Mage Person", race: "elf", live: true, address: defaultAddress },
        { fullname: "Hidden Hero", race: "orc", live: false, address: defaultAddress },
        { fullname: "Hidden Rogue", race: "human", live: false, address: defaultAddress }
    ]

    for (const data of entityData) {
        await testSetup.fastifyInstance.inject({
            method: 'POST',
            url: '/api/person',
            payload: data,
            headers: { Authorization: `Bearer ${accessToken}` }
        })
    }

    // (fullname like Hero OR race eq elf) AND live eq true
    const findByResp = await testSetup.fastifyInstance.inject({
        method: 'GET',
        url: '/api/person/find?filters=fullname;like;Hero;group1|race;eq;elf;group1|live;eq;true',
        headers: { Authorization: `Bearer ${accessToken}` }
    })

    const findByResult = await findByResp.json()
    expect(findByResp.statusCode).toBe(200)
    expect(findByResult.length).toBe(2)
    expect(findByResult.map((item: any) => item.fullname).sort()).toEqual(["Hero Person", "Mage Person"])
})
|
|
500
|
+
|
|
501
|
+
// Verifies that a text search and orGroup filters compose: both the
// search term and the (grouped OR) + AND filter conditions must hold.
it("should combine search with orGroup filters without overriding either condition", async () => {
    const { accessToken } = await testSetup.rootUserLogin()
    await testSetup.dropCollection('Person')

    // "Hidden Elf" matches the filters but not the search; "Searchable
    // Rogue" matches the search but fails the filters.
    const entityData = [
        { fullname: "Searchable Hero", race: "human", live: true, address: defaultAddress },
        { fullname: "Searchable Elf", race: "elf", live: true, address: defaultAddress },
        { fullname: "Hidden Elf", race: "elf", live: true, address: defaultAddress },
        { fullname: "Searchable Rogue", race: "human", live: false, address: defaultAddress }
    ]

    for (const data of entityData) {
        await testSetup.fastifyInstance.inject({
            method: 'POST',
            url: '/api/person',
            payload: data,
            headers: { Authorization: `Bearer ${accessToken}` }
        })
    }

    // search=Searchable AND ((fullname like Hero OR race eq elf) AND live eq true)
    const paginateResp = await testSetup.fastifyInstance.inject({
        method: 'GET',
        url: '/api/person?search=Searchable&filters=fullname;like;Hero;group1|race;eq;elf;group1|live;eq;true',
        headers: { Authorization: `Bearer ${accessToken}` }
    })

    const paginateResult = await paginateResp.json()
    expect(paginateResp.statusCode).toBe(200)
    expect(paginateResult.total).toBe(2)
    expect(paginateResult.items.map((item: any) => item.fullname).sort()).toEqual(["Searchable Elf", "Searchable Hero"])
})
|
|
532
|
+
|
|
469
533
|
// 8. Create and Group By
|
|
470
534
|
it("should create and groupBy for people", async () => {
|
|
471
535
|
const { accessToken } = await testSetup.rootUserLogin()
|
|
@@ -1,12 +1,6 @@
|
|
|
1
1
|
import { test, assert } from 'vitest';
|
|
2
2
|
import MockRepository from "../_mocks/MockRepository.js";
|
|
3
3
|
import {AbstractService} from "../../src/services/AbstractService.js";
|
|
4
|
-
import {fileURLToPath} from "url";
|
|
5
|
-
import * as path from "path";
|
|
6
|
-
|
|
7
|
-
//@ts-ignore
|
|
8
|
-
const __filename = fileURLToPath(import.meta.url);
|
|
9
|
-
const __dirname = path.dirname(__filename);
|
|
10
4
|
|
|
11
5
|
const mockRepository = new MockRepository();
|
|
12
6
|
|
|
@@ -21,11 +15,29 @@ test('create', async () => {
|
|
|
21
15
|
assert.deepStrictEqual(item.name, 'John Doe');
|
|
22
16
|
})
|
|
23
17
|
|
|
18
|
+
// JSON import: a two-element array should yield rowCount 2.
test('import json', async () => {
    const result: any = await service.import({
        format: 'JSON',
        content: JSON.stringify([
            {name: 'John Doe'},
            {name: 'Jane Doe'}
        ])
    })

    assert.deepStrictEqual(result.rowCount, 2);
})
|
|
29
|
+
|
|
30
|
+
// CSV import: header + two data rows (with a dotted 'profile.age'
// column) should yield rowCount 2.
test('import csv', async () => {
    const result: any = await service.import({
        format: 'CSV',
        separator: ';',
        content: '_id;name;profile.age\n1;John Doe;32\n2;Jane Doe;28'
    })

    assert.deepStrictEqual(result.rowCount, 2);
})
|
|
39
|
+
|
|
40
|
+
test('export', async () => {
|
|
29
41
|
const result:any = await service.export(
|
|
30
42
|
{
|
|
31
43
|
format: 'CSV',
|
|
@@ -39,6 +51,5 @@ test('export', async () => {
|
|
|
39
51
|
|
|
40
52
|
console.log("result",result)
|
|
41
53
|
|
|
42
|
-
assert.deepStrictEqual(outputPath, result.outputPath);
|
|
43
54
|
assert.deepStrictEqual(2, result.rowCount);
|
|
44
55
|
})
|