@postxl/generator 0.60.7 → 0.61.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/generator.js +29 -36
- package/dist/generators/indices/businesslogic-actiontypes.generator.js +6 -6
- package/dist/generators/indices/businesslogic-update-clonecontext.generator.js +3 -3
- package/dist/generators/indices/businesslogic-update-module.generator.js +9 -9
- package/dist/generators/indices/businesslogic-update-service.generator.js +6 -6
- package/dist/generators/indices/businesslogic-view-module.generator.js +8 -8
- package/dist/generators/indices/businesslogic-view-service.generator.js +6 -6
- package/dist/generators/indices/data-types.generator.js +1 -1
- package/dist/generators/indices/datamock-module.generator.js +8 -32
- package/dist/generators/indices/datamocker.generator.js +3 -3
- package/dist/generators/indices/datamodule.generator.js +5 -4
- package/dist/generators/indices/dataservice.generator.js +4 -4
- package/dist/generators/indices/dispatcher-service.generator.js +11 -10
- package/dist/generators/indices/importexport-convert-import-functions.generator.js +7 -7
- package/dist/generators/indices/importexport-exporter-class.generator.js +6 -6
- package/dist/generators/indices/importexport-import-service.generator.js +14 -12
- package/dist/generators/indices/importexport-types.generator.js +2 -2
- package/dist/generators/indices/repositories.generator.d.ts +0 -7
- package/dist/generators/indices/repositories.generator.js +2 -16
- package/dist/generators/indices/seed-migration.generator.js +6 -4
- package/dist/generators/indices/testdata-service.generator.js +7 -7
- package/dist/generators/models/businesslogic-update.generator.js +24 -23
- package/dist/generators/models/businesslogic-view.generator.js +8 -9
- package/dist/generators/models/importexport-decoder.generator.d.ts +0 -7
- package/dist/generators/models/importexport-decoder.generator.js +2 -16
- package/dist/generators/models/repository.generator.js +5 -7
- package/dist/generators/models/route.generator.js +3 -3
- package/dist/generators/models/seed.generator.js +2 -2
- package/dist/generators/models/stub.generator.js +1 -1
- package/dist/lib/meta.d.ts +189 -231
- package/dist/lib/meta.js +78 -91
- package/dist/lib/schema/schema.d.ts +10 -3
- package/dist/lib/schema/types.d.ts +27 -0
- package/dist/lib/schema/types.js +28 -1
- package/package.json +1 -1
- package/dist/generators/indices/businesslogic-update-index.generator.d.ts +0 -9
- package/dist/generators/indices/businesslogic-update-index.generator.js +0 -23
- package/dist/generators/indices/businesslogic-view-index.generator.d.ts +0 -9
- package/dist/generators/indices/businesslogic-view-index.generator.js +0 -19
- package/dist/generators/indices/stubs.generator.d.ts +0 -16
- package/dist/generators/indices/stubs.generator.js +0 -29
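Most of the churn below follows a single refactor of the metadata objects in lib/meta: flat path accessors (the truncated `…filePath`/`…importPath`-style expressions visible on the removed lines) are replaced by `location` objects read as `location.path` and `location.import`. The package does not publish these types, so the sketch below only illustrates the shape the hunks imply; every name other than `path` and `import` is an assumption.

```ts
// Hypothetical reconstruction of the `location` shape implied by the hunks below.
// Only the `path` and `import` members are visible in the diff; the rest is assumed.
interface GeneratedFileLocation {
  /** File-system path of the generated file, e.g. what ImportsGenerator.from(...) receives */
  path: string
  /** Module specifier other generated files use when importing from this file */
  import: string
}

// Example of how a meta entry such as `meta.data.repository` appears to be structured now.
interface RepositoryMetaSketch {
  className: string
  location: GeneratedFileLocation
}
```

Both `location.path` and `location.import` appear as keys in the `addImports` calls below, so the exact rule for choosing between them is not recoverable from the diff alone.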
@@ -9,27 +9,29 @@ const string_1 = require("../../lib/utils/string");
  * Generates the Exporter class for the Import-Export module
  */
 function generateImportExportImportService({ models, meta }) {
-    const imports = imports_1.ImportsGenerator.from(meta.importExport.importService.
+    const imports = imports_1.ImportsGenerator.from(meta.importExport.importService.location.path);
     const { types, decoder, converterFunctions } = meta.importExport;
     const { delta_Fields, delta, delta_Model } = types;
     const { create, errors, unchanged, update } = delta_Model;
     const { nonUnique, invalidReference, invalidType, isRequiredDependency, missingField } = errors;
     const { dto } = meta.types;
     imports.addImports({
-        [meta.data.
+        [meta.data.dataService.location.import]: [meta.data.dataService.class],
+        [meta.data.types.location.import]: [(0, types_1.toAnnotatedTypeName)(meta.data.types.bulkMutation)],
         [meta.types.importPath]: [
             (0, types_1.toAnnotatedTypeName)(dto.genericModel),
             (0, types_1.toAnnotatedTypeName)(dto.idType),
             (0, types_1.toAnnotatedTypeName)(dto.update),
         ],
-        [meta.actions.
-        [
+        [meta.actions.execution.interfaceLocation.import]: [meta.actions.execution.interface],
+        [meta.actions.dispatcher.classLocation.import]: [meta.actions.dispatcher.class],
+        [types.location.path]: [
             (0, types_1.toAnnotatedTypeName)(delta_Fields),
             (0, types_1.toAnnotatedTypeName)(delta_Model.type),
             (0, types_1.toAnnotatedTypeName)(delta),
         ],
-        [decoder.
-        [converterFunctions.
+        [decoder.location.path]: [(0, types_1.toAnnotatedTypeName)(decoder.decodedPXLModelDataTypeName)],
+        [converterFunctions.location.path]: [converterFunctions.deltaToBulkMutations],
     });
     const resultAssignments = [];
     const detectDeltaFunctions = [];
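The hunk above (apparently importexport-import-service.generator.js) shows the import-header pattern this package uses throughout: `ImportsGenerator.from(<file being generated>)` followed by `addImports({ [moduleSpecifier]: symbols })`, where keys are import paths and values are one or more imported names. The real ImportsGenerator is internal to the package; the class below is only a stand-in written to illustrate that keyed-map pattern, and the paths and symbol names in the usage example are invented.

```ts
// Stand-in for the keyed-map import builder pattern seen above.
// NOT the package's ImportsGenerator; API surface and behaviour are assumptions.
class ImportMapBuilder {
  private readonly byModule = new Map<string, Set<string>>()

  private constructor(private readonly ownPath: string) {}

  static from(path: string): ImportMapBuilder {
    return new ImportMapBuilder(path)
  }

  addImports(map: Record<string, string | string[]>): this {
    for (const [from, items] of Object.entries(map)) {
      const names = this.byModule.get(from) ?? new Set<string>()
      for (const name of Array.isArray(items) ? items : [items]) names.add(name)
      this.byModule.set(from, names)
    }
    return this
  }

  generate(): string {
    return [...this.byModule.entries()]
      .map(([from, names]) => `import { ${[...names].join(', ')} } from '${from}'`)
      .join('\n')
  }
}

// Usage mirroring the shape of the hunk above (paths and symbol names are made up):
const header = ImportMapBuilder.from('src/import-export/import.service.ts')
  .addImports({
    '@backend/data': ['DataService'],
    './import-export.types': ['Delta', 'Delta_Fields'],
  })
  .generate()
console.log(header)
```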
@@ -448,7 +450,7 @@ function generateDetectDeltaFunction({ model, modelMeta, models, schemaMeta, imp
     const { types } = schemaMeta.importExport;
     const { delta_Model } = types;
     imports.addTypeImport({
-        from: types.
+        from: types.location.path,
         items: [modelMeta.importExport.delta_Model_Errors],
     });
     const returnType = `${delta_Model.type}<${model.typeName}, ${modelMeta.types.brandedIdType}, ${modelMeta.importExport.delta_Model_Errors}>[]`;
@@ -470,7 +472,7 @@ function generateDetectDeltaFunction({ model, modelMeta, models, schemaMeta, imp
         if (field.kind === 'scalar') {
             if (field.isUnique) {
                 imports.addTypeImport({
-                    from: schemaMeta.importExport.types.
+                    from: schemaMeta.importExport.types.location.path,
                     items: [delta_Model.errors.nonUnique.type],
                 });
                 const getByName = `getBy${(0, string_1.toPascalCase)(fieldName)}`;
@@ -482,7 +484,7 @@ function generateDetectDeltaFunction({ model, modelMeta, models, schemaMeta, imp
             }
             if (field.validation && field.validation.type === 'int') {
                 imports.addTypeImport({
-                    from: schemaMeta.importExport.types.
+                    from: schemaMeta.importExport.types.location.path,
                     items: [delta_Model.errors.invalidType.type],
                 });
                 sharedValidations.push(`this.validateInt({item, fieldName: '${fieldName}'})`);
@@ -491,7 +493,7 @@ function generateDetectDeltaFunction({ model, modelMeta, models, schemaMeta, imp
         else if (field.kind === 'relation') {
            const relatedModelMeta = (0, meta_1.getModelMetadata)({ model: field.relationToModel });
            imports.addTypeImport({
-                from: schemaMeta.importExport.types.
+                from: schemaMeta.importExport.types.location.path,
                items: [delta_Model.errors.invalidReference.type],
            });
            sharedValidations.push(`this.validateReferenceField({
@@ -514,7 +516,7 @@ function generateDetectDeltaFunction({ model, modelMeta, models, schemaMeta, imp
            continue;
        }
        imports.addTypeImport({
-            from: schemaMeta.importExport.types.
+            from: schemaMeta.importExport.types.location.path,
            items: [delta_Model.errors.isRequiredDependency.type],
        });
        const relatedModelMeta = (0, meta_1.getModelMetadata)({ model: relatedModel });
@@ -532,7 +534,7 @@ function generateDetectDeltaFunction({ model, modelMeta, models, schemaMeta, imp
     }
     if (requiredFields.length > 0) {
         imports.addTypeImport({
-            from: schemaMeta.importExport.types.
+            from: schemaMeta.importExport.types.location.path,
             items: [delta_Model.errors.missingField.type],
         });
     }
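The six hunks above make the same one-line change inside generateDetectDeltaFunction: the `from:` of each addTypeImport call now reads `schemaMeta.importExport.types.location.path`. Judging by the toAnnotatedTypeName helpers used elsewhere in the diff, addTypeImport appears to collect type-only imports; the renderer and type name below are hypothetical, shown only to make the shape of such an entry concrete.

```ts
// Illustration only: rendering a type-only import entry like the ones collected above.
// The real addTypeImport/generate pipeline is internal to @postxl/generator.
function renderTypeImport(from: string, items: string[]): string {
  return `import type { ${items.join(', ')} } from '${from}'`
}

// e.g. the nonUnique error type pulled from a shared import-export types file:
console.log(renderTypeImport('./import-export.types', ['Delta_Model_Error_NonUnique']))
// -> import type { Delta_Model_Error_NonUnique } from './import-export.types'
```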
@@ -8,7 +8,7 @@ const meta_1 = require("../../lib/meta");
  */
 function generateImportExportTypes({ models, meta }) {
     const { types } = meta.importExport;
-    const imports = imports_1.ImportsGenerator.from(types.
+    const imports = imports_1.ImportsGenerator.from(types.location.path);
     const dto = meta.types.dto;
     imports
         .addTypeImport({
@@ -16,7 +16,7 @@ function generateImportExportTypes({ models, meta }) {
         items: [dto.create, dto.update, dto.genericModel, dto.idType],
     })
         .addTypeImport({
-        from: meta.data.
+        from: meta.data.types.location.import,
         items: [meta.data.types.bulkMutation],
     });
     const deltaTypes = [];
@@ -1,12 +1,5 @@
 import { SchemaMetaData } from '../../lib/meta';
 import { Model } from '../../lib/schema/schema';
-/**
- * Generates an index file that exports all repository stubs.
- */
-export declare function generateRepositoriesIndex({ models, meta }: {
-    models: Model[];
-    meta: SchemaMetaData;
-}): string;
 /**
  * Generates a an array that contains all the repositories.
  */
@@ -1,22 +1,8 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.generateRepositoriesArray =
-const exports_1 = require("../../lib/exports");
+exports.generateRepositoriesArray = void 0;
 const imports_1 = require("../../lib/imports");
 const meta_1 = require("../../lib/meta");
-/**
- * Generates an index file that exports all repository stubs.
- */
-function generateRepositoriesIndex({ models, meta }) {
-    const exports = exports_1.ExportsGenerator.from(meta.data.repository.indexFilePath);
-    for (const model of models) {
-        const meta = (0, meta_1.getModelMetadata)({ model });
-        exports.exportEverythingFromPath(meta.data.repository.filePath);
-    }
-    exports.exportEverythingFromPath(meta.data.repository.constFilePath);
-    return exports.generate();
-}
-exports.generateRepositoriesIndex = generateRepositoriesIndex;
 /**
  * Generates a an array that contains all the repositories.
  */
@@ -26,7 +12,7 @@ function generateRepositoriesArray({ models, meta }) {
     for (const { meta } of mm) {
         imports.addImport({
             items: [meta.data.repository.className],
-            from: meta.data.repository.
+            from: meta.data.repository.location.path,
         });
     }
     return /* ts */ `
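Besides the location refactor, 0.61.0 deletes the ExportsGenerator-based index generators: generateRepositoriesIndex above, generateImportExportDecoderIndex further down, and the businesslogic-*-index and stubs generator files listed at the top (all removed, with +0 added lines). Judging from the deleted code, these functions emitted barrel files via exportEverythingFromPath. A stand-in sketch of that barrel pattern, with an invented builder class and example paths:

```ts
// Stand-in for the barrel-file pattern used by the removed index generators.
// Not the package's ExportsGenerator; class name, method names and paths are invented.
class BarrelBuilder {
  private readonly paths: string[] = []

  static from(_indexFilePath: string): BarrelBuilder {
    return new BarrelBuilder()
  }

  exportEverythingFromPath(path: string): this {
    this.paths.push(path)
    return this
  }

  generate(): string {
    return this.paths.map((p) => `export * from '${p}'`).join('\n') + '\n'
  }
}

// Mirrors the removed generateRepositoriesIndex loop (model names are examples):
const barrel = BarrelBuilder.from('repositories/index.ts')
for (const model of ['user', 'project']) {
  barrel.exportEverythingFromPath(`./${model}.repository`)
}
barrel.exportEverythingFromPath('./repositories.const')
console.log(barrel.generate())
```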
@@ -8,14 +8,16 @@ const types_1 = require("../../lib/schema/types");
  * Generates the initial migration based on the generated seed data.
  */
 function generateSeedMigration({ models, meta }) {
-    const imports = imports_1.ImportsGenerator.from(meta.seedData.
+    const imports = imports_1.ImportsGenerator.from(meta.seedData.initialMigrationLocation.path);
     const modelTypes = [];
     for (const model of models) {
         const modelMeta = (0, meta_1.getModelMetadata)({ model });
         imports.addImports({
-            [modelMeta.seed.
-            [meta.importExport.
-
+            [modelMeta.seed.location.path]: [modelMeta.seed.constantName],
+            [meta.importExport.converterFunctions.location.import]: [
+                meta.importExport.converterFunctions.importedDataToBulkMutations,
+            ],
+            [meta.seed.actionsLocation.import]: [(0, types_1.toTypeName)('Action_Seed_Data')],
         });
         modelTypes.push(`${modelMeta.seed.constantName}`);
     }
@@ -10,18 +10,19 @@ const types_1 = require("../../lib/schema/types");
 function generateTestDataService({ models, meta: schemaMeta, }) {
     const imports = imports_1.ImportsGenerator.from(schemaMeta.data.testDataServiceFilePath);
     imports.addImports({
-        [schemaMeta.actions.
-        [schemaMeta.data.
-
-
+        [schemaMeta.actions.execution.classLocation.import]: [(0, types_1.toClassName)('ActionExecutionFactory')],
+        [schemaMeta.data.dataService.location.import]: [schemaMeta.data.dataService.class],
+        [schemaMeta.backendModules.db.databaseService.location.import]: [schemaMeta.backendModules.db.databaseService.name],
+        [schemaMeta.data.mockModule.location.import]: [(0, types_1.toAnnotatedTypeName)(schemaMeta.data.dataMockDataType)],
+        [schemaMeta.importExport.converterFunctions.location.import]: [
+            schemaMeta.importExport.converterFunctions.mockDataToBulkMutations,
         ],
-        [schemaMeta.importExport.importPath]: [schemaMeta.importExport.converterFunctions.mockDataToBulkMutations],
     });
     const reInitCalls = [];
     const modelMetas = models.map((model) => ({ model, meta: (0, meta_1.getModelMetadata)({ model }) }));
     for (const { model, meta } of modelMetas) {
         if (model.defaultField) {
-            imports.addImport({ items: [meta.data.stubGenerationFnName], from: meta.data.
+            imports.addImport({ items: [meta.data.stubGenerationFnName], from: meta.data.stubLocation.import });
             const stubDefault = `${meta.data.stubGenerationFnName}({ ${model.defaultField.name}: true })`;
             reInitCalls.push(`await this.dataService.${meta.data.dataServiceName}.reInit({ items: mockData.${meta.seed.constantName}?.create ?? [${stubDefault}], execution: actionExecution })`);
         }
@@ -32,7 +33,6 @@ function generateTestDataService({ models, meta: schemaMeta, }) {
     return /* ts */ `
 import { Injectable, Logger } from '@nestjs/common'

-import { DatabaseService } from '@backend/db'
 ${imports.generate()}

 @Injectable()
@@ -42,35 +42,36 @@ function generateModelBusinessLogicUpdate({ model, meta }) {
         typeName: meta.types.typeName,
         brandedId: model.brandedIdType,
         decoders: {
-            name: meta.
+            name: meta.update.decoders.name,
             createType: `Create${meta.internalSingularNameCapitalized}`,
             updateType: `Update${meta.internalSingularNameCapitalized}`,
             upsertType: `Upsert${meta.internalSingularNameCapitalized}`,
             cloneType: `Clone${meta.internalSingularNameCapitalized}`,
         },
         cloneContext: {
-            type: schemaMeta.
-            createMethod: schemaMeta.
-            map: meta.
+            type: schemaMeta.update.cloneContextType,
+            createMethod: schemaMeta.update.cloneContextCreateMethod,
+            map: meta.update.cloneContextMap,
         },
     };
-    const imports = imports_1.ImportsGenerator.from(meta.
+    const imports = imports_1.ImportsGenerator.from(meta.update.serviceClassLocation.path);
     imports.addImports({
-        [meta.data.
+        [meta.data.repository.location.import]: meta.data.repository.className,
         [meta.types.importPath]: [
             Types.toAnnotatedTypeName(m.brandedId),
             Types.toAnnotatedTypeName(m.typeName),
             meta.types.toBrandedIdTypeFnName,
             Types.toFunctionName(`omitId`),
         ],
-        [meta.
-        [schemaMeta.actions.
-        [schemaMeta.
-        [schemaMeta.
-
-        schemaMeta.
+        [meta.view.serviceLocation.import]: [meta.view.serviceClassName],
+        [schemaMeta.actions.execution.interfaceLocation.import]: [schemaMeta.actions.execution.interface],
+        [schemaMeta.actions.dispatcher.definitionLocation.import]: [schemaMeta.actions.dispatcher.definition],
+        [schemaMeta.update.serviceLocation.path]: schemaMeta.update.serviceClassName,
+        [schemaMeta.update.cloneContextFilePath]: [
+            schemaMeta.update.cloneContextType,
+            schemaMeta.update.cloneContextCreateMethod,
         ],
-        [schemaMeta.
+        [schemaMeta.view.serviceLocation.import]: schemaMeta.view.serviceClassName,
     });
     for (const relation of (0, fields_1.getRelationFields)(model)) {
         // NOTE: We add branded id type and type name imports only for foreign models.
@@ -91,14 +92,14 @@ function generateModelBusinessLogicUpdate({ model, meta }) {
      * (e.g. when we generate business logic service for Aggregation, the AggregationRepository
      * would be referenced using `this.data` variable).
      */
-    const modelRepositoryVariableName = meta.
+    const modelRepositoryVariableName = meta.view.dataRepositoryVariableName;
     const constructorParameters = [
         `private readonly ${modelRepositoryVariableName}: ${meta.data.repository.className}`,
-        `@Inject(forwardRef(() => ${schemaMeta.
-        `@Inject(forwardRef(() => ${schemaMeta.
+        `@Inject(forwardRef(() => ${schemaMeta.update.serviceClassName})) private readonly updateService: ${schemaMeta.update.serviceClassName}`,
+        `@Inject(forwardRef(() => ${schemaMeta.view.serviceClassName})) private readonly viewService: ${schemaMeta.view.serviceClassName}`,
     ];
-    const decoders = meta.
-    const { view, update } = meta
+    const decoders = meta.update.decoders;
+    const { view, update } = meta;
     /* prettier-ignore */
     return /* ts */ `
 import { Inject, Injectable, forwardRef } from '@nestjs/common'
@@ -281,8 +282,8 @@ function generateDeleteFunction({ model, meta, m }) {
         backReferenceNames.push(`${refModelMeta.userFriendlyNamePlural}.${referencingField.name}`);
         backReferenceDelete.push(`
       // ${referencingModel.name}.${referencingField.name}
-      const ${ids} = await this.viewService.${refModelMeta.
-      await this.updateService.${refModelMeta.
+      const ${ids} = await this.viewService.${refModelMeta.view.serviceVariableName}.data.${refFieldMeta.getByForeignKeyIdsMethodFnName}(data)
+      await this.updateService.${refModelMeta.update.serviceVariableName}.deleteMany({ data: ${ids}, execution })
       `);
     }
     return `
@@ -310,11 +311,11 @@ function generateDeleteManyMethod({ model, meta, m }) {
         idAssignments.push(`
       {
         // ${referencingModel.name}.${referencingField.name}
-        const _ids = await this.viewService.${refModelMeta.
+        const _ids = await this.viewService.${refModelMeta.view.serviceVariableName}.data.${refFieldMeta.getByForeignKeyIdsMethodFnName}(id)
         ${idArray}.push(..._ids)
       }
       `);
-        deleteCalls.push(`await this.updateService.${refModelMeta.
+        deleteCalls.push(`await this.updateService.${refModelMeta.update.serviceVariableName}.deleteMany({ data: ${idArray}, execution })`);
     }
     let relatedEntities = '';
     if (model.references.length > 0) {
@@ -356,7 +357,7 @@ function generateCloneMethod({ model, meta, m }) {
         const refModelMeta = (0, meta_1.getModelMetadata)({ model: referencingModel });
         const refFieldMeta = (0, meta_1.getFieldMetadata)({ field: referencingField });
         backReferenceNames.push(`${refModelMeta.userFriendlyNamePlural}.${referencingField.name}`);
-        const { view, update } = refModelMeta
+        const { view, update } = refModelMeta;
         backReferenceCloning.push(`
       // ${referencingModel.name}.${referencingField.name}
       for (const childId of await this.viewService.${view.serviceVariableName}.data.${refFieldMeta.getByForeignKeyIdsMethodFnName}(id)) {
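The constructor parameters generated in the update-service hunks above use NestJS's `@Inject(forwardRef(() => …))` to wire the mutually dependent per-model service, central update service and central view service. For orientation, this is a hand-written equivalent of what such a generated constructor resolves to; the class names are placeholders, not names the generator actually emits.

```ts
import { Inject, Injectable, forwardRef } from '@nestjs/common'

// Placeholders standing in for the generated repository and central services.
@Injectable()
export class AggregationRepository {}
@Injectable()
export class UpdateService {}
@Injectable()
export class ViewService {}

@Injectable()
export class AggregationUpdateService {
  constructor(
    // The model's own repository, referenced as `this.data` in the generated code.
    private readonly data: AggregationRepository,
    // forwardRef defers resolution so the circular UpdateService/ViewService references can be injected.
    @Inject(forwardRef(() => UpdateService)) private readonly updateService: UpdateService,
    @Inject(forwardRef(() => ViewService)) private readonly viewService: ViewService,
  ) {}
}
```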
@@ -14,19 +14,18 @@ const jsdoc_1 = require("../../lib/utils/jsdoc");
  */
 function generateModelBusinessLogicView({ model, meta }) {
     const schemaMeta = (0, meta_1.getSchemaMetadata)({ config: model.schemaConfig });
-    const imports = imports_1.ImportsGenerator.from(meta.
+    const imports = imports_1.ImportsGenerator.from(meta.view.serviceLocation.path);
     imports.addImports({
-        [meta.data.
+        [meta.data.repository.location.import]: meta.data.repository.className,
         [meta.types.importPath]: [(0, types_1.toAnnotatedTypeName)(model.brandedIdType), (0, types_1.toAnnotatedTypeName)(meta.types.typeName)],
-        [schemaMeta.
-        [meta.data.importPath]: [meta.data.repository.className],
+        [schemaMeta.view.serviceLocation.path]: schemaMeta.view.serviceClassName,
     });
     /**
      * The name of the variable that holds the repository instance for the current model
      * (e.g. when we generate business logic service for Aggregation, the AggregationRepository
      * would be referenced using `this.data` variable).
      */
-    const modelRepositoryVariableName = meta.
+    const modelRepositoryVariableName = meta.view.dataRepositoryVariableName;
     /**
      * The name of the variable that holds the central business logic service instance.
      * Instead of injecting a repository instance for each model, we inject this single instance
@@ -35,7 +34,7 @@ function generateModelBusinessLogicView({ model, meta }) {
     const viewServiceClassName = 'viewService';
     const constructorParameters = [
         `public readonly ${modelRepositoryVariableName}: ${meta.data.repository.className}`,
-        `@Inject(forwardRef(() => ${schemaMeta.
+        `@Inject(forwardRef(() => ${schemaMeta.view.serviceClassName})) private readonly ${viewServiceClassName}: ${schemaMeta.view.serviceClassName}`,
     ];
     /**
      * Variable names and their definitions indexed by the name of the relation they represent.
@@ -44,7 +43,7 @@ function generateModelBusinessLogicView({ model, meta }) {
     for (const relation of (0, fields_1.getRelationFields)(model)) {
         const refModel = relation.relationToModel;
         const refMeta = (0, meta_1.getModelMetadata)({ model: refModel });
-        const variableGetter = `await this.${viewServiceClassName}.${refMeta.
+        const variableGetter = `await this.${viewServiceClassName}.${refMeta.view.serviceVariableName}.get(itemRaw.${relation.name})`;
         const variablePresenceCheck = `
       if (!${relation.relationFieldName}) {
         throw new Error(\`Could not find ${refMeta.types.typeName} with id \${itemRaw.${relation.name}} for ${model.typeName}.${relation.name}!\`)
@@ -122,7 +121,7 @@ ${imports.generate()}
 ${hasLinkedItems ? linkedTypeDefinition : ''}

 @Injectable()
-export class ${meta.
+export class ${meta.view.serviceClassName} {
   constructor(${constructorParameters.join(',\n')}) {}

   /**
@@ -154,7 +153,7 @@ export class ${meta.businessLogic.view.serviceClassName} {
     sort?: { field: keyof ${model.typeName}; ascending: boolean }
     cursor?: { startRow: number; endRow: number }
   }) {
-    const items = await this.
+    const items = await this.${modelRepositoryVariableName}.getAllAsArray()
     const filtered = !filter
       ? items
       : items.filter((item) => filterFn(item, filter.field, filter.operator, filter.value))
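The last view-service hunk above rewires the generated listing method to `await this.<repository>.getAllAsArray()` followed by in-memory filtering, sorting and cursor slicing. The generated method is model-specific; the helper below is only a generic sketch of that load-then-filter/sort/slice pipeline, with illustrative types that mirror the filter/sort/cursor arguments visible in the hunk.

```ts
// Generic sketch of the in-memory listing pipeline the generated method follows.
// Parameter shapes are assumptions based on the hunk above, not the generated signature.
function listInMemory<T>(
  items: T[],
  options: {
    filter?: (item: T) => boolean
    sort?: { field: keyof T; ascending: boolean }
    cursor?: { startRow: number; endRow: number }
  } = {},
): T[] {
  const { filter, sort, cursor } = options
  let result = filter ? items.filter(filter) : items
  if (sort) {
    result = [...result].sort((a, b) => {
      const order = a[sort.field] < b[sort.field] ? -1 : a[sort.field] > b[sort.field] ? 1 : 0
      return sort.ascending ? order : -order
    })
  }
  return cursor ? result.slice(cursor.startRow, cursor.endRow) : result
}

// Example:
listInMemory([{ id: 2 }, { id: 1 }], { sort: { field: 'id', ascending: true } })
```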
@@ -15,10 +15,3 @@ export declare function generateImportExportDecoder({ models, meta }: {
     models: Model[];
     meta: SchemaMetaData;
 }): string;
-/**
- * Generates the index file for all the routes.
- */
-export declare function generateImportExportDecoderIndex({ models, meta }: {
-    models: Model[];
-    meta: SchemaMetaData;
-}): string;
@@ -1,7 +1,6 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.
-const exports_1 = require("../../lib/exports");
+exports.generateImportExportDecoder = exports.generateModelImportExportDecoder = void 0;
 const imports_1 = require("../../lib/imports");
 const meta_1 = require("../../lib/meta");
 const types_1 = require("../../lib/schema/types");
@@ -192,7 +191,7 @@ dbTypeName, nullable, imports, schemaMeta, }) {
  */
 function generateImportExportDecoder({ models, meta }) {
     const { decodedPXLModelDataTypeName, encodedExcelDataTypeName, fullDecoderName, fullEncoderFunctionName } = meta.importExport.decoder;
-    const imports = imports_1.ImportsGenerator.from(meta.importExport.decoder.
+    const imports = imports_1.ImportsGenerator.from(meta.importExport.decoder.location.path);
     imports.addImports({
         [meta.backendModules.common.importPath]: (0, types_1.toFunctionName)('uncapitalizeKeys'),
     });
@@ -244,16 +243,3 @@ export const ${fullEncoderFunctionName} = (data: ${decodedPXLModelDataTypeName})
 `;
 }
 exports.generateImportExportDecoder = generateImportExportDecoder;
-/**
- * Generates the index file for all the routes.
- */
-function generateImportExportDecoderIndex({ models, meta }) {
-    const exports = exports_1.ExportsGenerator.from(meta.importExport.decoder.indexFilePath);
-    exports.exportEverythingFromPath(meta.importExport.decoder.fullDecoderFilePath);
-    for (const model of models) {
-        const modelMeta = (0, meta_1.getModelMetadata)({ model });
-        exports.exportEverythingFromPath(modelMeta.importExport.decoder.filePath);
-    }
-    return exports.generate();
-}
-exports.generateImportExportDecoderIndex = generateImportExportDecoderIndex;
@@ -15,7 +15,7 @@ const string_1 = require("../../lib/utils/string");
 function generateRepository({ model, meta }) {
     const { idField } = model;
     const schemaMeta = (0, meta_1.getSchemaMetadata)({ config: model.schemaConfig });
-    const imports = imports_1.ImportsGenerator.from(meta.data.repository.
+    const imports = imports_1.ImportsGenerator.from(meta.data.repository.location.path).addImports({
         [schemaMeta.data.repository.typeFilePath]: (0, types_1.toAnnotatedTypeName)(schemaMeta.data.repository.typeName),
         [meta.types.importPath]: [
             (0, types_1.toAnnotatedTypeName)(model.typeName),
@@ -25,7 +25,7 @@ function generateRepository({ model, meta }) {
             (0, types_1.toAnnotatedTypeName)(meta.types.dto.update),
             (0, types_1.toAnnotatedTypeName)(meta.types.dto.upsert),
         ],
-        [schemaMeta.actions.
+        [schemaMeta.actions.execution.interfaceLocation.import]: [schemaMeta.actions.execution.interface],
     });
     // NOTE: We first generate different parts of the code responsible for a particular task
     // and then we combine them into the final code block.
@@ -218,7 +218,7 @@ exports.generateRepository = generateRepository;
  */
 function generateMockRepository({ model: modelSource, meta: metaSource, }) {
     // We re-use the repository block, but we change the meta data to use the mock repository name and the model to be in memory only
-    const meta = Object.assign(Object.assign({}, metaSource), { data: Object.assign(Object.assign({}, metaSource.data), { repository: Object.assign(Object.assign({}, metaSource.data.repository), { className: metaSource.data.mockRepository.className,
+    const meta = Object.assign(Object.assign({}, metaSource), { data: Object.assign(Object.assign({}, metaSource.data), { repository: Object.assign(Object.assign({}, metaSource.data.repository), { className: metaSource.data.mockRepository.className, location: metaSource.data.mockRepository.location }) }) });
     const model = Object.assign(Object.assign({}, modelSource), { attributes: Object.assign(Object.assign({}, modelSource.attributes), { inMemoryOnly: true }) });
     return generateRepository({ model, meta });
 }
@@ -465,10 +465,8 @@ function generateMainBuildingBlocks_InDatabase({ model, meta, schemaMeta, import
     const { idField } = model;
     imports.addImports({
         [meta.types.importPath]: [meta.types.zodDecoderFnNames.fromDatabase],
-        [schemaMeta.backendModules.db.
-
-        (0, types_1.toAnnotatedTypeName)((0, types_1.toTypeName)(`${model.sourceName} as DbType`)),
-        ],
+        [schemaMeta.backendModules.db.databaseService.location.import]: [schemaMeta.backendModules.db.databaseService.name],
+        [schemaMeta.backendModules.db.typesImportPath]: [(0, types_1.toAnnotatedTypeName)((0, types_1.toTypeName)(`${model.sourceName} as DbType`))],
         [schemaMeta.backendModules.common.importPath]: [
             schemaMeta.backendModules.common.functions.format,
             schemaMeta.backendModules.common.functions.pluralize,
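The widened Object.assign chain on the added line of the generateMockRepository hunk above is down-levelled TypeScript; its effect is a nested override that now carries the mock repository's `location` along with its `className`. A rough source-level equivalent using spreads, with simplified metadata types that are assumptions rather than the package's real definitions:

```ts
// Rough source-level reading of the compiled Object.assign chain above.
// The metadata types below are simplified assumptions, not the package's real ones.
type RepositoryMetaSketch = { className: string; location: { path: string; import: string } }
type ModelMetaSketch = { data: { repository: RepositoryMetaSketch; mockRepository: RepositoryMetaSketch } }

function toMockRepositoryMeta(metaSource: ModelMetaSketch): ModelMetaSketch {
  return {
    ...metaSource,
    data: {
      ...metaSource.data,
      repository: {
        ...metaSource.data.repository,
        className: metaSource.data.mockRepository.className,
        location: metaSource.data.mockRepository.location, // newly carried over in 0.61.0
      },
    },
  }
}
```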
@@ -8,18 +8,18 @@ const types_1 = require("../../lib/schema/types");
  */
 function generateRoute({ model, meta }) {
     const { idField, defaultField } = model;
-    const { scopeName, dataRepositoryVariableName } = meta.
+    const { scopeName, dataRepositoryVariableName } = meta.update;
     const defaultValueMethod = `
     getDefault: procedure.query(({ ctx }) => ctx.view.${meta.data.dataServiceName}.${dataRepositoryVariableName}.defaultValue),
   `;
-    const decoders = meta.
+    const decoders = meta.update.decoders;
     const imports = imports_1.ImportsGenerator.from(meta.trpc.routerFilePath).addImports({
         [meta.types.importPath]: [
             (0, types_1.toAnnotatedTypeName)(model.typeName),
             meta.types.toBrandedIdTypeFnName,
             meta.types.zodDecoderFnNames.id,
         ],
-        [meta.
+        [meta.update.serviceClassLocation.import]: [decoders.name],
     });
     return /* ts */ `
 import { z } from 'zod'
@@ -21,7 +21,7 @@ function generateSeedModel({ model, itemCount, meta, models, }) {
     for (const model of models) {
         modelMap.set(model.name, model);
     }
-    const imports = imports_1.ImportsGenerator.from(meta.seed.
+    const imports = imports_1.ImportsGenerator.from(meta.seed.location.path).addImport({
         items: [(0, types_1.toAnnotatedTypeName)(model.typeName), meta.types.toBrandedIdTypeFnName],
         from: meta.types.importPath,
     });
@@ -268,7 +268,7 @@ function generateSeeds({ models, config, }) {
     for (const { meta } of models) {
         imports.addImport({
             items: [meta.seed.constantName],
-            from: meta.seed.
+            from: meta.seed.location.path,
         });
     }
     const seeds = models.map(({ meta }) => `${meta.seed.constantName}`).join(',\n');
@@ -10,7 +10,7 @@ const types_1 = require("../../lib/types");
  */
 function generateStub({ model, meta }) {
     const { fields, idField } = model;
-    const imports = imports_1.ImportsGenerator.from(meta.data.
+    const imports = imports_1.ImportsGenerator.from(meta.data.stubLocation.path)
         .addImport({
         items: [meta.types.toBrandedIdTypeFnName],
         from: meta.types.importPath,