@memberjunction/codegen-lib 1.8.0 → 2.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/Angular/angular-codegen.d.ts.map +1 -1
- package/dist/Angular/angular-codegen.js +6 -3
- package/dist/Angular/angular-codegen.js.map +1 -1
- package/dist/Angular/join-grid-related-entity-component.d.ts +0 -1
- package/dist/Angular/join-grid-related-entity-component.d.ts.map +1 -1
- package/dist/Angular/join-grid-related-entity-component.js +0 -13
- package/dist/Angular/join-grid-related-entity-component.js.map +1 -1
- package/dist/Angular/related-entity-components.d.ts +11 -1
- package/dist/Angular/related-entity-components.d.ts.map +1 -1
- package/dist/Angular/related-entity-components.js +26 -1
- package/dist/Angular/related-entity-components.js.map +1 -1
- package/dist/Angular/timeline-related-entity-component.d.ts +36 -0
- package/dist/Angular/timeline-related-entity-component.d.ts.map +1 -0
- package/dist/Angular/timeline-related-entity-component.js +67 -0
- package/dist/Angular/timeline-related-entity-component.js.map +1 -0
- package/dist/Database/manage-metadata.d.ts +23 -8
- package/dist/Database/manage-metadata.d.ts.map +1 -1
- package/dist/Database/manage-metadata.js +356 -149
- package/dist/Database/manage-metadata.js.map +1 -1
- package/dist/Database/sql.d.ts +6 -0
- package/dist/Database/sql.d.ts.map +1 -1
- package/dist/Database/sql.js +82 -12
- package/dist/Database/sql.js.map +1 -1
- package/dist/Database/sql_codegen.d.ts.map +1 -1
- package/dist/Database/sql_codegen.js +60 -19
- package/dist/Database/sql_codegen.js.map +1 -1
- package/dist/Misc/createNewUser.d.ts.map +1 -1
- package/dist/Misc/createNewUser.js +2 -1
- package/dist/Misc/createNewUser.js.map +1 -1
- package/dist/action_subclasses_codegen.d.ts +1 -1
- package/dist/entity_subclasses_codegen.d.ts.map +1 -1
- package/dist/entity_subclasses_codegen.js +72 -75
- package/dist/entity_subclasses_codegen.js.map +1 -1
- package/dist/graphql_server_codegen.d.ts +4 -4
- package/dist/graphql_server_codegen.d.ts.map +1 -1
- package/dist/graphql_server_codegen.js +65 -58
- package/dist/graphql_server_codegen.js.map +1 -1
- package/dist/index.d.ts +1 -0
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +1 -0
- package/dist/index.js.map +1 -1
- package/package.json +7 -7
- package/dist/createNewUser.d.ts +0 -12
- package/dist/createNewUser.d.ts.map +0 -1
- package/dist/createNewUser.js +0 -113
- package/dist/createNewUser.js.map +0 -1
package/dist/Database/manage-metadata.js

@@ -1,10 +1,36 @@
 "use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
 var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
     var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
     if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
     else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
     return c > 3 && r && Object.defineProperty(target, key, r), r;
 };
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
 var ManageMetadataBase_1;
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.ManageMetadataBase = void 0;
@@ -14,11 +40,19 @@ const logging_1 = require("../Misc/logging");
 const sql_1 = require("./sql");
 const advanced_generation_1 = require("../Misc/advanced_generation");
 const global_1 = require("@memberjunction/global");
+const fs = __importStar(require("fs"));
+const path_1 = __importDefault(require("path"));
 /**
  * Base class for managing metadata within the CodeGen system. This class can be sub-classed to extend/override base class functionality. Make sure to use the RegisterClass decorator from the @memberjunction/global package
  * to properly register your subclass with a priority of 1+ to ensure it gets instantiated.
  */
 let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
+    constructor() {
+        this._sqlUtilityObject = global_1.MJGlobal.Instance.ClassFactory.CreateInstance(sql_1.SQLUtilityBase);
+    }
+    get SQLUtilityObject() {
+        return this._sqlUtilityObject;
+    }
     static get newEntityList() {
         return this._newEntityList;
     }
@@ -31,33 +65,55 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
         const md = new core_1.Metadata();
         const excludeSchemas = config_1.configInfo.excludeSchemas ? config_1.configInfo.excludeSchemas : [];
         let bSuccess = true;
+        let start = new Date();
+        (0, logging_1.logStatus)(' Creating new entities...');
         if (!await this.createNewEntities(ds)) {
-            (0, logging_1.logError)('Error creating new entities');
+            (0, logging_1.logError)(' Error creating new entities');
             bSuccess = false;
         }
+        (0, logging_1.logStatus)(` > Created new entities in ${(new Date().getTime() - start.getTime()) / 1000} seconds`);
+        start = new Date();
+        (0, logging_1.logStatus)(' Updating existing entities...');
         if (!await this.updateExistingEntitiesFromSchema(ds, excludeSchemas)) {
-            (0, logging_1.logError)('Error updating existing entities');
+            (0, logging_1.logError)(' Error updating existing entities');
             bSuccess = false;
         }
+        (0, logging_1.logStatus)(` > Updated existing entities in ${(new Date().getTime() - start.getTime()) / 1000} seconds`);
+        start = new Date();
+        (0, logging_1.logStatus)(' Scanning for tables that were deleted where entity metadata still exists...');
+        if (!await this.checkAndRemoveMetadataForDeletedTables(ds, excludeSchemas)) {
+            (0, logging_1.logError)(' Error removing metadata for tables that were removed');
+            bSuccess = false;
+        }
+        (0, logging_1.logStatus)(` > Removed metadata for deleted tables in ${(new Date().getTime() - start.getTime()) / 1000} seconds`);
+        start = new Date();
+        (0, logging_1.logStatus)(' Recompiling base views...');
         const sqlUtility = global_1.MJGlobal.Instance.ClassFactory.CreateInstance(sql_1.SQLUtilityBase);
         if (!await sqlUtility.recompileAllBaseViews(ds, excludeSchemas, true)) {
-            (0, logging_1.logMessage)('Warning: Non-Fatal error recompiling base views', core_1.SeverityType.Warning, false);
+            (0, logging_1.logMessage)(' Warning: Non-Fatal error recompiling base views', core_1.SeverityType.Warning, false);
             // many times the former versions of base views will NOT succesfully recompile, so don't consider that scenario to be a
             // failure for this entire function
         }
+        (0, logging_1.logStatus)(` > Recompiled base views in ${(new Date().getTime() - start.getTime()) / 1000} seconds`);
+        start = new Date();
+        (0, logging_1.logStatus)(' Managing entity fields...');
         if (!await this.manageEntityFields(ds, excludeSchemas, false)) {
-            (0, logging_1.logError)('Error managing entity fields');
+            (0, logging_1.logError)(' Error managing entity fields');
             bSuccess = false;
         }
+        (0, logging_1.logStatus)(` > Managed entity fields in ${(new Date().getTime() - start.getTime()) / 1000} seconds`);
+        start = new Date();
+        (0, logging_1.logStatus)(' Managing entity relationships...');
         if (!await this.manageEntityRelationships(ds, excludeSchemas, md)) {
-            (0, logging_1.logError)('Error managing entity relationships');
+            (0, logging_1.logError)(' Error managing entity relationships');
             bSuccess = false;
         }
+        (0, logging_1.logStatus)(` > Managed entity relationships in ${(new Date().getTime() - start.getTime()) / 1000} seconds`);
         if (ManageMetadataBase_1.newEntityList.length > 0) {
            await this.generateNewEntityDescriptions(ds, md); // don't pass excludeSchemas becuase by definition this is the NEW entities we created
         }
         if (!await this.manageVirtualEntities(ds)) {
-            (0, logging_1.logError)('Error managing virtual entities');
+            (0, logging_1.logError)(' Error managing virtual entities');
             bSuccess = false;
         }
         // now - we need to tell our metadata object to refresh itself
@@ -90,7 +146,7 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
     // // for a given virtual entity, we need to loop through the fields that exist in the current SQL definition for the view
     // // and add/update/delete the entity fields to match what's in the view
     // let bSuccess = true;
-    // const sql = `SELECT * FROM vwSQLColumnsAndEntityFields WHERE EntityID = ${ve.ID}`;
+    // const sql = `SELECT * FROM vwSQLColumnsAndEntityFields WHERE EntityID = '${ve.ID}'`;
     // const veFields = await ds.query(sql);
     // if (veFields && veFields.length > 0) {
     // // we have 1+ fields, now loop through them and process each one
@@ -151,10 +207,10 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
      * @param md
      * @returns
      */
-    async manageEntityRelationships(ds, excludeSchemas, md) {
+    async manageEntityRelationships(ds, excludeSchemas, md, batchItems = 5) {
         let bResult = true;
-        bResult = bResult && await this.manageManyToManyEntityRelationships(ds, excludeSchemas);
-        bResult = bResult && await this.manageOneToManyEntityRelationships(ds, excludeSchemas, md);
+        bResult = bResult && await this.manageManyToManyEntityRelationships(ds, excludeSchemas, batchItems);
+        bResult = bResult && await this.manageOneToManyEntityRelationships(ds, excludeSchemas, md, batchItems);
         return bResult;
     }
     /**
@@ -164,7 +220,7 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
      * @param md
      * @returns
      */
-    async manageOneToManyEntityRelationships(ds, excludeSchemas, md) {
+    async manageOneToManyEntityRelationships(ds, excludeSchemas, md, batchItems = 5) {
         // the way this works is that we look for entities in our catalog and we look for
         // foreign keys in those entities. For example, if we saw an entity called Persons and that entity
         // had a foreign key linking to an entity called Organizations via a field called OrganizationID, then we would create a relationship
@@ -194,23 +250,77 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
             for (const rc of relationshipCounts) {
                 relationshipCountMap.set(rc.EntityID, rc.Count);
             }
-            //
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+            // get all relationships in one query for performance improvement
+            const sSQLRelationship = `SELECT * FROM ${(0, config_1.mj_core_schema)()}.EntityRelationship`;
+            const allRelationships = await ds.query(sSQLRelationship);
+            // Function to process a batch of entity fields
+            const processBatch = async (batch) => {
+                let batchSQL = '';
+                batch.forEach((f) => {
+                    // for each field determine if an existing relationship exists, if not, create it
+                    const relationships = allRelationships.filter(r => r.EntityID === f.RelatedEntityID && r.RelatedEntityID === f.EntityID);
+                    if (relationships && relationships.length === 0) {
+                        // no relationship exists, so create it
+                        const e = md.Entities.find(e => e.ID === f.EntityID);
+                        // calculate the sequence by getting the count of existing relationships for the entity and adding 1 and then increment the count for future inserts in this loop
+                        const relCount = relationshipCountMap.get(f.EntityID) ? relationshipCountMap.get(f.EntityID) : 0;
+                        const sequence = relCount + 1;
+                        batchSQL += `INSERT INTO ${(0, config_1.mj_core_schema)()}.EntityRelationship (EntityID, RelatedEntityID, RelatedEntityJoinField, Type, BundleInAPI, DisplayInForm, DisplayName, Sequence)
+                                     VALUES ('${f.RelatedEntityID}', '${f.EntityID}', '${f.Name}', 'One To Many', 1, 1, '${e.Name}', ${sequence});
+                                    `;
+                        // now update the map for the relationship count
+                        relationshipCountMap.set(f.EntityID, sequence);
+                    }
+                });
+                if (batchSQL.length > 0)
+                    await ds.query(batchSQL);
+            };
+            // Split entityFields into batches and process each batch
+            for (let i = 0; i < entityFields.length; i += batchItems) {
+                const batch = entityFields.slice(i, i + batchItems);
+                await processBatch(batch);
+            }
+            return true;
+        }
+        catch (e) {
+            (0, logging_1.logError)(e);
+            return false;
+        }
+    }
+    /**
+     * This method will look for situations where entity metadata exist in the entities metadata table but the underlying table has been deleted. In this case, the metadata for the entity
+     * should be removed. This method is called as part of the manageMetadata method and is not intended to be called directly.
+     * @param ds
+     * @param excludeSchemas
+     */
+    async checkAndRemoveMetadataForDeletedTables(ds, excludeSchemas) {
+        try {
+            const sql = `SELECT * FROM ${(0, config_1.mj_core_schema)()}.vwEntitiesWithMissingBaseTables WHERE VirtualEntity=0`;
+            const entities = await ds.query(sql);
+            if (entities && entities.length > 0) {
+                for (const e of entities) {
+                    // for the given entity, wipe out the entity metadata and its core deps.
+                    // the below could fail if there are non-core dependencies on the entity, but that's ok, we will flag that in the console
+                    // for the admin to handle manually
+                    try {
+                        const sqlDelete = `__mj.spDeleteEntityWithCoreDependencies @EntityID='${e.ID}'`;
+                        await ds.query(sqlDelete);
+                        (0, logging_1.logStatus)(` > Removed metadata for table ${e.SchemaName}.${e.BaseTable}`);
+                        // next up we need to remove the spCreate, spDelete, spUpdate, BaseView, and FullTextSearchFunction, if provided.
+                        // We only remoe these artifcacts when they are generated which is info we have in the BaseViewGenerated, spCreateGenerated, etc. fields
+                        await this.checkDropSQLObject(ds, e.BaseViewGenerated, 'view', e.SchemaName, e.BaseView);
+                        await this.checkDropSQLObject(ds, e.spCreateGenerated, 'procedure', e.SchemaName, e.spCreate ? e.spCreate : `spCreate${e.ClassName}`);
+                        await this.checkDropSQLObject(ds, e.spDeleteGenerated, 'procedure', e.SchemaName, e.spDelete ? e.spDelete : `spDelete${e.ClassName}`);
+                        await this.checkDropSQLObject(ds, e.spUpdateGenerated, 'procedure', e.SchemaName, e.spUpdate ? e.spUpdate : `spUpdate${e.ClassName}`);
+                        await this.checkDropSQLObject(ds, e.FullTextSearchFunctionGenerated, 'function', e.SchemaName, e.FullTextSearchFunction);
+                    }
+                    catch (ex) {
+                        (0, logging_1.logError)(`Error removing metadata for entity ${ex.Name}, error: ${ex}`);
+                    }
                 }
+                // if we get here we now need to refresh our metadata object
+                const md = new core_1.Metadata();
+                await md.Refresh();
             }
             return true;
         }
@@ -219,6 +329,31 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
             return false;
         }
     }
+    async checkDropSQLObject(ds, proceed, type, schemaName, name) {
+        try {
+            if (proceed && schemaName && name && schemaName.trim().length > 0 && name.trim().length > 0) {
+                const sqlDelete = `DROP ${type} IF EXISTS [${schemaName}].[${name}]`;
+                await ds.query(sqlDelete);
+                // next up, we need to clean up the cache of saved DB objects that may exist for this entity in the appropriate sub-directory.
+                const sqlOutputDir = (0, config_1.outputDir)('SQL', true);
+                if (sqlOutputDir) {
+                    // now do the same thing for the /schema directory within the provided directory
+                    const fType = type === 'procedure' ? 'sp' : type === 'view' ? 'view' : 'full_text_search_function';
+                    const filePath = path_1.default.join(sqlOutputDir, this.SQLUtilityObject.getDBObjectFileName(fType, schemaName, name, false, true));
+                    const filePathPermissions = path_1.default.join(sqlOutputDir, this.SQLUtilityObject.getDBObjectFileName(fType, schemaName, name, true, true));
+                    // if the files exist, delete them
+                    if (fs.existsSync(filePath))
+                        fs.unlinkSync(filePath);
+                    if (fs.existsSync(filePathPermissions))
+                        fs.unlinkSync(filePathPermissions);
+                }
+                (0, logging_1.logStatus)(` > Removed ${type} ${schemaName}.${name}`);
+            }
+        }
+        catch (e) {
+            (0, logging_1.logError)(` > Error removing ${type} ${schemaName}.${name}, error: ${e}`);
+        }
+    }
     /**
      * Manages M->M relationships between entities in the metadata based on foreign key relationships in the database.
      * NOT IMPLEMENTED IN CURRENT VERSION IN BASE CLASS. M->M relationships ARE supported fully, but they are not AUTO generated by this
@@ -227,7 +362,7 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
      * @param excludeSchemas
      * @returns
      */
-    async manageManyToManyEntityRelationships(ds, excludeSchemas) {
+    async manageManyToManyEntityRelationships(ds, excludeSchemas, batchItems = 5) {
         return true; // not implemented for now, require the admin to manually create these relationships
     }
     /**
@@ -236,53 +371,88 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
      * @param excludeSchemas
      * @returns
      */
-    async manageEntityFields(ds, excludeSchemas,
+    async manageEntityFields(ds, excludeSchemas, skipCreatedAtUpdatedAtDeletedAtFieldValidation) {
         let bSuccess = true;
         const startTime = new Date();
-        if (!
-        (
-
+        if (!skipCreatedAtUpdatedAtDeletedAtFieldValidation) {
+            if (!await this.ensureCreatedAtUpdatedAtFieldsExist(ds, excludeSchemas) ||
+                !await this.ensureDeletedAtFieldsExist(ds, excludeSchemas)) {
+                (0, logging_1.logError)(`Error ensuring ${core_1.EntityInfo.CreatedAtFieldName}, ${core_1.EntityInfo.UpdatedAtFieldName} and ${core_1.EntityInfo.DeletedAtFieldName} fields exist`);
+                bSuccess = false;
+            }
+            (0, logging_1.logStatus)(` Ensured ${core_1.EntityInfo.CreatedAtFieldName}/${core_1.EntityInfo.UpdatedAtFieldName}/${core_1.EntityInfo.DeletedAtFieldName} fields exist in ${(new Date().getTime() - startTime.getTime()) / 1000} seconds`);
         }
-        (0, logging_1.logStatus)(` Ensured ${core_1.EntityInfo.CreatedAtFieldName}/${core_1.EntityInfo.UpdatedAtFieldName} fields exist in ${(new Date().getTime() - startTime.getTime()) / 1000} seconds`);
         const step1StartTime = new Date();
         if (!await this.deleteUnneededEntityFields(ds, excludeSchemas)) {
             (0, logging_1.logError)('Error deleting unneeded entity fields');
             bSuccess = false;
         }
-        (0, logging_1.logStatus)(`
+        (0, logging_1.logStatus)(` Deleted unneeded entity fields in ${(new Date().getTime() - step1StartTime.getTime()) / 1000} seconds`);
         const step2StartTime = new Date();
         if (!await this.updateExistingEntityFieldsFromSchema(ds, excludeSchemas)) {
             (0, logging_1.logError)('Error updating existing entity fields from schema');
             bSuccess = false;
         }
-        (0, logging_1.logStatus)(`
+        (0, logging_1.logStatus)(` Updated existing entity fields from schema in ${(new Date().getTime() - step2StartTime.getTime()) / 1000} seconds`);
         const step3StartTime = new Date();
         if (!await this.createNewEntityFieldsFromSchema(ds)) { // has its own internal filtering for exclude schema/table so don't pass in
             (0, logging_1.logError)('Error creating new entity fields from schema');
             bSuccess = false;
         }
-        (0, logging_1.logStatus)(`
+        (0, logging_1.logStatus)(` Created new entity fields from schema in ${(new Date().getTime() - step3StartTime.getTime()) / 1000} seconds`);
         const step4StartTime = new Date();
         if (!await this.setDefaultColumnWidthWhereNeeded(ds, excludeSchemas)) {
             (0, logging_1.logError)('Error setting default column width where needed');
             bSuccess = false;
         }
-        (0, logging_1.logStatus)(`
+        (0, logging_1.logStatus)(` Set default column width where needed in ${(new Date().getTime() - step4StartTime.getTime()) / 1000} seconds`);
         const step5StartTime = new Date();
         if (!await this.updateEntityFieldDisplayNameWhereNull(ds, excludeSchemas)) {
             (0, logging_1.logError)('Error updating entity field display name where null');
             bSuccess = false;
         }
-        (0, logging_1.logStatus)(`
+        (0, logging_1.logStatus)(` Updated entity field display name where null in ${(new Date().getTime() - step5StartTime.getTime()) / 1000} seconds`);
         const step6StartTime = new Date();
         if (!await this.manageEntityFieldValues(ds, excludeSchemas)) {
             (0, logging_1.logError)('Error managing entity field values');
             bSuccess = false;
         }
-        (0, logging_1.logStatus)(`
-        (0, logging_1.logStatus)(`
+        (0, logging_1.logStatus)(` Managed entity field values in ${(new Date().getTime() - step6StartTime.getTime()) / 1000} seconds`);
+        (0, logging_1.logStatus)(` Total time to manage entity fields: ${(new Date().getTime() - startTime.getTime()) / 1000} seconds`);
         return bSuccess;
     }
+    /**
+     * This method ensures that the __mj_DeletedAt field exists in each entity that has DeleteType=Soft. If the field does not exist, it is created.
+     */
+    async ensureDeletedAtFieldsExist(ds, excludeSchemas) {
+        try {
+            const sqlEntities = `SELECT * FROM [${(0, config_1.mj_core_schema)()}].vwEntities WHERE DeleteType='Soft' AND SchemaName NOT IN (${excludeSchemas.map(s => `'${s}'`).join(',')})`;
+            const entities = await ds.query(sqlEntities);
+            let overallResult = true;
+            if (entities.length > 0) {
+                // we have 1+ entities that need the special fields, so loop through them and ensure the fields exist
+                // validate that each entity has the __mj_DeletedAt field, and it is a DATETIMEOFFSET fields, NOT NULL and both are fields that have a DEFAULT value of GETUTCDATE().
+                const sql = `SELECT *
+                             FROM INFORMATION_SCHEMA.COLUMNS
+                             WHERE
+                                ${entities.map(e => `(TABLE_SCHEMA='${e.SchemaName}' AND TABLE_NAME='${e.BaseTable}')`).join(' OR ')}
+                                AND COLUMN_NAME='${core_1.EntityInfo.DeletedAtFieldName}'`;
+                const result = await ds.query(sql);
+                for (const e of entities) {
+                    const eResult = result.filter(r => r.TABLE_NAME === e.BaseTable && r.TABLE_SCHEMA === e.SchemaName); // get just the fields for this entity
+                    const deletedAt = eResult.find(r => r.COLUMN_NAME.trim().toLowerCase() === core_1.EntityInfo.DeletedAtFieldName.trim().toLowerCase());
+                    // now, if we have the fields, we need to check the default value and update if necessary
+                    const fieldResult = await this.ensureSpecialDateFieldExistsAndHasCorrectDefaultValue(ds, e, core_1.EntityInfo.DeletedAtFieldName, deletedAt, true);
+                    overallResult = overallResult && fieldResult;
+                }
+            }
+            return overallResult;
+        }
+        catch (e) {
+            (0, logging_1.logError)(e);
+            return false;
+        }
+    }
     /**
      * This method ensures that the __mj_CreatedAt and __mj_UpdatedAt fields exist in each entity that has TrackRecordChanges set to true. If the fields do not exist, they are created.
      * If the fields exist but have incorrect default values, the default values are updated. The default value that is to be used for these special fields is GETUTCDATE() which is the
@@ -298,20 +468,20 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
                 // we have 1+ entities that need the special fields, so loop through them and ensure the fields exist
                 // validate that each entity has two specific fields, the first one is __mj_CreatedAt and the second one is __mj_UpdatedAt
                 // both are DATETIME fields, NOT NULL and both are fields that have a DEFAULT value of GETUTCDATE().
+                const sqlCreatedUpdated = `SELECT *
+                                           FROM INFORMATION_SCHEMA.COLUMNS
+                                           WHERE
+                                              ${entities.map(e => `(TABLE_SCHEMA='${e.SchemaName}' AND TABLE_NAME='${e.BaseTable}')`).join(' OR ')}
+                                              AND COLUMN_NAME IN ('${core_1.EntityInfo.CreatedAtFieldName}','${core_1.EntityInfo.UpdatedAtFieldName}')`;
+                const result = await ds.query(sqlCreatedUpdated);
                 for (const e of entities) {
-                    const sqlCreatedUpdated = `SELECT *
-                                               FROM INFORMATION_SCHEMA.COLUMNS
-                                               WHERE
-                                                  TABLE_SCHEMA='${e.SchemaName}'
-                                                  AND TABLE_NAME = '${e.BaseTable}'
-                                                  AND COLUMN_NAME IN ('${core_1.EntityInfo.CreatedAtFieldName}','${core_1.EntityInfo.UpdatedAtFieldName}')`;
-                    const result = await ds.query(sqlCreatedUpdated);
                     // result has both created at and updated at fields, so filter on the result for each and do what we need to based on that
-                    const
-                    const
+                    const eResult = result.filter(r => r.TABLE_NAME === e.BaseTable && r.TABLE_SCHEMA === e.SchemaName); // get just the fields for this entity
+                    const createdAt = eResult.find(r => r.COLUMN_NAME.trim().toLowerCase() === core_1.EntityInfo.CreatedAtFieldName.trim().toLowerCase());
+                    const updatedAt = eResult.find(r => r.COLUMN_NAME.trim().toLowerCase() === core_1.EntityInfo.UpdatedAtFieldName.trim().toLowerCase());
                     // now, if we have the fields, we need to check the default value and update if necessary
-                    const fieldResult = await this.ensureSpecialDateFieldExistsAndHasCorrectDefaultValue(ds, e, core_1.EntityInfo.CreatedAtFieldName, createdAt) &&
-                        await this.ensureSpecialDateFieldExistsAndHasCorrectDefaultValue(ds, e, core_1.EntityInfo.UpdatedAtFieldName, updatedAt);
+                    const fieldResult = await this.ensureSpecialDateFieldExistsAndHasCorrectDefaultValue(ds, e, core_1.EntityInfo.CreatedAtFieldName, createdAt, false) &&
+                        await this.ensureSpecialDateFieldExistsAndHasCorrectDefaultValue(ds, e, core_1.EntityInfo.UpdatedAtFieldName, updatedAt, false);
                     overallResult = overallResult && fieldResult;
                 }
             }
@@ -329,40 +499,57 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
      * @param fieldName
      * @param currentFieldData
      */
-    async ensureSpecialDateFieldExistsAndHasCorrectDefaultValue(ds, entity, fieldName, currentFieldData) {
-
-
-
-
-        }
-        else {
-            // field does exist, let's first check the data type/nullability
-            if (currentFieldData.DATA_TYPE.trim().toLowerCase() !== 'datetimeoffset' || currentFieldData.IS_NULLABLE.trim().toLowerCase() !== 'no') {
-                // the column is the wrong type, so let's update it, first removing the default constraint, then
-                // modifying the column, and finally adding the default constraint back in.
-                await this.dropExistingDefaultConstraint(ds, entity, fieldName);
-                const sql = `ALTER TABLE [${entity.SchemaName}].[${entity.BaseTable}] ALTER COLUMN ${fieldName} DATETIMEOFFSET NOT NULL`;
+    async ensureSpecialDateFieldExistsAndHasCorrectDefaultValue(ds, entity, fieldName, currentFieldData, allowNull) {
+        try {
+            if (!currentFieldData) {
+                // field doesn't exist, let's create it
+                const sql = `ALTER TABLE [${entity.SchemaName}].[${entity.BaseTable}] ADD ${fieldName} DATETIMEOFFSET ${allowNull ? 'NULL' : 'NOT NULL DEFAULT GETUTCDATE()'}`;
                 await ds.query(sql);
-                await this.createDefaultConstraintForSpecialDateField(ds, entity, fieldName);
             }
             else {
-                //
-
-
-
-
+                // field does exist, let's first check the data type/nullability
+                if (currentFieldData.DATA_TYPE.trim().toLowerCase() !== 'datetimeoffset' ||
+                    (currentFieldData.IS_NULLABLE.trim().toLowerCase() !== 'no' && !allowNull) ||
+                    (currentFieldData.IS_NULLABLE.trim().toLowerCase() === 'no' && allowNull)) {
+                    // the column is the wrong type, or has wrong nullability attribute, so let's update it, first removing the default constraint, then
+                    // modifying the column, and finally adding the default constraint back in.
+                    await this.dropExistingDefaultConstraint(ds, entity, fieldName);
+                    const sql = `ALTER TABLE [${entity.SchemaName}].[${entity.BaseTable}] ALTER COLUMN ${fieldName} DATETIMEOFFSET ${allowNull ? 'NULL' : 'NOT NULL'}`;
+                    await ds.query(sql);
+                    if (!allowNull)
+                        await this.createDefaultConstraintForSpecialDateField(ds, entity, fieldName);
+                }
+                else {
+                    // if we get here that means the column is the correct type and nullability, so now let's check the default value, but we only do that if we are dealing with a
+                    // field that is NOT NULL
+                    if (!allowNull) {
+                        const defaultValue = currentFieldData.COLUMN_DEFAULT;
+                        const realDefaultValue = (0, core_1.ExtractActualDefaultValue)(defaultValue);
+                        if (!realDefaultValue || realDefaultValue.trim().toLowerCase() !== 'getutcdate()') {
+                            await this.dropAndCreateDefaultConstraintForSpecialDateField(ds, entity, fieldName);
+                        }
+                    }
                 }
             }
+            // if we get here, we're good
+            return true;
+        }
+        catch (e) {
+            (0, logging_1.logError)(e);
+            return false;
         }
-        // if we get here, we're good
-        return true;
     }
     /**
      * Creates the default constraint for a special date field. This method is called as part of the ensureSpecialDateFieldExistsAndHasCorrectDefaultValue method and is not intended to be called directly.
      */
     async createDefaultConstraintForSpecialDateField(ds, entity, fieldName) {
-
-
+        try {
+            const sqlAddDefaultConstraint = `ALTER TABLE [${entity.SchemaName}].[${entity.BaseTable}] ADD CONSTRAINT DF_${entity.SchemaName}_${(0, core_1.CodeNameFromString)(entity.BaseTable)}_${fieldName} DEFAULT GETUTCDATE() FOR [${fieldName}]`;
+            await ds.query(sqlAddDefaultConstraint);
+        }
+        catch (e) {
+            (0, logging_1.logError)(e);
+        }
     }
     /**
      * Drops and recreates the default constraint for a special date field. This method is called as part of the ensureSpecialDateFieldExistsAndHasCorrectDefaultValue method and is not intended to be called directly.
@@ -382,7 +569,8 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
      * @param fieldName
      */
    async dropExistingDefaultConstraint(ds, entity, fieldName) {
-
+        try {
+            const sqlDropDefaultConstraint = `
                 DECLARE @constraintName NVARCHAR(255);

                 -- Get the default constraint name
@@ -400,8 +588,12 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
                 BEGIN
                     EXEC('ALTER TABLE [${entity.SchemaName}].[${entity.BaseTable}] DROP CONSTRAINT ' + @constraintName);
                 END
-
-
+            `;
+            await ds.query(sqlDropDefaultConstraint);
+        }
+        catch (e) {
+            (0, logging_1.logError)(e);
+        }
     }
     /**
      * This method generates descriptions for entities in teh system where there is no existing description. This is an experimental feature and is done using AI. In order for it
@@ -421,7 +613,7 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
         // now loop through the new entities and generate descriptions for them
         for (let e of ManageMetadataBase_1.newEntityList) {
             const data = await ds.query(`SELECT * FROM [${(0, config_1.mj_core_schema)()}].vwEntities WHERE Name = '${e}'`);
-            const fields = await ds.query(`SELECT * FROM [${(0, config_1.mj_core_schema)()}].vwEntityFields WHERE EntityID
+            const fields = await ds.query(`SELECT * FROM [${(0, config_1.mj_core_schema)()}].vwEntityFields WHERE EntityID='${data[0].ID}'`);
             const entityUserMessage = userMessage + `Entity Name: ${e},
                 Base Table: ${data[0].BaseTable},
                 Schema: ${data[0].SchemaName}.
@@ -491,7 +683,7 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
             for (const field of fields) {
                 const sDisplayName = this.stripTrailingChars(this.convertCamelCaseToHaveSpaces(field.Name), 'ID', true).trim();
                 if (sDisplayName.length > 0 && sDisplayName.toLowerCase().trim() !== field.Name.toLowerCase().trim()) {
-                    const sSQL = `UPDATE [${(0, config_1.mj_core_schema)()}].EntityField SET ${core_1.EntityInfo.UpdatedAtFieldName}=GETUTCDATE(), DisplayName = '${sDisplayName}' WHERE ID = ${field.ID}`;
+                    const sSQL = `UPDATE [${(0, config_1.mj_core_schema)()}].EntityField SET ${core_1.EntityInfo.UpdatedAtFieldName}=GETUTCDATE(), DisplayName = '${sDisplayName}' WHERE ID = '${field.ID}'`;
                     await ds.query(sSQL);
                 }
             }
@@ -538,8 +730,12 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
                 sf.AllowsNull,
                 sf.DefaultValue,
                 sf.AutoIncrement,
-                IIF(sf.IsVirtual = 1, 0, IIF(sf.FieldName = '${core_1.EntityInfo.CreatedAtFieldName}' OR
+                IIF(sf.IsVirtual = 1, 0, IIF(sf.FieldName = '${core_1.EntityInfo.CreatedAtFieldName}' OR
+                                             sf.FieldName = '${core_1.EntityInfo.UpdatedAtFieldName}' OR
+                                             sf.FieldName = '${core_1.EntityInfo.DeletedAtFieldName}' OR
+                                             pk.ColumnName IS NOT NULL, 0, 1)) AllowUpdateAPI,
                 sf.IsVirtual,
+                e.RelationshipDefaultDisplayType,
                 re.ID RelatedEntityID,
                 fk.referenced_column RelatedEntityFieldName,
                 IIF(sf.FieldName = 'Name', 1, 0) IsNameField,
@@ -591,7 +787,7 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
             SELECT
                 *
             FROM
-                NumberedRows WHERE rn = 1 -- if someone has two foreign keys with same to/from table and field name this makes sure we only get the field info ONCE
+                NumberedRows -- REMOVED - Need all fkey fields WHERE rn = 1 -- if someone has two foreign keys with same to/from table and field name this makes sure we only get the field info ONCE
             ORDER BY EntityID, Sequence`;
         return sSQL;
     }
@@ -606,14 +802,17 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
             n.Sequence <= config_1.configInfo.newEntityDefaults?.IncludeFirstNFieldsAsDefaultInView ||
             n.IsNameField ? true : false);
         const escapedDescription = n.Description ? `'${n.Description.replace(/'/g, "''")}'` : 'NULL';
-        let fieldDisplayName;
+        let fieldDisplayName = '';
         switch (n.FieldName.trim().toLowerCase()) {
-            case
+            case core_1.EntityInfo.CreatedAtFieldName.trim().toLowerCase():
                 fieldDisplayName = "Created At";
                 break;
-            case
+            case core_1.EntityInfo.UpdatedAtFieldName.trim().toLowerCase():
                 fieldDisplayName = "Updated At";
                 break;
+            case core_1.EntityInfo.DeletedAtFieldName.trim().toLowerCase():
+                fieldDisplayName = "Deleted At";
+                break;
             default:
                 fieldDisplayName = this.convertCamelCaseToHaveSpaces(n.FieldName).trim();
                 break;
@@ -642,11 +841,12 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
                 IncludeRelatedEntityNameFieldInBaseView,
                 DefaultInView,
                 IsPrimaryKey,
-                IsUnique
+                IsUnique,
+                RelatedEntityDisplayType
             )
             VALUES
             (
-                ${n.EntityID},
+                '${n.EntityID}',
                 ${n.Sequence},
                 '${n.FieldName}',
                 '${fieldDisplayName}',
@@ -660,14 +860,15 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
                 ${n.AutoIncrement ? 1 : 0},
                 ${n.AllowUpdateAPI ? 1 : 0},
                 ${n.IsVirtual ? 1 : 0},
-                ${n.RelatedEntityID},
+                ${n.RelatedEntityID && n.RelatedEntityID.length > 0 ? `'${n.RelatedEntityID}'` : 'NULL'},
                 ${n.RelatedEntityFieldName && n.RelatedEntityFieldName.length > 0 ? `'${n.RelatedEntityFieldName}'` : 'NULL'},
                 ${n.IsNameField !== null ? n.IsNameField : 0},
                 ${n.FieldName === 'ID' || n.IsNameField ? 1 : 0},
                 ${n.RelatedEntityID && n.RelatedEntityID > 0 && n.Type.trim().toLowerCase() === 'int' ? 1 : 0},
                 ${bDefaultInView ? 1 : 0},
                 ${n.IsPrimaryKey},
-                ${n.IsUnique}
+                ${n.IsUnique},
+                '${n.RelationshipDefaultDisplayType}'
             )`;
     }
     /**
@@ -701,7 +902,7 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
             // wrap in a transaction so we get all of it or none of it
             for (let i = 0; i < newEntityFields.length; ++i) {
                 const n = newEntityFields[i];
-                if (n.EntityID !== null && n.EntityID !== undefined && n.EntityID > 0) {
+                if (n.EntityID !== null && n.EntityID !== undefined && n.EntityID.length > 0) {
                     // need to check for null entity id = that is because the above query can return candidate Entity Fields but the entities may not have been created if the entities
                     // that would have been created violate rules - such as not having an ID column, etc.
                     const sSQLInsert = this.getPendingEntityFieldINSERTSQL(n);
@@ -727,7 +928,7 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
     async updateEntityFieldRelatedEntityNameFieldMap(ds, entityFieldID, relatedEntityNameFieldMap) {
         try {
             const sSQL = `EXEC [${(0, config_1.mj_core_schema)()}].spUpdateEntityFieldRelatedEntityNameFieldMap
-                @EntityFieldID
+                @EntityFieldID='${entityFieldID}',
                 @RelatedEntityNameFieldMap='${relatedEntityNameFieldMap}'`;
             await ds.query(sSQL);
             return true;
@@ -776,6 +977,8 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
             const filter = excludeSchemas && excludeSchemas.length > 0 ? ` WHERE SchemaName NOT IN (${excludeSchemas.map(s => `'${s}'`).join(',')})` : '';
             const sSQL = `SELECT * FROM [${(0, config_1.mj_core_schema)()}].vwEntityFieldsWithCheckConstraints${filter}`;
             const result = await ds.query(sSQL);
+            const efvSQL = `SELECT * FROM [${(0, config_1.mj_core_schema)()}].EntityFieldValue`;
+            const allEntityFieldValues = await ds.query(efvSQL);
             // now, for each of the constraints we get back here, loop through and evaluate if they're simple and if they're simple, parse and sync with entity field values for that field
             for (const r of result) {
                 if (r.ConstraintDefinition && r.ConstraintDefinition.length > 0) {
@@ -784,9 +987,9 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
                     // flip the order of parsedValues because they come out in reverse order from SQL Server
                     parsedValues.reverse();
                     // we have parsed values from the check constraint, so sync them with the entity field values
-                    await this.syncEntityFieldValues(ds, r.
+                    await this.syncEntityFieldValues(ds, r.EntityFieldID, parsedValues, allEntityFieldValues);
                     // finally, make sure the ValueListType column within the EntityField table is set to "List" because for check constraints we only allow the values specified in the list.
-                    await ds.query(`UPDATE [${(0, config_1.mj_core_schema)()}].EntityField SET ValueListType='List' WHERE
+                    await ds.query(`UPDATE [${(0, config_1.mj_core_schema)()}].EntityField SET ValueListType='List' WHERE ID='${r.EntityFieldID}'`);
                 }
             }
         }
@@ -797,11 +1000,10 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
             return false;
         }
     }
-    async syncEntityFieldValues(ds,
+    async syncEntityFieldValues(ds, entityFieldID, possibleValues, allEntityFieldValues) {
         try {
             // first, get a list of all of the existing entity field values for the field already in the database
-            const
-            const existingValues = await ds.query(sSQL);
+            const existingValues = allEntityFieldValues.filter(efv => efv.EntityFieldID === entityFieldID);
             // now, loop through the possible values and add any that are not already in the database
             // Step 1: for any existing value that is NOT in the list of possible Values, delete it
             let numRemoved = 0;
@@ -809,7 +1011,7 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
             for (const ev of existingValues) {
                 if (!possibleValues.find(v => v === ev.Value)) {
                     // delete the value from the database
-                    const sSQLDelete = `DELETE FROM [${(0, config_1.mj_core_schema)()}].EntityFieldValue WHERE ID
+                    const sSQLDelete = `DELETE FROM [${(0, config_1.mj_core_schema)()}].EntityFieldValue WHERE ID='${ev.ID}'`;
                     await ds.query(sSQLDelete);
                     numRemoved++;
                 }
@@ -820,9 +1022,9 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
                 if (!existingValues.find(ev => ev.Value === v)) {
                     // add the value to the database
                     const sSQLInsert = `INSERT INTO [${(0, config_1.mj_core_schema)()}].EntityFieldValue
-                        (
+                        (EntityFieldID, Sequence, Value, Code)
                         VALUES
-                        (
+                        ('${entityFieldID}', ${1 + possibleValues.indexOf(v)}, '${v}', '${v}')`;
                     await ds.query(sSQLInsert);
                     numAdded++;
                 }
@@ -831,9 +1033,9 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
             let numUpdated = 0;
             for (const v of possibleValues) {
                 const ev = existingValues.find(ev => ev.Value === v);
-                if (ev) {
-                    // update the sequence to match the order in the possible values list
-                    const sSQLUpdate = `UPDATE [${(0, config_1.mj_core_schema)()}].EntityFieldValue SET Sequence=${1 + possibleValues.indexOf(v)} WHERE ID
+                if (ev && ev.Sequence !== 1 + possibleValues.indexOf(v)) {
+                    // update the sequence to match the order in the possible values list, if it doesn't already match
+                    const sSQLUpdate = `UPDATE [${(0, config_1.mj_core_schema)()}].EntityFieldValue SET Sequence=${1 + possibleValues.indexOf(v)} WHERE ID='${ev.ID}'`;
                     await ds.query(sSQLUpdate);
                     numUpdated++;
                 }
@@ -1009,55 +1211,57 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
                 newEntityName = newEntityName + suffix;
                 (0, core_1.LogError)(` >>>> WARNING: Entity name already exists, so using ${newEntityName} instead. If you did not intend for this, please rename the ${newEntity.SchemaName}.${newEntity.TableName} table in the database.`);
             }
-
-            const
-            const
-            const
-
-
-
-
-
-
-
-
-            //
-
-
-
-
-
-
-
-
-
-
-
-
-
-            const appName = newEntity.SchemaName === (0, config_1.mj_core_schema)() ? 'Admin' : newEntity.SchemaName; // for the __mj schema or whatever it is installed as for mj_core - we want to drop stuff into the admin app
+            const isNewSchema = await this.isSchemaNew(ds, newEntity.SchemaName);
+            const sSQLInsert = this.createNewEntityInsertSQL(newEntityName, newEntity, suffix);
+            const newEntityResult = await ds.query(sSQLInsert);
+            const newEntityID = newEntityResult && newEntityResult.length > 0 ? newEntityResult[0].ID : null;
+            if (!newEntityID)
+                throw new Error(`Failed to create new entity ${newEntityName} for table ${newEntity.SchemaName}.${newEntity.TableName}`);
+            // if we get here we created a new entity safely, otherwise we get exception
+            // add it to the new entity list
+            ManageMetadataBase_1.newEntityList.push(newEntityName);
+            // next, check if this entity is in a schema that is new (e.g. no other entities have been added to this schema yet), if so and if
+            // our config option is set to create new applications from new schemas, then create a new application for this schema
+            if (isNewSchema && config_1.configInfo.newSchemaDefaults.CreateNewApplicationWithSchemaName) {
+                // new schema and config option is to create a new application from the schema name so do that
+                if (!await this.applicationExists(ds, newEntity.SchemaName))
+                    await this.createNewApplication(ds, newEntity.SchemaName);
+            }
+            else {
+                // not a new schema, attempt to look up the application for this schema
+                await this.getApplicationIDForSchema(ds, newEntity.SchemaName);
+            }
+            // now we have an application ID, but make sure that we are configured to add this new entity to an application at all
+            if (config_1.configInfo.newEntityDefaults.AddToApplicationWithSchemaName) {
+                // we should add this entity to the application
+                const appName = newEntity.SchemaName === (0, config_1.mj_core_schema)() ? 'Admin' : newEntity.SchemaName; // for the __mj schema or whatever it is installed as for mj_core - we want to drop stuff into the admin app
+                const app = md.Applications.find(a => a.Name.trim().toLowerCase() === appName.trim().toLowerCase());
+                if (app) {
                     const sSQLInsertApplicationEntity = `INSERT INTO ${(0, config_1.mj_core_schema)()}.ApplicationEntity
-                        (
-                        ('${
+                        (ApplicationID, EntityID, Sequence) VALUES
+                        ('${app.ID}', '${newEntityID}', (SELECT ISNULL(MAX(Sequence),0)+1 FROM ${(0, config_1.mj_core_schema)()}.ApplicationEntity WHERE ApplicationID = '${app.ID}'))`;
                     await ds.query(sSQLInsertApplicationEntity);
                 }
-
-
-
-
-
+                else
+                    (0, core_1.LogError)(` >>>> ERROR: Unable to find Application ID for application ${appName} to add new entity ${newEntityName} to it`);
+            }
+            // next up, we need to check if we're configured to add permissions for new entities, and if so, add them
+            if (config_1.configInfo.newEntityDefaults.PermissionDefaults && config_1.configInfo.newEntityDefaults.PermissionDefaults.AutoAddPermissionsForNewEntities) {
+                // we are asked to add permissions for new entities, so do that by looping through the permissions and adding them
+                const permissions = config_1.configInfo.newEntityDefaults.PermissionDefaults.Permissions;
+                for (const p of permissions) {
+                    const RoleID = md.Roles.find(r => r.Name.trim().toLowerCase() === p.RoleName.trim().toLowerCase())?.ID;
+                    if (RoleID) {
                         const sSQLInsertPermission = `INSERT INTO ${(0, config_1.mj_core_schema)()}.EntityPermission
-
-
+                            (EntityID, RoleID, CanRead, CanCreate, CanUpdate, CanDelete) VALUES
+                            ('${newEntityID}', '${RoleID}', ${p.CanRead ? 1 : 0}, ${p.CanCreate ? 1 : 0}, ${p.CanUpdate ? 1 : 0}, ${p.CanDelete ? 1 : 0})`;
                         await ds.query(sSQLInsertPermission);
                     }
+                    else
+                        (0, core_1.LogError)(` >>>> ERROR: Unable to find Role ID for role ${p.RoleName} to add permissions for new entity ${newEntityName}`);
                 }
-                (0, core_1.LogStatus)(` Created new entity ${newEntityName} for table ${newEntity.SchemaName}.${newEntity.TableName}`);
-            }
-            else {
-                (0, core_1.LogError)(`ERROR: Unable to get next entity ID for ${newEntity.SchemaName}.${newEntity.TableName} - it is possible that the schema has reached its MAX Id,
-                    check the Schema Info entity for this schema to see if all ID values have been allocated.`);
             }
+            (0, core_1.LogStatus)(` Created new entity ${newEntityName} for table ${newEntity.SchemaName}.${newEntity.TableName}`);
         }
         else {
             (0, core_1.LogStatus)(` Skipping new entity ${newEntity.TableName} because it doesn't qualify to be created. Reason: ${validationMessage}`);
@@ -1089,11 +1293,12 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
         const result = await ds.query(sSQL);
         return result && result.length > 0 ? result[0].ID : null;
     }
-    createNewEntityInsertSQL(
+    createNewEntityInsertSQL(newEntityName, newEntity, newEntitySuffix) {
         const newEntityDefaults = config_1.configInfo.newEntityDefaults;
         const newEntityDescriptionEscaped = newEntity.Description ? `'${newEntity.Description.replace(/'/g, "''")}` : null;
-        const sSQLInsert = `
-
+        const sSQLInsert = `
+            DECLARE @InsertedRow TABLE ([ID] UNIQUEIDENTIFIER)
+            INSERT INTO [${(0, config_1.mj_core_schema)()}].Entity (
                 Name,
                 Description,
                 NameSuffix,
@@ -1110,9 +1315,9 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
                 ${newEntityDefaults.AllowUpdateAPI === undefined ? '' : ', AllowUpdateAPI'}
                 ${newEntityDefaults.AllowDeleteAPI === undefined ? '' : ', AllowDeleteAPI'}
                 ${newEntityDefaults.UserViewMaxRows === undefined ? '' : ', UserViewMaxRows'}
-
-
-
+            )
+            OUTPUT INSERTED.[ID] INTO @InsertedRow
+            VALUES (
                 '${newEntityName}',
                 ${newEntityDescriptionEscaped ? newEntityDescriptionEscaped : 'NULL' /*if no description, then null*/},
                 ${newEntitySuffix && newEntitySuffix.length > 0 ? `'${newEntitySuffix}'` : 'NULL'},
@@ -1129,7 +1334,9 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
                 ${newEntityDefaults.AllowUpdateAPI === undefined ? '' : ', ' + (newEntityDefaults.AllowUpdateAPI ? '1' : '0')}
                 ${newEntityDefaults.AllowDeleteAPI === undefined ? '' : ', ' + (newEntityDefaults.AllowDeleteAPI ? '1' : '0')}
                 ${newEntityDefaults.UserViewMaxRows === undefined ? '' : ', ' + (newEntityDefaults.UserViewMaxRows)}
-
+            )
+            SELECT * FROM [__mj].vwEntities WHERE [ID] = (SELECT [ID] FROM @InsertedRow)
+            `;
         return sSQLInsert;
     }
     stripTrailingChars(s, charsToStrip, skipIfExactMatch) {
|