@memberjunction/codegen-lib 2.47.0 → 2.49.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/Angular/angular-codegen.d.ts +164 -6
- package/dist/Angular/angular-codegen.d.ts.map +1 -1
- package/dist/Angular/angular-codegen.js +179 -25
- package/dist/Angular/angular-codegen.js.map +1 -1
- package/dist/Angular/join-grid-related-entity-component.d.ts +52 -3
- package/dist/Angular/join-grid-related-entity-component.d.ts.map +1 -1
- package/dist/Angular/join-grid-related-entity-component.js +58 -3
- package/dist/Angular/join-grid-related-entity-component.js.map +1 -1
- package/dist/Angular/related-entity-components.d.ts +99 -42
- package/dist/Angular/related-entity-components.d.ts.map +1 -1
- package/dist/Angular/related-entity-components.js +116 -26
- package/dist/Angular/related-entity-components.js.map +1 -1
- package/dist/Angular/timeline-related-entity-component.d.ts +46 -7
- package/dist/Angular/timeline-related-entity-component.d.ts.map +1 -1
- package/dist/Angular/timeline-related-entity-component.js +64 -7
- package/dist/Angular/timeline-related-entity-component.js.map +1 -1
- package/dist/Angular/user-view-grid-related-entity-component.d.ts +33 -1
- package/dist/Angular/user-view-grid-related-entity-component.d.ts.map +1 -1
- package/dist/Angular/user-view-grid-related-entity-component.js +33 -1
- package/dist/Angular/user-view-grid-related-entity-component.js.map +1 -1
- package/dist/Config/config.d.ts +369 -45
- package/dist/Config/config.d.ts.map +1 -1
- package/dist/Config/config.js +136 -2
- package/dist/Config/config.js.map +1 -1
- package/dist/Config/db-connection.d.ts +17 -3
- package/dist/Config/db-connection.d.ts.map +1 -1
- package/dist/Config/db-connection.js +31 -19
- package/dist/Config/db-connection.js.map +1 -1
- package/dist/Database/dbSchema.d.ts +44 -1
- package/dist/Database/dbSchema.d.ts.map +1 -1
- package/dist/Database/dbSchema.js +46 -13
- package/dist/Database/dbSchema.js.map +1 -1
- package/dist/Database/manage-metadata.d.ts +52 -46
- package/dist/Database/manage-metadata.d.ts.map +1 -1
- package/dist/Database/manage-metadata.js +235 -191
- package/dist/Database/manage-metadata.js.map +1 -1
- package/dist/Database/reorder-columns.d.ts +2 -2
- package/dist/Database/reorder-columns.d.ts.map +1 -1
- package/dist/Database/reorder-columns.js +23 -17
- package/dist/Database/reorder-columns.js.map +1 -1
- package/dist/Database/sql.d.ts +4 -4
- package/dist/Database/sql.d.ts.map +1 -1
- package/dist/Database/sql.js +5 -16
- package/dist/Database/sql.js.map +1 -1
- package/dist/Database/sql_codegen.d.ts +15 -15
- package/dist/Database/sql_codegen.d.ts.map +1 -1
- package/dist/Database/sql_codegen.js +186 -123
- package/dist/Database/sql_codegen.js.map +1 -1
- package/dist/Misc/action_subclasses_codegen.d.ts.map +1 -1
- package/dist/Misc/action_subclasses_codegen.js +2 -12
- package/dist/Misc/action_subclasses_codegen.js.map +1 -1
- package/dist/Misc/advanced_generation.d.ts.map +1 -1
- package/dist/Misc/advanced_generation.js +82 -92
- package/dist/Misc/advanced_generation.js.map +1 -1
- package/dist/Misc/createNewUser.d.ts.map +1 -1
- package/dist/Misc/createNewUser.js +2 -12
- package/dist/Misc/createNewUser.js.map +1 -1
- package/dist/Misc/entity_subclasses_codegen.d.ts +5 -5
- package/dist/Misc/entity_subclasses_codegen.d.ts.map +1 -1
- package/dist/Misc/entity_subclasses_codegen.js +12 -20
- package/dist/Misc/entity_subclasses_codegen.js.map +1 -1
- package/dist/Misc/graphql_server_codegen.d.ts.map +1 -1
- package/dist/Misc/graphql_server_codegen.js +35 -40
- package/dist/Misc/graphql_server_codegen.js.map +1 -1
- package/dist/Misc/runCommand.d.ts.map +1 -1
- package/dist/Misc/runCommand.js +2 -12
- package/dist/Misc/runCommand.js.map +1 -1
- package/dist/Misc/sql_logging.d.ts +2 -2
- package/dist/Misc/sql_logging.d.ts.map +1 -1
- package/dist/Misc/sql_logging.js +4 -3
- package/dist/Misc/sql_logging.js.map +1 -1
- package/dist/Misc/status_logging.d.ts +37 -0
- package/dist/Misc/status_logging.d.ts.map +1 -1
- package/dist/Misc/status_logging.js +147 -14
- package/dist/Misc/status_logging.js.map +1 -1
- package/dist/Misc/system_integrity.d.ts +9 -9
- package/dist/Misc/system_integrity.d.ts.map +1 -1
- package/dist/Misc/system_integrity.js +23 -21
- package/dist/Misc/system_integrity.js.map +1 -1
- package/dist/index.d.ts +45 -7
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +51 -7
- package/dist/index.js.map +1 -1
- package/dist/runCodeGen.d.ts +84 -6
- package/dist/runCodeGen.d.ts.map +1 -1
- package/dist/runCodeGen.js +244 -93
- package/dist/runCodeGen.js.map +1 -1
- package/package.json +14 -14
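
The most visible change across these files, as reproduced in the diff below, is that query call sites which previously received a data-source style argument now take an `mssql` `ConnectionPool` and read rows from `.recordset`, and the compiled output no longer emits the `__decorate` helper or the `ManageMetadataBase_1` class alias. The following is a minimal sketch of that connection-pool pattern, assuming the standard `mssql` package API; the `config` values and the `vwEntities` query are illustrative placeholders drawn from the diff, not a verified end-to-end example.

```ts
import * as sql from 'mssql';

// Hypothetical connection settings - replace with your own server/database values.
const config: sql.config = {
  server: 'localhost',
  database: 'MJ_DEV',
  user: 'codegen',
  password: 'example',
  options: { trustServerCertificate: true },
};

async function listVirtualEntities(): Promise<void> {
  // A ConnectionPool is what the updated methods now receive as their `pool` parameter.
  const pool = new sql.ConnectionPool(config);
  await pool.connect();
  try {
    // Same shape as the updated call sites: pool.request().query(...) then read .recordset.
    const result = await pool.request().query(
      'SELECT * FROM [__mj].vwEntities WHERE VirtualEntity = 1'
    );
    for (const row of result.recordset) {
      console.log(row.Name);
    }
  } finally {
    await pool.close();
  }
}
```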
|
@@ -15,12 +15,6 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (
|
|
|
15
15
|
}) : function(o, v) {
|
|
16
16
|
o["default"] = v;
|
|
17
17
|
});
|
|
18
|
-
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
|
|
19
|
-
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
|
|
20
|
-
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
|
|
21
|
-
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
|
|
22
|
-
return c > 3 && r && Object.defineProperty(target, key, r), r;
|
|
23
|
-
};
|
|
24
18
|
var __importStar = (this && this.__importStar) || function (mod) {
|
|
25
19
|
if (mod && mod.__esModule) return mod;
|
|
26
20
|
var result = {};
|
|
@@ -31,9 +25,9 @@ var __importStar = (this && this.__importStar) || function (mod) {
|
|
|
31
25
|
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
32
26
|
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
33
27
|
};
|
|
34
|
-
var ManageMetadataBase_1;
|
|
35
28
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
36
29
|
exports.ManageMetadataBase = exports.ValidatorResult = void 0;
|
|
30
|
+
const sql = __importStar(require("mssql"));
|
|
37
31
|
const config_1 = require("../Config/config");
|
|
38
32
|
const core_1 = require("@memberjunction/core");
|
|
39
33
|
const status_logging_1 = require("../Misc/status_logging");
|
|
@@ -46,48 +40,48 @@ const path_1 = __importDefault(require("path"));
|
|
|
46
40
|
const sql_logging_1 = require("../Misc/sql_logging");
|
|
47
41
|
const aiengine_1 = require("@memberjunction/aiengine");
|
|
48
42
|
class ValidatorResult {
|
|
49
|
-
|
|
50
|
-
|
|
51
|
-
|
|
52
|
-
|
|
53
|
-
|
|
54
|
-
|
|
55
|
-
|
|
56
|
-
|
|
57
|
-
|
|
58
|
-
|
|
59
|
-
|
|
60
|
-
|
|
61
|
-
|
|
62
|
-
|
|
63
|
-
|
|
64
|
-
|
|
65
|
-
}
|
|
43
|
+
entityName = "";
|
|
44
|
+
fieldName;
|
|
45
|
+
sourceCheckConstraint = "";
|
|
46
|
+
functionText = "";
|
|
47
|
+
functionName = "";
|
|
48
|
+
functionDescription = "";
|
|
49
|
+
/**
|
|
50
|
+
* The ID value in the Generated Codes entity that was created for this validator.
|
|
51
|
+
*/
|
|
52
|
+
generatedCodeId = "";
|
|
53
|
+
/**
|
|
54
|
+
* The ID for the AI Model that was used to generate the code
|
|
55
|
+
*/
|
|
56
|
+
aiModelID = "";
|
|
57
|
+
wasGenerated = true;
|
|
58
|
+
success = false;
|
|
66
59
|
}
|
|
67
60
|
exports.ValidatorResult = ValidatorResult;
|
|
68
61
|
/**
|
|
69
62
|
* Base class for managing metadata within the CodeGen system. This class can be sub-classed to extend/override base class functionality. Make sure to use the RegisterClass decorator from the @memberjunction/global package
|
|
70
63
|
* to properly register your subclass with a priority of 1+ to ensure it gets instantiated.
|
|
71
64
|
*/
|
|
72
|
-
|
|
73
|
-
|
|
74
|
-
this._sqlUtilityObject = global_1.MJGlobal.Instance.ClassFactory.CreateInstance(sql_1.SQLUtilityBase);
|
|
75
|
-
}
|
|
65
|
+
class ManageMetadataBase {
|
|
66
|
+
_sqlUtilityObject = global_1.MJGlobal.Instance.ClassFactory.CreateInstance(sql_1.SQLUtilityBase);
|
|
76
67
|
get SQLUtilityObject() {
|
|
77
68
|
return this._sqlUtilityObject;
|
|
78
69
|
}
|
|
70
|
+
static _newEntityList = [];
|
|
79
71
|
/**
|
|
80
72
|
* Globally scoped list of entities that have been created during the metadata management process.
|
|
81
73
|
*/
|
|
82
74
|
static get newEntityList() {
|
|
83
75
|
return this._newEntityList;
|
|
84
76
|
}
|
|
77
|
+
static _modifiedEntityList = [];
|
|
85
78
|
/**
|
|
86
79
|
* Globally scoped list of entities that have been modified during the metadata management process.
|
|
87
80
|
*/
|
|
88
81
|
static get modifiedEntityList() {
|
|
89
82
|
return this._modifiedEntityList;
|
|
90
83
|
}
|
|
84
|
+
static _generatedValidators = [];
|
|
91
85
|
/**
|
|
92
86
|
* Globally scoped list of validators that have been generated during the metadata management process.
|
|
93
87
|
*/
|
|
@@ -96,30 +90,30 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
|
|
|
96
90
|
}
|
|
97
91
|
/**
|
|
98
92
|
* Primary function to manage metadata within the CodeGen system. This function will call a series of sub-functions to manage the metadata.
|
|
99
|
-
* @param
|
|
93
|
+
* @param pool - the ConnectionPool object to use for querying and updating the database
|
|
100
94
|
* @returns
|
|
101
95
|
*/
|
|
102
|
-
async manageMetadata(
|
|
96
|
+
async manageMetadata(pool, currentUser) {
|
|
103
97
|
const md = new core_1.Metadata();
|
|
104
98
|
const excludeSchemas = config_1.configInfo.excludeSchemas ? config_1.configInfo.excludeSchemas : [];
|
|
105
99
|
let bSuccess = true;
|
|
106
100
|
let start = new Date();
|
|
107
101
|
(0, status_logging_1.logStatus)(' Creating new entities...');
|
|
108
|
-
if (!await this.createNewEntities(
|
|
102
|
+
if (!await this.createNewEntities(pool)) {
|
|
109
103
|
(0, status_logging_1.logError)(' Error creating new entities');
|
|
110
104
|
bSuccess = false;
|
|
111
105
|
}
|
|
112
106
|
(0, status_logging_1.logStatus)(` > Created new entities in ${(new Date().getTime() - start.getTime()) / 1000} seconds`);
|
|
113
107
|
start = new Date();
|
|
114
108
|
(0, status_logging_1.logStatus)(' Updating existing entities...');
|
|
115
|
-
if (!await this.updateExistingEntitiesFromSchema(
|
|
109
|
+
if (!await this.updateExistingEntitiesFromSchema(pool, excludeSchemas)) {
|
|
116
110
|
(0, status_logging_1.logError)(' Error updating existing entities');
|
|
117
111
|
bSuccess = false;
|
|
118
112
|
}
|
|
119
113
|
(0, status_logging_1.logStatus)(` > Updated existing entities in ${(new Date().getTime() - start.getTime()) / 1000} seconds`);
|
|
120
114
|
start = new Date();
|
|
121
115
|
(0, status_logging_1.logStatus)(' Scanning for tables that were deleted where entity metadata still exists...');
|
|
122
|
-
if (!await this.checkAndRemoveMetadataForDeletedTables(
|
|
116
|
+
if (!await this.checkAndRemoveMetadataForDeletedTables(pool, excludeSchemas)) {
|
|
123
117
|
(0, status_logging_1.logError)(' Error removing metadata for tables that were removed');
|
|
124
118
|
bSuccess = false;
|
|
125
119
|
}
|
|
@@ -131,7 +125,7 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
|
|
|
131
125
|
const schemasToExclude = (0, config_1.getSettingValue)('recompile_mj_views', true)
|
|
132
126
|
? excludeSchemas.filter((s) => s !== adminSchema)
|
|
133
127
|
: excludeSchemas;
|
|
134
|
-
if (!await sqlUtility.recompileAllBaseViews(
|
|
128
|
+
if (!await sqlUtility.recompileAllBaseViews(pool, schemasToExclude, true, ManageMetadataBase._newEntityList /*exclude the newly created entities from the above step the first time we run as those views don't exist yet*/)) {
|
|
135
129
|
(0, status_logging_1.logMessage)(' Warning: Non-Fatal error recompiling base views', core_1.SeverityType.Warning, false);
|
|
136
130
|
// many times the former versions of base views will NOT succesfully recompile, so don't consider that scenario to be a
|
|
137
131
|
// failure for this entire function
|
|
@@ -139,40 +133,41 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
|
|
|
139
133
|
(0, status_logging_1.logStatus)(` > Recompiled base views in ${(new Date().getTime() - start.getTime()) / 1000} seconds`);
|
|
140
134
|
start = new Date();
|
|
141
135
|
(0, status_logging_1.logStatus)(' Managing entity fields...');
|
|
142
|
-
if (!await this.manageEntityFields(
|
|
136
|
+
if (!await this.manageEntityFields(pool, excludeSchemas, false, false, currentUser)) {
|
|
143
137
|
(0, status_logging_1.logError)(' Error managing entity fields');
|
|
144
138
|
bSuccess = false;
|
|
145
139
|
}
|
|
146
140
|
(0, status_logging_1.logStatus)(` > Managed entity fields in ${(new Date().getTime() - start.getTime()) / 1000} seconds`);
|
|
147
141
|
start = new Date();
|
|
148
142
|
(0, status_logging_1.logStatus)(' Managing entity relationships...');
|
|
149
|
-
if (!await this.manageEntityRelationships(
|
|
143
|
+
if (!await this.manageEntityRelationships(pool, excludeSchemas, md)) {
|
|
150
144
|
(0, status_logging_1.logError)(' Error managing entity relationships');
|
|
151
145
|
bSuccess = false;
|
|
152
146
|
}
|
|
153
147
|
(0, status_logging_1.logStatus)(` > Managed entity relationships in ${(new Date().getTime() - start.getTime()) / 1000} seconds`);
|
|
154
|
-
if (
|
|
155
|
-
await this.generateNewEntityDescriptions(
|
|
148
|
+
if (ManageMetadataBase.newEntityList.length > 0) {
|
|
149
|
+
await this.generateNewEntityDescriptions(pool, md); // don't pass excludeSchemas becuase by definition this is the NEW entities we created
|
|
156
150
|
}
|
|
157
|
-
const veResult = await this.manageVirtualEntities(
|
|
151
|
+
const veResult = await this.manageVirtualEntities(pool);
|
|
158
152
|
if (!veResult.success) {
|
|
159
153
|
(0, status_logging_1.logError)(' Error managing virtual entities');
|
|
160
154
|
bSuccess = false;
|
|
161
155
|
}
|
|
162
156
|
return bSuccess;
|
|
163
157
|
}
|
|
164
|
-
async manageVirtualEntities(
|
|
158
|
+
async manageVirtualEntities(pool) {
|
|
165
159
|
let bSuccess = true;
|
|
166
160
|
// virtual entities are records defined in the entity metadata and do NOT define a distinct base table
|
|
167
161
|
// but they do specify a base view. We DO NOT generate a base view for a virtual entity, we simply use it to figure
|
|
168
162
|
// out the fields that should be in the entity definition and add/update/delete the entity definition to match what's in the view when this runs
|
|
169
163
|
const sql = `SELECT * FROM [${(0, config_1.mj_core_schema)()}].vwEntities WHERE VirtualEntity = 1`;
|
|
170
|
-
const
|
|
164
|
+
const virtualEntitiesResult = await pool.request().query(sql);
|
|
165
|
+
const virtualEntities = virtualEntitiesResult.recordset;
|
|
171
166
|
let anyUpdates = false;
|
|
172
167
|
if (virtualEntities && virtualEntities.length > 0) {
|
|
173
168
|
// we have 1+ virtual entities, now loop through them and process each one
|
|
174
169
|
for (const ve of virtualEntities) {
|
|
175
|
-
const { success, updatedEntity } = await this.manageSingleVirtualEntity(
|
|
170
|
+
const { success, updatedEntity } = await this.manageSingleVirtualEntity(pool, ve);
|
|
176
171
|
anyUpdates = anyUpdates || updatedEntity;
|
|
177
172
|
if (!success) {
|
|
178
173
|
(0, status_logging_1.logError)(` Error managing virtual entity ${ve.Name}`);
|
|
@@ -182,7 +177,7 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
|
|
|
182
177
|
}
|
|
183
178
|
return { success: bSuccess, anyUpdates: anyUpdates };
|
|
184
179
|
}
|
|
185
|
-
async manageSingleVirtualEntity(
|
|
180
|
+
async manageSingleVirtualEntity(pool, virtualEntity) {
|
|
186
181
|
let bSuccess = true;
|
|
187
182
|
let bUpdated = false;
|
|
188
183
|
try {
|
|
@@ -201,7 +196,8 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
|
|
|
201
196
|
SCHEMA_NAME(v.schema_id) = '${virtualEntity.SchemaName}'
|
|
202
197
|
ORDER BY
|
|
203
198
|
c.column_id`;
|
|
204
|
-
const
|
|
199
|
+
const veFieldsResult = await pool.request().query(sql);
|
|
200
|
+
const veFields = veFieldsResult.recordset;
|
|
205
201
|
if (veFields && veFields.length > 0) {
|
|
206
202
|
// we have 1+ fields, now loop through them and process each one
|
|
207
203
|
// first though, remove any fields that are no longer in the view
|
|
@@ -216,7 +212,7 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
|
|
|
216
212
|
if (removeList.length > 0) {
|
|
217
213
|
const sqlRemove = `DELETE FROM [${(0, config_1.mj_core_schema)()}].EntityField WHERE ID IN (${removeList.map(removeId => `'${removeId}'`).join(',')})`;
|
|
218
214
|
// this removes the fields that shouldn't be there anymore
|
|
219
|
-
await this.LogSQLAndExecute(
|
|
215
|
+
await this.LogSQLAndExecute(pool, sqlRemove, `SQL text to remove fields from entity ${virtualEntity.Name}`);
|
|
220
216
|
bUpdated = true;
|
|
221
217
|
}
|
|
222
218
|
// check to see if any of the fields in the virtual entity have Pkey attribute set. If not, we will default to the first field
|
|
@@ -225,7 +221,7 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
|
|
|
225
221
|
// now create/update the fields that are in the view
|
|
226
222
|
for (let i = 0; i < veFields.length; i++) {
|
|
227
223
|
const vef = veFields[i];
|
|
228
|
-
const { success, updatedField } = await this.manageSingleVirtualEntityField(
|
|
224
|
+
const { success, updatedField } = await this.manageSingleVirtualEntityField(pool, virtualEntity, vef, i + 1, !hasPkey && i === 0);
|
|
229
225
|
bUpdated = bUpdated || updatedField;
|
|
230
226
|
if (!success) {
|
|
231
227
|
(0, status_logging_1.logError)(`Error managing virtual entity field ${vef.FieldName} for virtual entity ${virtualEntity.Name}`);
|
|
@@ -237,7 +233,7 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
|
|
|
237
233
|
if (bUpdated) {
|
|
238
234
|
// finally make sure we update the UpdatedAt field for the entity if we made changes to its fields
|
|
239
235
|
const sqlUpdate = `UPDATE [${(0, config_1.mj_core_schema)()}].Entity SET [${core_1.EntityInfo.UpdatedAtFieldName}]=GETUTCDATE() WHERE ID='${virtualEntity.ID}'`;
|
|
240
|
-
await this.LogSQLAndExecute(
|
|
236
|
+
await this.LogSQLAndExecute(pool, sqlUpdate, `SQL text to update virtual entity updated date for ${virtualEntity.Name}`);
|
|
241
237
|
}
|
|
242
238
|
return { success: bSuccess, updatedEntity: bUpdated };
|
|
243
239
|
}
|
|
@@ -246,7 +242,7 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
|
|
|
246
242
|
return { success: false, updatedEntity: bUpdated };
|
|
247
243
|
}
|
|
248
244
|
}
|
|
249
|
-
async manageSingleVirtualEntityField(
|
|
245
|
+
async manageSingleVirtualEntityField(pool, virtualEntity, veField, fieldSequence, makePrimaryKey) {
|
|
250
246
|
// this protected checks to see if the field exists in the entity definition, and if not, adds it
|
|
251
247
|
// if it exist it updates the entity field to match the view's data type and nullability attributes
|
|
252
248
|
// first, get the entity definition
|
|
@@ -278,7 +274,7 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
|
|
|
278
274
|
Scale=${veField.Scale}
|
|
279
275
|
WHERE
|
|
280
276
|
ID = '${field.ID}'`; // don't need to update the __mj_UpdatedAt field here, that happens automatically via the trigger
|
|
281
|
-
await this.LogSQLAndExecute(
|
|
277
|
+
await this.LogSQLAndExecute(pool, sqlUpdate, `SQL text to update virtual entity field ${veField.FieldName} for entity ${virtualEntity.Name}`);
|
|
282
278
|
didUpdate = true;
|
|
283
279
|
}
|
|
284
280
|
}
|
|
@@ -293,7 +289,7 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
|
|
|
293
289
|
${veField.Length}, ${veField.Precision}, ${veField.Scale},
|
|
294
290
|
${fieldSequence}, ${makePrimaryKey ? 1 : 0}, ${makePrimaryKey ? 1 : 0}
|
|
295
291
|
)`;
|
|
296
|
-
await this.LogSQLAndExecute(
|
|
292
|
+
await this.LogSQLAndExecute(pool, sqlAdd, `SQL text to add virtual entity field ${veField.FieldName} for entity ${virtualEntity.Name}`);
|
|
297
293
|
didUpdate = true;
|
|
298
294
|
}
|
|
299
295
|
}
|
|
@@ -301,25 +297,25 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
|
|
|
301
297
|
}
|
|
302
298
|
/**
|
|
303
299
|
* This method creates and updates relationships in the metadata based on foreign key relationships in the database.
|
|
304
|
-
* @param
|
|
300
|
+
* @param pool
|
|
305
301
|
* @param excludeSchemas - specify any schemas to exclude here and any relationships to/from the specified schemas will be ignored
|
|
306
302
|
* @param md
|
|
307
303
|
* @returns
|
|
308
304
|
*/
|
|
309
|
-
async manageEntityRelationships(
|
|
305
|
+
async manageEntityRelationships(pool, excludeSchemas, md, batchItems = 5) {
|
|
310
306
|
let bResult = true;
|
|
311
|
-
bResult = bResult && await this.manageManyToManyEntityRelationships(
|
|
312
|
-
bResult = bResult && await this.manageOneToManyEntityRelationships(
|
|
307
|
+
bResult = bResult && await this.manageManyToManyEntityRelationships(pool, excludeSchemas, batchItems);
|
|
308
|
+
bResult = bResult && await this.manageOneToManyEntityRelationships(pool, excludeSchemas, md, batchItems);
|
|
313
309
|
return bResult;
|
|
314
310
|
}
|
|
315
311
|
/**
|
|
316
312
|
* Manages 1->M relationships between entities in the metadata based on foreign key relationships in the database.
|
|
317
|
-
* @param
|
|
313
|
+
* @param pool
|
|
318
314
|
* @param excludeSchemas - specify any schemas to exclude here and any relationships to/from the specified schemas will be ignored
|
|
319
315
|
* @param md
|
|
320
316
|
* @returns
|
|
321
317
|
*/
|
|
322
|
-
async manageOneToManyEntityRelationships(
|
|
318
|
+
async manageOneToManyEntityRelationships(pool, excludeSchemas, md, batchItems = 5) {
|
|
323
319
|
// the way this works is that we look for entities in our catalog and we look for
|
|
324
320
|
// foreign keys in those entities. For example, if we saw an entity called Persons and that entity
|
|
325
321
|
// had a foreign key linking to an entity called Organizations via a field called OrganizationID, then we would create a relationship
|
|
@@ -341,17 +337,20 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
|
|
|
341
337
|
IsVirtual = 0 AND
|
|
342
338
|
EntityID NOT IN (SELECT ID FROM ${(0, config_1.mj_core_schema)()}.Entity WHERE SchemaName IN (${excludeSchemas.map(s => `'${s}'`).join(',')}))
|
|
343
339
|
ORDER BY RelatedEntityID`;
|
|
344
|
-
const
|
|
340
|
+
const entityFieldsResult = await pool.request().query(sSQL);
|
|
341
|
+
const entityFields = entityFieldsResult.recordset;
|
|
345
342
|
// Get the relationship counts for each entity
|
|
346
343
|
const sSQLRelationshipCount = `SELECT EntityID, COUNT(*) AS Count FROM ${(0, config_1.mj_core_schema)()}.EntityRelationship GROUP BY EntityID`;
|
|
347
|
-
const
|
|
344
|
+
const relationshipCountsResult = await pool.request().query(sSQLRelationshipCount);
|
|
345
|
+
const relationshipCounts = relationshipCountsResult.recordset;
|
|
348
346
|
const relationshipCountMap = new Map();
|
|
349
347
|
for (const rc of relationshipCounts) {
|
|
350
348
|
relationshipCountMap.set(rc.EntityID, rc.Count);
|
|
351
349
|
}
|
|
352
350
|
// get all relationships in one query for performance improvement
|
|
353
351
|
const sSQLRelationship = `SELECT * FROM ${(0, config_1.mj_core_schema)()}.EntityRelationship`;
|
|
354
|
-
const
|
|
352
|
+
const allRelationshipsResult = await pool.request().query(sSQLRelationship);
|
|
353
|
+
const allRelationships = allRelationshipsResult.recordset;
|
|
355
354
|
// Function to process a batch of entity fields
|
|
356
355
|
const processBatch = async (batch) => {
|
|
357
356
|
let batchSQL = '';
|
|
@@ -381,7 +380,7 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
|
|
|
381
380
|
}
|
|
382
381
|
});
|
|
383
382
|
if (batchSQL.length > 0) {
|
|
384
|
-
await this.LogSQLAndExecute(
|
|
383
|
+
await this.LogSQLAndExecute(pool, batchSQL, `SQL text to create Entitiy Relationships`);
|
|
385
384
|
}
|
|
386
385
|
};
|
|
387
386
|
// Split entityFields into batches and process each batch
|
|
@@ -402,10 +401,11 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
|
|
|
402
401
|
* @param ds
|
|
403
402
|
* @param excludeSchemas
|
|
404
403
|
*/
|
|
405
|
-
async checkAndRemoveMetadataForDeletedTables(
|
|
404
|
+
async checkAndRemoveMetadataForDeletedTables(pool, excludeSchemas) {
|
|
406
405
|
try {
|
|
407
406
|
const sql = `SELECT * FROM ${(0, config_1.mj_core_schema)()}.vwEntitiesWithMissingBaseTables WHERE VirtualEntity=0`;
|
|
408
|
-
const
|
|
407
|
+
const entitiesResult = await pool.request().query(sql);
|
|
408
|
+
const entities = entitiesResult.recordset;
|
|
409
409
|
if (entities && entities.length > 0) {
|
|
410
410
|
for (const e of entities) {
|
|
411
411
|
// for the given entity, wipe out the entity metadata and its core deps.
|
|
@@ -413,15 +413,15 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
|
|
|
413
413
|
// for the admin to handle manually
|
|
414
414
|
try {
|
|
415
415
|
const sqlDelete = `__mj.spDeleteEntityWithCoreDependencies @EntityID='${e.ID}'`;
|
|
416
|
-
await this.LogSQLAndExecute(
|
|
416
|
+
await this.LogSQLAndExecute(pool, sqlDelete, `SQL text to remove entity ${e.Name}`);
|
|
417
417
|
(0, status_logging_1.logStatus)(` > Removed metadata for table ${e.SchemaName}.${e.BaseTable}`);
|
|
418
418
|
// next up we need to remove the spCreate, spDelete, spUpdate, BaseView, and FullTextSearchFunction, if provided.
|
|
419
419
|
// We only remoe these artifcacts when they are generated which is info we have in the BaseViewGenerated, spCreateGenerated, etc. fields
|
|
420
|
-
await this.checkDropSQLObject(
|
|
421
|
-
await this.checkDropSQLObject(
|
|
422
|
-
await this.checkDropSQLObject(
|
|
423
|
-
await this.checkDropSQLObject(
|
|
424
|
-
await this.checkDropSQLObject(
|
|
420
|
+
await this.checkDropSQLObject(pool, e.BaseViewGenerated, 'view', e.SchemaName, e.BaseView);
|
|
421
|
+
await this.checkDropSQLObject(pool, e.spCreateGenerated, 'procedure', e.SchemaName, e.spCreate ? e.spCreate : `spCreate${e.ClassName}`);
|
|
422
|
+
await this.checkDropSQLObject(pool, e.spDeleteGenerated, 'procedure', e.SchemaName, e.spDelete ? e.spDelete : `spDelete${e.ClassName}`);
|
|
423
|
+
await this.checkDropSQLObject(pool, e.spUpdateGenerated, 'procedure', e.SchemaName, e.spUpdate ? e.spUpdate : `spUpdate${e.ClassName}`);
|
|
424
|
+
await this.checkDropSQLObject(pool, e.FullTextSearchFunctionGenerated, 'function', e.SchemaName, e.FullTextSearchFunction);
|
|
425
425
|
}
|
|
426
426
|
catch (ex) {
|
|
427
427
|
(0, status_logging_1.logError)(`Error removing metadata for entity ${ex.Name}, error: ${ex}`);
|
|
@@ -438,11 +438,11 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
|
|
|
438
438
|
return false;
|
|
439
439
|
}
|
|
440
440
|
}
|
|
441
|
-
async checkDropSQLObject(
|
|
441
|
+
async checkDropSQLObject(pool, proceed, type, schemaName, name) {
|
|
442
442
|
try {
|
|
443
443
|
if (proceed && schemaName && name && schemaName.trim().length > 0 && name.trim().length > 0) {
|
|
444
444
|
const sqlDelete = `DROP ${type} IF EXISTS [${schemaName}].[${name}]`;
|
|
445
|
-
await this.LogSQLAndExecute(
|
|
445
|
+
await this.LogSQLAndExecute(pool, sqlDelete, `SQL text to remove ${type} ${schemaName}.${name}`);
|
|
446
446
|
// next up, we need to clean up the cache of saved DB objects that may exist for this entity in the appropriate sub-directory.
|
|
447
447
|
const sqlOutputDir = (0, config_1.outputDir)('SQL', true);
|
|
448
448
|
if (sqlOutputDir) {
|
|
@@ -471,52 +471,58 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
|
|
|
471
471
|
* @param excludeSchemas
|
|
472
472
|
* @returns
|
|
473
473
|
*/
|
|
474
|
-
async manageManyToManyEntityRelationships(
|
|
474
|
+
async manageManyToManyEntityRelationships(pool, excludeSchemas, batchItems = 5) {
|
|
475
475
|
return true; // not implemented for now, require the admin to manually create these relationships
|
|
476
476
|
}
|
|
477
477
|
/**
|
|
478
478
|
* Manages the creation, updating and deletion of entity field records in the metadata based on the database schema.
|
|
479
|
-
* @param
|
|
479
|
+
* @param pool
|
|
480
480
|
* @param excludeSchemas
|
|
481
481
|
* @returns
|
|
482
482
|
*/
|
|
483
|
-
async manageEntityFields(
|
|
483
|
+
async manageEntityFields(pool, excludeSchemas, skipCreatedAtUpdatedAtDeletedAtFieldValidation, skipEntityFieldValues, currentUser) {
|
|
484
484
|
let bSuccess = true;
|
|
485
485
|
const startTime = new Date();
|
|
486
486
|
if (!skipCreatedAtUpdatedAtDeletedAtFieldValidation) {
|
|
487
|
-
if (!await this.ensureCreatedAtUpdatedAtFieldsExist(
|
|
488
|
-
!await this.ensureDeletedAtFieldsExist(
|
|
487
|
+
if (!await this.ensureCreatedAtUpdatedAtFieldsExist(pool, excludeSchemas) ||
|
|
488
|
+
!await this.ensureDeletedAtFieldsExist(pool, excludeSchemas)) {
|
|
489
489
|
(0, status_logging_1.logError)(`Error ensuring ${core_1.EntityInfo.CreatedAtFieldName}, ${core_1.EntityInfo.UpdatedAtFieldName} and ${core_1.EntityInfo.DeletedAtFieldName} fields exist`);
|
|
490
490
|
bSuccess = false;
|
|
491
491
|
}
|
|
492
492
|
(0, status_logging_1.logStatus)(` Ensured ${core_1.EntityInfo.CreatedAtFieldName}/${core_1.EntityInfo.UpdatedAtFieldName}/${core_1.EntityInfo.DeletedAtFieldName} fields exist in ${(new Date().getTime() - startTime.getTime()) / 1000} seconds`);
|
|
493
493
|
}
|
|
494
494
|
const step1StartTime = new Date();
|
|
495
|
-
if (!await this.deleteUnneededEntityFields(
|
|
495
|
+
if (!await this.deleteUnneededEntityFields(pool, excludeSchemas)) {
|
|
496
496
|
(0, status_logging_1.logError)('Error deleting unneeded entity fields');
|
|
497
497
|
bSuccess = false;
|
|
498
498
|
}
|
|
499
499
|
(0, status_logging_1.logStatus)(` Deleted unneeded entity fields in ${(new Date().getTime() - step1StartTime.getTime()) / 1000} seconds`);
|
|
500
|
+
// AN: 14-June-2025 - See note below about the new order of these steps, this must
|
|
501
|
+
// happen before we update existing entity fields from schema.
|
|
500
502
|
const step2StartTime = new Date();
|
|
501
|
-
if (!await this.
|
|
502
|
-
(0, status_logging_1.logError)('Error
|
|
503
|
+
if (!await this.createNewEntityFieldsFromSchema(pool)) { // has its own internal filtering for exclude schema/table so don't pass in
|
|
504
|
+
(0, status_logging_1.logError)('Error creating new entity fields from schema');
|
|
503
505
|
bSuccess = false;
|
|
504
506
|
}
|
|
505
|
-
(0, status_logging_1.logStatus)(`
|
|
507
|
+
(0, status_logging_1.logStatus)(` Created new entity fields from schema in ${(new Date().getTime() - step2StartTime.getTime()) / 1000} seconds`);
|
|
508
|
+
// AN: 14-June-2025 - we are now running this AFTER we create new entity fields from schema
|
|
509
|
+
// which results in the same pattern of behavior as migrations where we first create new fields
|
|
510
|
+
// with VERY HIGH sequence numbers (e.g. 100,000 above what they will be approx) and then
|
|
511
|
+
// we align them properly in sequential order from 1+ via this method below.
|
|
506
512
|
const step3StartTime = new Date();
|
|
507
|
-
if (!await this.
|
|
508
|
-
(0, status_logging_1.logError)('Error
|
|
513
|
+
if (!await this.updateExistingEntityFieldsFromSchema(pool, excludeSchemas)) {
|
|
514
|
+
(0, status_logging_1.logError)('Error updating existing entity fields from schema');
|
|
509
515
|
bSuccess = false;
|
|
510
516
|
}
|
|
511
|
-
(0, status_logging_1.logStatus)(`
|
|
517
|
+
(0, status_logging_1.logStatus)(` Updated existing entity fields from schema in ${(new Date().getTime() - step3StartTime.getTime()) / 1000} seconds`);
|
|
512
518
|
const step4StartTime = new Date();
|
|
513
|
-
if (!await this.setDefaultColumnWidthWhereNeeded(
|
|
519
|
+
if (!await this.setDefaultColumnWidthWhereNeeded(pool, excludeSchemas)) {
|
|
514
520
|
(0, status_logging_1.logError)('Error setting default column width where needed');
|
|
515
521
|
bSuccess = false;
|
|
516
522
|
}
|
|
517
523
|
(0, status_logging_1.logStatus)(` Set default column width where needed in ${(new Date().getTime() - step4StartTime.getTime()) / 1000} seconds`);
|
|
518
524
|
const step5StartTime = new Date();
|
|
519
|
-
if (!await this.updateEntityFieldDisplayNameWhereNull(
|
|
525
|
+
if (!await this.updateEntityFieldDisplayNameWhereNull(pool, excludeSchemas)) {
|
|
520
526
|
(0, status_logging_1.logError)('Error updating entity field display name where null');
|
|
521
527
|
bSuccess = false;
|
|
522
528
|
}
|
|
@@ -524,7 +530,7 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
|
|
|
524
530
|
if (!skipEntityFieldValues) {
|
|
525
531
|
const step6StartTime = new Date();
|
|
526
532
|
(0, status_logging_1.logStatus)(` Starting to manage entity field values...`);
|
|
527
|
-
if (!await this.manageEntityFieldValuesAndValidatorFunctions(
|
|
533
|
+
if (!await this.manageEntityFieldValuesAndValidatorFunctions(pool, excludeSchemas, currentUser, false)) {
|
|
528
534
|
(0, status_logging_1.logError)('Error managing entity field values');
|
|
529
535
|
bSuccess = false;
|
|
530
536
|
}
|
|
@@ -536,7 +542,7 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
|
|
|
536
542
|
/**
|
|
537
543
|
* This method ensures that the __mj_DeletedAt field exists in each entity that has DeleteType=Soft. If the field does not exist, it is created.
|
|
538
544
|
*/
|
|
539
|
-
async ensureDeletedAtFieldsExist(
|
|
545
|
+
async ensureDeletedAtFieldsExist(pool, excludeSchemas) {
|
|
540
546
|
try {
|
|
541
547
|
const sqlEntities = `SELECT
|
|
542
548
|
*
|
|
@@ -546,7 +552,8 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
|
|
|
546
552
|
VirtualEntity=0 AND
|
|
547
553
|
DeleteType='Soft' AND
|
|
548
554
|
SchemaName NOT IN (${excludeSchemas.map(s => `'${s}'`).join(',')})`;
|
|
549
|
-
const
|
|
555
|
+
const entitiesResult = await pool.request().query(sqlEntities);
|
|
556
|
+
const entities = entitiesResult.recordset;
|
|
550
557
|
let overallResult = true;
|
|
551
558
|
if (entities.length > 0) {
|
|
552
559
|
// we have 1+ entities that need the special fields, so loop through them and ensure the fields exist
|
|
@@ -556,12 +563,13 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
|
|
|
556
563
|
WHERE
|
|
557
564
|
${entities.map((e) => `(TABLE_SCHEMA='${e.SchemaName}' AND TABLE_NAME='${e.BaseTable}')`).join(' OR ')}
|
|
558
565
|
AND COLUMN_NAME='${core_1.EntityInfo.DeletedAtFieldName}'`;
|
|
559
|
-
const
|
|
566
|
+
const resultResult = await pool.request().query(sql);
|
|
567
|
+
const result = resultResult.recordset;
|
|
560
568
|
for (const e of entities) {
|
|
561
569
|
const eResult = result.filter((r) => r.TABLE_NAME === e.BaseTable && r.TABLE_SCHEMA === e.SchemaName); // get just the fields for this entity
|
|
562
570
|
const deletedAt = eResult.find((r) => r.COLUMN_NAME.trim().toLowerCase() === core_1.EntityInfo.DeletedAtFieldName.trim().toLowerCase());
|
|
563
571
|
// now, if we have the fields, we need to check the default value and update if necessary
|
|
564
|
-
const fieldResult = await this.ensureSpecialDateFieldExistsAndHasCorrectDefaultValue(
|
|
572
|
+
const fieldResult = await this.ensureSpecialDateFieldExistsAndHasCorrectDefaultValue(pool, e, core_1.EntityInfo.DeletedAtFieldName, deletedAt, true);
|
|
565
573
|
overallResult = overallResult && fieldResult;
|
|
566
574
|
}
|
|
567
575
|
}
|
|
@@ -576,9 +584,9 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
|
|
|
576
584
|
* This method ensures that the __mj_CreatedAt and __mj_UpdatedAt fields exist in each entity that has TrackRecordChanges set to true. If the fields do not exist, they are created.
|
|
577
585
|
* If the fields exist but have incorrect default values, the default values are updated. The default value that is to be used for these special fields is GETUTCDATE() which is the
|
|
578
586
|
* UTC date and time. This method is called as part of the manageEntityFields method and is not intended to be called directly.
|
|
579
|
-
* @param
|
|
587
|
+
* @param pool
|
|
580
588
|
*/
|
|
581
|
-
async ensureCreatedAtUpdatedAtFieldsExist(
|
|
589
|
+
async ensureCreatedAtUpdatedAtFieldsExist(pool, excludeSchemas) {
|
|
582
590
|
try {
|
|
583
591
|
const sqlEntities = `SELECT
|
|
584
592
|
*
|
|
@@ -588,7 +596,8 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
|
|
|
588
596
|
VirtualEntity = 0 AND
|
|
589
597
|
TrackRecordChanges = 1 AND
|
|
590
598
|
SchemaName NOT IN (${excludeSchemas.map(s => `'${s}'`).join(',')})`;
|
|
591
|
-
const
|
|
599
|
+
const entitiesResult = await pool.request().query(sqlEntities);
|
|
600
|
+
const entities = entitiesResult.recordset;
|
|
592
601
|
let overallResult = true;
|
|
593
602
|
if (entities.length > 0) {
|
|
594
603
|
// we have 1+ entities that need the special fields, so loop through them and ensure the fields exist
|
|
@@ -599,15 +608,16 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
|
|
|
599
608
|
WHERE
|
|
600
609
|
${entities.map((e) => `(TABLE_SCHEMA='${e.SchemaName}' AND TABLE_NAME='${e.BaseTable}')`).join(' OR ')}
|
|
601
610
|
AND COLUMN_NAME IN ('${core_1.EntityInfo.CreatedAtFieldName}','${core_1.EntityInfo.UpdatedAtFieldName}')`;
|
|
602
|
-
const
|
|
611
|
+
const resultResult = await pool.request().query(sqlCreatedUpdated);
|
|
612
|
+
const result = resultResult.recordset;
|
|
603
613
|
for (const e of entities) {
|
|
604
614
|
// result has both created at and updated at fields, so filter on the result for each and do what we need to based on that
|
|
605
615
|
const eResult = result.filter((r) => r.TABLE_NAME === e.BaseTable && r.TABLE_SCHEMA === e.SchemaName); // get just the fields for this entity
|
|
606
616
|
const createdAt = eResult.find((r) => r.COLUMN_NAME.trim().toLowerCase() === core_1.EntityInfo.CreatedAtFieldName.trim().toLowerCase());
|
|
607
617
|
const updatedAt = eResult.find((r) => r.COLUMN_NAME.trim().toLowerCase() === core_1.EntityInfo.UpdatedAtFieldName.trim().toLowerCase());
|
|
608
618
|
// now, if we have the fields, we need to check the default value and update if necessary
|
|
609
|
-
const fieldResult = await this.ensureSpecialDateFieldExistsAndHasCorrectDefaultValue(
|
|
610
|
-
await this.ensureSpecialDateFieldExistsAndHasCorrectDefaultValue(
|
|
619
|
+
const fieldResult = await this.ensureSpecialDateFieldExistsAndHasCorrectDefaultValue(pool, e, core_1.EntityInfo.CreatedAtFieldName, createdAt, false) &&
|
|
620
|
+
await this.ensureSpecialDateFieldExistsAndHasCorrectDefaultValue(pool, e, core_1.EntityInfo.UpdatedAtFieldName, updatedAt, false);
|
|
611
621
|
overallResult = overallResult && fieldResult;
|
|
612
622
|
}
|
|
613
623
|
}
|
|
@@ -625,12 +635,12 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
|
|
|
625
635
|
* @param fieldName
|
|
626
636
|
* @param currentFieldData
|
|
627
637
|
*/
|
|
628
|
-
async ensureSpecialDateFieldExistsAndHasCorrectDefaultValue(
|
|
638
|
+
async ensureSpecialDateFieldExistsAndHasCorrectDefaultValue(pool, entity, fieldName, currentFieldData, allowNull) {
|
|
629
639
|
try {
|
|
630
640
|
if (!currentFieldData) {
|
|
631
641
|
// field doesn't exist, let's create it
|
|
632
642
|
const sql = `ALTER TABLE [${entity.SchemaName}].[${entity.BaseTable}] ADD ${fieldName} DATETIMEOFFSET ${allowNull ? 'NULL' : 'NOT NULL DEFAULT GETUTCDATE()'}`;
|
|
633
|
-
await this.LogSQLAndExecute(
|
|
643
|
+
await this.LogSQLAndExecute(pool, sql, `SQL text to add special date field ${fieldName} to entity ${entity.SchemaName}.${entity.BaseTable}`);
|
|
634
644
|
}
|
|
635
645
|
else {
|
|
636
646
|
// field does exist, let's first check the data type/nullability
|
|
@@ -639,11 +649,11 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
|
|
|
639
649
|
(currentFieldData.IS_NULLABLE.trim().toLowerCase() === 'no' && allowNull)) {
|
|
640
650
|
// the column is the wrong type, or has wrong nullability attribute, so let's update it, first removing the default constraint, then
|
|
641
651
|
// modifying the column, and finally adding the default constraint back in.
|
|
642
|
-
await this.dropExistingDefaultConstraint(
|
|
652
|
+
await this.dropExistingDefaultConstraint(pool, entity, fieldName);
|
|
643
653
|
const sql = `ALTER TABLE [${entity.SchemaName}].[${entity.BaseTable}] ALTER COLUMN ${fieldName} DATETIMEOFFSET ${allowNull ? 'NULL' : 'NOT NULL'}`;
|
|
644
|
-
await this.LogSQLAndExecute(
|
|
654
|
+
await this.LogSQLAndExecute(pool, sql, `SQL text to update special date field ${fieldName} in entity ${entity.SchemaName}.${entity.BaseTable}`);
|
|
645
655
|
if (!allowNull)
|
|
646
|
-
await this.createDefaultConstraintForSpecialDateField(
|
|
656
|
+
await this.createDefaultConstraintForSpecialDateField(pool, entity, fieldName);
|
|
647
657
|
}
|
|
648
658
|
else {
|
|
649
659
|
// if we get here that means the column is the correct type and nullability, so now let's check the default value, but we only do that if we are dealing with a
|
|
@@ -652,7 +662,7 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
|
|
|
652
662
|
const defaultValue = currentFieldData.COLUMN_DEFAULT;
|
|
653
663
|
const realDefaultValue = (0, core_1.ExtractActualDefaultValue)(defaultValue);
|
|
654
664
|
if (!realDefaultValue || realDefaultValue.trim().toLowerCase() !== 'getutcdate()') {
|
|
655
|
-
await this.dropAndCreateDefaultConstraintForSpecialDateField(
|
|
665
|
+
await this.dropAndCreateDefaultConstraintForSpecialDateField(pool, entity, fieldName);
|
|
656
666
|
}
|
|
657
667
|
}
|
|
658
668
|
}
|
|
@@ -668,10 +678,10 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
|
|
|
668
678
|
/**
|
|
669
679
|
* Creates the default constraint for a special date field. This method is called as part of the ensureSpecialDateFieldExistsAndHasCorrectDefaultValue method and is not intended to be called directly.
|
|
670
680
|
*/
|
|
671
|
-
async createDefaultConstraintForSpecialDateField(
|
|
681
|
+
async createDefaultConstraintForSpecialDateField(pool, entity, fieldName) {
|
|
672
682
|
try {
|
|
673
683
|
const sqlAddDefaultConstraint = `ALTER TABLE [${entity.SchemaName}].[${entity.BaseTable}] ADD CONSTRAINT DF_${entity.SchemaName}_${(0, core_1.CodeNameFromString)(entity.BaseTable)}_${fieldName} DEFAULT GETUTCDATE() FOR [${fieldName}]`;
|
|
674
|
-
await this.LogSQLAndExecute(
|
|
684
|
+
await this.LogSQLAndExecute(pool, sqlAddDefaultConstraint, `SQL text to add default constraint for special date field ${fieldName} in entity ${entity.SchemaName}.${entity.BaseTable}`);
|
|
675
685
|
}
|
|
676
686
|
catch (e) {
|
|
677
687
|
(0, status_logging_1.logError)(e);
|
|
@@ -683,18 +693,18 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
|
|
|
683
693
|
* @param entity
|
|
684
694
|
* @param fieldName
|
|
685
695
|
*/
|
|
686
|
-
async dropAndCreateDefaultConstraintForSpecialDateField(
|
|
696
|
+
async dropAndCreateDefaultConstraintForSpecialDateField(pool, entity, fieldName) {
|
|
687
697
|
// default value is not correct, so let's update it
|
|
688
|
-
await this.dropExistingDefaultConstraint(
|
|
689
|
-
await this.createDefaultConstraintForSpecialDateField(
|
|
698
|
+
await this.dropExistingDefaultConstraint(pool, entity, fieldName);
|
|
699
|
+
await this.createDefaultConstraintForSpecialDateField(pool, entity, fieldName);
|
|
690
700
|
}
|
|
691
701
|
/**
|
|
692
702
|
* Drops an existing default constraint from a given column within a given entity, if it exists
|
|
693
|
-
* @param
|
|
703
|
+
* @param pool
|
|
694
704
|
* @param entity
|
|
695
705
|
* @param fieldName
|
|
696
706
|
*/
|
|
697
|
-
async dropExistingDefaultConstraint(
|
|
707
|
+
async dropExistingDefaultConstraint(pool, entity, fieldName) {
|
|
698
708
|
try {
|
|
699
709
|
const sqlDropDefaultConstraint = `
|
|
700
710
|
DECLARE @constraintName NVARCHAR(255);
|
|
@@ -715,7 +725,7 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
|
|
|
715
725
|
EXEC('ALTER TABLE [${entity.SchemaName}].[${entity.BaseTable}] DROP CONSTRAINT ' + @constraintName);
|
|
716
726
|
END
|
|
717
727
|
`;
|
|
718
|
-
await this.LogSQLAndExecute(
|
|
728
|
+
await this.LogSQLAndExecute(pool, sqlDropDefaultConstraint, `SQL text to drop default existing default constraints in entity ${entity.SchemaName}.${entity.BaseTable}`);
|
|
719
729
|
}
|
|
720
730
|
catch (e) {
|
|
721
731
|
(0, status_logging_1.logError)(e);
|
|
@@ -724,10 +734,10 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
|
|
|
724
734
|
/**
|
|
725
735
|
* This method generates descriptions for entities in teh system where there is no existing description. This is an experimental feature and is done using AI. In order for it
|
|
726
736
|
* to be invoked, the EntityDescriptions feature must be enabled in the Advanced Generation configuration.
|
|
727
|
-
* @param
|
|
737
|
+
* @param pool
|
|
728
738
|
* @param md
|
|
729
739
|
*/
|
|
730
|
-
async generateNewEntityDescriptions(
|
|
740
|
+
async generateNewEntityDescriptions(pool, md) {
|
|
731
741
|
// for the list of new entities, go through and attempt to generate new entity descriptions
|
|
732
742
|
const ag = new advanced_generation_1.AdvancedGeneration();
|
|
733
743
|
if (ag.featureEnabled('EntityDescriptions')) {
|
|
@@ -737,9 +747,11 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
|
|
|
737
747
|
const systemPrompt = prompt.systemPrompt;
|
|
738
748
|
const userMessage = prompt.userMessage + '\n\n';
|
|
739
749
|
// now loop through the new entities and generate descriptions for them
|
|
740
|
-
for (let e of
|
|
741
|
-
const
|
|
742
|
-
const
|
|
750
|
+
for (let e of ManageMetadataBase.newEntityList) {
|
|
751
|
+
const dataResult = await pool.request().query(`SELECT * FROM [${(0, config_1.mj_core_schema)()}].vwEntities WHERE Name = '${e}'`);
|
|
752
|
+
const data = dataResult.recordset;
|
|
753
|
+
const fieldsResult = await pool.request().query(`SELECT * FROM [${(0, config_1.mj_core_schema)()}].vwEntityFields WHERE EntityID='${data[0].ID}'`);
|
|
754
|
+
const fields = fieldsResult.recordset;
|
|
743
755
|
const entityUserMessage = userMessage + `Entity Name: ${e},
|
|
744
756
|
Base Table: ${data[0].BaseTable},
|
|
745
757
|
Schema: ${data[0].SchemaName}.
|
|
@@ -764,7 +776,7 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
|
|
|
764
776
|
const structuredResult = JSON.parse(resultText);
|
|
765
777
|
if (structuredResult?.entityDescription && structuredResult.entityDescription.length > 0) {
|
|
766
778
|
const sSQL = `UPDATE [${(0, config_1.mj_core_schema)()}].Entity SET Description = '${structuredResult.entityDescription}' WHERE Name = '${e}'`;
|
|
767
|
-
await this.LogSQLAndExecute(
|
|
779
|
+
await this.LogSQLAndExecute(pool, sSQL, `SQL text to update entity description for entity ${e}`);
|
|
768
780
|
}
|
|
769
781
|
else {
|
|
770
782
|
console.warn(' >>> Advanced Generation Error: LLM returned a blank entity description, skipping entity description for entity ' + e);
|
|
@@ -788,7 +800,7 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
|
|
|
788
800
|
* @param excludeSchemas
|
|
789
801
|
* @returns
|
|
790
802
|
*/
|
|
791
|
-
async updateEntityFieldDisplayNameWhereNull(
|
|
803
|
+
async updateEntityFieldDisplayNameWhereNull(pool, excludeSchemas) {
|
|
792
804
|
try {
|
|
793
805
|
const sql = `SELECT
|
|
794
806
|
ef.ID, ef.Name
|
|
@@ -804,13 +816,14 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
|
|
|
804
816
|
ef.Name <> \'ID\' AND
|
|
805
817
|
e.SchemaName NOT IN (${excludeSchemas.map(s => `'${s}'`).join(',')})
|
|
806
818
|
`;
|
|
807
|
-
const
|
|
819
|
+
const fieldsResult = await pool.request().query(sql);
|
|
820
|
+
const fields = fieldsResult.recordset;
|
|
808
821
|
if (fields && fields.length > 0)
|
|
809
822
|
for (const field of fields) {
|
|
810
823
|
const sDisplayName = (0, global_1.stripTrailingChars)((0, global_1.convertCamelCaseToHaveSpaces)(field.Name), 'ID', true).trim();
|
|
811
824
|
if (sDisplayName.length > 0 && sDisplayName.toLowerCase().trim() !== field.Name.toLowerCase().trim()) {
|
|
812
825
|
const sSQL = `UPDATE [${(0, config_1.mj_core_schema)()}].EntityField SET ${core_1.EntityInfo.UpdatedAtFieldName}=GETUTCDATE(), DisplayName = '${sDisplayName}' WHERE ID = '${field.ID}'`;
|
|
813
|
-
await this.LogSQLAndExecute(
|
|
826
|
+
await this.LogSQLAndExecute(pool, sSQL, `SQL text to update display name for field ${field.Name}`);
|
|
814
827
|
}
|
|
815
828
|
}
|
|
816
829
|
return true;
|
|
@@ -824,14 +837,14 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
|
|
|
824
837
|
* This method updates the DefaultColumnWidth field in the EntityField metadata. The default logic uses a stored procedure called spSetDefaultColumnWidthWhereNeeded
|
|
825
838
|
* which is part of the MJ Core Schema. You can override this method to implement custom logic for setting default column widths. It is NOT recommended to
|
|
826
839
|
* modify the stored procedure in the MJ Core Schema because your changes will be overriden during a future upgrade.
|
|
827
|
-
* @param
|
|
840
|
+
* @param pool
|
|
828
841
|
* @param excludeSchemas
|
|
829
842
|
* @returns
|
|
830
843
|
*/
|
|
831
|
-
async setDefaultColumnWidthWhereNeeded(
|
|
844
|
+
async setDefaultColumnWidthWhereNeeded(pool, excludeSchemas) {
|
|
832
845
|
try {
|
|
833
846
|
const sSQL = `EXEC ${(0, config_1.mj_core_schema)()}.spSetDefaultColumnWidthWhereNeeded @ExcludedSchemaNames='${excludeSchemas.join(',')}'`;
|
|
834
|
-
await this.LogSQLAndExecute(
|
|
847
|
+
await this.LogSQLAndExecute(pool, sSQL, `SQL text to set default column width where needed`, true);
|
|
835
848
|
return true;
|
|
836
849
|
}
|
|
837
850
|
catch (e) {
|
|
@@ -842,12 +855,18 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
|
|
|
842
855
|
/**
|
|
843
856
|
* Creates a SQL statement to retrieve all of the pending entity fields that need to be created in the metadata. This method looks for fields that exist in the underlying
|
|
844
857
|
* database but are NOT in the metadata.
|
|
858
|
+
*
|
|
859
|
+
* IMPORTANT: The sequence shown below has a 100,000 added to it to ensure that there is no collision with existing sequences. The spUpdateExistingEntityFieldsFromSchema
|
|
860
|
+
* stored procedure runs AFTER this method and will correct the sequences to ensure they are in the correct order. In a migration, the spUpdateExistingEntityFieldsFromSchema
|
|
861
|
+
* runs afterwards as well so this behavior ensures CodeGen works consistently.
|
|
862
|
+
*
|
|
863
|
+
* @returns {string} - The SQL statement to retrieve pending entity fields.
|
|
845
864
|
*/
|
|
846
865
|
getPendingEntityFieldsSELECTSQL() {
|
|
847
866
|
const sSQL = `WITH NumberedRows AS (
|
|
848
867
|
SELECT
|
|
849
868
|
sf.EntityID,
|
|
850
|
-
sf.Sequence,
|
|
869
|
+
sf.Sequence + 100000 Sequence, -- add a large number to the sequence to ensure no collision with existing sequences - spUpdateExistingEntityFieldsFromSchema runs AFTER this and will correct them.
|
|
851
870
|
sf.FieldName,
|
|
852
871
|
sf.Description,
|
|
853
872
|
sf.Type,
|
|
@@ -1051,11 +1070,14 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
|
|
|
1051
1070
|
}
|
|
1052
1071
|
return sResult;
|
|
1053
1072
|
}
|
|
1054
|
-
async createNewEntityFieldsFromSchema(
|
|
1073
|
+
async createNewEntityFieldsFromSchema(pool) {
|
|
1055
1074
|
try {
|
|
1056
1075
|
const sSQL = this.getPendingEntityFieldsSELECTSQL();
|
|
1057
|
-
const
|
|
1058
|
-
|
|
1076
|
+
const newEntityFieldsResult = await pool.request().query(sSQL);
|
|
1077
|
+
const newEntityFields = newEntityFieldsResult.recordset;
|
|
1078
|
+
const transaction = new sql.Transaction(pool);
|
|
1079
|
+
await transaction.begin();
|
|
1080
|
+
try {
|
|
1059
1081
|
// wrap in a transaction so we get all of it or none of it
|
|
1060
1082
|
for (let i = 0; i < newEntityFields.length; ++i) {
|
|
1061
1083
|
const n = newEntityFields[i];
|
|
@@ -1065,7 +1087,7 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
|
|
|
1065
1087
|
const newEntityFieldUUID = this.createNewUUID();
|
|
1066
1088
|
const sSQLInsert = this.getPendingEntityFieldINSERTSQL(newEntityFieldUUID, n);
|
|
1067
1089
|
try {
|
|
1068
|
-
await this.LogSQLAndExecute(
|
|
1090
|
+
await this.LogSQLAndExecute(pool, sSQLInsert, `SQL text to insert new entity field`);
|
|
1069
1091
|
// if we get here, we're okay, otherwise we have an exception, which we want as it blows up transaction
|
|
1070
1092
|
}
|
|
1071
1093
|
catch (e) {
|
|
@@ -1075,11 +1097,16 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
|
|
|
1075
1097
|
}
|
|
1076
1098
|
}
|
|
1077
1099
|
}
|
|
1078
|
-
|
|
1100
|
+
await transaction.commit();
|
|
1101
|
+
}
|
|
1102
|
+
catch (e) {
|
|
1103
|
+
await transaction.rollback();
|
|
1104
|
+
throw e;
|
|
1105
|
+
}
|
|
1079
1106
|
// if we get here now send a distinct list of the entities that had new fields to the modified entity list
|
|
1080
1107
|
// column in the resultset is called EntityName, we dont have to dedupe them here because the method below
|
|
1081
1108
|
// will do that for us
|
|
1082
|
-
|
|
1109
|
+
ManageMetadataBase.addNewEntitiesToModifiedList(newEntityFields.map((f) => f.EntityName));
|
|
1083
1110
|
return true;
|
|
1084
1111
|
}
|
|
1085
1112
|
catch (e) {
|
|
@@ -1094,12 +1121,12 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
|
|
|
1094
1121
|
* @param relatedEntityNameFieldMap
|
|
1095
1122
|
* @returns
|
|
1096
1123
|
*/
|
|
1097
|
-
async updateEntityFieldRelatedEntityNameFieldMap(
|
|
1124
|
+
async updateEntityFieldRelatedEntityNameFieldMap(pool, entityFieldID, relatedEntityNameFieldMap) {
|
|
1098
1125
|
try {
|
|
1099
1126
|
const sSQL = `EXEC [${(0, config_1.mj_core_schema)()}].spUpdateEntityFieldRelatedEntityNameFieldMap
|
|
1100
1127
|
@EntityFieldID='${entityFieldID}',
|
|
1101
1128
|
@RelatedEntityNameFieldMap='${relatedEntityNameFieldMap}'`;
|
|
1102
|
-
await this.LogSQLAndExecute(
|
|
1129
|
+
await this.LogSQLAndExecute(pool, sSQL, `SQL text to update entity field related entity name field map for entity field ID ${entityFieldID}`);
|
|
1103
1130
|
return true;
|
|
1104
1131
|
}
|
|
1105
1132
|
catch (e) {
|
|
@@ -1107,14 +1134,14 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
|
|
|
1107
1134
|
return false;
|
|
1108
1135
|
}
|
|
1109
1136
|
}
|
|
1110
|
-
async updateExistingEntitiesFromSchema(
|
|
1137
|
+
async updateExistingEntitiesFromSchema(pool, excludeSchemas) {
|
|
1111
1138
|
try {
|
|
1112
1139
|
const sSQL = `EXEC [${(0, config_1.mj_core_schema)()}].spUpdateExistingEntitiesFromSchema @ExcludedSchemaNames='${excludeSchemas.join(',')}'`;
|
|
1113
|
-
const result = await this.LogSQLAndExecute(
|
|
1140
|
+
const result = await this.LogSQLAndExecute(pool, sSQL, `SQL text to update existing entities from schema`, true);
|
|
1114
1141
|
// result contains the updated entities, and there is a property of each row called Name which has the entity name that was modified
|
|
1115
1142
|
// add these to the modified entity list if they're not already in there
|
|
1116
1143
|
if (result && result.length > 0) {
|
|
1117
|
-
|
|
1144
|
+
ManageMetadataBase.addNewEntitiesToModifiedList(result.map((r) => r.Name));
|
|
1118
1145
|
}
|
|
1119
1146
|
return true;
|
|
1120
1147
|
}
|
|
@@ -1128,19 +1155,19 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
 */
 static addNewEntitiesToModifiedList(entityNames) {
 const distinctEntityNames = [...new Set(entityNames)];
-const newlyModifiedEntityNames = distinctEntityNames.filter((e) => !
+const newlyModifiedEntityNames = distinctEntityNames.filter((e) => !ManageMetadataBase._modifiedEntityList.includes(e));
 // now make sure that each of these entity names is in the modified entity list
-
+ManageMetadataBase._modifiedEntityList = ManageMetadataBase._modifiedEntityList.concat(newlyModifiedEntityNames);
 }
-async updateExistingEntityFieldsFromSchema(
+async updateExistingEntityFieldsFromSchema(pool, excludeSchemas) {
 try {
 const sSQL = `EXEC [${(0, config_1.mj_core_schema)()}].spUpdateExistingEntityFieldsFromSchema @ExcludedSchemaNames='${excludeSchemas.join(',')}'`;
-const result = await this.LogSQLAndExecute(
+const result = await this.LogSQLAndExecute(pool, sSQL, `SQL text to update existing entity fields from schema`, true);
 // result contains the updated entity fields
 // there is a field in there called EntityName. Get a distinct list of entity names from this and add them
 // to the modified entity list if they're not already in there
 if (result && result.length > 0) {
-
+ManageMetadataBase.addNewEntitiesToModifiedList(result.map((r) => r.EntityName));
 }
 return true;
 }
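addNewEntitiesToModifiedList, completed in the hunk above, dedupes the incoming names with a Set and then appends only the names the static list is not already tracking. The same pattern in isolation (the module-level list below is a hypothetical stand-in for the class's static _modifiedEntityList):

// Standalone sketch of the dedupe-then-append pattern; `modifiedEntityList`
// is a hypothetical stand-in for ManageMetadataBase._modifiedEntityList.
let modifiedEntityList: string[] = [];

function addNewEntitiesToModifiedList(entityNames: string[]): void {
    const distinctEntityNames = [...new Set(entityNames)];      // drop duplicates within this batch
    const newlyModified = distinctEntityNames.filter(
        (e) => !modifiedEntityList.includes(e)                  // drop names already tracked
    );
    modifiedEntityList = modifiedEntityList.concat(newlyModified);
}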
@@ -1149,15 +1176,15 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
 return false;
 }
 }
-async deleteUnneededEntityFields(
+async deleteUnneededEntityFields(pool, excludeSchemas) {
 try {
 const sSQL = `EXEC [${(0, config_1.mj_core_schema)()}].spDeleteUnneededEntityFields @ExcludedSchemaNames='${excludeSchemas.join(',')}'`;
-const result = await this.LogSQLAndExecute(
+const result = await this.LogSQLAndExecute(pool, sSQL, `SQL text to delete unneeded entity fields`, true);
 // result contains the DELETED entity fields
 // there is a field in there called Entity. Get a distinct list of entity names from this and add them
 // to the modified entity list if they're not already in there
 if (result && result.length > 0) {
-
+ManageMetadataBase.addNewEntitiesToModifiedList(result.map((r) => r.Entity));
 }
 return true;
 }
@@ -1166,7 +1193,7 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
 return false;
 }
 }
-async manageEntityFieldValuesAndValidatorFunctions(
+async manageEntityFieldValuesAndValidatorFunctions(pool, excludeSchemas, currentUser, skipDBUpdate) {
 try {
 // here we want to get all of the entity fields that have check constraints attached to them. For each field that has a check constraint, we want to
 // evaluate it to see if it is a simple series of OR statements or not, if it is a simple series of OR statements, we can parse the possible values
@@ -1174,11 +1201,14 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
 // just ignore it.
 const filter = excludeSchemas && excludeSchemas.length > 0 ? ` WHERE SchemaName NOT IN (${excludeSchemas.map(s => `'${s}'`).join(',')})` : '';
 const sSQL = `SELECT * FROM [${(0, config_1.mj_core_schema)()}].vwEntityFieldsWithCheckConstraints${filter}`;
-const
+const resultResult = await pool.request().query(sSQL);
+const result = resultResult.recordset;
 const efvSQL = `SELECT * FROM [${(0, config_1.mj_core_schema)()}].EntityFieldValue`;
-const
+const allEntityFieldValuesResult = await pool.request().query(efvSQL);
+const allEntityFieldValues = allEntityFieldValuesResult.recordset;
 const efSQL = `SELECT * FROM [${(0, config_1.mj_core_schema)()}].vwEntityFields ORDER BY EntityID, Sequence`;
-const
+const allEntityFieldsResult = await pool.request().query(efSQL);
+const allEntityFields = allEntityFieldsResult.recordset;
 const generationPromises = [];
 const columnLevelResults = result.filter((r) => r.EntityFieldID); // get the column level constraints
 const tableLevelResults = result.filter((r) => !r.EntityFieldID); // get the table level constraints
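The three query sites completed above all follow the same read pattern: issue the SQL through pool.request().query(...) and take the rows from the recordset property of the result. A self-contained sketch of that pattern (the schema placeholder and row shape are illustrative assumptions, not this package's actual types):

import * as sql from 'mssql';

// Hypothetical row shape for illustration; the real view returns many more columns.
interface CheckConstraintRow {
    EntityFieldID: string | null;
    ConstraintDefinition: string;
}

async function loadCheckConstraintRows(pool: sql.ConnectionPool, schema: string): Promise<CheckConstraintRow[]> {
    const sSQL = `SELECT * FROM [${schema}].vwEntityFieldsWithCheckConstraints`;
    const queryResult = await pool.request().query<CheckConstraintRow>(sSQL);
    return queryResult.recordset; // rows are exposed on the recordset property
}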
@@ -1193,14 +1223,15 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
 // 1st, flip the order of parsedValues because they come out in reverse order from SQL Server
 parsedValues.reverse();
 // we have parsed values from the check constraint, so sync them with the entity field values
-await this.syncEntityFieldValues(
+await this.syncEntityFieldValues(pool, r.EntityFieldID, parsedValues, allEntityFieldValues);
 // finally, make sure the ValueListType column within the EntityField table is set to "List" because for check constraints we only allow the values specified in the list.
 // check to see if the ValueListType is already set to "List", if not, update it
 const sSQLCheck = `SELECT ValueListType FROM [${(0, config_1.mj_core_schema)()}].EntityField WHERE ID='${r.EntityFieldID}'`;
-const
+const checkResultResult = await pool.request().query(sSQLCheck);
+const checkResult = checkResultResult.recordset;
 if (checkResult && checkResult.length > 0 && checkResult[0].ValueListType.trim().toLowerCase() !== 'list') {
 const sSQL = `UPDATE [${(0, config_1.mj_core_schema)()}].EntityField SET ValueListType='List' WHERE ID='${r.EntityFieldID}'`;
-await this.LogSQLAndExecute(
+await this.LogSQLAndExecute(pool, sSQL, `SQL text to update ValueListType for entity field ID ${r.EntityFieldID}`);
 }
 }
 else {
@@ -1239,13 +1270,13 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
 }
 /**
 * This method will load all generated code from the database - this is intended to be used when you are bypassing managing the metadata.
-* @param
+* @param pool
 * @param currentUser
 */
-async loadGeneratedCode(
+async loadGeneratedCode(pool, currentUser) {
 try {
 // right now we're just doing validator functions which are handled here
-return await this.manageEntityFieldValuesAndValidatorFunctions(
+return await this.manageEntityFieldValuesAndValidatorFunctions(pool, [], currentUser, true);
 }
 catch (e) {
 (0, status_logging_1.logError)(e);
@@ -1256,7 +1287,7 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
 const generatedFunction = await this.generateValidatorFunctionFromCheckConstraint(r, allEntityFields, currentUser, generateNewCode);
 if (generatedFunction?.success) {
 // LLM was able to generate a function for us, so let's store it in the static array, will be used later when we emit the BaseEntity sub-class
-
+ManageMetadataBase._generatedValidators.push(generatedFunction);
 }
 }
 /**
@@ -1363,7 +1394,9 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
 // now, loop through the possible values and add any that are not already in the database
 // Step 1: for any existing value that is NOT in the list of possible Values, delete it
 let numRemoved = 0;
-
+const transaction = new sql.Transaction(ds);
+await transaction.begin();
+try {
 for (const ev of existingValues) {
 if (!possibleValues.find(v => v === ev.Value)) {
 // delete the value from the database
@@ -1396,7 +1429,12 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
 numUpdated++;
 }
 }
-
+await transaction.commit();
+}
+catch (e) {
+await transaction.rollback();
+throw e;
+}
 return true;
 }
 catch (e) {
@@ -1461,20 +1499,28 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
 (sExcludeSchemas.length > 0 ? (sExcludeTables.length > 0 ? ` AND ` : ``) + '(' + sExcludeSchemas + ')' : '');
 return sWhere;
 }
-async createNewEntities(
+async createNewEntities(pool) {
 try {
 const sSQL = `SELECT * FROM [${(0, config_1.mj_core_schema)()}].vwSQLTablesAndEntities WHERE EntityID IS NULL ` + this.createExcludeTablesAndSchemasFilter('');
-const
+const newEntitiesResult = await pool.request().query(sSQL);
+const newEntities = newEntitiesResult.recordset;
 if (newEntities && newEntities.length > 0) {
 const md = new core_1.Metadata();
-
+const transaction = new sql.Transaction(pool);
+await transaction.begin();
+try {
 // wrap in a transaction so we get all of it or none of it
 for (let i = 0; i < newEntities.length; ++i) {
 // process each of the new entities
-await this.createNewEntity(
+await this.createNewEntity(pool, newEntities[i], md);
 }
-
-
+await transaction.commit();
+}
+catch (e) {
+await transaction.rollback();
+throw e;
+}
+if (ManageMetadataBase.newEntityList.length > 0) {
 // only do this if we actually created new entities
 (0, core_1.LogStatus)(` Done creating entities, refreshing metadata to reflect new entities...`);
 await md.Refresh(); // refresh now since we've added some new entities
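createNewEntities, as reconstructed above, now takes the pool as its only argument, creates each missing entity inside a transaction, and refreshes the Metadata object once the work commits. Since every method in this stretch of the diff expects an open pool, a caller has to establish one first; a hedged sketch with placeholder connection settings (the real values come from this package's Config module, not from this sketch):

import * as sql from 'mssql';

// Placeholder connection settings for illustration only.
const poolConfig: sql.config = {
    server: 'localhost',
    database: 'MJ_Metadata',
    user: 'codegen',
    password: 'example-password',
    options: { trustServerCertificate: true },
};

// Open a pool, hand it to some pool-consuming work, and always close it.
async function withPool(work: (pool: sql.ConnectionPool) => Promise<void>): Promise<void> {
    const pool = await new sql.ConnectionPool(poolConfig).connect();
    try {
        await work(pool); // e.g. pass the pool into the ManageMetadataBase methods shown in this diff
    }
    finally {
        await pool.close();
    }
}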
@@ -1494,7 +1540,8 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
 // validate all of these factors by getting the sql from SQL Server and check the result, if failure, shouldCreate=false and generate validation message, otherwise return empty validation message and true for shouldCreate.
 const query = `EXEC ${core_1.Metadata.Provider.ConfigData.MJCoreSchemaName}.spGetPrimaryKeyForTable @TableName='${newEntity.TableName}', @SchemaName='${newEntity.SchemaName}'`;
 try {
-const
+const resultResult = await ds.request().query(query);
+const result = resultResult.recordset;
 if (result.length === 0) {
 return { shouldCreate: false, validationMessage: "No primary key found" };
 }
@@ -1587,15 +1634,15 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
 createNewUUID() {
 return (0, uuid_1.v4)();
 }
-async createNewEntity(
+async createNewEntity(pool, newEntity, md) {
 try {
-const { shouldCreate, validationMessage } = await this.shouldCreateNewEntity(
+const { shouldCreate, validationMessage } = await this.shouldCreateNewEntity(pool, newEntity);
 if (shouldCreate) {
 // process a single new entity
 let newEntityName = await this.createNewEntityName(newEntity);
 let suffix = '';
 const existingEntity = md.Entities.find(e => e.Name.toLowerCase() === newEntityName.toLowerCase());
-const existingEntityInNewEntityList =
+const existingEntityInNewEntityList = ManageMetadataBase.newEntityList.find(e => e === newEntityName); // check the newly created entity list to make sure we didn't create the new entity name along the way in this RUN of CodeGen as it wouldn't yet be in our metadata above
 if (existingEntity || existingEntityInNewEntityList) {
 // the generated name is already in place, so we need another name
 // use Entity Name __ SchemaName instead of just the table name as basis
@@ -1603,24 +1650,24 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
 newEntityName = newEntityName + suffix;
 (0, core_1.LogError)(` >>>> WARNING: Entity name already exists, so using ${newEntityName} instead. If you did not intend for this, please rename the ${newEntity.SchemaName}.${newEntity.TableName} table in the database.`);
 }
-const isNewSchema = await this.isSchemaNew(
+const isNewSchema = await this.isSchemaNew(pool, newEntity.SchemaName);
 const newEntityID = this.createNewUUID();
 const sSQLInsert = this.createNewEntityInsertSQL(newEntityID, newEntityName, newEntity, suffix);
-await this.LogSQLAndExecute(
+await this.LogSQLAndExecute(pool, sSQLInsert, `SQL generated to create new entity ${newEntityName}`);
 // if we get here we created a new entity safely, otherwise we get exception
 // add it to the new entity list
-
+ManageMetadataBase.newEntityList.push(newEntityName);
 // next, check if this entity is in a schema that is new (e.g. no other entities have been added to this schema yet), if so and if
 // our config option is set to create new applications from new schemas, then create a new application for this schema
 let apps;
 if (isNewSchema && config_1.configInfo.newSchemaDefaults.CreateNewApplicationWithSchemaName) {
 // new schema and config option is to create a new application from the schema name so do that
 // check to see if the app already exists
-apps = await this.getApplicationIDForSchema(
+apps = await this.getApplicationIDForSchema(pool, newEntity.SchemaName);
 if (!apps || apps.length === 0) {
 // doesn't already exist, so create it
 const appUUID = this.createNewUUID();
-const newAppID = await this.createNewApplication(
+const newAppID = await this.createNewApplication(pool, appUUID, newEntity.SchemaName, newEntity.SchemaName);
 if (newAppID) {
 apps = [newAppID];
 }
@@ -1633,7 +1680,7 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
 }
 else {
 // not a new schema, attempt to look up the application for this schema
-apps = await this.getApplicationIDForSchema(
+apps = await this.getApplicationIDForSchema(pool, newEntity.SchemaName);
 }
 if (apps && apps.length > 0) {
 if (config_1.configInfo.newEntityDefaults.AddToApplicationWithSchemaName) {
@@ -1642,7 +1689,7 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
 const sSQLInsertApplicationEntity = `INSERT INTO ${(0, config_1.mj_core_schema)()}.ApplicationEntity
 (ApplicationID, EntityID, Sequence) VALUES
 ('${appUUID}', '${newEntityID}', (SELECT ISNULL(MAX(Sequence),0)+1 FROM ${(0, config_1.mj_core_schema)()}.ApplicationEntity WHERE ApplicationID = '${appUUID}'))`;
-await this.LogSQLAndExecute(
+await this.LogSQLAndExecute(pool, sSQLInsertApplicationEntity, `SQL generated to add new entity ${newEntityName} to application ID: '${appUUID}'`);
 }
 }
 else {
@@ -1663,7 +1710,7 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
 const sSQLInsertPermission = `INSERT INTO ${(0, config_1.mj_core_schema)()}.EntityPermission
 (EntityID, RoleID, CanRead, CanCreate, CanUpdate, CanDelete) VALUES
 ('${newEntityID}', '${RoleID}', ${p.CanRead ? 1 : 0}, ${p.CanCreate ? 1 : 0}, ${p.CanUpdate ? 1 : 0}, ${p.CanDelete ? 1 : 0})`;
-await this.LogSQLAndExecute(
+await this.LogSQLAndExecute(pool, sSQLInsertPermission, `SQL generated to add new permission for entity ${newEntityName} for role ${p.RoleName}`);
 }
 else
 (0, core_1.LogError)(` >>>> ERROR: Unable to find Role ID for role ${p.RoleName} to add permissions for new entity ${newEntityName}`);
@@ -1680,16 +1727,17 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
 (0, core_1.LogError)(`Failed to create new entity ${newEntity?.TableName}`);
 }
 }
-async isSchemaNew(
+async isSchemaNew(pool, schemaName) {
 // check to see if there are any entities in the db with this schema name
 const sSQL = `SELECT COUNT(*) AS Count FROM [${(0, config_1.mj_core_schema)()}].Entity WHERE SchemaName = '${schemaName}'`;
-const
+const resultResult = await pool.request().query(sSQL);
+const result = resultResult.recordset;
 return result && result.length > 0 ? result[0].Count === 0 : true;
 }
-async createNewApplication(
+async createNewApplication(pool, appID, appName, schemaName) {
 try {
 const sSQL = "INSERT INTO [" + (0, config_1.mj_core_schema)() + "].Application (ID, Name, Description, SchemaAutoAddNewEntities) VALUES ('" + appID + "', '" + appName + "', 'Generated for schema', '" + schemaName + "')";
-await this.LogSQLAndExecute(
+await this.LogSQLAndExecute(pool, sSQL, `SQL generated to create new application ${appName}`);
 return appID; // if we get here, we successfully created the application, so return the ID
 }
 catch (e) {
@@ -1697,15 +1745,17 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
 return null; // if we get here, we failed to create the application
 }
 }
-async applicationExists(
+async applicationExists(pool, applicationName) {
 const sSQL = `SELECT ID FROM [${(0, config_1.mj_core_schema)()}].Application WHERE Name = '${applicationName}'`;
-const
+const resultResult = await pool.request().query(sSQL);
+const result = resultResult.recordset;
 return result && result.length > 0 ? result[0].ID.length > 0 : false;
 }
-async getApplicationIDForSchema(
+async getApplicationIDForSchema(pool, schemaName) {
 // get all the apps each time from DB as we might be adding, don't use Metadata here for that reason
 const sSQL = `SELECT ID, Name, SchemaAutoAddNewEntities FROM [${(0, config_1.mj_core_schema)()}].vwApplications`;
-const
+const resultResult = await pool.request().query(sSQL);
+const result = resultResult.recordset;
 if (!result || result.length === 0) {
 // no applications found, return null
 return null;
@@ -1768,24 +1818,18 @@ let ManageMetadataBase = ManageMetadataBase_1 = class ManageMetadataBase {
 return sSQLInsert;
 }
 /**
-* Executes the given SQL query using the given
+* Executes the given SQL query using the given ConnectionPool object.
 * If the appendToLogFile parameter is true, the query will also be appended to the log file.
 * Note that in order to append to the log file, ManageMetadataBase.manageMetaDataLogging must be called first.
-* @param
+* @param pool - The ConnectionPool object to use to execute the query.
 * @param query - The SQL query to execute.
 * @param description - A description of the query to append to the log file.
 * @param isRecurringScript - if set to true tells the logger that the provided SQL represents a recurring script meaning it is something that is executed, generally, for all CodeGen runs. In these cases, the Config settings can result in omitting these recurring scripts from being logged because the configuration environment may have those recurring scripts already set to run after all run-specific migrations get run.
 * @returns - The result of the query execution.
 */
-async LogSQLAndExecute(
-return await sql_logging_1.SQLLogging.LogSQLAndExecute(
+async LogSQLAndExecute(pool, query, description, isRecurringScript = false) {
+return await sql_logging_1.SQLLogging.LogSQLAndExecute(pool, query, description, isRecurringScript);
 }
-}
+}
 exports.ManageMetadataBase = ManageMetadataBase;
-ManageMetadataBase._newEntityList = [];
-ManageMetadataBase._modifiedEntityList = [];
-ManageMetadataBase._generatedValidators = [];
-exports.ManageMetadataBase = ManageMetadataBase = ManageMetadataBase_1 = __decorate([
-(0, global_1.RegisterClass)(ManageMetadataBase)
-], ManageMetadataBase);
 //# sourceMappingURL=manage-metadata.js.map
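The JSDoc completed in the final hunk describes the LogSQLAndExecute helper: run a query against the pool and, depending on the logging configuration, also append it to the CodeGen log file. A simplified, hypothetical stand-in that captures that shape (the real method delegates to the package's SQLLogging class and consults the Config settings rather than a simple flag check):

import * as sql from 'mssql';

// Hypothetical logging hook standing in for the package's SQLLogging machinery.
declare function appendToLogFile(description: string, query: string): void;

async function logSQLAndExecute(
    pool: sql.ConnectionPool,
    query: string,
    description: string,
    isRecurringScript = false
): Promise<sql.IRecordSet<any>> {
    if (!isRecurringScript) {
        // Simplification: the real logger decides this from the Config settings.
        appendToLogFile(description, query);
    }
    const queryResult = await pool.request().query(query);
    return queryResult.recordset;
}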