@memberjunction/sqlserver-dataprovider 5.4.1 → 5.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -15,29 +15,84 @@
15
15
  * In practice - this FILE will NOT exist in the entities library, we need to move to its own separate project
16
16
  * so it is only included by the consumer of the entities library if they want to use it.
17
17
  **************************************************************************************************************/
18
- import { ApplicationInfo, EntityFieldTSType, ProviderType, UserInfo, AuditLogTypeInfo, AuthorizationInfo, TransactionItem, EntityPermissionType, EntitySaveOptions, LogError, StripStopWords, LogStatus, CompositeKey, EntityDeleteOptions, BaseEntityResult, Metadata, DatabaseProviderBase, QueryInfo, QueryCache, InMemoryLocalStorageProvider, } from '@memberjunction/core';
19
- import { QueryParameterProcessor } from './queryParameterProcessor.js';
18
+ import { EntityFieldTSType, ProviderType, LogError, LogStatus, CompositeKey, QueryCache, InMemoryLocalStorageProvider, } from '@memberjunction/core';
19
+ import { QueryParameterProcessor } from '@memberjunction/query-processor';
20
20
  import { NodeFileSystemProvider } from './NodeFileSystemProvider.js';
21
- import { QueryEngine, ViewInfo, } from '@memberjunction/core-entities';
22
- import { AIEngine } from '@memberjunction/aiengine';
23
21
  import { QueueManager } from '@memberjunction/queue';
22
+ import { GenericDatabaseProvider } from '@memberjunction/generic-database-provider';
24
23
  import sql from 'mssql';
25
24
  import { BehaviorSubject, Subject, concatMap, from, tap, catchError, of } from 'rxjs';
26
25
  import { SQLServerTransactionGroup } from './SQLServerTransactionGroup.js';
27
- import { SqlLoggingSessionImpl } from './SqlLogger.js';
28
- import { EntityActionEngineServer } from '@memberjunction/actions';
29
26
  import { EncryptionEngine } from '@memberjunction/encryption';
30
27
  import { v4 as uuidv4 } from 'uuid';
31
- import { MJGlobal, SQLExpressionValidator } from '@memberjunction/global';
28
+ import { SQLExpressionValidator, UUIDsEqual } from '@memberjunction/global';
32
29
  /**
33
- * Core SQL execution function - handles the actual database query execution
30
+ * Checks whether an error indicates a stale/dead database connection that
31
+ * could be resolved by retrying with a fresh connection from the pool.
32
+ * Common in Azure environments where load balancers or SQL connection
33
+ * governance drop idle TCP connections.
34
+ */
35
+ function isStaleConnectionError(error) {
36
+ const message = error.message ?? '';
37
+ const code = error.code ?? '';
38
+ return (
39
+ // tedious connection state errors (e.g. "Requests can only be made in the LoggedIn state, not the Final state")
40
+ message.includes('not the Final state') ||
41
+ message.includes('not the SentClientRequest state') ||
42
+ // General connection-dropped errors
43
+ message.includes('Connection lost') ||
44
+ message.includes('Connection closed') ||
45
+ message.includes('socket hang up') ||
46
+ code === 'ESOCKET' ||
47
+ code === 'ECONNRESET');
48
+ }
49
+ /**
50
+ * Builds a sql.Request, binds parameters, and processes positional placeholders.
51
+ */
52
+ function buildRequest(connectionSource, query, parameters) {
53
+ // Note: The branch looks redundant but is required for TypeScript type narrowing.
54
+ // The sql.Request constructor has overloads for ConnectionPool and Transaction,
55
+ // but TypeScript can't resolve the overload with a union type parameter.
56
+ let request;
57
+ if (connectionSource instanceof sql.Transaction) {
58
+ request = new sql.Request(connectionSource);
59
+ }
60
+ else {
61
+ request = new sql.Request(connectionSource);
62
+ }
63
+ let processedQuery = query;
64
+ if (parameters) {
65
+ if (Array.isArray(parameters)) {
66
+ // Handle positional parameters (legacy TypeORM style)
67
+ parameters.forEach((value, index) => {
68
+ request.input(`p${index}`, value);
69
+ });
70
+ let paramIndex = 0;
71
+ processedQuery = query.replace(/\?/g, () => `@p${paramIndex++}`);
72
+ }
73
+ else if (typeof parameters === 'object') {
74
+ for (const [key, value] of Object.entries(parameters)) {
75
+ request.input(key, value);
76
+ }
77
+ }
78
+ }
79
+ return { request, processedQuery };
80
+ }
81
+ /**
82
+ * Core SQL execution function - handles the actual database query execution.
34
83
  * This is outside the class to allow both static and instance methods to use it
35
- * without creating circular dependencies or forcing everything to be static
84
+ * without creating circular dependencies or forcing everything to be static.
85
+ *
86
+ * Includes automatic retry logic for stale connection errors: when Azure drops
87
+ * idle TCP connections, the pool may hand out dead connections that fail on first
88
+ * use. For non-transactional queries, we retry once so the pool can evict the
89
+ * dead connection and provide a fresh one.
36
90
  */
37
- async function executeSQLCore(query, parameters, context, options) {
91
+ async function executeSQLCore(query, parameters, context, options, isRetry = false) {
38
92
  // Determine which connection source to use
39
93
  let connectionSource;
40
- if (context.transaction) {
94
+ const isTransaction = !!context.transaction;
95
+ if (isTransaction) {
41
96
  // Use the transaction if provided
42
97
  // Note: We no longer test the transaction validity here because:
43
98
  // 1. It could cause race conditions with concurrent queries
@@ -64,36 +119,7 @@ async function executeSQLCore(query, parameters, context, options) {
64
119
  logPromise = Promise.resolve();
65
120
  }
66
121
  try {
67
- // Create a new request object for this query
68
- // Note: This looks redundant but is required for TypeScript type narrowing.
69
- // The sql.Request constructor has overloads for ConnectionPool and Transaction,
70
- // but TypeScript can't resolve the overload with a union type parameter.
71
- let request;
72
- if (connectionSource instanceof sql.Transaction) {
73
- request = new sql.Request(connectionSource);
74
- }
75
- else {
76
- request = new sql.Request(connectionSource);
77
- }
78
- // Add parameters if provided
79
- let processedQuery = query;
80
- if (parameters) {
81
- if (Array.isArray(parameters)) {
82
- // Handle positional parameters (legacy TypeORM style)
83
- parameters.forEach((value, index) => {
84
- request.input(`p${index}`, value);
85
- });
86
- // Replace ? with @p0, @p1, etc. in the query
87
- let paramIndex = 0;
88
- processedQuery = query.replace(/\?/g, () => `@p${paramIndex++}`);
89
- }
90
- else if (typeof parameters === 'object') {
91
- // Handle named parameters
92
- for (const [key, value] of Object.entries(parameters)) {
93
- request.input(key, value);
94
- }
95
- }
96
- }
122
+ const { request, processedQuery } = buildRequest(connectionSource, query, parameters);
97
123
  // Execute query and logging in parallel
98
124
  const [result] = await Promise.all([
99
125
  request.query(processedQuery),
@@ -102,6 +128,14 @@ async function executeSQLCore(query, parameters, context, options) {
102
128
  return result;
103
129
  }
104
130
  catch (error) {
131
+ // Retry once for stale connection errors on non-transactional queries.
132
+ // Transactions cannot be retried because the connection state is already
133
+ // corrupted and the caller must handle rollback/retry at a higher level.
134
+ if (!isRetry && !isTransaction && isStaleConnectionError(error)) {
135
+ console.warn(`[SQLServerDataProvider] Stale connection detected, retrying query once. ` +
136
+ `Original error: ${error.message}`);
137
+ return executeSQLCore(query, parameters, context, options, true);
138
+ }
105
139
  // Build detailed error message with query and parameters
106
140
  const errorMessage = `Error executing SQL
107
141
  Error: ${error?.message ? error.message : error}
@@ -130,8 +164,11 @@ async function executeSQLCore(query, parameters, context, options) {
130
164
  * await provider.Config();
131
165
  * ```
132
166
  */
133
- export class SQLServerDataProvider extends DatabaseProviderBase {
167
+ export class SQLServerDataProvider extends GenericDatabaseProvider {
134
168
  constructor() {
169
+ /**************************************************************************/
170
+ // SQL Dialect Implementations (override abstract methods from DatabaseProviderBase)
171
+ /**************************************************************************/
135
172
  super(...arguments);
136
173
  this._transactionDepth = 0;
137
174
  this._savepointCounter = 0;
@@ -148,18 +185,19 @@ export class SQLServerDataProvider extends DatabaseProviderBase {
148
185
  this._transactionState$ = new BehaviorSubject(false);
149
186
  this._deferredTasks = [];
150
187
  }
151
- static { this._sqlLoggingSessionsKey = 'MJ_SQLServerDataProvider_SqlLoggingSessions'; }
152
- get _sqlLoggingSessions() {
153
- const g = MJGlobal.Instance.GetGlobalObjectStore();
154
- if (g) {
155
- if (!g[SQLServerDataProvider._sqlLoggingSessionsKey]) {
156
- g[SQLServerDataProvider._sqlLoggingSessionsKey] = new Map();
157
- }
158
- return g[SQLServerDataProvider._sqlLoggingSessionsKey];
159
- }
160
- else {
161
- throw new Error('No global object store available for SQL logging session');
162
- }
188
+ QuoteIdentifier(name) {
189
+ return `[${name}]`;
190
+ }
191
+ QuoteSchemaAndView(schemaName, objectName) {
192
+ return `[${schemaName}].[${objectName}]`;
193
+ }
194
+ static { this._sqlServerUUIDPattern = /^\s*(newid|newsequentialid)\s*\(\s*\)\s*$/i; }
195
+ static { this._sqlServerDefaultPattern = /^\s*(getdate|getutcdate|sysdatetime|sysdatetimeoffset|sysutcdatetime|current_timestamp)\s*\(\s*\)\s*$/i; }
196
+ get UUIDFunctionPattern() {
197
+ return SQLServerDataProvider._sqlServerUUIDPattern;
198
+ }
199
+ get DBDefaultFunctionPattern() {
200
+ return SQLServerDataProvider._sqlServerDefaultPattern;
163
201
  }
164
202
  /**
165
203
  * Observable that emits the current transaction state (true when active, false when not)
@@ -288,105 +326,14 @@ export class SQLServerDataProvider extends DatabaseProviderBase {
288
326
  get MJCoreSchemaName() {
289
327
  return this.ConfigData.MJCoreSchemaName;
290
328
  }
291
- /**************************************************************************/
292
- // START ---- SQL Logging Methods
293
- /**************************************************************************/
294
- /**
295
- * Creates a new SQL logging session that will capture all SQL operations to a file.
296
- * Returns a disposable session object that must be disposed to stop logging.
297
- *
298
- * @param filePath - Full path to the file where SQL statements will be logged
299
- * @param options - Optional configuration for the logging session
300
- * @returns Promise<SqlLoggingSession> - Disposable session object
301
- *
302
- * @example
303
- * ```typescript
304
- * // Basic usage
305
- * const session = await provider.CreateSqlLogger('./logs/metadata-sync.sql');
306
- * try {
307
- * // Perform operations that will be logged
308
- * await provider.ExecuteSQL('INSERT INTO ...');
309
- * } finally {
310
- * await session.dispose(); // Stop logging
311
- * }
312
- *
313
- * // With migration formatting
314
- * const session = await provider.CreateSqlLogger('./migrations/changes.sql', {
315
- * formatAsMigration: true,
316
- * description: 'MetadataSync push operation'
317
- * });
318
- * ```
319
- */
320
- async CreateSqlLogger(filePath, options) {
321
- const sessionId = uuidv4();
322
- const mjCoreSchema = this.ConfigData.MJCoreSchemaName;
323
- const session = new SqlLoggingSessionImpl(sessionId, filePath, {
324
- defaultSchemaName: mjCoreSchema,
325
- ...options // if defaultSchemaName is not provided, it will use the MJCoreSchemaName, otherwise
326
- // the caller's defaultSchemaName will be used
327
- });
328
- // Initialize the session (create file, write header)
329
- await session.initialize();
330
- // Store in active sessions map
331
- this._sqlLoggingSessions.set(sessionId, session);
332
- // Return a proxy that handles cleanup on dispose
333
- return {
334
- id: session.id,
335
- filePath: session.filePath,
336
- startTime: session.startTime,
337
- get statementCount() {
338
- return session.statementCount;
339
- },
340
- options: session.options,
341
- dispose: async () => {
342
- await session.dispose();
343
- this._sqlLoggingSessions.delete(sessionId);
344
- },
345
- };
346
- }
347
329
  async GetCurrentUser() {
348
330
  return this.CurrentUser;
349
331
  }
350
- /**
351
- * Gets information about all active SQL logging sessions.
352
- * Useful for monitoring and debugging.
353
- *
354
- * @returns Array of session information objects
355
- */
356
- GetActiveSqlLoggingSessions() {
357
- return Array.from(this._sqlLoggingSessions.values()).map((session) => ({
358
- id: session.id,
359
- filePath: session.filePath,
360
- startTime: session.startTime,
361
- statementCount: session.statementCount,
362
- options: session.options,
363
- }));
364
- }
365
- /**
366
- * Gets a specific SQL logging session by its ID.
367
- * Returns the session if found, or undefined if not found.
368
- *
369
- * @param sessionId - The unique identifier of the session to retrieve
370
- * @returns The SqlLoggingSession if found, undefined otherwise
371
- */
372
- GetSqlLoggingSessionById(sessionId) {
373
- return this._sqlLoggingSessions.get(sessionId);
374
- }
375
- /**
376
- * Disposes all active SQL logging sessions.
377
- * Useful for cleanup on provider shutdown.
378
- */
379
- async DisposeAllSqlLoggingSessions() {
380
- const disposePromises = Array.from(this._sqlLoggingSessions.values()).map((session) => session.dispose());
381
- await Promise.all(disposePromises);
382
- this._sqlLoggingSessions.clear();
383
- }
384
332
  /**
385
333
  * Dispose of this provider instance and clean up resources.
386
334
  * This should be called when the provider is no longer needed.
387
335
  */
388
336
  async Dispose() {
389
- // Dispose all SQL logging sessions
390
337
  await this.DisposeAllSqlLoggingSessions();
391
338
  // Unsubscribe from the SQL queue
392
339
  if (this._queueSubscription) {
@@ -400,155 +347,6 @@ export class SQLServerDataProvider extends DatabaseProviderBase {
400
347
  // Note: We don't close the pool here as it might be shared
401
348
  // The caller is responsible for closing the pool when appropriate
402
349
  }
403
- /**
404
- * Internal method to log SQL statement to all active logging sessions.
405
- * This is called automatically by ExecuteSQL methods.
406
- *
407
- * @param query - The SQL query being executed
408
- * @param parameters - Parameters for the query
409
- * @param description - Optional description for this operation
410
- * @param ignoreLogging - If true, this statement will not be logged
411
- * @param isMutation - Whether this is a data mutation operation
412
- * @param simpleSQLFallback - Optional simple SQL to use for loggers with logRecordChangeMetadata=false
413
- */
414
- async _logSqlStatement(query, parameters, description, ignoreLogging = false, isMutation = false, simpleSQLFallback, contextUser) {
415
- if (ignoreLogging || this._sqlLoggingSessions.size === 0) {
416
- return;
417
- }
418
- // Check if any session has verbose output enabled for debug logging
419
- const allSessions = Array.from(this._sqlLoggingSessions.values());
420
- const hasVerboseSession = allSessions.some(s => s.options.verboseOutput === true);
421
- if (hasVerboseSession) {
422
- console.log('=== SQL LOGGING DEBUG ===');
423
- console.log(`Query to log: ${query.substring(0, 100)}...`);
424
- console.log(`Context user email: ${contextUser?.Email || 'NOT_PROVIDED'}`);
425
- console.log(`Active sessions count: ${this._sqlLoggingSessions.size}`);
426
- console.log(`All sessions:`, allSessions.map(s => ({
427
- id: s.id,
428
- filterByUserId: s.options.filterByUserId,
429
- sessionName: s.options.sessionName
430
- })));
431
- }
432
- const filteredSessions = allSessions.filter((session) => {
433
- // If session has user filter, only log if contextUser matches AND contextUser is provided
434
- if (session.options.filterByUserId) {
435
- if (!contextUser?.Email) {
436
- if (hasVerboseSession) {
437
- console.log(`Session ${session.id}: Has user filter but no contextUser provided - SKIPPING`);
438
- }
439
- return false; // Don't log if filtering requested but no user context provided
440
- }
441
- const matches = session.options.filterByUserId === contextUser.ID;
442
- if (hasVerboseSession) {
443
- console.log(`Session ${session.id} filter check:`, {
444
- filterByUserId: session.options.filterByUserId,
445
- contextUserEmail: contextUser.Email,
446
- matches: matches
447
- });
448
- }
449
- return matches;
450
- }
451
- // No filter means log for all users (regardless of contextUser)
452
- if (hasVerboseSession) {
453
- console.log(`Session ${session.id} has no filter - including`);
454
- }
455
- return true;
456
- });
457
- if (hasVerboseSession) {
458
- console.log(`Sessions after filtering: ${filteredSessions.length}`);
459
- }
460
- const logPromises = filteredSessions.map((session) => session.logSqlStatement(query, parameters, description, isMutation, simpleSQLFallback));
461
- await Promise.all(logPromises);
462
- if (hasVerboseSession) {
463
- console.log('=== SQL LOGGING DEBUG END ===');
464
- }
465
- }
466
- /**
467
- * Static method to log SQL statements from external sources like transaction groups
468
- *
469
- * @param query - The SQL query being executed
470
- * @param parameters - Parameters for the query
471
- * @param description - Optional description for this operation
472
- * @param isMutation - Whether this is a data mutation operation
473
- * @param simpleSQLFallback - Optional simple SQL to use for loggers with logRecordChangeMetadata=false
474
- */
475
- static async LogSQLStatement(query, parameters, description, isMutation = false, simpleSQLFallback, contextUser) {
476
- // Get the current provider instance
477
- const provider = Metadata.Provider;
478
- if (provider && provider._sqlLoggingSessions.size > 0) {
479
- await provider._logSqlStatement(query, parameters, description, false, isMutation, simpleSQLFallback, contextUser);
480
- }
481
- }
482
- /**************************************************************************/
483
- // END ---- SQL Logging Methods
484
- /**************************************************************************/
485
- /**************************************************************************/
486
- // START ---- IRunReportProvider
487
- /**************************************************************************/
488
- async RunReport(params, contextUser) {
489
- const ReportID = params.ReportID;
490
- // run the sql and return the data
491
- const sqlReport = `SELECT ReportSQL FROM [${this.MJCoreSchemaName}].vwReports WHERE ID =${ReportID}`;
492
- const reportInfo = await this.ExecuteSQL(sqlReport, undefined, undefined, contextUser);
493
- if (reportInfo && reportInfo.length > 0) {
494
- const start = new Date().getTime();
495
- const sql = reportInfo[0].ReportSQL;
496
- const result = await this.ExecuteSQL(sql, undefined, undefined, contextUser);
497
- const end = new Date().getTime();
498
- if (result)
499
- return {
500
- Success: true,
501
- ReportID,
502
- Results: result,
503
- RowCount: result.length,
504
- ExecutionTime: end - start,
505
- ErrorMessage: '',
506
- };
507
- else
508
- return {
509
- Success: false,
510
- ReportID,
511
- Results: [],
512
- RowCount: 0,
513
- ExecutionTime: end - start,
514
- ErrorMessage: 'Error running report SQL',
515
- };
516
- }
517
- else
518
- return { Success: false, ReportID, Results: [], RowCount: 0, ExecutionTime: 0, ErrorMessage: 'Report not found' };
519
- }
520
- /**************************************************************************/
521
- // END ---- IRunReportProvider
522
- /**************************************************************************/
523
- /**
524
- * Resolves a hierarchical category path (e.g., "/MJ/AI/Agents/") to a CategoryID.
525
- * The path is split by "/" and each segment is matched case-insensitively against
526
- * category names, walking down the hierarchy from root to leaf.
527
- *
528
- * @param categoryPath The hierarchical category path (e.g., "/MJ/AI/Agents/")
529
- * @returns The CategoryID if the path exists, null otherwise
530
- */
531
- resolveCategoryPath(categoryPath) {
532
- if (!categoryPath)
533
- return null;
534
- // Split path and clean segments - remove empty strings from leading/trailing slashes
535
- const segments = categoryPath.split('/')
536
- .map(s => s.trim())
537
- .filter(s => s.length > 0);
538
- if (segments.length === 0)
539
- return null;
540
- // Walk down the hierarchy to find the target category
541
- let currentCategory = null;
542
- for (const segment of segments) {
543
- const parentId = currentCategory?.ID || null;
544
- currentCategory = this.QueryCategories.find(cat => cat.Name.trim().toLowerCase() === segment.toLowerCase() &&
545
- cat.ParentID === parentId);
546
- if (!currentCategory) {
547
- return null; // Path not found
548
- }
549
- }
550
- return currentCategory.ID;
551
- }
552
350
  /**
553
351
  * Finds a query by ID or by Name+Category combination.
554
352
  * Supports both direct CategoryID lookup and hierarchical CategoryPath path resolution.
@@ -581,7 +379,7 @@ export class SQLServerDataProvider extends DatabaseProviderBase {
581
379
  else if (CategoryPath) {
582
380
  const resolvedCategoryId = this.resolveCategoryPath(CategoryPath);
583
381
  if (resolvedCategoryId) {
584
- matches = matches && q.CategoryID === resolvedCategoryId;
382
+ matches = matches && UUIDsEqual(q.CategoryID, resolvedCategoryId);
585
383
  }
586
384
  else {
587
385
  matches = matches && q.Category.trim().toLowerCase() === CategoryPath.trim().toLowerCase();
@@ -604,59 +402,6 @@ export class SQLServerDataProvider extends DatabaseProviderBase {
604
402
  return queries[0];
605
403
  }
606
404
  }
607
- /**
608
- * Looks up a query from QueryEngine's auto-refreshed cache by ID, name, and optional category filters.
609
- */
610
- findQueryInEngine(QueryID, QueryName, CategoryID, CategoryPath) {
611
- const engineQueries = QueryEngine.Instance?.Queries;
612
- if (!engineQueries || engineQueries.length === 0) {
613
- return null; // Engine not loaded yet
614
- }
615
- if (QueryID) {
616
- const lower = QueryID.trim().toLowerCase();
617
- return engineQueries.find(q => q.ID.trim().toLowerCase() === lower) ?? null;
618
- }
619
- if (QueryName) {
620
- const lowerName = QueryName.trim().toLowerCase();
621
- const matches = engineQueries.filter(q => q.Name.trim().toLowerCase() === lowerName);
622
- if (matches.length === 0)
623
- return null;
624
- if (matches.length === 1)
625
- return matches[0];
626
- // Disambiguate by category
627
- if (CategoryID) {
628
- const byId = matches.find(q => q.CategoryID?.trim().toLowerCase() === CategoryID.trim().toLowerCase());
629
- if (byId)
630
- return byId;
631
- }
632
- if (CategoryPath) {
633
- const resolvedCategoryId = this.resolveCategoryPath(CategoryPath);
634
- if (resolvedCategoryId) {
635
- const byPath = matches.find(q => q.CategoryID === resolvedCategoryId);
636
- if (byPath)
637
- return byPath;
638
- }
639
- }
640
- return matches[0];
641
- }
642
- return null;
643
- }
644
- /**
645
- * Creates a fresh QueryInfo from a MJQueryEntity and patches the ProviderBase in-memory cache.
646
- * This avoids stale data without requiring a full metadata reload.
647
- */
648
- refreshQueryInfoFromEntity(entity) {
649
- const freshInfo = new QueryInfo(entity.GetAll());
650
- // Patch the ProviderBase cache: replace the stale entry or add the new one
651
- const existingIndex = this.Queries.findIndex(q => q.ID === freshInfo.ID);
652
- if (existingIndex >= 0) {
653
- this.Queries[existingIndex] = freshInfo;
654
- }
655
- else {
656
- this.Queries.push(freshInfo);
657
- }
658
- return freshInfo;
659
- }
660
405
  /**************************************************************************/
661
406
  // START ---- IRunQueryProvider
662
407
  /**************************************************************************/
@@ -799,10 +544,10 @@ export class SQLServerDataProvider extends DatabaseProviderBase {
799
544
  * Processes query parameters and applies template substitution if needed
800
545
  */
801
546
  processQueryParameters(query, parameters) {
802
- let finalSQL = query.SQL;
547
+ let finalSQL = query.GetPlatformSQL(this.PlatformKey);
803
548
  let appliedParameters = {};
804
549
  if (query.UsesTemplate) {
805
- const processingResult = QueryParameterProcessor.processQueryTemplate(query, parameters);
550
+ const processingResult = QueryParameterProcessor.processQueryTemplate(query, parameters, finalSQL);
806
551
  if (!processingResult.success) {
807
552
  throw new Error(processingResult.error);
808
553
  }
@@ -925,154 +670,6 @@ export class SQLServerDataProvider extends DatabaseProviderBase {
925
670
  const promises = params.map((p) => this.InternalRunQuery(p, contextUser));
926
671
  return Promise.all(promises);
927
672
  }
928
- /**
929
- * RunQueriesWithCacheCheck - Smart cache validation for batch RunQueries.
930
- * For each query request, if cacheStatus is provided, uses the Query's CacheValidationSQL
931
- * to check if the cached data is still current by comparing MAX(__mj_UpdatedAt) and COUNT(*)
932
- * with client's values. Returns 'current' if cache is valid (no data), or 'stale' with fresh data.
933
- *
934
- * Queries without CacheValidationSQL configured will return 'no_validation' status with full data.
935
- */
936
- async RunQueriesWithCacheCheck(params, contextUser) {
937
- try {
938
- const user = contextUser || this.CurrentUser;
939
- if (!user) {
940
- return {
941
- success: false,
942
- results: [],
943
- errorMessage: 'No user context available',
944
- };
945
- }
946
- // Separate items that need cache check from those that don't
947
- const itemsNeedingCacheCheck = [];
948
- const itemsWithoutCacheCheck = [];
949
- const itemsWithoutValidationSQL = [];
950
- const errorResults = [];
951
- // Pre-process all items to resolve query info and validate
952
- for (let i = 0; i < params.length; i++) {
953
- const item = params[i];
954
- // Resolve query info
955
- const queryInfo = this.resolveQueryInfo(item.params);
956
- if (!queryInfo) {
957
- errorResults.push({
958
- queryIndex: i,
959
- queryId: item.params.QueryID || '',
960
- status: 'error',
961
- errorMessage: `Query not found: ${item.params.QueryID || item.params.QueryName}`,
962
- });
963
- continue;
964
- }
965
- // Check permissions
966
- if (!queryInfo.UserCanRun(user)) {
967
- errorResults.push({
968
- queryIndex: i,
969
- queryId: queryInfo.ID,
970
- status: 'error',
971
- errorMessage: `User does not have permission to run query: ${queryInfo.Name}`,
972
- });
973
- continue;
974
- }
975
- if (!item.cacheStatus) {
976
- // No cache status provided - will run full query
977
- itemsWithoutCacheCheck.push({ index: i, item });
978
- continue;
979
- }
980
- // Check if query has CacheValidationSQL
981
- if (!queryInfo.CacheValidationSQL) {
982
- // No validation SQL configured - will run full query and return 'no_validation'
983
- itemsWithoutValidationSQL.push({ index: i, item, queryInfo });
984
- continue;
985
- }
986
- itemsNeedingCacheCheck.push({ index: i, item, queryInfo });
987
- }
988
- // Execute batched cache status check for all items that need it
989
- const cacheStatusResults = await this.getBatchedQueryCacheStatus(itemsNeedingCacheCheck, contextUser);
990
- // Determine which items are current vs stale
991
- const staleItems = [];
992
- const currentResults = [];
993
- for (const { index, item, queryInfo } of itemsNeedingCacheCheck) {
994
- const serverStatus = cacheStatusResults.get(index);
995
- if (!serverStatus || !serverStatus.success) {
996
- errorResults.push({
997
- queryIndex: index,
998
- queryId: queryInfo.ID,
999
- status: 'error',
1000
- errorMessage: serverStatus?.errorMessage || 'Failed to get cache status',
1001
- });
1002
- continue;
1003
- }
1004
- const isCurrent = this.isCacheCurrent(item.cacheStatus, serverStatus);
1005
- if (isCurrent) {
1006
- currentResults.push({
1007
- queryIndex: index,
1008
- queryId: queryInfo.ID,
1009
- status: 'current',
1010
- });
1011
- }
1012
- else {
1013
- staleItems.push({ index, params: item.params, queryInfo });
1014
- }
1015
- }
1016
- // Run full queries in parallel for:
1017
- // 1. Items without cache status (no fingerprint from client)
1018
- // 2. Items without CacheValidationSQL (always return data with 'no_validation' status)
1019
- // 3. Items with stale cache
1020
- const fullQueryPromises = [
1021
- ...itemsWithoutCacheCheck.map(({ index, item }) => this.runFullQueryAndReturnForQuery(item.params, index, 'stale', contextUser)),
1022
- ...itemsWithoutValidationSQL.map(({ index, item, queryInfo }) => this.runFullQueryAndReturnForQuery(item.params, index, 'no_validation', contextUser, queryInfo.ID)),
1023
- ...staleItems.map(({ index, params: queryParams, queryInfo }) => this.runFullQueryAndReturnForQuery(queryParams, index, 'stale', contextUser, queryInfo.ID)),
1024
- ];
1025
- const fullQueryResults = await Promise.all(fullQueryPromises);
1026
- // Combine all results and sort by queryIndex
1027
- const allResults = [...errorResults, ...currentResults, ...fullQueryResults];
1028
- allResults.sort((a, b) => a.queryIndex - b.queryIndex);
1029
- return {
1030
- success: true,
1031
- results: allResults,
1032
- };
1033
- }
1034
- catch (e) {
1035
- LogError(e);
1036
- return {
1037
- success: false,
1038
- results: [],
1039
- errorMessage: e instanceof Error ? e.message : String(e),
1040
- };
1041
- }
1042
- }
1043
- /**
1044
- * Resolves QueryInfo from RunQueryParams (by ID or Name+CategoryPath).
1045
- */
1046
- resolveQueryInfo(params) {
1047
- // Try QueryEngine first for fresh, auto-refreshed data
1048
- const freshEntity = this.findQueryInEngine(params.QueryID, params.QueryName, params.CategoryID, params.CategoryPath);
1049
- if (freshEntity) {
1050
- return this.refreshQueryInfoFromEntity(freshEntity);
1051
- }
1052
- // Fall back to ProviderBase cache if engine isn't loaded
1053
- if (params.QueryID) {
1054
- return this.Queries.find((q) => q.ID === params.QueryID);
1055
- }
1056
- if (params.QueryName) {
1057
- const matchingQueries = this.Queries.filter((q) => q.Name.trim().toLowerCase() === params.QueryName?.trim().toLowerCase());
1058
- if (matchingQueries.length === 0)
1059
- return undefined;
1060
- if (matchingQueries.length === 1)
1061
- return matchingQueries[0];
1062
- if (params.CategoryPath) {
1063
- const byPath = matchingQueries.find((q) => q.CategoryPath.toLowerCase() === params.CategoryPath?.toLowerCase());
1064
- if (byPath)
1065
- return byPath;
1066
- }
1067
- if (params.CategoryID) {
1068
- const byId = matchingQueries.find((q) => q.CategoryID === params.CategoryID);
1069
- if (byId)
1070
- return byId;
1071
- }
1072
- return matchingQueries[0];
1073
- }
1074
- return undefined;
1075
- }
1076
673
  /**
1077
674
  * Executes a batched cache status check for multiple queries using their CacheValidationSQL.
1078
675
  */
@@ -1116,1193 +713,81 @@ export class SQLServerDataProvider extends DatabaseProviderBase {
1116
713
  }
1117
714
  return results;
1118
715
  }
1119
- /**
1120
- * Runs the full query and returns results with cache metadata.
1121
- */
1122
- async runFullQueryAndReturnForQuery(params, queryIndex, status, contextUser, queryId) {
1123
- const result = await this.InternalRunQuery(params, contextUser);
1124
- if (!result.Success) {
1125
- return {
1126
- queryIndex,
1127
- queryId: queryId || result.QueryID || '',
1128
- status: 'error',
1129
- errorMessage: result.ErrorMessage || 'Unknown error executing query',
1130
- };
1131
- }
1132
- // Extract maxUpdatedAt from results
1133
- const maxUpdatedAt = this.extractMaxUpdatedAt(result.Results);
1134
- return {
1135
- queryIndex,
1136
- queryId: result.QueryID,
1137
- status,
1138
- results: result.Results,
1139
- maxUpdatedAt,
1140
- rowCount: result.Results.length,
1141
- };
1142
- }
1143
716
  /**************************************************************************/
1144
717
  // END ---- IRunQueryProvider
1145
718
  /**************************************************************************/
719
+ /**************************************************************************/
720
+ // START ---- IRunViewProvider
721
+ /**************************************************************************/
722
+ BuildTopClause(maxRows) {
723
+ return `TOP ${maxRows}`;
724
+ }
725
+ BuildPaginationSQL(maxRows, startRow) {
726
+ return `OFFSET ${startRow} ROWS FETCH NEXT ${maxRows} ROWS ONLY`;
727
+ }
728
+ BuildParameterPlaceholder(index) {
729
+ return `@p${index}`;
730
+ }
1146
731
  /**
1147
- * This method will check to see if the where clause for the view provided has any templating within it, and if it does
1148
- * will replace the templating with the appropriate run-time values. This is done recursively with depth-first traversal
1149
- * so that if there are nested templates, they will be replaced as well. We also maintain a stack to ensure that any
1150
- * possible circular references are caught and an error is thrown if that is the case.
1151
- * @param viewEntity
1152
- * @param user
732
+ * Executes a batched cache status check for multiple views in a single SQL call.
733
+ * Uses multiple result sets to return status for each view efficiently.
1153
734
  */
1154
- async RenderViewWhereClause(viewEntity, user, stack = []) {
735
+ async getBatchedServerCacheStatus(items, contextUser) {
736
+ const results = new Map();
737
+ if (items.length === 0) {
738
+ return results;
739
+ }
740
+ // Build array of SQL statements for batch execution
741
+ const sqlStatements = [];
742
+ for (const { entityInfo, whereSQL } of items) {
743
+ const statusSQL = `SELECT COUNT(*) AS TotalRows, MAX(__mj_UpdatedAt) AS MaxUpdatedAt FROM [${entityInfo.SchemaName}].${entityInfo.BaseView}${whereSQL ? ' WHERE ' + whereSQL : ''}`;
744
+ sqlStatements.push(statusSQL);
745
+ }
1155
746
  try {
1156
- let sWhere = viewEntity.WhereClause;
1157
- if (sWhere && sWhere.length > 0) {
1158
- // check for the existence of one or more templated values in the where clause which will follow the nunjucks format of {%variable%}
1159
- const templateRegex = /{%([^%]+)%}/g;
1160
- const matches = sWhere.match(templateRegex);
1161
- if (matches) {
1162
- for (const match of matches) {
1163
- const variable = match.substring(2, match.length - 2); // remove the {% and %}
1164
- // the variable has a name and a parameter value for example {%UserView "123456"%}
1165
- // where UserView is the variable name and 123456 is the parameter value, in this case the View ID
1166
- // we need to split the variable into its name and parameter value
1167
- const parts = variable.split(' ');
1168
- const variableName = parts[0];
1169
- if (variableName.trim().toLowerCase() === 'userview') {
1170
- let variableValue = parts.length > 1 ? parts[1] : null;
1171
- // now strip the quotes from the variable value if they are there
1172
- if (variableValue && variableValue.startsWith('"') && variableValue.endsWith('"'))
1173
- variableValue = variableValue.substring(1, variableValue.length - 1);
1174
- if (stack.includes(variable))
1175
- throw new Error(`Circular reference detected in view where clause for variable ${variable}`);
1176
- else
1177
- stack.push(variable); // add to the stack for circular reference detection
1178
- // variable values is the view ID of the view that we want to get its WHERE CLAUSE, so we need to get the view entity
1179
- const innerViewEntity = await ViewInfo.GetViewEntity(variableValue, user);
1180
- if (innerViewEntity) {
1181
- // we have the inner view, so now call this function recursively to get the where clause for the inner view
1182
- const innerWhere = await this.RenderViewWhereClause(innerViewEntity, user, stack);
1183
- const innerSQL = `SELECT [${innerViewEntity.ViewEntityInfo.FirstPrimaryKey.Name}] FROM [${innerViewEntity.ViewEntityInfo.SchemaName}].[${innerViewEntity.ViewEntityInfo.BaseView}] WHERE (${innerWhere})`;
1184
- sWhere = sWhere.replace(match, innerSQL);
1185
- }
1186
- else
1187
- throw new Error(`View ID ${variableValue} not found in metadata`);
1188
- }
1189
- else {
1190
- // we don't know what this variable is, so throw an error
1191
- throw new Error(`Unknown variable ${variableName} as part of template match ${match} in view where clause`);
1192
- }
1193
- }
747
+ // Execute the batched SQL using existing ExecuteSQLBatch method
748
+ const resultSets = await this.ExecuteSQLBatch(sqlStatements, undefined, undefined, contextUser);
749
+ // Process each result set and map to the corresponding item index
750
+ for (let i = 0; i < items.length; i++) {
751
+ const { index } = items[i];
752
+ const resultSet = resultSets[i];
753
+ if (resultSet && resultSet.length > 0) {
754
+ const row = resultSet[0];
755
+ results.set(index, {
756
+ success: true,
757
+ rowCount: row.TotalRows,
758
+ maxUpdatedAt: row.MaxUpdatedAt ? new Date(row.MaxUpdatedAt).toISOString() : undefined,
759
+ });
1194
760
  }
1195
761
  else {
1196
- // no matches, just a regular old SQL where clause, so we're done, do nothing here as the return process will be below
762
+ results.set(index, { success: true, rowCount: 0, maxUpdatedAt: undefined });
1197
763
  }
1198
764
  }
1199
- return sWhere;
1200
765
  }
1201
766
  catch (e) {
1202
- LogError(e);
1203
- throw e;
767
+ // If batch fails, mark all items as failed
768
+ const errorMessage = e instanceof Error ? e.message : String(e);
769
+ for (const { index } of items) {
770
+ results.set(index, { success: false, errorMessage });
771
+ }
1204
772
  }
773
+ return results;
774
+ }
775
+ async executeSQLForUserViewRunLogging(viewId, entityBaseView, whereSQL, orderBySQL, user) {
776
+ const entityInfo = this.Entities.find((e) => e.BaseView.trim().toLowerCase() === entityBaseView.trim().toLowerCase());
777
+ const sSQL = `
778
+ DECLARE @ViewIDList TABLE ( ID NVARCHAR(255) );
779
+ INSERT INTO @ViewIDList (ID) (SELECT ${entityInfo.FirstPrimaryKey.Name} FROM [${entityInfo.SchemaName}].${entityBaseView} WHERE (${whereSQL}))
780
+ EXEC [${this.MJCoreSchemaName}].spCreateUserViewRunWithDetail(${viewId},${user.Email}, @ViewIDLIst)
781
+ `;
782
+ const runIDResult = await this.ExecuteSQL(sSQL, undefined, undefined, user);
783
+ const runID = runIDResult[0].UserViewRunID;
784
+ const sRetSQL = `SELECT * FROM [${entityInfo.SchemaName}].${entityBaseView} WHERE ${entityInfo.FirstPrimaryKey.Name} IN
785
+ (SELECT RecordID FROM [${this.MJCoreSchemaName}].vwUserViewRunDetails WHERE UserViewRunID=${runID})
786
+ ${orderBySQL && orderBySQL.length > 0 ? ` ORDER BY ${orderBySQL}` : ''}`;
787
+ return { executeViewSQL: sRetSQL, runID };
1205
788
  }
1206
789
  /**************************************************************************/
1207
- // START ---- IRunViewProvider
1208
- /**************************************************************************/
1209
- async InternalRunView(params, contextUser) {
1210
- // This is the internal implementation - pre/post processing is handled by ProviderBase.RunView()
1211
- // Log aggregate input for debugging
1212
- if (params?.Aggregates?.length) {
1213
- LogStatus(`[SQLServerDataProvider] InternalRunView received aggregates: entityName=${params.EntityName}, viewID=${params.ViewID}, viewName=${params.ViewName}, aggregateCount=${params.Aggregates.length}, aggregates=${JSON.stringify(params.Aggregates.map(a => ({ expression: a.expression, alias: a.alias })))}`);
1214
- }
1215
- const startTime = new Date();
1216
- try {
1217
- if (params) {
1218
- const user = contextUser ? contextUser : this.CurrentUser;
1219
- if (!user)
1220
- throw new Error(`User not found in metadata and no contextUser provided to RunView()`);
1221
- let viewEntity = null, entityInfo = null;
1222
- if (params.ViewEntity)
1223
- viewEntity = params.ViewEntity;
1224
- else if (params.ViewID && params.ViewID.length > 0)
1225
- viewEntity = await ViewInfo.GetViewEntity(params.ViewID, contextUser);
1226
- else if (params.ViewName && params.ViewName.length > 0)
1227
- viewEntity = await ViewInfo.GetViewEntityByName(params.ViewName, contextUser);
1228
- if (!viewEntity) {
1229
- // if we don't have viewEntity, that means it is a dynamic view, so we need EntityName at a minimum
1230
- if (!params.EntityName || params.EntityName.length === 0)
1231
- throw new Error(`EntityName is required when ViewID or ViewName is not provided`);
1232
- entityInfo = this.Entities.find((e) => e.Name.trim().toLowerCase() === params.EntityName.trim().toLowerCase());
1233
- if (!entityInfo)
1234
- throw new Error(`Entity ${params.EntityName} not found in metadata`);
1235
- }
1236
- else {
1237
- entityInfo = this.Entities.find((e) => e.ID === viewEntity.EntityID);
1238
- if (!entityInfo)
1239
- throw new Error(`Entity ID: ${viewEntity.EntityID} not found in metadata`);
1240
- }
1241
- // check permissions now, this call will throw an error if the user doesn't have permission
1242
- this.CheckUserReadPermissions(entityInfo.Name, user);
1243
- // get other variaables from params
1244
- const extraFilter = params.ExtraFilter;
1245
- const userSearchString = params.UserSearchString;
1246
- const excludeUserViewRunID = params.ExcludeUserViewRunID;
1247
- const overrideExcludeFilter = params.OverrideExcludeFilter;
1248
- const saveViewResults = params.SaveViewResults;
1249
- let topSQL = '';
1250
- // Only use TOP if we're NOT using OFFSET/FETCH pagination
1251
- const usingPagination = params.MaxRows && params.MaxRows > 0 && (params.StartRow !== undefined && params.StartRow >= 0);
1252
- if (params.IgnoreMaxRows === true) {
1253
- // do nothing, leave it blank, this structure is here to make the code easier to read
1254
- }
1255
- else if (usingPagination) {
1256
- // When using OFFSET/FETCH, don't add TOP clause
1257
- // do nothing, leave it blank
1258
- }
1259
- else if (params.MaxRows && params.MaxRows > 0) {
1260
- // user provided a max rows, so we use that (but not using pagination)
1261
- topSQL = 'TOP ' + params.MaxRows;
1262
- }
1263
- else if (entityInfo.UserViewMaxRows && entityInfo.UserViewMaxRows > 0) {
1264
- topSQL = 'TOP ' + entityInfo.UserViewMaxRows;
1265
- }
1266
- const fields = this.getRunTimeViewFieldString(params, viewEntity);
1267
- let viewSQL = `SELECT ${topSQL} ${fields} FROM [${entityInfo.SchemaName}].${entityInfo.BaseView}`;
1268
- // We need countSQL for pagination (to get total count) or when using TOP (to show limited vs total)
1269
- let countSQL = (usingPagination || (topSQL && topSQL.length > 0)) ? `SELECT COUNT(*) AS TotalRowCount FROM [${entityInfo.SchemaName}].${entityInfo.BaseView}` : null;
1270
- let whereSQL = '';
1271
- let bHasWhere = false;
1272
- let userViewRunID = '';
1273
- // The view may have a where clause that is part of the view definition. If so, we need to add it to the SQL
1274
- if (viewEntity?.WhereClause && viewEntity?.WhereClause.length > 0) {
1275
- const renderedWhere = await this.RenderViewWhereClause(viewEntity, contextUser);
1276
- whereSQL = `(${renderedWhere})`;
1277
- bHasWhere = true;
1278
- }
1279
- // a developer calling the function can provide an additional Extra Filter which is any valid SQL exprssion that can be added to the WHERE clause
1280
- if (extraFilter && extraFilter.length > 0) {
1281
- // extra filter is simple- we just AND it to the where clause if it exists, or we add it as a where clause if there was no prior WHERE
1282
- if (!this.validateUserProvidedSQLClause(extraFilter))
1283
- throw new Error(`Invalid Extra Filter: ${extraFilter}, contains one more for forbidden keywords`);
1284
- if (bHasWhere) {
1285
- whereSQL += ` AND (${extraFilter})`;
1286
- }
1287
- else {
1288
- whereSQL = `(${extraFilter})`;
1289
- bHasWhere = true;
1290
- }
1291
- }
1292
- // check for a user provided search string and generate SQL as needed if provided
1293
- if (userSearchString && userSearchString.length > 0) {
1294
- if (!this.validateUserProvidedSQLClause(userSearchString))
1295
- throw new Error(`Invalid User Search SQL clause: ${userSearchString}, contains one more for forbidden keywords`);
1296
- const sUserSearchSQL = this.createViewUserSearchSQL(entityInfo, userSearchString);
1297
- if (sUserSearchSQL.length > 0) {
1298
- if (bHasWhere) {
1299
- whereSQL += ` AND (${sUserSearchSQL})`;
1300
- }
1301
- else {
1302
- whereSQL = `(${sUserSearchSQL})`;
1303
- bHasWhere = true;
1304
- }
1305
- }
1306
- }
1307
- // now, check for an exclude UserViewRunID, or exclusion of ALL prior runs
1308
- // if provided, we need to exclude the records that were part of that run (or all prior runs)
1309
- if ((excludeUserViewRunID && excludeUserViewRunID.length > 0) || params.ExcludeDataFromAllPriorViewRuns === true) {
1310
- let sExcludeSQL = `ID NOT IN (SELECT RecordID FROM [${this.MJCoreSchemaName}].vwUserViewRunDetails WHERE EntityID='${viewEntity.EntityID}' AND`;
1311
- if (params.ExcludeDataFromAllPriorViewRuns === true)
1312
- sExcludeSQL += ` UserViewID=${viewEntity.ID})`; // exclude ALL prior runs for this view, we do NOT need to also add the UserViewRunID even if it was provided because this will automatically filter that out too
1313
- else
1314
- sExcludeSQL += `UserViewRunID=${excludeUserViewRunID})`; // exclude just the run that was provided
1315
- if (overrideExcludeFilter && overrideExcludeFilter.length > 0) {
1316
- if (!this.validateUserProvidedSQLClause(overrideExcludeFilter))
1317
- throw new Error(`Invalid OverrideExcludeFilter: ${overrideExcludeFilter}, contains one more for forbidden keywords`);
1318
- // add in the OVERRIDE filter with an OR statement, this results in those rows that match the Exclude filter to be included
1319
- // even if they're in the UserViewRunID that we're excluding
1320
- sExcludeSQL += ' OR (' + overrideExcludeFilter + ')';
1321
- }
1322
- if (bHasWhere) {
1323
- whereSQL += ` AND (${sExcludeSQL})`;
1324
- }
1325
- else {
1326
- whereSQL = `(${sExcludeSQL})`;
1327
- bHasWhere = true;
1328
- }
1329
- }
1330
- // NEXT, apply Row Level Security (RLS)
1331
- if (!entityInfo.UserExemptFromRowLevelSecurity(user, EntityPermissionType.Read)) {
1332
- // user is NOT exempt from RLS, so we need to apply it
1333
- const rlsWhereClause = entityInfo.GetUserRowLevelSecurityWhereClause(user, EntityPermissionType.Read, '');
1334
- if (rlsWhereClause && rlsWhereClause.length > 0) {
1335
- if (bHasWhere) {
1336
- whereSQL += ` AND (${rlsWhereClause})`;
1337
- }
1338
- else {
1339
- whereSQL = `(${rlsWhereClause})`;
1340
- bHasWhere = true;
1341
- }
1342
- }
1343
- }
1344
- if (bHasWhere) {
1345
- viewSQL += ` WHERE ${whereSQL}`;
1346
- if (countSQL)
1347
- countSQL += ` WHERE ${whereSQL}`;
1348
- }
1349
- // figure out the sorting for the view
1350
- // first check params.OrderBy, that takes first priority
1351
- // if that's not provided, then we check the view definition for its SortState
1352
- // if that's not provided we do NOT sort
1353
- const orderBy = params.OrderBy ? params.OrderBy : viewEntity ? viewEntity.OrderByClause : '';
1354
- // if we're saving the view results, we need to wrap the entire SQL statement
1355
- if (viewEntity?.ID && viewEntity?.ID.length > 0 && saveViewResults && user) {
1356
- const { executeViewSQL, runID } = await this.executeSQLForUserViewRunLogging(viewEntity.ID, viewEntity.EntityBaseView, whereSQL, orderBy, user);
1357
- viewSQL = executeViewSQL;
1358
- userViewRunID = runID;
1359
- }
1360
- else if (orderBy && orderBy.length > 0) {
1361
- // we only add order by if we're not doing run logging. This is becuase the run logging will
1362
- // add the order by to its SELECT query that pulls from the list of records that were returned
1363
- // there is no point in ordering the rows as they are saved into an audit list anyway so no order-by above
1364
- // just here for final step before we execute it.
1365
- if (!this.validateUserProvidedSQLClause(orderBy))
1366
- throw new Error(`Invalid Order By clause: ${orderBy}, contains one more for forbidden keywords`);
1367
- viewSQL += ` ORDER BY ${orderBy}`;
1368
- }
1369
- // Apply pagination using OFFSET/FETCH if both MaxRows and StartRow are specified
1370
- if (params.MaxRows && params.MaxRows > 0 && (params.StartRow !== undefined && params.StartRow >= 0) && entityInfo.FirstPrimaryKey) {
1371
- // If no ORDER BY was already added, add one based on primary key (required for OFFSET/FETCH)
1372
- if (!orderBy) {
1373
- viewSQL += ` ORDER BY ${entityInfo.FirstPrimaryKey.Name} `;
1374
- }
1375
- viewSQL += ` OFFSET ${params.StartRow} ROWS FETCH NEXT ${params.MaxRows} ROWS ONLY`;
1376
- }
1377
- // Build aggregate SQL if aggregates are requested
1378
- let aggregateSQL = null;
1379
- let aggregateValidationErrors = [];
1380
- if (params.Aggregates && params.Aggregates.length > 0) {
1381
- const aggregateBuild = this.buildAggregateSQL(params.Aggregates, entityInfo, entityInfo.SchemaName, entityInfo.BaseView, whereSQL);
1382
- aggregateSQL = aggregateBuild.aggregateSQL;
1383
- aggregateValidationErrors = aggregateBuild.validationErrors;
1384
- }
1385
- // Execute queries in parallel for better performance
1386
- // - Data query (if not count_only)
1387
- // - Count query (if needed)
1388
- // - Aggregate query (if aggregates requested)
1389
- const queries = [];
1390
- const queryKeys = [];
1391
- // Data query
1392
- if (params.ResultType !== 'count_only') {
1393
- queries.push(this.ExecuteSQL(viewSQL, undefined, undefined, contextUser));
1394
- queryKeys.push('data');
1395
- }
1396
- // Count query (run in parallel if we'll need it)
1397
- const maxRowsUsed = params.MaxRows || entityInfo.UserViewMaxRows;
1398
- const willNeedCount = countSQL && (usingPagination || params.ResultType === 'count_only');
1399
- if (willNeedCount) {
1400
- queries.push(this.ExecuteSQL(countSQL, undefined, undefined, contextUser));
1401
- queryKeys.push('count');
1402
- }
1403
- // Aggregate query (runs in parallel with data/count queries)
1404
- const aggregateStartTime = Date.now();
1405
- if (aggregateSQL) {
1406
- queries.push(this.ExecuteSQL(aggregateSQL, undefined, undefined, contextUser));
1407
- queryKeys.push('aggregate');
1408
- }
1409
- // Execute all queries in parallel
1410
- const results = await Promise.all(queries);
1411
- // Map results back to their queries
1412
- const resultMap = {};
1413
- queryKeys.forEach((key, index) => {
1414
- resultMap[key] = results[index];
1415
- });
1416
- // Process data results
1417
- let retData = resultMap['data'] || [];
1418
- // Process rows for datetime conversion and field-level decryption
1419
- // This is critical for encrypted fields - without this, encrypted data stays encrypted in the UI
1420
- if (retData.length > 0 && params.ResultType !== 'count_only') {
1421
- retData = await this.ProcessEntityRows(retData, entityInfo, contextUser);
1422
- }
1423
- // Process count results - also check if we need count based on result length
1424
- let rowCount = null;
1425
- if (willNeedCount && resultMap['count']) {
1426
- const countResult = resultMap['count'];
1427
- if (countResult && countResult.length > 0) {
1428
- rowCount = countResult[0].TotalRowCount;
1429
- }
1430
- }
1431
- else if (countSQL && maxRowsUsed && retData.length === maxRowsUsed) {
1432
- // Need to run count query because we hit the limit
1433
- const countResult = await this.ExecuteSQL(countSQL, undefined, undefined, contextUser);
1434
- if (countResult && countResult.length > 0) {
1435
- rowCount = countResult[0].TotalRowCount;
1436
- }
1437
- }
1438
- // Process aggregate results
1439
- let aggregateResults = undefined;
1440
- let aggregateExecutionTime = undefined;
1441
- if (params.Aggregates && params.Aggregates.length > 0) {
1442
- aggregateExecutionTime = Date.now() - aggregateStartTime;
1443
- if (resultMap['aggregate']) {
1444
- // Map raw aggregate results back to original expressions
1445
- const rawAggregateResult = resultMap['aggregate'];
1446
- if (rawAggregateResult && rawAggregateResult.length > 0) {
1447
- const row = rawAggregateResult[0];
1448
- aggregateResults = [];
1449
- let validExprIndex = 0;
1450
- for (let i = 0; i < params.Aggregates.length; i++) {
1451
- const agg = params.Aggregates[i];
1452
- const alias = agg.alias || agg.expression;
1453
- // Check if this expression had a validation error
1454
- const validationError = aggregateValidationErrors.find(e => e.expression === agg.expression);
1455
- if (validationError) {
1456
- aggregateResults.push(validationError);
1457
- }
1458
- else {
1459
- // Get the value from the result using the numbered alias
1460
- const rawValue = row[`Agg_${validExprIndex}`];
1461
- // Cast to AggregateValue - SQL Server returns numbers, strings, dates, or null
1462
- const value = rawValue === undefined ? null : rawValue;
1463
- aggregateResults.push({
1464
- expression: agg.expression,
1465
- alias: alias,
1466
- value: value,
1467
- error: undefined
1468
- });
1469
- validExprIndex++;
1470
- }
1471
- }
1472
- }
1473
- }
1474
- else if (aggregateValidationErrors.length > 0) {
1475
- // All expressions had validation errors
1476
- aggregateResults = aggregateValidationErrors;
1477
- }
1478
- }
1479
- const stopTime = new Date();
1480
- if (params.ForceAuditLog ||
1481
- (viewEntity?.ID && (extraFilter === undefined || extraFilter === null || extraFilter?.trim().length === 0) && entityInfo.AuditViewRuns)) {
1482
- // ONLY LOG TOP LEVEL VIEW EXECUTION - this would be for views with an ID, and don't have ExtraFilter as ExtraFilter
1483
- // is only used in the system on a tab or just for ad hoc view execution
1484
- // we do NOT want to wait for this, so no await,
1485
- this.CreateAuditLogRecord(user, 'Run View', 'Run View', 'Success', JSON.stringify({
1486
- ViewID: viewEntity?.ID,
1487
- ViewName: viewEntity?.Name,
1488
- Description: params.AuditLogDescription,
1489
- RowCount: retData.length,
1490
- SQL: viewSQL,
1491
- }), entityInfo.ID, null, params.AuditLogDescription, null);
1492
- }
1493
- const result = {
1494
- RowCount: params.ResultType === 'count_only'
1495
- ? rowCount
1496
- : retData.length /*this property should be total row count if the ResultType='count_only' otherwise it should be the row count of the returned rows */,
1497
- TotalRowCount: rowCount ? rowCount : retData.length,
1498
- Results: retData,
1499
- UserViewRunID: userViewRunID,
1500
- ExecutionTime: stopTime.getTime() - startTime.getTime(),
1501
- Success: true,
1502
- ErrorMessage: null,
1503
- AggregateResults: aggregateResults,
1504
- AggregateExecutionTime: aggregateExecutionTime,
1505
- };
1506
- return result;
1507
- }
1508
- else {
1509
- return null;
1510
- }
1511
- }
1512
- catch (e) {
1513
- const exceptionStopTime = new Date();
1514
- LogError(e);
1515
- return {
1516
- RowCount: 0,
1517
- TotalRowCount: 0,
1518
- Results: [],
1519
- UserViewRunID: '',
1520
- ExecutionTime: exceptionStopTime.getTime() - startTime.getTime(),
1521
- Success: false,
1522
- ErrorMessage: e.message,
1523
- };
1524
- }
1525
- }
1526
- async InternalRunViews(params, contextUser) {
1527
- // This is the internal implementation - pre/post processing is handled by ProviderBase.RunViews()
1528
- // Note: We call InternalRunView directly since we're already inside the internal flow
1529
- const promises = params.map((p) => this.InternalRunView(p, contextUser));
1530
- const results = await Promise.all(promises);
1531
- return results;
1532
- }
1533
- /**
1534
- * RunViewsWithCacheCheck - Smart cache validation for batch RunViews.
1535
- * For each view request, if cacheStatus is provided, first checks if the cache is current
1536
- * by comparing MAX(__mj_UpdatedAt) and COUNT(*) with client's values.
1537
- * Returns 'current' if cache is valid (no data), or 'stale' with fresh data if cache is outdated.
1538
- *
1539
- * Optimized to batch all cache status checks into a single SQL call with multiple result sets.
1540
- */
1541
- async RunViewsWithCacheCheck(params, contextUser) {
1542
- try {
1543
- const user = contextUser || this.CurrentUser;
1544
- if (!user) {
1545
- return {
1546
- success: false,
1547
- results: [],
1548
- errorMessage: 'No user context available',
1549
- };
1550
- }
1551
- // Separate items that need cache check from those that don't
1552
- const itemsNeedingCacheCheck = [];
1553
- const itemsWithoutCacheCheck = [];
1554
- const errorResults = [];
1555
- // Pre-process all items to build WHERE clauses and validate
1556
- for (let i = 0; i < params.length; i++) {
1557
- const item = params[i];
1558
- if (!item.cacheStatus) {
1559
- // No cache status - will run full query
1560
- itemsWithoutCacheCheck.push({ index: i, item });
1561
- continue;
1562
- }
1563
- // Get entity info
1564
- const entityInfo = this.Entities.find((e) => e.Name.trim().toLowerCase() === item.params.EntityName?.trim().toLowerCase());
1565
- if (!entityInfo) {
1566
- errorResults.push({
1567
- viewIndex: i,
1568
- status: 'error',
1569
- errorMessage: `Entity ${item.params.EntityName} not found in metadata`,
1570
- });
1571
- continue;
1572
- }
1573
- try {
1574
- // Check permissions
1575
- this.CheckUserReadPermissions(entityInfo.Name, user);
1576
- // Build WHERE clause
1577
- const whereSQL = await this.buildWhereClauseForCacheCheck(item.params, entityInfo, user);
1578
- itemsNeedingCacheCheck.push({ index: i, item, entityInfo, whereSQL });
1579
- }
1580
- catch (e) {
1581
- errorResults.push({
1582
- viewIndex: i,
1583
- status: 'error',
1584
- errorMessage: e instanceof Error ? e.message : String(e),
1585
- });
1586
- }
1587
- }
1588
- // Execute batched cache status check for all items that need it
1589
- const cacheStatusResults = await this.getBatchedServerCacheStatus(itemsNeedingCacheCheck, contextUser);
1590
- // Determine which items are current vs stale, and whether they support differential updates
1591
- const differentialItems = [];
1592
- const staleItemsNoTracking = [];
1593
- const currentResults = [];
1594
- for (const { index, item, entityInfo, whereSQL } of itemsNeedingCacheCheck) {
1595
- const serverStatus = cacheStatusResults.get(index);
1596
- if (!serverStatus || !serverStatus.success) {
1597
- errorResults.push({
1598
- viewIndex: index,
1599
- status: 'error',
1600
- errorMessage: serverStatus?.errorMessage || 'Failed to get cache status',
1601
- });
1602
- continue;
1603
- }
1604
- const isCurrent = this.isCacheCurrent(item.cacheStatus, serverStatus);
1605
- if (isCurrent) {
1606
- currentResults.push({
1607
- viewIndex: index,
1608
- status: 'current',
1609
- });
1610
- }
1611
- else {
1612
- // Cache is stale - check if entity supports differential updates
1613
- if (entityInfo.TrackRecordChanges) {
1614
- // Entity tracks record changes - we can do differential update
1615
- differentialItems.push({
1616
- index,
1617
- params: item.params,
1618
- entityInfo,
1619
- whereSQL,
1620
- clientMaxUpdatedAt: item.cacheStatus.maxUpdatedAt,
1621
- clientRowCount: item.cacheStatus.rowCount,
1622
- serverStatus,
1623
- });
1624
- }
1625
- else {
1626
- // Entity doesn't track record changes - fall back to full refresh
1627
- staleItemsNoTracking.push({ index, params: item.params });
1628
- }
1629
- }
1630
- }
1631
- // Run queries in parallel:
1632
- // 1. Items without cache status (no fingerprint from client) - full query
1633
- // 2. Items with stale cache but no tracking - full query
1634
- // 3. Items with stale cache and tracking - differential query
1635
- const queryPromises = [
1636
- // Full queries for items without cache status
1637
- ...itemsWithoutCacheCheck.map(({ index, item }) => this.runFullQueryAndReturn(item.params, index, contextUser)),
1638
- // Full queries for entities that don't track record changes
1639
- ...staleItemsNoTracking.map(({ index, params: viewParams }) => this.runFullQueryAndReturn(viewParams, index, contextUser)),
1640
- // Differential queries for entities that track record changes
1641
- ...differentialItems.map(({ index, params: viewParams, entityInfo, whereSQL, clientMaxUpdatedAt, clientRowCount, serverStatus }) => this.runDifferentialQueryAndReturn(viewParams, entityInfo, clientMaxUpdatedAt, clientRowCount, serverStatus, whereSQL, index, contextUser)),
1642
- ];
1643
- const fullQueryResults = await Promise.all(queryPromises);
1644
- // Combine all results and sort by viewIndex
1645
- const allResults = [...errorResults, ...currentResults, ...fullQueryResults];
1646
- allResults.sort((a, b) => a.viewIndex - b.viewIndex);
1647
- return {
1648
- success: true,
1649
- results: allResults,
1650
- };
1651
- }
1652
- catch (e) {
1653
- LogError(e);
1654
- return {
1655
- success: false,
1656
- results: [],
1657
- errorMessage: e instanceof Error ? e.message : String(e),
1658
- };
1659
- }
1660
- }
1661
- /**
1662
- * Executes a batched cache status check for multiple views in a single SQL call.
1663
- * Uses multiple result sets to return status for each view efficiently.
1664
- */
1665
- async getBatchedServerCacheStatus(items, contextUser) {
1666
- const results = new Map();
1667
- if (items.length === 0) {
1668
- return results;
1669
- }
1670
- // Build array of SQL statements for batch execution
1671
- const sqlStatements = [];
1672
- for (const { entityInfo, whereSQL } of items) {
1673
- const statusSQL = `SELECT COUNT(*) AS TotalRows, MAX(__mj_UpdatedAt) AS MaxUpdatedAt FROM [${entityInfo.SchemaName}].${entityInfo.BaseView}${whereSQL ? ' WHERE ' + whereSQL : ''}`;
1674
- sqlStatements.push(statusSQL);
1675
- }
1676
- try {
1677
- // Execute the batched SQL using existing ExecuteSQLBatch method
1678
- const resultSets = await this.ExecuteSQLBatch(sqlStatements, undefined, undefined, contextUser);
1679
- // Process each result set and map to the corresponding item index
1680
- for (let i = 0; i < items.length; i++) {
1681
- const { index } = items[i];
1682
- const resultSet = resultSets[i];
1683
- if (resultSet && resultSet.length > 0) {
1684
- const row = resultSet[0];
1685
- results.set(index, {
1686
- success: true,
1687
- rowCount: row.TotalRows,
1688
- maxUpdatedAt: row.MaxUpdatedAt ? new Date(row.MaxUpdatedAt).toISOString() : undefined,
1689
- });
1690
- }
1691
- else {
1692
- results.set(index, { success: true, rowCount: 0, maxUpdatedAt: undefined });
1693
- }
1694
- }
1695
- }
1696
- catch (e) {
1697
- // If batch fails, mark all items as failed
1698
- const errorMessage = e instanceof Error ? e.message : String(e);
1699
- for (const { index } of items) {
1700
- results.set(index, { success: false, errorMessage });
1701
- }
1702
- }
1703
- return results;
1704
- }
1705
    /**
     * Builds the WHERE clause for cache status check, using same logic as InternalRunView.
     * Combines, in order: the caller's ExtraFilter, an optional user-search predicate, and
     * row level security — each screened/derived the same way InternalRunView does — so the
     * cached row counts line up with what a real view run would return.
     * @param params - RunView parameters; ExtraFilter and UserSearchString are consulted
     * @param entityInfo - Entity metadata for the target entity
     * @param user - User whose row-level-security rules apply
     * @returns Combined WHERE clause text (without the WHERE keyword); '' when nothing applies
     * @throws Error when ExtraFilter or UserSearchString fails keyword screening
     */
    async buildWhereClauseForCacheCheck(params, entityInfo, user) {
        let whereSQL = '';
        let bHasWhere = false;
        // Extra filter — screened for dangerous SQL keywords before being trusted
        if (params.ExtraFilter && params.ExtraFilter.length > 0) {
            if (!this.validateUserProvidedSQLClause(params.ExtraFilter)) {
                throw new Error(`Invalid Extra Filter: ${params.ExtraFilter}`);
            }
            whereSQL = `(${params.ExtraFilter})`;
            bHasWhere = true;
        }
        // User search string — translated into an entity-specific search predicate
        if (params.UserSearchString && params.UserSearchString.length > 0) {
            if (!this.validateUserProvidedSQLClause(params.UserSearchString)) {
                throw new Error(`Invalid User Search SQL clause: ${params.UserSearchString}`);
            }
            const sUserSearchSQL = this.createViewUserSearchSQL(entityInfo, params.UserSearchString);
            if (sUserSearchSQL.length > 0) {
                if (bHasWhere) {
                    whereSQL += ` AND (${sUserSearchSQL})`;
                }
                else {
                    whereSQL = `(${sUserSearchSQL})`;
                    bHasWhere = true;
                }
            }
        }
        // Row Level Security — appended last; skipped when the user is exempt for Read
        if (!entityInfo.UserExemptFromRowLevelSecurity(user, EntityPermissionType.Read)) {
            const rlsWhereClause = entityInfo.GetUserRowLevelSecurityWhereClause(user, EntityPermissionType.Read, '');
            if (rlsWhereClause && rlsWhereClause.length > 0) {
                if (bHasWhere) {
                    whereSQL += ` AND (${rlsWhereClause})`;
                }
                else {
                    whereSQL = `(${rlsWhereClause})`;
                    // bHasWhere intentionally not set here — this is the final clause considered
                }
            }
        }
        return whereSQL;
    }
1749
- /**
1750
- * Compares client cache status with server status to determine if cache is current.
1751
- */
1752
- isCacheCurrent(clientStatus, serverStatus) {
1753
- // Row count must match
1754
- if (clientStatus.rowCount !== serverStatus.rowCount) {
1755
- return false;
1756
- }
1757
- // Compare maxUpdatedAt dates
1758
- const clientDate = new Date(clientStatus.maxUpdatedAt);
1759
- const serverDate = serverStatus.maxUpdatedAt ? new Date(serverStatus.maxUpdatedAt) : null;
1760
- if (!serverDate) {
1761
- // No records on server, so if client has any, it's stale
1762
- return clientStatus.rowCount === 0;
1763
- }
1764
- // Dates must match (compare as ISO strings for precision)
1765
- return clientDate.toISOString() === serverDate.toISOString();
1766
- }
1767
- /**
1768
- * Runs the full query and returns results with cache metadata.
1769
- */
1770
- async runFullQueryAndReturn(params, viewIndex, contextUser) {
1771
- const result = await this.InternalRunView(params, contextUser);
1772
- if (!result.Success) {
1773
- return {
1774
- viewIndex,
1775
- status: 'error',
1776
- errorMessage: result.ErrorMessage || 'Unknown error executing view',
1777
- };
1778
- }
1779
- // Extract maxUpdatedAt from results
1780
- const maxUpdatedAt = this.extractMaxUpdatedAt(result.Results);
1781
- return {
1782
- viewIndex,
1783
- status: 'stale',
1784
- results: result.Results,
1785
- maxUpdatedAt,
1786
- rowCount: result.Results.length,
1787
- };
1788
- }
1789
- /**
1790
- * Extracts the maximum __mj_UpdatedAt value from a result set.
1791
- * @param results - Array of result objects that may contain __mj_UpdatedAt
1792
- * @returns ISO string of the max timestamp, or current time if none found
1793
- */
1794
- extractMaxUpdatedAt(results) {
1795
- if (!results || results.length === 0) {
1796
- return new Date().toISOString();
1797
- }
1798
- let maxDate = null;
1799
- for (const row of results) {
1800
- const rowObj = row;
1801
- const updatedAt = rowObj['__mj_UpdatedAt'];
1802
- if (updatedAt) {
1803
- const date = new Date(updatedAt);
1804
- if (!isNaN(date.getTime()) && (!maxDate || date > maxDate)) {
1805
- maxDate = date;
1806
- }
1807
- }
1808
- }
1809
- return maxDate ? maxDate.toISOString() : new Date().toISOString();
1810
- }
1811
- /**
1812
- * Gets the IDs of records that have been deleted since a given timestamp.
1813
- * Uses the RecordChange table which tracks all deletions for entities with TrackRecordChanges enabled.
1814
- * @param entityID - The entity ID to check deletions for
1815
- * @param sinceTimestamp - ISO timestamp to check deletions since
1816
- * @param contextUser - Optional user context for permissions
1817
- * @returns Array of record IDs (in CompositeKey concatenated string format)
1818
- */
1819
- async getDeletedRecordIDsSince(entityID, sinceTimestamp, contextUser) {
1820
- try {
1821
- const sql = `
1822
- SELECT DISTINCT RecordID
1823
- FROM [${this.MJCoreSchemaName}].vwRecordChanges
1824
- WHERE EntityID = '${entityID}'
1825
- AND Type = 'Delete'
1826
- AND ChangedAt > '${sinceTimestamp}'
1827
- `;
1828
- const results = await this.ExecuteSQL(sql, undefined, undefined, contextUser);
1829
- return results.map(r => r.RecordID);
1830
- }
1831
- catch (e) {
1832
- LogError(e);
1833
- return [];
1834
- }
1835
- }
1836
    /**
     * Gets rows that have been created or updated since a given timestamp.
     * @param params - RunView parameters (used for entity, fields, order-by, etc.)
     * @param entityInfo - Entity metadata
     * @param sinceTimestamp - ISO timestamp to check updates since
     * @param whereSQL - Pre-built WHERE clause from the original query
     * @param contextUser - Optional user context for permissions
     * @returns Array of updated/created rows; an empty array on any error
     */
    async getUpdatedRowsSince(params, entityInfo, sinceTimestamp, whereSQL, contextUser) {
        try {
            // Add the timestamp filter to the existing WHERE clause.
            // NOTE(review): sinceTimestamp is interpolated directly into the SQL string;
            // it appears to originate from server-generated ISO timestamps — confirm it
            // can never carry user-controlled text before relying on this path.
            const timestampFilter = `__mj_UpdatedAt > '${sinceTimestamp}'`;
            const combinedWhere = whereSQL
                ? `(${whereSQL}) AND ${timestampFilter}`
                : timestampFilter;
            // Build field list; bracket each name so reserved words / spaces stay valid SQL
            const fields = params.Fields && params.Fields.length > 0
                ? params.Fields.map(f => `[${f}]`).join(', ')
                : '*';
            // Build the query against the entity's base view
            let sql = `SELECT ${fields} FROM [${entityInfo.SchemaName}].${entityInfo.BaseView} WHERE ${combinedWhere}`;
            // Add ORDER BY if specified; the clause is keyword-screened first
            if (params.OrderBy && params.OrderBy.length > 0) {
                if (!this.validateUserProvidedSQLClause(params.OrderBy)) {
                    throw new Error(`Invalid OrderBy clause: ${params.OrderBy}`);
                }
                sql += ` ORDER BY ${params.OrderBy}`;
            }
            const results = await this.ExecuteSQL(sql, undefined, undefined, contextUser);
            return results;
        }
        catch (e) {
            // Best-effort: log and return an empty differential rather than failing the caller
            LogError(e);
            return [];
        }
    }
1873
- /**
1874
- * Runs a differential query and returns only changes since the client's cached state.
1875
- * This includes updated/created rows and deleted record IDs.
1876
- *
1877
- * Validates that the differential can be safely applied by checking for "hidden" deletes
1878
- * (rows deleted outside of MJ's RecordChanges tracking, e.g., direct SQL deletes).
1879
- * If hidden deletes are detected, falls back to a full query with 'stale' status.
1880
- *
1881
- * @param params - RunView parameters
1882
- * @param entityInfo - Entity metadata
1883
- * @param clientMaxUpdatedAt - Client's cached maxUpdatedAt timestamp
1884
- * @param clientRowCount - Client's cached row count
1885
- * @param serverStatus - Current server status (for new row count)
1886
- * @param whereSQL - Pre-built WHERE clause
1887
- * @param viewIndex - Index for correlation in batch operations
1888
- * @param contextUser - Optional user context
1889
- * @returns RunViewWithCacheCheckResult with differential data, or falls back to full query if unsafe
1890
- */
1891
- async runDifferentialQueryAndReturn(params, entityInfo, clientMaxUpdatedAt, clientRowCount, serverStatus, whereSQL, viewIndex, contextUser) {
1892
- try {
1893
- // Get updated/created rows since client's timestamp
1894
- const updatedRows = await this.getUpdatedRowsSince(params, entityInfo, clientMaxUpdatedAt, whereSQL, contextUser);
1895
- // Get deleted record IDs since client's timestamp
1896
- const deletedRecordIDs = await this.getDeletedRecordIDsSince(entityInfo.ID, clientMaxUpdatedAt, contextUser);
1897
- // === VALIDATION: Detect "hidden" deletes not tracked in RecordChanges ===
1898
- // Count how many returned rows are NEW (created after client's cache timestamp)
1899
- // vs rows that already existed and were just updated
1900
- const clientMaxUpdatedDate = new Date(clientMaxUpdatedAt);
1901
- const newInserts = updatedRows.filter(row => {
1902
- const createdAt = row['__mj_CreatedAt'];
1903
- if (!createdAt)
1904
- return false;
1905
- return new Date(String(createdAt)) > clientMaxUpdatedDate;
1906
- }).length;
1907
- // Calculate implied deletes using the algebra:
1908
- // serverRowCount = clientRowCount - deletes + inserts
1909
- // Therefore: impliedDeletes = clientRowCount + newInserts - serverRowCount
1910
- const serverRowCount = serverStatus.rowCount ?? 0;
1911
- const impliedDeletes = clientRowCount + newInserts - serverRowCount;
1912
- const actualDeletes = deletedRecordIDs.length;
1913
- // Validate: if impliedDeletes < 0, there are unexplained rows on the server
1914
- // This could happen with direct SQL inserts that bypassed MJ's tracking
1915
- if (impliedDeletes < 0) {
1916
- LogStatus(`Differential validation failed for ${entityInfo.Name}: impliedDeletes=${impliedDeletes} (negative). ` +
1917
- `clientRowCount=${clientRowCount}, newInserts=${newInserts}, serverRowCount=${serverRowCount}. ` +
1918
- `Falling back to full refresh.`);
1919
- return this.runFullQueryAndReturn(params, viewIndex, contextUser);
1920
- }
1921
- // Validate: if impliedDeletes > actualDeletes, there are "hidden" deletes
1922
- // not tracked in RecordChanges (e.g., direct SQL deletes)
1923
- if (impliedDeletes > actualDeletes) {
1924
- LogStatus(`Differential validation failed for ${entityInfo.Name}: hidden deletes detected. ` +
1925
- `impliedDeletes=${impliedDeletes}, actualDeletes=${actualDeletes}. ` +
1926
- `clientRowCount=${clientRowCount}, newInserts=${newInserts}, serverRowCount=${serverRowCount}. ` +
1927
- `Falling back to full refresh.`);
1928
- return this.runFullQueryAndReturn(params, viewIndex, contextUser);
1929
- }
1930
- // Validation passed - safe to apply differential
1931
- // Extract maxUpdatedAt from the updated rows (or use server status)
1932
- const newMaxUpdatedAt = updatedRows.length > 0
1933
- ? this.extractMaxUpdatedAt(updatedRows)
1934
- : serverStatus.maxUpdatedAt || new Date().toISOString();
1935
- return {
1936
- viewIndex,
1937
- status: 'differential',
1938
- differentialData: {
1939
- updatedRows,
1940
- deletedRecordIDs,
1941
- },
1942
- maxUpdatedAt: newMaxUpdatedAt,
1943
- rowCount: serverStatus.rowCount,
1944
- };
1945
- }
1946
- catch (e) {
1947
- LogError(e);
1948
- return {
1949
- viewIndex,
1950
- status: 'error',
1951
- errorMessage: e instanceof Error ? e.message : String(e),
1952
- };
1953
- }
1954
- }
1955
- validateUserProvidedSQLClause(clause) {
1956
- // First, remove all string literals from the clause to avoid false positives
1957
- // This regex matches both single and double quoted strings, handling escaped quotes
1958
- const stringLiteralPattern = /(['"])(?:(?=(\\?))\2[\s\S])*?\1/g;
1959
- // Replace all string literals with empty strings for validation purposes
1960
- const clauseWithoutStrings = clause.replace(stringLiteralPattern, '');
1961
- // convert the clause to lower case to make the keyword search case-insensitive
1962
- const lowerClause = clauseWithoutStrings.toLowerCase();
1963
- // Define forbidden keywords and characters as whole words using regular expressions
1964
- const forbiddenPatterns = [
1965
- /\binsert\b/,
1966
- /\bupdate\b/,
1967
- /\bdelete\b/,
1968
- /\bexec\b/,
1969
- /\bexecute\b/,
1970
- /\bdrop\b/,
1971
- /--/,
1972
- /\/\*/,
1973
- /\*\//,
1974
- /\bunion\b/,
1975
- /\bcast\b/,
1976
- /\bxp_/,
1977
- /;/,
1978
- ];
1979
- // Check for forbidden patterns
1980
- for (const pattern of forbiddenPatterns) {
1981
- if (pattern.test(lowerClause)) {
1982
- return false;
1983
- }
1984
- }
1985
- return true;
1986
- }
1987
    /**
     * Validates and builds an aggregate SQL query from the provided aggregate expressions.
     * Uses the SQLExpressionValidator to ensure expressions are safe from SQL injection.
     *
     * @param aggregates - Array of aggregate expressions to validate and build
     * @param entityInfo - Entity metadata for field reference validation
     * @param schemaName - Schema name for the table
     * @param baseView - Base view name for the table
     * @param whereSQL - WHERE clause to apply (without the WHERE keyword)
     * @returns Object with aggregateSQL string (null when nothing valid) and any validation errors
     */
    buildAggregateSQL(aggregates, entityInfo, schemaName, baseView, whereSQL) {
        if (!aggregates || aggregates.length === 0) {
            return { aggregateSQL: null, validationErrors: [] };
        }
        const validator = SQLExpressionValidator.Instance;
        const validationErrors = [];
        const validExpressions = [];
        const fieldNames = entityInfo.Fields.map(f => f.Name);
        for (let i = 0; i < aggregates.length; i++) {
            const agg = aggregates[i];
            const alias = agg.alias || agg.expression;
            // Validate the expression using SQLExpressionValidator
            const result = validator.validate(agg.expression, {
                context: 'aggregate',
                entityFields: fieldNames
            });
            if (!result.valid) {
                // Record the error but continue processing other expressions
                validationErrors.push({
                    expression: agg.expression,
                    alias: alias,
                    value: null,
                    error: result.error || 'Validation failed'
                });
            }
            else {
                // Expression is valid, add to the query with a positional alias.
                // The numbered Agg_{i} alias makes result-to-expression mapping trivial
                // in executeAggregateQuery.
                validExpressions.push(`${agg.expression} AS [Agg_${i}]`);
            }
        }
        if (validExpressions.length === 0) {
            return { aggregateSQL: null, validationErrors };
        }
        // Build the aggregate SQL query over the entity's base view
        let aggregateSQL = `SELECT ${validExpressions.join(', ')} FROM [${schemaName}].${baseView}`;
        if (whereSQL && whereSQL.length > 0) {
            aggregateSQL += ` WHERE ${whereSQL}`;
        }
        return { aggregateSQL, validationErrors };
    }
2039
    /**
     * Executes the aggregate query and maps results back to the original expressions.
     *
     * @param aggregateSQL - The aggregate SQL query to execute (null when no valid expressions)
     * @param aggregates - Original aggregate expressions (for result mapping)
     * @param validationErrors - Any validation errors from buildAggregateSQL
     * @param contextUser - User context for query execution
     * @returns Object with results (one AggregateResult per input expression) and executionTime in ms
     */
    async executeAggregateQuery(aggregateSQL, aggregates, validationErrors, contextUser) {
        const startTime = Date.now();
        if (!aggregateSQL) {
            // No valid expressions to execute, return only validation errors
            return { results: validationErrors, executionTime: 0 };
        }
        try {
            const queryResult = await this.ExecuteSQL(aggregateSQL, undefined, undefined, contextUser);
            const executionTime = Date.now() - startTime;
            if (!queryResult || queryResult.length === 0) {
                // Query returned no results, which shouldn't happen for aggregates.
                // Return validation errors plus null values for valid expressions.
                const nullResults = aggregates
                    .filter((_, i) => !validationErrors.some(e => e.expression === aggregates[i].expression))
                    .map(agg => ({
                        expression: agg.expression,
                        alias: agg.alias || agg.expression,
                        value: null,
                        error: undefined
                    }));
                return { results: [...validationErrors, ...nullResults], executionTime };
            }
            // Map query results back to original expressions. validExprIndex tracks the
            // positional Agg_{n} alias assigned in buildAggregateSQL: it only advances for
            // expressions that passed validation, keeping the columns and inputs aligned.
            const row = queryResult[0];
            const results = [];
            let validExprIndex = 0;
            for (let i = 0; i < aggregates.length; i++) {
                const agg = aggregates[i];
                const alias = agg.alias || agg.expression;
                // Check if this expression had a validation error
                const validationError = validationErrors.find(e => e.expression === agg.expression);
                if (validationError) {
                    results.push(validationError);
                }
                else {
                    // Get the value from the result using the numbered alias
                    const value = row[`Agg_${validExprIndex}`];
                    results.push({
                        expression: agg.expression,
                        alias: alias,
                        value: value ?? null,
                        error: undefined
                    });
                    validExprIndex++;
                }
            }
            return { results, executionTime };
        }
        catch (error) {
            const executionTime = Date.now() - startTime;
            const errorMessage = error instanceof Error ? error.message : String(error);
            // Query failed: return every expression annotated with the same error
            const errorResults = aggregates.map(agg => ({
                expression: agg.expression,
                alias: agg.alias || agg.expression,
                value: null,
                error: errorMessage
            }));
            return { results: errorResults, executionTime };
        }
    }
2109
- getRunTimeViewFieldString(params, viewEntity) {
2110
- const fieldList = this.getRunTimeViewFieldArray(params, viewEntity);
2111
- // pass this back as a comma separated list, put square brackets around field names to make sure if they are reserved words or have spaces, that they'll still work.
2112
- if (fieldList.length === 0)
2113
- return '*';
2114
- else
2115
- return fieldList
2116
- .map((f) => {
2117
- const asString = f.CodeName === f.Name ? '' : ` AS [${f.CodeName}]`;
2118
- return `[${f.Name}]${asString}`;
2119
- })
2120
- .join(',');
2121
- }
2122
- getRunTimeViewFieldArray(params, viewEntity) {
2123
- const fieldList = [];
2124
- try {
2125
- let entityInfo = null;
2126
- if (viewEntity) {
2127
- entityInfo = viewEntity.ViewEntityInfo;
2128
- }
2129
- else {
2130
- entityInfo = this.Entities.find((e) => e.Name === params.EntityName);
2131
- if (!entityInfo)
2132
- throw new Error(`Entity ${params.EntityName} not found in metadata`);
2133
- }
2134
- if (params.Fields) {
2135
- // fields provided, if primary key isn't included, add it first
2136
- for (const ef of entityInfo.PrimaryKeys) {
2137
- if (params.Fields.find((f) => f.trim().toLowerCase() === ef.Name.toLowerCase()) === undefined)
2138
- fieldList.push(ef); // always include the primary key fields in view run time field list
2139
- }
2140
- // now add the rest of the param.Fields to fields
2141
- params.Fields.forEach((f) => {
2142
- const field = entityInfo.Fields.find((field) => field.Name.trim().toLowerCase() === f.trim().toLowerCase());
2143
- if (field)
2144
- fieldList.push(field);
2145
- else
2146
- LogError(`Field ${f} not found in entity ${entityInfo.Name}`);
2147
- });
2148
- }
2149
- else {
2150
- // fields weren't provided by the caller. So, let's do the following
2151
- // * if this is a defined view, using a View Name or View ID, we use the fields that are used wtihin the View and always return the ID
2152
- // * if this is an dynamic view, we return ALL fields in the entity using *
2153
- if (viewEntity) {
2154
- // saved view, figure out it's field list
2155
- viewEntity.Columns.forEach((c) => {
2156
- if (!c.hidden) {
2157
- // only return the non-hidden fields
2158
- if (c.EntityField) {
2159
- fieldList.push(c.EntityField);
2160
- }
2161
- else {
2162
- LogError(`View Field ${c.Name} doesn't match an Entity Field in entity ${entityInfo.Name}. This can happen if the view was saved with a field that no longer exists in the entity. It is best to update the view to remove this field.`);
2163
- }
2164
- }
2165
- });
2166
- // the below shouldn't happen as the pkey fields should always be included by now, but make SURE...
2167
- for (const ef of entityInfo.PrimaryKeys) {
2168
- if (fieldList.find((f) => f.Name?.trim().toLowerCase() === ef.Name?.toLowerCase()) === undefined)
2169
- fieldList.push(ef); // always include the primary key fields in view run time field list
2170
- }
2171
- }
2172
- }
2173
- }
2174
- catch (e) {
2175
- LogError(e);
2176
- }
2177
- finally {
2178
- return fieldList;
2179
- }
2180
- }
2181
    /**
     * Logs a user view run (header + per-record detail via spCreateUserViewRunWithDetail)
     * and returns SQL that selects exactly the rows captured for that run, preserving the
     * requested ordering.
     * @param viewId - ID of the user view being run
     * @param entityBaseView - base view name used to locate entity metadata (case-insensitive)
     * @param whereSQL - WHERE clause selecting the rows for this run
     * @param orderBySQL - optional ORDER BY clause for the returned select statement
     * @param user - user executing the view (also used as SQL execution context)
     * @returns { executeViewSQL, runID } — SQL to fetch the captured rows, and the run's ID
     */
    async executeSQLForUserViewRunLogging(viewId, entityBaseView, whereSQL, orderBySQL, user) {
        const entityInfo = this.Entities.find((e) => e.BaseView.trim().toLowerCase() === entityBaseView.trim().toLowerCase());
        // NOTE(review): several suspect constructs in this batch — verify against a live
        // SQL Server before relying on this path:
        //   1. ${user.Email} is interpolated WITHOUT quotes into the EXEC argument list;
        //   2. T-SQL does not accept parenthesized argument syntax for EXEC of a stored proc;
        //   3. '@ViewIDLIst' differs in case from the declared '@ViewIDList' (harmless under
        //      case-insensitive collation, but inconsistent).
        const sSQL = `
            DECLARE @ViewIDList TABLE ( ID NVARCHAR(255) );
            INSERT INTO @ViewIDList (ID) (SELECT ${entityInfo.FirstPrimaryKey.Name} FROM [${entityInfo.SchemaName}].${entityBaseView} WHERE (${whereSQL}))
            EXEC [${this.MJCoreSchemaName}].spCreateUserViewRunWithDetail(${viewId},${user.Email}, @ViewIDLIst)
            `;
        const runIDResult = await this.ExecuteSQL(sSQL, undefined, undefined, user);
        const runID = runIDResult[0].UserViewRunID;
        // Select the captured rows back out of the run-detail table, applying ordering
        const sRetSQL = `SELECT * FROM [${entityInfo.SchemaName}].${entityBaseView} WHERE ${entityInfo.FirstPrimaryKey.Name} IN
                        (SELECT RecordID FROM [${this.MJCoreSchemaName}].vwUserViewRunDetails WHERE UserViewRunID=${runID})
                        ${orderBySQL && orderBySQL.length > 0 ? ` ORDER BY ${orderBySQL}` : ''}`;
        return { executeViewSQL: sRetSQL, runID };
    }
2195
    /**
     * Builds the SQL search predicate for a user-entered search string.
     * When the entity has full text search enabled, uses the entity's full text search
     * function via a subquery on the first primary key; otherwise ORs a LIKE (or a
     * per-field custom format) across every field flagged IncludeInUserSearchAPI.
     * @param entityInfo - entity metadata (search configuration and fields)
     * @param userSearchString - raw search text entered by the user
     * @returns Predicate SQL fragment; '' when no searchable fields exist
     */
    createViewUserSearchSQL(entityInfo, userSearchString) {
        // we have a user search string.
        // if we have full text search, we use that.
        // Otherwise, we need to manually construct the additional filter associated with this. The user search string is just text from the user
        // we need to apply it to one or more fields that are part of the entity that support being part of a user search.
        let sUserSearchSQL = '';
        if (entityInfo.FullTextSearchEnabled) {
            // Full text search path. Done as a subquery, which SQL Server optimizes into a
            // JOIN, keeping the overall filtering logically simpler.
            // Preprocessing rules for the search text:
            //  - contains " AND ", " OR ", or " NOT " -> keep operators, join other words with %
            //  - contains AND/OR/NOT without surrounding spaces -> leave alone except spaces -> %
            //  - quoted phrase ("...") -> left untouched (phrase search)
            //  - plain multi-word text -> strip stop words, then join remaining words with AND
            let u = userSearchString;
            const uUpper = u.toUpperCase();
            if (uUpper.includes(' AND ') || uUpper.includes(' OR ') || uUpper.includes(' NOT ')) {
                //replace all spaces with %, but add spaces inbetween the original and, or and not keywords
                u = uUpper.replace(/ /g, '%').replace(/%AND%/g, ' AND ').replace(/%OR%/g, ' OR ').replace(/%NOT%/g, ' NOT ');
            }
            else if (uUpper.includes('AND') || uUpper.includes('OR') || uUpper.includes('NOT')) {
                //leave the string alone, except replacing spaces with %
                u = u.replace(/ /g, '%');
            }
            else if (u.includes(' ')) {
                if (u.startsWith('"') && u.endsWith('"')) {
                    // do nothing because we start AND end with a quote, so we have a phrase search
                }
                else {
                    // we have multiple words, so we need to convert the spaces to AND
                    // but first, let's strip the stopwords out of the string
                    u = StripStopWords(userSearchString);
                    // next, include "AND" between all the words so that we have a full text search on all the words
                    u = u.replace(/ /g, ' AND ');
                }
            }
            // NOTE(review): the processed text u is interpolated into the function call
            // without quote-escaping — confirm upstream screening covers this input.
            sUserSearchSQL = `${entityInfo.FirstPrimaryKey.Name} IN (SELECT ${entityInfo.FirstPrimaryKey.Name} FROM ${entityInfo.SchemaName}.${entityInfo.FullTextSearchFunction}('${u}'))`;
        }
        else {
            // No full text search: OR together a predicate per searchable field
            const entityFields = entityInfo.Fields;
            for (const field of entityFields) {
                if (field.IncludeInUserSearchAPI) {
                    let sParam = '';
                    if (sUserSearchSQL.length > 0)
                        sUserSearchSQL += ' OR ';
                    if (field.UserSearchParamFormatAPI && field.UserSearchParamFormatAPI.length > 0)
                        // we have a search param format. we need to apply it to the user search string
                        sParam = field.UserSearchParamFormatAPI.replace('{0}', userSearchString);
                    else
                        sParam = ` LIKE '%${userSearchString}%'`;
                    sUserSearchSQL += `(${field.Name} ${sParam})`;
                }
            }
            if (sUserSearchSQL.length > 0)
                sUserSearchSQL = '(' + sUserSearchSQL + ')'; // wrap the entire search string in parens
        }
        return sUserSearchSQL;
    }
2253
    /**
     * Creates and saves an Audit Log record.
     * @param user - Required; acting user (also used as entity-object context)
     * @param authorizationName - Optional authorization name, matched case-insensitively
     * @param auditLogTypeName - Name of the audit log type; must exist in metadata
     * @param status - 'success' (case-insensitive) maps to 'Success'; anything else -> 'Failed'
     * @param details - Optional details payload
     * @param entityId - Optional entity the log refers to
     * @param recordId - Optional record the log refers to
     * @param auditLogDescription - Optional human-readable description
     * @param saveOptions - Passed through to Save()
     * @returns The saved audit log entity, or null on any failure (errors are logged, not thrown)
     */
    async CreateAuditLogRecord(user, authorizationName, auditLogTypeName, status, details, entityId, recordId, auditLogDescription, saveOptions) {
        try {
            const authorization = authorizationName
                ? this.Authorizations.find((a) => a?.Name?.trim().toLowerCase() === authorizationName.trim().toLowerCase())
                : null;
            const auditLogType = auditLogTypeName ? this.AuditLogTypes.find((a) => a?.Name?.trim().toLowerCase() === auditLogTypeName.trim().toLowerCase()) : null;
            if (!user)
                throw new Error(`User is a required parameter`);
            if (!auditLogType) {
                throw new Error(`Audit Log Type ${auditLogTypeName} not found in metadata`);
            }
            const auditLog = await this.GetEntityObject('MJ: Audit Logs', user); // must pass user context on back end as we're not authenticated the same way as the front end
            auditLog.NewRecord();
            auditLog.UserID = user.ID;
            auditLog.AuditLogTypeID = auditLogType.ID;
            // Anything other than an exact (case-insensitive) 'success' is recorded as Failed
            if (status?.trim().toLowerCase() === 'success')
                auditLog.Status = 'Success';
            else
                auditLog.Status = 'Failed';
            auditLog.EntityID = entityId;
            auditLog.RecordID = recordId;
            if (authorization)
                auditLog.AuthorizationID = authorization.ID;
            if (details)
                auditLog.Details = details;
            if (auditLogDescription)
                auditLog.Description = auditLogDescription;
            if (await auditLog.Save(saveOptions)) {
                return auditLog;
            }
            else
                throw new Error(`Error saving audit log record`);
        }
        catch (err) {
            // Audit logging is best-effort: log the failure and signal it with null
            LogError(err);
            return null;
        }
    }
2291
- CheckUserReadPermissions(entityName, contextUser) {
2292
- const entityInfo = this.Entities.find((e) => e.Name === entityName);
2293
- if (!contextUser)
2294
- throw new Error(`contextUser is null`);
2295
- // first check permissions, the logged in user must have read permissions on the entity to run the view
2296
- if (entityInfo) {
2297
- const userPermissions = entityInfo.GetUserPermisions(contextUser);
2298
- if (!userPermissions.CanRead)
2299
- throw new Error(`User ${contextUser.Email} does not have read permissions on ${entityInfo.Name}`);
2300
- }
2301
- else
2302
- throw new Error(`Entity not found in metadata`);
2303
- }
2304
- /**************************************************************************/
2305
- // END ---- IRunViewProvider
790
+ // END ---- IRunViewProvider
2306
791
  /**************************************************************************/
2307
792
  /**************************************************************************/
2308
793
  // START ---- IEntityDataProvider
@@ -2310,67 +795,6 @@ export class SQLServerDataProvider extends DatabaseProviderBase {
    /**
     * Identifies this provider as a database-backed provider to the MJ framework.
     */
    get ProviderType() {
        return ProviderType.Database;
    }
2313
- async GetRecordFavoriteStatus(userId, entityName, CompositeKey, contextUser) {
2314
- const id = await this.GetRecordFavoriteID(userId, entityName, CompositeKey, contextUser);
2315
- return id !== null;
2316
- }
2317
- async GetRecordFavoriteID(userId, entityName, CompositeKey, contextUser) {
2318
- try {
2319
- const sSQL = `SELECT ID FROM [${this.MJCoreSchemaName}].vwUserFavorites WHERE UserID='${userId}' AND Entity='${entityName}' AND RecordID='${CompositeKey.Values()}'`;
2320
- const result = await this.ExecuteSQL(sSQL, null, undefined, contextUser);
2321
- if (result && result.length > 0)
2322
- return result[0].ID;
2323
- else
2324
- return null;
2325
- }
2326
- catch (e) {
2327
- LogError(e);
2328
- throw e;
2329
- }
2330
- }
2331
- async SetRecordFavoriteStatus(userId, entityName, CompositeKey, isFavorite, contextUser) {
2332
- try {
2333
- const currentFavoriteId = await this.GetRecordFavoriteID(userId, entityName, CompositeKey);
2334
- if ((currentFavoriteId === null && isFavorite === false) || (currentFavoriteId !== null && isFavorite === true))
2335
- return; // no change
2336
- // if we're here that means we need to invert the status, which either means creating a record or deleting a record
2337
- const e = this.Entities.find((e) => e.Name === entityName);
2338
- const ufEntity = await this.GetEntityObject('MJ: User Favorites', contextUser || this.CurrentUser);
2339
- if (currentFavoriteId !== null) {
2340
- // delete the record since we are setting isFavorite to FALSE
2341
- await ufEntity.Load(currentFavoriteId);
2342
- if (await ufEntity.Delete())
2343
- return;
2344
- else
2345
- throw new Error(`Error deleting user favorite`);
2346
- }
2347
- else {
2348
- // create the record since we are setting isFavorite to TRUE
2349
- ufEntity.NewRecord();
2350
- ufEntity.Set('EntityID', e.ID);
2351
- ufEntity.Set('RecordID', CompositeKey.Values()); // this is a comma separated list of primary key values, which is fine as the primary key is a string
2352
- ufEntity.Set('UserID', userId);
2353
- if (await ufEntity.Save())
2354
- return;
2355
- else
2356
- throw new Error(`Error saving user favorite`);
2357
- }
2358
- }
2359
- catch (e) {
2360
- LogError(e);
2361
- throw e;
2362
- }
2363
- }
2364
- async GetRecordChanges(entityName, compositeKey, contextUser) {
2365
- try {
2366
- const sSQL = `SELECT * FROM [${this.MJCoreSchemaName}].vwRecordChanges WHERE Entity='${entityName}' AND RecordID='${compositeKey.ToConcatenatedString()}' ORDER BY ChangedAt DESC`;
2367
- return this.ExecuteSQL(sSQL, undefined, undefined, contextUser);
2368
- }
2369
- catch (e) {
2370
- LogError(e);
2371
- throw e;
2372
- }
2373
- }
2374
798
  /**
2375
799
  * This function will generate SQL statements for all of the possible soft links that are not traditional foreign keys but exist in entities
2376
800
  * where there is a column that has the EntityIDFieldName set to a column name (not null). We need to get a list of all such soft link fields across ALL entities
@@ -2378,7 +802,7 @@ export class SQLServerDataProvider extends DatabaseProviderBase {
2378
802
  * @param entityName
2379
803
  * @param compositeKey
2380
804
  */
2381
- GetSoftLinkDependencySQL(entityName, compositeKey) {
805
+ BuildSoftLinkDependencySQL(entityName, compositeKey) {
2382
806
  // we need to go through ALL of the entities in the system and find all of the EntityFields that have a non-null EntityIDFieldName
2383
807
  // for each of these, we generate a SQL Statement that will return the EntityName, RelatedEntityName, FieldName, and the primary key values of the related entity
2384
808
  let sSQL = '';
@@ -2410,7 +834,7 @@ export class SQLServerDataProvider extends DatabaseProviderBase {
2410
834
  });
2411
835
  return sSQL;
2412
836
  }
2413
- GetHardLinkDependencySQL(entityDependencies, compositeKey) {
837
+ BuildHardLinkDependencySQL(entityDependencies, compositeKey) {
2414
838
  let sSQL = '';
2415
839
  for (const entityDependency of entityDependencies) {
2416
840
  const entityInfo = this.Entities.find((e) => e.Name.trim().toLowerCase() === entityDependency.EntityName?.trim().toLowerCase());
@@ -2431,67 +855,6 @@ export class SQLServerDataProvider extends DatabaseProviderBase {
2431
855
  }
2432
856
  return sSQL;
2433
857
  }
2434
- /**
2435
- * Returns a list of dependencies - records that are linked to the specified Entity/RecordID combination. A dependency is as defined by the relationships in the database. The MemberJunction metadata that is used
2436
- * for this simply reflects the foreign key relationships that exist in the database. The CodeGen tool is what detects all of the relationships and generates the metadata that is used by MemberJunction. The metadata in question
2437
- * is within the EntityField table and specifically the RelatedEntity and RelatedEntityField columns. In turn, this method uses that metadata and queries the database to determine the dependencies. To get the list of entity dependencies
2438
- * you can use the utility method GetEntityDependencies(), which doesn't check for dependencies on a specific record, but rather gets the metadata in one shot that can be used for dependency checking.
2439
- * @param entityName the name of the entity to check
2440
- * @param KeyValuePairs the primary key(s) to check - only send multiple if you have an entity with a composite primary key
2441
- */
2442
- async GetRecordDependencies(entityName, compositeKey, contextUser) {
2443
- try {
2444
- const recordDependencies = [];
2445
- // first, get the entity dependencies for this entity
2446
- const entityDependencies = await this.GetEntityDependencies(entityName);
2447
- if (entityDependencies.length === 0) {
2448
- // no dependencies, exit early
2449
- return recordDependencies;
2450
- }
2451
- // now, we have to construct a query that will return the dependencies for this record, both hard and soft links
2452
- const sSQL = this.GetHardLinkDependencySQL(entityDependencies, compositeKey) + '\n' + this.GetSoftLinkDependencySQL(entityName, compositeKey);
2453
- // now, execute the query
2454
- const result = await this.ExecuteSQL(sSQL, null, undefined, contextUser);
2455
- if (!result || result.length === 0) {
2456
- return recordDependencies;
2457
- }
2458
- // now we go through the results and create the RecordDependency objects
2459
- for (const r of result) {
2460
- const entityInfo = this.Entities.find((e) => e.Name.trim().toLowerCase() === r.EntityName?.trim().toLowerCase());
2461
- if (!entityInfo) {
2462
- throw new Error(`Entity ${r.EntityName} not found in metadata`);
2463
- }
2464
- // future, if we support foreign keys that are composite keys, we'll need to enable this code
2465
- // const pkeyValues: KeyValuePair[] = [];
2466
- // entityInfo.PrimaryKeys.forEach((pk) => {
2467
- // pkeyValues.push({FieldName: pk.Name, Value: r[pk.Name]}) // add all of the primary keys, which often is as simple as just "ID", but this is generic way to do it
2468
- // })
2469
- const compositeKey = new CompositeKey();
2470
- // the row r will have a PrimaryKeyValue field that is a string that is a concatenation of the primary key field names and values
2471
- // we need to parse that out so that we can then pass it to the CompositeKey object
2472
- const pkeys = {};
2473
- const keyValues = r.PrimaryKeyValue.split(CompositeKey.DefaultFieldDelimiter);
2474
- keyValues.forEach((kv) => {
2475
- const parts = kv.split(CompositeKey.DefaultValueDelimiter);
2476
- pkeys[parts[0]] = parts[1];
2477
- });
2478
- compositeKey.LoadFromEntityInfoAndRecord(entityInfo, pkeys);
2479
- const recordDependency = {
2480
- EntityName: r.EntityName,
2481
- RelatedEntityName: r.RelatedEntityName,
2482
- FieldName: r.FieldName,
2483
- PrimaryKey: compositeKey,
2484
- };
2485
- recordDependencies.push(recordDependency);
2486
- }
2487
- return recordDependencies;
2488
- }
2489
- catch (e) {
2490
- // log and throw
2491
- LogError(e);
2492
- throw e;
2493
- }
2494
- }
2495
858
  GetRecordDependencyLinkSQL(dep, entity, relatedEntity, CompositeKey) {
2496
859
  const f = relatedEntity.Fields.find((f) => f.Name.trim().toLowerCase() === dep.FieldName?.trim().toLowerCase());
2497
860
  const quotes = entity.FirstPrimaryKey.NeedsQuotes ? "'" : '';
@@ -2509,185 +872,6 @@ export class SQLServerDataProvider extends DatabaseProviderBase {
2509
872
  return `(SELECT ${f.RelatedEntityFieldName} FROM [${entity.SchemaName}].${entity.BaseView} WHERE ${entity.FirstPrimaryKey.Name}=${quotes}${CompositeKey.GetValueByIndex(0)}${quotes})`;
2510
873
  }
2511
874
  }
2512
- async GetRecordDuplicates(params, contextUser) {
2513
- if (!contextUser) {
2514
- throw new Error('User context is required to get record duplicates.');
2515
- }
2516
- const listEntity = await this.GetEntityObject('MJ: Lists');
2517
- listEntity.ContextCurrentUser = contextUser;
2518
- const success = await listEntity.Load(params.ListID);
2519
- if (!success) {
2520
- throw new Error(`List with ID ${params.ListID} not found.`);
2521
- }
2522
- const duplicateRun = await this.GetEntityObject('MJ: Duplicate Runs');
2523
- duplicateRun.NewRecord();
2524
- duplicateRun.EntityID = params.EntityID;
2525
- duplicateRun.StartedByUserID = contextUser.ID;
2526
- duplicateRun.StartedAt = new Date();
2527
- duplicateRun.ProcessingStatus = 'In Progress';
2528
- duplicateRun.ApprovalStatus = 'Pending';
2529
- duplicateRun.SourceListID = listEntity.ID;
2530
- duplicateRun.ContextCurrentUser = contextUser;
2531
- const saveResult = await duplicateRun.Save();
2532
- if (!saveResult) {
2533
- throw new Error(`Failed to save Duplicate Run Entity`);
2534
- }
2535
- const response = {
2536
- Status: 'Inprogress',
2537
- PotentialDuplicateResult: [],
2538
- };
2539
- return response;
2540
- }
2541
- async MergeRecords(request, contextUser, options) {
2542
- const e = this.Entities.find((e) => e.Name.trim().toLowerCase() === request.EntityName.trim().toLowerCase());
2543
- if (!e || !e.AllowRecordMerge)
2544
- throw new Error(`Entity ${request.EntityName} does not allow record merging, check the AllowRecordMerge property in the entity metadata`);
2545
- const result = {
2546
- Success: false,
2547
- RecordMergeLogID: null,
2548
- RecordStatus: [],
2549
- Request: request,
2550
- OverallStatus: null,
2551
- };
2552
- const mergeRecordLog = await this.StartMergeLogging(request, result, contextUser);
2553
- try {
2554
- /*
2555
- we will follow this process...
2556
- * 1. Begin Transaction
2557
- * 2. The surviving record is loaded and fields are updated from the field map, if provided, and the record is saved. If a FieldMap not provided within the request object, this step is skipped.
2558
- * 3. For each of the records that will be merged INTO the surviving record, we call the GetEntityDependencies() method and get a list of all other records in the database are linked to the record to be deleted. We then go through each of those dependencies and update the link to point to the SurvivingRecordID and save the record.
2559
- * 4. The record to be deleted is then deleted.
2560
- * 5. Commit or Rollback Transaction
2561
- */
2562
- // Step 1 - begin transaction
2563
- await this.BeginTransaction();
2564
- // Step 2 - update the surviving record, but only do this if we were provided a field map
2565
- if (request.FieldMap && request.FieldMap.length > 0) {
2566
- const survivor = await this.GetEntityObject(request.EntityName, contextUser);
2567
- await survivor.InnerLoad(request.SurvivingRecordCompositeKey);
2568
- for (const fieldMap of request.FieldMap) {
2569
- survivor.Set(fieldMap.FieldName, fieldMap.Value);
2570
- }
2571
- if (!(await survivor.Save())) {
2572
- result.OverallStatus = 'Error saving survivor record with values from provided field map.';
2573
- throw new Error(result.OverallStatus);
2574
- }
2575
- }
2576
- // Step 3 - update the dependencies for each of the records we will delete
2577
- for (const pksToDelete of request.RecordsToMerge) {
2578
- const newRecStatus = {
2579
- CompositeKey: pksToDelete,
2580
- Success: false,
2581
- RecordMergeDeletionLogID: null,
2582
- Message: null,
2583
- };
2584
- result.RecordStatus.push(newRecStatus);
2585
- const dependencies = await this.GetRecordDependencies(request.EntityName, pksToDelete);
2586
- // now, loop through the dependencies and update the link to point to the surviving record
2587
- for (const dependency of dependencies) {
2588
- const reInfo = this.Entities.find((e) => e.Name.trim().toLowerCase() === dependency.RelatedEntityName.trim().toLowerCase());
2589
- const relatedEntity = await this.GetEntityObject(dependency.RelatedEntityName, contextUser);
2590
- await relatedEntity.InnerLoad(dependency.PrimaryKey);
2591
- relatedEntity.Set(dependency.FieldName, request.SurvivingRecordCompositeKey.GetValueByIndex(0)); // only support single field foreign keys for now
2592
- /*
2593
- if we later support composite foreign keys, we'll need to do this instead, at the moment this code will break as dependency.KeyValuePair is a single value, not an array
2594
-
2595
- for (let pkv of dependency.KeyValuePairs) {
2596
- relatedEntity.Set(dependency.FieldName, pkv.Value);
2597
- }
2598
- */
2599
- if (!(await relatedEntity.Save())) {
2600
- newRecStatus.Success = false;
2601
- newRecStatus.Message = `Error updating dependency record ${dependency.PrimaryKey.ToString} for entity ${dependency.RelatedEntityName} to point to surviving record ${request.SurvivingRecordCompositeKey.ToString()}`;
2602
- throw new Error(newRecStatus.Message);
2603
- }
2604
- }
2605
- // if we get here, that means that all of the dependencies were updated successfully, so we can now delete the records to be merged
2606
- const recordToDelete = await this.GetEntityObject(request.EntityName, contextUser);
2607
- await recordToDelete.InnerLoad(pksToDelete);
2608
- if (!(await recordToDelete.Delete())) {
2609
- newRecStatus.Message = `Error deleting record ${pksToDelete.ToString()} for entity ${request.EntityName}`;
2610
- throw new Error(newRecStatus.Message);
2611
- }
2612
- else
2613
- newRecStatus.Success = true;
2614
- }
2615
- result.Success = true;
2616
- await this.CompleteMergeLogging(mergeRecordLog, result, contextUser);
2617
- // Step 5 - commit transaction
2618
- await this.CommitTransaction();
2619
- result.Success = true;
2620
- return result;
2621
- }
2622
- catch (e) {
2623
- LogError(e);
2624
- await this.RollbackTransaction();
2625
- // attempt to persist the status to the DB, although that might fail
2626
- await this.CompleteMergeLogging(mergeRecordLog, result, contextUser);
2627
- throw e;
2628
- }
2629
- }
2630
- async StartMergeLogging(request, result, contextUser) {
2631
- try {
2632
- // create records in the Record Merge Logs entity and Record Merge Deletion Logs entity
2633
- const recordMergeLog = await this.GetEntityObject('MJ: Record Merge Logs', contextUser);
2634
- const entity = this.Entities.find((e) => e.Name === request.EntityName);
2635
- if (!entity)
2636
- throw new Error(`Entity ${result.Request.EntityName} not found in metadata`);
2637
- if (!contextUser && !this.CurrentUser)
2638
- throw new Error(`contextUser is null and no CurrentUser is set`);
2639
- recordMergeLog.NewRecord();
2640
- recordMergeLog.EntityID = entity.ID;
2641
- recordMergeLog.SurvivingRecordID = request.SurvivingRecordCompositeKey.Values(); // this would join together all of the primary key values, which is fine as the primary key is a string
2642
- recordMergeLog.InitiatedByUserID = contextUser ? contextUser.ID : this.CurrentUser?.ID;
2643
- recordMergeLog.ApprovalStatus = 'Approved';
2644
- recordMergeLog.ApprovedByUserID = contextUser ? contextUser.ID : this.CurrentUser?.ID;
2645
- recordMergeLog.ProcessingStatus = 'Started';
2646
- recordMergeLog.ProcessingStartedAt = new Date();
2647
- if (await recordMergeLog.Save()) {
2648
- result.RecordMergeLogID = recordMergeLog.ID;
2649
- return recordMergeLog;
2650
- }
2651
- else
2652
- throw new Error(`Error saving record merge log`);
2653
- }
2654
- catch (e) {
2655
- LogError(e);
2656
- throw e;
2657
- }
2658
- }
2659
- async CompleteMergeLogging(recordMergeLog, result, contextUser) {
2660
- try {
2661
- // create records in the Record Merge Logs entity and Record Merge Deletion Logs entity
2662
- if (!contextUser && !this.CurrentUser)
2663
- throw new Error(`contextUser is null and no CurrentUser is set`);
2664
- recordMergeLog.ProcessingStatus = result.Success ? 'Complete' : 'Error';
2665
- recordMergeLog.ProcessingEndedAt = new Date();
2666
- if (!result.Success)
2667
- // only create the log record if the merge failed, otherwise it is wasted space
2668
- recordMergeLog.ProcessingLog = result.OverallStatus;
2669
- if (await recordMergeLog.Save()) {
2670
- // top level saved, now let's create the deletion detail records for each of the records that were merged
2671
- for (const d of result.RecordStatus) {
2672
- const recordMergeDeletionLog = await this.GetEntityObject('MJ: Record Merge Deletion Logs', contextUser);
2673
- recordMergeDeletionLog.NewRecord();
2674
- recordMergeDeletionLog.RecordMergeLogID = recordMergeLog.ID;
2675
- recordMergeDeletionLog.DeletedRecordID = d.CompositeKey.Values(); // this would join together all of the primary key values, which is fine as the primary key is a string
2676
- recordMergeDeletionLog.Status = d.Success ? 'Complete' : 'Error';
2677
- recordMergeDeletionLog.ProcessingLog = d.Success ? null : d.Message; // only save the message if it failed
2678
- if (!(await recordMergeDeletionLog.Save()))
2679
- throw new Error(`Error saving record merge deletion log`);
2680
- }
2681
- }
2682
- else
2683
- throw new Error(`Error saving record merge log`);
2684
- }
2685
- catch (e) {
2686
- // do nothing here because we often will get here since some conditions lead to no DB updates possible...
2687
- LogError(e);
2688
- // don't bubble up the error here as we're sometimes already in an exception block in caller
2689
- }
2690
- }
2691
875
  /**
2692
876
  * Generates the SQL Statement that will Save a record to the database.
2693
877
  *
@@ -2802,311 +986,17 @@ export class SQLServerDataProvider extends DatabaseProviderBase {
2802
986
  return { fullSQL: sSQL, simpleSQL: sSimpleSQL, overlappingChangeData };
2803
987
  }
2804
988
  /**
2805
- * Gets AI actions configured for an entity based on trigger timing
2806
- *
2807
- * @param entityInfo - The entity to get AI actions for
2808
- * @param before - True to get before-save actions, false for after-save
2809
- * @returns Array of AI action entities
2810
- * @internal
2811
- */
2812
- GetEntityAIActions(entityInfo, before) {
2813
- return AIEngine.Instance.EntityAIActions.filter((a) => a.EntityID === entityInfo.ID && a.TriggerEvent.toLowerCase().trim() === (before ? 'before save' : 'after save'));
2814
- }
2815
- /**
2816
- * Handles entity actions (non-AI) for save, delete, or validate operations
2817
- *
2818
- * @param entity - The entity being operated on
2819
- * @param baseType - The type of operation
2820
- * @param before - Whether this is before or after the operation
2821
- * @param user - The user performing the operation
2822
- * @returns Array of action results
2823
- * @internal
989
+ * Override to defer AI action tasks when a transaction is active.
990
+ * When inside a transaction, tasks are queued to _deferredTasks and
991
+ * processed after transaction commit (see processDeferredTasks).
2824
992
  */
2825
- async HandleEntityActions(entity, baseType, before, user) {
2826
- // use the EntityActionEngine for this
2827
- try {
2828
- const engine = EntityActionEngineServer.Instance;
2829
- await engine.Config(false, user);
2830
- const newRecord = entity.IsSaved ? false : true;
2831
- const baseTypeType = baseType === 'save' ? (newRecord ? 'Create' : 'Update') : 'Delete';
2832
- const invocationType = baseType === 'validate' ? 'Validate' : before ? 'Before' + baseTypeType : 'After' + baseTypeType;
2833
- const invocationTypeEntity = engine.InvocationTypes.find((i) => i.Name === invocationType);
2834
- if (!invocationTypeEntity) {
2835
- LogError(`Invocation Type ${invocationType} not found in metadata`);
2836
- return [];
2837
- // throw new Error(`Invocation Type ${invocationType} not found in metadata`);
2838
- }
2839
- const activeActions = engine.GetActionsByEntityNameAndInvocationType(entity.EntityInfo.Name, invocationType, 'Active');
2840
- const results = [];
2841
- for (const a of activeActions) {
2842
- const result = await engine.RunEntityAction({
2843
- EntityAction: a,
2844
- EntityObject: entity,
2845
- InvocationType: invocationTypeEntity,
2846
- ContextUser: user,
2847
- });
2848
- results.push(result);
2849
- }
2850
- return results;
2851
- }
2852
- catch (e) {
2853
- LogError(e);
2854
- return [];
2855
- }
2856
- }
2857
- /**
2858
- * Handles Entity AI Actions. Parameters are setup for a future support of delete actions, but currently that isn't supported so the baseType parameter
2859
- * isn't fully functional. If you pass in delete, the function will just exit for now, and in the future calling code will start working when we support
2860
- * Delete as a trigger event for Entity AI Actions...
2861
- * @param entity
2862
- * @param baseType
2863
- * @param before
2864
- * @param user
2865
- */
2866
- async HandleEntityAIActions(entity, baseType, before, user) {
2867
- try {
2868
- // TEMP while we don't support delete
2869
- if (baseType === 'delete')
2870
- return;
2871
- // Make sure AI Metadata is loaded here...
2872
- await AIEngine.Instance.Config(false, user);
2873
- const actions = this.GetEntityAIActions(entity.EntityInfo, before); // get the actions we need to do for this entity
2874
- if (actions && actions.length > 0) {
2875
- const ai = AIEngine.Instance;
2876
- for (let i = 0; i < actions.length; i++) {
2877
- const a = actions[i];
2878
- if ((a.TriggerEvent === 'before save' && before) || (a.TriggerEvent === 'after save' && !before)) {
2879
- const p = {
2880
- entityAIActionId: a.ID,
2881
- entityRecord: entity,
2882
- actionId: a.AIActionID,
2883
- modelId: a.AIModelID,
2884
- };
2885
- if (before) {
2886
- // do it with await so we're blocking, as it needs to complete before the record save continues
2887
- await ai.ExecuteEntityAIAction(p);
2888
- }
2889
- else {
2890
- // just add a task and move on, we are doing 'after save' so we don't wait
2891
- try {
2892
- if (this.isTransactionActive) {
2893
- // Defer the task until after the transaction completes
2894
- this._deferredTasks.push({ type: 'Entity AI Action', data: p, options: null, user });
2895
- }
2896
- else {
2897
- // No transaction active, add the task immediately
2898
- QueueManager.AddTask('Entity AI Action', p, null, user);
2899
- }
2900
- }
2901
- catch (e) {
2902
- LogError(e.message);
2903
- }
2904
- }
2905
- }
2906
- }
2907
- }
2908
- }
2909
- catch (e) {
2910
- LogError(e);
2911
- }
2912
- }
2913
- async Save(entity, user, options) {
2914
- const entityResult = new BaseEntityResult();
2915
- try {
2916
- entity.RegisterTransactionPreprocessing();
2917
- const bNewRecord = !entity.IsSaved;
2918
- if (!options)
2919
- options = new EntitySaveOptions();
2920
- const bReplay = !!options.ReplayOnly;
2921
- if (!bReplay && !bNewRecord && !entity.EntityInfo.AllowUpdateAPI) {
2922
- // existing record and not allowed to update
2923
- throw new Error(`UPDATE not allowed for entity ${entity.EntityInfo.Name}`);
2924
- }
2925
- else if (!bReplay && bNewRecord && !entity.EntityInfo.AllowCreateAPI) {
2926
- // new record and not allowed to create
2927
- throw new Error(`CREATE not allowed for entity ${entity.EntityInfo.Name}`);
2928
- }
2929
- else {
2930
- // getting here means we are good to save, now check to see if we're dirty and need to save
2931
- // REMEMBER - this is the provider and the BaseEntity/subclasses handle user-level permission checking already, we just make sure API was turned on for the operation
2932
- if (entity.Dirty || options.IgnoreDirtyState || options.ReplayOnly) {
2933
- entityResult.StartedAt = new Date();
2934
- entityResult.Type = bNewRecord ? 'create' : 'update';
2935
- entityResult.OriginalValues = entity.Fields.map((f) => {
2936
- const tempStatus = f.ActiveStatusAssertions;
2937
- f.ActiveStatusAssertions = false; // turn off warnings for this operation
2938
- const ret = {
2939
- FieldName: f.Name,
2940
- Value: f.Value
2941
- };
2942
- f.ActiveStatusAssertions = tempStatus; // restore the status assertions
2943
- return ret;
2944
- }); // save the original values before we start the process
2945
- entity.ResultHistory.push(entityResult); // push the new result as we have started a process
2946
- // The assumption is that Validate() has already been called by the BaseEntity object that is invoking this provider.
2947
- // However, we have an extra responsibility in this situation which is to fire off the EntityActions for the Validate invocation type and
2948
- // make sure they clear. If they don't clear we throw an exception with the message provided.
2949
- if (!bReplay) {
2950
- const validationResult = await this.HandleEntityActions(entity, 'validate', false, user);
2951
- if (validationResult && validationResult.length > 0) {
2952
- // one or more actions executed, see the reults and if any failed, concat their messages and return as exception being thrown
2953
- const message = validationResult
2954
- .filter((v) => !v.Success)
2955
- .map((v) => v.Message)
2956
- .join('\n\n');
2957
- if (message) {
2958
- entityResult.Success = false;
2959
- entityResult.EndedAt = new Date();
2960
- entityResult.Message = message;
2961
- return false;
2962
- }
2963
- }
2964
- }
2965
- else {
2966
- // we are in replay mode we so do NOT need to do the validation stuff, skipping it...
2967
- }
2968
- const spName = this.GetCreateUpdateSPName(entity, bNewRecord);
2969
- if (options.SkipEntityActions !== true /*options set, but not set to skip entity actions*/) {
2970
- await this.HandleEntityActions(entity, 'save', true, user);
2971
- }
2972
- if (options.SkipEntityAIActions !== true /*options set, but not set to skip entity AI actions*/) {
2973
- // process any Entity AI actions that are set to trigger BEFORE the save, these are generally a really bad idea to do before save
2974
- // but they are supported (for now)
2975
- await this.HandleEntityAIActions(entity, 'save', true, user);
2976
- }
2977
- // Generate the SQL for the save operation
2978
- // This is async because it may need to encrypt field values
2979
- const sqlDetails = await this.GetSaveSQLWithDetails(entity, bNewRecord, spName, user);
2980
- const sSQL = sqlDetails.fullSQL;
2981
- if (entity.TransactionGroup && !bReplay /*we never participate in a transaction if we're in replay mode*/) {
2982
- // we have a transaction group, need to play nice and be part of it
2983
- entity.RaiseReadyForTransaction(); // let the entity know we're ready to be part of the transaction
2984
- // we are part of a transaction group, so just add our query to the list
2985
- // and when the transaction is committed, we will send all the queries at once
2986
- this._bAllowRefresh = false; // stop refreshes of metadata while we're doing work
2987
- entity.TransactionGroup.AddTransaction(new TransactionItem(entity, entityResult.Type === 'create' ? 'Create' : 'Update', sSQL, null, {
2988
- dataSource: this._pool,
2989
- simpleSQLFallback: entity.EntityInfo.TrackRecordChanges ? sqlDetails.simpleSQL : undefined,
2990
- entityName: entity.EntityInfo.Name
2991
- }, (transactionResult, success) => {
2992
- // we get here whenever the transaction group does gets around to committing
2993
- // our query.
2994
- this._bAllowRefresh = true; // allow refreshes again
2995
- entityResult.EndedAt = new Date();
2996
- if (success && transactionResult) {
2997
- // process any Entity AI actions that are set to trigger AFTER the save
2998
- // these are fired off but are NOT part of the transaction group, so if they fail,
2999
- // the transaction group will still commit, but the AI action will not be executed
3000
- if (options.SkipEntityAIActions !== true /*options set, but not set to skip entity AI actions*/) {
3001
- this.HandleEntityAIActions(entity, 'save', false, user); // NO AWAIT INTENTIONALLY
3002
- }
3003
- // Same approach to Entity Actions as Entity AI Actions
3004
- if (options.SkipEntityActions !== true) {
3005
- this.HandleEntityActions(entity, 'save', false, user); // NO AWAIT INTENTIONALLY
3006
- }
3007
- entityResult.Success = true;
3008
- entityResult.NewValues = this.MapTransactionResultToNewValues(transactionResult);
3009
- }
3010
- else {
3011
- // the transaction failed, nothing to update, but we need to call Reject so the
3012
- // promise resolves with a rejection so our outer caller knows
3013
- entityResult.Success = false;
3014
- entityResult.Message = 'Transaction Failed';
3015
- }
3016
- }));
3017
- return true; // we're part of a transaction group, so we're done here
3018
- }
3019
- else {
3020
- // no transaction group, just execute this immediately...
3021
- this._bAllowRefresh = false; // stop refreshes of metadata while we're doing work
3022
- let result;
3023
- if (bReplay) {
3024
- result = [entity.GetAll()]; // just return the entity as it was before the save as we are NOT saving anything as we are in replay mode
3025
- }
3026
- else {
3027
- try {
3028
- // Execute SQL with optional simple SQL fallback for loggers
3029
- // IS-A: use entity's ProviderTransaction when available for shared transaction
3030
- const rawResult = await this.ExecuteSQL(sSQL, null, {
3031
- isMutation: true,
3032
- description: `Save ${entity.EntityInfo.Name}`,
3033
- simpleSQLFallback: entity.EntityInfo.TrackRecordChanges ? sqlDetails.simpleSQL : undefined,
3034
- connectionSource: entity.ProviderTransaction ?? undefined
3035
- }, user);
3036
- // Process rows with user context for decryption
3037
- result = await this.ProcessEntityRows(rawResult, entity.EntityInfo, user);
3038
- }
3039
- catch (e) {
3040
- throw e; // rethrow
3041
- }
3042
- }
3043
- this._bAllowRefresh = true; // allow refreshes now
3044
- entityResult.EndedAt = new Date();
3045
- if (result && result.length > 0) {
3046
- // Entity AI Actions - fired off async, NO await on purpose
3047
- if (options.SkipEntityAIActions !== true /*options set, but not set to skip entity AI actions*/)
3048
- this.HandleEntityAIActions(entity, 'save', false, user); // fire off any AFTER SAVE AI actions, but don't wait for them
3049
- // Entity Actions - fired off async, NO await on purpose
3050
- if (options.SkipEntityActions !== true)
3051
- this.HandleEntityActions(entity, 'save', false, user); // NO AWAIT INTENTIONALLY
3052
- entityResult.Success = true;
3053
- // IS-A overlapping subtypes: propagate Record Change entries to sibling branches.
3054
- // Runs after this entity's save succeeds. Skips the active child branch (if this
3055
- // is a parent save in a chain) so siblings don't get duplicate entries.
3056
- if (sqlDetails.overlappingChangeData
3057
- && entity.EntityInfo.AllowMultipleSubtypes
3058
- && entity.EntityInfo.TrackRecordChanges) {
3059
- await this.PropagateRecordChangesToSiblings(entity.EntityInfo, sqlDetails.overlappingChangeData, entity.PrimaryKey.Values(), user?.ID ?? '', options.ISAActiveChildEntityName, entity.ProviderTransaction ?? undefined);
3060
- }
3061
- return result[0];
3062
- }
3063
- else {
3064
- if (bNewRecord) {
3065
- throw new Error(`SQL Error: Error creating new record, no rows returned from SQL: ` + sSQL);
3066
- }
3067
- else {
3068
- // if we get here that means that SQL did NOT find a matching row to update in the DB, so we need to throw an error
3069
- throw new Error(`SQL Error: Error updating record, no MATCHING rows found within the database: ` + sSQL);
3070
- }
3071
- }
3072
- }
3073
- }
3074
- else {
3075
- return entity; // nothing to save, just return the entity
3076
- }
3077
- }
3078
- }
3079
- catch (e) {
3080
- this._bAllowRefresh = true; // allow refreshes again if we get a failure here
3081
- entityResult.EndedAt = new Date();
3082
- entityResult.Message = e.message;
3083
- LogError(e);
3084
- throw e; // rethrow the error
993
+ EnqueueAfterSaveAIAction(params, user) {
994
+ if (this.isTransactionActive) {
995
+ this._deferredTasks.push({ type: 'Entity AI Action', data: params, options: null, user });
996
+ }
997
+ else {
998
+ QueueManager.AddTask('Entity AI Action', params, null, user);
3085
999
  }
3086
- }
3087
- MapTransactionResultToNewValues(transactionResult) {
3088
- return Object.keys(transactionResult).map((k) => {
3089
- return {
3090
- FieldName: k,
3091
- Value: transactionResult[k],
3092
- };
3093
- }); // transform the result into a list of field/value pairs
3094
- }
3095
- /**
3096
- * Returns the stored procedure name to use for the given entity based on if it is a new record or an existing record.
3097
- * @param entity
3098
- * @param bNewRecord
3099
- * @returns
3100
- */
3101
- GetCreateUpdateSPName(entity, bNewRecord) {
3102
- const spName = bNewRecord
3103
- ? entity.EntityInfo.spCreate?.length > 0
3104
- ? entity.EntityInfo.spCreate
3105
- : 'spCreate' + entity.EntityInfo.BaseTableCodeName
3106
- : entity.EntityInfo.spUpdate?.length > 0
3107
- ? entity.EntityInfo.spUpdate
3108
- : 'spUpdate' + entity.EntityInfo.BaseTableCodeName;
3109
- return spName;
3110
1000
  }
3111
1001
  getAllEntityColumnsSQL(entityInfo) {
3112
1002
  let sRet = '', outputCount = 0;
@@ -3373,7 +1263,7 @@ export class SQLServerDataProvider extends DatabaseProviderBase {
3373
1263
  return unicodePrefix + quoteString + pVal + quoteString;
3374
1264
  }
3375
1265
  GetLogRecordChangeSQL(newData, oldData, entityName, recordID, entityInfo, type, user, wrapRecordIdInQuotes) {
3376
- const fullRecordJSON = JSON.stringify(this.escapeQuotesInProperties(newData ? newData : oldData, "'")); // stringify old data if we don't have new - means we are DELETING A RECORD
1266
+ const fullRecordJSON = JSON.stringify(this.EscapeQuotesInProperties(newData ? newData : oldData, "'")); // stringify old data if we don't have new - means we are DELETING A RECORD
3377
1267
  const changes = this.DiffObjects(oldData, newData, entityInfo, "'");
3378
1268
  const changesKeys = changes ? Object.keys(changes) : [];
3379
1269
  if (changesKeys.length > 0 || oldData === null /*new record*/ || newData === null /*deleted record*/) {
@@ -3393,280 +1283,14 @@ export class SQLServerDataProvider extends DatabaseProviderBase {
3393
1283
  else
3394
1284
  return null;
3395
1285
  }
3396
- async LogRecordChange(newData, oldData, entityName, recordID, entityInfo, type, user) {
3397
- const sSQL = this.GetLogRecordChangeSQL(newData, oldData, entityName, recordID, entityInfo, type, user, true);
3398
- if (sSQL) {
3399
- const result = await this.ExecuteSQL(sSQL, undefined, undefined, user);
3400
- return result;
3401
- }
3402
- }
3403
- /**
3404
- * This method will create a human-readable string that describes the changes object that was created using the DiffObjects() method
3405
- * @param changesObject JavaScript object that has properties for each changed field that in turn have field, oldValue and newValue as sub-properties
3406
- * @param maxValueLength If not specified, default value of 200 characters applies where any values after the maxValueLength is cut off. The actual values are stored in the ChangesJSON and FullRecordJSON in the RecordChange table, this is only for the human-display
3407
- * @param cutOffText If specified, and if maxValueLength applies to any of the values being included in the description, this cutOffText param will be appended to the end of the cut off string to indicate to the human reader that the value is partial.
3408
- * @returns
3409
- */
3410
- CreateUserDescriptionOfChanges(changesObject, maxValueLength = 200, cutOffText = '...') {
3411
- let sRet = '';
3412
- const keys = Object.keys(changesObject);
3413
- for (let i = 0; i < keys.length; i++) {
3414
- const change = changesObject[keys[i]];
3415
- if (sRet.length > 0) {
3416
- sRet += '\n';
3417
- }
3418
- if (change.oldValue && change.newValue)
3419
- // both old and new values set, show change
3420
- sRet += `${change.field} changed from ${this.trimString(change.oldValue, maxValueLength, cutOffText)} to ${this.trimString(change.newValue, maxValueLength, cutOffText)}`;
3421
- else if (change.newValue)
3422
- // old value was blank, new value isn't
3423
- sRet += `${change.field} set to ${this.trimString(change.newValue, maxValueLength, cutOffText)}`;
3424
- else if (change.oldValue)
3425
- // new value is blank, old value wasn't
3426
- sRet += `${change.field} cleared from ${this.trimString(change.oldValue, maxValueLength, cutOffText)}`;
3427
- }
3428
- return sRet.replace(/'/g, "''");
3429
- }
3430
- trimString(value, maxLength, trailingChars) {
3431
- if (value && typeof value === 'string' && value.length > maxLength) {
3432
- value = value.substring(0, maxLength) + trailingChars;
3433
- }
3434
- return value;
3435
- }
3436
- /**
3437
- * Recursively escapes quotes in all string properties of an object or array.
3438
- * This method traverses through nested objects and arrays, escaping the specified
3439
- * quote character in all string values to prevent SQL injection and syntax errors.
3440
- *
3441
- * @param obj - The object, array, or primitive value to process
3442
- * @param quoteToEscape - The quote character to escape (typically single quote "'")
3443
- * @returns A new object/array with all string values having quotes properly escaped.
3444
- * Non-string values are preserved as-is.
3445
- *
3446
- * @example
3447
- * // Escaping single quotes in a nested object
3448
- * const input = {
3449
- * name: "John's Company",
3450
- * details: {
3451
- * description: "It's the best",
3452
- * tags: ["Won't fail", "Can't stop"]
3453
- * }
3454
- * };
3455
- * const escaped = this.escapeQuotesInProperties(input, "'");
3456
- * // Result: {
3457
- * // name: "John''s Company",
3458
- * // details: {
3459
- * // description: "It''s the best",
3460
- * // tags: ["Won''t fail", "Can''t stop"]
3461
- * // }
3462
- * // }
3463
- *
3464
- * @remarks
3465
- * This method is essential for preparing data to be embedded in SQL strings.
3466
- * It handles:
3467
- * - Nested objects of any depth
3468
- * - Arrays (including arrays of objects)
3469
- * - Mixed-type objects with strings, numbers, booleans, null values
3470
- * - Circular references are NOT handled and will cause stack overflow
3471
- */
3472
- escapeQuotesInProperties(obj, quoteToEscape) {
3473
- // Handle null/undefined
3474
- if (obj === null || obj === undefined) {
3475
- return obj;
3476
- }
3477
- // Handle arrays recursively
3478
- if (Array.isArray(obj)) {
3479
- return obj.map(item => this.escapeQuotesInProperties(item, quoteToEscape));
3480
- }
3481
- // Handle Date objects - convert to ISO string before they lose their value
3482
- if (obj instanceof Date) {
3483
- return obj.toISOString();
3484
- }
3485
- // Handle objects recursively
3486
- if (typeof obj === 'object') {
3487
- const sRet = {};
3488
- for (const key in obj) {
3489
- if (obj.hasOwnProperty(key)) {
3490
- const element = obj[key];
3491
- if (typeof element === 'string') {
3492
- const reg = new RegExp(quoteToEscape, 'g');
3493
- sRet[key] = element.replace(reg, quoteToEscape + quoteToEscape);
3494
- }
3495
- else if (typeof element === 'object') {
3496
- // Recursively escape nested objects and arrays
3497
- sRet[key] = this.escapeQuotesInProperties(element, quoteToEscape);
3498
- }
3499
- else {
3500
- // Keep primitive values as-is (numbers, booleans, etc.)
3501
- sRet[key] = element;
3502
- }
3503
- }
3504
- }
3505
- return sRet;
3506
- }
3507
- // For non-object types (shouldn't normally happen), return as-is
3508
- return obj;
3509
- }
3510
1286
  /**
3511
- * Creates a changes object by comparing two javascript objects, identifying fields that have different values.
3512
- * Each property in the returned object represents a changed field, with the field name as the key.
3513
- *
3514
- * @param oldData - The original data object to compare from
3515
- * @param newData - The new data object to compare to
3516
- * @param entityInfo - Entity metadata used to validate fields and determine comparison logic
3517
- * @param quoteToEscape - The quote character to escape in string values (typically "'")
3518
- * @returns A Record mapping field names to FieldChange objects containing the field name, old value, and new value.
3519
- * Returns null if either oldData or newData is null/undefined.
3520
- * Only includes fields that have actually changed and are not read-only.
3521
- *
3522
- * @remarks
3523
- * - Read-only fields are never considered changed
3524
- * - null and undefined are treated as equivalent
3525
- * - Date fields are compared by timestamp
3526
- * - String and object values have quotes properly escaped for SQL
3527
- * - Objects/arrays are recursively escaped using escapeQuotesInProperties
3528
- *
3529
- * @example
3530
- * ```typescript
3531
- * const changes = provider.DiffObjects(
3532
- * { name: "John's Co", revenue: 1000 },
3533
- * { name: "John's Co", revenue: 2000 },
3534
- * entityInfo,
3535
- * "'"
3536
- * );
3537
- * // Returns: { revenue: { field: "revenue", oldValue: 1000, newValue: 2000 } }
3538
- * ```
1287
+ * Implements the abstract BuildRecordChangeSQL from DatabaseProviderBase.
1288
+ * Delegates to GetLogRecordChangeSQL for T-SQL generation.
3539
1289
  */
3540
- DiffObjects(oldData, newData, entityInfo, quoteToEscape) {
3541
- if (!oldData || !newData)
3542
- return null;
3543
- else {
3544
- const changes = {};
3545
- for (const key in newData) {
3546
- const f = entityInfo.Fields.find((f) => f.Name.toLowerCase() === key.toLowerCase());
3547
- if (!f) {
3548
- continue; // skip if field not found in entity info, sometimes objects have extra properties that are not part of the entity
3549
- }
3550
- let bDiff = false;
3551
- if (f.ReadOnly)
3552
- bDiff = false; // read only fields are never different, they can change in the database, but we don't consider them to be a change for record changes purposes.
3553
- else if ((oldData[key] == undefined || oldData[key] == null) && (newData[key] == undefined || newData[key] == null))
3554
- bDiff = false; // this branch of logic ensures that undefined and null are treated the same
3555
- else {
3556
- switch (f.TSType) {
3557
- case EntityFieldTSType.String:
3558
- bDiff = oldData[key] !== newData[key];
3559
- break;
3560
- case EntityFieldTSType.Date:
3561
- bDiff = new Date(oldData[key]).getTime() !== new Date(newData[key]).getTime();
3562
- break;
3563
- case EntityFieldTSType.Number:
3564
- case EntityFieldTSType.Boolean:
3565
- bDiff = oldData[key] !== newData[key];
3566
- break;
3567
- }
3568
- }
3569
- if (bDiff) {
3570
- // make sure we escape things properly
3571
- let o = oldData[key];
3572
- let n = newData[key];
3573
- if (typeof o === 'string') {
3574
- // Escape strings directly
3575
- const r = new RegExp(quoteToEscape, 'g');
3576
- o = o.replace(r, quoteToEscape + quoteToEscape);
3577
- }
3578
- else if (typeof o === 'object' && o !== null) {
3579
- // For objects/arrays, recursively escape all string properties
3580
- o = this.escapeQuotesInProperties(o, quoteToEscape);
3581
- }
3582
- if (typeof n === 'string') {
3583
- // Escape strings directly
3584
- const r = new RegExp(quoteToEscape, 'g');
3585
- n = n.replace(r, quoteToEscape + quoteToEscape);
3586
- }
3587
- else if (typeof n === 'object' && n !== null) {
3588
- // For objects/arrays, recursively escape all string properties
3589
- n = this.escapeQuotesInProperties(n, quoteToEscape);
3590
- }
3591
- changes[key] = {
3592
- field: key,
3593
- oldValue: o,
3594
- newValue: n,
3595
- };
3596
- }
3597
- }
3598
- return changes;
3599
- }
3600
- }
3601
- async Load(entity, CompositeKey, EntityRelationshipsToLoad = null, user) {
3602
- const where = CompositeKey.KeyValuePairs.map((val) => {
3603
- const pk = entity.EntityInfo.PrimaryKeys.find((pk) => pk.Name.trim().toLowerCase() === val.FieldName.trim().toLowerCase());
3604
- if (!pk)
3605
- throw new Error(`Primary key ${val.FieldName} not found in entity ${entity.EntityInfo.Name}`);
3606
- const quotes = pk.NeedsQuotes ? "'" : '';
3607
- return `[${pk.CodeName}]=${quotes}${val.Value}${quotes}`;
3608
- }).join(' AND ');
3609
- const sql = `SELECT * FROM [${entity.EntityInfo.SchemaName}].${entity.EntityInfo.BaseView} WHERE ${where}`;
3610
- const rawData = await this.ExecuteSQL(sql, undefined, undefined, user);
3611
- // Process rows with user context for decryption
3612
- const d = await this.ProcessEntityRows(rawData, entity.EntityInfo, user);
3613
- if (d && d.length > 0) {
3614
- // got the record, now process the relationships if there are any
3615
- const ret = d[0];
3616
- // we need to post process the retrieval to see if we have any char or nchar fields and we need to remove their trailing spaces
3617
- for (const field of entity.EntityInfo.Fields) {
3618
- if (field.TSType === EntityFieldTSType.String &&
3619
- field.Type.toLowerCase().includes('char') &&
3620
- !field.Type.toLowerCase().includes('varchar')) {
3621
- // trim trailing spaces for char and nchar fields
3622
- ret[field.Name] = ret[field.Name] ? ret[field.Name].trimEnd() : ret[field.Name];
3623
- }
3624
- }
3625
- if (EntityRelationshipsToLoad && EntityRelationshipsToLoad.length > 0) {
3626
- for (let i = 0; i < EntityRelationshipsToLoad.length; i++) {
3627
- const rel = EntityRelationshipsToLoad[i];
3628
- const relInfo = entity.EntityInfo.RelatedEntities.find((r) => r.RelatedEntity == rel);
3629
- if (relInfo) {
3630
- let relSql = '';
3631
- const relEntitySchemaName = this.Entities.find((e) => e.Name.trim().toLowerCase() === relInfo.RelatedEntity.trim().toLowerCase())?.SchemaName;
3632
- const quotes = entity.FirstPrimaryKey.NeedsQuotes ? "'" : '';
3633
- if (relInfo.Type.trim().toLowerCase() === 'one to many')
3634
- // one to many - simple query
3635
- relSql = ` SELECT
3636
- *
3637
- FROM
3638
- [${relEntitySchemaName}].[${relInfo.RelatedEntityBaseView}]
3639
- WHERE
3640
- [${relInfo.RelatedEntityJoinField}] = ${quotes}${ret[entity.FirstPrimaryKey.Name]}${quotes}`;
3641
- // don't yet support composite foreign keys
3642
- // many to many - need to use join view
3643
- else
3644
- relSql = ` SELECT
3645
- _theview.*
3646
- FROM
3647
- [${relEntitySchemaName}].[${relInfo.RelatedEntityBaseView}] _theview
3648
- INNER JOIN
3649
- [${relEntitySchemaName}].[${relInfo.JoinView}] _jv ON _theview.[${relInfo.RelatedEntityJoinField}] = _jv.[${relInfo.JoinEntityInverseJoinField}]
3650
- WHERE
3651
- _jv.${relInfo.JoinEntityJoinField} = ${quotes}${ret[entity.FirstPrimaryKey.Name]}${quotes}`; // don't yet support composite foreign keys
3652
- const rawRelData = await this.ExecuteSQL(relSql, undefined, undefined, user);
3653
- if (rawRelData && rawRelData.length > 0) {
3654
- // Find the related entity info to process datetime fields correctly
3655
- const relEntityInfo = this.Entities.find((e) => e.Name.trim().toLowerCase() === relInfo.RelatedEntity.trim().toLowerCase());
3656
- if (relEntityInfo) {
3657
- ret[rel] = await this.ProcessEntityRows(rawRelData, relEntityInfo, user);
3658
- }
3659
- else {
3660
- // Fallback if we can't find entity info
3661
- ret[rel] = rawRelData;
3662
- }
3663
- }
3664
- }
3665
- }
3666
- }
3667
- return ret;
3668
- }
3669
- // if we get here, something didn't go right
1290
+ BuildRecordChangeSQL(newData, oldData, entityName, recordID, entityInfo, type, user) {
1291
+ const sql = this.GetLogRecordChangeSQL(newData, oldData, entityName, recordID, entityInfo, type, user, true);
1292
+ if (sql)
1293
+ return { sql };
3670
1294
  return null;
3671
1295
  }
3672
1296
  /**
@@ -3746,647 +1370,144 @@ export class SQLServerDataProvider extends DatabaseProviderBase {
3746
1370
  }
3747
1371
  return { fullSQL: sSQL, simpleSQL: sSimpleSQL };
3748
1372
  }
3749
- async Delete(entity, options, user) {
3750
- const result = new BaseEntityResult();
3751
- try {
3752
- entity.RegisterTransactionPreprocessing();
3753
- if (!options)
3754
- options = new EntityDeleteOptions();
3755
- const bReplay = options.ReplayOnly;
3756
- if (!entity.IsSaved && !bReplay)
3757
- // existing record and not allowed to update
3758
- throw new Error(`Delete() isn't callable for records that haven't yet been saved - ${entity.EntityInfo.Name}`);
3759
- if (!entity.EntityInfo.AllowDeleteAPI && !bReplay)
3760
- // not allowed to delete
3761
- throw new Error(`Delete() isn't callable for ${entity.EntityInfo.Name} as AllowDeleteAPI is false`);
3762
- result.StartedAt = new Date();
3763
- result.Type = 'delete';
3764
- result.OriginalValues = entity.Fields.map((f) => {
3765
- return { FieldName: f.Name, Value: f.Value };
3766
- }); // save the original values before we start the process
3767
- entity.ResultHistory.push(result); // push the new result as we have started a process
3768
- // REMEMBER - this is the provider and the BaseEntity/subclasses handle user-level permission checking already, we just make sure API was turned on for the operation
3769
- // if we get here we can delete, so build the SQL and then handle appropriately either as part of TransGroup or directly...
3770
- const sqlDetails = this.GetDeleteSQLWithDetails(entity, user);
3771
- const sSQL = sqlDetails.fullSQL;
3772
- // Handle Entity and Entity AI Actions here w/ before and after handling
3773
- if (false === options?.SkipEntityActions)
3774
- await this.HandleEntityActions(entity, 'delete', true, user);
3775
- if (false === options?.SkipEntityAIActions)
3776
- await this.HandleEntityAIActions(entity, 'delete', true, user);
3777
- if (entity.TransactionGroup && !bReplay) {
3778
- // we have a transaction group, need to play nice and be part of it
3779
- entity.RaiseReadyForTransaction();
3780
- // we are part of a transaction group, so just add our query to the list
3781
- // and when the transaction is committed, we will send all the queries at once
3782
- entity.TransactionGroup.AddTransaction(new TransactionItem(entity, 'Delete', sSQL, null, {
3783
- dataSource: this._pool,
3784
- simpleSQLFallback: entity.EntityInfo.TrackRecordChanges ? sqlDetails.simpleSQL : undefined,
3785
- entityName: entity.EntityInfo.Name
3786
- }, (transactionResult, success) => {
3787
- // we get here whenever the transaction group does gets around to committing
3788
- // our query.
3789
- result.EndedAt = new Date();
3790
- if (success && result) {
3791
- // Entity AI Actions and Actions - fired off async, NO await on purpose
3792
- if (false === options?.SkipEntityActions) {
3793
- this.HandleEntityActions(entity, 'delete', false, user);
3794
- }
3795
- if (false === options?.SkipEntityAIActions) {
3796
- this.HandleEntityAIActions(entity, 'delete', false, user);
3797
- }
3798
- // Make sure the return value matches up as that is how we know the SP was succesfully internally
3799
- for (const key of entity.PrimaryKeys) {
3800
- if (key.Value !== transactionResult[key.Name]) {
3801
- result.Success = false;
3802
- result.Message = 'Transaction failed to commit';
3803
- }
3804
- }
3805
- result.NewValues = this.MapTransactionResultToNewValues(transactionResult);
3806
- result.Success = true;
3807
- }
3808
- else {
3809
- // the transaction failed, nothing to update, but we need to call Reject so the
3810
- // promise resolves with a rejection so our outer caller knows
3811
- result.Success = false;
3812
- result.Message = 'Transaction failed to commit';
3813
- }
3814
- }));
3815
- return true; // we're part of a transaction group, so we're done here
3816
- }
3817
- else {
3818
- let d;
3819
- if (bReplay) {
3820
- d = [entity.GetAll()]; // just return the entity as it was before the save as we are NOT saving anything as we are in replay mode
3821
- }
3822
- else {
3823
- // IS-A: use entity's ProviderTransaction when available for shared transaction
3824
- d = await this.ExecuteSQL(sSQL, null, {
3825
- isMutation: true,
3826
- description: `Delete ${entity.EntityInfo.Name}`,
3827
- simpleSQLFallback: entity.EntityInfo.TrackRecordChanges ? sqlDetails.simpleSQL : undefined,
3828
- connectionSource: entity.ProviderTransaction ?? undefined
3829
- }, user);
3830
- }
3831
- if (d && d.length > 0) {
3832
- // SP executed, now make sure the return value matches up as that is how we know the SP was succesfully internally
3833
- // Note: When CASCADE operations exist, multiple result sets are returned (d is array of arrays).
3834
- // When no CASCADE operations exist, a single result set is returned (d is array of objects).
3835
- // We need to handle both cases by checking if the first element is an array.
3836
- const isMultipleResultSets = Array.isArray(d[0]);
3837
- const deletedRecord = isMultipleResultSets
3838
- ? d[d.length - 1][0] // Multiple result sets: get last result set, first row
3839
- : d[0]; // Single result set: get first row directly
3840
- for (const key of entity.PrimaryKeys) {
3841
- if (key.Value !== deletedRecord[key.Name]) {
3842
- // we can get here if the sp returns NULL for a given key. The reason that would be the case is if the record
3843
- // was not found in the DB. This was the existing logic prior to the SP modifications in 2.68.0, just documenting
3844
- // it here for clarity.
3845
- result.Message = `Transaction failed to commit, record with primary key ${key.Name}=${key.Value} not found`;
3846
- result.EndedAt = new Date();
3847
- result.Success = false;
3848
- return false;
3849
- }
3850
- }
3851
- // Entity AI Actions and Actions - fired off async, NO await on purpose
3852
- this.HandleEntityActions(entity, 'delete', false, user);
3853
- this.HandleEntityAIActions(entity, 'delete', false, user);
3854
- result.EndedAt = new Date();
3855
- return true;
3856
- }
3857
- else {
3858
- result.Message = 'No result returned from SQL';
3859
- result.EndedAt = new Date();
3860
- return false;
3861
- }
3862
- }
3863
- }
3864
- catch (e) {
3865
- LogError(e);
3866
- result.Message = e.message;
3867
- result.Success = false;
3868
- result.EndedAt = new Date();
3869
- return false;
3870
- }
3871
- }
3872
- /**************************************************************************/
3873
- // END ---- IEntityDataProvider
3874
1373
  /**************************************************************************/
1374
+ // START ---- DatabaseProviderBase Override Hooks (Phase 2)
3875
1375
  /**************************************************************************/
3876
- // START ---- IMetadataProvider
3877
- /**************************************************************************/
3878
- async GetDatasetByName(datasetName, itemFilters, contextUser, providerToUse) {
3879
- const sSQL = `SELECT
3880
- di.*,
3881
- e.BaseView EntityBaseView,
3882
- e.SchemaName EntitySchemaName,
3883
- di.__mj_UpdatedAt AS DatasetItemUpdatedAt,
3884
- d.__mj_UpdatedAt AS DatasetUpdatedAt
3885
- FROM
3886
- [${this.MJCoreSchemaName}].vwDatasets d
3887
- INNER JOIN
3888
- [${this.MJCoreSchemaName}].vwDatasetItems di
3889
- ON
3890
- d.ID = di.DatasetID
3891
- INNER JOIN
3892
- [${this.MJCoreSchemaName}].vwEntities e
3893
- ON
3894
- di.EntityID = e.ID
3895
- WHERE
3896
- d.Name = @p0`;
3897
- let items = [];
3898
- const useThisProvider = providerToUse ? providerToUse : this;
3899
- items = await useThisProvider.ExecuteSQL(sSQL, [datasetName], undefined, contextUser);
3900
- // now we have the dataset and the items, we need to get the update date from the items underlying entities
3901
- if (items && items.length > 0) {
3902
- // Optimization: Use batch SQL execution for multiple items
3903
- // Build SQL queries for all items
3904
- const queries = [];
3905
- const itemsWithSQL = [];
3906
- for (const item of items) {
3907
- const itemSQL = useThisProvider.GetDatasetItemSQL(item, itemFilters, datasetName);
3908
- if (itemSQL) {
3909
- queries.push(itemSQL);
3910
- itemsWithSQL.push(item);
3911
- }
3912
- else {
3913
- // Handle invalid SQL case - add to results with error
3914
- itemsWithSQL.push({ ...item, hasError: true });
3915
- }
3916
- }
3917
- // Execute all queries in a single batch
3918
- const batchResults = await useThisProvider.ExecuteSQLBatch(queries, undefined, undefined, contextUser);
3919
- // Process results for each item
3920
- const results = [];
3921
- let queryIndex = 0;
3922
- for (const item of itemsWithSQL) {
3923
- if (item.hasError) {
3924
- // Handle error case for invalid columns
3925
- results.push({
3926
- EntityID: item.EntityID,
3927
- EntityName: item.Entity,
3928
- Code: item.Code,
3929
- Results: null,
3930
- LatestUpdateDate: null,
3931
- Status: 'Invalid columns specified for dataset item',
3932
- Success: false,
3933
- });
3934
- }
3935
- else {
3936
- // Process successful query result
3937
- let itemData = batchResults[queryIndex] || [];
3938
- // Process rows for datetime conversion and field-level decryption
3939
- // This is critical for datasets that contain encrypted fields
3940
- if (itemData.length > 0) {
3941
- const entityInfo = useThisProvider.Entities.find(e => e.Name.trim().toLowerCase() === item.Entity.trim().toLowerCase());
3942
- if (entityInfo) {
3943
- itemData = await useThisProvider.ProcessEntityRows(itemData, entityInfo, contextUser);
3944
- }
3945
- }
3946
- const itemUpdatedAt = new Date(item.DatasetItemUpdatedAt);
3947
- const datasetUpdatedAt = new Date(item.DatasetUpdatedAt);
3948
- const datasetMaxUpdatedAt = new Date(Math.max(itemUpdatedAt.getTime(), datasetUpdatedAt.getTime()));
3949
- // get the latest update date
3950
- let latestUpdateDate = new Date(1900, 1, 1);
3951
- if (itemData && itemData.length > 0) {
3952
- itemData.forEach((data) => {
3953
- if (data[item.DateFieldToCheck] && new Date(data[item.DateFieldToCheck]) > latestUpdateDate) {
3954
- latestUpdateDate = new Date(data[item.DateFieldToCheck]);
3955
- }
3956
- });
3957
- }
3958
- // finally, compare the latestUpdatedDate to the dataset max date, and use the latter if it is more recent
3959
- if (datasetMaxUpdatedAt > latestUpdateDate) {
3960
- latestUpdateDate = datasetMaxUpdatedAt;
3961
- }
3962
- results.push({
3963
- EntityID: item.EntityID,
3964
- EntityName: item.Entity,
3965
- Code: item.Code,
3966
- Results: itemData,
3967
- LatestUpdateDate: latestUpdateDate,
3968
- Success: itemData !== null && itemData !== undefined,
3969
- });
3970
- queryIndex++;
3971
- }
3972
- }
3973
- // determine overall success
3974
- const bSuccess = results.every((result) => result.Success);
3975
- // get the latest update date from all the results
3976
- const latestUpdateDate = results.reduce((acc, result) => {
3977
- if (result?.LatestUpdateDate) {
3978
- const theDate = new Date(result.LatestUpdateDate);
3979
- if (result.LatestUpdateDate && theDate.getTime() > acc.getTime()) {
3980
- return theDate;
3981
- }
3982
- }
3983
- return acc;
3984
- }, new Date(0));
3985
- return {
3986
- DatasetID: items[0].DatasetID,
3987
- DatasetName: datasetName,
3988
- Success: bSuccess,
3989
- Status: '',
3990
- LatestUpdateDate: latestUpdateDate,
3991
- Results: results,
3992
- };
3993
- }
3994
- else {
3995
- return {
3996
- DatasetID: '',
3997
- DatasetName: datasetName,
3998
- Success: false,
3999
- Status: 'No Dataset or Items found for DatasetName: ' + datasetName,
4000
- LatestUpdateDate: null,
4001
- Results: null,
4002
- };
4003
- }
4004
- }
4005
- /**
4006
- * Constructs the SQL query for a dataset item.
4007
- * @param item - The dataset item metadata
4008
- * @param itemFilters - Optional filters to apply
4009
- * @param datasetName - Name of the dataset (for error logging)
4010
- * @returns The SQL query string, or null if columns are invalid
4011
- */
4012
- GetDatasetItemSQL(item, itemFilters, datasetName) {
4013
- let filterSQL = '';
4014
- if (itemFilters && itemFilters.length > 0) {
4015
- const filter = itemFilters.find((f) => f.ItemCode === item.Code);
4016
- if (filter)
4017
- filterSQL = (item.WhereClause ? ' AND ' : ' WHERE ') + '(' + filter.Filter + ')';
4018
- }
4019
- const columns = this.GetColumnsForDatasetItem(item, datasetName);
4020
- if (!columns) {
4021
- return null; // Invalid columns
1376
+ async GenerateSaveSQL(entity, isNew, user) {
1377
+ const spName = this.GetCreateUpdateSPName(entity, isNew);
1378
+ const sqlDetails = await this.GetSaveSQLWithDetails(entity, isNew, spName, user);
1379
+ const result = {
1380
+ fullSQL: sqlDetails.fullSQL,
1381
+ simpleSQL: sqlDetails.simpleSQL,
1382
+ };
1383
+ if (sqlDetails.overlappingChangeData) {
1384
+ result.extraData = { overlappingChangeData: sqlDetails.overlappingChangeData };
4022
1385
  }
4023
- return `SELECT ${columns} FROM [${item.EntitySchemaName}].[${item.EntityBaseView}] ${item.WhereClause ? 'WHERE ' + item.WhereClause : ''}${filterSQL}`;
1386
+ return result;
4024
1387
  }
4025
- async GetDatasetItem(item, itemFilters, datasetName, contextUser) {
4026
- const itemUpdatedAt = new Date(item.DatasetItemUpdatedAt);
4027
- const datasetUpdatedAt = new Date(item.DatasetUpdatedAt);
4028
- const datasetMaxUpdatedAt = new Date(Math.max(itemUpdatedAt.getTime(), datasetUpdatedAt.getTime()));
4029
- const itemSQL = this.GetDatasetItemSQL(item, itemFilters, datasetName);
4030
- if (!itemSQL) {
4031
- // failure condition within columns, return a failed result
4032
- return {
4033
- EntityID: item.EntityID,
4034
- EntityName: item.Entity,
4035
- Code: item.Code,
4036
- Results: null,
4037
- LatestUpdateDate: null,
4038
- Status: 'Invalid columns specified for dataset item',
4039
- Success: false,
4040
- };
4041
- }
4042
- const itemData = await this.ExecuteSQL(itemSQL, undefined, undefined, contextUser);
4043
- // get the latest update date
4044
- let latestUpdateDate = new Date(1900, 1, 1);
4045
- if (itemData && itemData.length > 0) {
4046
- itemData.forEach((data) => {
4047
- if (data[item.DateFieldToCheck] && new Date(data[item.DateFieldToCheck]) > latestUpdateDate) {
4048
- latestUpdateDate = new Date(data[item.DateFieldToCheck]);
4049
- }
4050
- });
4051
- }
4052
- // finally, compare the latestUpdatedDate to the dataset max date, and use the latter if it is more recent
4053
- if (datasetMaxUpdatedAt > latestUpdateDate) {
4054
- latestUpdateDate = datasetMaxUpdatedAt;
4055
- }
1388
+ GenerateDeleteSQL(entity, user) {
1389
+ const sqlDetails = this.GetDeleteSQLWithDetails(entity, user);
4056
1390
  return {
4057
- EntityID: item.EntityID,
4058
- EntityName: item.Entity,
4059
- Code: item.Code,
4060
- Results: itemData,
4061
- LatestUpdateDate: latestUpdateDate,
4062
- Success: itemData !== null && itemData !== undefined,
1391
+ fullSQL: sqlDetails.fullSQL,
1392
+ simpleSQL: sqlDetails.simpleSQL,
4063
1393
  };
4064
1394
  }
4065
- /**
4066
- * Gets column info for a dataset item, which might be * for all columns or if a Columns field was provided in the DatasetItem table,
4067
- * attempts to use those columns assuming they are valid.
4068
- * @param item
4069
- * @param datasetName
4070
- * @returns
4071
- */
4072
- GetColumnsForDatasetItem(item, datasetName) {
4073
- const specifiedColumns = item.Columns ? item.Columns.split(',').map((col) => col.trim()) : [];
4074
- if (specifiedColumns.length > 0) {
4075
- // validate that the columns specified are valid within the entity metadata
4076
- const entity = this.Entities.find((e) => e.ID === item.EntityID);
4077
- if (!entity && this.Entities.length > 0) {
4078
- // we have loaded entities (e.g. Entites.length > 0) but the entity wasn't found, log an error and return a failed result
4079
- // the reason we continue below if we have NOT loaded Entities is that when the system first bootstraps, DATASET gets loaded
4080
- // FIRST before Entities are loaded to load the entity metadata so this would ALWAYS fail :)
4081
- // entity not found, return a failed result, shouldn't ever get here due to the foreign key constraint on the table
4082
- LogError(`Entity not found for dataset item ${item.Code} in dataset ${datasetName}`);
4083
- return null;
4084
- }
4085
- else {
4086
- if (entity) {
4087
- // have a valid entity, now make sure that all of the columns specified are valid
4088
- // only do the column validity check if we have an entity, we can get here if the entity wasn't found IF we haven't loaded entities yet per above comment
4089
- const invalidColumns = [];
4090
- specifiedColumns.forEach((col) => {
4091
- if (!entity.Fields.find((f) => f.Name.trim().toLowerCase() === col.trim().toLowerCase())) {
4092
- invalidColumns.push(col);
4093
- }
4094
- });
4095
- if (invalidColumns.length > 0) {
4096
- LogError(`Invalid columns specified for dataset item ${item.Code} in dataset ${datasetName}: ${invalidColumns.join(', ')}`);
4097
- return null;
4098
- }
4099
- }
4100
- // check to see if the specified columns include the DateFieldToCheck
4101
- // in the below we only check entity metadata if we have it, if we don't have it, we just add the special fields back in
4102
- if (item.DateFieldToCheck && item.DateFieldToCheck.trim().length > 0 && specifiedColumns.indexOf(item.DateFieldToCheck) === -1) {
4103
- // we only check the entity if we have it, otherwise we just add it back in
4104
- if (!entity || entity.Fields.find((f) => f.Name.trim().toLowerCase() === item.DateFieldToCheck.trim().toLowerCase()))
4105
- specifiedColumns.push(item.DateFieldToCheck);
4106
- }
4107
- }
1395
+ async OnSaveCompleted(entity, saveSQLResult, user, options) {
1396
+ const overlappingChangeData = saveSQLResult.extraData?.overlappingChangeData;
1397
+ if (overlappingChangeData &&
1398
+ entity.EntityInfo.AllowMultipleSubtypes &&
1399
+ entity.EntityInfo.TrackRecordChanges) {
1400
+ const transaction = entity.ProviderTransaction ?? undefined;
1401
+ await this.PropagateRecordChangesToSiblings(entity.EntityInfo, overlappingChangeData, entity.PrimaryKey.Values(), user?.ID ?? '', options.ISAActiveChildEntityName, transaction ? { connectionSource: transaction } : undefined);
4108
1402
  }
4109
- return specifiedColumns.length > 0 ? specifiedColumns.map((colName) => `[${colName.trim()}]`).join(',') : '*';
4110
1403
  }
4111
- async GetDatasetStatusByName(datasetName, itemFilters, contextUser, providerToUse) {
4112
- const sSQL = `
4113
- SELECT
4114
- di.*,
4115
- e.BaseView EntityBaseView,
4116
- e.SchemaName EntitySchemaName,
4117
- d.__mj_UpdatedAt AS DatasetUpdatedAt,
4118
- di.__mj_UpdatedAt AS DatasetItemUpdatedAt
4119
- FROM
4120
- [${this.MJCoreSchemaName}].vwDatasets d
4121
- INNER JOIN
4122
- [${this.MJCoreSchemaName}].vwDatasetItems di
4123
- ON
4124
- d.ID = di.DatasetID
4125
- INNER JOIN
4126
- [${this.MJCoreSchemaName}].vwEntities e
4127
- ON
4128
- di.EntityID = e.ID
4129
- WHERE
4130
- d.Name = @p0`;
4131
- let items = [];
4132
- const useThisProvider = providerToUse ? providerToUse : this;
4133
- items = await useThisProvider.ExecuteSQL(sSQL, [datasetName], undefined, contextUser);
4134
- // now we have the dataset and the items, we need to get the update date from the items underlying entities
4135
- if (items && items.length > 0) {
4136
- // loop through each of the items and get the update date from the underlying entity by building a combined UNION ALL SQL statement
4137
- let combinedSQL = '';
4138
- const updateDates = [];
4139
- items.forEach((item, index) => {
4140
- let filterSQL = '';
4141
- if (itemFilters && itemFilters.length > 0) {
4142
- const filter = itemFilters.find((f) => f.ItemCode === item.Code);
4143
- if (filter)
4144
- filterSQL = ' WHERE ' + filter.Filter;
4145
- }
4146
- const itemUpdatedAt = new Date(item.DatasetItemUpdatedAt);
4147
- const datasetUpdatedAt = new Date(item.DatasetUpdatedAt);
4148
- const datasetMaxUpdatedAt = new Date(Math.max(itemUpdatedAt.getTime(), datasetUpdatedAt.getTime())).toISOString();
4149
- const itemSQL = `SELECT
4150
- CASE
4151
- WHEN MAX(${item.DateFieldToCheck}) > '${datasetMaxUpdatedAt}' THEN MAX(${item.DateFieldToCheck})
4152
- ELSE '${datasetMaxUpdatedAt}'
4153
- END AS UpdateDate,
4154
- COUNT(*) AS TheRowCount,
4155
- '${item.EntityID}' AS EntityID,
4156
- '${item.Entity}' AS EntityName
4157
- FROM
4158
- [${item.EntitySchemaName}].[${item.EntityBaseView}]${filterSQL}`;
4159
- combinedSQL += itemSQL;
4160
- if (index < items.length - 1) {
4161
- combinedSQL += ' UNION ALL ';
4162
- }
4163
- });
4164
- const itemUpdateDates = await useThisProvider.ExecuteSQL(combinedSQL, null, undefined, contextUser);
4165
- if (itemUpdateDates && itemUpdateDates.length > 0) {
4166
- let latestUpdateDate = new Date(1900, 1, 1);
4167
- itemUpdateDates.forEach((itemUpdate) => {
4168
- const updateDate = new Date(itemUpdate.UpdateDate);
4169
- updateDates.push({
4170
- EntityID: itemUpdate.EntityID,
4171
- EntityName: itemUpdate.EntityName,
4172
- RowCount: itemUpdate.TheRowCount,
4173
- UpdateDate: updateDate,
4174
- });
4175
- if (updateDate > latestUpdateDate) {
4176
- latestUpdateDate = updateDate;
4177
- }
4178
- });
4179
- return {
4180
- DatasetID: items[0].DatasetID,
4181
- DatasetName: datasetName,
4182
- Success: true,
4183
- Status: '',
4184
- LatestUpdateDate: latestUpdateDate,
4185
- EntityUpdateDates: updateDates,
4186
- };
4187
- }
4188
- else {
4189
- return {
4190
- DatasetID: items[0].DatasetID,
4191
- DatasetName: datasetName,
4192
- Success: false,
4193
- Status: 'No update dates found for DatasetName: ' + datasetName,
4194
- LatestUpdateDate: null,
4195
- EntityUpdateDates: null,
4196
- };
4197
- }
4198
- }
4199
- else {
4200
- return {
4201
- DatasetID: '',
4202
- DatasetName: datasetName,
4203
- Success: false,
4204
- Status: 'No Dataset or Items found for DatasetName: ' + datasetName,
4205
- EntityUpdateDates: null,
4206
- LatestUpdateDate: null,
4207
- };
4208
- }
1404
+ OnSuspendRefresh() {
1405
+ this._bAllowRefresh = false;
4209
1406
  }
4210
- async GetApplicationMetadata(contextUser) {
4211
- const apps = await this.ExecuteSQL(`SELECT * FROM [${this.MJCoreSchemaName}].vwApplications`, null, undefined, contextUser);
4212
- const appEntities = await this.ExecuteSQL(`SELECT * FROM [${this.MJCoreSchemaName}].vwApplicationEntities ORDER BY ApplicationName`, undefined, undefined, contextUser);
4213
- const ret = [];
4214
- for (let i = 0; i < apps.length; i++) {
4215
- ret.push(new ApplicationInfo(this, {
4216
- ...apps[i],
4217
- ApplicationEntities: appEntities.filter((ae) => ae.ApplicationName.trim().toLowerCase() === apps[i].Name.trim().toLowerCase()),
4218
- }));
4219
- }
4220
- return ret;
1407
+ OnResumeRefresh() {
1408
+ this._bAllowRefresh = true;
4221
1409
  }
4222
- async GetAuditLogTypeMetadata(contextUser) {
4223
- const alts = await this.ExecuteSQL(`SELECT * FROM [${this.MJCoreSchemaName}].vwAuditLogTypes`, null, undefined, contextUser);
4224
- const ret = [];
4225
- for (let i = 0; i < alts.length; i++) {
4226
- const alt = new AuditLogTypeInfo(alts[i]);
4227
- ret.push(alt);
4228
- }
4229
- return ret;
1410
+ GetTransactionExtraData(_entity) {
1411
+ return { dataSource: this._pool };
4230
1412
  }
4231
- async GetUserMetadata(contextUser) {
4232
- const users = await this.ExecuteSQL(`SELECT * FROM [${this.MJCoreSchemaName}].vwUsers`, null, undefined, contextUser);
4233
- const userRoles = await this.ExecuteSQL(`SELECT * FROM [${this.MJCoreSchemaName}].vwUserRoles ORDER BY UserID`, undefined, undefined, contextUser);
4234
- const ret = [];
4235
- for (let i = 0; i < users.length; i++) {
4236
- ret.push(new UserInfo(this, {
4237
- ...users[i],
4238
- UserRoles: userRoles.filter((ur) => ur.UserID === users[i].ID),
4239
- }));
4240
- }
4241
- return ret;
1413
+ BuildSaveExecuteOptions(entity, sqlDetails) {
1414
+ const opts = super.BuildSaveExecuteOptions(entity, sqlDetails);
1415
+ opts.connectionSource =
1416
+ entity.ProviderTransaction ?? undefined;
1417
+ return opts;
4242
1418
  }
4243
- async GetAuthorizationMetadata(contextUser) {
4244
- const auths = await this.ExecuteSQL(`SELECT * FROM [${this.MJCoreSchemaName}].vwAuthorizations`, null, undefined, contextUser);
4245
- const authRoles = await this.ExecuteSQL(`SELECT * FROM [${this.MJCoreSchemaName}].vwAuthorizationRoles ORDER BY AuthorizationName`, undefined, undefined, contextUser);
4246
- const ret = [];
4247
- for (let i = 0; i < auths.length; i++) {
4248
- ret.push(new AuthorizationInfo(this, {
4249
- ...auths[i],
4250
- AuthorizationRoles: authRoles.filter((ar) => ar.AuthorizationName.trim().toLowerCase() === auths[i].Name.trim().toLowerCase()),
4251
- }));
1419
+ BuildDeleteExecuteOptions(entity, sqlDetails) {
1420
+ const opts = super.BuildDeleteExecuteOptions(entity, sqlDetails);
1421
+ opts.connectionSource =
1422
+ entity.ProviderTransaction ?? undefined;
1423
+ return opts;
1424
+ }
1425
+ ValidateDeleteResult(entity, rawResult, entityResult) {
1426
+ if (!rawResult || rawResult.length === 0)
1427
+ return false;
1428
+ // SQL Server CASCADE deletes can return multiple result sets (array of arrays)
1429
+ const isMultipleResultSets = Array.isArray(rawResult[0]);
1430
+ const deletedRecord = isMultipleResultSets
1431
+ ? rawResult[rawResult.length - 1][0]
1432
+ : rawResult[0];
1433
+ for (const key of entity.PrimaryKeys) {
1434
+ if (key.Value !== deletedRecord[key.Name]) {
1435
+ entityResult.Message = `Transaction failed to commit, record with primary key ${key.Name}=${key.Value} not found`;
1436
+ return false;
1437
+ }
4252
1438
  }
4253
- return ret;
1439
+ return true;
4254
1440
  }
1441
+ /**************************************************************************/
1442
+ // END ---- DatabaseProviderBase Override Hooks (Phase 2)
1443
+ /**************************************************************************/
1444
+ /**************************************************************************/
1445
+ // END ---- IEntityDataProvider
1446
+ /**************************************************************************/
1447
+ /**************************************************************************/
1448
+ // START ---- IMetadataProvider
1449
+ /**************************************************************************/
4255
1450
  /**
4256
- * Processes entity rows returned from SQL Server to handle:
4257
- * 1. Timezone conversions for datetime fields
4258
- * 2. Field-level decryption for encrypted fields
4259
- *
4260
- * This method specifically handles the conversion of datetime2 fields (which SQL Server returns without timezone info)
4261
- * to proper UTC dates, preventing JavaScript from incorrectly interpreting them as local time.
1451
+ * Public backward-compatible wrapper that delegates to PostProcessRows (inherited from GenericDP).
1452
+ * Used by SQLServerTransactionGroup which needs a public entry point for row processing.
4262
1453
  *
4263
- * For encrypted fields, this method decrypts values at the data provider level.
4264
- * API-level filtering (AllowDecryptInAPI/SendEncryptedValue) is handled by the GraphQL layer.
4265
- *
4266
- * @param rows The raw result rows from SQL Server
4267
- * @param entityInfo The entity metadata to determine field types
4268
- * @param contextUser Optional user context for decryption operations
4269
- * @returns The processed rows with corrected datetime values and decrypted fields
4270
- *
4271
- * @security Encrypted fields are decrypted here for internal use.
4272
- * The API layer handles response filtering based on AllowDecryptInAPI settings.
1454
+ * PostProcessRows (GenericDP) handles: AdjustDatetimeFields encryption decryption.
4273
1455
  */
4274
1456
  async ProcessEntityRows(rows, entityInfo, contextUser) {
4275
- if (!rows || rows.length === 0) {
4276
- return rows;
4277
- }
4278
- // Find all datetime fields in the entity
4279
- const datetimeFields = entityInfo.Fields.filter((field) => field.TSType === EntityFieldTSType.Date);
4280
- // Find all encrypted fields in the entity
4281
- const encryptedFields = entityInfo.Fields.filter((field) => field.Encrypt && field.EncryptionKeyID);
4282
- // If there are no fields requiring processing, return the rows as-is
4283
- if (datetimeFields.length === 0 && encryptedFields.length === 0) {
1457
+ if (!rows || rows.length === 0)
4284
1458
  return rows;
4285
- }
4286
- // Check if we need datetimeoffset adjustment (lazy loaded on first use)
4287
- const needsAdjustment = datetimeFields.length > 0 ? await this.NeedsDatetimeOffsetAdjustment() : false;
4288
- // Get encryption engine instance (lazy - only if we have encrypted fields)
4289
- let encryptionEngine = null;
4290
- if (encryptedFields.length > 0) {
4291
- encryptionEngine = EncryptionEngine.Instance;
4292
- await encryptionEngine.Config(false, contextUser);
4293
- }
4294
- // Process each row - need to use Promise.all for async decryption
4295
- const processedRows = await Promise.all(rows.map(async (row) => {
1459
+ return this.PostProcessRows(rows, entityInfo, contextUser);
1460
+ }
1461
+ /**
1462
+ * SQL Server-specific datetime field adjustments.
1463
+ *
1464
+ * SQL Server's `datetime2` and `datetime` types store values without timezone info.
1465
+ * The mssql driver creates Date objects using local timezone interpretation, which is
1466
+ * incorrect when the server stores UTC. This method adjusts dates back to UTC.
1467
+ *
1468
+ * For `datetimeoffset`, the driver sometimes mishandles timezone info (detected via
1469
+ * `NeedsDatetimeOffsetAdjustment()`), requiring similar correction.
1470
+ */
1471
+ async AdjustDatetimeFields(rows, datetimeFields, entityInfo) {
1472
+ const needsAdjustment = await this.NeedsDatetimeOffsetAdjustment();
1473
+ return rows.map((row) => {
4296
1474
  const processedRow = { ...row };
4297
- // ========================================================================
4298
- // DATETIME FIELD PROCESSING
4299
- // ========================================================================
4300
1475
  for (const field of datetimeFields) {
4301
1476
  const fieldValue = processedRow[field.Name];
4302
- // Skip null/undefined values
4303
- if (fieldValue === null || fieldValue === undefined) {
1477
+ if (fieldValue === null || fieldValue === undefined)
4304
1478
  continue;
4305
- }
4306
- // Handle different datetime field types
4307
1479
  if (field.Type.toLowerCase() === 'datetime2') {
4308
1480
  if (typeof fieldValue === 'string') {
4309
- // If it's still a string (rare case), convert to UTC
4310
1481
  if (!fieldValue.includes('Z') && !fieldValue.includes('+') && !fieldValue.includes('-')) {
4311
- const utcValue = fieldValue.replace(' ', 'T') + 'Z';
4312
- processedRow[field.Name] = new Date(utcValue);
1482
+ processedRow[field.Name] = new Date(fieldValue.replace(' ', 'T') + 'Z');
4313
1483
  }
4314
1484
  else {
4315
1485
  processedRow[field.Name] = new Date(fieldValue);
4316
1486
  }
4317
1487
  }
4318
1488
  else if (fieldValue instanceof Date) {
4319
- // DB driver has already converted to a Date object using local timezone
4320
- // We need to adjust it back to UTC
4321
- // SQL Server stores datetime2 as UTC, but DB Driver interprets it as local
4322
- const localDate = fieldValue;
4323
- const timezoneOffsetMs = localDate.getTimezoneOffset() * 60 * 1000;
4324
- const utcDate = new Date(localDate.getTime() + timezoneOffsetMs);
4325
- processedRow[field.Name] = utcDate;
1489
+ const timezoneOffsetMs = fieldValue.getTimezoneOffset() * 60 * 1000;
1490
+ processedRow[field.Name] = new Date(fieldValue.getTime() + timezoneOffsetMs);
4326
1491
  }
4327
1492
  }
4328
1493
  else if (field.Type.toLowerCase() === 'datetimeoffset') {
4329
- // Handle datetimeoffset based on empirical test results
4330
1494
  if (typeof fieldValue === 'string') {
4331
- // String format should include timezone offset, parse it correctly
4332
1495
  processedRow[field.Name] = new Date(fieldValue);
4333
1496
  }
4334
1497
  else if (fieldValue instanceof Date && needsAdjustment) {
4335
- // The database driver has incorrectly converted to a Date object using local timezone
4336
- // For datetimeoffset, SQL Server provides the value with timezone info, but the driver
4337
- // creates the Date as if it were in local time, ignoring the offset
4338
- // We need to adjust it back to the correct UTC time
4339
- const localDate = fieldValue;
4340
- const timezoneOffsetMs = localDate.getTimezoneOffset() * 60 * 1000;
4341
- const utcDate = new Date(localDate.getTime() + timezoneOffsetMs);
4342
- processedRow[field.Name] = utcDate;
1498
+ const timezoneOffsetMs = fieldValue.getTimezoneOffset() * 60 * 1000;
1499
+ processedRow[field.Name] = new Date(fieldValue.getTime() + timezoneOffsetMs);
4343
1500
  }
4344
- // If it's already a Date object and no adjustment needed, leave as-is
4345
1501
  }
4346
1502
  else if (field.Type.toLowerCase() === 'datetime') {
4347
- // Legacy datetime type - similar handling to datetime2
4348
1503
  if (fieldValue instanceof Date) {
4349
- const localDate = fieldValue;
4350
- const timezoneOffsetMs = localDate.getTimezoneOffset() * 60 * 1000;
4351
- const utcDate = new Date(localDate.getTime() + timezoneOffsetMs);
4352
- processedRow[field.Name] = utcDate;
4353
- }
4354
- }
4355
- // For other types (date, time), leave as-is
4356
- }
4357
- // ========================================================================
4358
- // ENCRYPTED FIELD PROCESSING (DECRYPTION)
4359
- // Decrypt at the data provider level for internal use.
4360
- // API-level filtering based on AllowDecryptInAPI is handled by GraphQL resolvers.
4361
- // ========================================================================
4362
- if (encryptionEngine && encryptedFields.length > 0) {
4363
- for (const field of encryptedFields) {
4364
- const fieldValue = processedRow[field.Name];
4365
- // Skip null/undefined/empty values
4366
- if (fieldValue === null || fieldValue === undefined || fieldValue === '') {
4367
- continue;
4368
- }
4369
- // Only decrypt if the value is actually encrypted
4370
- const keyMarker = field.EncryptionKeyID ? encryptionEngine.GetKeyByID(field.EncryptionKeyID)?.Marker : undefined;
4371
- if (typeof fieldValue === 'string' && encryptionEngine.IsEncrypted(fieldValue, keyMarker)) {
4372
- try {
4373
- const decryptedValue = await encryptionEngine.Decrypt(fieldValue, contextUser);
4374
- processedRow[field.Name] = decryptedValue;
4375
- }
4376
- catch (decryptError) {
4377
- // Log error but don't fail the entire operation
4378
- // Return the encrypted value so the caller knows something is wrong
4379
- const message = decryptError instanceof Error ? decryptError.message : String(decryptError);
4380
- LogError(`Failed to decrypt field "${field.Name}" on entity "${entityInfo.Name}": ${message}. ` +
4381
- 'The encrypted value will be returned unchanged.');
4382
- // Keep the encrypted value in the row - let the caller decide what to do
4383
- }
1504
+ const timezoneOffsetMs = fieldValue.getTimezoneOffset() * 60 * 1000;
1505
+ processedRow[field.Name] = new Date(fieldValue.getTime() + timezoneOffsetMs);
4384
1506
  }
4385
1507
  }
4386
1508
  }
4387
1509
  return processedRow;
4388
- }));
4389
- return processedRows;
1510
+ });
4390
1511
  }
4391
1512
  /**
4392
1513
  * Static method for executing SQL with proper handling of connections and logging.
@@ -4535,7 +1656,7 @@ export class SQLServerDataProvider extends DatabaseProviderBase {
4535
1656
  transaction: null,
4536
1657
  logSqlStatement: async (q, p, d, i, m, s, u) => {
4537
1658
  // Use static logging method
4538
- await SQLServerDataProvider.LogSQLStatement(q, p, d || 'ExecuteSQLWithPool', m || false, s, u);
1659
+ await GenericDatabaseProvider.LogSQLStatement(q, p, d || 'ExecuteSQLWithPool', m || false, s, u);
4539
1660
  }
4540
1661
  };
4541
1662
  // Create options
@@ -4628,7 +1749,7 @@ export class SQLServerDataProvider extends DatabaseProviderBase {
4628
1749
  pool: pool,
4629
1750
  transaction: transaction,
4630
1751
  logSqlStatement: async (q, p, d, i, m, s, u) => {
4631
- await SQLServerDataProvider.LogSQLStatement(q, p, d || 'Batch execution', m || false, s, u);
1752
+ await GenericDatabaseProvider.LogSQLStatement(q, p, d || 'Batch execution', m || false, s, u);
4632
1753
  }
4633
1754
  };
4634
1755
  // Use named parameters for batch SQL
@@ -4848,60 +1969,12 @@ export class SQLServerDataProvider extends DatabaseProviderBase {
4848
1969
  await txn.rollback();
4849
1970
  }
4850
1971
  }
4851
- /**
4852
- * Discovers which IS-A child entity, if any, has a record with the given primary key.
4853
- * Executes a single UNION ALL query across all child entity tables for maximum efficiency.
4854
- * Each branch of the UNION is a PK lookup on a clustered index — effectively instant.
4855
- *
4856
- * @param entityInfo The parent entity whose children to search
4857
- * @param recordPKValue The primary key value to find in child tables
4858
- * @param contextUser Optional context user for audit/permission purposes
4859
- * @returns The child entity name if found, or null if no child record exists
4860
- */
4861
- async FindISAChildEntity(entityInfo, recordPKValue, contextUser) {
4862
- const childEntities = entityInfo.ChildEntities;
4863
- if (childEntities.length === 0)
4864
- return null;
4865
- const unionSQL = this.buildChildDiscoverySQL(childEntities, recordPKValue);
4866
- if (!unionSQL)
4867
- return null;
4868
- const results = await this.ExecuteSQL(unionSQL, undefined, undefined, contextUser);
4869
- if (results && results.length > 0 && results[0].EntityName) {
4870
- return { ChildEntityName: results[0].EntityName };
4871
- }
4872
- return null;
4873
- }
4874
- /**
4875
- * Discovers ALL IS-A child entities that have records with the given primary key.
4876
- * Used for overlapping subtype parents (AllowMultipleSubtypes = true) where multiple
4877
- * children can coexist. Same UNION ALL query as FindISAChildEntity, but returns all matches.
4878
- *
4879
- * @param entityInfo The parent entity whose children to search
4880
- * @param recordPKValue The primary key value to find in child tables
4881
- * @param contextUser Optional context user for audit/permission purposes
4882
- * @returns Array of child entity names found (empty if none)
4883
- */
4884
- async FindISAChildEntities(entityInfo, recordPKValue, contextUser) {
4885
- const childEntities = entityInfo.ChildEntities;
4886
- if (childEntities.length === 0)
4887
- return [];
4888
- const unionSQL = this.buildChildDiscoverySQL(childEntities, recordPKValue);
4889
- if (!unionSQL)
4890
- return [];
4891
- const results = await this.ExecuteSQL(unionSQL, undefined, undefined, contextUser);
4892
- if (results && results.length > 0) {
4893
- return results
4894
- .filter((r) => r.EntityName)
4895
- .map((r) => ({ ChildEntityName: r.EntityName }));
4896
- }
4897
- return [];
4898
- }
4899
1972
  /**
4900
1973
  * Builds a UNION ALL query that checks each child entity's base table for a record
4901
1974
  * with the given primary key. Returns the first match (disjoint subtypes guarantee
4902
1975
  * at most one result) unless used with overlapping subtypes.
4903
1976
  */
4904
- buildChildDiscoverySQL(childEntities, recordPKValue) {
1977
+ BuildChildDiscoverySQL(childEntities, recordPKValue) {
4905
1978
  // Sanitize the PK value to prevent SQL injection
4906
1979
  const safePKValue = recordPKValue.replace(/'/g, "''");
4907
1980
  const unionParts = childEntities
@@ -4937,74 +2010,16 @@ export class SQLServerDataProvider extends DatabaseProviderBase {
4937
2010
  * Undefined when saving the parent directly — all children get propagated to.
4938
2011
  * @param transaction The active IS-A transaction, or undefined for standalone saves
4939
2012
  */
4940
- async PropagateRecordChangesToSiblings(parentInfo, changeData, pkValue, userId, activeChildEntityName, transaction) {
4941
- const sqlParts = [];
4942
- let varIndex = 0;
4943
- const safePKValue = pkValue.replace(/'/g, "''");
4944
- const safeUserId = userId.replace(/'/g, "''");
4945
- const safeChangesJSON = changeData.changesJSON.replace(/'/g, "''");
4946
- const safeChangesDesc = changeData.changesDescription.replace(/'/g, "''");
4947
- for (const childInfo of parentInfo.ChildEntities) {
4948
- // Skip the active branch (the child that initiated the parent save).
4949
- // When activeChildEntityName is undefined (direct save on parent), propagate to ALL children.
4950
- if (activeChildEntityName && this.isEntityOrAncestorOf(childInfo, activeChildEntityName))
4951
- continue;
4952
- // Recursively enumerate this child's entire sub-tree from metadata
4953
- const subTree = this.getFullSubTree(childInfo);
4954
- for (const entityInTree of subTree) {
4955
- if (!entityInTree.TrackRecordChanges)
4956
- continue;
4957
- const varName = `@_rc_prop_${varIndex++}`;
4958
- sqlParts.push(this.buildSiblingRecordChangeSQL(varName, entityInTree, safeChangesJSON, safeChangesDesc, safePKValue, safeUserId));
4959
- }
4960
- }
4961
- // Execute as single batch
4962
- if (sqlParts.length > 0) {
4963
- const batch = sqlParts.join('\n');
4964
- await this.ExecuteSQL(batch, undefined, {
4965
- connectionSource: transaction,
4966
- description: 'IS-A overlapping subtype Record Change propagation',
4967
- isMutation: true
4968
- });
4969
- }
4970
- }
4971
- /**
4972
- * Checks whether a given entity matches the target name, or is an ancestor
4973
- * of the target (i.e., the target is somewhere in its descendant sub-tree).
4974
- * Used to identify and skip the active branch during sibling propagation.
4975
- */
4976
- isEntityOrAncestorOf(entityInfo, targetName) {
4977
- if (entityInfo.Name === targetName)
4978
- return true;
4979
- for (const child of entityInfo.ChildEntities) {
4980
- if (this.isEntityOrAncestorOf(child, targetName))
4981
- return true;
4982
- }
4983
- return false;
4984
- }
4985
- /**
4986
- * Recursively enumerates an entity's entire sub-tree from metadata.
4987
- * No DB queries — uses EntityInfo.ChildEntities which is populated from metadata.
4988
- */
4989
- getFullSubTree(entityInfo) {
4990
- const result = [entityInfo];
4991
- for (const child of entityInfo.ChildEntities) {
4992
- result.push(...this.getFullSubTree(child));
4993
- }
4994
- return result;
4995
- }
4996
2013
  /**
4997
- * Generates a single block of SQL for one sibling entity in the Record Change
2014
+ * Generates a single block of T-SQL for one sibling entity in the Record Change
4998
2015
  * propagation batch. Uses SELECT...FOR JSON to get the full record, then
4999
- * conditionally inserts a Record Change entry if the record exists.
2016
+ * conditionally inserts a Record Change entry via spCreateRecordChange_Internal.
5000
2017
  */
5001
- buildSiblingRecordChangeSQL(varName, entityInfo, safeChangesJSON, safeChangesDesc, safePKValue, safeUserId) {
2018
+ BuildSiblingRecordChangeSQL(varName, entityInfo, safeChangesJSON, safeChangesDesc, safePKValue, safeUserId) {
5002
2019
  const schema = entityInfo.SchemaName || '__mj';
5003
2020
  const view = entityInfo.BaseView;
5004
2021
  const pkName = entityInfo.PrimaryKeys[0]?.Name ?? 'ID';
5005
2022
  const safeEntityName = entityInfo.Name.replace(/'/g, "''");
5006
- // Build RecordID in CompositeKey format: "FieldCodeName|Value" (or "F1|V1||F2|V2" for composite PKs)
5007
- // Must match the format used by the main save flow (concatPKIDString in GetSaveSQLWithDetails)
5008
2023
  const recordID = entityInfo.PrimaryKeys
5009
2024
  .map(pk => `${pk.CodeName}${CompositeKey.DefaultValueDelimiter}${safePKValue}`)
5010
2025
  .join(CompositeKey.DefaultFieldDelimiter);
@@ -5187,64 +2202,6 @@ IF ${varName} IS NOT NULL
5187
2202
  this._fileSystemProvider = new NodeFileSystemProvider();
5188
2203
  return this._fileSystemProvider;
5189
2204
  }
5190
- async InternalGetEntityRecordNames(info, contextUser) {
5191
- const promises = info.map(async (item) => {
5192
- const r = await this.InternalGetEntityRecordName(item.EntityName, item.CompositeKey, contextUser);
5193
- return {
5194
- EntityName: item.EntityName,
5195
- CompositeKey: item.CompositeKey,
5196
- RecordName: r,
5197
- Success: r ? true : false,
5198
- Status: r ? 'Success' : 'Error',
5199
- };
5200
- });
5201
- return Promise.all(promises);
5202
- }
5203
- async InternalGetEntityRecordName(entityName, CompositeKey, contextUser) {
5204
- try {
5205
- const sql = this.GetEntityRecordNameSQL(entityName, CompositeKey);
5206
- if (sql) {
5207
- const data = await this.ExecuteSQL(sql, null, undefined, contextUser);
5208
- if (data && data.length === 1) {
5209
- const fields = Object.keys(data[0]);
5210
- return data[0][fields[0]]; // return first field
5211
- }
5212
- else {
5213
- LogError(`Entity ${entityName} record ${CompositeKey.ToString()} not found, returning null`);
5214
- return null;
5215
- }
5216
- }
5217
- }
5218
- catch (e) {
5219
- LogError(e);
5220
- return null;
5221
- }
5222
- }
5223
- GetEntityRecordNameSQL(entityName, CompositeKey) {
5224
- const e = this.Entities.find((e) => e.Name === entityName);
5225
- if (!e)
5226
- throw new Error(`Entity ${entityName} not found`);
5227
- else {
5228
- const f = e.NameField;
5229
- if (!f) {
5230
- LogError(`Entity ${entityName} does not have an IsNameField or a field with the column name of Name, returning null, use recordId`);
5231
- return null;
5232
- }
5233
- else {
5234
- // got our field, create a SQL Query
5235
- const sql = `SELECT [${f.Name}] FROM [${e.SchemaName}].[${e.BaseView}] WHERE `;
5236
- let where = '';
5237
- for (const pkv of CompositeKey.KeyValuePairs) {
5238
- const pk = e.PrimaryKeys.find((pk) => pk.Name === pkv.FieldName);
5239
- const quotes = pk.NeedsQuotes ? "'" : '';
5240
- if (where.length > 0)
5241
- where += ' AND ';
5242
- where += `[${pkv.FieldName}]=${quotes}${pkv.Value}${quotes}`;
5243
- }
5244
- return sql + where;
5245
- }
5246
- }
5247
- }
5248
2205
  async CreateTransactionGroup() {
5249
2206
  return new SQLServerTransactionGroup();
5250
2207
  }