@memberjunction/metadata-sync 2.52.0 → 2.54.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -131,9 +131,10 @@ exports.cleanupProvider = cleanupProvider;
  *
  * Retrieves the "System" user from MemberJunction's UserCache. This user is
  * typically used for CLI operations where no specific user context exists.
+ * The System user must have the Developer role to perform metadata sync operations.
  *
  * @returns The System UserInfo object
- * @throws Error if System user is not found in the cache
+ * @throws Error if System user is not found in the cache or doesn't have Developer role
  *
  * @example
  * ```typescript
@@ -146,6 +147,14 @@ function getSystemUser() {
  if (!sysUser) {
  throw new Error("System user not found in cache. Ensure the system user exists in the database.");
  }
+ // Check if the System user has the Developer role
+ const hasDeveloperRole = sysUser.UserRoles && sysUser.UserRoles.some(userRole => userRole.Role.trim().toLowerCase() === 'developer');
+ if (!hasDeveloperRole) {
+ throw new Error("System user does not have the 'Developer' role. " +
+ "The Developer role is required for metadata sync operations. " +
+ "Please ensure the System user is assigned the Developer role in the database:\n" +
+ "* Add a record to the __mj.UserRole table linking the System user to the Developer role");
+ }
  return sysUser;
  }
  exports.getSystemUser = getSystemUser;
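
For consumers of this change, a minimal sketch of guarding a CLI run against the new role requirement; the import path and error handling are assumptions, not part of this diff.

```typescript
// Minimal sketch: the import path is an assumption; getSystemUser may only be
// exported from the package's lib/provider-utils module rather than the root.
import { getSystemUser } from '@memberjunction/metadata-sync';

function requireSystemDeveloperUser() {
  try {
    // As of this version, throws if the "System" user is missing from the
    // UserCache or is not assigned the Developer role.
    return getSystemUser();
  } catch (err) {
    // The error text points at adding a __mj.UserRole record for the System user.
    console.error((err as Error).message);
    process.exit(1);
  }
}
```
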
@@ -179,6 +188,7 @@ exports.getDataProvider = getDataProvider;
  * @param dir - Base directory to search from
  * @param specificDir - Optional specific subdirectory name to check
  * @param directoryOrder - Optional array specifying the order directories should be processed
+ * @param ignoreDirectories - Optional array of directory patterns to ignore
  * @returns Array of absolute directory paths containing .mj-sync.json files, ordered according to directoryOrder
  *
  * @example
@@ -193,7 +203,7 @@ exports.getDataProvider = getDataProvider;
  * const dirs = findEntityDirectories(process.cwd(), undefined, ['prompts', 'agent-types']);
  * ```
  */
- function findEntityDirectories(dir, specificDir, directoryOrder) {
+ function findEntityDirectories(dir, specificDir, directoryOrder, ignoreDirectories) {
  const results = [];
  // If specific directory is provided, check if it's an entity directory or root config directory
  if (specificDir) {
@@ -212,7 +222,12 @@ function findEntityDirectories(dir, specificDir, directoryOrder) {
  // If this config has directoryOrder but no entity, treat it as a root config
  // and look for entity directories in its subdirectories
  if (config.directoryOrder) {
- return findEntityDirectories(targetDir, undefined, config.directoryOrder);
+ // Merge ignore directories from parent with current config
+ const mergedIgnoreDirectories = [
+ ...(ignoreDirectories || []),
+ ...(config.ignoreDirectories || [])
+ ];
+ return findEntityDirectories(targetDir, undefined, config.directoryOrder, mergedIgnoreDirectories);
  }
  }
  catch (error) {
@@ -220,7 +235,7 @@ function findEntityDirectories(dir, specificDir, directoryOrder) {
  }
  }
  // Fallback: look for entity subdirectories in the target directory
- return findEntityDirectories(targetDir, undefined, directoryOrder);
+ return findEntityDirectories(targetDir, undefined, directoryOrder, ignoreDirectories);
  }
  return results;
  }
@@ -229,6 +244,13 @@ function findEntityDirectories(dir, specificDir, directoryOrder) {
  const foundDirectories = [];
  for (const entry of entries) {
  if (entry.isDirectory() && !entry.name.startsWith('.')) {
+ // Check if this directory should be ignored
+ if (ignoreDirectories && ignoreDirectories.some(pattern => {
+ // Simple pattern matching: exact name or ends with pattern
+ return entry.name === pattern || entry.name.endsWith(pattern);
+ })) {
+ continue;
+ }
  const subDir = path.join(dir, entry.name);
  const hasSyncConfig = fs.existsSync(path.join(subDir, '.mj-sync.json'));
  if (hasSyncConfig) {
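
A hedged usage sketch of the new parameter; the import path and directory names are made-up examples, and the matching rule is the exact-name-or-suffix check added above.

```typescript
// Sketch: names are hypothetical. A subdirectory is skipped when its name equals
// an ignore pattern or ends with it (e.g. "prompts-archive" matches "-archive").
import { findEntityDirectories } from '@memberjunction/metadata-sync';

const dirs = findEntityDirectories(
  process.cwd(),
  undefined,                    // no specific directory
  ['prompts', 'agent-types'],   // directoryOrder
  ['node_modules', '-archive']  // ignoreDirectories
);
```
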
@@ -1 +1 @@
- [provider-utils.js.map: previous source map (minified mappings and embedded sourcesContent omitted)]
+ [provider-utils.js.map: regenerated source map reflecting the getSystemUser Developer-role check and the new ignoreDirectories parameter (minified mappings and embedded sourcesContent omitted)]
@@ -52,7 +52,7 @@ export declare class SyncEngine {
  * Initializes the sync engine by refreshing metadata cache
  * @returns Promise that resolves when initialization is complete
  */
- initialize(): Promise<void>;
+ initialize(forceRefresh?: boolean): Promise<void>;
  /**
  * Process special references in field values
  *
@@ -104,7 +104,10 @@ export declare class SyncEngine {
  * });
  * ```
  */
- resolveLookup(entityName: string, fieldName: string, fieldValue: string, autoCreate?: boolean, createFields?: Record<string, any>): Promise<string>;
+ resolveLookup(entityName: string, lookupFields: Array<{
+ fieldName: string;
+ fieldValue: string;
+ }>, autoCreate?: boolean, createFields?: Record<string, any>): Promise<string>;
  /**
  * Build cascading defaults for a file path and process field values
  *
@@ -251,6 +254,33 @@ export declare class SyncEngine {
  * @private
  */
  private loadAndProcessTemplate;
+ /**
+ * Process file content with {@include} references
+ *
+ * Recursively processes a file's content to resolve `{@include path}` references.
+ * Include references use JSDoc-style syntax and support:
+ * - Relative paths resolved from the containing file's directory
+ * - Recursive includes (includes within included files)
+ * - Circular reference detection to prevent infinite loops
+ * - Seamless content substitution maintaining surrounding text
+ *
+ * @param filePath - Path to the file being processed
+ * @param content - The file content to process
+ * @param visitedPaths - Set of already visited file paths for circular reference detection
+ * @returns Promise resolving to the content with all includes resolved
+ * @throws Error if circular reference detected or included file not found
+ *
+ * @example
+ * ```typescript
+ * // Content with include reference
+ * const content = 'This is a {@include ./shared/header.md} example';
+ *
+ * // Resolves to:
+ * const result = await processFileContentWithIncludes('/path/to/file.md', content);
+ * // 'This is a [contents of header.md] example'
+ * ```
+ */
+ private processFileContentWithIncludes;
  /**
  * Deep merge two objects with target taking precedence
  *
@@ -47,9 +47,10 @@ class SyncEngine {
  * Initializes the sync engine by refreshing metadata cache
  * @returns Promise that resolves when initialization is complete
  */
- async initialize() {
- // Initialize metadata
- await this.metadata.Refresh();
+ async initialize(forceRefresh = false) {
+ if (forceRefresh) {
+ await this.metadata.Refresh();
+ }
  }
  /**
  * Process special references in field values
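
A short usage sketch of the changed signature; the SyncEngine construction follows the getSystemUser example earlier in this diff, while the import path is assumed.

```typescript
// Sketch only: assumes SyncEngine and getSystemUser are importable from the package root.
import { SyncEngine, getSystemUser } from '@memberjunction/metadata-sync';

async function runSync(): Promise<void> {
  const syncEngine = new SyncEngine(getSystemUser());
  await syncEngine.initialize();      // metadata is no longer refreshed by default
  await syncEngine.initialize(true);  // pass true to force this.metadata.Refresh()
}
```
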
@@ -103,7 +104,9 @@ class SyncEngine {
  const filePath = value.substring(6);
  const fullPath = path_1.default.resolve(baseDir, filePath);
  if (await fs_extra_1.default.pathExists(fullPath)) {
- return await fs_extra_1.default.readFile(fullPath, 'utf-8');
+ const fileContent = await fs_extra_1.default.readFile(fullPath, 'utf-8');
+ // Process the file content for {@include} references
+ return await this.processFileContentWithIncludes(fullPath, fileContent);
  }
  else {
  throw new Error(`File not found: ${fullPath}`);
@@ -124,7 +127,7 @@ class SyncEngine {
  if (value.startsWith('@lookup:')) {
  const lookupStr = value.substring(8);
  // Parse lookup with optional create syntax
- // Format: EntityName.FieldName=Value?create&OtherField=Value
+ // Format: EntityName.Field1=Value1&Field2=Value2?create&OtherField=Value
  const entityMatch = lookupStr.match(/^([^.]+)\./);
  if (!entityMatch) {
  throw new Error(`Invalid lookup format: ${value}`);
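
For reference, hedged examples of field values matching the updated format comment; the entity and field names are illustrative only, not taken from real metadata.

```typescript
// Hypothetical values illustrating the documented format:
// EntityName.Field1=Value1&Field2=Value2?create&OtherField=Value
const singleField = '@lookup:AI Models.Name=GPT 4';
const multiField  = '@lookup:Template Contents.TemplateID=1234&TypeID=5678';
const autoCreate  = '@lookup:Categories.Name=Utilities&Type=System?create&Description=Created during sync';
```
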
@@ -134,12 +137,20 @@ class SyncEngine {
  // Check if this has ?create syntax
  const hasCreate = remaining.includes('?create');
  const lookupPart = hasCreate ? remaining.split('?')[0] : remaining;
- // Parse the main lookup field
- const fieldMatch = lookupPart.match(/^(.+?)=(.+)$/);
- if (!fieldMatch) {
- throw new Error(`Invalid lookup format: ${value}`);
+ // Parse all lookup fields (can be multiple with &)
+ const lookupFields = [];
+ const lookupPairs = lookupPart.split('&');
+ for (const pair of lookupPairs) {
+ const fieldMatch = pair.match(/^(.+?)=(.+)$/);
+ if (!fieldMatch) {
+ throw new Error(`Invalid lookup field format: ${pair} in ${value}`);
+ }
+ const [, fieldName, fieldValue] = fieldMatch;
+ lookupFields.push({ fieldName: fieldName.trim(), fieldValue: fieldValue.trim() });
+ }
+ if (lookupFields.length === 0) {
+ throw new Error(`No lookup fields specified: ${value}`);
  }
- const [, fieldName, fieldValue] = fieldMatch;
  // Parse additional fields for creation if ?create is present
  let createFields = {};
  if (hasCreate && remaining.includes('?create&')) {
@@ -152,7 +163,7 @@ class SyncEngine {
  }
  }
  }
- return await this.resolveLookup(entityName, fieldName, fieldValue, hasCreate, createFields);
+ return await this.resolveLookup(entityName, lookupFields, hasCreate, createFields);
  }
  // Check for @env: reference
  if (value.startsWith('@env:')) {
@@ -188,18 +199,33 @@ class SyncEngine {
  * });
  * ```
  */
- async resolveLookup(entityName, fieldName, fieldValue, autoCreate = false, createFields = {}) {
+ async resolveLookup(entityName, lookupFields, autoCreate = false, createFields = {}) {
  // Debug logging handled by caller if needed
  const rv = new core_1.RunView();
  const entityInfo = this.metadata.EntityByName(entityName);
  if (!entityInfo) {
  throw new Error(`Entity not found: ${entityName}`);
  }
- const field = entityInfo.Fields.find(f => f.Name.trim().toLowerCase() === fieldName.trim().toLowerCase());
- const quotes = field?.NeedsQuotes ? "'" : '';
+ // Build compound filter for all lookup fields
+ const filterParts = [];
+ for (const { fieldName, fieldValue } of lookupFields) {
+ const field = entityInfo.Fields.find(f => f.Name.trim().toLowerCase() === fieldName.trim().toLowerCase());
+ if (!field) {
+ throw new Error(`Field '${fieldName}' not found in entity '${entityName}'`);
+ }
+ // Handle null values properly
+ if (fieldValue.trim().toLowerCase() === 'null') {
+ filterParts.push(`${fieldName} IS NULL`);
+ }
+ else {
+ const quotes = field.NeedsQuotes ? "'" : '';
+ filterParts.push(`${fieldName} = ${quotes}${fieldValue.replace(/'/g, "''")}${quotes}`);
+ }
+ }
+ const extraFilter = filterParts.join(' AND ');
  const result = await rv.RunView({
  EntityName: entityName,
- ExtraFilter: `${fieldName} = ${quotes}${fieldValue.replace(/'/g, "''")}${quotes}`,
+ ExtraFilter: extraFilter,
  MaxRows: 1
  }, this.contextUser);
  if (result.Success && result.Results.length > 0) {
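
To illustrate the compound filter and NULL handling, a hedged sketch; the entity and field names are invented, and the quoting assumes Name is a string field.

```typescript
// Sketch with invented entity/field names, assuming Name has NeedsQuotes = true
// and RelatedEntityID is a lookup column.
const lookupValue = '@lookup:Entity Fields.Name=Status&RelatedEntityID=null';
// parses into lookupFields:
//   [{ fieldName: 'Name', fieldValue: 'Status' },
//    { fieldName: 'RelatedEntityID', fieldValue: 'null' }]
// and produces the compound ExtraFilter:
const expectedFilter = "Name = 'Status' AND RelatedEntityID IS NULL";
```
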
@@ -226,9 +252,17 @@ class SyncEngine {
  }
  }
  }
- // Set the lookup field
- if (fieldName in newEntity) {
- newEntity[fieldName] = fieldValue;
+ // Set all lookup fields
+ for (const { fieldName, fieldValue } of lookupFields) {
+ if (fieldName in newEntity) {
+ // Handle null values properly
+ if (fieldValue.toLowerCase() === 'null') {
+ newEntity[fieldName] = null;
+ }
+ else {
+ newEntity[fieldName] = fieldValue;
+ }
+ }
  }
  // Set any additional fields provided
  for (const [key, value] of Object.entries(createFields)) {
@@ -237,7 +271,8 @@ class SyncEngine {
  }
  }
  // Save the new record (new records are always dirty)
- console.log(`📝 Auto-creating ${entityName} record where ${fieldName}='${fieldValue}'`);
+ const filterDesc = lookupFields.map(({ fieldName, fieldValue }) => `${fieldName}='${fieldValue}'`).join(' AND ');
+ console.log(`📝 Auto-creating ${entityName} record where ${filterDesc}`);
  const saved = await newEntity.Save();
  if (!saved) {
  const message = newEntity.LatestResult?.Message;
@@ -254,7 +289,8 @@ class SyncEngine {
  return newId;
  }
  }
- throw new Error(`Lookup failed: No record found in '${entityName}' where ${fieldName}='${fieldValue}'`);
+ const filterDesc = lookupFields.map(({ fieldName, fieldValue }) => `${fieldName}='${fieldValue}'`).join(' AND ');
+ throw new Error(`Lookup failed: No record found in '${entityName}' where ${filterDesc}`);
  }
  /**
  * Build cascading defaults for a file path and process field values
@@ -540,6 +576,71 @@ class SyncEngine {
  throw new Error(`Failed to load template ${fullPath}: ${error}`);
  }
  }
+ /**
+ * Process file content with {@include} references
+ *
+ * Recursively processes a file's content to resolve `{@include path}` references.
+ * Include references use JSDoc-style syntax and support:
+ * - Relative paths resolved from the containing file's directory
+ * - Recursive includes (includes within included files)
+ * - Circular reference detection to prevent infinite loops
+ * - Seamless content substitution maintaining surrounding text
+ *
+ * @param filePath - Path to the file being processed
+ * @param content - The file content to process
+ * @param visitedPaths - Set of already visited file paths for circular reference detection
+ * @returns Promise resolving to the content with all includes resolved
+ * @throws Error if circular reference detected or included file not found
+ *
+ * @example
+ * ```typescript
+ * // Content with include reference
+ * const content = 'This is a {@include ./shared/header.md} example';
+ *
+ * // Resolves to:
+ * const result = await processFileContentWithIncludes('/path/to/file.md', content);
+ * // 'This is a [contents of header.md] example'
+ * ```
+ */
+ async processFileContentWithIncludes(filePath, content, visitedPaths = new Set()) {
+ // Add current file to visited set
+ const absolutePath = path_1.default.resolve(filePath);
+ if (visitedPaths.has(absolutePath)) {
+ throw new Error(`Circular reference detected: ${absolutePath} is already being processed`);
+ }
+ visitedPaths.add(absolutePath);
+ // Pattern to match {@include path} references
+ // Supports whitespace around the path for flexibility
+ const includePattern = /\{@include\s+([^\}]+)\s*\}/g;
+ let processedContent = content;
+ let match;
+ // Process all {@include} references
+ while ((match = includePattern.exec(content)) !== null) {
+ const [fullMatch, includePath] = match;
+ const trimmedPath = includePath.trim();
+ // Resolve the include path relative to the current file's directory
+ const currentDir = path_1.default.dirname(filePath);
+ const resolvedPath = path_1.default.resolve(currentDir, trimmedPath);
+ try {
+ // Check if the included file exists
+ if (!await fs_extra_1.default.pathExists(resolvedPath)) {
+ throw new Error(`Included file not found: ${resolvedPath}`);
+ }
+ // Read the included file
+ const includedContent = await fs_extra_1.default.readFile(resolvedPath, 'utf-8');
+ // Recursively process the included content for nested includes
+ const processedInclude = await this.processFileContentWithIncludes(resolvedPath, includedContent, new Set(visitedPaths) // Pass a copy to allow the same file in different branches
+ );
+ // Replace the {@include} reference with the processed content
+ processedContent = processedContent.replace(fullMatch, processedInclude);
+ }
+ catch (error) {
+ // Enhance error message with context
+ throw new Error(`Failed to process {@include ${trimmedPath}} in ${filePath}: ${error}`);
+ }
+ }
+ return processedContent;
+ }
  /**
  * Deep merge two objects with target taking precedence
  *
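
Taken together with the @file: change above, a hedged sketch of how includes might be used from metadata files; the file names and record shape are illustrative, and only the @file: prefix and {@include } syntax come from this diff.

```typescript
// Sketch: hypothetical files.
//   prompt.md:            "You are a helpful agent. {@include ./shared/guardrails.md}"
//   shared/guardrails.md: "Never expose credentials."
const record = {
  Prompt: '@file:prompt.md', // @file: content is now run through processFileContentWithIncludes
};
// The nested include resolves relative to prompt.md's directory, so the field value
// becomes "You are a helpful agent. Never expose credentials."; a self-referencing
// include would raise "Circular reference detected: ...".
```
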