@machinemetrics/mm-erp-sdk 0.1.9-beta.0 → 0.1.9-beta.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (24)
  1. package/README.md +5 -0
  2. package/dist/{config-C2Dse2g2.js → config-Bax6Ofp5.js} +2 -2
  3. package/dist/{config-C2Dse2g2.js.map → config-Bax6Ofp5.js.map} +1 -1
  4. package/dist/{connector-factory-BGykiICT.js → connector-factory-BaMIlES8.js} +2 -2
  5. package/dist/{connector-factory-BGykiICT.js.map → connector-factory-BaMIlES8.js.map} +1 -1
  6. package/dist/{hashed-cache-manager-DbG72eRm.js → hashed-cache-manager-C1u9jQgY.js} +4 -4
  7. package/dist/{hashed-cache-manager-DbG72eRm.js.map → hashed-cache-manager-C1u9jQgY.js.map} +1 -1
  8. package/dist/{index-Cq9tNcJT.js → index-BkVlW0ZW.js} +2 -2
  9. package/dist/{index-Cq9tNcJT.js.map → index-BkVlW0ZW.js.map} +1 -1
  10. package/dist/{logger-DeKxCUPp.js → logger-DW5fyhVS.js} +101 -40
  11. package/dist/{logger-DeKxCUPp.js.map → logger-DW5fyhVS.js.map} +1 -1
  12. package/dist/mm-erp-sdk.js +8 -12
  13. package/dist/mm-erp-sdk.js.map +1 -1
  14. package/dist/services/data-sync-service/jobs/clean-up-expired-cache.js +4 -4
  15. package/dist/services/data-sync-service/jobs/from-erp.js +4 -4
  16. package/dist/services/data-sync-service/jobs/retry-failed-labor-tickets.js +3 -3
  17. package/dist/services/data-sync-service/jobs/run-migrations.js +1 -1
  18. package/dist/services/data-sync-service/jobs/to-erp.js +3 -3
  19. package/dist/services/reporting-service/logger.d.ts.map +1 -1
  20. package/dist/utils/standard-process-drivers/labor-ticket-erp-synchronizer.d.ts.map +1 -1
  21. package/package.json +1 -1
  22. package/src/services/mm-api-service/mm-api-service.ts +1 -1
  23. package/src/services/reporting-service/logger.ts +110 -43
  24. package/src/utils/standard-process-drivers/labor-ticket-erp-synchronizer.ts +1 -5
package/README.md CHANGED
@@ -120,6 +120,11 @@ SQL_SERVER_PASSWORD="password"
  LOG_LEVEL="info"
  ```

+ ## Logging Reliability
+
+ - The SDK logger now captures rotate/write transport failures internally, but callers should still treat logging as best-effort. If you manage your own in-process scheduler (e.g., not using Bree), wrap any `job.isRunning = true` flags and subsequent `logger.*` calls in a `try/finally` block so the scheduler state clears even if logging throws.
+ - Bree-based connectors inherit process isolation, but custom schedulers run inside a single event loop. Always reset locks/timers inside `finally` clauses to avoid getting stuck when a synchronous dependency (logging, metrics, etc.) fails mid-cycle.
+
  ## API Reference

  ### Core Interfaces
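The `try/finally` guidance added to the README above can be sketched briefly. The example below is not part of the package diff: the `Job` shape, `runSync`, and the console-backed `logger` stand-in are hypothetical placeholders for whatever a custom single-process scheduler actually uses.

```ts
// Hypothetical custom-scheduler tick illustrating the README guidance:
// treat logging as best-effort and always clear the running flag in `finally`.

interface Job {
  name: string;
  isRunning: boolean;
}

// Stand-in for the SDK logger; every call is treated as best-effort.
const logger = {
  info: (msg: string, meta?: object) => console.log(msg, meta ?? {}),
  error: (msg: string, meta?: object) => console.error(msg, meta ?? {}),
};

// Placeholder for the actual sync work a job performs each cycle.
async function runSync(job: Job): Promise<void> {
  // ... ERP sync work would go here ...
}

async function tick(job: Job): Promise<void> {
  if (job.isRunning) return; // skip overlapping runs
  job.isRunning = true;
  try {
    logger.info(`starting ${job.name}`); // may throw; still best-effort
    await runSync(job);
  } catch (err) {
    // Report the failure, but never let a failed log write escape the cycle.
    try {
      logger.error(`${job.name} failed`, { err });
    } catch {
      /* best-effort */
    }
  } finally {
    job.isRunning = false; // released even if logging or runSync threw
  }
}

const fromErpJob: Job = { name: "from-erp", isRunning: false };
setInterval(() => void tick(fromErpJob), 60_000);
```

Under Bree, each job runs in its own worker process, so a crashed cycle clears naturally; the `finally` discipline matters most for single-process schedulers built on `setInterval` or similar.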
package/dist/{config-C2Dse2g2.js → config-Bax6Ofp5.js} CHANGED
@@ -1,4 +1,4 @@
- import { g as getDefaultExportFromCjs } from "./logger-DeKxCUPp.js";
+ import { g as getDefaultExportFromCjs } from "./logger-DW5fyhVS.js";
  import fs$1 from "fs";
  import path$1 from "path";
  import require$$0 from "os";
@@ -415,4 +415,4 @@ const cliOptions_default = /* @__PURE__ */ getDefaultExportFromCjs(cliOptions);
  )
  );
  })();
- //# sourceMappingURL=config-C2Dse2g2.js.map
+ //# sourceMappingURL=config-Bax6Ofp5.js.map
package/dist/{config-C2Dse2g2.js.map → config-Bax6Ofp5.js.map} CHANGED
@@ -1 +1 @@
- {"version":3,"file":"config-C2Dse2g2.js","sources":["../node_modules/dotenv/lib/main.js","../node_modules/dotenv/lib/env-options.js","../node_modules/dotenv/lib/cli-options.js","../node_modules/dotenv/config.js"],"sourcesContent":["const fs = require('fs')\nconst path = require('path')\nconst os = require('os')\nconst crypto = require('crypto')\nconst packageJson = require('../package.json')\n\nconst version = packageJson.version\n\nconst LINE = /(?:^|^)\\s*(?:export\\s+)?([\\w.-]+)(?:\\s*=\\s*?|:\\s+?)(\\s*'(?:\\\\'|[^'])*'|\\s*\"(?:\\\\\"|[^\"])*\"|\\s*`(?:\\\\`|[^`])*`|[^#\\r\\n]+)?\\s*(?:#.*)?(?:$|$)/mg\n\n// Parse src into an Object\nfunction parse (src) {\n const obj = {}\n\n // Convert buffer to string\n let lines = src.toString()\n\n // Convert line breaks to same format\n lines = lines.replace(/\\r\\n?/mg, '\\n')\n\n let match\n while ((match = LINE.exec(lines)) != null) {\n const key = match[1]\n\n // Default undefined or null to empty string\n let value = (match[2] || '')\n\n // Remove whitespace\n value = value.trim()\n\n // Check if double quoted\n const maybeQuote = value[0]\n\n // Remove surrounding quotes\n value = value.replace(/^(['\"`])([\\s\\S]*)\\1$/mg, '$2')\n\n // Expand newlines if double quoted\n if (maybeQuote === '\"') {\n value = value.replace(/\\\\n/g, '\\n')\n value = value.replace(/\\\\r/g, '\\r')\n }\n\n // Add to object\n obj[key] = value\n }\n\n return obj\n}\n\nfunction _parseVault (options) {\n options = options || {}\n\n const vaultPath = _vaultPath(options)\n options.path = vaultPath // parse .env.vault\n const result = DotenvModule.configDotenv(options)\n if (!result.parsed) {\n const err = new Error(`MISSING_DATA: Cannot parse ${vaultPath} for an unknown reason`)\n err.code = 'MISSING_DATA'\n throw err\n }\n\n // handle scenario for comma separated keys - for use with key rotation\n // example: DOTENV_KEY=\"dotenv://:key_1234@dotenvx.com/vault/.env.vault?environment=prod,dotenv://:key_7890@dotenvx.com/vault/.env.vault?environment=prod\"\n const keys = _dotenvKey(options).split(',')\n const length = keys.length\n\n let decrypted\n for (let i = 0; i < length; i++) {\n try {\n // Get full key\n const key = keys[i].trim()\n\n // Get instructions for decrypt\n const attrs = _instructions(result, key)\n\n // Decrypt\n decrypted = DotenvModule.decrypt(attrs.ciphertext, attrs.key)\n\n break\n } catch (error) {\n // last key\n if (i + 1 >= length) {\n throw error\n }\n // try next key\n }\n }\n\n // Parse decrypted .env string\n return DotenvModule.parse(decrypted)\n}\n\nfunction _warn (message) {\n console.log(`[dotenv@${version}][WARN] ${message}`)\n}\n\nfunction _debug (message) {\n console.log(`[dotenv@${version}][DEBUG] ${message}`)\n}\n\nfunction _log (message) {\n console.log(`[dotenv@${version}] ${message}`)\n}\n\nfunction _dotenvKey (options) {\n // prioritize developer directly setting options.DOTENV_KEY\n if (options && options.DOTENV_KEY && options.DOTENV_KEY.length > 0) {\n return options.DOTENV_KEY\n }\n\n // secondary infra already contains a DOTENV_KEY environment variable\n if (process.env.DOTENV_KEY && process.env.DOTENV_KEY.length > 0) {\n return process.env.DOTENV_KEY\n }\n\n // fallback to empty string\n return ''\n}\n\nfunction _instructions (result, dotenvKey) {\n // Parse DOTENV_KEY. Format is a URI\n let uri\n try {\n uri = new URL(dotenvKey)\n } catch (error) {\n if (error.code === 'ERR_INVALID_URL') {\n const err = new Error('INVALID_DOTENV_KEY: Wrong format. 
Must be in valid uri format like dotenv://:key_1234@dotenvx.com/vault/.env.vault?environment=development')\n err.code = 'INVALID_DOTENV_KEY'\n throw err\n }\n\n throw error\n }\n\n // Get decrypt key\n const key = uri.password\n if (!key) {\n const err = new Error('INVALID_DOTENV_KEY: Missing key part')\n err.code = 'INVALID_DOTENV_KEY'\n throw err\n }\n\n // Get environment\n const environment = uri.searchParams.get('environment')\n if (!environment) {\n const err = new Error('INVALID_DOTENV_KEY: Missing environment part')\n err.code = 'INVALID_DOTENV_KEY'\n throw err\n }\n\n // Get ciphertext payload\n const environmentKey = `DOTENV_VAULT_${environment.toUpperCase()}`\n const ciphertext = result.parsed[environmentKey] // DOTENV_VAULT_PRODUCTION\n if (!ciphertext) {\n const err = new Error(`NOT_FOUND_DOTENV_ENVIRONMENT: Cannot locate environment ${environmentKey} in your .env.vault file.`)\n err.code = 'NOT_FOUND_DOTENV_ENVIRONMENT'\n throw err\n }\n\n return { ciphertext, key }\n}\n\nfunction _vaultPath (options) {\n let possibleVaultPath = null\n\n if (options && options.path && options.path.length > 0) {\n if (Array.isArray(options.path)) {\n for (const filepath of options.path) {\n if (fs.existsSync(filepath)) {\n possibleVaultPath = filepath.endsWith('.vault') ? filepath : `${filepath}.vault`\n }\n }\n } else {\n possibleVaultPath = options.path.endsWith('.vault') ? options.path : `${options.path}.vault`\n }\n } else {\n possibleVaultPath = path.resolve(process.cwd(), '.env.vault')\n }\n\n if (fs.existsSync(possibleVaultPath)) {\n return possibleVaultPath\n }\n\n return null\n}\n\nfunction _resolveHome (envPath) {\n return envPath[0] === '~' ? path.join(os.homedir(), envPath.slice(1)) : envPath\n}\n\nfunction _configVault (options) {\n const debug = Boolean(options && options.debug)\n const quiet = options && 'quiet' in options ? options.quiet : true\n\n if (debug || !quiet) {\n _log('Loading env from encrypted .env.vault')\n }\n\n const parsed = DotenvModule._parseVault(options)\n\n let processEnv = process.env\n if (options && options.processEnv != null) {\n processEnv = options.processEnv\n }\n\n DotenvModule.populate(processEnv, parsed, options)\n\n return { parsed }\n}\n\nfunction configDotenv (options) {\n const dotenvPath = path.resolve(process.cwd(), '.env')\n let encoding = 'utf8'\n const debug = Boolean(options && options.debug)\n const quiet = options && 'quiet' in options ? options.quiet : true\n\n if (options && options.encoding) {\n encoding = options.encoding\n } else {\n if (debug) {\n _debug('No encoding is specified. UTF-8 is used by default')\n }\n }\n\n let optionPaths = [dotenvPath] // default, look for .env\n if (options && options.path) {\n if (!Array.isArray(options.path)) {\n optionPaths = [_resolveHome(options.path)]\n } else {\n optionPaths = [] // reset default\n for (const filepath of options.path) {\n optionPaths.push(_resolveHome(filepath))\n }\n }\n }\n\n // Build the parsed data in a temporary object (because we need to return it). 
Once we have the final\n // parsed data, we will combine it with process.env (or options.processEnv if provided).\n let lastError\n const parsedAll = {}\n for (const path of optionPaths) {\n try {\n // Specifying an encoding returns a string instead of a buffer\n const parsed = DotenvModule.parse(fs.readFileSync(path, { encoding }))\n\n DotenvModule.populate(parsedAll, parsed, options)\n } catch (e) {\n if (debug) {\n _debug(`Failed to load ${path} ${e.message}`)\n }\n lastError = e\n }\n }\n\n let processEnv = process.env\n if (options && options.processEnv != null) {\n processEnv = options.processEnv\n }\n\n DotenvModule.populate(processEnv, parsedAll, options)\n\n if (debug || !quiet) {\n const keysCount = Object.keys(parsedAll).length\n const shortPaths = []\n for (const filePath of optionPaths) {\n try {\n const relative = path.relative(process.cwd(), filePath)\n shortPaths.push(relative)\n } catch (e) {\n if (debug) {\n _debug(`Failed to load ${filePath} ${e.message}`)\n }\n lastError = e\n }\n }\n\n _log(`injecting env (${keysCount}) from ${shortPaths.join(',')}`)\n }\n\n if (lastError) {\n return { parsed: parsedAll, error: lastError }\n } else {\n return { parsed: parsedAll }\n }\n}\n\n// Populates process.env from .env file\nfunction config (options) {\n // fallback to original dotenv if DOTENV_KEY is not set\n if (_dotenvKey(options).length === 0) {\n return DotenvModule.configDotenv(options)\n }\n\n const vaultPath = _vaultPath(options)\n\n // dotenvKey exists but .env.vault file does not exist\n if (!vaultPath) {\n _warn(`You set DOTENV_KEY but you are missing a .env.vault file at ${vaultPath}. Did you forget to build it?`)\n\n return DotenvModule.configDotenv(options)\n }\n\n return DotenvModule._configVault(options)\n}\n\nfunction decrypt (encrypted, keyStr) {\n const key = Buffer.from(keyStr.slice(-64), 'hex')\n let ciphertext = Buffer.from(encrypted, 'base64')\n\n const nonce = ciphertext.subarray(0, 12)\n const authTag = ciphertext.subarray(-16)\n ciphertext = ciphertext.subarray(12, -16)\n\n try {\n const aesgcm = crypto.createDecipheriv('aes-256-gcm', key, nonce)\n aesgcm.setAuthTag(authTag)\n return `${aesgcm.update(ciphertext)}${aesgcm.final()}`\n } catch (error) {\n const isRange = error instanceof RangeError\n const invalidKeyLength = error.message === 'Invalid key length'\n const decryptionFailed = error.message === 'Unsupported state or unable to authenticate data'\n\n if (isRange || invalidKeyLength) {\n const err = new Error('INVALID_DOTENV_KEY: It must be 64 characters long (or more)')\n err.code = 'INVALID_DOTENV_KEY'\n throw err\n } else if (decryptionFailed) {\n const err = new Error('DECRYPTION_FAILED: Please check your DOTENV_KEY')\n err.code = 'DECRYPTION_FAILED'\n throw err\n } else {\n throw error\n }\n }\n}\n\n// Populate process.env with parsed values\nfunction populate (processEnv, parsed, options = {}) {\n const debug = Boolean(options && options.debug)\n const override = Boolean(options && options.override)\n\n if (typeof parsed !== 'object') {\n const err = new Error('OBJECT_REQUIRED: Please check the processEnv argument being passed to populate')\n err.code = 'OBJECT_REQUIRED'\n throw err\n }\n\n // Set process.env\n for (const key of Object.keys(parsed)) {\n if (Object.prototype.hasOwnProperty.call(processEnv, key)) {\n if (override === true) {\n processEnv[key] = parsed[key]\n }\n\n if (debug) {\n if (override === true) {\n _debug(`\"${key}\" is already defined and WAS overwritten`)\n } else {\n _debug(`\"${key}\" is already defined and was NOT 
overwritten`)\n }\n }\n } else {\n processEnv[key] = parsed[key]\n }\n }\n}\n\nconst DotenvModule = {\n configDotenv,\n _configVault,\n _parseVault,\n config,\n decrypt,\n parse,\n populate\n}\n\nmodule.exports.configDotenv = DotenvModule.configDotenv\nmodule.exports._configVault = DotenvModule._configVault\nmodule.exports._parseVault = DotenvModule._parseVault\nmodule.exports.config = DotenvModule.config\nmodule.exports.decrypt = DotenvModule.decrypt\nmodule.exports.parse = DotenvModule.parse\nmodule.exports.populate = DotenvModule.populate\n\nmodule.exports = DotenvModule\n","// ../config.js accepts options via environment variables\nconst options = {}\n\nif (process.env.DOTENV_CONFIG_ENCODING != null) {\n options.encoding = process.env.DOTENV_CONFIG_ENCODING\n}\n\nif (process.env.DOTENV_CONFIG_PATH != null) {\n options.path = process.env.DOTENV_CONFIG_PATH\n}\n\nif (process.env.DOTENV_CONFIG_QUIET != null) {\n options.quiet = process.env.DOTENV_CONFIG_QUIET\n}\n\nif (process.env.DOTENV_CONFIG_DEBUG != null) {\n options.debug = process.env.DOTENV_CONFIG_DEBUG\n}\n\nif (process.env.DOTENV_CONFIG_OVERRIDE != null) {\n options.override = process.env.DOTENV_CONFIG_OVERRIDE\n}\n\nif (process.env.DOTENV_CONFIG_DOTENV_KEY != null) {\n options.DOTENV_KEY = process.env.DOTENV_CONFIG_DOTENV_KEY\n}\n\nmodule.exports = options\n","const re = /^dotenv_config_(encoding|path|quiet|debug|override|DOTENV_KEY)=(.+)$/\n\nmodule.exports = function optionMatcher (args) {\n const options = args.reduce(function (acc, cur) {\n const matches = cur.match(re)\n if (matches) {\n acc[matches[1]] = matches[2]\n }\n return acc\n }, {})\n\n if (!('quiet' in options)) {\n options.quiet = 'true'\n }\n\n return options\n}\n","(function () {\n require('./lib/main').config(\n Object.assign(\n {},\n require('./lib/env-options'),\n require('./lib/cli-options')(process.argv)\n )\n 
)\n})()\n"],"names":["require$$0","require$$1","require$$2","options","path","mainModule"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,MAAM,KAAKA;AACX,MAAM,OAAOC;AACb,MAAM,KAAKC;AACX,MAAM,SAAS;AACf,MAAM,cAAc;AAEpB,MAAM,UAAU,YAAY;AAE5B,MAAM,OAAO;AAGb,SAAS,MAAO,KAAK;AACnB,QAAM,MAAM,CAAA;AAGZ,MAAI,QAAQ,IAAI,SAAQ;AAGxB,UAAQ,MAAM,QAAQ,WAAW,IAAI;AAErC,MAAI;AACJ,UAAQ,QAAQ,KAAK,KAAK,KAAK,MAAM,MAAM;AACzC,UAAM,MAAM,MAAM,CAAC;AAGnB,QAAI,QAAS,MAAM,CAAC,KAAK;AAGzB,YAAQ,MAAM,KAAI;AAGlB,UAAM,aAAa,MAAM,CAAC;AAG1B,YAAQ,MAAM,QAAQ,0BAA0B,IAAI;AAGpD,QAAI,eAAe,KAAK;AACtB,cAAQ,MAAM,QAAQ,QAAQ,IAAI;AAClC,cAAQ,MAAM,QAAQ,QAAQ,IAAI;AAAA,IACxC;AAGI,QAAI,GAAG,IAAI;AAAA,EACf;AAEE,SAAO;AACT;AAEA,SAAS,YAAaC,UAAS;AAC7B,EAAAA,WAAUA,YAAW,CAAA;AAErB,QAAM,YAAY,WAAWA,QAAO;AACpC,EAAAA,SAAQ,OAAO;AACf,QAAM,SAAS,aAAa,aAAaA,QAAO;AAChD,MAAI,CAAC,OAAO,QAAQ;AAClB,UAAM,MAAM,IAAI,MAAM,8BAA8B,SAAS,wBAAwB;AACrF,QAAI,OAAO;AACX,UAAM;AAAA,EACV;AAIE,QAAM,OAAO,WAAWA,QAAO,EAAE,MAAM,GAAG;AAC1C,QAAM,SAAS,KAAK;AAEpB,MAAI;AACJ,WAAS,IAAI,GAAG,IAAI,QAAQ,KAAK;AAC/B,QAAI;AAEF,YAAM,MAAM,KAAK,CAAC,EAAE,KAAI;AAGxB,YAAM,QAAQ,cAAc,QAAQ,GAAG;AAGvC,kBAAY,aAAa,QAAQ,MAAM,YAAY,MAAM,GAAG;AAE5D;AAAA,IACN,SAAa,OAAO;AAEd,UAAI,IAAI,KAAK,QAAQ;AACnB,cAAM;AAAA,MACd;AAAA,IAEA;AAAA,EACA;AAGE,SAAO,aAAa,MAAM,SAAS;AACrC;AAEA,SAAS,MAAO,SAAS;AACvB,UAAQ,IAAI,WAAW,OAAO,WAAW,OAAO,EAAE;AACpD;AAEA,SAAS,OAAQ,SAAS;AACxB,UAAQ,IAAI,WAAW,OAAO,YAAY,OAAO,EAAE;AACrD;AAEA,SAAS,KAAM,SAAS;AACtB,UAAQ,IAAI,WAAW,OAAO,KAAK,OAAO,EAAE;AAC9C;AAEA,SAAS,WAAYA,UAAS;AAE5B,MAAIA,YAAWA,SAAQ,cAAcA,SAAQ,WAAW,SAAS,GAAG;AAClE,WAAOA,SAAQ;AAAA,EACnB;AAGE,MAAI,QAAQ,IAAI,cAAc,QAAQ,IAAI,WAAW,SAAS,GAAG;AAC/D,WAAO,QAAQ,IAAI;AAAA,EACvB;AAGE,SAAO;AACT;AAEA,SAAS,cAAe,QAAQ,WAAW;AAEzC,MAAI;AACJ,MAAI;AACF,UAAM,IAAI,IAAI,SAAS;AAAA,EAC3B,SAAW,OAAO;AACd,QAAI,MAAM,SAAS,mBAAmB;AACpC,YAAM,MAAM,IAAI,MAAM,4IAA4I;AAClK,UAAI,OAAO;AACX,YAAM;AAAA,IACZ;AAEI,UAAM;AAAA,EACV;AAGE,QAAM,MAAM,IAAI;AAChB,MAAI,CAAC,KAAK;AACR,UAAM,MAAM,IAAI,MAAM,sCAAsC;AAC5D,QAAI,OAAO;AACX,UAAM;AAAA,EACV;AAGE,QAAM,cAAc,IAAI,aAAa,IAAI,aAAa;AACtD,MAAI,CAAC,aAAa;AAChB,UAAM,MAAM,IAAI,MAAM,8CAA8C;AACpE,QAAI,OAAO;AACX,UAAM;AAAA,EACV;AAGE,QAAM,iBAAiB,gBAAgB,YAAY,YAAW,CAAE;AAChE,QAAM,aAAa,OAAO,OAAO,cAAc;AAC/C,MAAI,CAAC,YAAY;AACf,UAAM,MAAM,IAAI,MAAM,2DAA2D,cAAc,2BAA2B;AAC1H,QAAI,OAAO;AACX,UAAM;AAAA,EACV;AAEE,SAAO,EAAE,YAAY,IAAG;AAC1B;AAEA,SAAS,WAAYA,UAAS;AAC5B,MAAI,oBAAoB;AAExB,MAAIA,YAAWA,SAAQ,QAAQA,SAAQ,KAAK,SAAS,GAAG;AACtD,QAAI,MAAM,QAAQA,SAAQ,IAAI,GAAG;AAC/B,iBAAW,YAAYA,SAAQ,MAAM;AACnC,YAAI,GAAG,WAAW,QAAQ,GAAG;AAC3B,8BAAoB,SAAS,SAAS,QAAQ,IAAI,WAAW,GAAG,QAAQ;AAAA,QAClF;AAAA,MACA;AAAA,IACA,OAAW;AACL,0BAAoBA,SAAQ,KAAK,SAAS,QAAQ,IAAIA,SAAQ,OAAO,GAAGA,SAAQ,IAAI;AAAA,IAC1F;AAAA,EACA,OAAS;AACL,wBAAoB,KAAK,QAAQ,QAAQ,IAAG,GAAI,YAAY;AAAA,EAChE;AAEE,MAAI,GAAG,WAAW,iBAAiB,GAAG;AACpC,WAAO;AAAA,EACX;AAEE,SAAO;AACT;AAEA,SAAS,aAAc,SAAS;AAC9B,SAAO,QAAQ,CAAC,MAAM,MAAM,KAAK,KAAK,GAAG,QAAO,GAAI,QAAQ,MAAM,CAAC,CAAC,IAAI;AAC1E;AAEA,SAAS,aAAcA,UAAS;AAC9B,QAAM,QAAQ,QAAQA,YAAWA,SAAQ,KAAK;AAC9C,QAAM,QAAQA,YAAW,WAAWA,WAAUA,SAAQ,QAAQ;AAE9D,MAAI,SAAS,CAAC,OAAO;AACnB,SAAK,uCAAuC;AAAA,EAChD;AAEE,QAAM,SAAS,aAAa,YAAYA,QAAO;AAE/C,MAAI,aAAa,QAAQ;AACzB,MAAIA,YAAWA,SAAQ,cAAc,MAAM;AACzC,iBAAaA,SAAQ;AAAA,EACzB;AAEE,eAAa,SAAS,YAAY,QAAQA,QAAO;AAEjD,SAAO,EAAE,OAAM;AACjB;AAEA,SAAS,aAAcA,UAAS;AAC9B,QAAM,aAAa,KAAK,QAAQ,QAAQ,IAAG,GAAI,MAAM;AACrD,MAAI,WAAW;AACf,QAAM,QAAQ,QAAQA,YAAWA,SAAQ,KAAK;AAC9C,QAAM,QAAQA,YAAW,WAAWA,WAAUA,SAAQ,QAAQ;AAE9D,MAAIA,YAAWA,SAAQ,UAAU;AAC/B,eAAWA,SAAQ;AAAA,EACvB,OAAS;AACL,QAAI,OAAO;AACT,aAAO,oDAAoD;AAAA,IACjE;AAAA,EACA;AAEE,M
AAI,cAAc,CAAC,UAAU;AAC7B,MAAIA,YAAWA,SAAQ,MAAM;AAC3B,QAAI,CAAC,MAAM,QAAQA,SAAQ,IAAI,GAAG;AAChC,oBAAc,CAAC,aAAaA,SAAQ,IAAI,CAAC;AAAA,IAC/C,OAAW;AACL,oBAAc,CAAA;AACd,iBAAW,YAAYA,SAAQ,MAAM;AACnC,oBAAY,KAAK,aAAa,QAAQ,CAAC;AAAA,MAC/C;AAAA,IACA;AAAA,EACA;AAIE,MAAI;AACJ,QAAM,YAAY,CAAA;AAClB,aAAWC,SAAQ,aAAa;AAC9B,QAAI;AAEF,YAAM,SAAS,aAAa,MAAM,GAAG,aAAaA,OAAM,EAAE,UAAU,CAAC;AAErE,mBAAa,SAAS,WAAW,QAAQD,QAAO;AAAA,IACtD,SAAa,GAAG;AACV,UAAI,OAAO;AACT,eAAO,kBAAkBC,KAAI,IAAI,EAAE,OAAO,EAAE;AAAA,MACpD;AACM,kBAAY;AAAA,IAClB;AAAA,EACA;AAEE,MAAI,aAAa,QAAQ;AACzB,MAAID,YAAWA,SAAQ,cAAc,MAAM;AACzC,iBAAaA,SAAQ;AAAA,EACzB;AAEE,eAAa,SAAS,YAAY,WAAWA,QAAO;AAEpD,MAAI,SAAS,CAAC,OAAO;AACnB,UAAM,YAAY,OAAO,KAAK,SAAS,EAAE;AACzC,UAAM,aAAa,CAAA;AACnB,eAAW,YAAY,aAAa;AAClC,UAAI;AACF,cAAM,WAAW,KAAK,SAAS,QAAQ,IAAG,GAAI,QAAQ;AACtD,mBAAW,KAAK,QAAQ;AAAA,MAChC,SAAe,GAAG;AACV,YAAI,OAAO;AACT,iBAAO,kBAAkB,QAAQ,IAAI,EAAE,OAAO,EAAE;AAAA,QAC1D;AACQ,oBAAY;AAAA,MACpB;AAAA,IACA;AAEI,SAAK,kBAAkB,SAAS,UAAU,WAAW,KAAK,GAAG,CAAC,EAAE;AAAA,EACpE;AAEE,MAAI,WAAW;AACb,WAAO,EAAE,QAAQ,WAAW,OAAO,UAAS;AAAA,EAChD,OAAS;AACL,WAAO,EAAE,QAAQ,UAAS;AAAA,EAC9B;AACA;AAGA,SAAS,OAAQA,UAAS;AAExB,MAAI,WAAWA,QAAO,EAAE,WAAW,GAAG;AACpC,WAAO,aAAa,aAAaA,QAAO;AAAA,EAC5C;AAEE,QAAM,YAAY,WAAWA,QAAO;AAGpC,MAAI,CAAC,WAAW;AACd,UAAM,+DAA+D,SAAS,+BAA+B;AAE7G,WAAO,aAAa,aAAaA,QAAO;AAAA,EAC5C;AAEE,SAAO,aAAa,aAAaA,QAAO;AAC1C;AAEA,SAAS,QAAS,WAAW,QAAQ;AACnC,QAAM,MAAM,OAAO,KAAK,OAAO,MAAM,GAAG,GAAG,KAAK;AAChD,MAAI,aAAa,OAAO,KAAK,WAAW,QAAQ;AAEhD,QAAM,QAAQ,WAAW,SAAS,GAAG,EAAE;AACvC,QAAM,UAAU,WAAW,SAAS,GAAG;AACvC,eAAa,WAAW,SAAS,IAAI,GAAG;AAExC,MAAI;AACF,UAAM,SAAS,OAAO,iBAAiB,eAAe,KAAK,KAAK;AAChE,WAAO,WAAW,OAAO;AACzB,WAAO,GAAG,OAAO,OAAO,UAAU,CAAC,GAAG,OAAO,OAAO;AAAA,EACxD,SAAW,OAAO;AACd,UAAM,UAAU,iBAAiB;AACjC,UAAM,mBAAmB,MAAM,YAAY;AAC3C,UAAM,mBAAmB,MAAM,YAAY;AAE3C,QAAI,WAAW,kBAAkB;AAC/B,YAAM,MAAM,IAAI,MAAM,6DAA6D;AACnF,UAAI,OAAO;AACX,YAAM;AAAA,IACZ,WAAe,kBAAkB;AAC3B,YAAM,MAAM,IAAI,MAAM,iDAAiD;AACvE,UAAI,OAAO;AACX,YAAM;AAAA,IACZ,OAAW;AACL,YAAM;AAAA,IACZ;AAAA,EACA;AACA;AAGA,SAAS,SAAU,YAAY,QAAQA,WAAU,CAAA,GAAI;AACnD,QAAM,QAAQ,QAAQA,YAAWA,SAAQ,KAAK;AAC9C,QAAM,WAAW,QAAQA,YAAWA,SAAQ,QAAQ;AAEpD,MAAI,OAAO,WAAW,UAAU;AAC9B,UAAM,MAAM,IAAI,MAAM,gFAAgF;AACtG,QAAI,OAAO;AACX,UAAM;AAAA,EACV;AAGE,aAAW,OAAO,OAAO,KAAK,MAAM,GAAG;AACrC,QAAI,OAAO,UAAU,eAAe,KAAK,YAAY,GAAG,GAAG;AACzD,UAAI,aAAa,MAAM;AACrB,mBAAW,GAAG,IAAI,OAAO,GAAG;AAAA,MACpC;AAEM,UAAI,OAAO;AACT,YAAI,aAAa,MAAM;AACrB,iBAAO,IAAI,GAAG,0CAA0C;AAAA,QAClE,OAAe;AACL,iBAAO,IAAI,GAAG,8CAA8C;AAAA,QACtE;AAAA,MACA;AAAA,IACA,OAAW;AACL,iBAAW,GAAG,IAAI,OAAO,GAAG;AAAA,IAClC;AAAA,EACA;AACA;AAEA,MAAM,eAAe;AAAA,EACnB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAEA,IAAA,iBAAAE,OAAA,QAAA,eAA8B,aAAa;AAC3C,IAAA,iBAAAA,OAAA,QAAA,eAA8B,aAAa;AAC3C,IAAA,gBAAAA,OAAA,QAAA,cAA6B,aAAa;AAC1C,IAAA,WAAAA,OAAA,QAAA,SAAwB,aAAa;AACrC,IAAA,YAAAA,OAAA,QAAA,UAAyB,aAAa;AACtC,IAAA,UAAAA,OAAA,QAAA,QAAuB,aAAa;AACpC,IAAA,aAAAA,OAAA,QAAA,WAA0B,aAAa;AAEvCA,OAAA,UAAiB;;;AChYjB,MAAM,UAAU,CAAA;AAEhB,IAAI,QAAQ,IAAI,0BAA0B,MAAM;AAC9C,UAAQ,WAAW,QAAQ,IAAI;AACjC;AAEA,IAAI,QAAQ,IAAI,sBAAsB,MAAM;AAC1C,UAAQ,OAAO,QAAQ,IAAI;AAC7B;AAEA,IAAI,QAAQ,IAAI,uBAAuB,MAAM;AAC3C,UAAQ,QAAQ,QAAQ,IAAI;AAC9B;AAEA,IAAI,QAAQ,IAAI,uBAAuB,MAAM;AAC3C,UAAQ,QAAQ,QAAQ,IAAI;AAC9B;AAEA,IAAI,QAAQ,IAAI,0BAA0B,MAAM;AAC9C,UAAQ,WAAW,QAAQ,IAAI;AACjC;AAEA,IAAI,QAAQ,IAAI,4BAA4B,MAAM;AAChD,UAAQ,aAAa,QAAQ,IAAI;AACnC;AAEA,IAAA,aAAiB;;AC3BjB,MAAM,KAAK;AAEX,IAAA,aAAiB,SAAS,cAAe,MAAM;AAC7C,QAAMF,WAAU,KAAK,OAAO,SAAU,KAAK,KAAK;AAC9C,UAAM,UAAU,IAAI,MAAM,EAAE;AAC5B,QAAI,SAAS;AACX,UAAI,QAAQ,CAAC,CAAC,IAAI,QAAQ,CAAC;AAAA,IACjC;AACI,WA
AO;AAAA,EACX,GAAK,CAAA,CAAE;AAEL,MAAI,EAAE,WAAWA,WAAU;AACzB,IAAAA,SAAQ,QAAQ;AAAA,EACpB;AAEE,SAAOA;AACT;;CChBC,WAAY;AACXH,cAAsB;AAAA,IACpB,OAAO;AAAA,MACL,CAAA;AAAA,MACAC;AAAAA,MACAC,WAA6B,QAAQ,IAAI;AAAA,IAC/C;AAAA,EACA;AACA,GAAC;","x_google_ignoreList":[0,1,2,3]}
+ {"version":3,"file":"config-Bax6Ofp5.js","sources":["../node_modules/dotenv/lib/main.js","../node_modules/dotenv/lib/env-options.js","../node_modules/dotenv/lib/cli-options.js","../node_modules/dotenv/config.js"],"sourcesContent":["const fs = require('fs')\nconst path = require('path')\nconst os = require('os')\nconst crypto = require('crypto')\nconst packageJson = require('../package.json')\n\nconst version = packageJson.version\n\nconst LINE = /(?:^|^)\\s*(?:export\\s+)?([\\w.-]+)(?:\\s*=\\s*?|:\\s+?)(\\s*'(?:\\\\'|[^'])*'|\\s*\"(?:\\\\\"|[^\"])*\"|\\s*`(?:\\\\`|[^`])*`|[^#\\r\\n]+)?\\s*(?:#.*)?(?:$|$)/mg\n\n// Parse src into an Object\nfunction parse (src) {\n const obj = {}\n\n // Convert buffer to string\n let lines = src.toString()\n\n // Convert line breaks to same format\n lines = lines.replace(/\\r\\n?/mg, '\\n')\n\n let match\n while ((match = LINE.exec(lines)) != null) {\n const key = match[1]\n\n // Default undefined or null to empty string\n let value = (match[2] || '')\n\n // Remove whitespace\n value = value.trim()\n\n // Check if double quoted\n const maybeQuote = value[0]\n\n // Remove surrounding quotes\n value = value.replace(/^(['\"`])([\\s\\S]*)\\1$/mg, '$2')\n\n // Expand newlines if double quoted\n if (maybeQuote === '\"') {\n value = value.replace(/\\\\n/g, '\\n')\n value = value.replace(/\\\\r/g, '\\r')\n }\n\n // Add to object\n obj[key] = value\n }\n\n return obj\n}\n\nfunction _parseVault (options) {\n options = options || {}\n\n const vaultPath = _vaultPath(options)\n options.path = vaultPath // parse .env.vault\n const result = DotenvModule.configDotenv(options)\n if (!result.parsed) {\n const err = new Error(`MISSING_DATA: Cannot parse ${vaultPath} for an unknown reason`)\n err.code = 'MISSING_DATA'\n throw err\n }\n\n // handle scenario for comma separated keys - for use with key rotation\n // example: DOTENV_KEY=\"dotenv://:key_1234@dotenvx.com/vault/.env.vault?environment=prod,dotenv://:key_7890@dotenvx.com/vault/.env.vault?environment=prod\"\n const keys = _dotenvKey(options).split(',')\n const length = keys.length\n\n let decrypted\n for (let i = 0; i < length; i++) {\n try {\n // Get full key\n const key = keys[i].trim()\n\n // Get instructions for decrypt\n const attrs = _instructions(result, key)\n\n // Decrypt\n decrypted = DotenvModule.decrypt(attrs.ciphertext, attrs.key)\n\n break\n } catch (error) {\n // last key\n if (i + 1 >= length) {\n throw error\n }\n // try next key\n }\n }\n\n // Parse decrypted .env string\n return DotenvModule.parse(decrypted)\n}\n\nfunction _warn (message) {\n console.log(`[dotenv@${version}][WARN] ${message}`)\n}\n\nfunction _debug (message) {\n console.log(`[dotenv@${version}][DEBUG] ${message}`)\n}\n\nfunction _log (message) {\n console.log(`[dotenv@${version}] ${message}`)\n}\n\nfunction _dotenvKey (options) {\n // prioritize developer directly setting options.DOTENV_KEY\n if (options && options.DOTENV_KEY && options.DOTENV_KEY.length > 0) {\n return options.DOTENV_KEY\n }\n\n // secondary infra already contains a DOTENV_KEY environment variable\n if (process.env.DOTENV_KEY && process.env.DOTENV_KEY.length > 0) {\n return process.env.DOTENV_KEY\n }\n\n // fallback to empty string\n return ''\n}\n\nfunction _instructions (result, dotenvKey) {\n // Parse DOTENV_KEY. Format is a URI\n let uri\n try {\n uri = new URL(dotenvKey)\n } catch (error) {\n if (error.code === 'ERR_INVALID_URL') {\n const err = new Error('INVALID_DOTENV_KEY: Wrong format. 
Must be in valid uri format like dotenv://:key_1234@dotenvx.com/vault/.env.vault?environment=development')\n err.code = 'INVALID_DOTENV_KEY'\n throw err\n }\n\n throw error\n }\n\n // Get decrypt key\n const key = uri.password\n if (!key) {\n const err = new Error('INVALID_DOTENV_KEY: Missing key part')\n err.code = 'INVALID_DOTENV_KEY'\n throw err\n }\n\n // Get environment\n const environment = uri.searchParams.get('environment')\n if (!environment) {\n const err = new Error('INVALID_DOTENV_KEY: Missing environment part')\n err.code = 'INVALID_DOTENV_KEY'\n throw err\n }\n\n // Get ciphertext payload\n const environmentKey = `DOTENV_VAULT_${environment.toUpperCase()}`\n const ciphertext = result.parsed[environmentKey] // DOTENV_VAULT_PRODUCTION\n if (!ciphertext) {\n const err = new Error(`NOT_FOUND_DOTENV_ENVIRONMENT: Cannot locate environment ${environmentKey} in your .env.vault file.`)\n err.code = 'NOT_FOUND_DOTENV_ENVIRONMENT'\n throw err\n }\n\n return { ciphertext, key }\n}\n\nfunction _vaultPath (options) {\n let possibleVaultPath = null\n\n if (options && options.path && options.path.length > 0) {\n if (Array.isArray(options.path)) {\n for (const filepath of options.path) {\n if (fs.existsSync(filepath)) {\n possibleVaultPath = filepath.endsWith('.vault') ? filepath : `${filepath}.vault`\n }\n }\n } else {\n possibleVaultPath = options.path.endsWith('.vault') ? options.path : `${options.path}.vault`\n }\n } else {\n possibleVaultPath = path.resolve(process.cwd(), '.env.vault')\n }\n\n if (fs.existsSync(possibleVaultPath)) {\n return possibleVaultPath\n }\n\n return null\n}\n\nfunction _resolveHome (envPath) {\n return envPath[0] === '~' ? path.join(os.homedir(), envPath.slice(1)) : envPath\n}\n\nfunction _configVault (options) {\n const debug = Boolean(options && options.debug)\n const quiet = options && 'quiet' in options ? options.quiet : true\n\n if (debug || !quiet) {\n _log('Loading env from encrypted .env.vault')\n }\n\n const parsed = DotenvModule._parseVault(options)\n\n let processEnv = process.env\n if (options && options.processEnv != null) {\n processEnv = options.processEnv\n }\n\n DotenvModule.populate(processEnv, parsed, options)\n\n return { parsed }\n}\n\nfunction configDotenv (options) {\n const dotenvPath = path.resolve(process.cwd(), '.env')\n let encoding = 'utf8'\n const debug = Boolean(options && options.debug)\n const quiet = options && 'quiet' in options ? options.quiet : true\n\n if (options && options.encoding) {\n encoding = options.encoding\n } else {\n if (debug) {\n _debug('No encoding is specified. UTF-8 is used by default')\n }\n }\n\n let optionPaths = [dotenvPath] // default, look for .env\n if (options && options.path) {\n if (!Array.isArray(options.path)) {\n optionPaths = [_resolveHome(options.path)]\n } else {\n optionPaths = [] // reset default\n for (const filepath of options.path) {\n optionPaths.push(_resolveHome(filepath))\n }\n }\n }\n\n // Build the parsed data in a temporary object (because we need to return it). 
Once we have the final\n // parsed data, we will combine it with process.env (or options.processEnv if provided).\n let lastError\n const parsedAll = {}\n for (const path of optionPaths) {\n try {\n // Specifying an encoding returns a string instead of a buffer\n const parsed = DotenvModule.parse(fs.readFileSync(path, { encoding }))\n\n DotenvModule.populate(parsedAll, parsed, options)\n } catch (e) {\n if (debug) {\n _debug(`Failed to load ${path} ${e.message}`)\n }\n lastError = e\n }\n }\n\n let processEnv = process.env\n if (options && options.processEnv != null) {\n processEnv = options.processEnv\n }\n\n DotenvModule.populate(processEnv, parsedAll, options)\n\n if (debug || !quiet) {\n const keysCount = Object.keys(parsedAll).length\n const shortPaths = []\n for (const filePath of optionPaths) {\n try {\n const relative = path.relative(process.cwd(), filePath)\n shortPaths.push(relative)\n } catch (e) {\n if (debug) {\n _debug(`Failed to load ${filePath} ${e.message}`)\n }\n lastError = e\n }\n }\n\n _log(`injecting env (${keysCount}) from ${shortPaths.join(',')}`)\n }\n\n if (lastError) {\n return { parsed: parsedAll, error: lastError }\n } else {\n return { parsed: parsedAll }\n }\n}\n\n// Populates process.env from .env file\nfunction config (options) {\n // fallback to original dotenv if DOTENV_KEY is not set\n if (_dotenvKey(options).length === 0) {\n return DotenvModule.configDotenv(options)\n }\n\n const vaultPath = _vaultPath(options)\n\n // dotenvKey exists but .env.vault file does not exist\n if (!vaultPath) {\n _warn(`You set DOTENV_KEY but you are missing a .env.vault file at ${vaultPath}. Did you forget to build it?`)\n\n return DotenvModule.configDotenv(options)\n }\n\n return DotenvModule._configVault(options)\n}\n\nfunction decrypt (encrypted, keyStr) {\n const key = Buffer.from(keyStr.slice(-64), 'hex')\n let ciphertext = Buffer.from(encrypted, 'base64')\n\n const nonce = ciphertext.subarray(0, 12)\n const authTag = ciphertext.subarray(-16)\n ciphertext = ciphertext.subarray(12, -16)\n\n try {\n const aesgcm = crypto.createDecipheriv('aes-256-gcm', key, nonce)\n aesgcm.setAuthTag(authTag)\n return `${aesgcm.update(ciphertext)}${aesgcm.final()}`\n } catch (error) {\n const isRange = error instanceof RangeError\n const invalidKeyLength = error.message === 'Invalid key length'\n const decryptionFailed = error.message === 'Unsupported state or unable to authenticate data'\n\n if (isRange || invalidKeyLength) {\n const err = new Error('INVALID_DOTENV_KEY: It must be 64 characters long (or more)')\n err.code = 'INVALID_DOTENV_KEY'\n throw err\n } else if (decryptionFailed) {\n const err = new Error('DECRYPTION_FAILED: Please check your DOTENV_KEY')\n err.code = 'DECRYPTION_FAILED'\n throw err\n } else {\n throw error\n }\n }\n}\n\n// Populate process.env with parsed values\nfunction populate (processEnv, parsed, options = {}) {\n const debug = Boolean(options && options.debug)\n const override = Boolean(options && options.override)\n\n if (typeof parsed !== 'object') {\n const err = new Error('OBJECT_REQUIRED: Please check the processEnv argument being passed to populate')\n err.code = 'OBJECT_REQUIRED'\n throw err\n }\n\n // Set process.env\n for (const key of Object.keys(parsed)) {\n if (Object.prototype.hasOwnProperty.call(processEnv, key)) {\n if (override === true) {\n processEnv[key] = parsed[key]\n }\n\n if (debug) {\n if (override === true) {\n _debug(`\"${key}\" is already defined and WAS overwritten`)\n } else {\n _debug(`\"${key}\" is already defined and was NOT 
overwritten`)\n }\n }\n } else {\n processEnv[key] = parsed[key]\n }\n }\n}\n\nconst DotenvModule = {\n configDotenv,\n _configVault,\n _parseVault,\n config,\n decrypt,\n parse,\n populate\n}\n\nmodule.exports.configDotenv = DotenvModule.configDotenv\nmodule.exports._configVault = DotenvModule._configVault\nmodule.exports._parseVault = DotenvModule._parseVault\nmodule.exports.config = DotenvModule.config\nmodule.exports.decrypt = DotenvModule.decrypt\nmodule.exports.parse = DotenvModule.parse\nmodule.exports.populate = DotenvModule.populate\n\nmodule.exports = DotenvModule\n","// ../config.js accepts options via environment variables\nconst options = {}\n\nif (process.env.DOTENV_CONFIG_ENCODING != null) {\n options.encoding = process.env.DOTENV_CONFIG_ENCODING\n}\n\nif (process.env.DOTENV_CONFIG_PATH != null) {\n options.path = process.env.DOTENV_CONFIG_PATH\n}\n\nif (process.env.DOTENV_CONFIG_QUIET != null) {\n options.quiet = process.env.DOTENV_CONFIG_QUIET\n}\n\nif (process.env.DOTENV_CONFIG_DEBUG != null) {\n options.debug = process.env.DOTENV_CONFIG_DEBUG\n}\n\nif (process.env.DOTENV_CONFIG_OVERRIDE != null) {\n options.override = process.env.DOTENV_CONFIG_OVERRIDE\n}\n\nif (process.env.DOTENV_CONFIG_DOTENV_KEY != null) {\n options.DOTENV_KEY = process.env.DOTENV_CONFIG_DOTENV_KEY\n}\n\nmodule.exports = options\n","const re = /^dotenv_config_(encoding|path|quiet|debug|override|DOTENV_KEY)=(.+)$/\n\nmodule.exports = function optionMatcher (args) {\n const options = args.reduce(function (acc, cur) {\n const matches = cur.match(re)\n if (matches) {\n acc[matches[1]] = matches[2]\n }\n return acc\n }, {})\n\n if (!('quiet' in options)) {\n options.quiet = 'true'\n }\n\n return options\n}\n","(function () {\n require('./lib/main').config(\n Object.assign(\n {},\n require('./lib/env-options'),\n require('./lib/cli-options')(process.argv)\n )\n 
)\n})()\n"],"names":["require$$0","require$$1","require$$2","options","path","mainModule"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,MAAM,KAAKA;AACX,MAAM,OAAOC;AACb,MAAM,KAAKC;AACX,MAAM,SAAS;AACf,MAAM,cAAc;AAEpB,MAAM,UAAU,YAAY;AAE5B,MAAM,OAAO;AAGb,SAAS,MAAO,KAAK;AACnB,QAAM,MAAM,CAAA;AAGZ,MAAI,QAAQ,IAAI,SAAQ;AAGxB,UAAQ,MAAM,QAAQ,WAAW,IAAI;AAErC,MAAI;AACJ,UAAQ,QAAQ,KAAK,KAAK,KAAK,MAAM,MAAM;AACzC,UAAM,MAAM,MAAM,CAAC;AAGnB,QAAI,QAAS,MAAM,CAAC,KAAK;AAGzB,YAAQ,MAAM,KAAI;AAGlB,UAAM,aAAa,MAAM,CAAC;AAG1B,YAAQ,MAAM,QAAQ,0BAA0B,IAAI;AAGpD,QAAI,eAAe,KAAK;AACtB,cAAQ,MAAM,QAAQ,QAAQ,IAAI;AAClC,cAAQ,MAAM,QAAQ,QAAQ,IAAI;AAAA,IACxC;AAGI,QAAI,GAAG,IAAI;AAAA,EACf;AAEE,SAAO;AACT;AAEA,SAAS,YAAaC,UAAS;AAC7B,EAAAA,WAAUA,YAAW,CAAA;AAErB,QAAM,YAAY,WAAWA,QAAO;AACpC,EAAAA,SAAQ,OAAO;AACf,QAAM,SAAS,aAAa,aAAaA,QAAO;AAChD,MAAI,CAAC,OAAO,QAAQ;AAClB,UAAM,MAAM,IAAI,MAAM,8BAA8B,SAAS,wBAAwB;AACrF,QAAI,OAAO;AACX,UAAM;AAAA,EACV;AAIE,QAAM,OAAO,WAAWA,QAAO,EAAE,MAAM,GAAG;AAC1C,QAAM,SAAS,KAAK;AAEpB,MAAI;AACJ,WAAS,IAAI,GAAG,IAAI,QAAQ,KAAK;AAC/B,QAAI;AAEF,YAAM,MAAM,KAAK,CAAC,EAAE,KAAI;AAGxB,YAAM,QAAQ,cAAc,QAAQ,GAAG;AAGvC,kBAAY,aAAa,QAAQ,MAAM,YAAY,MAAM,GAAG;AAE5D;AAAA,IACN,SAAa,OAAO;AAEd,UAAI,IAAI,KAAK,QAAQ;AACnB,cAAM;AAAA,MACd;AAAA,IAEA;AAAA,EACA;AAGE,SAAO,aAAa,MAAM,SAAS;AACrC;AAEA,SAAS,MAAO,SAAS;AACvB,UAAQ,IAAI,WAAW,OAAO,WAAW,OAAO,EAAE;AACpD;AAEA,SAAS,OAAQ,SAAS;AACxB,UAAQ,IAAI,WAAW,OAAO,YAAY,OAAO,EAAE;AACrD;AAEA,SAAS,KAAM,SAAS;AACtB,UAAQ,IAAI,WAAW,OAAO,KAAK,OAAO,EAAE;AAC9C;AAEA,SAAS,WAAYA,UAAS;AAE5B,MAAIA,YAAWA,SAAQ,cAAcA,SAAQ,WAAW,SAAS,GAAG;AAClE,WAAOA,SAAQ;AAAA,EACnB;AAGE,MAAI,QAAQ,IAAI,cAAc,QAAQ,IAAI,WAAW,SAAS,GAAG;AAC/D,WAAO,QAAQ,IAAI;AAAA,EACvB;AAGE,SAAO;AACT;AAEA,SAAS,cAAe,QAAQ,WAAW;AAEzC,MAAI;AACJ,MAAI;AACF,UAAM,IAAI,IAAI,SAAS;AAAA,EAC3B,SAAW,OAAO;AACd,QAAI,MAAM,SAAS,mBAAmB;AACpC,YAAM,MAAM,IAAI,MAAM,4IAA4I;AAClK,UAAI,OAAO;AACX,YAAM;AAAA,IACZ;AAEI,UAAM;AAAA,EACV;AAGE,QAAM,MAAM,IAAI;AAChB,MAAI,CAAC,KAAK;AACR,UAAM,MAAM,IAAI,MAAM,sCAAsC;AAC5D,QAAI,OAAO;AACX,UAAM;AAAA,EACV;AAGE,QAAM,cAAc,IAAI,aAAa,IAAI,aAAa;AACtD,MAAI,CAAC,aAAa;AAChB,UAAM,MAAM,IAAI,MAAM,8CAA8C;AACpE,QAAI,OAAO;AACX,UAAM;AAAA,EACV;AAGE,QAAM,iBAAiB,gBAAgB,YAAY,YAAW,CAAE;AAChE,QAAM,aAAa,OAAO,OAAO,cAAc;AAC/C,MAAI,CAAC,YAAY;AACf,UAAM,MAAM,IAAI,MAAM,2DAA2D,cAAc,2BAA2B;AAC1H,QAAI,OAAO;AACX,UAAM;AAAA,EACV;AAEE,SAAO,EAAE,YAAY,IAAG;AAC1B;AAEA,SAAS,WAAYA,UAAS;AAC5B,MAAI,oBAAoB;AAExB,MAAIA,YAAWA,SAAQ,QAAQA,SAAQ,KAAK,SAAS,GAAG;AACtD,QAAI,MAAM,QAAQA,SAAQ,IAAI,GAAG;AAC/B,iBAAW,YAAYA,SAAQ,MAAM;AACnC,YAAI,GAAG,WAAW,QAAQ,GAAG;AAC3B,8BAAoB,SAAS,SAAS,QAAQ,IAAI,WAAW,GAAG,QAAQ;AAAA,QAClF;AAAA,MACA;AAAA,IACA,OAAW;AACL,0BAAoBA,SAAQ,KAAK,SAAS,QAAQ,IAAIA,SAAQ,OAAO,GAAGA,SAAQ,IAAI;AAAA,IAC1F;AAAA,EACA,OAAS;AACL,wBAAoB,KAAK,QAAQ,QAAQ,IAAG,GAAI,YAAY;AAAA,EAChE;AAEE,MAAI,GAAG,WAAW,iBAAiB,GAAG;AACpC,WAAO;AAAA,EACX;AAEE,SAAO;AACT;AAEA,SAAS,aAAc,SAAS;AAC9B,SAAO,QAAQ,CAAC,MAAM,MAAM,KAAK,KAAK,GAAG,QAAO,GAAI,QAAQ,MAAM,CAAC,CAAC,IAAI;AAC1E;AAEA,SAAS,aAAcA,UAAS;AAC9B,QAAM,QAAQ,QAAQA,YAAWA,SAAQ,KAAK;AAC9C,QAAM,QAAQA,YAAW,WAAWA,WAAUA,SAAQ,QAAQ;AAE9D,MAAI,SAAS,CAAC,OAAO;AACnB,SAAK,uCAAuC;AAAA,EAChD;AAEE,QAAM,SAAS,aAAa,YAAYA,QAAO;AAE/C,MAAI,aAAa,QAAQ;AACzB,MAAIA,YAAWA,SAAQ,cAAc,MAAM;AACzC,iBAAaA,SAAQ;AAAA,EACzB;AAEE,eAAa,SAAS,YAAY,QAAQA,QAAO;AAEjD,SAAO,EAAE,OAAM;AACjB;AAEA,SAAS,aAAcA,UAAS;AAC9B,QAAM,aAAa,KAAK,QAAQ,QAAQ,IAAG,GAAI,MAAM;AACrD,MAAI,WAAW;AACf,QAAM,QAAQ,QAAQA,YAAWA,SAAQ,KAAK;AAC9C,QAAM,QAAQA,YAAW,WAAWA,WAAUA,SAAQ,QAAQ;AAE9D,MAAIA,YAAWA,SAAQ,UAAU;AAC/B,eAAWA,SAAQ;AAAA,EACvB,OAAS;AACL,QAAI,OAAO;AACT,aAAO,oDAAoD;AAAA,IACjE;AAAA,EACA;AAEE,M
AAI,cAAc,CAAC,UAAU;AAC7B,MAAIA,YAAWA,SAAQ,MAAM;AAC3B,QAAI,CAAC,MAAM,QAAQA,SAAQ,IAAI,GAAG;AAChC,oBAAc,CAAC,aAAaA,SAAQ,IAAI,CAAC;AAAA,IAC/C,OAAW;AACL,oBAAc,CAAA;AACd,iBAAW,YAAYA,SAAQ,MAAM;AACnC,oBAAY,KAAK,aAAa,QAAQ,CAAC;AAAA,MAC/C;AAAA,IACA;AAAA,EACA;AAIE,MAAI;AACJ,QAAM,YAAY,CAAA;AAClB,aAAWC,SAAQ,aAAa;AAC9B,QAAI;AAEF,YAAM,SAAS,aAAa,MAAM,GAAG,aAAaA,OAAM,EAAE,UAAU,CAAC;AAErE,mBAAa,SAAS,WAAW,QAAQD,QAAO;AAAA,IACtD,SAAa,GAAG;AACV,UAAI,OAAO;AACT,eAAO,kBAAkBC,KAAI,IAAI,EAAE,OAAO,EAAE;AAAA,MACpD;AACM,kBAAY;AAAA,IAClB;AAAA,EACA;AAEE,MAAI,aAAa,QAAQ;AACzB,MAAID,YAAWA,SAAQ,cAAc,MAAM;AACzC,iBAAaA,SAAQ;AAAA,EACzB;AAEE,eAAa,SAAS,YAAY,WAAWA,QAAO;AAEpD,MAAI,SAAS,CAAC,OAAO;AACnB,UAAM,YAAY,OAAO,KAAK,SAAS,EAAE;AACzC,UAAM,aAAa,CAAA;AACnB,eAAW,YAAY,aAAa;AAClC,UAAI;AACF,cAAM,WAAW,KAAK,SAAS,QAAQ,IAAG,GAAI,QAAQ;AACtD,mBAAW,KAAK,QAAQ;AAAA,MAChC,SAAe,GAAG;AACV,YAAI,OAAO;AACT,iBAAO,kBAAkB,QAAQ,IAAI,EAAE,OAAO,EAAE;AAAA,QAC1D;AACQ,oBAAY;AAAA,MACpB;AAAA,IACA;AAEI,SAAK,kBAAkB,SAAS,UAAU,WAAW,KAAK,GAAG,CAAC,EAAE;AAAA,EACpE;AAEE,MAAI,WAAW;AACb,WAAO,EAAE,QAAQ,WAAW,OAAO,UAAS;AAAA,EAChD,OAAS;AACL,WAAO,EAAE,QAAQ,UAAS;AAAA,EAC9B;AACA;AAGA,SAAS,OAAQA,UAAS;AAExB,MAAI,WAAWA,QAAO,EAAE,WAAW,GAAG;AACpC,WAAO,aAAa,aAAaA,QAAO;AAAA,EAC5C;AAEE,QAAM,YAAY,WAAWA,QAAO;AAGpC,MAAI,CAAC,WAAW;AACd,UAAM,+DAA+D,SAAS,+BAA+B;AAE7G,WAAO,aAAa,aAAaA,QAAO;AAAA,EAC5C;AAEE,SAAO,aAAa,aAAaA,QAAO;AAC1C;AAEA,SAAS,QAAS,WAAW,QAAQ;AACnC,QAAM,MAAM,OAAO,KAAK,OAAO,MAAM,GAAG,GAAG,KAAK;AAChD,MAAI,aAAa,OAAO,KAAK,WAAW,QAAQ;AAEhD,QAAM,QAAQ,WAAW,SAAS,GAAG,EAAE;AACvC,QAAM,UAAU,WAAW,SAAS,GAAG;AACvC,eAAa,WAAW,SAAS,IAAI,GAAG;AAExC,MAAI;AACF,UAAM,SAAS,OAAO,iBAAiB,eAAe,KAAK,KAAK;AAChE,WAAO,WAAW,OAAO;AACzB,WAAO,GAAG,OAAO,OAAO,UAAU,CAAC,GAAG,OAAO,OAAO;AAAA,EACxD,SAAW,OAAO;AACd,UAAM,UAAU,iBAAiB;AACjC,UAAM,mBAAmB,MAAM,YAAY;AAC3C,UAAM,mBAAmB,MAAM,YAAY;AAE3C,QAAI,WAAW,kBAAkB;AAC/B,YAAM,MAAM,IAAI,MAAM,6DAA6D;AACnF,UAAI,OAAO;AACX,YAAM;AAAA,IACZ,WAAe,kBAAkB;AAC3B,YAAM,MAAM,IAAI,MAAM,iDAAiD;AACvE,UAAI,OAAO;AACX,YAAM;AAAA,IACZ,OAAW;AACL,YAAM;AAAA,IACZ;AAAA,EACA;AACA;AAGA,SAAS,SAAU,YAAY,QAAQA,WAAU,CAAA,GAAI;AACnD,QAAM,QAAQ,QAAQA,YAAWA,SAAQ,KAAK;AAC9C,QAAM,WAAW,QAAQA,YAAWA,SAAQ,QAAQ;AAEpD,MAAI,OAAO,WAAW,UAAU;AAC9B,UAAM,MAAM,IAAI,MAAM,gFAAgF;AACtG,QAAI,OAAO;AACX,UAAM;AAAA,EACV;AAGE,aAAW,OAAO,OAAO,KAAK,MAAM,GAAG;AACrC,QAAI,OAAO,UAAU,eAAe,KAAK,YAAY,GAAG,GAAG;AACzD,UAAI,aAAa,MAAM;AACrB,mBAAW,GAAG,IAAI,OAAO,GAAG;AAAA,MACpC;AAEM,UAAI,OAAO;AACT,YAAI,aAAa,MAAM;AACrB,iBAAO,IAAI,GAAG,0CAA0C;AAAA,QAClE,OAAe;AACL,iBAAO,IAAI,GAAG,8CAA8C;AAAA,QACtE;AAAA,MACA;AAAA,IACA,OAAW;AACL,iBAAW,GAAG,IAAI,OAAO,GAAG;AAAA,IAClC;AAAA,EACA;AACA;AAEA,MAAM,eAAe;AAAA,EACnB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAEA,IAAA,iBAAAE,OAAA,QAAA,eAA8B,aAAa;AAC3C,IAAA,iBAAAA,OAAA,QAAA,eAA8B,aAAa;AAC3C,IAAA,gBAAAA,OAAA,QAAA,cAA6B,aAAa;AAC1C,IAAA,WAAAA,OAAA,QAAA,SAAwB,aAAa;AACrC,IAAA,YAAAA,OAAA,QAAA,UAAyB,aAAa;AACtC,IAAA,UAAAA,OAAA,QAAA,QAAuB,aAAa;AACpC,IAAA,aAAAA,OAAA,QAAA,WAA0B,aAAa;AAEvCA,OAAA,UAAiB;;;AChYjB,MAAM,UAAU,CAAA;AAEhB,IAAI,QAAQ,IAAI,0BAA0B,MAAM;AAC9C,UAAQ,WAAW,QAAQ,IAAI;AACjC;AAEA,IAAI,QAAQ,IAAI,sBAAsB,MAAM;AAC1C,UAAQ,OAAO,QAAQ,IAAI;AAC7B;AAEA,IAAI,QAAQ,IAAI,uBAAuB,MAAM;AAC3C,UAAQ,QAAQ,QAAQ,IAAI;AAC9B;AAEA,IAAI,QAAQ,IAAI,uBAAuB,MAAM;AAC3C,UAAQ,QAAQ,QAAQ,IAAI;AAC9B;AAEA,IAAI,QAAQ,IAAI,0BAA0B,MAAM;AAC9C,UAAQ,WAAW,QAAQ,IAAI;AACjC;AAEA,IAAI,QAAQ,IAAI,4BAA4B,MAAM;AAChD,UAAQ,aAAa,QAAQ,IAAI;AACnC;AAEA,IAAA,aAAiB;;AC3BjB,MAAM,KAAK;AAEX,IAAA,aAAiB,SAAS,cAAe,MAAM;AAC7C,QAAMF,WAAU,KAAK,OAAO,SAAU,KAAK,KAAK;AAC9C,UAAM,UAAU,IAAI,MAAM,EAAE;AAC5B,QAAI,SAAS;AACX,UAAI,QAAQ,CAAC,CAAC,IAAI,QAAQ,CAAC;AAAA,IACjC;AACI,WA
AO;AAAA,EACX,GAAK,CAAA,CAAE;AAEL,MAAI,EAAE,WAAWA,WAAU;AACzB,IAAAA,SAAQ,QAAQ;AAAA,EACpB;AAEE,SAAOA;AACT;;CChBC,WAAY;AACXH,cAAsB;AAAA,IACpB,OAAO;AAAA,MACL,CAAA;AAAA,MACAC;AAAAA,MACAC,WAA6B,QAAQ,IAAI;AAAA,IAC/C;AAAA,EACA;AACA,GAAC;","x_google_ignoreList":[0,1,2,3]}
package/dist/{connector-factory-BGykiICT.js → connector-factory-BaMIlES8.js} CHANGED
@@ -1,4 +1,4 @@
- import { l as logger } from "./logger-DeKxCUPp.js";
+ import { l as logger } from "./logger-DW5fyhVS.js";
  import { pathToFileURL } from "url";
  const createConnectorFromPath = async (connectorPath) => {
  try {
@@ -27,4 +27,4 @@ const createConnectorFromPath = async (connectorPath) => {
  export {
  createConnectorFromPath as c
  };
- //# sourceMappingURL=connector-factory-BGykiICT.js.map
+ //# sourceMappingURL=connector-factory-BaMIlES8.js.map
package/dist/{connector-factory-BGykiICT.js.map → connector-factory-BaMIlES8.js.map} CHANGED
@@ -1 +1 @@
- {"version":3,"file":"connector-factory-BGykiICT.js","sources":["../src/utils/connector-factory.ts"],"sourcesContent":["import { IERPConnector } from \"../types/erp-connector\";\nimport logger from \"../services/reporting-service/logger\";\nimport { pathToFileURL } from \"url\";\n\n/**\n * Helper function to dynamically import and create connector instance from a file path\n * @param connectorPath - The file path to the connector module\n * @returns A new instance of the IERPConnector\n */\nexport const createConnectorFromPath = async (\n connectorPath: string\n): Promise<IERPConnector> => {\n try {\n // Some detailed debug logging to help with troubleshooting multi-platform connector paths\n const pathParts = connectorPath.split('/');\n const filename = pathParts[pathParts.length - 1];\n\n logger.debug(\"createConnectorFromPath:\", {\n connectorPath,\n pathParts,\n filename,\n finalImport: pathToFileURL(connectorPath).href,\n });\n\n const connectorModule = await import(pathToFileURL(connectorPath).href);\n\n // Get the default export or named export\n const ConnectorClass =\n connectorModule.default ||\n connectorModule[Object.keys(connectorModule)[0]];\n\n if (!ConnectorClass) {\n throw new Error(`No connector class found in module: ${connectorPath}`);\n }\n\n // Create new instance of the connector\n return new ConnectorClass();\n } catch (error) {\n logger.error(\n `Failed to create connector instance from path: ${connectorPath}`,\n { error }\n );\n throw error;\n }\n};"],"names":[],"mappings":";;AASO,MAAM,0BAA0B,OACrC,kBAC2B;AAC3B,MAAI;AAEF,UAAM,YAAY,cAAc,MAAM,GAAG;AACzC,UAAM,WAAW,UAAU,UAAU,SAAS,CAAC;AAE/C,WAAO,MAAM,4BAA4B;AAAA,MACvC;AAAA,MACA;AAAA,MACA;AAAA,MACA,aAAa,cAAc,aAAa,EAAE;AAAA,IAAA,CAC3C;AAED,UAAM,kBAAkB,MAAM,OAAO,cAAc,aAAa,EAAE;AAGlE,UAAM,iBACJ,gBAAgB,WAChB,gBAAgB,OAAO,KAAK,eAAe,EAAE,CAAC,CAAC;AAEjD,QAAI,CAAC,gBAAgB;AACnB,YAAM,IAAI,MAAM,uCAAuC,aAAa,EAAE;AAAA,IACxE;AAGA,WAAO,IAAI,eAAA;AAAA,EACb,SAAS,OAAO;AACd,WAAO;AAAA,MACL,kDAAkD,aAAa;AAAA,MAC/D,EAAE,MAAA;AAAA,IAAM;AAEV,UAAM;AAAA,EACR;AACF;"}
+ {"version":3,"file":"connector-factory-BaMIlES8.js","sources":["../src/utils/connector-factory.ts"],"sourcesContent":["import { IERPConnector } from \"../types/erp-connector\";\nimport logger from \"../services/reporting-service/logger\";\nimport { pathToFileURL } from \"url\";\n\n/**\n * Helper function to dynamically import and create connector instance from a file path\n * @param connectorPath - The file path to the connector module\n * @returns A new instance of the IERPConnector\n */\nexport const createConnectorFromPath = async (\n connectorPath: string\n): Promise<IERPConnector> => {\n try {\n // Some detailed debug logging to help with troubleshooting multi-platform connector paths\n const pathParts = connectorPath.split('/');\n const filename = pathParts[pathParts.length - 1];\n\n logger.debug(\"createConnectorFromPath:\", {\n connectorPath,\n pathParts,\n filename,\n finalImport: pathToFileURL(connectorPath).href,\n });\n\n const connectorModule = await import(pathToFileURL(connectorPath).href);\n\n // Get the default export or named export\n const ConnectorClass =\n connectorModule.default ||\n connectorModule[Object.keys(connectorModule)[0]];\n\n if (!ConnectorClass) {\n throw new Error(`No connector class found in module: ${connectorPath}`);\n }\n\n // Create new instance of the connector\n return new ConnectorClass();\n } catch (error) {\n logger.error(\n `Failed to create connector instance from path: ${connectorPath}`,\n { error }\n );\n throw error;\n }\n};"],"names":[],"mappings":";;AASO,MAAM,0BAA0B,OACrC,kBAC2B;AAC3B,MAAI;AAEF,UAAM,YAAY,cAAc,MAAM,GAAG;AACzC,UAAM,WAAW,UAAU,UAAU,SAAS,CAAC;AAE/C,WAAO,MAAM,4BAA4B;AAAA,MACvC;AAAA,MACA;AAAA,MACA;AAAA,MACA,aAAa,cAAc,aAAa,EAAE;AAAA,IAAA,CAC3C;AAED,UAAM,kBAAkB,MAAM,OAAO,cAAc,aAAa,EAAE;AAGlE,UAAM,iBACJ,gBAAgB,WAChB,gBAAgB,OAAO,KAAK,eAAe,EAAE,CAAC,CAAC;AAEjD,QAAI,CAAC,gBAAgB;AACnB,YAAM,IAAI,MAAM,uCAAuC,aAAa,EAAE;AAAA,IACxE;AAGA,WAAO,IAAI,eAAA;AAAA,EACb,SAAS,OAAO;AACd,WAAO;AAAA,MACL,kDAAkD,aAAa;AAAA,MAC/D,EAAE,MAAA;AAAA,IAAM;AAEV,UAAM;AAAA,EACR;AACF;"}
package/dist/{hashed-cache-manager-DbG72eRm.js → hashed-cache-manager-C1u9jQgY.js} CHANGED
@@ -2,9 +2,9 @@ import knex from "knex";
  import { c as config } from "./knexfile-Bng2Ru9c.js";
  import stringify from "json-stable-stringify";
  import XXH from "xxhashjs";
- import "./config-C2Dse2g2.js";
- import { c as configureLogger, l as logger } from "./logger-DeKxCUPp.js";
- import "./index-Cq9tNcJT.js";
+ import "./config-Bax6Ofp5.js";
+ import { c as configureLogger, l as logger } from "./logger-DW5fyhVS.js";
+ import "./index-BkVlW0ZW.js";
  class CoreConfiguration {
  static instance;
  // General Configuration
@@ -319,4 +319,4 @@ export {
  getSQLServerConfiguration as a,
  getErpApiConnectionParams as g
  };
- //# sourceMappingURL=hashed-cache-manager-DbG72eRm.js.map
+ //# sourceMappingURL=hashed-cache-manager-C1u9jQgY.js.map
package/dist/{hashed-cache-manager-DbG72eRm.js.map → hashed-cache-manager-C1u9jQgY.js.map} CHANGED
@@ -1 +1 @@
- {"version":3,"file":"hashed-cache-manager-DbG72eRm.js","sources":["../src/services/data-sync-service/configuration-manager.ts","../src/services/caching-service/hashed-cache-manager.ts"],"sourcesContent":["import \"dotenv/config\";\nimport { configureLogger } from \"../reporting-service/logger\";\nimport { SQLServerConfiguration } from \"../sql-server-erp-service/configuration\";\n\nexport class CoreConfiguration {\n private static instance: CoreConfiguration;\n\n // General Configuration\n public readonly logLevel: string;\n public readonly erpSystem: string;\n public readonly nodeEnv: string;\n\n // MM API (aka \"Mapping\") Service\n public readonly mmERPSvcApiBaseUrl: string;\n public readonly mmApiBaseUrl: string;\n public readonly mmApiAuthToken: string;\n public readonly mmApiRetryAttempts: number;\n\n // Caching (optionally used for interacting with the MM API)\n public readonly cacheTTL: number;\n\n // ERP API Service\n public readonly erpApiPagingLimit: number; //Pagination limit for ERP API\n\n // Job timing Intervals\n public readonly fromErpInterval: string;\n public readonly toErpInterval: string;\n public readonly retryLaborTicketsInterval: string;\n public readonly cacheExpirationCheckInterval: string;\n\n private constructor() {\n this.logLevel = process.env.LOG_LEVEL || \"info\";\n this.erpSystem = process.env.ERP_SYSTEM || \"template\";\n this.nodeEnv = process.env.NODE_ENV || \"development\";\n\n //#region MM API (aka \"Mapping\") Service\n /**\n * MM ERP Service REST API URL (typically https://erp-api.svc.machinemetrics.com)\n */\n this.mmERPSvcApiBaseUrl = process.env.MM_MAPPING_SERVICE_URL || \"\";\n\n /**\n * MM REST API URL (typically https://api.machinemetrics.com)\n */\n console.log(\"=== CONFIG DEBUG ===\");\n console.log(\"MM_MAPPING_AUTH_SERVICE_URL env var:\", process.env.MM_MAPPING_AUTH_SERVICE_URL);\n this.mmApiBaseUrl = process.env.MM_MAPPING_AUTH_SERVICE_URL || \"\";\n console.log(\"mmApiBaseUrl set to:\", this.mmApiBaseUrl);\n console.log(\"=== END CONFIG DEBUG ===\");\n\n /**\n * Company Auth Token\n */\n this.mmApiAuthToken = process.env.MM_MAPPING_SERVICE_TOKEN || \"\";\n\n /**\n * Number of retry attempts for MM API calls\n */\n this.mmApiRetryAttempts = parseInt(process.env.RETRY_ATTEMPTS || \"0\");\n //#endregion MM API (aka \"Mapping\") Service\n\n /**\n * Default pagination limit for ERP API\n */\n this.erpApiPagingLimit = parseInt(process.env.ERP_PAGINATION_LIMIT || \"0\");\n //#endregion ERP API Service\n\n /**\n * For how to define the intervals, see Bree's documentation: https://github.com/breejs/bree\n */\n this.fromErpInterval =\n process.env.FROM_ERP_INTERVAL || process.env.POLL_INTERVAL || \"5 min\";\n this.toErpInterval = process.env.TO_ERP_INTERVAL || \"5 min\";\n this.retryLaborTicketsInterval =\n process.env.RETRY_LABOR_TICKETS_INTERVAL || \"30 min\";\n this.cacheExpirationCheckInterval =\n process.env.CACHE_EXPIRATION_CHECK_INTERVAL || \"5 min\";\n\n /**\n * Cache TTL (in seconds)\n */\n const cacheTTLDef = 7 * 24 * 60 * 60; // 7 days\n this.cacheTTL = parseInt(process.env.CACHE_TTL || cacheTTLDef.toString());\n\n // Configure the logger with our settings\n configureLogger(this.logLevel, this.nodeEnv);\n }\n\n public static inst(): CoreConfiguration {\n if (!CoreConfiguration.instance) {\n CoreConfiguration.instance = new CoreConfiguration();\n }\n return CoreConfiguration.instance;\n }\n\n /**\n * Returns a sanitized version of the configuration for safe logging.\n * Masks sensitive fields like authentication tokens.\n */\n public 
toSafeLogObject(): Record<string, unknown> {\n const maskSensitiveValue = (value: string): string => {\n if (!value || value.length === 0) {\n return \"\";\n }\n if (value.length < 6) {\n return \"********\";\n }\n // Show first 3 and last 3 characters, mask the middle\n return value.substring(0, 3) + \"********\" + value.substring(value.length - 3);\n };\n\n return {\n logLevel: this.logLevel,\n erpSystem: this.erpSystem,\n nodeEnv: this.nodeEnv,\n mmERPSvcApiBaseUrl: this.mmERPSvcApiBaseUrl,\n mmApiBaseUrl: this.mmApiBaseUrl,\n mmApiAuthToken: maskSensitiveValue(this.mmApiAuthToken),\n mmApiRetryAttempts: this.mmApiRetryAttempts,\n cacheTTL: this.cacheTTL,\n erpApiPagingLimit: this.erpApiPagingLimit,\n fromErpInterval: this.fromErpInterval,\n toErpInterval: this.toErpInterval,\n retryLaborTicketsInterval: this.retryLaborTicketsInterval,\n cacheExpirationCheckInterval: this.cacheExpirationCheckInterval,\n };\n }\n}\n\n/**\n * Helper function to get the SQL Server Configuration for collectors that use SQL Server to interact with the ERP\n */\nexport const getSQLServerConfiguration = (): SQLServerConfiguration => {\n return {\n username: process.env.ERP_SQLSERVER_USERNAME || \"\",\n password: process.env.ERP_SQLSERVER_PASSWORD || \"\",\n database: process.env.ERP_SQLSERVER_DATABASE || \"\",\n host:\n process.env.ERP_SQLSERVER_HOST || process.env.ERP_SQLSERVER_SERVER || \"\",\n port: process.env.ERP_SQLSERVER_PORT || \"1433\",\n connectionTimeout: process.env.ERP_SQLSERVER_CONNECTION_TIMEOUT || \"30000\",\n requestTimeout: process.env.ERP_SQLSERVER_REQUEST_TIMEOUT || \"60000\",\n poolMax: process.env.ERP_SQLSERVER_MAX || \"10\",\n poolMin: process.env.ERP_SQLSERVER_MIN || \"0\",\n idleTimeoutMillis:\n process.env.ERP_SQLSERVER_IDLE_TIMEOUT_MMILLIS || \"30000\",\n encrypt: process.env.ERP_SQLSERVER_ENCRYPT === \"true\",\n trustServer: process.env.ERP_SQLSERVER_TRUST_SERVER === \"true\",\n };\n};\n\n/**\n * Parameters required to connect to an ERP system via its API.\n * Contains all the necessary settings to establish a connection and authenticate with an ERP system's API.\n */\nexport class ErpApiConnectionParams {\n constructor(\n public readonly erpApiUrl: string, // Base url of ERP\n public readonly erpApiClientId: string, // Client ID to authenticate with ERP\n public readonly erpApiClientSecret: string, // Client Secret to authenticate with ERP\n public readonly erpApiOrganizationId: string, // Organization / tenant Id\n public readonly erpAuthBaseUrl: string, // Auth base url\n public readonly retryAttempts: number = 3 // Number of retry attempts for API calls\n ) {}\n}\n\n/**\n * Helper function to get the ERP API Connection Parameters\n * Not all connectors use these, but keeping these commonly values in one place may\n * make it easier to set and understand env var names set in App.\n */\nexport const getErpApiConnectionParams = (): ErpApiConnectionParams => {\n return new ErpApiConnectionParams(\n process.env.ERP_API_URL || \"\",\n process.env.ERP_API_CLIENT_ID || \"\",\n process.env.ERP_API_CLIENT_SECRET || \"\",\n process.env.ERP_API_ORGANIZATION_ID || \"\",\n process.env.ERP_AUTH_BASE_URL || \"\",\n parseInt(process.env.ERP_API_RETRY_ATTEMPTS || \"3\")\n );\n};\n","import knex, { Knex } from \"knex\";\nimport config from \"../../knexfile\";\nimport stringify from \"json-stable-stringify\";\nimport XXH from \"xxhashjs\";\nimport { ERPObjType } from \"../../types/erp-types\";\nimport { CacheMetrics } from \"./index\";\nimport { CoreConfiguration } from 
\"../data-sync-service/configuration-manager\";\nimport { logger } from \"../reporting-service\";\n\ntype HashedCacheManagerOptions = {\n ttl?: number;\n tableName?: string;\n};\n\nexport class HashedCacheManager {\n private static TABLE_NAME = \"sdk_cache\";\n private db: Knex;\n private options: HashedCacheManagerOptions;\n private static readonly SEED = 0xabcd; // Arbitrary seed for hashing\n private isDestroyed: boolean = false;\n private metrics: CacheMetrics = {\n recordCounts: {},\n };\n\n constructor(options?: HashedCacheManagerOptions) {\n this.options = {\n ttl: options?.ttl || CoreConfiguration.inst().cacheTTL,\n tableName: options?.tableName || HashedCacheManager.TABLE_NAME,\n };\n this.db = knex({\n ...config.local,\n pool: {\n min: 0,\n max: 10,\n },\n });\n }\n\n /**\n * Checks if the cache manager is still valid\n * @throws Error if the cache manager has been destroyed\n */\n private checkValid(): void {\n if (this.isDestroyed) {\n throw new Error(\"Cache manager has been destroyed\");\n }\n }\n\n /**\n * Generates a stable hash of a record using JSON stringify + xxhash\n */\n public static hashRecord(record: object): string {\n try {\n const serialized = stringify(record);\n if (!serialized) {\n throw new Error(\"Failed to serialize record for hashing\");\n }\n const hash = XXH.h64(serialized, HashedCacheManager.SEED).toString(16);\n return hash;\n } catch (error) {\n if (error instanceof Error && error.message.includes(\"circular\")) {\n throw new Error(\"Failed to serialize record for hashing\");\n }\n throw error;\n }\n }\n\n /**\n * Gets a record from the cache\n * @param type The type of record\n * @param hash The hash of the record\n * @returns The record if it exists, null otherwise\n */\n private async getRecord(\n type: ERPObjType,\n hash: string\n ): Promise<{ key: string } | null> {\n this.checkValid();\n return this.db(this.options.tableName)\n .select(\"key\")\n .where({ type, key: hash })\n .first();\n }\n\n /**\n * Stores a record in the cache\n * @param type The type of record\n * @param record The record to store\n * @returns true if a new record was created, false if an existing record was updated\n */\n public async store(type: ERPObjType, record: object): Promise<boolean> {\n if (!this.isDestroyed && record) {\n try {\n const hash = HashedCacheManager.hashRecord(record);\n const now = new Date();\n\n // First check if record exists with same type and hash\n const existing = await this.db(this.options.tableName)\n .where({\n type,\n key: hash,\n })\n .first();\n\n if (existing) {\n return false; // No need to update, hash hasn't changed\n } else {\n // Insert new record with minimal data\n const result = await this.db(this.options.tableName)\n .insert({\n type,\n key: hash,\n created_at: now,\n })\n .returning(\"id\");\n return result.length > 0;\n }\n } catch (error) {\n logger.error(\"Error storing record:\", error);\n throw error;\n }\n }\n return false;\n }\n\n /**\n * Checks if a record has changed since last seen\n * @param type The type of record\n * @param record The record to check\n * @returns true if the record has changed or is new\n */\n async hasChanged(type: ERPObjType, record: object): Promise<boolean> {\n this.checkValid();\n const newHash = HashedCacheManager.hashRecord(record);\n const existing = await this.getRecord(type, newHash);\n return !existing;\n }\n\n /**\n * Checks if a record has changed and stores it if it has\n * @param type The type of record\n * @param record The record to check and store\n * @returns true if the record 
was changed or is new\n */\n async upsert(type: ERPObjType, record: object): Promise<boolean> {\n this.checkValid();\n const hasChanged = await this.hasChanged(type, record);\n if (hasChanged) {\n await this.store(type, record as Record<string, unknown>);\n }\n return hasChanged;\n }\n\n /**\n * Removes expired records based on TTL\n */\n async removeExpiredObjects(): Promise<void> {\n this.checkValid();\n const ttl = this.options.ttl;\n if (!ttl) return;\n\n const ttlMilliseconds = ttl * 1000;\n const expirationLimitDate = new Date(Date.now() - ttlMilliseconds);\n const expirationLimit = expirationLimitDate\n .toISOString()\n .slice(0, 19)\n .replace(\"T\", \" \");\n\n await this.db(this.options.tableName)\n .where(\"created_at\", \"<\", expirationLimit)\n .del();\n }\n\n /**\n * Gets all records of a specific type\n */\n async getRecordsByType(type: ERPObjType): Promise<string[]> {\n this.checkValid();\n const records = await this.db(this.options.tableName)\n .select(\"key\")\n .where({ type });\n\n return records.map((record) => record.key);\n }\n\n /**\n * Removes all records of a specific type\n */\n async removeRecordsByType(type: ERPObjType): Promise<void> {\n this.checkValid();\n await this.db(this.options.tableName).where({ type }).del();\n }\n\n /**\n * Removes a specific record\n */\n public async removeRecord(type: ERPObjType, record: object): Promise<void> {\n if (!this.isDestroyed) {\n try {\n const hash = HashedCacheManager.hashRecord(record);\n await this.db(this.options.tableName)\n .where({ type, key: hash }) // Use key for deletion\n .del();\n } catch (error) {\n logger.error(\"Error removing record:\", error);\n throw error;\n }\n }\n }\n\n /**\n * Clears all records from the cache\n */\n async clear(): Promise<void> {\n this.checkValid();\n await this.db(this.options.tableName).del();\n }\n\n /**\n * Cleans up database connection and marks the cache manager as destroyed\n */\n async destroy(): Promise<void> {\n if (!this.isDestroyed) {\n await this.db.destroy();\n this.isDestroyed = true;\n }\n }\n\n /**\n * Gets the current cache metrics\n * @returns The current cache metrics\n */\n async getMetrics(): Promise<CacheMetrics> {\n this.checkValid();\n\n // Get counts for each type\n const counts = (await this.db(this.options.tableName)\n .select(\"type\")\n .count(\"* as count\")\n .groupBy(\"type\")) as Array<{ type: string; count: string }>;\n\n // Update metrics\n this.metrics.recordCounts = counts.reduce(\n (acc, row) => {\n acc[row.type] = parseInt(row.count, 10);\n return acc;\n },\n {} as Record<string, number>\n );\n\n return this.metrics;\n 
}\n}\n"],"names":[],"mappings":";;;;;;;AAIO,MAAM,kBAAkB;AAAA,EAC7B,OAAe;AAAA;AAAA,EAGC;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAGA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAGA;AAAA;AAAA,EAGA;AAAA;AAAA;AAAA,EAGA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAER,cAAc;AACpB,SAAK,WAAW,QAAQ,IAAI,aAAa;AACzC,SAAK,YAAY,QAAQ,IAAI,cAAc;AAC3C,SAAK,UAAU,QAAQ,IAAI,YAAY;AAMvC,SAAK,qBAAqB,QAAQ,IAAI,0BAA0B;AAKhE,YAAQ,IAAI,sBAAsB;AAClC,YAAQ,IAAI,wCAAwC,QAAQ,IAAI,2BAA2B;AAC3F,SAAK,eAAe,QAAQ,IAAI,+BAA+B;AAC/D,YAAQ,IAAI,wBAAwB,KAAK,YAAY;AACrD,YAAQ,IAAI,0BAA0B;AAKtC,SAAK,iBAAiB,QAAQ,IAAI,4BAA4B;AAK9D,SAAK,qBAAqB,SAAS,QAAQ,IAAI,kBAAkB,GAAG;AAMpE,SAAK,oBAAoB,SAAS,QAAQ,IAAI,wBAAwB,GAAG;AAMzE,SAAK,kBACH,QAAQ,IAAI,qBAAqB,QAAQ,IAAI,iBAAiB;AAChE,SAAK,gBAAgB,QAAQ,IAAI,mBAAmB;AACpD,SAAK,4BACH,QAAQ,IAAI,gCAAgC;AAC9C,SAAK,+BACH,QAAQ,IAAI,mCAAmC;AAKjD,UAAM,cAAc,IAAI,KAAK,KAAK;AAClC,SAAK,WAAW,SAAS,QAAQ,IAAI,aAAa,YAAY,UAAU;AAGxE,oBAAgB,KAAK,UAAU,KAAK,OAAO;AAAA,EAC7C;AAAA,EAEA,OAAc,OAA0B;AACtC,QAAI,CAAC,kBAAkB,UAAU;AAC/B,wBAAkB,WAAW,IAAI,kBAAA;AAAA,IACnC;AACA,WAAO,kBAAkB;AAAA,EAC3B;AAAA;AAAA;AAAA;AAAA;AAAA,EAMO,kBAA2C;AAChD,UAAM,qBAAqB,CAAC,UAA0B;AACpD,UAAI,CAAC,SAAS,MAAM,WAAW,GAAG;AAChC,eAAO;AAAA,MACT;AACA,UAAI,MAAM,SAAS,GAAG;AACpB,eAAO;AAAA,MACT;AAEA,aAAO,MAAM,UAAU,GAAG,CAAC,IAAI,aAAa,MAAM,UAAU,MAAM,SAAS,CAAC;AAAA,IAC9E;AAEA,WAAO;AAAA,MACL,UAAU,KAAK;AAAA,MACf,WAAW,KAAK;AAAA,MAChB,SAAS,KAAK;AAAA,MACd,oBAAoB,KAAK;AAAA,MACzB,cAAc,KAAK;AAAA,MACnB,gBAAgB,mBAAmB,KAAK,cAAc;AAAA,MACtD,oBAAoB,KAAK;AAAA,MACzB,UAAU,KAAK;AAAA,MACf,mBAAmB,KAAK;AAAA,MACxB,iBAAiB,KAAK;AAAA,MACtB,eAAe,KAAK;AAAA,MACpB,2BAA2B,KAAK;AAAA,MAChC,8BAA8B,KAAK;AAAA,IAAA;AAAA,EAEvC;AACF;AAKO,MAAM,4BAA4B,MAA8B;AACrE,SAAO;AAAA,IACL,UAAU,QAAQ,IAAI,0BAA0B;AAAA,IAChD,UAAU,QAAQ,IAAI,0BAA0B;AAAA,IAChD,UAAU,QAAQ,IAAI,0BAA0B;AAAA,IAChD,MACE,QAAQ,IAAI,sBAAsB,QAAQ,IAAI,wBAAwB;AAAA,IACxE,MAAM,QAAQ,IAAI,sBAAsB;AAAA,IACxC,mBAAmB,QAAQ,IAAI,oCAAoC;AAAA,IACnE,gBAAgB,QAAQ,IAAI,iCAAiC;AAAA,IAC7D,SAAS,QAAQ,IAAI,qBAAqB;AAAA,IAC1C,SAAS,QAAQ,IAAI,qBAAqB;AAAA,IAC1C,mBACE,QAAQ,IAAI,sCAAsC;AAAA,IACpD,SAAS,QAAQ,IAAI,0BAA0B;AAAA,IAC/C,aAAa,QAAQ,IAAI,+BAA+B;AAAA,EAAA;AAE5D;AAMO,MAAM,uBAAuB;AAAA,EAClC,YACkB,WACA,gBACA,oBACA,sBACA,gBACA,gBAAwB,GACxC;AANgB,SAAA,YAAA;AACA,SAAA,iBAAA;AACA,SAAA,qBAAA;AACA,SAAA,uBAAA;AACA,SAAA,iBAAA;AACA,SAAA,gBAAA;AAAA,EACf;AACL;AAOO,MAAM,4BAA4B,MAA8B;AACrE,SAAO,IAAI;AAAA,IACT,QAAQ,IAAI,eAAe;AAAA,IAC3B,QAAQ,IAAI,qBAAqB;AAAA,IACjC,QAAQ,IAAI,yBAAyB;AAAA,IACrC,QAAQ,IAAI,2BAA2B;AAAA,IACvC,QAAQ,IAAI,qBAAqB;AAAA,IACjC,SAAS,QAAQ,IAAI,0BAA0B,GAAG;AAAA,EAAA;AAEtD;ACtKO,MAAM,mBAAmB;AAAA,EAC9B,OAAe,aAAa;AAAA,EACpB;AAAA,EACA;AAAA,EACR,OAAwB,OAAO;AAAA;AAAA,EACvB,cAAuB;AAAA,EACvB,UAAwB;AAAA,IAC9B,cAAc,CAAA;AAAA,EAAC;AAAA,EAGjB,YAAY,SAAqC;AAC/C,SAAK,UAAU;AAAA,MACb,KAAK,SAAS,OAAO,kBAAkB,OAAO;AAAA,MAC9C,WAAW,SAAS,aAAa,mBAAmB;AAAA,IAAA;AAEtD,SAAK,KAAK,KAAK;AAAA,MACb,GAAG,OAAO;AAAA,MACV,MAAM;AAAA,QACJ,KAAK;AAAA,QACL,KAAK;AAAA,MAAA;AAAA,IACP,CACD;AAAA,EACH;AAAA;AAAA;AAAA;AAAA;AAAA,EAMQ,aAAmB;AACzB,QAAI,KAAK,aAAa;AACpB,YAAM,IAAI,MAAM,kCAAkC;AAAA,IACpD;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,OAAc,WAAW,QAAwB;AAC/C,QAAI;AACF,YAAM,aAAa,UAAU,MAAM;AACnC,UAAI,CAAC,YAAY;AACf,cAAM,IAAI,MAAM,wCAAwC;AAAA,MAC1D;AACA,YAAM,OAAO,IAAI,IAAI,YAAY,mBAAmB,IAAI,EAAE,SAAS,EAAE;AACrE,aAAO;AAAA,IACT,SAAS,OAAO;AACd,UAAI,iBAAiB,SAAS,MAAM,QAAQ,SAAS,UAAU,GAAG;AAChE,cAAM,IAAI,MAAM,wCAAwC;AAAA,MAC1D;AACA,YAAM;AAAA,IACR;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAc,UACZ,MACA,MACiC;AACjC,SAAK,WAAA;AACL,WAAO,KAAK,GAAG,KAAK,QAAQ,SAAS,EAClC,OAAO,KAAK,EACZ,MAAM,EAAE,MAAM,KAAK,KAAA,CAAM,EACzB,MAAA;AAAA,EACL;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;A
AAA,EAQA,MAAa,MAAM,MAAkB,QAAkC;AACrE,QAAI,CAAC,KAAK,eAAe,QAAQ;AAC/B,UAAI;AACF,cAAM,OAAO,mBAAmB,WAAW,MAAM;AACjD,cAAM,0BAAU,KAAA;AAGhB,cAAM,WAAW,MAAM,KAAK,GAAG,KAAK,QAAQ,SAAS,EAClD,MAAM;AAAA,UACL;AAAA,UACA,KAAK;AAAA,QAAA,CACN,EACA,MAAA;AAEH,YAAI,UAAU;AACZ,iBAAO;AAAA,QACT,OAAO;AAEL,gBAAM,SAAS,MAAM,KAAK,GAAG,KAAK,QAAQ,SAAS,EAChD,OAAO;AAAA,YACN;AAAA,YACA,KAAK;AAAA,YACL,YAAY;AAAA,UAAA,CACb,EACA,UAAU,IAAI;AACjB,iBAAO,OAAO,SAAS;AAAA,QACzB;AAAA,MACF,SAAS,OAAO;AACd,eAAO,MAAM,yBAAyB,KAAK;AAC3C,cAAM;AAAA,MACR;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAM,WAAW,MAAkB,QAAkC;AACnE,SAAK,WAAA;AACL,UAAM,UAAU,mBAAmB,WAAW,MAAM;AACpD,UAAM,WAAW,MAAM,KAAK,UAAU,MAAM,OAAO;AACnD,WAAO,CAAC;AAAA,EACV;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAM,OAAO,MAAkB,QAAkC;AAC/D,SAAK,WAAA;AACL,UAAM,aAAa,MAAM,KAAK,WAAW,MAAM,MAAM;AACrD,QAAI,YAAY;AACd,YAAM,KAAK,MAAM,MAAM,MAAiC;AAAA,IAC1D;AACA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,uBAAsC;AAC1C,SAAK,WAAA;AACL,UAAM,MAAM,KAAK,QAAQ;AACzB,QAAI,CAAC,IAAK;AAEV,UAAM,kBAAkB,MAAM;AAC9B,UAAM,sBAAsB,IAAI,KAAK,KAAK,IAAA,IAAQ,eAAe;AACjE,UAAM,kBAAkB,oBACrB,YAAA,EACA,MAAM,GAAG,EAAE,EACX,QAAQ,KAAK,GAAG;AAEnB,UAAM,KAAK,GAAG,KAAK,QAAQ,SAAS,EACjC,MAAM,cAAc,KAAK,eAAe,EACxC,IAAA;AAAA,EACL;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,iBAAiB,MAAqC;AAC1D,SAAK,WAAA;AACL,UAAM,UAAU,MAAM,KAAK,GAAG,KAAK,QAAQ,SAAS,EACjD,OAAO,KAAK,EACZ,MAAM,EAAE,MAAM;AAEjB,WAAO,QAAQ,IAAI,CAAC,WAAW,OAAO,GAAG;AAAA,EAC3C;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,oBAAoB,MAAiC;AACzD,SAAK,WAAA;AACL,UAAM,KAAK,GAAG,KAAK,QAAQ,SAAS,EAAE,MAAM,EAAE,MAAM,EAAE,IAAA;AAAA,EACxD;AAAA;AAAA;AAAA;AAAA,EAKA,MAAa,aAAa,MAAkB,QAA+B;AACzE,QAAI,CAAC,KAAK,aAAa;AACrB,UAAI;AACF,cAAM,OAAO,mBAAmB,WAAW,MAAM;AACjD,cAAM,KAAK,GAAG,KAAK,QAAQ,SAAS,EACjC,MAAM,EAAE,MAAM,KAAK,KAAA,CAAM,EACzB,IAAA;AAAA,MACL,SAAS,OAAO;AACd,eAAO,MAAM,0BAA0B,KAAK;AAC5C,cAAM;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,QAAuB;AAC3B,SAAK,WAAA;AACL,UAAM,KAAK,GAAG,KAAK,QAAQ,SAAS,EAAE,IAAA;AAAA,EACxC;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,UAAyB;AAC7B,QAAI,CAAC,KAAK,aAAa;AACrB,YAAM,KAAK,GAAG,QAAA;AACd,WAAK,cAAc;AAAA,IACrB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,aAAoC;AACxC,SAAK,WAAA;AAGL,UAAM,SAAU,MAAM,KAAK,GAAG,KAAK,QAAQ,SAAS,EACjD,OAAO,MAAM,EACb,MAAM,YAAY,EAClB,QAAQ,MAAM;AAGjB,SAAK,QAAQ,eAAe,OAAO;AAAA,MACjC,CAAC,KAAK,QAAQ;AACZ,YAAI,IAAI,IAAI,IAAI,SAAS,IAAI,OAAO,EAAE;AACtC,eAAO;AAAA,MACT;AAAA,MACA,CAAA;AAAA,IAAC;AAGH,WAAO,KAAK;AAAA,EACd;AACF;"}
1
+ {"version":3,"file":"hashed-cache-manager-C1u9jQgY.js","sources":["../src/services/data-sync-service/configuration-manager.ts","../src/services/caching-service/hashed-cache-manager.ts"],"sourcesContent":["import \"dotenv/config\";\nimport { configureLogger } from \"../reporting-service/logger\";\nimport { SQLServerConfiguration } from \"../sql-server-erp-service/configuration\";\n\nexport class CoreConfiguration {\n private static instance: CoreConfiguration;\n\n // General Configuration\n public readonly logLevel: string;\n public readonly erpSystem: string;\n public readonly nodeEnv: string;\n\n // MM API (aka \"Mapping\") Service\n public readonly mmERPSvcApiBaseUrl: string;\n public readonly mmApiBaseUrl: string;\n public readonly mmApiAuthToken: string;\n public readonly mmApiRetryAttempts: number;\n\n // Caching (optionally used for interacting with the MM API)\n public readonly cacheTTL: number;\n\n // ERP API Service\n public readonly erpApiPagingLimit: number; //Pagination limit for ERP API\n\n // Job timing Intervals\n public readonly fromErpInterval: string;\n public readonly toErpInterval: string;\n public readonly retryLaborTicketsInterval: string;\n public readonly cacheExpirationCheckInterval: string;\n\n private constructor() {\n this.logLevel = process.env.LOG_LEVEL || \"info\";\n this.erpSystem = process.env.ERP_SYSTEM || \"template\";\n this.nodeEnv = process.env.NODE_ENV || \"development\";\n\n //#region MM API (aka \"Mapping\") Service\n /**\n * MM ERP Service REST API URL (typically https://erp-api.svc.machinemetrics.com)\n */\n this.mmERPSvcApiBaseUrl = process.env.MM_MAPPING_SERVICE_URL || \"\";\n\n /**\n * MM REST API URL (typically https://api.machinemetrics.com)\n */\n console.log(\"=== CONFIG DEBUG ===\");\n console.log(\"MM_MAPPING_AUTH_SERVICE_URL env var:\", process.env.MM_MAPPING_AUTH_SERVICE_URL);\n this.mmApiBaseUrl = process.env.MM_MAPPING_AUTH_SERVICE_URL || \"\";\n console.log(\"mmApiBaseUrl set to:\", this.mmApiBaseUrl);\n console.log(\"=== END CONFIG DEBUG ===\");\n\n /**\n * Company Auth Token\n */\n this.mmApiAuthToken = process.env.MM_MAPPING_SERVICE_TOKEN || \"\";\n\n /**\n * Number of retry attempts for MM API calls\n */\n this.mmApiRetryAttempts = parseInt(process.env.RETRY_ATTEMPTS || \"0\");\n //#endregion MM API (aka \"Mapping\") Service\n\n /**\n * Default pagination limit for ERP API\n */\n this.erpApiPagingLimit = parseInt(process.env.ERP_PAGINATION_LIMIT || \"0\");\n //#endregion ERP API Service\n\n /**\n * For how to define the intervals, see Bree's documentation: https://github.com/breejs/bree\n */\n this.fromErpInterval =\n process.env.FROM_ERP_INTERVAL || process.env.POLL_INTERVAL || \"5 min\";\n this.toErpInterval = process.env.TO_ERP_INTERVAL || \"5 min\";\n this.retryLaborTicketsInterval =\n process.env.RETRY_LABOR_TICKETS_INTERVAL || \"30 min\";\n this.cacheExpirationCheckInterval =\n process.env.CACHE_EXPIRATION_CHECK_INTERVAL || \"5 min\";\n\n /**\n * Cache TTL (in seconds)\n */\n const cacheTTLDef = 7 * 24 * 60 * 60; // 7 days\n this.cacheTTL = parseInt(process.env.CACHE_TTL || cacheTTLDef.toString());\n\n // Configure the logger with our settings\n configureLogger(this.logLevel, this.nodeEnv);\n }\n\n public static inst(): CoreConfiguration {\n if (!CoreConfiguration.instance) {\n CoreConfiguration.instance = new CoreConfiguration();\n }\n return CoreConfiguration.instance;\n }\n\n /**\n * Returns a sanitized version of the configuration for safe logging.\n * Masks sensitive fields like authentication tokens.\n */\n public 
toSafeLogObject(): Record<string, unknown> {\n const maskSensitiveValue = (value: string): string => {\n if (!value || value.length === 0) {\n return \"\";\n }\n if (value.length < 6) {\n return \"********\";\n }\n // Show first 3 and last 3 characters, mask the middle\n return value.substring(0, 3) + \"********\" + value.substring(value.length - 3);\n };\n\n return {\n logLevel: this.logLevel,\n erpSystem: this.erpSystem,\n nodeEnv: this.nodeEnv,\n mmERPSvcApiBaseUrl: this.mmERPSvcApiBaseUrl,\n mmApiBaseUrl: this.mmApiBaseUrl,\n mmApiAuthToken: maskSensitiveValue(this.mmApiAuthToken),\n mmApiRetryAttempts: this.mmApiRetryAttempts,\n cacheTTL: this.cacheTTL,\n erpApiPagingLimit: this.erpApiPagingLimit,\n fromErpInterval: this.fromErpInterval,\n toErpInterval: this.toErpInterval,\n retryLaborTicketsInterval: this.retryLaborTicketsInterval,\n cacheExpirationCheckInterval: this.cacheExpirationCheckInterval,\n };\n }\n}\n\n/**\n * Helper function to get the SQL Server Configuration for collectors that use SQL Server to interact with the ERP\n */\nexport const getSQLServerConfiguration = (): SQLServerConfiguration => {\n return {\n username: process.env.ERP_SQLSERVER_USERNAME || \"\",\n password: process.env.ERP_SQLSERVER_PASSWORD || \"\",\n database: process.env.ERP_SQLSERVER_DATABASE || \"\",\n host:\n process.env.ERP_SQLSERVER_HOST || process.env.ERP_SQLSERVER_SERVER || \"\",\n port: process.env.ERP_SQLSERVER_PORT || \"1433\",\n connectionTimeout: process.env.ERP_SQLSERVER_CONNECTION_TIMEOUT || \"30000\",\n requestTimeout: process.env.ERP_SQLSERVER_REQUEST_TIMEOUT || \"60000\",\n poolMax: process.env.ERP_SQLSERVER_MAX || \"10\",\n poolMin: process.env.ERP_SQLSERVER_MIN || \"0\",\n idleTimeoutMillis:\n process.env.ERP_SQLSERVER_IDLE_TIMEOUT_MMILLIS || \"30000\",\n encrypt: process.env.ERP_SQLSERVER_ENCRYPT === \"true\",\n trustServer: process.env.ERP_SQLSERVER_TRUST_SERVER === \"true\",\n };\n};\n\n/**\n * Parameters required to connect to an ERP system via its API.\n * Contains all the necessary settings to establish a connection and authenticate with an ERP system's API.\n */\nexport class ErpApiConnectionParams {\n constructor(\n public readonly erpApiUrl: string, // Base url of ERP\n public readonly erpApiClientId: string, // Client ID to authenticate with ERP\n public readonly erpApiClientSecret: string, // Client Secret to authenticate with ERP\n public readonly erpApiOrganizationId: string, // Organization / tenant Id\n public readonly erpAuthBaseUrl: string, // Auth base url\n public readonly retryAttempts: number = 3 // Number of retry attempts for API calls\n ) {}\n}\n\n/**\n * Helper function to get the ERP API Connection Parameters\n * Not all connectors use these, but keeping these commonly values in one place may\n * make it easier to set and understand env var names set in App.\n */\nexport const getErpApiConnectionParams = (): ErpApiConnectionParams => {\n return new ErpApiConnectionParams(\n process.env.ERP_API_URL || \"\",\n process.env.ERP_API_CLIENT_ID || \"\",\n process.env.ERP_API_CLIENT_SECRET || \"\",\n process.env.ERP_API_ORGANIZATION_ID || \"\",\n process.env.ERP_AUTH_BASE_URL || \"\",\n parseInt(process.env.ERP_API_RETRY_ATTEMPTS || \"3\")\n );\n};\n","import knex, { Knex } from \"knex\";\nimport config from \"../../knexfile\";\nimport stringify from \"json-stable-stringify\";\nimport XXH from \"xxhashjs\";\nimport { ERPObjType } from \"../../types/erp-types\";\nimport { CacheMetrics } from \"./index\";\nimport { CoreConfiguration } from 
\"../data-sync-service/configuration-manager\";\nimport { logger } from \"../reporting-service\";\n\ntype HashedCacheManagerOptions = {\n ttl?: number;\n tableName?: string;\n};\n\nexport class HashedCacheManager {\n private static TABLE_NAME = \"sdk_cache\";\n private db: Knex;\n private options: HashedCacheManagerOptions;\n private static readonly SEED = 0xabcd; // Arbitrary seed for hashing\n private isDestroyed: boolean = false;\n private metrics: CacheMetrics = {\n recordCounts: {},\n };\n\n constructor(options?: HashedCacheManagerOptions) {\n this.options = {\n ttl: options?.ttl || CoreConfiguration.inst().cacheTTL,\n tableName: options?.tableName || HashedCacheManager.TABLE_NAME,\n };\n this.db = knex({\n ...config.local,\n pool: {\n min: 0,\n max: 10,\n },\n });\n }\n\n /**\n * Checks if the cache manager is still valid\n * @throws Error if the cache manager has been destroyed\n */\n private checkValid(): void {\n if (this.isDestroyed) {\n throw new Error(\"Cache manager has been destroyed\");\n }\n }\n\n /**\n * Generates a stable hash of a record using JSON stringify + xxhash\n */\n public static hashRecord(record: object): string {\n try {\n const serialized = stringify(record);\n if (!serialized) {\n throw new Error(\"Failed to serialize record for hashing\");\n }\n const hash = XXH.h64(serialized, HashedCacheManager.SEED).toString(16);\n return hash;\n } catch (error) {\n if (error instanceof Error && error.message.includes(\"circular\")) {\n throw new Error(\"Failed to serialize record for hashing\");\n }\n throw error;\n }\n }\n\n /**\n * Gets a record from the cache\n * @param type The type of record\n * @param hash The hash of the record\n * @returns The record if it exists, null otherwise\n */\n private async getRecord(\n type: ERPObjType,\n hash: string\n ): Promise<{ key: string } | null> {\n this.checkValid();\n return this.db(this.options.tableName)\n .select(\"key\")\n .where({ type, key: hash })\n .first();\n }\n\n /**\n * Stores a record in the cache\n * @param type The type of record\n * @param record The record to store\n * @returns true if a new record was created, false if an existing record was updated\n */\n public async store(type: ERPObjType, record: object): Promise<boolean> {\n if (!this.isDestroyed && record) {\n try {\n const hash = HashedCacheManager.hashRecord(record);\n const now = new Date();\n\n // First check if record exists with same type and hash\n const existing = await this.db(this.options.tableName)\n .where({\n type,\n key: hash,\n })\n .first();\n\n if (existing) {\n return false; // No need to update, hash hasn't changed\n } else {\n // Insert new record with minimal data\n const result = await this.db(this.options.tableName)\n .insert({\n type,\n key: hash,\n created_at: now,\n })\n .returning(\"id\");\n return result.length > 0;\n }\n } catch (error) {\n logger.error(\"Error storing record:\", error);\n throw error;\n }\n }\n return false;\n }\n\n /**\n * Checks if a record has changed since last seen\n * @param type The type of record\n * @param record The record to check\n * @returns true if the record has changed or is new\n */\n async hasChanged(type: ERPObjType, record: object): Promise<boolean> {\n this.checkValid();\n const newHash = HashedCacheManager.hashRecord(record);\n const existing = await this.getRecord(type, newHash);\n return !existing;\n }\n\n /**\n * Checks if a record has changed and stores it if it has\n * @param type The type of record\n * @param record The record to check and store\n * @returns true if the record 
was changed or is new\n */\n async upsert(type: ERPObjType, record: object): Promise<boolean> {\n this.checkValid();\n const hasChanged = await this.hasChanged(type, record);\n if (hasChanged) {\n await this.store(type, record as Record<string, unknown>);\n }\n return hasChanged;\n }\n\n /**\n * Removes expired records based on TTL\n */\n async removeExpiredObjects(): Promise<void> {\n this.checkValid();\n const ttl = this.options.ttl;\n if (!ttl) return;\n\n const ttlMilliseconds = ttl * 1000;\n const expirationLimitDate = new Date(Date.now() - ttlMilliseconds);\n const expirationLimit = expirationLimitDate\n .toISOString()\n .slice(0, 19)\n .replace(\"T\", \" \");\n\n await this.db(this.options.tableName)\n .where(\"created_at\", \"<\", expirationLimit)\n .del();\n }\n\n /**\n * Gets all records of a specific type\n */\n async getRecordsByType(type: ERPObjType): Promise<string[]> {\n this.checkValid();\n const records = await this.db(this.options.tableName)\n .select(\"key\")\n .where({ type });\n\n return records.map((record) => record.key);\n }\n\n /**\n * Removes all records of a specific type\n */\n async removeRecordsByType(type: ERPObjType): Promise<void> {\n this.checkValid();\n await this.db(this.options.tableName).where({ type }).del();\n }\n\n /**\n * Removes a specific record\n */\n public async removeRecord(type: ERPObjType, record: object): Promise<void> {\n if (!this.isDestroyed) {\n try {\n const hash = HashedCacheManager.hashRecord(record);\n await this.db(this.options.tableName)\n .where({ type, key: hash }) // Use key for deletion\n .del();\n } catch (error) {\n logger.error(\"Error removing record:\", error);\n throw error;\n }\n }\n }\n\n /**\n * Clears all records from the cache\n */\n async clear(): Promise<void> {\n this.checkValid();\n await this.db(this.options.tableName).del();\n }\n\n /**\n * Cleans up database connection and marks the cache manager as destroyed\n */\n async destroy(): Promise<void> {\n if (!this.isDestroyed) {\n await this.db.destroy();\n this.isDestroyed = true;\n }\n }\n\n /**\n * Gets the current cache metrics\n * @returns The current cache metrics\n */\n async getMetrics(): Promise<CacheMetrics> {\n this.checkValid();\n\n // Get counts for each type\n const counts = (await this.db(this.options.tableName)\n .select(\"type\")\n .count(\"* as count\")\n .groupBy(\"type\")) as Array<{ type: string; count: string }>;\n\n // Update metrics\n this.metrics.recordCounts = counts.reduce(\n (acc, row) => {\n acc[row.type] = parseInt(row.count, 10);\n return acc;\n },\n {} as Record<string, number>\n );\n\n return this.metrics;\n 
}\n}\n"],"names":[],"mappings":";;;;;;;AAIO,MAAM,kBAAkB;AAAA,EAC7B,OAAe;AAAA;AAAA,EAGC;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAGA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAGA;AAAA;AAAA,EAGA;AAAA;AAAA;AAAA,EAGA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAER,cAAc;AACpB,SAAK,WAAW,QAAQ,IAAI,aAAa;AACzC,SAAK,YAAY,QAAQ,IAAI,cAAc;AAC3C,SAAK,UAAU,QAAQ,IAAI,YAAY;AAMvC,SAAK,qBAAqB,QAAQ,IAAI,0BAA0B;AAKhE,YAAQ,IAAI,sBAAsB;AAClC,YAAQ,IAAI,wCAAwC,QAAQ,IAAI,2BAA2B;AAC3F,SAAK,eAAe,QAAQ,IAAI,+BAA+B;AAC/D,YAAQ,IAAI,wBAAwB,KAAK,YAAY;AACrD,YAAQ,IAAI,0BAA0B;AAKtC,SAAK,iBAAiB,QAAQ,IAAI,4BAA4B;AAK9D,SAAK,qBAAqB,SAAS,QAAQ,IAAI,kBAAkB,GAAG;AAMpE,SAAK,oBAAoB,SAAS,QAAQ,IAAI,wBAAwB,GAAG;AAMzE,SAAK,kBACH,QAAQ,IAAI,qBAAqB,QAAQ,IAAI,iBAAiB;AAChE,SAAK,gBAAgB,QAAQ,IAAI,mBAAmB;AACpD,SAAK,4BACH,QAAQ,IAAI,gCAAgC;AAC9C,SAAK,+BACH,QAAQ,IAAI,mCAAmC;AAKjD,UAAM,cAAc,IAAI,KAAK,KAAK;AAClC,SAAK,WAAW,SAAS,QAAQ,IAAI,aAAa,YAAY,UAAU;AAGxE,oBAAgB,KAAK,UAAU,KAAK,OAAO;AAAA,EAC7C;AAAA,EAEA,OAAc,OAA0B;AACtC,QAAI,CAAC,kBAAkB,UAAU;AAC/B,wBAAkB,WAAW,IAAI,kBAAA;AAAA,IACnC;AACA,WAAO,kBAAkB;AAAA,EAC3B;AAAA;AAAA;AAAA;AAAA;AAAA,EAMO,kBAA2C;AAChD,UAAM,qBAAqB,CAAC,UAA0B;AACpD,UAAI,CAAC,SAAS,MAAM,WAAW,GAAG;AAChC,eAAO;AAAA,MACT;AACA,UAAI,MAAM,SAAS,GAAG;AACpB,eAAO;AAAA,MACT;AAEA,aAAO,MAAM,UAAU,GAAG,CAAC,IAAI,aAAa,MAAM,UAAU,MAAM,SAAS,CAAC;AAAA,IAC9E;AAEA,WAAO;AAAA,MACL,UAAU,KAAK;AAAA,MACf,WAAW,KAAK;AAAA,MAChB,SAAS,KAAK;AAAA,MACd,oBAAoB,KAAK;AAAA,MACzB,cAAc,KAAK;AAAA,MACnB,gBAAgB,mBAAmB,KAAK,cAAc;AAAA,MACtD,oBAAoB,KAAK;AAAA,MACzB,UAAU,KAAK;AAAA,MACf,mBAAmB,KAAK;AAAA,MACxB,iBAAiB,KAAK;AAAA,MACtB,eAAe,KAAK;AAAA,MACpB,2BAA2B,KAAK;AAAA,MAChC,8BAA8B,KAAK;AAAA,IAAA;AAAA,EAEvC;AACF;AAKO,MAAM,4BAA4B,MAA8B;AACrE,SAAO;AAAA,IACL,UAAU,QAAQ,IAAI,0BAA0B;AAAA,IAChD,UAAU,QAAQ,IAAI,0BAA0B;AAAA,IAChD,UAAU,QAAQ,IAAI,0BAA0B;AAAA,IAChD,MACE,QAAQ,IAAI,sBAAsB,QAAQ,IAAI,wBAAwB;AAAA,IACxE,MAAM,QAAQ,IAAI,sBAAsB;AAAA,IACxC,mBAAmB,QAAQ,IAAI,oCAAoC;AAAA,IACnE,gBAAgB,QAAQ,IAAI,iCAAiC;AAAA,IAC7D,SAAS,QAAQ,IAAI,qBAAqB;AAAA,IAC1C,SAAS,QAAQ,IAAI,qBAAqB;AAAA,IAC1C,mBACE,QAAQ,IAAI,sCAAsC;AAAA,IACpD,SAAS,QAAQ,IAAI,0BAA0B;AAAA,IAC/C,aAAa,QAAQ,IAAI,+BAA+B;AAAA,EAAA;AAE5D;AAMO,MAAM,uBAAuB;AAAA,EAClC,YACkB,WACA,gBACA,oBACA,sBACA,gBACA,gBAAwB,GACxC;AANgB,SAAA,YAAA;AACA,SAAA,iBAAA;AACA,SAAA,qBAAA;AACA,SAAA,uBAAA;AACA,SAAA,iBAAA;AACA,SAAA,gBAAA;AAAA,EACf;AACL;AAOO,MAAM,4BAA4B,MAA8B;AACrE,SAAO,IAAI;AAAA,IACT,QAAQ,IAAI,eAAe;AAAA,IAC3B,QAAQ,IAAI,qBAAqB;AAAA,IACjC,QAAQ,IAAI,yBAAyB;AAAA,IACrC,QAAQ,IAAI,2BAA2B;AAAA,IACvC,QAAQ,IAAI,qBAAqB;AAAA,IACjC,SAAS,QAAQ,IAAI,0BAA0B,GAAG;AAAA,EAAA;AAEtD;ACtKO,MAAM,mBAAmB;AAAA,EAC9B,OAAe,aAAa;AAAA,EACpB;AAAA,EACA;AAAA,EACR,OAAwB,OAAO;AAAA;AAAA,EACvB,cAAuB;AAAA,EACvB,UAAwB;AAAA,IAC9B,cAAc,CAAA;AAAA,EAAC;AAAA,EAGjB,YAAY,SAAqC;AAC/C,SAAK,UAAU;AAAA,MACb,KAAK,SAAS,OAAO,kBAAkB,OAAO;AAAA,MAC9C,WAAW,SAAS,aAAa,mBAAmB;AAAA,IAAA;AAEtD,SAAK,KAAK,KAAK;AAAA,MACb,GAAG,OAAO;AAAA,MACV,MAAM;AAAA,QACJ,KAAK;AAAA,QACL,KAAK;AAAA,MAAA;AAAA,IACP,CACD;AAAA,EACH;AAAA;AAAA;AAAA;AAAA;AAAA,EAMQ,aAAmB;AACzB,QAAI,KAAK,aAAa;AACpB,YAAM,IAAI,MAAM,kCAAkC;AAAA,IACpD;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,OAAc,WAAW,QAAwB;AAC/C,QAAI;AACF,YAAM,aAAa,UAAU,MAAM;AACnC,UAAI,CAAC,YAAY;AACf,cAAM,IAAI,MAAM,wCAAwC;AAAA,MAC1D;AACA,YAAM,OAAO,IAAI,IAAI,YAAY,mBAAmB,IAAI,EAAE,SAAS,EAAE;AACrE,aAAO;AAAA,IACT,SAAS,OAAO;AACd,UAAI,iBAAiB,SAAS,MAAM,QAAQ,SAAS,UAAU,GAAG;AAChE,cAAM,IAAI,MAAM,wCAAwC;AAAA,MAC1D;AACA,YAAM;AAAA,IACR;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAc,UACZ,MACA,MACiC;AACjC,SAAK,WAAA;AACL,WAAO,KAAK,GAAG,KAAK,QAAQ,SAAS,EAClC,OAAO,KAAK,EACZ,MAAM,EAAE,MAAM,KAAK,KAAA,CAAM,EACzB,MAAA;AAAA,EACL;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;A
AAA,EAQA,MAAa,MAAM,MAAkB,QAAkC;AACrE,QAAI,CAAC,KAAK,eAAe,QAAQ;AAC/B,UAAI;AACF,cAAM,OAAO,mBAAmB,WAAW,MAAM;AACjD,cAAM,0BAAU,KAAA;AAGhB,cAAM,WAAW,MAAM,KAAK,GAAG,KAAK,QAAQ,SAAS,EAClD,MAAM;AAAA,UACL;AAAA,UACA,KAAK;AAAA,QAAA,CACN,EACA,MAAA;AAEH,YAAI,UAAU;AACZ,iBAAO;AAAA,QACT,OAAO;AAEL,gBAAM,SAAS,MAAM,KAAK,GAAG,KAAK,QAAQ,SAAS,EAChD,OAAO;AAAA,YACN;AAAA,YACA,KAAK;AAAA,YACL,YAAY;AAAA,UAAA,CACb,EACA,UAAU,IAAI;AACjB,iBAAO,OAAO,SAAS;AAAA,QACzB;AAAA,MACF,SAAS,OAAO;AACd,eAAO,MAAM,yBAAyB,KAAK;AAC3C,cAAM;AAAA,MACR;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAM,WAAW,MAAkB,QAAkC;AACnE,SAAK,WAAA;AACL,UAAM,UAAU,mBAAmB,WAAW,MAAM;AACpD,UAAM,WAAW,MAAM,KAAK,UAAU,MAAM,OAAO;AACnD,WAAO,CAAC;AAAA,EACV;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAM,OAAO,MAAkB,QAAkC;AAC/D,SAAK,WAAA;AACL,UAAM,aAAa,MAAM,KAAK,WAAW,MAAM,MAAM;AACrD,QAAI,YAAY;AACd,YAAM,KAAK,MAAM,MAAM,MAAiC;AAAA,IAC1D;AACA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,uBAAsC;AAC1C,SAAK,WAAA;AACL,UAAM,MAAM,KAAK,QAAQ;AACzB,QAAI,CAAC,IAAK;AAEV,UAAM,kBAAkB,MAAM;AAC9B,UAAM,sBAAsB,IAAI,KAAK,KAAK,IAAA,IAAQ,eAAe;AACjE,UAAM,kBAAkB,oBACrB,YAAA,EACA,MAAM,GAAG,EAAE,EACX,QAAQ,KAAK,GAAG;AAEnB,UAAM,KAAK,GAAG,KAAK,QAAQ,SAAS,EACjC,MAAM,cAAc,KAAK,eAAe,EACxC,IAAA;AAAA,EACL;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,iBAAiB,MAAqC;AAC1D,SAAK,WAAA;AACL,UAAM,UAAU,MAAM,KAAK,GAAG,KAAK,QAAQ,SAAS,EACjD,OAAO,KAAK,EACZ,MAAM,EAAE,MAAM;AAEjB,WAAO,QAAQ,IAAI,CAAC,WAAW,OAAO,GAAG;AAAA,EAC3C;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,oBAAoB,MAAiC;AACzD,SAAK,WAAA;AACL,UAAM,KAAK,GAAG,KAAK,QAAQ,SAAS,EAAE,MAAM,EAAE,MAAM,EAAE,IAAA;AAAA,EACxD;AAAA;AAAA;AAAA;AAAA,EAKA,MAAa,aAAa,MAAkB,QAA+B;AACzE,QAAI,CAAC,KAAK,aAAa;AACrB,UAAI;AACF,cAAM,OAAO,mBAAmB,WAAW,MAAM;AACjD,cAAM,KAAK,GAAG,KAAK,QAAQ,SAAS,EACjC,MAAM,EAAE,MAAM,KAAK,KAAA,CAAM,EACzB,IAAA;AAAA,MACL,SAAS,OAAO;AACd,eAAO,MAAM,0BAA0B,KAAK;AAC5C,cAAM;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,QAAuB;AAC3B,SAAK,WAAA;AACL,UAAM,KAAK,GAAG,KAAK,QAAQ,SAAS,EAAE,IAAA;AAAA,EACxC;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,UAAyB;AAC7B,QAAI,CAAC,KAAK,aAAa;AACrB,YAAM,KAAK,GAAG,QAAA;AACd,WAAK,cAAc;AAAA,IACrB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,aAAoC;AACxC,SAAK,WAAA;AAGL,UAAM,SAAU,MAAM,KAAK,GAAG,KAAK,QAAQ,SAAS,EACjD,OAAO,MAAM,EACb,MAAM,YAAY,EAClB,QAAQ,MAAM;AAGjB,SAAK,QAAQ,eAAe,OAAO;AAAA,MACjC,CAAC,KAAK,QAAQ;AACZ,YAAI,IAAI,IAAI,IAAI,SAAS,IAAI,OAAO,EAAE;AACtC,eAAO;AAAA,MACT;AAAA,MACA,CAAA;AAAA,IAAC;AAGH,WAAO,KAAK;AAAA,EACd;AACF;"}
@@ -1,6 +1,6 @@
1
1
  import fs, { mkdirSync } from "fs";
2
2
  import path from "path";
3
- import { l as logger } from "./logger-DeKxCUPp.js";
3
+ import { l as logger } from "./logger-DW5fyhVS.js";
4
4
  const STORAGE_FILE = path.join("/tmp", "job-state.json");
5
5
  const parentDir = path.dirname(STORAGE_FILE);
6
6
  try {
@@ -189,4 +189,4 @@ export {
189
189
  getCachedMMToken as g,
190
190
  setCachedMMToken as s
191
191
  };
192
- //# sourceMappingURL=index-Cq9tNcJT.js.map
192
+ //# sourceMappingURL=index-BkVlW0ZW.js.map
@@ -1 +1 @@
1
- {"version":3,"file":"index-Cq9tNcJT.js","sources":["../src/utils/local-data-store/jobs-shared-data.ts","../src/utils/local-data-store/database-lock.ts","../src/services/sqlite-service/sqlite-coordinator.ts"],"sourcesContent":["import fs from \"fs\";\nimport path from \"path\";\nimport { mkdirSync } from \"fs\";\n\n/**\n * This file contains the logic for storing and retrieving data from the job state file.\n * It is used to store data that is shared between jobs, and (more importantly) across job instances.\n */\n\nconst STORAGE_FILE = path.join(\"/tmp\", \"job-state.json\");\n\n// Ensure parent directory exists\nconst parentDir = path.dirname(STORAGE_FILE);\ntry {\n mkdirSync(parentDir, { recursive: true });\n} catch (error) {\n if ((error as NodeJS.ErrnoException).code !== \"EEXIST\") {\n throw error;\n }\n}\n\n//#region Non-exported functions\nconst ensureStorageFile = () => {\n if (!fs.existsSync(STORAGE_FILE)) {\n fs.writeFileSync(STORAGE_FILE, JSON.stringify({}), \"utf-8\");\n }\n};\n\nconst readStorage = (): Record<string, unknown> => {\n ensureStorageFile();\n try {\n return JSON.parse(fs.readFileSync(STORAGE_FILE, \"utf-8\"));\n } catch (error) {\n console.error(`Failed to read storage from ${STORAGE_FILE}:`, error);\n return {};\n }\n};\n\nconst writeStorage = (data: Record<string, unknown>): void => {\n ensureStorageFile();\n fs.writeFileSync(STORAGE_FILE, JSON.stringify(data, null, 2), \"utf-8\");\n};\n//#endregion\n\n//#region Database lock storage functions\n/**\n * Reads the database lock state from the shared storage file\n * @returns The data stored in the file\n */\nexport const readDatabaseLockState = (): Record<string, unknown> => {\n return readStorage();\n};\n\n/**\n * Writes the database lock state to the shared storage file\n * @param data The lock state data to write\n */\nexport const writeDatabaseLockState = (data: Record<string, unknown>): void => {\n writeStorage(data);\n};\n//#endregion\n\nexport const getInitialLoadComplete = (): boolean => {\n const data = readStorage();\n return (data.initialLoadComplete as boolean) ?? false;\n};\n\nexport const setInitialLoadComplete = (complete: boolean): void => {\n const data = readStorage();\n data.initialLoadComplete = complete;\n writeStorage(data);\n};\n\n/**\n * Gets the company's cached current timezone offset (e.g., -5)\n * @returns The cached timezone offset or 0 if not found\n */\nexport const getCachedTimezoneOffset = (): number => {\n const data = readStorage();\n return (data.timezoneOffset as number) ?? 0;\n};\n\n/**\n * Sets the company's current timezone offset in the cache\n * @param offset The timezone offset in hours\n */\nexport const setTimezoneOffsetInCache = (offset: number): void => {\n const data = readStorage();\n data.timezoneOffset = offset;\n writeStorage(data);\n};\n\n/**\n * Gets the cached timezone name (e.g., \"America/New_York\")\n * @returns The cached timezone name or null if not found\n */\nexport const getCachedTimezoneName = (): string | null => {\n const data = readStorage();\n return (data.timezoneName as string) ?? 
null;\n};\n\n/**\n * Sets the timezone name in the cache\n * @param timezone The timezone name (e.g., \"America/New_York\")\n */\nexport const setTimezoneNameInCache = (timezone: string): void => {\n const data = readStorage();\n data.timezoneName = timezone;\n writeStorage(data);\n};\n\ninterface CachedToken {\n token: string;\n expiration: number | null;\n}\n\n/**\n * Gets the cached MM API token and its expiration\n * @returns The cached token and expiration or null if not found\n */\nexport const getCachedMMToken = (): CachedToken | null => {\n const data = readStorage();\n return (data.mmApiToken as CachedToken) ?? null;\n};\n\n/**\n * Sets the MM API token and its expiration in the cache\n * @param tokenData The token and expiration to cache\n */\nexport const setCachedMMToken = (tokenData: CachedToken): void => {\n const data = readStorage();\n data.mmApiToken = tokenData;\n writeStorage(data);\n};\n","import {\n readDatabaseLockState,\n writeDatabaseLockState,\n} from \"./jobs-shared-data\";\n\ninterface DatabaseLock {\n isLocked: boolean;\n lockedBy: string;\n lockedAt: string | null;\n}\n\n/**\n * Gets the current database lock state\n * @returns The current database lock state\n */\nexport const getDatabaseLock = (): DatabaseLock => {\n const data = readDatabaseLockState();\n return (\n (data.databaseLock as DatabaseLock) ?? {\n isLocked: false,\n lockedBy: \"\",\n lockedAt: null,\n }\n );\n};\n\n/**\n * Attempts to acquire the database lock\n * @param processName Name of the process requesting the lock\n * @returns true if lock was acquired, false if database is already locked\n */\nexport const acquireDatabaseLock = (processName: string): boolean => {\n const data = readDatabaseLockState();\n const currentLock = (data.databaseLock as DatabaseLock) ?? {\n isLocked: false,\n lockedBy: \"\",\n lockedAt: null,\n };\n\n if (currentLock.isLocked) {\n return false;\n }\n\n data.databaseLock = {\n isLocked: true,\n lockedBy: processName,\n lockedAt: new Date().toISOString(),\n };\n writeDatabaseLockState(data);\n return true;\n};\n\n/**\n * Releases the database lock\n * @param processName Name of the process releasing the lock\n * @returns true if lock was released, false if process doesn't own the lock\n */\nexport const releaseDatabaseLock = (processName: string): boolean => {\n const data = readDatabaseLockState();\n const currentLock = (data.databaseLock as DatabaseLock) ?? 
{\n isLocked: false,\n lockedBy: \"\",\n lockedAt: null,\n };\n\n if (!currentLock.isLocked || currentLock.lockedBy !== processName) {\n return false;\n }\n\n data.databaseLock = {\n isLocked: false,\n lockedBy: \"\",\n lockedAt: null,\n };\n writeDatabaseLockState(data);\n return true;\n};\n\n/**\n * Checks if the database is available for use\n * @returns true if database is available, false if locked\n */\nexport const isDatabaseAvailable = (): boolean => {\n const lock = getDatabaseLock();\n return !lock.isLocked;\n};\n","import {\n acquireDatabaseLock,\n releaseDatabaseLock,\n getDatabaseLock,\n} from \"../../utils/local-data-store/database-lock\";\nimport { logger } from \"../reporting-service\";\n\nexport class SQLiteCoordinator {\n private static readonly LOCK_TIMEOUT_MS = 30_000; // 30 seconds\n private static readonly LOCK_RETRY_INTERVAL_MS = 1_000; // 1 second\n\n /**\n * Performs startup checks to ensure no stale locks exist\n * Should be called when the application starts\n */\n static async performStartupCheck(): Promise<void> {\n const currentLock = getDatabaseLock();\n\n if (currentLock.isLocked) {\n logger.warn(\n `Found existing lock held by ${currentLock.lockedBy}, releasing for clean startup`\n );\n releaseDatabaseLock(currentLock.lockedBy);\n }\n }\n\n /**\n * Attempts to acquire the database lock\n * @param processName Name of the process requesting the lock\n * @returns true if lock was acquired, false if database is already locked\n */\n private static async tryAcquireLock(processName: string): Promise<boolean> {\n return acquireDatabaseLock(processName);\n }\n\n /**\n * Executes a database operation with proper locking\n * @param processName Name of the process executing the operation\n * @param operation The operation to execute\n * @returns The result of the operation\n */\n static async executeWithLock<T>(\n processName: string,\n operation: () => Promise<T>\n ): Promise<T> {\n const startTime = Date.now();\n\n // Try to acquire the lock with timeout\n while (Date.now() - startTime < this.LOCK_TIMEOUT_MS) {\n if (await this.tryAcquireLock(processName)) {\n try {\n // Execute the operation\n const result = await operation();\n return result;\n } finally {\n // Always release the lock\n releaseDatabaseLock(processName);\n }\n }\n\n // Wait before retrying\n await new Promise((resolve) =>\n setTimeout(resolve, this.LOCK_RETRY_INTERVAL_MS)\n );\n }\n\n throw new Error(\n `Failed to acquire database lock after ${this.LOCK_TIMEOUT_MS}ms`\n );\n }\n\n /**\n * Checks if the database is currently available for operations\n * @returns true if the database is available, false if locked\n */\n static isAvailable(): boolean {\n const lock = getDatabaseLock();\n return !lock.isLocked;\n 
}\n}\n"],"names":[],"mappings":";;;AASA,MAAM,eAAe,KAAK,KAAK,QAAQ,gBAAgB;AAGvD,MAAM,YAAY,KAAK,QAAQ,YAAY;AAC3C,IAAI;AACF,YAAU,WAAW,EAAE,WAAW,KAAA,CAAM;AAC1C,SAAS,OAAO;AACd,MAAK,MAAgC,SAAS,UAAU;AACtD,UAAM;AAAA,EACR;AACF;AAGA,MAAM,oBAAoB,MAAM;AAC9B,MAAI,CAAC,GAAG,WAAW,YAAY,GAAG;AAChC,OAAG,cAAc,cAAc,KAAK,UAAU,CAAA,CAAE,GAAG,OAAO;AAAA,EAC5D;AACF;AAEA,MAAM,cAAc,MAA+B;AACjD,oBAAA;AACA,MAAI;AACF,WAAO,KAAK,MAAM,GAAG,aAAa,cAAc,OAAO,CAAC;AAAA,EAC1D,SAAS,OAAO;AACd,YAAQ,MAAM,+BAA+B,YAAY,KAAK,KAAK;AACnE,WAAO,CAAA;AAAA,EACT;AACF;AAEA,MAAM,eAAe,CAAC,SAAwC;AAC5D,oBAAA;AACA,KAAG,cAAc,cAAc,KAAK,UAAU,MAAM,MAAM,CAAC,GAAG,OAAO;AACvE;AAQO,MAAM,wBAAwB,MAA+B;AAClE,SAAO,YAAA;AACT;AAMO,MAAM,yBAAyB,CAAC,SAAwC;AAC7E,eAAa,IAAI;AACnB;AAGO,MAAM,yBAAyB,MAAe;AACnD,QAAM,OAAO,YAAA;AACb,SAAQ,KAAK,uBAAmC;AAClD;AAEO,MAAM,yBAAyB,CAAC,aAA4B;AACjE,QAAM,OAAO,YAAA;AACb,OAAK,sBAAsB;AAC3B,eAAa,IAAI;AACnB;AAMO,MAAM,0BAA0B,MAAc;AACnD,QAAM,OAAO,YAAA;AACb,SAAQ,KAAK,kBAA6B;AAC5C;AAMO,MAAM,2BAA2B,CAAC,WAAyB;AAChE,QAAM,OAAO,YAAA;AACb,OAAK,iBAAiB;AACtB,eAAa,IAAI;AACnB;AAMO,MAAM,wBAAwB,MAAqB;AACxD,QAAM,OAAO,YAAA;AACb,SAAQ,KAAK,gBAA2B;AAC1C;AAMO,MAAM,yBAAyB,CAAC,aAA2B;AAChE,QAAM,OAAO,YAAA;AACb,OAAK,eAAe;AACpB,eAAa,IAAI;AACnB;AAWO,MAAM,mBAAmB,MAA0B;AACxD,QAAM,OAAO,YAAA;AACb,SAAQ,KAAK,cAA8B;AAC7C;AAMO,MAAM,mBAAmB,CAAC,cAAiC;AAChE,QAAM,OAAO,YAAA;AACb,OAAK,aAAa;AAClB,eAAa,IAAI;AACnB;ACtHO,MAAM,kBAAkB,MAAoB;AACjD,QAAM,OAAO,sBAAA;AACb,SACG,KAAK,gBAAiC;AAAA,IACrC,UAAU;AAAA,IACV,UAAU;AAAA,IACV,UAAU;AAAA,EAAA;AAGhB;AAOO,MAAM,sBAAsB,CAAC,gBAAiC;AACnE,QAAM,OAAO,sBAAA;AACb,QAAM,cAAe,KAAK,gBAAiC;AAAA,IACzD,UAAU;AAAA,IACV,UAAU;AAAA,IACV,UAAU;AAAA,EAAA;AAGZ,MAAI,YAAY,UAAU;AACxB,WAAO;AAAA,EACT;AAEA,OAAK,eAAe;AAAA,IAClB,UAAU;AAAA,IACV,UAAU;AAAA,IACV,WAAU,oBAAI,KAAA,GAAO,YAAA;AAAA,EAAY;AAEnC,yBAAuB,IAAI;AAC3B,SAAO;AACT;AAOO,MAAM,sBAAsB,CAAC,gBAAiC;AACnE,QAAM,OAAO,sBAAA;AACb,QAAM,cAAe,KAAK,gBAAiC;AAAA,IACzD,UAAU;AAAA,IACV,UAAU;AAAA,IACV,UAAU;AAAA,EAAA;AAGZ,MAAI,CAAC,YAAY,YAAY,YAAY,aAAa,aAAa;AACjE,WAAO;AAAA,EACT;AAEA,OAAK,eAAe;AAAA,IAClB,UAAU;AAAA,IACV,UAAU;AAAA,IACV,UAAU;AAAA,EAAA;AAEZ,yBAAuB,IAAI;AAC3B,SAAO;AACT;AAMO,MAAM,sBAAsB,MAAe;AAChD,QAAM,OAAO,gBAAA;AACb,SAAO,CAAC,KAAK;AACf;AC9EO,MAAM,kBAAkB;AAAA,EAC7B,OAAwB,kBAAkB;AAAA;AAAA,EAC1C,OAAwB,yBAAyB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAMjD,aAAa,sBAAqC;AAChD,UAAM,cAAc,gBAAA;AAEpB,QAAI,YAAY,UAAU;AACxB,aAAO;AAAA,QACL,+BAA+B,YAAY,QAAQ;AAAA,MAAA;AAErD,0BAAoB,YAAY,QAAQ;AAAA,IAC1C;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,aAAqB,eAAe,aAAuC;AACzE,WAAO,oBAAoB,WAAW;AAAA,EACxC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,aAAa,gBACX,aACA,WACY;AACZ,UAAM,YAAY,KAAK,IAAA;AAGvB,WAAO,KAAK,IAAA,IAAQ,YAAY,KAAK,iBAAiB;AACpD,UAAI,MAAM,KAAK,eAAe,WAAW,GAAG;AAC1C,YAAI;AAEF,gBAAM,SAAS,MAAM,UAAA;AACrB,iBAAO;AAAA,QACT,UAAA;AAEE,8BAAoB,WAAW;AAAA,QACjC;AAAA,MACF;AAGA,YAAM,IAAI;AAAA,QAAQ,CAAC,YACjB,WAAW,SAAS,KAAK,sBAAsB;AAAA,MAAA;AAAA,IAEnD;AAEA,UAAM,IAAI;AAAA,MACR,yCAAyC,KAAK,eAAe;AAAA,IAAA;AAAA,EAEjE;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,OAAO,cAAuB;AAC5B,UAAM,OAAO,gBAAA;AACb,WAAO,CAAC,KAAK;AAAA,EACf;AACF;"}
1
+ {"version":3,"file":"index-BkVlW0ZW.js","sources":["../src/utils/local-data-store/jobs-shared-data.ts","../src/utils/local-data-store/database-lock.ts","../src/services/sqlite-service/sqlite-coordinator.ts"],"sourcesContent":["import fs from \"fs\";\nimport path from \"path\";\nimport { mkdirSync } from \"fs\";\n\n/**\n * This file contains the logic for storing and retrieving data from the job state file.\n * It is used to store data that is shared between jobs, and (more importantly) across job instances.\n */\n\nconst STORAGE_FILE = path.join(\"/tmp\", \"job-state.json\");\n\n// Ensure parent directory exists\nconst parentDir = path.dirname(STORAGE_FILE);\ntry {\n mkdirSync(parentDir, { recursive: true });\n} catch (error) {\n if ((error as NodeJS.ErrnoException).code !== \"EEXIST\") {\n throw error;\n }\n}\n\n//#region Non-exported functions\nconst ensureStorageFile = () => {\n if (!fs.existsSync(STORAGE_FILE)) {\n fs.writeFileSync(STORAGE_FILE, JSON.stringify({}), \"utf-8\");\n }\n};\n\nconst readStorage = (): Record<string, unknown> => {\n ensureStorageFile();\n try {\n return JSON.parse(fs.readFileSync(STORAGE_FILE, \"utf-8\"));\n } catch (error) {\n console.error(`Failed to read storage from ${STORAGE_FILE}:`, error);\n return {};\n }\n};\n\nconst writeStorage = (data: Record<string, unknown>): void => {\n ensureStorageFile();\n fs.writeFileSync(STORAGE_FILE, JSON.stringify(data, null, 2), \"utf-8\");\n};\n//#endregion\n\n//#region Database lock storage functions\n/**\n * Reads the database lock state from the shared storage file\n * @returns The data stored in the file\n */\nexport const readDatabaseLockState = (): Record<string, unknown> => {\n return readStorage();\n};\n\n/**\n * Writes the database lock state to the shared storage file\n * @param data The lock state data to write\n */\nexport const writeDatabaseLockState = (data: Record<string, unknown>): void => {\n writeStorage(data);\n};\n//#endregion\n\nexport const getInitialLoadComplete = (): boolean => {\n const data = readStorage();\n return (data.initialLoadComplete as boolean) ?? false;\n};\n\nexport const setInitialLoadComplete = (complete: boolean): void => {\n const data = readStorage();\n data.initialLoadComplete = complete;\n writeStorage(data);\n};\n\n/**\n * Gets the company's cached current timezone offset (e.g., -5)\n * @returns The cached timezone offset or 0 if not found\n */\nexport const getCachedTimezoneOffset = (): number => {\n const data = readStorage();\n return (data.timezoneOffset as number) ?? 0;\n};\n\n/**\n * Sets the company's current timezone offset in the cache\n * @param offset The timezone offset in hours\n */\nexport const setTimezoneOffsetInCache = (offset: number): void => {\n const data = readStorage();\n data.timezoneOffset = offset;\n writeStorage(data);\n};\n\n/**\n * Gets the cached timezone name (e.g., \"America/New_York\")\n * @returns The cached timezone name or null if not found\n */\nexport const getCachedTimezoneName = (): string | null => {\n const data = readStorage();\n return (data.timezoneName as string) ?? 
null;\n};\n\n/**\n * Sets the timezone name in the cache\n * @param timezone The timezone name (e.g., \"America/New_York\")\n */\nexport const setTimezoneNameInCache = (timezone: string): void => {\n const data = readStorage();\n data.timezoneName = timezone;\n writeStorage(data);\n};\n\ninterface CachedToken {\n token: string;\n expiration: number | null;\n}\n\n/**\n * Gets the cached MM API token and its expiration\n * @returns The cached token and expiration or null if not found\n */\nexport const getCachedMMToken = (): CachedToken | null => {\n const data = readStorage();\n return (data.mmApiToken as CachedToken) ?? null;\n};\n\n/**\n * Sets the MM API token and its expiration in the cache\n * @param tokenData The token and expiration to cache\n */\nexport const setCachedMMToken = (tokenData: CachedToken): void => {\n const data = readStorage();\n data.mmApiToken = tokenData;\n writeStorage(data);\n};\n","import {\n readDatabaseLockState,\n writeDatabaseLockState,\n} from \"./jobs-shared-data\";\n\ninterface DatabaseLock {\n isLocked: boolean;\n lockedBy: string;\n lockedAt: string | null;\n}\n\n/**\n * Gets the current database lock state\n * @returns The current database lock state\n */\nexport const getDatabaseLock = (): DatabaseLock => {\n const data = readDatabaseLockState();\n return (\n (data.databaseLock as DatabaseLock) ?? {\n isLocked: false,\n lockedBy: \"\",\n lockedAt: null,\n }\n );\n};\n\n/**\n * Attempts to acquire the database lock\n * @param processName Name of the process requesting the lock\n * @returns true if lock was acquired, false if database is already locked\n */\nexport const acquireDatabaseLock = (processName: string): boolean => {\n const data = readDatabaseLockState();\n const currentLock = (data.databaseLock as DatabaseLock) ?? {\n isLocked: false,\n lockedBy: \"\",\n lockedAt: null,\n };\n\n if (currentLock.isLocked) {\n return false;\n }\n\n data.databaseLock = {\n isLocked: true,\n lockedBy: processName,\n lockedAt: new Date().toISOString(),\n };\n writeDatabaseLockState(data);\n return true;\n};\n\n/**\n * Releases the database lock\n * @param processName Name of the process releasing the lock\n * @returns true if lock was released, false if process doesn't own the lock\n */\nexport const releaseDatabaseLock = (processName: string): boolean => {\n const data = readDatabaseLockState();\n const currentLock = (data.databaseLock as DatabaseLock) ?? 
{\n isLocked: false,\n lockedBy: \"\",\n lockedAt: null,\n };\n\n if (!currentLock.isLocked || currentLock.lockedBy !== processName) {\n return false;\n }\n\n data.databaseLock = {\n isLocked: false,\n lockedBy: \"\",\n lockedAt: null,\n };\n writeDatabaseLockState(data);\n return true;\n};\n\n/**\n * Checks if the database is available for use\n * @returns true if database is available, false if locked\n */\nexport const isDatabaseAvailable = (): boolean => {\n const lock = getDatabaseLock();\n return !lock.isLocked;\n};\n","import {\n acquireDatabaseLock,\n releaseDatabaseLock,\n getDatabaseLock,\n} from \"../../utils/local-data-store/database-lock\";\nimport { logger } from \"../reporting-service\";\n\nexport class SQLiteCoordinator {\n private static readonly LOCK_TIMEOUT_MS = 30_000; // 30 seconds\n private static readonly LOCK_RETRY_INTERVAL_MS = 1_000; // 1 second\n\n /**\n * Performs startup checks to ensure no stale locks exist\n * Should be called when the application starts\n */\n static async performStartupCheck(): Promise<void> {\n const currentLock = getDatabaseLock();\n\n if (currentLock.isLocked) {\n logger.warn(\n `Found existing lock held by ${currentLock.lockedBy}, releasing for clean startup`\n );\n releaseDatabaseLock(currentLock.lockedBy);\n }\n }\n\n /**\n * Attempts to acquire the database lock\n * @param processName Name of the process requesting the lock\n * @returns true if lock was acquired, false if database is already locked\n */\n private static async tryAcquireLock(processName: string): Promise<boolean> {\n return acquireDatabaseLock(processName);\n }\n\n /**\n * Executes a database operation with proper locking\n * @param processName Name of the process executing the operation\n * @param operation The operation to execute\n * @returns The result of the operation\n */\n static async executeWithLock<T>(\n processName: string,\n operation: () => Promise<T>\n ): Promise<T> {\n const startTime = Date.now();\n\n // Try to acquire the lock with timeout\n while (Date.now() - startTime < this.LOCK_TIMEOUT_MS) {\n if (await this.tryAcquireLock(processName)) {\n try {\n // Execute the operation\n const result = await operation();\n return result;\n } finally {\n // Always release the lock\n releaseDatabaseLock(processName);\n }\n }\n\n // Wait before retrying\n await new Promise((resolve) =>\n setTimeout(resolve, this.LOCK_RETRY_INTERVAL_MS)\n );\n }\n\n throw new Error(\n `Failed to acquire database lock after ${this.LOCK_TIMEOUT_MS}ms`\n );\n }\n\n /**\n * Checks if the database is currently available for operations\n * @returns true if the database is available, false if locked\n */\n static isAvailable(): boolean {\n const lock = getDatabaseLock();\n return !lock.isLocked;\n 
}\n}\n"],"names":[],"mappings":";;;AASA,MAAM,eAAe,KAAK,KAAK,QAAQ,gBAAgB;AAGvD,MAAM,YAAY,KAAK,QAAQ,YAAY;AAC3C,IAAI;AACF,YAAU,WAAW,EAAE,WAAW,KAAA,CAAM;AAC1C,SAAS,OAAO;AACd,MAAK,MAAgC,SAAS,UAAU;AACtD,UAAM;AAAA,EACR;AACF;AAGA,MAAM,oBAAoB,MAAM;AAC9B,MAAI,CAAC,GAAG,WAAW,YAAY,GAAG;AAChC,OAAG,cAAc,cAAc,KAAK,UAAU,CAAA,CAAE,GAAG,OAAO;AAAA,EAC5D;AACF;AAEA,MAAM,cAAc,MAA+B;AACjD,oBAAA;AACA,MAAI;AACF,WAAO,KAAK,MAAM,GAAG,aAAa,cAAc,OAAO,CAAC;AAAA,EAC1D,SAAS,OAAO;AACd,YAAQ,MAAM,+BAA+B,YAAY,KAAK,KAAK;AACnE,WAAO,CAAA;AAAA,EACT;AACF;AAEA,MAAM,eAAe,CAAC,SAAwC;AAC5D,oBAAA;AACA,KAAG,cAAc,cAAc,KAAK,UAAU,MAAM,MAAM,CAAC,GAAG,OAAO;AACvE;AAQO,MAAM,wBAAwB,MAA+B;AAClE,SAAO,YAAA;AACT;AAMO,MAAM,yBAAyB,CAAC,SAAwC;AAC7E,eAAa,IAAI;AACnB;AAGO,MAAM,yBAAyB,MAAe;AACnD,QAAM,OAAO,YAAA;AACb,SAAQ,KAAK,uBAAmC;AAClD;AAEO,MAAM,yBAAyB,CAAC,aAA4B;AACjE,QAAM,OAAO,YAAA;AACb,OAAK,sBAAsB;AAC3B,eAAa,IAAI;AACnB;AAMO,MAAM,0BAA0B,MAAc;AACnD,QAAM,OAAO,YAAA;AACb,SAAQ,KAAK,kBAA6B;AAC5C;AAMO,MAAM,2BAA2B,CAAC,WAAyB;AAChE,QAAM,OAAO,YAAA;AACb,OAAK,iBAAiB;AACtB,eAAa,IAAI;AACnB;AAMO,MAAM,wBAAwB,MAAqB;AACxD,QAAM,OAAO,YAAA;AACb,SAAQ,KAAK,gBAA2B;AAC1C;AAMO,MAAM,yBAAyB,CAAC,aAA2B;AAChE,QAAM,OAAO,YAAA;AACb,OAAK,eAAe;AACpB,eAAa,IAAI;AACnB;AAWO,MAAM,mBAAmB,MAA0B;AACxD,QAAM,OAAO,YAAA;AACb,SAAQ,KAAK,cAA8B;AAC7C;AAMO,MAAM,mBAAmB,CAAC,cAAiC;AAChE,QAAM,OAAO,YAAA;AACb,OAAK,aAAa;AAClB,eAAa,IAAI;AACnB;ACtHO,MAAM,kBAAkB,MAAoB;AACjD,QAAM,OAAO,sBAAA;AACb,SACG,KAAK,gBAAiC;AAAA,IACrC,UAAU;AAAA,IACV,UAAU;AAAA,IACV,UAAU;AAAA,EAAA;AAGhB;AAOO,MAAM,sBAAsB,CAAC,gBAAiC;AACnE,QAAM,OAAO,sBAAA;AACb,QAAM,cAAe,KAAK,gBAAiC;AAAA,IACzD,UAAU;AAAA,IACV,UAAU;AAAA,IACV,UAAU;AAAA,EAAA;AAGZ,MAAI,YAAY,UAAU;AACxB,WAAO;AAAA,EACT;AAEA,OAAK,eAAe;AAAA,IAClB,UAAU;AAAA,IACV,UAAU;AAAA,IACV,WAAU,oBAAI,KAAA,GAAO,YAAA;AAAA,EAAY;AAEnC,yBAAuB,IAAI;AAC3B,SAAO;AACT;AAOO,MAAM,sBAAsB,CAAC,gBAAiC;AACnE,QAAM,OAAO,sBAAA;AACb,QAAM,cAAe,KAAK,gBAAiC;AAAA,IACzD,UAAU;AAAA,IACV,UAAU;AAAA,IACV,UAAU;AAAA,EAAA;AAGZ,MAAI,CAAC,YAAY,YAAY,YAAY,aAAa,aAAa;AACjE,WAAO;AAAA,EACT;AAEA,OAAK,eAAe;AAAA,IAClB,UAAU;AAAA,IACV,UAAU;AAAA,IACV,UAAU;AAAA,EAAA;AAEZ,yBAAuB,IAAI;AAC3B,SAAO;AACT;AAMO,MAAM,sBAAsB,MAAe;AAChD,QAAM,OAAO,gBAAA;AACb,SAAO,CAAC,KAAK;AACf;AC9EO,MAAM,kBAAkB;AAAA,EAC7B,OAAwB,kBAAkB;AAAA;AAAA,EAC1C,OAAwB,yBAAyB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAMjD,aAAa,sBAAqC;AAChD,UAAM,cAAc,gBAAA;AAEpB,QAAI,YAAY,UAAU;AACxB,aAAO;AAAA,QACL,+BAA+B,YAAY,QAAQ;AAAA,MAAA;AAErD,0BAAoB,YAAY,QAAQ;AAAA,IAC1C;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,aAAqB,eAAe,aAAuC;AACzE,WAAO,oBAAoB,WAAW;AAAA,EACxC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,aAAa,gBACX,aACA,WACY;AACZ,UAAM,YAAY,KAAK,IAAA;AAGvB,WAAO,KAAK,IAAA,IAAQ,YAAY,KAAK,iBAAiB;AACpD,UAAI,MAAM,KAAK,eAAe,WAAW,GAAG;AAC1C,YAAI;AAEF,gBAAM,SAAS,MAAM,UAAA;AACrB,iBAAO;AAAA,QACT,UAAA;AAEE,8BAAoB,WAAW;AAAA,QACjC;AAAA,MACF;AAGA,YAAM,IAAI;AAAA,QAAQ,CAAC,YACjB,WAAW,SAAS,KAAK,sBAAsB;AAAA,MAAA;AAAA,IAEnD;AAEA,UAAM,IAAI;AAAA,MACR,yCAAyC,KAAK,eAAe;AAAA,IAAA;AAAA,EAEjE;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,OAAO,cAAuB;AAC5B,UAAM,OAAO,gBAAA;AACb,WAAO,CAAC,KAAK;AAAA,EACf;AACF;"}
@@ -17400,6 +17400,28 @@ winston.transports.DailyRotateFile = DailyRotateFile;
17400
17400
  var winstonDailyRotateFile = DailyRotateFile;
17401
17401
  const DailyRotateFile$1 = /* @__PURE__ */ getDefaultExportFromCjs(winstonDailyRotateFile);
17402
17402
  const logDirectory = "logs";
17403
+ const initialLogLevel = process.env.LOG_LEVEL || "info";
17404
+ const LOGGER_ERROR_PREFIX = "[mm-erp-sdk logger]";
17405
+ const serializeLoggerError = (error) => {
17406
+ if (error instanceof Error) {
17407
+ return error.stack ?? error.message;
17408
+ }
17409
+ if (typeof error === "string") {
17410
+ return error;
17411
+ }
17412
+ try {
17413
+ return JSON.stringify(error);
17414
+ } catch {
17415
+ return String(error);
17416
+ }
17417
+ };
17418
+ const handleLoggerError = (error) => {
17419
+ const serialized = serializeLoggerError(error);
17420
+ try {
17421
+ console.error(`${LOGGER_ERROR_PREFIX} transport error: ${serialized}`);
17422
+ } catch {
17423
+ }
17424
+ };
17403
17425
  const MSG_MAX_LEN = 60;
17404
17426
  const MSG_MAX_CHARS = 2048;
17405
17427
  const truncateString = (str) => {
@@ -17432,22 +17454,26 @@ const logFormat = winston$2.format.combine(
17432
17454
  winston$2.format.splat(),
17433
17455
  baseFormat
17434
17456
  );
17457
+ const createConsoleTransport = () => {
17458
+ const consoleTransport = new winston$2.transports.Console({
17459
+ format: winston$2.format.combine(
17460
+ winston$2.format.timestamp(),
17461
+ winston$2.format.splat(),
17462
+ baseFormat,
17463
+ winston$2.format.colorize({ all: true })
17464
+ )
17465
+ });
17466
+ consoleTransport.on("error", handleLoggerError);
17467
+ return consoleTransport;
17468
+ };
17435
17469
  const logger = winston$2.createLogger({
17436
- level: process.env.LOG_LEVEL || "info",
17470
+ level: initialLogLevel,
17437
17471
  format: logFormat,
17438
- transports: [
17439
- new winston$2.transports.Console({
17440
- format: winston$2.format.combine(
17441
- winston$2.format.timestamp(),
17442
- winston$2.format.splat(),
17443
- baseFormat,
17444
- winston$2.format.colorize({ all: true })
17445
- )
17446
- })
17447
- ]
17472
+ transports: [createConsoleTransport()]
17448
17473
  });
17474
+ logger.on("error", handleLoggerError);
17449
17475
  const createFileTransport = () => {
17450
- return new DailyRotateFile$1({
17476
+ const transport = new DailyRotateFile$1({
17451
17477
  filename: path$1.join(logDirectory, "%DATE%.log"),
17452
17478
  datePattern: "YYYY-MM-DD",
17453
17479
  zippedArchive: true,
@@ -17455,6 +17481,8 @@ const createFileTransport = () => {
17455
17481
  maxFiles: "14d",
17456
17482
  format: logFormat
17457
17483
  });
17484
+ transport.on("error", handleLoggerError);
17485
+ return transport;
17458
17486
  };
17459
17487
  function attachRotateMitigation(transport, opts) {
17460
17488
  const { logLevel, nodeEnv } = opts;
@@ -17463,30 +17491,72 @@ function attachRotateMitigation(transport, opts) {
17463
17491
  if (isRefreshing) return;
17464
17492
  isRefreshing = true;
17465
17493
  let removalTimer = null;
17466
- const next = createFileTransport();
17467
- next.on("new", () => {
17494
+ let next = null;
17495
+ const cleanupRefresh = () => {
17468
17496
  if (removalTimer) {
17469
17497
  clearTimeout(removalTimer);
17470
17498
  removalTimer = null;
17471
17499
  }
17472
- try {
17473
- logger.remove(transport);
17474
- } catch {
17475
- }
17476
17500
  isRefreshing = false;
17477
- });
17478
- attachRotateMitigation(next, opts);
17479
- logger.add(next);
17480
- const REMOVAL_GRACE_MS = 3e4;
17481
- removalTimer = setTimeout(() => {
17501
+ };
17502
+ try {
17503
+ next = createFileTransport();
17504
+ next.on("new", () => {
17505
+ if (removalTimer) {
17506
+ clearTimeout(removalTimer);
17507
+ removalTimer = null;
17508
+ }
17509
+ try {
17510
+ logger.remove(transport);
17511
+ } catch (error) {
17512
+ handleLoggerError(error);
17513
+ }
17514
+ cleanupRefresh();
17515
+ });
17516
+ attachRotateMitigation(next, opts);
17482
17517
  try {
17483
- logger.remove(transport);
17484
- } catch {
17518
+ logger.add(next);
17519
+ } catch (error) {
17520
+ handleLoggerError(error);
17521
+ cleanupRefresh();
17522
+ try {
17523
+ logger.remove(next);
17524
+ } catch {
17525
+ }
17526
+ try {
17527
+ if (typeof next.close === "function") {
17528
+ next.close();
17529
+ }
17530
+ } catch {
17531
+ }
17532
+ return;
17485
17533
  }
17486
- isRefreshing = false;
17487
- removalTimer = null;
17488
- }, REMOVAL_GRACE_MS);
17489
- logger.level = logLevel;
17534
+ const REMOVAL_GRACE_MS = 3e4;
17535
+ removalTimer = setTimeout(() => {
17536
+ try {
17537
+ logger.remove(transport);
17538
+ } catch (error) {
17539
+ handleLoggerError(error);
17540
+ }
17541
+ cleanupRefresh();
17542
+ }, REMOVAL_GRACE_MS);
17543
+ logger.level = logLevel;
17544
+ } catch (error) {
17545
+ cleanupRefresh();
17546
+ handleLoggerError(error);
17547
+ if (next) {
17548
+ try {
17549
+ logger.remove(next);
17550
+ } catch {
17551
+ }
17552
+ try {
17553
+ if (typeof next.close === "function") {
17554
+ next.close();
17555
+ }
17556
+ } catch {
17557
+ }
17558
+ }
17559
+ }
17490
17560
  });
17491
17561
  }
17492
17562
  const configureLogger = (logLevel, nodeEnv) => {
@@ -17510,16 +17580,7 @@ const configureLogger = (logLevel, nodeEnv) => {
17510
17580
  attachRotateMitigation(fileTransport, { logLevel, nodeEnv });
17511
17581
  logger.add(fileTransport);
17512
17582
  if (nodeEnv !== "production") {
17513
- logger.add(
17514
- new winston$2.transports.Console({
17515
- format: winston$2.format.combine(
17516
- winston$2.format.timestamp(),
17517
- winston$2.format.splat(),
17518
- baseFormat,
17519
- winston$2.format.colorize({ all: true })
17520
- )
17521
- })
17522
- );
17583
+ logger.add(createConsoleTransport());
17523
17584
  }
17524
17585
  logger.level = logLevel;
17525
17586
  };
@@ -17529,4 +17590,4 @@ export {
17529
17590
  getDefaultExportFromCjs as g,
17530
17591
  logger as l
17531
17592
  };
17532
- //# sourceMappingURL=logger-DeKxCUPp.js.map
17593
+ //# sourceMappingURL=logger-DW5fyhVS.js.map