@machinemetrics/mm-erp-sdk 0.1.1-beta.6 → 0.1.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/{hashed-cache-manager-DkDox9wX.js → hashed-cache-manager-Ci59eC75.js} +2 -7
- package/dist/hashed-cache-manager-Ci59eC75.js.map +1 -0
- package/dist/{index-Cn9ccxOO.js → index-CXbOvFyf.js} +3 -1
- package/dist/{index-Cn9ccxOO.js.map → index-CXbOvFyf.js.map} +1 -1
- package/dist/index.d.ts +2 -0
- package/dist/index.d.ts.map +1 -1
- package/dist/mm-erp-sdk.js +137 -52
- package/dist/mm-erp-sdk.js.map +1 -1
- package/dist/services/data-sync-service/configuration-manager.d.ts +0 -1
- package/dist/services/data-sync-service/configuration-manager.d.ts.map +1 -1
- package/dist/services/data-sync-service/jobs/clean-up-expired-cache.js +6 -3
- package/dist/services/data-sync-service/jobs/clean-up-expired-cache.js.map +1 -1
- package/dist/services/data-sync-service/jobs/from-erp.d.ts.map +1 -1
- package/dist/services/data-sync-service/jobs/from-erp.js +6 -4
- package/dist/services/data-sync-service/jobs/from-erp.js.map +1 -1
- package/dist/services/data-sync-service/jobs/retry-failed-labor-tickets.js +3 -1
- package/dist/services/data-sync-service/jobs/retry-failed-labor-tickets.js.map +1 -1
- package/dist/services/data-sync-service/jobs/run-migrations.js +7 -1
- package/dist/services/data-sync-service/jobs/run-migrations.js.map +1 -1
- package/dist/services/erp-api-services/graphql/graphql-service.d.ts +5 -0
- package/dist/services/erp-api-services/graphql/graphql-service.d.ts.map +1 -1
- package/dist/services/erp-api-services/rest/rest-api-service.d.ts +5 -0
- package/dist/services/erp-api-services/rest/rest-api-service.d.ts.map +1 -1
- package/dist/services/mm-api-service/mm-api-service.d.ts +5 -0
- package/dist/services/mm-api-service/mm-api-service.d.ts.map +1 -1
- package/dist/services/mm-api-service/types/mm-response-interfaces.d.ts +24 -8
- package/dist/services/mm-api-service/types/mm-response-interfaces.d.ts.map +1 -1
- package/dist/utils/http-client.d.ts +2 -1
- package/dist/utils/http-client.d.ts.map +1 -1
- package/dist/utils/index.d.ts +1 -0
- package/dist/utils/index.d.ts.map +1 -1
- package/dist/utils/standard-process-drivers/error-processor.d.ts +5 -5
- package/dist/utils/standard-process-drivers/error-processor.d.ts.map +1 -1
- package/dist/utils/standard-process-drivers/standard-process-drivers.d.ts +10 -9
- package/dist/utils/standard-process-drivers/standard-process-drivers.d.ts.map +1 -1
- package/package.json +2 -2
- package/src/index.ts +6 -2
- package/src/services/data-sync-service/configuration-manager.ts +0 -9
- package/src/services/data-sync-service/jobs/clean-up-expired-cache.ts +4 -1
- package/src/services/data-sync-service/jobs/from-erp.ts +5 -3
- package/src/services/data-sync-service/jobs/retry-failed-labor-tickets.ts +3 -1
- package/src/services/data-sync-service/jobs/run-migrations.ts +7 -1
- package/src/services/erp-api-services/graphql/graphql-service.ts +8 -0
- package/src/services/erp-api-services/rest/rest-api-service.ts +8 -0
- package/src/services/mm-api-service/mm-api-service.ts +11 -2
- package/src/services/mm-api-service/types/mm-response-interfaces.ts +30 -14
- package/src/utils/http-client.ts +111 -41
- package/src/utils/index.ts +6 -0
- package/src/utils/standard-process-drivers/error-processor.ts +11 -11
- package/src/utils/standard-process-drivers/standard-process-drivers.ts +10 -9
- package/dist/hashed-cache-manager-DkDox9wX.js.map +0 -1
package/dist/{hashed-cache-manager-DkDox9wX.js → hashed-cache-manager-Ci59eC75.js}
CHANGED
@@ -4,7 +4,7 @@ import stringify from "json-stable-stringify";
 import XXH from "xxhashjs";
 import "./config-2l5vnNkA.js";
 import { c as configureLogger, l as logger } from "./logger-QG73MndU.js";
-import "./index-Cn9ccxOO.js";
+import "./index-CXbOvFyf.js";
 class CoreConfiguration {
   static instance;
   // General Configuration
@@ -19,8 +19,6 @@ class CoreConfiguration {
   // Caching (optionally used for interacting with the MM API)
   cacheTTL;
   // ERP API Service
-  erpApiRetryAttemptsDef;
-  //Retry attempts for ERP API
   erpApiPagingLimit;
   //Pagination limit for ERP API
   // Job timing Intervals
@@ -40,9 +38,6 @@ class CoreConfiguration {
     console.log("=== END CONFIG DEBUG ===");
     this.mmApiAuthToken = process.env.MM_MAPPING_SERVICE_TOKEN || "";
     this.mmApiRetryAttempts = parseInt(process.env.RETRY_ATTEMPTS || "0");
-    this.erpApiRetryAttemptsDef = parseInt(
-      process.env.ERP_API_RETRY_ATTEMPTS_DEF || "3"
-    );
     this.erpApiPagingLimit = parseInt(process.env.ERP_PAGINATION_LIMIT || "0");
     this.fromErpInterval = process.env.FROM_ERP_INTERVAL || process.env.POLL_INTERVAL || "5 min";
     this.toErpInterval = process.env.TO_ERP_INTERVAL || "5 min";
@@ -293,4 +288,4 @@ export {
   getSQLServerConfiguration as a,
   getErpApiConnectionParams as g
 };
-//# sourceMappingURL=hashed-cache-manager-DkDox9wX.js.map
+//# sourceMappingURL=hashed-cache-manager-Ci59eC75.js.map
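The hunks above drop `erpApiRetryAttemptsDef` (and its `ERP_API_RETRY_ATTEMPTS_DEF` env var) from `CoreConfiguration`; per the bundled source in the new source map below, the per-ERP retry count now lives on `ErpApiConnectionParams.retryAttempts`, read from `ERP_API_RETRY_ATTEMPTS` with a default of 3. A minimal sketch of reading it, assuming `getErpApiConnectionParams` is re-exported from the package root as the bundle's export list later in this diff suggests:

```ts
// Sketch only: shows where the ERP retry count is configured after this change.
import { getErpApiConnectionParams } from "@machinemetrics/mm-erp-sdk";

// ErpApiConnectionParams.retryAttempts defaults to 3 and is read from
// process.env.ERP_API_RETRY_ATTEMPTS (not the removed ERP_API_RETRY_ATTEMPTS_DEF).
const erpParams = getErpApiConnectionParams();
console.log(`ERP API retry attempts: ${erpParams.retryAttempts}`);
```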
package/dist/hashed-cache-manager-Ci59eC75.js.map
ADDED
@@ -0,0 +1 @@
+
{"version":3,"file":"hashed-cache-manager-Ci59eC75.js","sources":["../src/services/data-sync-service/configuration-manager.ts","../src/services/caching-service/hashed-cache-manager.ts"],"sourcesContent":["import \"dotenv/config\";\nimport { configureLogger } from \"../reporting-service/logger\";\nimport { SQLServerConfiguration } from \"../sql-server-erp-service/configuration\";\n\nexport class CoreConfiguration {\n private static instance: CoreConfiguration;\n\n // General Configuration\n public readonly logLevel: string;\n public readonly erpSystem: string;\n public readonly nodeEnv: string;\n\n // MM API (aka \"Mapping\") Service\n public readonly mmERPSvcApiBaseUrl: string;\n public readonly mmApiBaseUrl: string;\n public readonly mmApiAuthToken: string;\n public readonly mmApiRetryAttempts: number;\n\n // Caching (optionally used for interacting with the MM API)\n public readonly cacheTTL: number;\n\n // ERP API Service\n public readonly erpApiPagingLimit: number; //Pagination limit for ERP API\n\n // Job timing Intervals\n public readonly fromErpInterval: string;\n public readonly toErpInterval: string;\n public readonly retryLaborTicketsInterval: string;\n public readonly cacheExpirationCheckInterval: string;\n\n private constructor() {\n this.logLevel = process.env.LOG_LEVEL || \"info\";\n this.erpSystem = process.env.ERP_SYSTEM || \"template\";\n this.nodeEnv = process.env.NODE_ENV || \"development\";\n\n //#region MM API (aka \"Mapping\") Service\n /**\n * MM ERP Service REST API URL (typically https://erp-api.svc.machinemetrics.com)\n */\n this.mmERPSvcApiBaseUrl = process.env.MM_MAPPING_SERVICE_URL || \"\";\n\n /**\n * MM REST API URL (typically https://api.machinemetrics.com)\n */\n console.log(\"=== CONFIG DEBUG ===\");\n console.log(\"MM_MAPPING_AUTH_SERVICE_URL env var:\", process.env.MM_MAPPING_AUTH_SERVICE_URL);\n this.mmApiBaseUrl = process.env.MM_MAPPING_AUTH_SERVICE_URL || \"\";\n console.log(\"mmApiBaseUrl set to:\", this.mmApiBaseUrl);\n console.log(\"=== END CONFIG DEBUG ===\");\n\n /**\n * Company Auth Token\n */\n this.mmApiAuthToken = process.env.MM_MAPPING_SERVICE_TOKEN || \"\";\n\n /**\n * Number of retry attempts for MM API calls\n */\n this.mmApiRetryAttempts = parseInt(process.env.RETRY_ATTEMPTS || \"0\");\n //#endregion MM API (aka \"Mapping\") Service\n\n /**\n * Default pagination limit for ERP API\n */\n this.erpApiPagingLimit = parseInt(process.env.ERP_PAGINATION_LIMIT || \"0\");\n //#endregion ERP API Service\n\n /**\n * For how to define the intervals, see Bree's documentation: https://github.com/breejs/bree\n */\n this.fromErpInterval =\n process.env.FROM_ERP_INTERVAL || process.env.POLL_INTERVAL || \"5 min\";\n this.toErpInterval = process.env.TO_ERP_INTERVAL || \"5 min\";\n this.retryLaborTicketsInterval =\n process.env.RETRY_LABOR_TICKETS_INTERVAL || \"30 min\";\n this.cacheExpirationCheckInterval =\n process.env.CACHE_EXPIRATION_CHECK_INTERVAL || \"5 min\";\n\n /**\n * Cache TTL (in seconds)\n */\n const cacheTTLDef = 7 * 24 * 60 * 60; // 7 days\n this.cacheTTL = parseInt(process.env.CACHE_TTL || cacheTTLDef.toString());\n\n // Configure the logger with our settings\n configureLogger(this.logLevel, this.nodeEnv);\n }\n\n public static inst(): CoreConfiguration {\n if (!CoreConfiguration.instance) {\n CoreConfiguration.instance = new CoreConfiguration();\n }\n return CoreConfiguration.instance;\n }\n}\n\n/**\n * Helper function to get the SQL Server Configuration for collectors that use SQL Server to interact with the ERP\n */\nexport const 
getSQLServerConfiguration = (): SQLServerConfiguration => {\n return {\n username: process.env.ERP_SQLSERVER_USERNAME || \"\",\n password: process.env.ERP_SQLSERVER_PASSWORD || \"\",\n database: process.env.ERP_SQLSERVER_DATABASE || \"\",\n host:\n process.env.ERP_SQLSERVER_HOST || process.env.ERP_SQLSERVER_SERVER || \"\",\n port: process.env.ERP_SQLSERVER_PORT || \"1433\",\n connectionTimeout: process.env.ERP_SQLSERVER_CONNECTION_TIMEOUT || \"30000\",\n requestTimeout: process.env.ERP_SQLSERVER_REQUEST_TIMEOUT || \"60000\",\n poolMax: process.env.ERP_SQLSERVER_MAX || \"10\",\n poolMin: process.env.ERP_SQLSERVER_MIN || \"0\",\n idleTimeoutMillis:\n process.env.ERP_SQLSERVER_IDLE_TIMEOUT_MMILLIS || \"30000\",\n encrypt: process.env.ERP_SQLSERVER_ENCRYPT === \"true\",\n trustServer: process.env.ERP_SQLSERVER_TRUST_SERVER === \"true\",\n };\n};\n\n/**\n * Parameters required to connect to an ERP system via its API.\n * Contains all the necessary settings to establish a connection and authenticate with an ERP system's API.\n */\nexport class ErpApiConnectionParams {\n constructor(\n public readonly erpApiUrl: string, // Base url of ERP\n public readonly erpApiClientId: string, // Client ID to authenticate with ERP\n public readonly erpApiClientSecret: string, // Client Secret to authenticate with ERP\n public readonly erpApiOrganizationId: string, // Organization / tenant Id\n public readonly erpAuthBaseUrl: string, // Auth base url\n public readonly retryAttempts: number = 3 // Number of retry attempts for API calls\n ) {}\n}\n\n/**\n * Helper function to get the ERP API Connection Parameters\n * Not all connectors use these, but keeping these commonly values in one place may\n * make it easier to set and understand env var names set in App.\n */\nexport const getErpApiConnectionParams = (): ErpApiConnectionParams => {\n return new ErpApiConnectionParams(\n process.env.ERP_API_URL || \"\",\n process.env.ERP_API_CLIENT_ID || \"\",\n process.env.ERP_API_CLIENT_SECRET || \"\",\n process.env.ERP_API_ORGANIZATION_ID || \"\",\n process.env.ERP_AUTH_BASE_URL || \"\",\n parseInt(process.env.ERP_API_RETRY_ATTEMPTS || \"3\")\n );\n};\n","import knex, { Knex } from \"knex\";\nimport config from \"../../knexfile\";\nimport stringify from \"json-stable-stringify\";\nimport XXH from \"xxhashjs\";\nimport { ERPObjType } from \"../../types/erp-types\";\nimport { CacheMetrics } from \"./index\";\nimport { CoreConfiguration } from \"../data-sync-service/configuration-manager\";\nimport { logger } from \"../reporting-service\";\n\ntype HashedCacheManagerOptions = {\n ttl?: number;\n tableName?: string;\n};\n\nexport class HashedCacheManager {\n private static TABLE_NAME = \"sdk_cache\";\n private db: Knex;\n private options: HashedCacheManagerOptions;\n private static readonly SEED = 0xabcd; // Arbitrary seed for hashing\n private isDestroyed: boolean = false;\n private metrics: CacheMetrics = {\n recordCounts: {},\n };\n\n constructor(options?: HashedCacheManagerOptions) {\n this.options = {\n ttl: options?.ttl || CoreConfiguration.inst().cacheTTL,\n tableName: options?.tableName || HashedCacheManager.TABLE_NAME,\n };\n this.db = knex({\n ...config.local,\n pool: {\n min: 0,\n max: 10,\n },\n });\n }\n\n /**\n * Checks if the cache manager is still valid\n * @throws Error if the cache manager has been destroyed\n */\n private checkValid(): void {\n if (this.isDestroyed) {\n throw new Error(\"Cache manager has been destroyed\");\n }\n }\n\n /**\n * Generates a stable hash of a record using JSON stringify + 
xxhash\n */\n public static hashRecord(record: object): string {\n try {\n const serialized = stringify(record);\n if (!serialized) {\n throw new Error(\"Failed to serialize record for hashing\");\n }\n const hash = XXH.h64(serialized, HashedCacheManager.SEED).toString(16);\n return hash;\n } catch (error) {\n if (error instanceof Error && error.message.includes(\"circular\")) {\n throw new Error(\"Failed to serialize record for hashing\");\n }\n throw error;\n }\n }\n\n /**\n * Gets a record from the cache\n * @param type The type of record\n * @param hash The hash of the record\n * @returns The record if it exists, null otherwise\n */\n private async getRecord(\n type: ERPObjType,\n hash: string\n ): Promise<{ key: string } | null> {\n this.checkValid();\n return this.db(this.options.tableName)\n .select(\"key\")\n .where({ type, key: hash })\n .first();\n }\n\n /**\n * Stores a record in the cache\n * @param type The type of record\n * @param record The record to store\n * @returns true if a new record was created, false if an existing record was updated\n */\n public async store(type: ERPObjType, record: object): Promise<boolean> {\n if (!this.isDestroyed && record) {\n try {\n const hash = HashedCacheManager.hashRecord(record);\n const now = new Date();\n\n // First check if record exists with same type and hash\n const existing = await this.db(this.options.tableName)\n .where({\n type,\n key: hash,\n })\n .first();\n\n if (existing) {\n return false; // No need to update, hash hasn't changed\n } else {\n // Insert new record with minimal data\n const result = await this.db(this.options.tableName)\n .insert({\n type,\n key: hash,\n created_at: now,\n })\n .returning(\"id\");\n return result.length > 0;\n }\n } catch (error) {\n logger.error(\"Error storing record:\", error);\n throw error;\n }\n }\n return false;\n }\n\n /**\n * Checks if a record has changed since last seen\n * @param type The type of record\n * @param record The record to check\n * @returns true if the record has changed or is new\n */\n async hasChanged(type: ERPObjType, record: object): Promise<boolean> {\n this.checkValid();\n const newHash = HashedCacheManager.hashRecord(record);\n const existing = await this.getRecord(type, newHash);\n return !existing;\n }\n\n /**\n * Checks if a record has changed and stores it if it has\n * @param type The type of record\n * @param record The record to check and store\n * @returns true if the record was changed or is new\n */\n async upsert(type: ERPObjType, record: object): Promise<boolean> {\n this.checkValid();\n const hasChanged = await this.hasChanged(type, record);\n if (hasChanged) {\n await this.store(type, record as Record<string, unknown>);\n }\n return hasChanged;\n }\n\n /**\n * Removes expired records based on TTL\n */\n async removeExpiredObjects(): Promise<void> {\n this.checkValid();\n const ttl = this.options.ttl;\n if (!ttl) return;\n\n const ttlMilliseconds = ttl * 1000;\n const expirationLimitDate = new Date(Date.now() - ttlMilliseconds);\n const expirationLimit = expirationLimitDate\n .toISOString()\n .slice(0, 19)\n .replace(\"T\", \" \");\n\n await this.db(this.options.tableName)\n .where(\"created_at\", \"<\", expirationLimit)\n .del();\n }\n\n /**\n * Gets all records of a specific type\n */\n async getRecordsByType(type: ERPObjType): Promise<string[]> {\n this.checkValid();\n const records = await this.db(this.options.tableName)\n .select(\"key\")\n .where({ type });\n\n return records.map((record) => record.key);\n }\n\n /**\n * Removes all records 
of a specific type\n */\n async removeRecordsByType(type: ERPObjType): Promise<void> {\n this.checkValid();\n await this.db(this.options.tableName).where({ type }).del();\n }\n\n /**\n * Removes a specific record\n */\n public async removeRecord(type: ERPObjType, record: object): Promise<void> {\n if (!this.isDestroyed) {\n try {\n const hash = HashedCacheManager.hashRecord(record);\n await this.db(this.options.tableName)\n .where({ type, key: hash }) // Use key for deletion\n .del();\n } catch (error) {\n logger.error(\"Error removing record:\", error);\n throw error;\n }\n }\n }\n\n /**\n * Clears all records from the cache\n */\n async clear(): Promise<void> {\n this.checkValid();\n await this.db(this.options.tableName).del();\n }\n\n /**\n * Cleans up database connection and marks the cache manager as destroyed\n */\n async destroy(): Promise<void> {\n if (!this.isDestroyed) {\n await this.db.destroy();\n this.isDestroyed = true;\n }\n }\n\n /**\n * Gets the current cache metrics\n * @returns The current cache metrics\n */\n async getMetrics(): Promise<CacheMetrics> {\n this.checkValid();\n\n // Get counts for each type\n const counts = (await this.db(this.options.tableName)\n .select(\"type\")\n .count(\"* as count\")\n .groupBy(\"type\")) as Array<{ type: string; count: string }>;\n\n // Update metrics\n this.metrics.recordCounts = counts.reduce(\n (acc, row) => {\n acc[row.type] = parseInt(row.count, 10);\n return acc;\n },\n {} as Record<string, number>\n );\n\n return this.metrics;\n }\n}\n"],"names":[],"mappings":";;;;;;;AAIO,MAAM,kBAAkB;AAAA,EAC7B,OAAe;AAAA;AAAA,EAGC;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAGA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAGA;AAAA;AAAA,EAGA;AAAA;AAAA;AAAA,EAGA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAER,cAAc;AACpB,SAAK,WAAW,QAAQ,IAAI,aAAa;AACzC,SAAK,YAAY,QAAQ,IAAI,cAAc;AAC3C,SAAK,UAAU,QAAQ,IAAI,YAAY;AAMvC,SAAK,qBAAqB,QAAQ,IAAI,0BAA0B;AAKhE,YAAQ,IAAI,sBAAsB;AAClC,YAAQ,IAAI,wCAAwC,QAAQ,IAAI,2BAA2B;AAC3F,SAAK,eAAe,QAAQ,IAAI,+BAA+B;AAC/D,YAAQ,IAAI,wBAAwB,KAAK,YAAY;AACrD,YAAQ,IAAI,0BAA0B;AAKtC,SAAK,iBAAiB,QAAQ,IAAI,4BAA4B;AAK9D,SAAK,qBAAqB,SAAS,QAAQ,IAAI,kBAAkB,GAAG;AAMpE,SAAK,oBAAoB,SAAS,QAAQ,IAAI,wBAAwB,GAAG;AAMzE,SAAK,kBACH,QAAQ,IAAI,qBAAqB,QAAQ,IAAI,iBAAiB;AAChE,SAAK,gBAAgB,QAAQ,IAAI,mBAAmB;AACpD,SAAK,4BACH,QAAQ,IAAI,gCAAgC;AAC9C,SAAK,+BACH,QAAQ,IAAI,mCAAmC;AAKjD,UAAM,cAAc,IAAI,KAAK,KAAK;AAClC,SAAK,WAAW,SAAS,QAAQ,IAAI,aAAa,YAAY,UAAU;AAGxE,oBAAgB,KAAK,UAAU,KAAK,OAAO;AAAA,EAC7C;AAAA,EAEA,OAAc,OAA0B;AACtC,QAAI,CAAC,kBAAkB,UAAU;AAC/B,wBAAkB,WAAW,IAAI,kBAAA;AAAA,IACnC;AACA,WAAO,kBAAkB;AAAA,EAC3B;AACF;AAKO,MAAM,4BAA4B,MAA8B;AACrE,SAAO;AAAA,IACL,UAAU,QAAQ,IAAI,0BAA0B;AAAA,IAChD,UAAU,QAAQ,IAAI,0BAA0B;AAAA,IAChD,UAAU,QAAQ,IAAI,0BAA0B;AAAA,IAChD,MACE,QAAQ,IAAI,sBAAsB,QAAQ,IAAI,wBAAwB;AAAA,IACxE,MAAM,QAAQ,IAAI,sBAAsB;AAAA,IACxC,mBAAmB,QAAQ,IAAI,oCAAoC;AAAA,IACnE,gBAAgB,QAAQ,IAAI,iCAAiC;AAAA,IAC7D,SAAS,QAAQ,IAAI,qBAAqB;AAAA,IAC1C,SAAS,QAAQ,IAAI,qBAAqB;AAAA,IAC1C,mBACE,QAAQ,IAAI,sCAAsC;AAAA,IACpD,SAAS,QAAQ,IAAI,0BAA0B;AAAA,IAC/C,aAAa,QAAQ,IAAI,+BAA+B;AAAA,EAAA;AAE5D;AAMO,MAAM,uBAAuB;AAAA,EAClC,YACkB,WACA,gBACA,oBACA,sBACA,gBACA,gBAAwB,GACxC;AANgB,SAAA,YAAA;AACA,SAAA,iBAAA;AACA,SAAA,qBAAA;AACA,SAAA,uBAAA;AACA,SAAA,iBAAA;AACA,SAAA,gBAAA;AAAA,EACf;AACL;AAOO,MAAM,4BAA4B,MAA8B;AACrE,SAAO,IAAI;AAAA,IACT,QAAQ,IAAI,eAAe;AAAA,IAC3B,QAAQ,IAAI,qBAAqB;AAAA,IACjC,QAAQ,IAAI,yBAAyB;AAAA,IACrC,QAAQ,IAAI,2BAA2B;AAAA,IACvC,QAAQ,IAAI,qBAAqB;AAAA,IACjC,SAAS,QAAQ,IAAI,0BAA0B,GAAG;AAAA,EAAA;AAEtD;ACrIO,MAAM,mBAAmB;AAAA,EAC9B,OAAe,aAAa;AAAA,EACpB;AAAA,EACA;AAAA,EACR,OAAwB,OAAO;AAAA;AAAA,EACvB,cAAuB;A
AAA,EACvB,UAAwB;AAAA,IAC9B,cAAc,CAAA;AAAA,EAAC;AAAA,EAGjB,YAAY,SAAqC;AAC/C,SAAK,UAAU;AAAA,MACb,KAAK,SAAS,OAAO,kBAAkB,OAAO;AAAA,MAC9C,WAAW,SAAS,aAAa,mBAAmB;AAAA,IAAA;AAEtD,SAAK,KAAK,KAAK;AAAA,MACb,GAAG,OAAO;AAAA,MACV,MAAM;AAAA,QACJ,KAAK;AAAA,QACL,KAAK;AAAA,MAAA;AAAA,IACP,CACD;AAAA,EACH;AAAA;AAAA;AAAA;AAAA;AAAA,EAMQ,aAAmB;AACzB,QAAI,KAAK,aAAa;AACpB,YAAM,IAAI,MAAM,kCAAkC;AAAA,IACpD;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,OAAc,WAAW,QAAwB;AAC/C,QAAI;AACF,YAAM,aAAa,UAAU,MAAM;AACnC,UAAI,CAAC,YAAY;AACf,cAAM,IAAI,MAAM,wCAAwC;AAAA,MAC1D;AACA,YAAM,OAAO,IAAI,IAAI,YAAY,mBAAmB,IAAI,EAAE,SAAS,EAAE;AACrE,aAAO;AAAA,IACT,SAAS,OAAO;AACd,UAAI,iBAAiB,SAAS,MAAM,QAAQ,SAAS,UAAU,GAAG;AAChE,cAAM,IAAI,MAAM,wCAAwC;AAAA,MAC1D;AACA,YAAM;AAAA,IACR;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAc,UACZ,MACA,MACiC;AACjC,SAAK,WAAA;AACL,WAAO,KAAK,GAAG,KAAK,QAAQ,SAAS,EAClC,OAAO,KAAK,EACZ,MAAM,EAAE,MAAM,KAAK,KAAA,CAAM,EACzB,MAAA;AAAA,EACL;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAa,MAAM,MAAkB,QAAkC;AACrE,QAAI,CAAC,KAAK,eAAe,QAAQ;AAC/B,UAAI;AACF,cAAM,OAAO,mBAAmB,WAAW,MAAM;AACjD,cAAM,0BAAU,KAAA;AAGhB,cAAM,WAAW,MAAM,KAAK,GAAG,KAAK,QAAQ,SAAS,EAClD,MAAM;AAAA,UACL;AAAA,UACA,KAAK;AAAA,QAAA,CACN,EACA,MAAA;AAEH,YAAI,UAAU;AACZ,iBAAO;AAAA,QACT,OAAO;AAEL,gBAAM,SAAS,MAAM,KAAK,GAAG,KAAK,QAAQ,SAAS,EAChD,OAAO;AAAA,YACN;AAAA,YACA,KAAK;AAAA,YACL,YAAY;AAAA,UAAA,CACb,EACA,UAAU,IAAI;AACjB,iBAAO,OAAO,SAAS;AAAA,QACzB;AAAA,MACF,SAAS,OAAO;AACd,eAAO,MAAM,yBAAyB,KAAK;AAC3C,cAAM;AAAA,MACR;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAM,WAAW,MAAkB,QAAkC;AACnE,SAAK,WAAA;AACL,UAAM,UAAU,mBAAmB,WAAW,MAAM;AACpD,UAAM,WAAW,MAAM,KAAK,UAAU,MAAM,OAAO;AACnD,WAAO,CAAC;AAAA,EACV;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAM,OAAO,MAAkB,QAAkC;AAC/D,SAAK,WAAA;AACL,UAAM,aAAa,MAAM,KAAK,WAAW,MAAM,MAAM;AACrD,QAAI,YAAY;AACd,YAAM,KAAK,MAAM,MAAM,MAAiC;AAAA,IAC1D;AACA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,uBAAsC;AAC1C,SAAK,WAAA;AACL,UAAM,MAAM,KAAK,QAAQ;AACzB,QAAI,CAAC,IAAK;AAEV,UAAM,kBAAkB,MAAM;AAC9B,UAAM,sBAAsB,IAAI,KAAK,KAAK,IAAA,IAAQ,eAAe;AACjE,UAAM,kBAAkB,oBACrB,YAAA,EACA,MAAM,GAAG,EAAE,EACX,QAAQ,KAAK,GAAG;AAEnB,UAAM,KAAK,GAAG,KAAK,QAAQ,SAAS,EACjC,MAAM,cAAc,KAAK,eAAe,EACxC,IAAA;AAAA,EACL;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,iBAAiB,MAAqC;AAC1D,SAAK,WAAA;AACL,UAAM,UAAU,MAAM,KAAK,GAAG,KAAK,QAAQ,SAAS,EACjD,OAAO,KAAK,EACZ,MAAM,EAAE,MAAM;AAEjB,WAAO,QAAQ,IAAI,CAAC,WAAW,OAAO,GAAG;AAAA,EAC3C;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,oBAAoB,MAAiC;AACzD,SAAK,WAAA;AACL,UAAM,KAAK,GAAG,KAAK,QAAQ,SAAS,EAAE,MAAM,EAAE,MAAM,EAAE,IAAA;AAAA,EACxD;AAAA;AAAA;AAAA;AAAA,EAKA,MAAa,aAAa,MAAkB,QAA+B;AACzE,QAAI,CAAC,KAAK,aAAa;AACrB,UAAI;AACF,cAAM,OAAO,mBAAmB,WAAW,MAAM;AACjD,cAAM,KAAK,GAAG,KAAK,QAAQ,SAAS,EACjC,MAAM,EAAE,MAAM,KAAK,KAAA,CAAM,EACzB,IAAA;AAAA,MACL,SAAS,OAAO;AACd,eAAO,MAAM,0BAA0B,KAAK;AAC5C,cAAM;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,QAAuB;AAC3B,SAAK,WAAA;AACL,UAAM,KAAK,GAAG,KAAK,QAAQ,SAAS,EAAE,IAAA;AAAA,EACxC;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,UAAyB;AAC7B,QAAI,CAAC,KAAK,aAAa;AACrB,YAAM,KAAK,GAAG,QAAA;AACd,WAAK,cAAc;AAAA,IACrB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,aAAoC;AACxC,SAAK,WAAA;AAGL,UAAM,SAAU,MAAM,KAAK,GAAG,KAAK,QAAQ,SAAS,EACjD,OAAO,MAAM,EACb,MAAM,YAAY,EAClB,QAAQ,MAAM;AAGjB,SAAK,QAAQ,eAAe,OAAO;AAAA,MACjC,CAAC,KAAK,QAAQ;AACZ,YAAI,IAAI,IAAI,IAAI,SAAS,IAAI,OAAO,EAAE;AACtC,eAAO;AAAA,MACT;AAAA,MACA,CAAA;AAAA,IAAC;AAGH,WAAO,KAAK;AAAA,EACd;AACF;"}
package/dist/{index-Cn9ccxOO.js → index-CXbOvFyf.js}
CHANGED
@@ -173,7 +173,9 @@ export {
   SQLiteCoordinator as S,
   setTimezoneOffsetInCache as a,
   getCachedTimezoneOffset as b,
+  getInitialLoadComplete as c,
+  setInitialLoadComplete as d,
   getCachedMMToken as g,
   setCachedMMToken as s
 };
-//# sourceMappingURL=index-Cn9ccxOO.js.map
+//# sourceMappingURL=index-CXbOvFyf.js.map
package/dist/{index-Cn9ccxOO.js.map → index-CXbOvFyf.js.map}
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"index-Cn9ccxOO.js","sources":["../src/utils/local-data-store/jobs-shared-data.ts","../src/utils/local-data-store/database-lock.ts","../src/services/sqlite-service/sqlite-coordinator.ts"],"sourcesContent":["import fs from \"fs\";\nimport path from \"path\";\nimport { mkdirSync } from \"fs\";\n\n/**\n * This file contains the logic for storing and retrieving data from the job state file.\n * It is used to store data that is shared between jobs, and (more importantly) across job instances.\n */\n\nconst STORAGE_FILE = path.join(\"/tmp\", \"job-state.json\");\n\n// Ensure parent directory exists\nconst parentDir = path.dirname(STORAGE_FILE);\ntry {\n mkdirSync(parentDir, { recursive: true });\n} catch (error) {\n if ((error as NodeJS.ErrnoException).code !== \"EEXIST\") {\n throw error;\n }\n}\n\n//#region Non-exported functions\nconst ensureStorageFile = () => {\n if (!fs.existsSync(STORAGE_FILE)) {\n fs.writeFileSync(STORAGE_FILE, JSON.stringify({}), \"utf-8\");\n }\n};\n\nconst readStorage = (): Record<string, unknown> => {\n ensureStorageFile();\n try {\n return JSON.parse(fs.readFileSync(STORAGE_FILE, \"utf-8\"));\n } catch (error) {\n console.error(`Failed to read storage from ${STORAGE_FILE}:`, error);\n return {};\n }\n};\n\nconst writeStorage = (data: Record<string, unknown>): void => {\n ensureStorageFile();\n fs.writeFileSync(STORAGE_FILE, JSON.stringify(data, null, 2), \"utf-8\");\n};\n//#endregion\n\n//#region Database lock storage functions\n/**\n * Reads the database lock state from the shared storage file\n * @returns The data stored in the file\n */\nexport const readDatabaseLockState = (): Record<string, unknown> => {\n return readStorage();\n};\n\n/**\n * Writes the database lock state to the shared storage file\n * @param data The lock state data to write\n */\nexport const writeDatabaseLockState = (data: Record<string, unknown>): void => {\n writeStorage(data);\n};\n//#endregion\n\nexport const getInitialLoadComplete = (): boolean => {\n const data = readStorage();\n return (data.initialLoadComplete as boolean) ?? false;\n};\n\nexport const setInitialLoadComplete = (complete: boolean): void => {\n const data = readStorage();\n data.initialLoadComplete = complete;\n writeStorage(data);\n};\n\nexport const getCachedTimezoneOffset = (): number => {\n const data = readStorage();\n return (data.timezoneOffset as number) ?? 0;\n};\n\n/**\n * Sets the timezone offset in the cache\n * @param offset The timezone offset in hours\n */\nexport const setTimezoneOffsetInCache = (offset: number): void => {\n const data = readStorage();\n data.timezoneOffset = offset;\n writeStorage(data);\n};\n\ninterface CachedToken {\n token: string;\n expiration: number | null;\n}\n\n/**\n * Gets the cached MM API token and its expiration\n * @returns The cached token and expiration or null if not found\n */\nexport const getCachedMMToken = (): CachedToken | null => {\n const data = readStorage();\n return (data.mmApiToken as CachedToken) ?? 
null;\n};\n\n/**\n * Sets the MM API token and its expiration in the cache\n * @param tokenData The token and expiration to cache\n */\nexport const setCachedMMToken = (tokenData: CachedToken): void => {\n const data = readStorage();\n data.mmApiToken = tokenData;\n writeStorage(data);\n};\n","import {\n readDatabaseLockState,\n writeDatabaseLockState,\n} from \"./jobs-shared-data\";\n\ninterface DatabaseLock {\n isLocked: boolean;\n lockedBy: string;\n lockedAt: string | null;\n}\n\n/**\n * Gets the current database lock state\n * @returns The current database lock state\n */\nexport const getDatabaseLock = (): DatabaseLock => {\n const data = readDatabaseLockState();\n return (\n (data.databaseLock as DatabaseLock) ?? {\n isLocked: false,\n lockedBy: \"\",\n lockedAt: null,\n }\n );\n};\n\n/**\n * Attempts to acquire the database lock\n * @param processName Name of the process requesting the lock\n * @returns true if lock was acquired, false if database is already locked\n */\nexport const acquireDatabaseLock = (processName: string): boolean => {\n const data = readDatabaseLockState();\n const currentLock = (data.databaseLock as DatabaseLock) ?? {\n isLocked: false,\n lockedBy: \"\",\n lockedAt: null,\n };\n\n if (currentLock.isLocked) {\n return false;\n }\n\n data.databaseLock = {\n isLocked: true,\n lockedBy: processName,\n lockedAt: new Date().toISOString(),\n };\n writeDatabaseLockState(data);\n return true;\n};\n\n/**\n * Releases the database lock\n * @param processName Name of the process releasing the lock\n * @returns true if lock was released, false if process doesn't own the lock\n */\nexport const releaseDatabaseLock = (processName: string): boolean => {\n const data = readDatabaseLockState();\n const currentLock = (data.databaseLock as DatabaseLock) ?? 
{\n isLocked: false,\n lockedBy: \"\",\n lockedAt: null,\n };\n\n if (!currentLock.isLocked || currentLock.lockedBy !== processName) {\n return false;\n }\n\n data.databaseLock = {\n isLocked: false,\n lockedBy: \"\",\n lockedAt: null,\n };\n writeDatabaseLockState(data);\n return true;\n};\n\n/**\n * Checks if the database is available for use\n * @returns true if database is available, false if locked\n */\nexport const isDatabaseAvailable = (): boolean => {\n const lock = getDatabaseLock();\n return !lock.isLocked;\n};\n","import {\n acquireDatabaseLock,\n releaseDatabaseLock,\n getDatabaseLock,\n} from \"../../utils/local-data-store/database-lock\";\nimport { logger } from \"../reporting-service\";\n\nexport class SQLiteCoordinator {\n private static readonly LOCK_TIMEOUT_MS = 30_000; // 30 seconds\n private static readonly LOCK_RETRY_INTERVAL_MS = 1_000; // 1 second\n\n /**\n * Performs startup checks to ensure no stale locks exist\n * Should be called when the application starts\n */\n static async performStartupCheck(): Promise<void> {\n const currentLock = getDatabaseLock();\n\n if (currentLock.isLocked) {\n logger.warn(\n `Found existing lock held by ${currentLock.lockedBy}, releasing for clean startup`\n );\n releaseDatabaseLock(currentLock.lockedBy);\n }\n }\n\n /**\n * Attempts to acquire the database lock\n * @param processName Name of the process requesting the lock\n * @returns true if lock was acquired, false if database is already locked\n */\n private static async tryAcquireLock(processName: string): Promise<boolean> {\n return acquireDatabaseLock(processName);\n }\n\n /**\n * Executes a database operation with proper locking\n * @param processName Name of the process executing the operation\n * @param operation The operation to execute\n * @returns The result of the operation\n */\n static async executeWithLock<T>(\n processName: string,\n operation: () => Promise<T>\n ): Promise<T> {\n const startTime = Date.now();\n\n // Try to acquire the lock with timeout\n while (Date.now() - startTime < this.LOCK_TIMEOUT_MS) {\n if (await this.tryAcquireLock(processName)) {\n try {\n // Execute the operation\n const result = await operation();\n return result;\n } finally {\n // Always release the lock\n releaseDatabaseLock(processName);\n }\n }\n\n // Wait before retrying\n await new Promise((resolve) =>\n setTimeout(resolve, this.LOCK_RETRY_INTERVAL_MS)\n );\n }\n\n throw new Error(\n `Failed to acquire database lock after ${this.LOCK_TIMEOUT_MS}ms`\n );\n }\n\n /**\n * Checks if the database is currently available for operations\n * @returns true if the database is available, false if locked\n */\n static isAvailable(): boolean {\n const lock = getDatabaseLock();\n return !lock.isLocked;\n 
}\n}\n"],"names":["fs"],"mappings":";;;AASA,MAAM,eAAe,KAAK,KAAK,QAAQ,gBAAgB;AAGvD,MAAM,YAAY,KAAK,QAAQ,YAAY;AAC3C,IAAI;AACF,YAAU,WAAW,EAAE,WAAW,KAAA,CAAM;AAC1C,SAAS,OAAO;AACd,MAAK,MAAgC,SAAS,UAAU;AACtD,UAAM;AAAA,EACR;AACF;AAGA,MAAM,oBAAoB,MAAM;AAC9B,MAAI,CAACA,WAAG,WAAW,YAAY,GAAG;AAChCA,eAAG,cAAc,cAAc,KAAK,UAAU,CAAA,CAAE,GAAG,OAAO;AAAA,EAC5D;AACF;AAEA,MAAM,cAAc,MAA+B;AACjD,oBAAA;AACA,MAAI;AACF,WAAO,KAAK,MAAMA,WAAG,aAAa,cAAc,OAAO,CAAC;AAAA,EAC1D,SAAS,OAAO;AACd,YAAQ,MAAM,+BAA+B,YAAY,KAAK,KAAK;AACnE,WAAO,CAAA;AAAA,EACT;AACF;AAEA,MAAM,eAAe,CAAC,SAAwC;AAC5D,oBAAA;AACAA,aAAG,cAAc,cAAc,KAAK,UAAU,MAAM,MAAM,CAAC,GAAG,OAAO;AACvE;AAQO,MAAM,wBAAwB,MAA+B;AAClE,SAAO,YAAA;AACT;AAMO,MAAM,yBAAyB,CAAC,SAAwC;AAC7E,eAAa,IAAI;AACnB;AAGO,MAAM,yBAAyB,MAAe;AACnD,QAAM,OAAO,YAAA;AACb,SAAQ,KAAK,uBAAmC;AAClD;AAEO,MAAM,yBAAyB,CAAC,aAA4B;AACjE,QAAM,OAAO,YAAA;AACb,OAAK,sBAAsB;AAC3B,eAAa,IAAI;AACnB;AAEO,MAAM,0BAA0B,MAAc;AACnD,QAAM,OAAO,YAAA;AACb,SAAQ,KAAK,kBAA6B;AAC5C;AAMO,MAAM,2BAA2B,CAAC,WAAyB;AAChE,QAAM,OAAO,YAAA;AACb,OAAK,iBAAiB;AACtB,eAAa,IAAI;AACnB;AAWO,MAAM,mBAAmB,MAA0B;AACxD,QAAM,OAAO,YAAA;AACb,SAAQ,KAAK,cAA8B;AAC7C;AAMO,MAAM,mBAAmB,CAAC,cAAiC;AAChE,QAAM,OAAO,YAAA;AACb,OAAK,aAAa;AAClB,eAAa,IAAI;AACnB;AC/FO,MAAM,kBAAkB,MAAoB;AACjD,QAAM,OAAO,sBAAA;AACb,SACG,KAAK,gBAAiC;AAAA,IACrC,UAAU;AAAA,IACV,UAAU;AAAA,IACV,UAAU;AAAA,EAAA;AAGhB;AAOO,MAAM,sBAAsB,CAAC,gBAAiC;AACnE,QAAM,OAAO,sBAAA;AACb,QAAM,cAAe,KAAK,gBAAiC;AAAA,IACzD,UAAU;AAAA,IACV,UAAU;AAAA,IACV,UAAU;AAAA,EAAA;AAGZ,MAAI,YAAY,UAAU;AACxB,WAAO;AAAA,EACT;AAEA,OAAK,eAAe;AAAA,IAClB,UAAU;AAAA,IACV,UAAU;AAAA,IACV,WAAU,oBAAI,KAAA,GAAO,YAAA;AAAA,EAAY;AAEnC,yBAAuB,IAAI;AAC3B,SAAO;AACT;AAOO,MAAM,sBAAsB,CAAC,gBAAiC;AACnE,QAAM,OAAO,sBAAA;AACb,QAAM,cAAe,KAAK,gBAAiC;AAAA,IACzD,UAAU;AAAA,IACV,UAAU;AAAA,IACV,UAAU;AAAA,EAAA;AAGZ,MAAI,CAAC,YAAY,YAAY,YAAY,aAAa,aAAa;AACjE,WAAO;AAAA,EACT;AAEA,OAAK,eAAe;AAAA,IAClB,UAAU;AAAA,IACV,UAAU;AAAA,IACV,UAAU;AAAA,EAAA;AAEZ,yBAAuB,IAAI;AAC3B,SAAO;AACT;AAMO,MAAM,sBAAsB,MAAe;AAChD,QAAM,OAAO,gBAAA;AACb,SAAO,CAAC,KAAK;AACf;AC9EO,MAAM,kBAAkB;AAAA,EAC7B,OAAwB,kBAAkB;AAAA;AAAA,EAC1C,OAAwB,yBAAyB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAMjD,aAAa,sBAAqC;AAChD,UAAM,cAAc,gBAAA;AAEpB,QAAI,YAAY,UAAU;AACxB,aAAO;AAAA,QACL,+BAA+B,YAAY,QAAQ;AAAA,MAAA;AAErD,0BAAoB,YAAY,QAAQ;AAAA,IAC1C;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,aAAqB,eAAe,aAAuC;AACzE,WAAO,oBAAoB,WAAW;AAAA,EACxC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,aAAa,gBACX,aACA,WACY;AACZ,UAAM,YAAY,KAAK,IAAA;AAGvB,WAAO,KAAK,IAAA,IAAQ,YAAY,KAAK,iBAAiB;AACpD,UAAI,MAAM,KAAK,eAAe,WAAW,GAAG;AAC1C,YAAI;AAEF,gBAAM,SAAS,MAAM,UAAA;AACrB,iBAAO;AAAA,QACT,UAAA;AAEE,8BAAoB,WAAW;AAAA,QACjC;AAAA,MACF;AAGA,YAAM,IAAI;AAAA,QAAQ,CAAC,YACjB,WAAW,SAAS,KAAK,sBAAsB;AAAA,MAAA;AAAA,IAEnD;AAEA,UAAM,IAAI;AAAA,MACR,yCAAyC,KAAK,eAAe;AAAA,IAAA;AAAA,EAEjE;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,OAAO,cAAuB;AAC5B,UAAM,OAAO,gBAAA;AACb,WAAO,CAAC,KAAK;AAAA,EACf;AACF;"}
+
{"version":3,"file":"index-CXbOvFyf.js","sources":["../src/utils/local-data-store/jobs-shared-data.ts","../src/utils/local-data-store/database-lock.ts","../src/services/sqlite-service/sqlite-coordinator.ts"],"sourcesContent":["import fs from \"fs\";\nimport path from \"path\";\nimport { mkdirSync } from \"fs\";\n\n/**\n * This file contains the logic for storing and retrieving data from the job state file.\n * It is used to store data that is shared between jobs, and (more importantly) across job instances.\n */\n\nconst STORAGE_FILE = path.join(\"/tmp\", \"job-state.json\");\n\n// Ensure parent directory exists\nconst parentDir = path.dirname(STORAGE_FILE);\ntry {\n mkdirSync(parentDir, { recursive: true });\n} catch (error) {\n if ((error as NodeJS.ErrnoException).code !== \"EEXIST\") {\n throw error;\n }\n}\n\n//#region Non-exported functions\nconst ensureStorageFile = () => {\n if (!fs.existsSync(STORAGE_FILE)) {\n fs.writeFileSync(STORAGE_FILE, JSON.stringify({}), \"utf-8\");\n }\n};\n\nconst readStorage = (): Record<string, unknown> => {\n ensureStorageFile();\n try {\n return JSON.parse(fs.readFileSync(STORAGE_FILE, \"utf-8\"));\n } catch (error) {\n console.error(`Failed to read storage from ${STORAGE_FILE}:`, error);\n return {};\n }\n};\n\nconst writeStorage = (data: Record<string, unknown>): void => {\n ensureStorageFile();\n fs.writeFileSync(STORAGE_FILE, JSON.stringify(data, null, 2), \"utf-8\");\n};\n//#endregion\n\n//#region Database lock storage functions\n/**\n * Reads the database lock state from the shared storage file\n * @returns The data stored in the file\n */\nexport const readDatabaseLockState = (): Record<string, unknown> => {\n return readStorage();\n};\n\n/**\n * Writes the database lock state to the shared storage file\n * @param data The lock state data to write\n */\nexport const writeDatabaseLockState = (data: Record<string, unknown>): void => {\n writeStorage(data);\n};\n//#endregion\n\nexport const getInitialLoadComplete = (): boolean => {\n const data = readStorage();\n return (data.initialLoadComplete as boolean) ?? false;\n};\n\nexport const setInitialLoadComplete = (complete: boolean): void => {\n const data = readStorage();\n data.initialLoadComplete = complete;\n writeStorage(data);\n};\n\nexport const getCachedTimezoneOffset = (): number => {\n const data = readStorage();\n return (data.timezoneOffset as number) ?? 0;\n};\n\n/**\n * Sets the timezone offset in the cache\n * @param offset The timezone offset in hours\n */\nexport const setTimezoneOffsetInCache = (offset: number): void => {\n const data = readStorage();\n data.timezoneOffset = offset;\n writeStorage(data);\n};\n\ninterface CachedToken {\n token: string;\n expiration: number | null;\n}\n\n/**\n * Gets the cached MM API token and its expiration\n * @returns The cached token and expiration or null if not found\n */\nexport const getCachedMMToken = (): CachedToken | null => {\n const data = readStorage();\n return (data.mmApiToken as CachedToken) ?? 
null;\n};\n\n/**\n * Sets the MM API token and its expiration in the cache\n * @param tokenData The token and expiration to cache\n */\nexport const setCachedMMToken = (tokenData: CachedToken): void => {\n const data = readStorage();\n data.mmApiToken = tokenData;\n writeStorage(data);\n};\n","import {\n readDatabaseLockState,\n writeDatabaseLockState,\n} from \"./jobs-shared-data\";\n\ninterface DatabaseLock {\n isLocked: boolean;\n lockedBy: string;\n lockedAt: string | null;\n}\n\n/**\n * Gets the current database lock state\n * @returns The current database lock state\n */\nexport const getDatabaseLock = (): DatabaseLock => {\n const data = readDatabaseLockState();\n return (\n (data.databaseLock as DatabaseLock) ?? {\n isLocked: false,\n lockedBy: \"\",\n lockedAt: null,\n }\n );\n};\n\n/**\n * Attempts to acquire the database lock\n * @param processName Name of the process requesting the lock\n * @returns true if lock was acquired, false if database is already locked\n */\nexport const acquireDatabaseLock = (processName: string): boolean => {\n const data = readDatabaseLockState();\n const currentLock = (data.databaseLock as DatabaseLock) ?? {\n isLocked: false,\n lockedBy: \"\",\n lockedAt: null,\n };\n\n if (currentLock.isLocked) {\n return false;\n }\n\n data.databaseLock = {\n isLocked: true,\n lockedBy: processName,\n lockedAt: new Date().toISOString(),\n };\n writeDatabaseLockState(data);\n return true;\n};\n\n/**\n * Releases the database lock\n * @param processName Name of the process releasing the lock\n * @returns true if lock was released, false if process doesn't own the lock\n */\nexport const releaseDatabaseLock = (processName: string): boolean => {\n const data = readDatabaseLockState();\n const currentLock = (data.databaseLock as DatabaseLock) ?? 
{\n isLocked: false,\n lockedBy: \"\",\n lockedAt: null,\n };\n\n if (!currentLock.isLocked || currentLock.lockedBy !== processName) {\n return false;\n }\n\n data.databaseLock = {\n isLocked: false,\n lockedBy: \"\",\n lockedAt: null,\n };\n writeDatabaseLockState(data);\n return true;\n};\n\n/**\n * Checks if the database is available for use\n * @returns true if database is available, false if locked\n */\nexport const isDatabaseAvailable = (): boolean => {\n const lock = getDatabaseLock();\n return !lock.isLocked;\n};\n","import {\n acquireDatabaseLock,\n releaseDatabaseLock,\n getDatabaseLock,\n} from \"../../utils/local-data-store/database-lock\";\nimport { logger } from \"../reporting-service\";\n\nexport class SQLiteCoordinator {\n private static readonly LOCK_TIMEOUT_MS = 30_000; // 30 seconds\n private static readonly LOCK_RETRY_INTERVAL_MS = 1_000; // 1 second\n\n /**\n * Performs startup checks to ensure no stale locks exist\n * Should be called when the application starts\n */\n static async performStartupCheck(): Promise<void> {\n const currentLock = getDatabaseLock();\n\n if (currentLock.isLocked) {\n logger.warn(\n `Found existing lock held by ${currentLock.lockedBy}, releasing for clean startup`\n );\n releaseDatabaseLock(currentLock.lockedBy);\n }\n }\n\n /**\n * Attempts to acquire the database lock\n * @param processName Name of the process requesting the lock\n * @returns true if lock was acquired, false if database is already locked\n */\n private static async tryAcquireLock(processName: string): Promise<boolean> {\n return acquireDatabaseLock(processName);\n }\n\n /**\n * Executes a database operation with proper locking\n * @param processName Name of the process executing the operation\n * @param operation The operation to execute\n * @returns The result of the operation\n */\n static async executeWithLock<T>(\n processName: string,\n operation: () => Promise<T>\n ): Promise<T> {\n const startTime = Date.now();\n\n // Try to acquire the lock with timeout\n while (Date.now() - startTime < this.LOCK_TIMEOUT_MS) {\n if (await this.tryAcquireLock(processName)) {\n try {\n // Execute the operation\n const result = await operation();\n return result;\n } finally {\n // Always release the lock\n releaseDatabaseLock(processName);\n }\n }\n\n // Wait before retrying\n await new Promise((resolve) =>\n setTimeout(resolve, this.LOCK_RETRY_INTERVAL_MS)\n );\n }\n\n throw new Error(\n `Failed to acquire database lock after ${this.LOCK_TIMEOUT_MS}ms`\n );\n }\n\n /**\n * Checks if the database is currently available for operations\n * @returns true if the database is available, false if locked\n */\n static isAvailable(): boolean {\n const lock = getDatabaseLock();\n return !lock.isLocked;\n 
}\n}\n"],"names":["fs"],"mappings":";;;AASA,MAAM,eAAe,KAAK,KAAK,QAAQ,gBAAgB;AAGvD,MAAM,YAAY,KAAK,QAAQ,YAAY;AAC3C,IAAI;AACF,YAAU,WAAW,EAAE,WAAW,KAAA,CAAM;AAC1C,SAAS,OAAO;AACd,MAAK,MAAgC,SAAS,UAAU;AACtD,UAAM;AAAA,EACR;AACF;AAGA,MAAM,oBAAoB,MAAM;AAC9B,MAAI,CAACA,WAAG,WAAW,YAAY,GAAG;AAChCA,eAAG,cAAc,cAAc,KAAK,UAAU,CAAA,CAAE,GAAG,OAAO;AAAA,EAC5D;AACF;AAEA,MAAM,cAAc,MAA+B;AACjD,oBAAA;AACA,MAAI;AACF,WAAO,KAAK,MAAMA,WAAG,aAAa,cAAc,OAAO,CAAC;AAAA,EAC1D,SAAS,OAAO;AACd,YAAQ,MAAM,+BAA+B,YAAY,KAAK,KAAK;AACnE,WAAO,CAAA;AAAA,EACT;AACF;AAEA,MAAM,eAAe,CAAC,SAAwC;AAC5D,oBAAA;AACAA,aAAG,cAAc,cAAc,KAAK,UAAU,MAAM,MAAM,CAAC,GAAG,OAAO;AACvE;AAQO,MAAM,wBAAwB,MAA+B;AAClE,SAAO,YAAA;AACT;AAMO,MAAM,yBAAyB,CAAC,SAAwC;AAC7E,eAAa,IAAI;AACnB;AAGO,MAAM,yBAAyB,MAAe;AACnD,QAAM,OAAO,YAAA;AACb,SAAQ,KAAK,uBAAmC;AAClD;AAEO,MAAM,yBAAyB,CAAC,aAA4B;AACjE,QAAM,OAAO,YAAA;AACb,OAAK,sBAAsB;AAC3B,eAAa,IAAI;AACnB;AAEO,MAAM,0BAA0B,MAAc;AACnD,QAAM,OAAO,YAAA;AACb,SAAQ,KAAK,kBAA6B;AAC5C;AAMO,MAAM,2BAA2B,CAAC,WAAyB;AAChE,QAAM,OAAO,YAAA;AACb,OAAK,iBAAiB;AACtB,eAAa,IAAI;AACnB;AAWO,MAAM,mBAAmB,MAA0B;AACxD,QAAM,OAAO,YAAA;AACb,SAAQ,KAAK,cAA8B;AAC7C;AAMO,MAAM,mBAAmB,CAAC,cAAiC;AAChE,QAAM,OAAO,YAAA;AACb,OAAK,aAAa;AAClB,eAAa,IAAI;AACnB;AC/FO,MAAM,kBAAkB,MAAoB;AACjD,QAAM,OAAO,sBAAA;AACb,SACG,KAAK,gBAAiC;AAAA,IACrC,UAAU;AAAA,IACV,UAAU;AAAA,IACV,UAAU;AAAA,EAAA;AAGhB;AAOO,MAAM,sBAAsB,CAAC,gBAAiC;AACnE,QAAM,OAAO,sBAAA;AACb,QAAM,cAAe,KAAK,gBAAiC;AAAA,IACzD,UAAU;AAAA,IACV,UAAU;AAAA,IACV,UAAU;AAAA,EAAA;AAGZ,MAAI,YAAY,UAAU;AACxB,WAAO;AAAA,EACT;AAEA,OAAK,eAAe;AAAA,IAClB,UAAU;AAAA,IACV,UAAU;AAAA,IACV,WAAU,oBAAI,KAAA,GAAO,YAAA;AAAA,EAAY;AAEnC,yBAAuB,IAAI;AAC3B,SAAO;AACT;AAOO,MAAM,sBAAsB,CAAC,gBAAiC;AACnE,QAAM,OAAO,sBAAA;AACb,QAAM,cAAe,KAAK,gBAAiC;AAAA,IACzD,UAAU;AAAA,IACV,UAAU;AAAA,IACV,UAAU;AAAA,EAAA;AAGZ,MAAI,CAAC,YAAY,YAAY,YAAY,aAAa,aAAa;AACjE,WAAO;AAAA,EACT;AAEA,OAAK,eAAe;AAAA,IAClB,UAAU;AAAA,IACV,UAAU;AAAA,IACV,UAAU;AAAA,EAAA;AAEZ,yBAAuB,IAAI;AAC3B,SAAO;AACT;AAMO,MAAM,sBAAsB,MAAe;AAChD,QAAM,OAAO,gBAAA;AACb,SAAO,CAAC,KAAK;AACf;AC9EO,MAAM,kBAAkB;AAAA,EAC7B,OAAwB,kBAAkB;AAAA;AAAA,EAC1C,OAAwB,yBAAyB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAMjD,aAAa,sBAAqC;AAChD,UAAM,cAAc,gBAAA;AAEpB,QAAI,YAAY,UAAU;AACxB,aAAO;AAAA,QACL,+BAA+B,YAAY,QAAQ;AAAA,MAAA;AAErD,0BAAoB,YAAY,QAAQ;AAAA,IAC1C;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,aAAqB,eAAe,aAAuC;AACzE,WAAO,oBAAoB,WAAW;AAAA,EACxC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,aAAa,gBACX,aACA,WACY;AACZ,UAAM,YAAY,KAAK,IAAA;AAGvB,WAAO,KAAK,IAAA,IAAQ,YAAY,KAAK,iBAAiB;AACpD,UAAI,MAAM,KAAK,eAAe,WAAW,GAAG;AAC1C,YAAI;AAEF,gBAAM,SAAS,MAAM,UAAA;AACrB,iBAAO;AAAA,QACT,UAAA;AAEE,8BAAoB,WAAW;AAAA,QACjC;AAAA,MACF;AAGA,YAAM,IAAI;AAAA,QAAQ,CAAC,YACjB,WAAW,SAAS,KAAK,sBAAsB;AAAA,MAAA;AAAA,IAEnD;AAEA,UAAM,IAAI;AAAA,MACR,yCAAyC,KAAK,eAAe;AAAA,IAAA;AAAA,EAEjE;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,OAAO,cAAuB;AAC5B,UAAM,OAAO,gBAAA;AACb,WAAO,CAAC,KAAK;AAAA,EACf;AACF;"}
package/dist/index.d.ts
CHANGED
@@ -14,6 +14,7 @@ export type { MMReceiveLaborTicket, MMReceiveLaborTicketReason, MMReceiveLaborTi
 export { MMSendPerson, MMSendResource, MMSendPart, MMSendPartOperation, MMSendWorkOrder, MMSendWorkOrderOperation, MMSendReason, MMSendLaborTicket, } from "./services/mm-api-service";
 export type { MMSendWorkOrderBatch } from "./services/mm-api-service";
 export { getUniqueRows, removeExtraneousFields, getPayloadWithoutIDField, trimObjectValues, cleanupNumbers, addNewFieldFromExternalSource, addNewFieldFromLookupField, formatDateWithTZOffset, applyTimezoneOffsetsToFields, convertToLocalTime, } from "./utils";
+export { getInitialLoadComplete, setInitialLoadComplete } from "./utils";
 export { HTTPClientFactory } from "./utils/http-client";
 export type { HTTPResponse } from "./utils/http-client";
 export { ApplicationInitializer } from "./utils/application-initializer";
@@ -21,6 +22,7 @@ export { runDataSyncService } from "./services/data-sync-service";
 export { logger } from "./services/reporting-service";
 export type { IERPLaborTicketHandler } from "./types/erp-connector";
 export { StandardProcessDrivers, getCachedTimezoneOffset } from "./utils";
+export type { WriteEntitiesToMMResult, MMBatchValidationError } from "./utils";
 export { RestAPIService } from "./services/erp-api-services/rest/rest-api-service";
 export { ErrorHandler } from "./services/erp-api-services/errors";
 export type { QueryParams, PaginatedAPIResponse, } from "./services/erp-api-services/rest/get-query-params";
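Both `index.d.ts` hunks surface new public exports. A minimal usage sketch for the new initial-load flag, assuming the package root as the import path; per the bundled `jobs-shared-data.ts` source earlier in this diff, the flag is persisted in the shared `/tmp/job-state.json` job-state file:

```ts
import { getInitialLoadComplete, setInitialLoadComplete } from "@machinemetrics/mm-erp-sdk";

// Run a one-time backfill only if no previous job instance has completed it.
if (!getInitialLoadComplete()) {
  // await runInitialBackfill(); // hypothetical connector-specific routine
  setInitialLoadComplete(true); // persisted across job instances
}
```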
package/dist/index.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAGH,YAAY,EAAE,aAAa,EAAE,MAAM,uBAAuB,CAAC;AAC3D,OAAO,EAAE,OAAO,EAAE,UAAU,EAAE,MAAM,mBAAmB,CAAC;AACxD,YAAY,EAAE,aAAa,EAAE,WAAW,EAAE,SAAS,EAAE,MAAM,mBAAmB,CAAC;AAG/E,YAAY,EAAE,sBAAsB,EAAE,MAAM,oDAAoD,CAAC;AACjG,OAAO,EAAE,yBAAyB,EAAE,MAAM,oDAAoD,CAAC;AAC/F,OAAO,EAAE,qBAAqB,EAAE,MAAM,mDAAmD,CAAC;AAC1F,YAAY,EAAE,YAAY,EAAE,MAAM,mCAAmC,CAAC;AAGtE,OAAO,EAAE,WAAW,EAAE,MAAM,2BAA2B,CAAC;AACxD,YAAY,EACV,oBAAoB,EACpB,0BAA0B,EAC1B,sCAAsC,EACtC,gBAAgB,GACjB,MAAM,2BAA2B,CAAC;AAEnC,OAAO,EACL,YAAY,EACZ,cAAc,EACd,UAAU,EACV,mBAAmB,EACnB,eAAe,EACf,wBAAwB,EACxB,YAAY,EACZ,iBAAiB,GAClB,MAAM,2BAA2B,CAAC;AAEnC,YAAY,EAAE,oBAAoB,EAAE,MAAM,2BAA2B,CAAC;AAGtE,OAAO,EACL,aAAa,EACb,sBAAsB,EACtB,wBAAwB,EACxB,gBAAgB,EAChB,cAAc,EACd,6BAA6B,EAC7B,0BAA0B,EAC1B,sBAAsB,EACtB,4BAA4B,EAC5B,kBAAkB,GACnB,MAAM,SAAS,CAAC;AAGjB,OAAO,EAAE,iBAAiB,EAAE,MAAM,qBAAqB,CAAC;AACxD,YAAY,EAAE,YAAY,EAAE,MAAM,qBAAqB,CAAC;AAGxD,OAAO,EAAE,sBAAsB,EAAE,MAAM,iCAAiC,CAAC;AAGzE,OAAO,EAAE,kBAAkB,EAAE,MAAM,8BAA8B,CAAC;AAGlE,OAAO,EAAE,MAAM,EAAE,MAAM,8BAA8B,CAAC;AAGtD,YAAY,EAAE,sBAAsB,EAAE,MAAM,uBAAuB,CAAC;AAGpE,OAAO,EAAE,sBAAsB,EAAE,uBAAuB,EAAE,MAAM,SAAS,CAAC;
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAGH,YAAY,EAAE,aAAa,EAAE,MAAM,uBAAuB,CAAC;AAC3D,OAAO,EAAE,OAAO,EAAE,UAAU,EAAE,MAAM,mBAAmB,CAAC;AACxD,YAAY,EAAE,aAAa,EAAE,WAAW,EAAE,SAAS,EAAE,MAAM,mBAAmB,CAAC;AAG/E,YAAY,EAAE,sBAAsB,EAAE,MAAM,oDAAoD,CAAC;AACjG,OAAO,EAAE,yBAAyB,EAAE,MAAM,oDAAoD,CAAC;AAC/F,OAAO,EAAE,qBAAqB,EAAE,MAAM,mDAAmD,CAAC;AAC1F,YAAY,EAAE,YAAY,EAAE,MAAM,mCAAmC,CAAC;AAGtE,OAAO,EAAE,WAAW,EAAE,MAAM,2BAA2B,CAAC;AACxD,YAAY,EACV,oBAAoB,EACpB,0BAA0B,EAC1B,sCAAsC,EACtC,gBAAgB,GACjB,MAAM,2BAA2B,CAAC;AAEnC,OAAO,EACL,YAAY,EACZ,cAAc,EACd,UAAU,EACV,mBAAmB,EACnB,eAAe,EACf,wBAAwB,EACxB,YAAY,EACZ,iBAAiB,GAClB,MAAM,2BAA2B,CAAC;AAEnC,YAAY,EAAE,oBAAoB,EAAE,MAAM,2BAA2B,CAAC;AAGtE,OAAO,EACL,aAAa,EACb,sBAAsB,EACtB,wBAAwB,EACxB,gBAAgB,EAChB,cAAc,EACd,6BAA6B,EAC7B,0BAA0B,EAC1B,sBAAsB,EACtB,4BAA4B,EAC5B,kBAAkB,GACnB,MAAM,SAAS,CAAC;AAGjB,OAAO,EAAE,sBAAsB,EAAE,sBAAsB,EAAE,MAAM,SAAS,CAAC;AAGzE,OAAO,EAAE,iBAAiB,EAAE,MAAM,qBAAqB,CAAC;AACxD,YAAY,EAAE,YAAY,EAAE,MAAM,qBAAqB,CAAC;AAGxD,OAAO,EAAE,sBAAsB,EAAE,MAAM,iCAAiC,CAAC;AAGzE,OAAO,EAAE,kBAAkB,EAAE,MAAM,8BAA8B,CAAC;AAGlE,OAAO,EAAE,MAAM,EAAE,MAAM,8BAA8B,CAAC;AAGtD,YAAY,EAAE,sBAAsB,EAAE,MAAM,uBAAuB,CAAC;AAGpE,OAAO,EAAE,sBAAsB,EAAE,uBAAuB,EAAE,MAAM,SAAS,CAAC;AAC1E,YAAY,EAAG,uBAAuB,EAAE,sBAAsB,EAAE,MAAM,SAAS,CAAC;AAGhF,OAAO,EAAE,cAAc,EAAE,MAAM,mDAAmD,CAAC;AACnF,OAAO,EAAE,YAAY,EAAE,MAAM,oCAAoC,CAAC;AAClE,YAAY,EACV,WAAW,EACX,oBAAoB,GACrB,MAAM,mDAAmD,CAAC;AAC3D,OAAO,EAAE,WAAW,EAAE,MAAM,0CAA0C,CAAC;AACvE,YAAY,EAAE,WAAW,EAAE,MAAM,0CAA0C,CAAC;AAC5E,OAAO,EAAE,cAAc,EAAE,MAAM,qDAAqD,CAAC;AACrF,YAAY,EAAE,kBAAkB,EAAE,MAAM,2CAA2C,CAAC;AAGpF,OAAO,EAAE,iBAAiB,EAAE,MAAM,gDAAgD,CAAC;AAGnF,OAAO,EACL,iBAAiB,EACjB,yBAAyB,GAC1B,MAAM,oDAAoD,CAAC;AAG5D,OAAO,EACL,gBAAgB,EAChB,eAAe,GAChB,MAAM,mCAAmC,CAAC;AAC3C,YAAY,EAAE,QAAQ,EAAE,MAAM,mCAAmC,CAAC"}
package/dist/mm-erp-sdk.js
CHANGED
@@ -1,7 +1,8 @@
-import { C as CoreConfiguration, H as HashedCacheManager } from "./hashed-cache-manager-DkDox9wX.js";
-import { g, a } from "./hashed-cache-manager-DkDox9wX.js";
+import { C as CoreConfiguration, H as HashedCacheManager } from "./hashed-cache-manager-Ci59eC75.js";
+import { g, a } from "./hashed-cache-manager-Ci59eC75.js";
 import { l as logger } from "./logger-QG73MndU.js";
-import { g as getCachedMMToken, s as setCachedMMToken, a as setTimezoneOffsetInCache, b as getCachedTimezoneOffset, S as SQLiteCoordinator } from "./index-Cn9ccxOO.js";
+import { g as getCachedMMToken, s as setCachedMMToken, a as setTimezoneOffsetInCache, b as getCachedTimezoneOffset, S as SQLiteCoordinator } from "./index-CXbOvFyf.js";
+import { c, d } from "./index-CXbOvFyf.js";
 import axios, { AxiosError } from "axios";
 import knex from "knex";
 import { c as config } from "./knexfile-1qKKIORB.js";
@@ -118,8 +119,12 @@ class HTTPClientFactory {
   }
 }
 class AxiosClient {
-  client;
+  client = null;
   retryAttempts;
+  isDestroyed = false;
+  inFlightControllers = /* @__PURE__ */ new Set();
+  pendingTimeouts = /* @__PURE__ */ new Set();
+  pendingSleepResolvers = /* @__PURE__ */ new Set();
   /**
    * Note regarding baseURL, from https://github.com/axios/axios
    * `baseURL` will be prepended to `url` unless `url` is absolute and option `allowAbsoluteUrls` is set to true (the default).
@@ -135,13 +140,34 @@ class AxiosClient {
     });
     this.retryAttempts = retryAttempts;
   }
+  sleep(ms) {
+    return new Promise((resolve) => {
+      if (this.isDestroyed) {
+        resolve();
+        return;
+      }
+      const timeout = setTimeout(() => {
+        this.pendingTimeouts.delete(timeout);
+        this.pendingSleepResolvers.delete(resolve);
+        resolve();
+      }, ms);
+      this.pendingTimeouts.add(timeout);
+      this.pendingSleepResolvers.add(resolve);
+    });
+  }
   async request(config2) {
+    if (this.isDestroyed || !this.client) {
+      throw new HTTPError("HTTP client has been destroyed", 500);
+    }
+    const controller = new AbortController();
+    this.inFlightControllers.add(controller);
     const axiosConfig = {
       method: config2.method,
       url: config2.url,
       headers: config2.headers,
       data: config2.data,
-      params: config2.params
+      params: config2.params,
+      signal: controller.signal
     };
     logger.info("HTTP request starting", {
       url: config2.url,
@@ -155,45 +181,58 @@ class AxiosClient {
     console.log("full constructed URL:", (this.client.defaults.baseURL || "") + config2.url);
     console.log("method:", config2.method);
     let lastError;
-    … (old lines 158–187, not shown)
+    try {
+      for (let attempt = 0; attempt <= this.retryAttempts; attempt++) {
+        try {
+          logger.info(`HTTP request attempt ${attempt + 1}/${this.retryAttempts + 1}`);
+          const response = await this.client.request(axiosConfig);
+          logger.info("HTTP request succeeded", { status: response.status });
+          return {
+            data: response.data,
+            status: response.status,
+            headers: response.headers
+          };
+        } catch (error) {
+          lastError = error;
+          const isAxiosErr = error instanceof AxiosError;
+          const code = isAxiosErr ? error.code : void 0;
+          const status = isAxiosErr ? error.response?.status : void 0;
+          const errorConstructor = error instanceof Error ? error.constructor.name : void 0;
+          const message = error instanceof Error ? error.message : String(error);
+          logger.info(`HTTP request attempt ${attempt + 1} failed`, {
+            errorType: typeof error,
+            errorConstructor,
+            isAxiosError: isAxiosErr,
+            message,
+            code,
+            status
+          });
+          if (error instanceof AxiosError && error.response?.status && error.response.status >= 400 && error.response.status < 500) {
+            logger.info("Not retrying due to 4xx client error");
+            break;
+          }
+          if (error instanceof AxiosError && error.code === "ERR_CANCELED") {
+            break;
+          }
+          if (attempt < this.retryAttempts) {
+            const waitTime = Math.pow(2, attempt) * 1e3;
+            logger.info(`Waiting ${waitTime}ms before retry`);
+            await this.sleep(waitTime);
+            if (this.isDestroyed) {
+              throw new HTTPError("HTTP client has been destroyed", 500);
+            }
+          }
+        }
+      }
+    } finally {
+      this.inFlightControllers.delete(controller);
     }
     logger.info("HTTP request failed after all retries, throwing error");
     throw this.handleError(lastError, config2);
   }
   handleError(error, requestConfig) {
     if (error instanceof AxiosError) {
-      const baseUrl = this.client
+      const baseUrl = this.client?.defaults.baseURL || "";
       const fullUrl = requestConfig ? `${baseUrl}${requestConfig.url}` : "Unknown URL";
       const method = requestConfig?.method || "Unknown method";
       const errorMessage = `${error.message} (${method} ${fullUrl})`;
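The rebuilt `request()` above retries with exponential backoff (1 s, 2 s, 4 s, …), gives up immediately on 4xx responses and canceled requests, and attaches an `AbortController` signal so `destroy()` can cancel in-flight calls. A standalone sketch of the same policy (not the SDK's actual export), for reference:

```ts
import axios, { AxiosError } from "axios";

// Illustrative retry helper mirroring the policy in the hunk above.
async function requestWithRetry<T>(url: string, retryAttempts = 3): Promise<T> {
  let lastError: unknown;
  for (let attempt = 0; attempt <= retryAttempts; attempt++) {
    try {
      return (await axios.get<T>(url)).data;
    } catch (error) {
      lastError = error;
      const status = error instanceof AxiosError ? error.response?.status : undefined;
      if (status !== undefined && status >= 400 && status < 500) break; // client error: do not retry
      if (error instanceof AxiosError && error.code === "ERR_CANCELED") break; // request was aborted
      if (attempt < retryAttempts) {
        const waitTime = Math.pow(2, attempt) * 1000; // exponential backoff
        await new Promise((resolve) => setTimeout(resolve, waitTime));
      }
    }
  }
  throw lastError;
}
```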
@@ -209,6 +248,29 @@ class AxiosClient {
       500
     );
   }
+  async destroy() {
+    if (this.isDestroyed) return;
+    this.isDestroyed = true;
+    for (const c2 of this.inFlightControllers) {
+      try {
+        c2.abort();
+      } catch {
+      }
+    }
+    this.inFlightControllers.clear();
+    for (const t of this.pendingTimeouts) {
+      clearTimeout(t);
+    }
+    this.pendingTimeouts.clear();
+    for (const resolve of this.pendingSleepResolvers) {
+      try {
+        resolve();
+      } catch {
+      }
+    }
+    this.pendingSleepResolvers.clear();
+    this.client = null;
+  }
 }
 class MMTokenManager {
   token;
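The new `destroy()` above aborts every tracked `AbortController`, clears pending backoff timers, resolves outstanding sleeps, and drops the Axios instance. A generic sketch of that track-and-cancel bookkeeping (names are illustrative, not the SDK's API):

```ts
// Minimal sketch of the pattern used by the new destroy().
class CancellableHttp {
  private destroyed = false;
  private controllers = new Set<AbortController>();
  private timers = new Set<ReturnType<typeof setTimeout>>();
  private sleepResolvers = new Set<() => void>();

  sleep(ms: number): Promise<void> {
    return new Promise<void>((resolve) => {
      if (this.destroyed) return resolve();
      const t = setTimeout(() => {
        this.timers.delete(t);
        this.sleepResolvers.delete(resolve);
        resolve();
      }, ms);
      this.timers.add(t);
      this.sleepResolvers.add(resolve);
    });
  }

  async get(url: string): Promise<Response> {
    if (this.destroyed) throw new Error("client destroyed");
    const controller = new AbortController();
    this.controllers.add(controller);
    try {
      return await fetch(url, { signal: controller.signal });
    } finally {
      this.controllers.delete(controller); // stop tracking once settled
    }
  }

  destroy(): void {
    if (this.destroyed) return;
    this.destroyed = true;
    for (const c of this.controllers) c.abort();   // cancel in-flight requests
    this.controllers.clear();
    for (const t of this.timers) clearTimeout(t);  // cancel pending backoff timers
    this.timers.clear();
    for (const resolve of this.sleepResolvers) resolve(); // unblock waiting sleeps
    this.sleepResolvers.clear();
  }
}
```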
@@ -415,8 +477,7 @@ class MMApiClient {
       ]: CoreConfiguration.inst().mmApiBaseUrl
     };
     this.api = HTTPClientFactory.getInstance({
-      baseUrl:
-      // We'll set the full URL in each request
+      baseUrl: CoreConfiguration.inst().mmERPSvcApiBaseUrl,
       retryAttempts: CoreConfiguration.inst().mmApiRetryAttempts
     });
   }
@@ -898,6 +959,13 @@ class MMApiClient {
       "query getErpResources { machineResource: erpResources { machineRef machineGroupId resourceId }}"
     );
   }
+  /**
+   * Cleanup all HTTP connections and resources
+   * Call this when the service is no longer needed
+   */
+  async destroy() {
+    await this.api.destroy();
+  }
   //#endregion public methods
 }
 class MMSendPerson {
@@ -1969,11 +2037,11 @@ class ErrorProcessor {
   static createFailedRecordKeySet(entityType, batchErrors) {
     const failedKeySet = /* @__PURE__ */ new Set();
     batchErrors.forEach((batchError) => {
-      batchError.
+      batchError.affectedEntities.forEach((affectedEntity) => {
         try {
           const primaryKey = EntityTransformer.extractPrimaryKey(
             entityType,
-            …
+            affectedEntity
           );
           failedKeySet.add(primaryKey);
         } catch (error) {
@@ -2035,7 +2103,7 @@ class ErrorProcessor {
   /**
    * Extracts error count and batch errors from MM API response for partial failures (HTTP 207)
    * This supports all entities, including the slightly different format for labor tickets.
-   * In case of labor tickets, the updateErrors and insertErrors arrays are combined into
+   * In case of labor tickets, the updateErrors and insertErrors arrays are combined into affectedEntities.
    * @param mmApiResponse The full MM API response object
    * @param entityType The type of entity being processed (determines response structure)
    * @returns Object containing errorCount and batchErrors
@@ -2080,12 +2148,12 @@ class ErrorProcessor {
         });
         return {
           message: error.message,
-          …
+          affectedEntities: typedErrorEntities
         };
       }
     );
     const errorCount = batchErrors.reduce((total, batchError) => {
-      return total + batchError.
+      return total + batchError.affectedEntities.length;
     }, 0);
     return {
       errorCount,
@@ -2184,11 +2252,11 @@ class ErrorProcessor {
       });
       return {
         message: typeof err?.message === "string" ? err.message : "Unknown error",
-        …
+        affectedEntities: typedErrorEntities
       };
     });
     const errorCount = batchErrors.reduce((total, batchError) => {
-      return total + batchError.
+      return total + batchError.affectedEntities.length;
     }, 0);
     logger.info("writeEntitiesToMM: Extracted 500 error details", {
       batchErrorsCount: batchErrors.length,
@@ -2449,9 +2517,10 @@ class StandardProcessDrivers {
    *
    * } catch (error) {
    *   if (error instanceof MMBatchValidationError) {
-   *     // HTTP 207 - Partial success with some validation errors
-   *     // HTTP 500 - A specific type
+   *     // HTTP 207 - Partial success with some batches failing due to validation errors
+   *     // HTTP 500 - A specific type representing complete failure due to validation issues;
    *     // other 500 types represent failure due to other issues and are not converted to MMBatchValidationError
+   *     // Note: Each batch error contains ALL entities from the failing batch, not necessarily just the failed ones
    *
    *     console.log(`⚠️ Batch processing completed with errors (HTTP ${error.httpStatus})`);
    *     console.log(`📊 Metrics: ${error.upsertedEntities} upserted, ${error.localDedupeCount} locally deduplicated, ${error.apiDedupeCount} API deduplicated`);
@@ -2460,18 +2529,18 @@ class StandardProcessDrivers {
    *     // Process specific batch errors for retry or logging
    *     error.batchErrors.forEach((batchError, index) => {
    *       console.log(`Batch ${index + 1} error: ${batchError.message}`);
-   *       console.log(`
+   *       console.log(`All entities in failing batch:`, batchError.affectedEntities);
    *
-   *       // Example: Queue
-   *       await queueForRetry(batchError.
+   *       // Example: Queue entire failing batch for retry (contains both successful and failed entities)
+   *       await queueForRetry(batchError.affectedEntities);
    *     });
    *
    *     // Decide whether to continue or halt based on httpStatus
    *     if (error.httpStatus === 207) {
-   *       // Partial success - some
+   *       // Partial success - some batches processed successfully, others failed
    *       console.log('⚠️ Continuing with partial success');
    *     } else if (error.httpStatus === 500) {
-   *       // Complete failure -
+   *       // Complete failure - all batches failed due to validation issues
    *       console.log('🛑 Complete failure - no records processed');
    *       throw error; // Re-throw if complete failure should halt the process
    *     }
@@ -2958,6 +3027,13 @@ class RestAPIService {
       ErrorHandler.handle(error);
     }
   }
+  /**
+   * Cleanup all HTTP connections and resources
+   * Call this when the service is no longer needed
+   */
+  async destroy() {
+    await this.client.destroy();
+  }
 }
 function getERPAPITypeFromEntity(entity, entityMap) {
   const entry = Object.entries(entityMap).find(
@@ -3092,6 +3168,13 @@ class GraphQLService {
       ErrorHandler.handle(error);
     }
   }
+  /**
+   * Cleanup all HTTP connections and resources
+   * Call this when the service is no longer needed
+   */
+  async destroy() {
+    await this.client.destroy();
+  }
 }
 class OAuthClient {
   constructor(config2) {
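With this release, `MMApiClient`, `RestAPIService`, and `GraphQLService` all gain `destroy()` methods that delegate to the underlying HTTP client. A hedged usage sketch (service construction is omitted; only the teardown call is taken from the diff):

```ts
import { RestAPIService } from "@machinemetrics/mm-erp-sdk";

// Release HTTP resources when a service instance is no longer needed.
async function runSync(service: RestAPIService): Promise<void> {
  try {
    // ... perform ERP reads/writes with the service ...
  } finally {
    await service.destroy(); // aborts in-flight requests and drops the client
  }
}
```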
@@ -3534,12 +3617,14 @@ export {
   formatDateWithTZOffset,
   getCachedTimezoneOffset,
   g as getErpApiConnectionParams,
+  c as getInitialLoadComplete,
   getPayloadWithoutIDField,
   a as getSQLServerConfiguration,
   getUniqueRows,
   logger,
   removeExtraneousFields,
   runDataSyncService,
+  d as setInitialLoadComplete,
   trimObjectValues
 };
 //# sourceMappingURL=mm-erp-sdk.js.map