@machinemetrics/mm-erp-sdk 0.1.8-beta.9 → 0.1.8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +3 -8
- package/dist/{hashed-cache-manager-B1hPBNnF.js → hashed-cache-manager-Ds-HksA0.js} +32 -2
- package/dist/hashed-cache-manager-Ds-HksA0.js.map +1 -0
- package/dist/{index-DCgheVjV.js → index-DTtmv8Iq.js} +15 -4
- package/dist/index-DTtmv8Iq.js.map +1 -0
- package/dist/index.d.ts +7 -5
- package/dist/index.d.ts.map +1 -1
- package/dist/mm-erp-sdk.js +254 -26
- package/dist/mm-erp-sdk.js.map +1 -1
- package/dist/services/data-sync-service/configuration-manager.d.ts +5 -0
- package/dist/services/data-sync-service/configuration-manager.d.ts.map +1 -1
- package/dist/services/data-sync-service/jobs/clean-up-expired-cache.js +2 -2
- package/dist/services/data-sync-service/jobs/from-erp.js +1 -1
- package/dist/services/mm-api-service/mm-api-service.d.ts +4 -4
- package/dist/services/mm-api-service/mm-api-service.d.ts.map +1 -1
- package/dist/services/mm-api-service/types/checkpoint.d.ts +1 -2
- package/dist/services/mm-api-service/types/checkpoint.d.ts.map +1 -1
- package/dist/services/psql-erp-service/configuration.d.ts +8 -0
- package/dist/services/psql-erp-service/configuration.d.ts.map +1 -0
- package/dist/services/psql-erp-service/index.d.ts +14 -0
- package/dist/services/psql-erp-service/index.d.ts.map +1 -0
- package/dist/services/psql-erp-service/internal/types/psql-types.d.ts +12 -0
- package/dist/services/psql-erp-service/internal/types/psql-types.d.ts.map +1 -0
- package/dist/services/psql-erp-service/psql-helpers.d.ts +32 -0
- package/dist/services/psql-erp-service/psql-helpers.d.ts.map +1 -0
- package/dist/services/psql-erp-service/psql-service.d.ts +50 -0
- package/dist/services/psql-erp-service/psql-service.d.ts.map +1 -0
- package/dist/types/erp-connector.d.ts +4 -2
- package/dist/types/erp-connector.d.ts.map +1 -1
- package/dist/types/erp-types.d.ts +0 -9
- package/dist/types/erp-types.d.ts.map +1 -1
- package/dist/types/index.d.ts +1 -1
- package/dist/types/index.d.ts.map +1 -1
- package/dist/utils/index.d.ts +1 -1
- package/dist/utils/index.d.ts.map +1 -1
- package/dist/utils/local-data-store/jobs-shared-data.d.ts +15 -1
- package/dist/utils/local-data-store/jobs-shared-data.d.ts.map +1 -1
- package/dist/utils/standard-process-drivers/labor-ticket-erp-synchronizer.d.ts +2 -3
- package/dist/utils/standard-process-drivers/labor-ticket-erp-synchronizer.d.ts.map +1 -1
- package/dist/utils/standard-process-drivers/standard-process-drivers.d.ts +3 -3
- package/dist/utils/standard-process-drivers/standard-process-drivers.d.ts.map +1 -1
- package/dist/utils/time-utils.d.ts +2 -1
- package/dist/utils/time-utils.d.ts.map +1 -1
- package/dist/utils/timezone.d.ts +6 -3
- package/dist/utils/timezone.d.ts.map +1 -1
- package/package.json +4 -1
- package/src/index.ts +32 -7
- package/src/services/data-sync-service/configuration-manager.ts +33 -0
- package/src/services/mm-api-service/mm-api-service.ts +4 -4
- package/src/services/mm-api-service/types/checkpoint.ts +1 -3
- package/src/services/psql-erp-service/configuration.ts +7 -0
- package/src/services/psql-erp-service/index.ts +22 -0
- package/src/services/psql-erp-service/internal/types/psql-types.ts +13 -0
- package/src/services/psql-erp-service/psql-helpers.ts +114 -0
- package/src/services/psql-erp-service/psql-service.ts +247 -0
- package/src/types/erp-connector.ts +5 -2
- package/src/types/erp-types.ts +0 -11
- package/src/types/index.ts +0 -1
- package/src/utils/application-initializer.ts +1 -1
- package/src/utils/index.ts +4 -1
- package/src/utils/local-data-store/jobs-shared-data.ts +24 -1
- package/src/utils/standard-process-drivers/labor-ticket-erp-synchronizer.ts +2 -3
- package/src/utils/standard-process-drivers/standard-process-drivers.ts +3 -3
- package/src/utils/time-utils.ts +11 -6
- package/src/utils/timezone.ts +9 -6
- package/dist/hashed-cache-manager-B1hPBNnF.js.map +0 -1
- package/dist/index-DCgheVjV.js.map +0 -1
package/README.md
CHANGED

@@ -25,19 +25,17 @@ npm install mm-erp-sdk
 import {
   IERPConnector,
   IERPLaborTicketHandler,
-  ERPType,
   ERPObjType,
   MMReceiveLaborTicket,
   ApplicationInitializer,
   runDataSyncService,
-  registerERPConnector,
 } from "mm-erp-sdk";

 export default class MyERPConnector
   implements IERPConnector, IERPLaborTicketHandler
 {
-  get type():
-    return
+  get type(): string {
+    return "JOB_BOSS"; // replace with your ERP type
   }

   async startUp(): Promise<void> {
@@ -72,9 +70,6 @@ export default class MyERPConnector

   // Implement other required methods...
 }
-
-// Register your connector
-registerERPConnector(ERPType.JOB_BOSS, () => new MyERPConnector());
 ```

 ### 3. Start the Data Sync Service
@@ -144,4 +139,4 @@ LOG_LEVEL="info"
 - `RestAPIService`: HTTP API client
 - `MMApiClient`: MachineMetrics API client

-For detailed API documentation, see the TypeScript definitions.
+For detailed API documentation, see the TypeScript definitions.
package/dist/{hashed-cache-manager-B1hPBNnF.js → hashed-cache-manager-Ds-HksA0.js}
CHANGED

@@ -4,7 +4,7 @@ import stringify from "json-stable-stringify";
 import XXH from "xxhashjs";
 import "./config-cB7h4yvc.js";
 import { c as configureLogger, l as logger } from "./logger-CBDNtsMq.js";
-import "./index-
+import "./index-DTtmv8Iq.js";
 class CoreConfiguration {
   static instance;
   // General Configuration
@@ -53,6 +53,36 @@ class CoreConfiguration {
     }
     return CoreConfiguration.instance;
   }
+  /**
+   * Returns a sanitized version of the configuration for safe logging.
+   * Masks sensitive fields like authentication tokens.
+   */
+  toSafeLogObject() {
+    const maskSensitiveValue = (value) => {
+      if (!value || value.length === 0) {
+        return "";
+      }
+      if (value.length < 6) {
+        return "********";
+      }
+      return value.substring(0, 3) + "********" + value.substring(value.length - 3);
+    };
+    return {
+      logLevel: this.logLevel,
+      erpSystem: this.erpSystem,
+      nodeEnv: this.nodeEnv,
+      mmERPSvcApiBaseUrl: this.mmERPSvcApiBaseUrl,
+      mmApiBaseUrl: this.mmApiBaseUrl,
+      mmApiAuthToken: maskSensitiveValue(this.mmApiAuthToken),
+      mmApiRetryAttempts: this.mmApiRetryAttempts,
+      cacheTTL: this.cacheTTL,
+      erpApiPagingLimit: this.erpApiPagingLimit,
+      fromErpInterval: this.fromErpInterval,
+      toErpInterval: this.toErpInterval,
+      retryLaborTicketsInterval: this.retryLaborTicketsInterval,
+      cacheExpirationCheckInterval: this.cacheExpirationCheckInterval
+    };
+  }
 }
 const getSQLServerConfiguration = () => {
   return {
@@ -289,4 +319,4 @@ export {
   getSQLServerConfiguration as a,
   getErpApiConnectionParams as g
 };
-//# sourceMappingURL=hashed-cache-manager-
+//# sourceMappingURL=hashed-cache-manager-Ds-HksA0.js.map

package/dist/hashed-cache-manager-Ds-HksA0.js.map
ADDED

@@ -0,0 +1 @@
+
{"version":3,"file":"hashed-cache-manager-Ds-HksA0.js","sources":["../src/services/data-sync-service/configuration-manager.ts","../src/services/caching-service/hashed-cache-manager.ts"],"sourcesContent":["import \"dotenv/config\";\nimport { configureLogger } from \"../reporting-service/logger\";\nimport { SQLServerConfiguration } from \"../sql-server-erp-service/configuration\";\n\nexport class CoreConfiguration {\n private static instance: CoreConfiguration;\n\n // General Configuration\n public readonly logLevel: string;\n public readonly erpSystem: string;\n public readonly nodeEnv: string;\n\n // MM API (aka \"Mapping\") Service\n public readonly mmERPSvcApiBaseUrl: string;\n public readonly mmApiBaseUrl: string;\n public readonly mmApiAuthToken: string;\n public readonly mmApiRetryAttempts: number;\n\n // Caching (optionally used for interacting with the MM API)\n public readonly cacheTTL: number;\n\n // ERP API Service\n public readonly erpApiPagingLimit: number; //Pagination limit for ERP API\n\n // Job timing Intervals\n public readonly fromErpInterval: string;\n public readonly toErpInterval: string;\n public readonly retryLaborTicketsInterval: string;\n public readonly cacheExpirationCheckInterval: string;\n\n private constructor() {\n this.logLevel = process.env.LOG_LEVEL || \"info\";\n this.erpSystem = process.env.ERP_SYSTEM || \"template\";\n this.nodeEnv = process.env.NODE_ENV || \"development\";\n\n //#region MM API (aka \"Mapping\") Service\n /**\n * MM ERP Service REST API URL (typically https://erp-api.svc.machinemetrics.com)\n */\n this.mmERPSvcApiBaseUrl = process.env.MM_MAPPING_SERVICE_URL || \"\";\n\n /**\n * MM REST API URL (typically https://api.machinemetrics.com)\n */\n console.log(\"=== CONFIG DEBUG ===\");\n console.log(\"MM_MAPPING_AUTH_SERVICE_URL env var:\", process.env.MM_MAPPING_AUTH_SERVICE_URL);\n this.mmApiBaseUrl = process.env.MM_MAPPING_AUTH_SERVICE_URL || \"\";\n console.log(\"mmApiBaseUrl set to:\", this.mmApiBaseUrl);\n console.log(\"=== END CONFIG DEBUG ===\");\n\n /**\n * Company Auth Token\n */\n this.mmApiAuthToken = process.env.MM_MAPPING_SERVICE_TOKEN || \"\";\n\n /**\n * Number of retry attempts for MM API calls\n */\n this.mmApiRetryAttempts = parseInt(process.env.RETRY_ATTEMPTS || \"0\");\n //#endregion MM API (aka \"Mapping\") Service\n\n /**\n * Default pagination limit for ERP API\n */\n this.erpApiPagingLimit = parseInt(process.env.ERP_PAGINATION_LIMIT || \"0\");\n //#endregion ERP API Service\n\n /**\n * For how to define the intervals, see Bree's documentation: https://github.com/breejs/bree\n */\n this.fromErpInterval =\n process.env.FROM_ERP_INTERVAL || process.env.POLL_INTERVAL || \"5 min\";\n this.toErpInterval = process.env.TO_ERP_INTERVAL || \"5 min\";\n this.retryLaborTicketsInterval =\n process.env.RETRY_LABOR_TICKETS_INTERVAL || \"30 min\";\n this.cacheExpirationCheckInterval =\n process.env.CACHE_EXPIRATION_CHECK_INTERVAL || \"5 min\";\n\n /**\n * Cache TTL (in seconds)\n */\n const cacheTTLDef = 7 * 24 * 60 * 60; // 7 days\n this.cacheTTL = parseInt(process.env.CACHE_TTL || cacheTTLDef.toString());\n\n // Configure the logger with our settings\n configureLogger(this.logLevel, this.nodeEnv);\n }\n\n public static inst(): CoreConfiguration {\n if (!CoreConfiguration.instance) {\n CoreConfiguration.instance = new CoreConfiguration();\n }\n return CoreConfiguration.instance;\n }\n\n /**\n * Returns a sanitized version of the configuration for safe logging.\n * Masks sensitive fields like authentication tokens.\n */\n public 
toSafeLogObject(): Record<string, unknown> {\n const maskSensitiveValue = (value: string): string => {\n if (!value || value.length === 0) {\n return \"\";\n }\n if (value.length < 6) {\n return \"********\";\n }\n // Show first 3 and last 3 characters, mask the middle\n return value.substring(0, 3) + \"********\" + value.substring(value.length - 3);\n };\n\n return {\n logLevel: this.logLevel,\n erpSystem: this.erpSystem,\n nodeEnv: this.nodeEnv,\n mmERPSvcApiBaseUrl: this.mmERPSvcApiBaseUrl,\n mmApiBaseUrl: this.mmApiBaseUrl,\n mmApiAuthToken: maskSensitiveValue(this.mmApiAuthToken),\n mmApiRetryAttempts: this.mmApiRetryAttempts,\n cacheTTL: this.cacheTTL,\n erpApiPagingLimit: this.erpApiPagingLimit,\n fromErpInterval: this.fromErpInterval,\n toErpInterval: this.toErpInterval,\n retryLaborTicketsInterval: this.retryLaborTicketsInterval,\n cacheExpirationCheckInterval: this.cacheExpirationCheckInterval,\n };\n }\n}\n\n/**\n * Helper function to get the SQL Server Configuration for collectors that use SQL Server to interact with the ERP\n */\nexport const getSQLServerConfiguration = (): SQLServerConfiguration => {\n return {\n username: process.env.ERP_SQLSERVER_USERNAME || \"\",\n password: process.env.ERP_SQLSERVER_PASSWORD || \"\",\n database: process.env.ERP_SQLSERVER_DATABASE || \"\",\n host:\n process.env.ERP_SQLSERVER_HOST || process.env.ERP_SQLSERVER_SERVER || \"\",\n port: process.env.ERP_SQLSERVER_PORT || \"1433\",\n connectionTimeout: process.env.ERP_SQLSERVER_CONNECTION_TIMEOUT || \"30000\",\n requestTimeout: process.env.ERP_SQLSERVER_REQUEST_TIMEOUT || \"60000\",\n poolMax: process.env.ERP_SQLSERVER_MAX || \"10\",\n poolMin: process.env.ERP_SQLSERVER_MIN || \"0\",\n idleTimeoutMillis:\n process.env.ERP_SQLSERVER_IDLE_TIMEOUT_MMILLIS || \"30000\",\n encrypt: process.env.ERP_SQLSERVER_ENCRYPT === \"true\",\n trustServer: process.env.ERP_SQLSERVER_TRUST_SERVER === \"true\",\n };\n};\n\n/**\n * Parameters required to connect to an ERP system via its API.\n * Contains all the necessary settings to establish a connection and authenticate with an ERP system's API.\n */\nexport class ErpApiConnectionParams {\n constructor(\n public readonly erpApiUrl: string, // Base url of ERP\n public readonly erpApiClientId: string, // Client ID to authenticate with ERP\n public readonly erpApiClientSecret: string, // Client Secret to authenticate with ERP\n public readonly erpApiOrganizationId: string, // Organization / tenant Id\n public readonly erpAuthBaseUrl: string, // Auth base url\n public readonly retryAttempts: number = 3 // Number of retry attempts for API calls\n ) {}\n}\n\n/**\n * Helper function to get the ERP API Connection Parameters\n * Not all connectors use these, but keeping these commonly values in one place may\n * make it easier to set and understand env var names set in App.\n */\nexport const getErpApiConnectionParams = (): ErpApiConnectionParams => {\n return new ErpApiConnectionParams(\n process.env.ERP_API_URL || \"\",\n process.env.ERP_API_CLIENT_ID || \"\",\n process.env.ERP_API_CLIENT_SECRET || \"\",\n process.env.ERP_API_ORGANIZATION_ID || \"\",\n process.env.ERP_AUTH_BASE_URL || \"\",\n parseInt(process.env.ERP_API_RETRY_ATTEMPTS || \"3\")\n );\n};\n","import knex, { Knex } from \"knex\";\nimport config from \"../../knexfile\";\nimport stringify from \"json-stable-stringify\";\nimport XXH from \"xxhashjs\";\nimport { ERPObjType } from \"../../types/erp-types\";\nimport { CacheMetrics } from \"./index\";\nimport { CoreConfiguration } from 
\"../data-sync-service/configuration-manager\";\nimport { logger } from \"../reporting-service\";\n\ntype HashedCacheManagerOptions = {\n ttl?: number;\n tableName?: string;\n};\n\nexport class HashedCacheManager {\n private static TABLE_NAME = \"sdk_cache\";\n private db: Knex;\n private options: HashedCacheManagerOptions;\n private static readonly SEED = 0xabcd; // Arbitrary seed for hashing\n private isDestroyed: boolean = false;\n private metrics: CacheMetrics = {\n recordCounts: {},\n };\n\n constructor(options?: HashedCacheManagerOptions) {\n this.options = {\n ttl: options?.ttl || CoreConfiguration.inst().cacheTTL,\n tableName: options?.tableName || HashedCacheManager.TABLE_NAME,\n };\n this.db = knex({\n ...config.local,\n pool: {\n min: 0,\n max: 10,\n },\n });\n }\n\n /**\n * Checks if the cache manager is still valid\n * @throws Error if the cache manager has been destroyed\n */\n private checkValid(): void {\n if (this.isDestroyed) {\n throw new Error(\"Cache manager has been destroyed\");\n }\n }\n\n /**\n * Generates a stable hash of a record using JSON stringify + xxhash\n */\n public static hashRecord(record: object): string {\n try {\n const serialized = stringify(record);\n if (!serialized) {\n throw new Error(\"Failed to serialize record for hashing\");\n }\n const hash = XXH.h64(serialized, HashedCacheManager.SEED).toString(16);\n return hash;\n } catch (error) {\n if (error instanceof Error && error.message.includes(\"circular\")) {\n throw new Error(\"Failed to serialize record for hashing\");\n }\n throw error;\n }\n }\n\n /**\n * Gets a record from the cache\n * @param type The type of record\n * @param hash The hash of the record\n * @returns The record if it exists, null otherwise\n */\n private async getRecord(\n type: ERPObjType,\n hash: string\n ): Promise<{ key: string } | null> {\n this.checkValid();\n return this.db(this.options.tableName)\n .select(\"key\")\n .where({ type, key: hash })\n .first();\n }\n\n /**\n * Stores a record in the cache\n * @param type The type of record\n * @param record The record to store\n * @returns true if a new record was created, false if an existing record was updated\n */\n public async store(type: ERPObjType, record: object): Promise<boolean> {\n if (!this.isDestroyed && record) {\n try {\n const hash = HashedCacheManager.hashRecord(record);\n const now = new Date();\n\n // First check if record exists with same type and hash\n const existing = await this.db(this.options.tableName)\n .where({\n type,\n key: hash,\n })\n .first();\n\n if (existing) {\n return false; // No need to update, hash hasn't changed\n } else {\n // Insert new record with minimal data\n const result = await this.db(this.options.tableName)\n .insert({\n type,\n key: hash,\n created_at: now,\n })\n .returning(\"id\");\n return result.length > 0;\n }\n } catch (error) {\n logger.error(\"Error storing record:\", error);\n throw error;\n }\n }\n return false;\n }\n\n /**\n * Checks if a record has changed since last seen\n * @param type The type of record\n * @param record The record to check\n * @returns true if the record has changed or is new\n */\n async hasChanged(type: ERPObjType, record: object): Promise<boolean> {\n this.checkValid();\n const newHash = HashedCacheManager.hashRecord(record);\n const existing = await this.getRecord(type, newHash);\n return !existing;\n }\n\n /**\n * Checks if a record has changed and stores it if it has\n * @param type The type of record\n * @param record The record to check and store\n * @returns true if the record 
was changed or is new\n */\n async upsert(type: ERPObjType, record: object): Promise<boolean> {\n this.checkValid();\n const hasChanged = await this.hasChanged(type, record);\n if (hasChanged) {\n await this.store(type, record as Record<string, unknown>);\n }\n return hasChanged;\n }\n\n /**\n * Removes expired records based on TTL\n */\n async removeExpiredObjects(): Promise<void> {\n this.checkValid();\n const ttl = this.options.ttl;\n if (!ttl) return;\n\n const ttlMilliseconds = ttl * 1000;\n const expirationLimitDate = new Date(Date.now() - ttlMilliseconds);\n const expirationLimit = expirationLimitDate\n .toISOString()\n .slice(0, 19)\n .replace(\"T\", \" \");\n\n await this.db(this.options.tableName)\n .where(\"created_at\", \"<\", expirationLimit)\n .del();\n }\n\n /**\n * Gets all records of a specific type\n */\n async getRecordsByType(type: ERPObjType): Promise<string[]> {\n this.checkValid();\n const records = await this.db(this.options.tableName)\n .select(\"key\")\n .where({ type });\n\n return records.map((record) => record.key);\n }\n\n /**\n * Removes all records of a specific type\n */\n async removeRecordsByType(type: ERPObjType): Promise<void> {\n this.checkValid();\n await this.db(this.options.tableName).where({ type }).del();\n }\n\n /**\n * Removes a specific record\n */\n public async removeRecord(type: ERPObjType, record: object): Promise<void> {\n if (!this.isDestroyed) {\n try {\n const hash = HashedCacheManager.hashRecord(record);\n await this.db(this.options.tableName)\n .where({ type, key: hash }) // Use key for deletion\n .del();\n } catch (error) {\n logger.error(\"Error removing record:\", error);\n throw error;\n }\n }\n }\n\n /**\n * Clears all records from the cache\n */\n async clear(): Promise<void> {\n this.checkValid();\n await this.db(this.options.tableName).del();\n }\n\n /**\n * Cleans up database connection and marks the cache manager as destroyed\n */\n async destroy(): Promise<void> {\n if (!this.isDestroyed) {\n await this.db.destroy();\n this.isDestroyed = true;\n }\n }\n\n /**\n * Gets the current cache metrics\n * @returns The current cache metrics\n */\n async getMetrics(): Promise<CacheMetrics> {\n this.checkValid();\n\n // Get counts for each type\n const counts = (await this.db(this.options.tableName)\n .select(\"type\")\n .count(\"* as count\")\n .groupBy(\"type\")) as Array<{ type: string; count: string }>;\n\n // Update metrics\n this.metrics.recordCounts = counts.reduce(\n (acc, row) => {\n acc[row.type] = parseInt(row.count, 10);\n return acc;\n },\n {} as Record<string, number>\n );\n\n return this.metrics;\n 
}\n}\n"],"names":[],"mappings":";;;;;;;AAIO,MAAM,kBAAkB;AAAA,EAC7B,OAAe;AAAA;AAAA,EAGC;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAGA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAGA;AAAA;AAAA,EAGA;AAAA;AAAA;AAAA,EAGA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAER,cAAc;AACpB,SAAK,WAAW,QAAQ,IAAI,aAAa;AACzC,SAAK,YAAY,QAAQ,IAAI,cAAc;AAC3C,SAAK,UAAU,QAAQ,IAAI,YAAY;AAMvC,SAAK,qBAAqB,QAAQ,IAAI,0BAA0B;AAKhE,YAAQ,IAAI,sBAAsB;AAClC,YAAQ,IAAI,wCAAwC,QAAQ,IAAI,2BAA2B;AAC3F,SAAK,eAAe,QAAQ,IAAI,+BAA+B;AAC/D,YAAQ,IAAI,wBAAwB,KAAK,YAAY;AACrD,YAAQ,IAAI,0BAA0B;AAKtC,SAAK,iBAAiB,QAAQ,IAAI,4BAA4B;AAK9D,SAAK,qBAAqB,SAAS,QAAQ,IAAI,kBAAkB,GAAG;AAMpE,SAAK,oBAAoB,SAAS,QAAQ,IAAI,wBAAwB,GAAG;AAMzE,SAAK,kBACH,QAAQ,IAAI,qBAAqB,QAAQ,IAAI,iBAAiB;AAChE,SAAK,gBAAgB,QAAQ,IAAI,mBAAmB;AACpD,SAAK,4BACH,QAAQ,IAAI,gCAAgC;AAC9C,SAAK,+BACH,QAAQ,IAAI,mCAAmC;AAKjD,UAAM,cAAc,IAAI,KAAK,KAAK;AAClC,SAAK,WAAW,SAAS,QAAQ,IAAI,aAAa,YAAY,UAAU;AAGxE,oBAAgB,KAAK,UAAU,KAAK,OAAO;AAAA,EAC7C;AAAA,EAEA,OAAc,OAA0B;AACtC,QAAI,CAAC,kBAAkB,UAAU;AAC/B,wBAAkB,WAAW,IAAI,kBAAA;AAAA,IACnC;AACA,WAAO,kBAAkB;AAAA,EAC3B;AAAA;AAAA;AAAA;AAAA;AAAA,EAMO,kBAA2C;AAChD,UAAM,qBAAqB,CAAC,UAA0B;AACpD,UAAI,CAAC,SAAS,MAAM,WAAW,GAAG;AAChC,eAAO;AAAA,MACT;AACA,UAAI,MAAM,SAAS,GAAG;AACpB,eAAO;AAAA,MACT;AAEA,aAAO,MAAM,UAAU,GAAG,CAAC,IAAI,aAAa,MAAM,UAAU,MAAM,SAAS,CAAC;AAAA,IAC9E;AAEA,WAAO;AAAA,MACL,UAAU,KAAK;AAAA,MACf,WAAW,KAAK;AAAA,MAChB,SAAS,KAAK;AAAA,MACd,oBAAoB,KAAK;AAAA,MACzB,cAAc,KAAK;AAAA,MACnB,gBAAgB,mBAAmB,KAAK,cAAc;AAAA,MACtD,oBAAoB,KAAK;AAAA,MACzB,UAAU,KAAK;AAAA,MACf,mBAAmB,KAAK;AAAA,MACxB,iBAAiB,KAAK;AAAA,MACtB,eAAe,KAAK;AAAA,MACpB,2BAA2B,KAAK;AAAA,MAChC,8BAA8B,KAAK;AAAA,IAAA;AAAA,EAEvC;AACF;AAKO,MAAM,4BAA4B,MAA8B;AACrE,SAAO;AAAA,IACL,UAAU,QAAQ,IAAI,0BAA0B;AAAA,IAChD,UAAU,QAAQ,IAAI,0BAA0B;AAAA,IAChD,UAAU,QAAQ,IAAI,0BAA0B;AAAA,IAChD,MACE,QAAQ,IAAI,sBAAsB,QAAQ,IAAI,wBAAwB;AAAA,IACxE,MAAM,QAAQ,IAAI,sBAAsB;AAAA,IACxC,mBAAmB,QAAQ,IAAI,oCAAoC;AAAA,IACnE,gBAAgB,QAAQ,IAAI,iCAAiC;AAAA,IAC7D,SAAS,QAAQ,IAAI,qBAAqB;AAAA,IAC1C,SAAS,QAAQ,IAAI,qBAAqB;AAAA,IAC1C,mBACE,QAAQ,IAAI,sCAAsC;AAAA,IACpD,SAAS,QAAQ,IAAI,0BAA0B;AAAA,IAC/C,aAAa,QAAQ,IAAI,+BAA+B;AAAA,EAAA;AAE5D;AAMO,MAAM,uBAAuB;AAAA,EAClC,YACkB,WACA,gBACA,oBACA,sBACA,gBACA,gBAAwB,GACxC;AANgB,SAAA,YAAA;AACA,SAAA,iBAAA;AACA,SAAA,qBAAA;AACA,SAAA,uBAAA;AACA,SAAA,iBAAA;AACA,SAAA,gBAAA;AAAA,EACf;AACL;AAOO,MAAM,4BAA4B,MAA8B;AACrE,SAAO,IAAI;AAAA,IACT,QAAQ,IAAI,eAAe;AAAA,IAC3B,QAAQ,IAAI,qBAAqB;AAAA,IACjC,QAAQ,IAAI,yBAAyB;AAAA,IACrC,QAAQ,IAAI,2BAA2B;AAAA,IACvC,QAAQ,IAAI,qBAAqB;AAAA,IACjC,SAAS,QAAQ,IAAI,0BAA0B,GAAG;AAAA,EAAA;AAEtD;ACtKO,MAAM,mBAAmB;AAAA,EAC9B,OAAe,aAAa;AAAA,EACpB;AAAA,EACA;AAAA,EACR,OAAwB,OAAO;AAAA;AAAA,EACvB,cAAuB;AAAA,EACvB,UAAwB;AAAA,IAC9B,cAAc,CAAA;AAAA,EAAC;AAAA,EAGjB,YAAY,SAAqC;AAC/C,SAAK,UAAU;AAAA,MACb,KAAK,SAAS,OAAO,kBAAkB,OAAO;AAAA,MAC9C,WAAW,SAAS,aAAa,mBAAmB;AAAA,IAAA;AAEtD,SAAK,KAAK,KAAK;AAAA,MACb,GAAG,OAAO;AAAA,MACV,MAAM;AAAA,QACJ,KAAK;AAAA,QACL,KAAK;AAAA,MAAA;AAAA,IACP,CACD;AAAA,EACH;AAAA;AAAA;AAAA;AAAA;AAAA,EAMQ,aAAmB;AACzB,QAAI,KAAK,aAAa;AACpB,YAAM,IAAI,MAAM,kCAAkC;AAAA,IACpD;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,OAAc,WAAW,QAAwB;AAC/C,QAAI;AACF,YAAM,aAAa,UAAU,MAAM;AACnC,UAAI,CAAC,YAAY;AACf,cAAM,IAAI,MAAM,wCAAwC;AAAA,MAC1D;AACA,YAAM,OAAO,IAAI,IAAI,YAAY,mBAAmB,IAAI,EAAE,SAAS,EAAE;AACrE,aAAO;AAAA,IACT,SAAS,OAAO;AACd,UAAI,iBAAiB,SAAS,MAAM,QAAQ,SAAS,UAAU,GAAG;AAChE,cAAM,IAAI,MAAM,wCAAwC;AAAA,MAC1D;AACA,YAAM;AAAA,IACR;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAc,UACZ,MACA,MACiC;AACjC,SAAK,WAAA;AACL,WAAO,KAAK,GAAG,KAAK,QAAQ,SAAS,EAClC,OAAO,KAAK,EACZ,MAAM,EAAE,MAAM,KAAK,KAAA,CAAM,EACzB,MAAA;AAAA,EACL;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;A
AAA,EAQA,MAAa,MAAM,MAAkB,QAAkC;AACrE,QAAI,CAAC,KAAK,eAAe,QAAQ;AAC/B,UAAI;AACF,cAAM,OAAO,mBAAmB,WAAW,MAAM;AACjD,cAAM,0BAAU,KAAA;AAGhB,cAAM,WAAW,MAAM,KAAK,GAAG,KAAK,QAAQ,SAAS,EAClD,MAAM;AAAA,UACL;AAAA,UACA,KAAK;AAAA,QAAA,CACN,EACA,MAAA;AAEH,YAAI,UAAU;AACZ,iBAAO;AAAA,QACT,OAAO;AAEL,gBAAM,SAAS,MAAM,KAAK,GAAG,KAAK,QAAQ,SAAS,EAChD,OAAO;AAAA,YACN;AAAA,YACA,KAAK;AAAA,YACL,YAAY;AAAA,UAAA,CACb,EACA,UAAU,IAAI;AACjB,iBAAO,OAAO,SAAS;AAAA,QACzB;AAAA,MACF,SAAS,OAAO;AACd,eAAO,MAAM,yBAAyB,KAAK;AAC3C,cAAM;AAAA,MACR;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAM,WAAW,MAAkB,QAAkC;AACnE,SAAK,WAAA;AACL,UAAM,UAAU,mBAAmB,WAAW,MAAM;AACpD,UAAM,WAAW,MAAM,KAAK,UAAU,MAAM,OAAO;AACnD,WAAO,CAAC;AAAA,EACV;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAM,OAAO,MAAkB,QAAkC;AAC/D,SAAK,WAAA;AACL,UAAM,aAAa,MAAM,KAAK,WAAW,MAAM,MAAM;AACrD,QAAI,YAAY;AACd,YAAM,KAAK,MAAM,MAAM,MAAiC;AAAA,IAC1D;AACA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,uBAAsC;AAC1C,SAAK,WAAA;AACL,UAAM,MAAM,KAAK,QAAQ;AACzB,QAAI,CAAC,IAAK;AAEV,UAAM,kBAAkB,MAAM;AAC9B,UAAM,sBAAsB,IAAI,KAAK,KAAK,IAAA,IAAQ,eAAe;AACjE,UAAM,kBAAkB,oBACrB,YAAA,EACA,MAAM,GAAG,EAAE,EACX,QAAQ,KAAK,GAAG;AAEnB,UAAM,KAAK,GAAG,KAAK,QAAQ,SAAS,EACjC,MAAM,cAAc,KAAK,eAAe,EACxC,IAAA;AAAA,EACL;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,iBAAiB,MAAqC;AAC1D,SAAK,WAAA;AACL,UAAM,UAAU,MAAM,KAAK,GAAG,KAAK,QAAQ,SAAS,EACjD,OAAO,KAAK,EACZ,MAAM,EAAE,MAAM;AAEjB,WAAO,QAAQ,IAAI,CAAC,WAAW,OAAO,GAAG;AAAA,EAC3C;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,oBAAoB,MAAiC;AACzD,SAAK,WAAA;AACL,UAAM,KAAK,GAAG,KAAK,QAAQ,SAAS,EAAE,MAAM,EAAE,MAAM,EAAE,IAAA;AAAA,EACxD;AAAA;AAAA;AAAA;AAAA,EAKA,MAAa,aAAa,MAAkB,QAA+B;AACzE,QAAI,CAAC,KAAK,aAAa;AACrB,UAAI;AACF,cAAM,OAAO,mBAAmB,WAAW,MAAM;AACjD,cAAM,KAAK,GAAG,KAAK,QAAQ,SAAS,EACjC,MAAM,EAAE,MAAM,KAAK,KAAA,CAAM,EACzB,IAAA;AAAA,MACL,SAAS,OAAO;AACd,eAAO,MAAM,0BAA0B,KAAK;AAC5C,cAAM;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,QAAuB;AAC3B,SAAK,WAAA;AACL,UAAM,KAAK,GAAG,KAAK,QAAQ,SAAS,EAAE,IAAA;AAAA,EACxC;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,UAAyB;AAC7B,QAAI,CAAC,KAAK,aAAa;AACrB,YAAM,KAAK,GAAG,QAAA;AACd,WAAK,cAAc;AAAA,IACrB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,aAAoC;AACxC,SAAK,WAAA;AAGL,UAAM,SAAU,MAAM,KAAK,GAAG,KAAK,QAAQ,SAAS,EACjD,OAAO,MAAM,EACb,MAAM,YAAY,EAClB,QAAQ,MAAM;AAGjB,SAAK,QAAQ,eAAe,OAAO;AAAA,MACjC,CAAC,KAAK,QAAQ;AACZ,YAAI,IAAI,IAAI,IAAI,SAAS,IAAI,OAAO,EAAE;AACtC,eAAO;AAAA,MACT;AAAA,MACA,CAAA;AAAA,IAAC;AAGH,WAAO,KAAK;AAAA,EACd;AACF;"}
package/dist/{index-DCgheVjV.js → index-DTtmv8Iq.js}
CHANGED

@@ -52,6 +52,15 @@ const setTimezoneOffsetInCache = (offset) => {
   data.timezoneOffset = offset;
   writeStorage(data);
 };
+const getCachedTimezoneName = () => {
+  const data = readStorage();
+  return data.timezoneName ?? null;
+};
+const setTimezoneNameInCache = (timezone) => {
+  const data = readStorage();
+  data.timezoneName = timezone;
+  writeStorage(data);
+};
 const getCachedMMToken = () => {
   const data = readStorage();
   return data.mmApiToken ?? null;
@@ -172,10 +181,12 @@ class SQLiteCoordinator {
 export {
   SQLiteCoordinator as S,
   setTimezoneOffsetInCache as a,
-
-
-
+  setTimezoneNameInCache as b,
+  getCachedTimezoneOffset as c,
+  getInitialLoadComplete as d,
+  setInitialLoadComplete as e,
+  getCachedTimezoneName as f,
   getCachedMMToken as g,
   setCachedMMToken as s
 };
-//# sourceMappingURL=index-
+//# sourceMappingURL=index-DTtmv8Iq.js.map

package/dist/index-DTtmv8Iq.js.map
ADDED

@@ -0,0 +1 @@
+
{"version":3,"file":"index-DTtmv8Iq.js","sources":["../src/utils/local-data-store/jobs-shared-data.ts","../src/utils/local-data-store/database-lock.ts","../src/services/sqlite-service/sqlite-coordinator.ts"],"sourcesContent":["import fs from \"fs\";\nimport path from \"path\";\nimport { mkdirSync } from \"fs\";\n\n/**\n * This file contains the logic for storing and retrieving data from the job state file.\n * It is used to store data that is shared between jobs, and (more importantly) across job instances.\n */\n\nconst STORAGE_FILE = path.join(\"/tmp\", \"job-state.json\");\n\n// Ensure parent directory exists\nconst parentDir = path.dirname(STORAGE_FILE);\ntry {\n mkdirSync(parentDir, { recursive: true });\n} catch (error) {\n if ((error as NodeJS.ErrnoException).code !== \"EEXIST\") {\n throw error;\n }\n}\n\n//#region Non-exported functions\nconst ensureStorageFile = () => {\n if (!fs.existsSync(STORAGE_FILE)) {\n fs.writeFileSync(STORAGE_FILE, JSON.stringify({}), \"utf-8\");\n }\n};\n\nconst readStorage = (): Record<string, unknown> => {\n ensureStorageFile();\n try {\n return JSON.parse(fs.readFileSync(STORAGE_FILE, \"utf-8\"));\n } catch (error) {\n console.error(`Failed to read storage from ${STORAGE_FILE}:`, error);\n return {};\n }\n};\n\nconst writeStorage = (data: Record<string, unknown>): void => {\n ensureStorageFile();\n fs.writeFileSync(STORAGE_FILE, JSON.stringify(data, null, 2), \"utf-8\");\n};\n//#endregion\n\n//#region Database lock storage functions\n/**\n * Reads the database lock state from the shared storage file\n * @returns The data stored in the file\n */\nexport const readDatabaseLockState = (): Record<string, unknown> => {\n return readStorage();\n};\n\n/**\n * Writes the database lock state to the shared storage file\n * @param data The lock state data to write\n */\nexport const writeDatabaseLockState = (data: Record<string, unknown>): void => {\n writeStorage(data);\n};\n//#endregion\n\nexport const getInitialLoadComplete = (): boolean => {\n const data = readStorage();\n return (data.initialLoadComplete as boolean) ?? false;\n};\n\nexport const setInitialLoadComplete = (complete: boolean): void => {\n const data = readStorage();\n data.initialLoadComplete = complete;\n writeStorage(data);\n};\n\n/**\n * Gets the company's cached current timezone offset (e.g., -5)\n * @returns The cached timezone offset or 0 if not found\n */\nexport const getCachedTimezoneOffset = (): number => {\n const data = readStorage();\n return (data.timezoneOffset as number) ?? 0;\n};\n\n/**\n * Sets the company's current timezone offset in the cache\n * @param offset The timezone offset in hours\n */\nexport const setTimezoneOffsetInCache = (offset: number): void => {\n const data = readStorage();\n data.timezoneOffset = offset;\n writeStorage(data);\n};\n\n/**\n * Gets the cached timezone name (e.g., \"America/New_York\")\n * @returns The cached timezone name or null if not found\n */\nexport const getCachedTimezoneName = (): string | null => {\n const data = readStorage();\n return (data.timezoneName as string) ?? 
null;\n};\n\n/**\n * Sets the timezone name in the cache\n * @param timezone The timezone name (e.g., \"America/New_York\")\n */\nexport const setTimezoneNameInCache = (timezone: string): void => {\n const data = readStorage();\n data.timezoneName = timezone;\n writeStorage(data);\n};\n\ninterface CachedToken {\n token: string;\n expiration: number | null;\n}\n\n/**\n * Gets the cached MM API token and its expiration\n * @returns The cached token and expiration or null if not found\n */\nexport const getCachedMMToken = (): CachedToken | null => {\n const data = readStorage();\n return (data.mmApiToken as CachedToken) ?? null;\n};\n\n/**\n * Sets the MM API token and its expiration in the cache\n * @param tokenData The token and expiration to cache\n */\nexport const setCachedMMToken = (tokenData: CachedToken): void => {\n const data = readStorage();\n data.mmApiToken = tokenData;\n writeStorage(data);\n};\n","import {\n readDatabaseLockState,\n writeDatabaseLockState,\n} from \"./jobs-shared-data\";\n\ninterface DatabaseLock {\n isLocked: boolean;\n lockedBy: string;\n lockedAt: string | null;\n}\n\n/**\n * Gets the current database lock state\n * @returns The current database lock state\n */\nexport const getDatabaseLock = (): DatabaseLock => {\n const data = readDatabaseLockState();\n return (\n (data.databaseLock as DatabaseLock) ?? {\n isLocked: false,\n lockedBy: \"\",\n lockedAt: null,\n }\n );\n};\n\n/**\n * Attempts to acquire the database lock\n * @param processName Name of the process requesting the lock\n * @returns true if lock was acquired, false if database is already locked\n */\nexport const acquireDatabaseLock = (processName: string): boolean => {\n const data = readDatabaseLockState();\n const currentLock = (data.databaseLock as DatabaseLock) ?? {\n isLocked: false,\n lockedBy: \"\",\n lockedAt: null,\n };\n\n if (currentLock.isLocked) {\n return false;\n }\n\n data.databaseLock = {\n isLocked: true,\n lockedBy: processName,\n lockedAt: new Date().toISOString(),\n };\n writeDatabaseLockState(data);\n return true;\n};\n\n/**\n * Releases the database lock\n * @param processName Name of the process releasing the lock\n * @returns true if lock was released, false if process doesn't own the lock\n */\nexport const releaseDatabaseLock = (processName: string): boolean => {\n const data = readDatabaseLockState();\n const currentLock = (data.databaseLock as DatabaseLock) ?? 
{\n isLocked: false,\n lockedBy: \"\",\n lockedAt: null,\n };\n\n if (!currentLock.isLocked || currentLock.lockedBy !== processName) {\n return false;\n }\n\n data.databaseLock = {\n isLocked: false,\n lockedBy: \"\",\n lockedAt: null,\n };\n writeDatabaseLockState(data);\n return true;\n};\n\n/**\n * Checks if the database is available for use\n * @returns true if database is available, false if locked\n */\nexport const isDatabaseAvailable = (): boolean => {\n const lock = getDatabaseLock();\n return !lock.isLocked;\n};\n","import {\n acquireDatabaseLock,\n releaseDatabaseLock,\n getDatabaseLock,\n} from \"../../utils/local-data-store/database-lock\";\nimport { logger } from \"../reporting-service\";\n\nexport class SQLiteCoordinator {\n private static readonly LOCK_TIMEOUT_MS = 30_000; // 30 seconds\n private static readonly LOCK_RETRY_INTERVAL_MS = 1_000; // 1 second\n\n /**\n * Performs startup checks to ensure no stale locks exist\n * Should be called when the application starts\n */\n static async performStartupCheck(): Promise<void> {\n const currentLock = getDatabaseLock();\n\n if (currentLock.isLocked) {\n logger.warn(\n `Found existing lock held by ${currentLock.lockedBy}, releasing for clean startup`\n );\n releaseDatabaseLock(currentLock.lockedBy);\n }\n }\n\n /**\n * Attempts to acquire the database lock\n * @param processName Name of the process requesting the lock\n * @returns true if lock was acquired, false if database is already locked\n */\n private static async tryAcquireLock(processName: string): Promise<boolean> {\n return acquireDatabaseLock(processName);\n }\n\n /**\n * Executes a database operation with proper locking\n * @param processName Name of the process executing the operation\n * @param operation The operation to execute\n * @returns The result of the operation\n */\n static async executeWithLock<T>(\n processName: string,\n operation: () => Promise<T>\n ): Promise<T> {\n const startTime = Date.now();\n\n // Try to acquire the lock with timeout\n while (Date.now() - startTime < this.LOCK_TIMEOUT_MS) {\n if (await this.tryAcquireLock(processName)) {\n try {\n // Execute the operation\n const result = await operation();\n return result;\n } finally {\n // Always release the lock\n releaseDatabaseLock(processName);\n }\n }\n\n // Wait before retrying\n await new Promise((resolve) =>\n setTimeout(resolve, this.LOCK_RETRY_INTERVAL_MS)\n );\n }\n\n throw new Error(\n `Failed to acquire database lock after ${this.LOCK_TIMEOUT_MS}ms`\n );\n }\n\n /**\n * Checks if the database is currently available for operations\n * @returns true if the database is available, false if locked\n */\n static isAvailable(): boolean {\n const lock = getDatabaseLock();\n return !lock.isLocked;\n 
}\n}\n"],"names":[],"mappings":";;;AASA,MAAM,eAAe,KAAK,KAAK,QAAQ,gBAAgB;AAGvD,MAAM,YAAY,KAAK,QAAQ,YAAY;AAC3C,IAAI;AACF,YAAU,WAAW,EAAE,WAAW,KAAA,CAAM;AAC1C,SAAS,OAAO;AACd,MAAK,MAAgC,SAAS,UAAU;AACtD,UAAM;AAAA,EACR;AACF;AAGA,MAAM,oBAAoB,MAAM;AAC9B,MAAI,CAAC,GAAG,WAAW,YAAY,GAAG;AAChC,OAAG,cAAc,cAAc,KAAK,UAAU,CAAA,CAAE,GAAG,OAAO;AAAA,EAC5D;AACF;AAEA,MAAM,cAAc,MAA+B;AACjD,oBAAA;AACA,MAAI;AACF,WAAO,KAAK,MAAM,GAAG,aAAa,cAAc,OAAO,CAAC;AAAA,EAC1D,SAAS,OAAO;AACd,YAAQ,MAAM,+BAA+B,YAAY,KAAK,KAAK;AACnE,WAAO,CAAA;AAAA,EACT;AACF;AAEA,MAAM,eAAe,CAAC,SAAwC;AAC5D,oBAAA;AACA,KAAG,cAAc,cAAc,KAAK,UAAU,MAAM,MAAM,CAAC,GAAG,OAAO;AACvE;AAQO,MAAM,wBAAwB,MAA+B;AAClE,SAAO,YAAA;AACT;AAMO,MAAM,yBAAyB,CAAC,SAAwC;AAC7E,eAAa,IAAI;AACnB;AAGO,MAAM,yBAAyB,MAAe;AACnD,QAAM,OAAO,YAAA;AACb,SAAQ,KAAK,uBAAmC;AAClD;AAEO,MAAM,yBAAyB,CAAC,aAA4B;AACjE,QAAM,OAAO,YAAA;AACb,OAAK,sBAAsB;AAC3B,eAAa,IAAI;AACnB;AAMO,MAAM,0BAA0B,MAAc;AACnD,QAAM,OAAO,YAAA;AACb,SAAQ,KAAK,kBAA6B;AAC5C;AAMO,MAAM,2BAA2B,CAAC,WAAyB;AAChE,QAAM,OAAO,YAAA;AACb,OAAK,iBAAiB;AACtB,eAAa,IAAI;AACnB;AAMO,MAAM,wBAAwB,MAAqB;AACxD,QAAM,OAAO,YAAA;AACb,SAAQ,KAAK,gBAA2B;AAC1C;AAMO,MAAM,yBAAyB,CAAC,aAA2B;AAChE,QAAM,OAAO,YAAA;AACb,OAAK,eAAe;AACpB,eAAa,IAAI;AACnB;AAWO,MAAM,mBAAmB,MAA0B;AACxD,QAAM,OAAO,YAAA;AACb,SAAQ,KAAK,cAA8B;AAC7C;AAMO,MAAM,mBAAmB,CAAC,cAAiC;AAChE,QAAM,OAAO,YAAA;AACb,OAAK,aAAa;AAClB,eAAa,IAAI;AACnB;ACtHO,MAAM,kBAAkB,MAAoB;AACjD,QAAM,OAAO,sBAAA;AACb,SACG,KAAK,gBAAiC;AAAA,IACrC,UAAU;AAAA,IACV,UAAU;AAAA,IACV,UAAU;AAAA,EAAA;AAGhB;AAOO,MAAM,sBAAsB,CAAC,gBAAiC;AACnE,QAAM,OAAO,sBAAA;AACb,QAAM,cAAe,KAAK,gBAAiC;AAAA,IACzD,UAAU;AAAA,IACV,UAAU;AAAA,IACV,UAAU;AAAA,EAAA;AAGZ,MAAI,YAAY,UAAU;AACxB,WAAO;AAAA,EACT;AAEA,OAAK,eAAe;AAAA,IAClB,UAAU;AAAA,IACV,UAAU;AAAA,IACV,WAAU,oBAAI,KAAA,GAAO,YAAA;AAAA,EAAY;AAEnC,yBAAuB,IAAI;AAC3B,SAAO;AACT;AAOO,MAAM,sBAAsB,CAAC,gBAAiC;AACnE,QAAM,OAAO,sBAAA;AACb,QAAM,cAAe,KAAK,gBAAiC;AAAA,IACzD,UAAU;AAAA,IACV,UAAU;AAAA,IACV,UAAU;AAAA,EAAA;AAGZ,MAAI,CAAC,YAAY,YAAY,YAAY,aAAa,aAAa;AACjE,WAAO;AAAA,EACT;AAEA,OAAK,eAAe;AAAA,IAClB,UAAU;AAAA,IACV,UAAU;AAAA,IACV,UAAU;AAAA,EAAA;AAEZ,yBAAuB,IAAI;AAC3B,SAAO;AACT;AAMO,MAAM,sBAAsB,MAAe;AAChD,QAAM,OAAO,gBAAA;AACb,SAAO,CAAC,KAAK;AACf;AC9EO,MAAM,kBAAkB;AAAA,EAC7B,OAAwB,kBAAkB;AAAA;AAAA,EAC1C,OAAwB,yBAAyB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAMjD,aAAa,sBAAqC;AAChD,UAAM,cAAc,gBAAA;AAEpB,QAAI,YAAY,UAAU;AACxB,aAAO;AAAA,QACL,+BAA+B,YAAY,QAAQ;AAAA,MAAA;AAErD,0BAAoB,YAAY,QAAQ;AAAA,IAC1C;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,aAAqB,eAAe,aAAuC;AACzE,WAAO,oBAAoB,WAAW;AAAA,EACxC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,aAAa,gBACX,aACA,WACY;AACZ,UAAM,YAAY,KAAK,IAAA;AAGvB,WAAO,KAAK,IAAA,IAAQ,YAAY,KAAK,iBAAiB;AACpD,UAAI,MAAM,KAAK,eAAe,WAAW,GAAG;AAC1C,YAAI;AAEF,gBAAM,SAAS,MAAM,UAAA;AACrB,iBAAO;AAAA,QACT,UAAA;AAEE,8BAAoB,WAAW;AAAA,QACjC;AAAA,MACF;AAGA,YAAM,IAAI;AAAA,QAAQ,CAAC,YACjB,WAAW,SAAS,KAAK,sBAAsB;AAAA,MAAA;AAAA,IAEnD;AAEA,UAAM,IAAI;AAAA,MACR,yCAAyC,KAAK,eAAe;AAAA,IAAA;AAAA,EAEjE;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,OAAO,cAAuB;AAC5B,UAAM,OAAO,gBAAA;AACb,WAAO,CAAC,KAAK;AAAA,EACf;AACF;"}
package/dist/index.d.ts
CHANGED

@@ -3,7 +3,7 @@
  * This file exports the public API that client code should use.
  */
 export type { IERPConnector } from "./types/erp-connector";
-export {
+export { ERPObjType } from "./types/erp-types";
 export type { ERPPagination, ERPResponse, ERPObject } from "./types/erp-types";
 export { ErpApiConnectionParams } from "./services/data-sync-service/configuration-manager";
 export { getErpApiConnectionParams } from "./services/data-sync-service/configuration-manager";
@@ -18,15 +18,15 @@ export { getUniqueRows, removeExtraneousFields, getPayloadWithoutIDField, trimOb
 export { getInitialLoadComplete, setInitialLoadComplete } from "./utils";
 export { HTTPClientFactory } from "./utils/http-client";
 export { HTTPError } from "./utils/http-client";
-export type { HTTPResponse, HTTPClient, HTTPRequestConfig } from "./utils/http-client";
+export type { HTTPResponse, HTTPClient, HTTPRequestConfig, } from "./utils/http-client";
 export { ApplicationInitializer } from "./utils/application-initializer";
 export { runDataSyncService } from "./services/data-sync-service";
 export { logger } from "./services/reporting-service";
 export type { IERPLaborTicketHandler } from "./types/erp-connector";
-export { StandardProcessDrivers, getCachedTimezoneOffset } from "./utils";
+export { StandardProcessDrivers, getCachedTimezoneOffset, getCachedTimezoneName, } from "./utils";
 export type { WriteEntitiesToMMResult } from "./utils";
 export { MMBatchValidationError } from "./utils";
-export type { APIResponse } from
+export type { APIResponse } from "./services/erp-api-services/types";
 export { RestAPIService } from "./services/erp-api-services/rest/rest-api-service";
 export { ErrorHandler, GraphQLError } from "./services/erp-api-services/errors";
 export type { QueryParams, PaginatedAPIResponse, } from "./services/erp-api-services/rest/get-query-params";
@@ -38,8 +38,10 @@ export { BatchCacheManager } from "./services/caching-service/batch-cache-manage
 export { CoreConfiguration, getSQLServerConfiguration, } from "./services/data-sync-service/configuration-manager";
 export { SqlServerService, SqlServerHelper, } from "./services/sql-server-erp-service";
 export type { SQLInput } from "./services/sql-server-erp-service";
+export { PsqlService, formatPsqlDate, formatPsqlTime, combinePsqlDateTime, isPsqlDateEmpty, cleanPsqlCharField, } from "./services/psql-erp-service";
+export type { PsqlConfiguration } from "./services/psql-erp-service";
 export { RecordTrackingManager } from "./services/caching-service/record-tracking-manager";
 export type { RecordTrackingObject } from "./services/caching-service/record-tracking-manager";
 export { default as knexDatabaseConfig } from "./knexfile";
-export { MMConnectorLogger, FileLogDeduper, LogEntry, type LogLevelString, type LogResponse } from
+export { MMConnectorLogger, FileLogDeduper, LogEntry, type LogLevelString, type LogResponse, } from "./utils";
 //# sourceMappingURL=index.d.ts.map
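The PSQL helpers newly re-exported from the package root are small pure functions, so their behavior can be shown directly. A sketch using the exported names; the sample values are invented, and note that `formatPsqlDate` round-trips through `toISOString()`, so on a machine running ahead of UTC the returned calendar day can shift:

```typescript
import {
  formatPsqlTime,
  combinePsqlDateTime,
  isPsqlDateEmpty,
  cleanPsqlCharField,
} from "mm-erp-sdk";

// PSQL stores dates as 6-digit YYMMDD strings and times as 4-digit HHMM strings.
console.log(formatPsqlTime("1430"));                // "14:30:00"
console.log(formatPsqlTime("9999"));                // null (hours/minutes out of range)
console.log(isPsqlDateEmpty("000000"));             // true
console.log(cleanPsqlCharField("  JOB-100  "));     // "JOB-100" (CHAR padding trimmed)
console.log(combinePsqlDateTime("250131", "1430")); // e.g. "2025-01-31T14:30:00"
```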
package/dist/index.d.ts.map
CHANGED

@@ -1 +1 @@
-
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAGH,YAAY,EAAE,aAAa,EAAE,MAAM,uBAAuB,CAAC;AAC3D,OAAO,EAAE,
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAGH,YAAY,EAAE,aAAa,EAAE,MAAM,uBAAuB,CAAC;AAC3D,OAAO,EAAE,UAAU,EAAE,MAAM,mBAAmB,CAAC;AAC/C,YAAY,EAAE,aAAa,EAAE,WAAW,EAAE,SAAS,EAAE,MAAM,mBAAmB,CAAC;AAG/E,OAAO,EAAE,sBAAsB,EAAE,MAAM,oDAAoD,CAAC;AAC5F,OAAO,EAAE,yBAAyB,EAAE,MAAM,oDAAoD,CAAC;AAC/F,OAAO,EAAE,qBAAqB,EAAE,MAAM,mDAAmD,CAAC;AAC1F,YAAY,EAAE,YAAY,EAAE,MAAM,mCAAmC,CAAC;AAGtE,OAAO,EAAE,WAAW,EAAE,MAAM,2BAA2B,CAAC;AACxD,OAAO,EAAE,oBAAoB,EAAE,MAAM,2BAA2B,CAAC;AACjE,YAAY,EACV,0BAA0B,EAC1B,sCAAsC,EACtC,gBAAgB,GACjB,MAAM,2BAA2B,CAAC;AAEnC,OAAO,EACL,YAAY,EACZ,cAAc,EACd,UAAU,EACV,mBAAmB,EACnB,eAAe,EACf,wBAAwB,EACxB,YAAY,EACZ,iBAAiB,GAClB,MAAM,2BAA2B,CAAC;AAEnC,YAAY,EAAE,oBAAoB,EAAE,MAAM,2BAA2B,CAAC;AAGtE,OAAO,EACL,aAAa,EACb,sBAAsB,EACtB,wBAAwB,EACxB,gBAAgB,EAChB,cAAc,EACd,6BAA6B,EAC7B,0BAA0B,EAC1B,sBAAsB,EACtB,4BAA4B,EAC5B,kBAAkB,EAClB,YAAY,GACb,MAAM,SAAS,CAAC;AAGjB,OAAO,EAAE,sBAAsB,EAAE,sBAAsB,EAAE,MAAM,SAAS,CAAC;AAGzE,OAAO,EAAE,iBAAiB,EAAE,MAAM,qBAAqB,CAAC;AACxD,OAAO,EAAE,SAAS,EAAE,MAAM,qBAAqB,CAAC;AAChD,YAAY,EACV,YAAY,EACZ,UAAU,EACV,iBAAiB,GAClB,MAAM,qBAAqB,CAAC;AAG7B,OAAO,EAAE,sBAAsB,EAAE,MAAM,iCAAiC,CAAC;AAGzE,OAAO,EAAE,kBAAkB,EAAE,MAAM,8BAA8B,CAAC;AAGlE,OAAO,EAAE,MAAM,EAAE,MAAM,8BAA8B,CAAC;AAGtD,YAAY,EAAE,sBAAsB,EAAE,MAAM,uBAAuB,CAAC;AAGpE,OAAO,EACL,sBAAsB,EACtB,uBAAuB,EACvB,qBAAqB,GACtB,MAAM,SAAS,CAAC;AACjB,YAAY,EAAE,uBAAuB,EAAE,MAAM,SAAS,CAAC;AACvD,OAAO,EAAE,sBAAsB,EAAE,MAAM,SAAS,CAAC;AAGjD,YAAY,EAAE,WAAW,EAAE,MAAM,mCAAmC,CAAC;AACrE,OAAO,EAAE,cAAc,EAAE,MAAM,mDAAmD,CAAC;AACnF,OAAO,EAAE,YAAY,EAAE,YAAY,EAAE,MAAM,oCAAoC,CAAC;AAChF,YAAY,EACV,WAAW,EACX,oBAAoB,GACrB,MAAM,mDAAmD,CAAC;AAC3D,OAAO,EAAE,WAAW,EAAE,MAAM,0CAA0C,CAAC;AACvE,YAAY,EAAE,WAAW,EAAE,MAAM,0CAA0C,CAAC;AAC5E,OAAO,EAAE,cAAc,EAAE,MAAM,qDAAqD,CAAC;AACrF,YAAY,EAAE,kBAAkB,EAAE,MAAM,2CAA2C,CAAC;AAGpF,OAAO,EAAE,iBAAiB,EAAE,MAAM,gDAAgD,CAAC;AAGnF,OAAO,EACL,iBAAiB,EACjB,yBAAyB,GAC1B,MAAM,oDAAoD,CAAC;AAG5D,OAAO,EACL,gBAAgB,EAChB,eAAe,GAChB,MAAM,mCAAmC,CAAC;AAC3C,YAAY,EAAE,QAAQ,EAAE,MAAM,mCAAmC,CAAC;AAGlE,OAAO,EACL,WAAW,EACX,cAAc,EACd,cAAc,EACd,mBAAmB,EACnB,eAAe,EACf,kBAAkB,GACnB,MAAM,6BAA6B,CAAC;AACrC,YAAY,EAAE,iBAAiB,EAAE,MAAM,6BAA6B,CAAC;AAGrE,OAAO,EAAE,qBAAqB,EAAE,MAAM,oDAAoD,CAAC;AAC3F,YAAY,EAAE,oBAAoB,EAAE,MAAM,oDAAoD,CAAC;AAG/F,OAAO,EAAE,OAAO,IAAI,kBAAkB,EAAE,MAAM,YAAY,CAAC;AAG3D,OAAO,EACL,iBAAiB,EACjB,cAAc,EACd,QAAQ,EACR,KAAK,cAAc,EACnB,KAAK,WAAW,GACjB,MAAM,SAAS,CAAC"}

package/dist/mm-erp-sdk.js
CHANGED

@@ -1,8 +1,8 @@
-import { C as CoreConfiguration, H as HashedCacheManager } from "./hashed-cache-manager-
-import { E, g, a } from "./hashed-cache-manager-
+import { C as CoreConfiguration, H as HashedCacheManager } from "./hashed-cache-manager-Ds-HksA0.js";
+import { E, g, a } from "./hashed-cache-manager-Ds-HksA0.js";
 import { l as logger } from "./logger-CBDNtsMq.js";
-import { g as getCachedMMToken, s as setCachedMMToken, a as setTimezoneOffsetInCache, b as getCachedTimezoneOffset, S as SQLiteCoordinator } from "./index-
-import {
+import { g as getCachedMMToken, s as setCachedMMToken, a as setTimezoneOffsetInCache, b as setTimezoneNameInCache, c as getCachedTimezoneOffset, S as SQLiteCoordinator } from "./index-DTtmv8Iq.js";
+import { f, d, e } from "./index-DTtmv8Iq.js";
 import axios, { AxiosError } from "axios";
 import knex from "knex";
 import { c as config } from "./knexfile-1qKKIORB.js";
@@ -14,16 +14,6 @@ import Graceful from "@ladjs/graceful";
 import { fileURLToPath } from "url";
 import sql from "mssql";
 import { z } from "zod";
-var ERPType = /* @__PURE__ */ ((ERPType2) => {
-  ERPType2["INVALID"] = "INVALID";
-  ERPType2["D365"] = "D365";
-  ERPType2["JOB_BOSS"] = "JOB_BOSS";
-  ERPType2["PROFITKEY"] = "PROFITKEY";
-  ERPType2["PROSHOP"] = "PROSHOP";
-  ERPType2["SYTELINE"] = "SYTELINE";
-  ERPType2["TEMPLATE"] = "TEMPLATE";
-  return ERPType2;
-})(ERPType || {});
 var ERPObjType = /* @__PURE__ */ ((ERPObjType2) => {
   ERPObjType2[ERPObjType2["RESOURCES"] = 0] = "RESOURCES";
   ERPObjType2[ERPObjType2["PARTS"] = 1] = "PARTS";
|
|
|
252
242
|
async destroy() {
|
|
253
243
|
if (this.isDestroyed) return;
|
|
254
244
|
this.isDestroyed = true;
|
|
255
|
-
for (const
|
|
245
|
+
for (const c of this.inFlightControllers) {
|
|
256
246
|
try {
|
|
257
|
-
|
|
247
|
+
c.abort();
|
|
258
248
|
} catch {
|
|
259
249
|
}
|
|
260
250
|
}
|
|
@@ -1435,12 +1425,14 @@ const getTimezoneOffset = async () => {
|
|
|
1435
1425
|
if (!userInfo?.company?.timezone) {
|
|
1436
1426
|
throw new Error("Unable to retrieve company timezone from API");
|
|
1437
1427
|
}
|
|
1428
|
+
const timezone = userInfo.company.timezone;
|
|
1438
1429
|
const date = /* @__PURE__ */ new Date();
|
|
1439
1430
|
const utcDate = new Date(date.toLocaleString("en-US", { timeZone: "UTC" }));
|
|
1440
1431
|
const tzDate = new Date(
|
|
1441
|
-
date.toLocaleString("en-US", { timeZone:
|
|
1432
|
+
date.toLocaleString("en-US", { timeZone: timezone })
|
|
1442
1433
|
);
|
|
1443
|
-
|
|
1434
|
+
const offset = (tzDate.getTime() - utcDate.getTime()) / 36e5;
|
|
1435
|
+
return { offset, timezone };
|
|
1444
1436
|
} catch (error) {
|
|
1445
1437
|
throw new Error(
|
|
1446
1438
|
`Failed to get timezone offset: ${error instanceof Error ? error.message : "Unknown error"}`
|
|
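`getTimezoneOffset()` now returns both the offset and the timezone name. The offset itself is derived by formatting the same instant in UTC and in the company timezone, then dividing the difference by 3,600,000 ms (the `36e5` in the minified output). A self-contained sketch of that calculation with an assumed zone in place of the company profile:

```typescript
// Whole-hour offset computed the same way as in the bundle above.
const timezone = "America/New_York"; // assumed example zone
const date = new Date();
const utcDate = new Date(date.toLocaleString("en-US", { timeZone: "UTC" }));
const tzDate = new Date(date.toLocaleString("en-US", { timeZone: timezone }));
const offset = (tzDate.getTime() - utcDate.getTime()) / 36e5;

console.log({ offset, timezone }); // e.g. { offset: -5, timezone: "America/New_York" } outside DST
```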
@@ -1527,13 +1519,14 @@ const getTimezoneOffsetAndPersist = async (params = {
   let success = false;
   let retries = 0;
   logger.info(
-    "Acquiring the timezone offset from MachineMetrics and storing in cache"
+    "Acquiring the timezone offset and timezone name from MachineMetrics and storing in cache"
   );
   while (!success && retries < params.maxRetries) {
     try {
-      const
-      logger.info(`Timezone offset: ${
-      setTimezoneOffsetInCache(
+      const { offset, timezone } = await getTimezoneOffset();
+      logger.info(`Timezone offset: ${offset} hours, timezone: ${timezone}`);
+      setTimezoneOffsetInCache(offset);
+      setTimezoneNameInCache(timezone);
       success = true;
     } catch (error) {
       logger.error("Error getting timezone offset:", error);
@@ -3449,7 +3442,7 @@ class ApplicationInitializer {
       "\n================================INITIALIZING APPLICATION================================\n"
     );
     const coreConfig = CoreConfiguration.inst();
-    logger.info("Core Configuration loaded:", coreConfig);
+    logger.info("Core Configuration loaded:", coreConfig.toSafeLogObject());
     logger.info("Performing database startup checks...");
     await SQLiteCoordinator.performStartupCheck();
     logger.info("Database startup checks completed successfully");
@@ -3982,12 +3975,240 @@ class SqlServerHelper {
     logger.info("----------------------------------------");
   }
 }
+class PsqlService {
+  config;
+  static odbcModule = null;
+  static odbcLoadError = null;
+  constructor(config2) {
+    this.config = config2;
+  }
+  /**
+   * Dynamically load the ODBC module with lazy initialization and caching
+   * @throws Error with helpful message if ODBC package is not installed
+   */
+  static async getOdbc() {
+    if (this.odbcLoadError) {
+      throw this.odbcLoadError;
+    }
+    if (this.odbcModule) {
+      return this.odbcModule;
+    }
+    try {
+      const odbcImport = await import("odbc");
+      const odbc = odbcImport.default || odbcImport;
+      this.odbcModule = odbc;
+      return this.odbcModule;
+    } catch (error) {
+      const errorMessage = error instanceof Error ? error.message : String(error);
+      this.odbcLoadError = new Error(
+        `ODBC package is required for PSQL service but is not installed or failed to load.
+Install it with: npm install odbc
+Also install OS-level dependencies, e.g. on Alpine Linux:
+apk add --no-cache unixodbc unixodbc-dev python3 make g++
+For other Linux distributions, install unixodbc and unixodbc-dev packages.
+Original error: ${errorMessage}`
+      );
+      throw this.odbcLoadError;
+    }
+  }
+  // REMOVED: dispose() method - not needed anymore
+  // REMOVED: connection property - not needed anymore
+  // REMOVED: openConnection() method - not needed anymore
+  // REMOVED: closeConnection() method - not needed anymore
+  /**
+   * Build PSQL ODBC connection string
+   * CRITICAL: ServerName must use IP.PORT format (e.g., 10.4.0.11.1583)
+   */
+  buildConnectionString() {
+    const serverName = `${this.config.host}.${this.config.port}`;
+    return [
+      "Driver={Pervasive ODBC Interface}",
+      `ServerName=${serverName}`,
+      `DBQ=${this.config.database}`,
+      `UID=${this.config.username}`,
+      `PWD=${this.config.password}`,
+      "AutoDoubleQuote=0"
+    ].join(";") + ";";
+  }
+  /**
+   * Execute a query and return the results
+   * Creates a fresh connection for each query to avoid handle corruption
+   *
+   * SECURITY WARNING: This method executes the provided SQL string as-is.
+   * - Parameter binding is NOT implemented; the `params` argument is currently ignored.
+   * - Never concatenate untrusted/user-supplied input into `query`.
+   * - Doing so can result in SQL injection vulnerabilities and data exposure.
+   * If dynamic values are required, ensure they are strictly validated/escaped
+   * or implement proper parameterized execution before accepting untrusted input.
+   *
+   * @param query The SQL query to execute
+   * @param params Query parameters (currently unused for PSQL read operations)
+   * @param paging Optional paging parameters
+   * @returns The entities fetched from the database, along with paging information
+   */
+  async executePreparedStatement(query, params = {}, paging) {
+    const odbc = await PsqlService.getOdbc();
+    let connection = null;
+    try {
+      const connStr = this.buildConnectionString();
+      logger.debug("Creating fresh PSQL connection for query");
+      connection = await odbc.connect(connStr);
+      if (Object.keys(params).length > 0) {
+        logger.warn(
+          "PsqlService: Query parameters provided but parameter binding not yet implemented. Using direct query execution."
+        );
+      }
+      const records = await connection.query(query);
+      const allRecords = PsqlService.recordsetToRecords(records);
+      const rowsFetched = allRecords.length;
+      const pagedData = paging?.offset !== void 0 || paging?.limit !== void 0 ? allRecords.slice(
+        paging.offset || 0,
+        (paging.offset || 0) + (paging.limit || allRecords.length)
+      ) : allRecords;
+      return {
+        data: pagedData,
+        paging: {
+          count: rowsFetched,
+          limit: paging?.limit || 0,
+          offset: paging?.offset || 0,
+          nextPage: paging?.limit && (paging.offset || 0) + paging.limit < rowsFetched ? String((paging.offset || 0) + paging.limit) : void 0,
+          previousPage: paging?.offset ? String(Math.max(0, (paging.offset || 0) - (paging.limit || 10))) : void 0
+        }
+      };
+    } catch (error) {
+      if (error instanceof Error && error.message.includes("ODBC package is required")) {
+        throw error;
+      }
+      const errorInfo = error;
+      logger.error("Error fetching data from PSQL", {
+        error: errorInfo.message,
+        odbcErrors: errorInfo.odbcErrors,
+        query: query.substring(0, 200)
+        // Log first 200 chars of query
+      });
+      throw this.handleOdbcError(errorInfo);
+    } finally {
+      if (connection) {
+        try {
+          await connection.close();
+          logger.debug("PSQL connection closed successfully");
+        } catch (err) {
+          logger.warn("Error closing PSQL connection (non-fatal)", {
+            error: err
+          });
+        }
+      }
+    }
+  }
+  /**
+   * Transform ODBC result set to array of Record<string, string> instances.
+   * IMPORTANT: PSQL CHAR fields are often padded with spaces - we trim them
+   */
+  static recordsetToRecords(recordset) {
+    if (!Array.isArray(recordset)) {
+      return [];
+    }
+    const data = recordset.map((row) => {
+      const transformedRow = {};
+      Object.keys(row).forEach((key) => {
+        const value = row[key];
+        transformedRow[key] = value !== null && value !== void 0 ? String(value).trim() : "";
+      });
+      return transformedRow;
+    });
+    return data;
+  }
+  /**
+   * Handle ODBC errors and provide meaningful messages
+   */
+  handleOdbcError(error) {
+    const odbcError = error.odbcErrors?.[0];
+    const errorCode = odbcError?.state;
+    const message = odbcError?.message || error.message;
+    switch (errorCode) {
+      case "08S01":
+        return new Error(
+          `PSQL connection failed. Check: 1) PVSW environment variable set to /usr/local/psql/etc/pvsw.ini, 2) Network connectivity to ports 1583/3351, 3) ODBC configuration files in /usr/local/psql/etc/ and /etc/. Original error: ${message}`
+        );
+      case "28000":
+        return new Error(
+          `PSQL authentication failed. Check username/password. Original error: ${message}`
+        );
+      case "42000":
+        return new Error(`PSQL SQL syntax error. Original error: ${message}`);
+      case "42S02":
+        return new Error(
+          `PSQL table or view not found. Check table names in query. Original error: ${message}`
+        );
+      default:
+        return new Error(`PSQL error (${errorCode || "unknown"}): ${message}`);
+    }
+  }
+}
+function formatPsqlDate(psqlDate) {
+  if (!psqlDate || psqlDate.trim().length !== 6 || psqlDate === "000000") {
+    return null;
+  }
+  try {
+    const year = parseInt(psqlDate.substring(0, 2), 10);
+    const month = parseInt(psqlDate.substring(2, 4), 10);
+    const day = parseInt(psqlDate.substring(4, 6), 10);
+    const fullYear = year + 2e3;
+    if (month < 1 || month > 12 || day < 1 || day > 31) {
+      return null;
+    }
+    const date = new Date(fullYear, month - 1, day);
+    if (date.getFullYear() !== fullYear || date.getMonth() !== month - 1 || date.getDate() !== day) {
+      return null;
+    }
+    return date.toISOString().split("T")[0];
+  } catch (error) {
+    return null;
+  }
+}
+function formatPsqlTime(psqlTime) {
+  if (!psqlTime) {
+    return null;
+  }
+  const trimmed = psqlTime.trim();
+  if (trimmed.length !== 4) {
+    return null;
+  }
+  try {
+    const hours = parseInt(trimmed.substring(0, 2), 10);
+    const minutes = parseInt(trimmed.substring(2, 4), 10);
+    if (isNaN(hours) || isNaN(minutes) || hours < 0 || hours > 23 || minutes < 0 || minutes > 59) {
+      return null;
+    }
+    const hoursStr = hours.toString().padStart(2, "0");
+    const minutesStr = minutes.toString().padStart(2, "0");
+    return `${hoursStr}:${minutesStr}:00`;
+  } catch (error) {
+    return null;
+  }
+}
+function combinePsqlDateTime(psqlDate, psqlTime) {
+  const date = formatPsqlDate(psqlDate);
+  const time = formatPsqlTime(psqlTime);
+  if (!date || !time) {
+    return null;
+  }
+  return `${date}T${time}`;
+}
+function isPsqlDateEmpty(psqlDate) {
+  return !psqlDate || psqlDate === "000000" || psqlDate.trim() === "";
+}
+function cleanPsqlCharField(value) {
+  if (value === null || value === void 0) {
+    return "";
+  }
+  return String(value).trim();
+}
 export {
   ApplicationInitializer,
   BatchCacheManager,
   CoreConfiguration,
   ERPObjType,
-  ERPType,
   E as ErpApiConnectionParams,
   ErrorHandler,
   FileLogDeduper,
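Putting the new service together: `executePreparedStatement()` opens a fresh ODBC connection per call, pages the result set in memory, and, per the security note in the code, performs no parameter binding. A hedged usage sketch; the `PsqlConfiguration` field names are inferred from `buildConnectionString()`, and the table, column, and environment-variable names are invented:

```typescript
import { PsqlService, combinePsqlDateTime } from "mm-erp-sdk";
import type { PsqlConfiguration } from "mm-erp-sdk";

// Assumed configuration shape; ServerName is built as "<host>.<port>".
const config: PsqlConfiguration = {
  host: "10.4.0.11",
  port: "1583",
  database: "ERPDATA",
  username: "mmuser",
  password: process.env.ERP_PSQL_PASSWORD ?? "", // hypothetical env var
};

const psql = new PsqlService(config);

// No parameter binding is available yet, so the query must be a fully trusted string.
const { data, paging } = await psql.executePreparedStatement(
  'SELECT "JobNo", "DueDate", "DueTime" FROM "Jobs"',
  {},
  { offset: 0, limit: 100 }
);

for (const row of data) {
  // CHAR padding is already trimmed by recordsetToRecords(); dates arrive as YYMMDD.
  console.log(row.JobNo, combinePsqlDateTime(row.DueDate, row.DueTime));
}
console.log(`fetched ${paging.count} rows before in-memory paging`);
```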
@@ -4009,6 +4230,7 @@ export {
   MMSendWorkOrder,
   MMSendWorkOrderOperation,
   OAuthClient,
+  PsqlService,
   RecordTrackingManager,
   RestAPIService,
   SqlServerHelper,
@@ -4018,21 +4240,27 @@ export {
   addNewFieldFromLookupField,
   applyTimezoneOffsetsToFields,
   buildLogicalCondition,
+  cleanPsqlCharField,
   cleanupNumbers,
+  combinePsqlDateTime,
   convertToLocalTime,
   formatDateWithTZOffset,
+  formatPsqlDate,
+  formatPsqlTime,
+  f as getCachedTimezoneName,
   getCachedTimezoneOffset,
   g as getErpApiConnectionParams,
   getErrorType,
-
+  d as getInitialLoadComplete,
   getPayloadWithoutIDField,
   a as getSQLServerConfiguration,
   getUniqueRows,
+  isPsqlDateEmpty,
   config as knexDatabaseConfig,
   logger,
   removeExtraneousFields,
   runDataSyncService,
-
+  e as setInitialLoadComplete,
   trimObjectValues
 };
 //# sourceMappingURL=mm-erp-sdk.js.map