@machinemetrics/mm-erp-sdk 0.1.8-beta.1 → 0.1.8-beta.10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/{config-CV-KosWV.js → config-cB7h4yvc.js} +2 -2
- package/dist/{config-CV-KosWV.js.map → config-cB7h4yvc.js.map} +1 -1
- package/dist/{connector-factory-D8v6aQIt.js → connector-factory-CKm74_WZ.js} +2 -2
- package/dist/{connector-factory-D8v6aQIt.js.map → connector-factory-CKm74_WZ.js.map} +1 -1
- package/dist/{hashed-cache-manager-B6hTDLxU.js → hashed-cache-manager-B1hPBNnF.js} +4 -4
- package/dist/{hashed-cache-manager-B6hTDLxU.js.map → hashed-cache-manager-B1hPBNnF.js.map} +1 -1
- package/dist/{index-Bg76oouR.js → index-DCgheVjV.js} +2 -2
- package/dist/{index-Bg76oouR.js.map → index-DCgheVjV.js.map} +1 -1
- package/dist/index.d.ts +1 -1
- package/dist/index.d.ts.map +1 -1
- package/dist/{logger-SqdNut1H.js → logger-CBDNtsMq.js} +969 -1024
- package/dist/logger-CBDNtsMq.js.map +1 -0
- package/dist/mm-erp-sdk.js +60 -53
- package/dist/mm-erp-sdk.js.map +1 -1
- package/dist/services/data-sync-service/jobs/clean-up-expired-cache.js +4 -4
- package/dist/services/data-sync-service/jobs/from-erp.js +4 -4
- package/dist/services/data-sync-service/jobs/retry-failed-labor-tickets.js +3 -3
- package/dist/services/data-sync-service/jobs/run-migrations.js +1 -1
- package/dist/services/data-sync-service/jobs/to-erp.js +3 -3
- package/dist/services/psql-erp-service/index.d.ts +0 -5
- package/dist/services/psql-erp-service/index.d.ts.map +1 -1
- package/dist/services/psql-erp-service/internal/types/psql-types.d.ts +0 -3
- package/dist/services/psql-erp-service/internal/types/psql-types.d.ts.map +1 -1
- package/dist/services/psql-erp-service/psql-service.d.ts +7 -0
- package/dist/services/psql-erp-service/psql-service.d.ts.map +1 -1
- package/dist/services/reporting-service/logger.d.ts.map +1 -1
- package/dist/utils/index.d.ts +1 -1
- package/dist/utils/index.d.ts.map +1 -1
- package/dist/utils/timezone.d.ts +7 -0
- package/dist/utils/timezone.d.ts.map +1 -1
- package/package.json +4 -3
- package/src/index.ts +0 -1
- package/src/services/psql-erp-service/index.ts +0 -6
- package/src/services/psql-erp-service/internal/types/psql-types.ts +0 -4
- package/src/services/psql-erp-service/psql-service.ts +65 -3
- package/src/services/reporting-service/logger.ts +57 -70
- package/src/utils/index.ts +1 -1
- package/src/utils/mm-labor-ticket-helpers.ts +2 -2
- package/src/utils/timezone.ts +28 -0
- package/dist/logger-SqdNut1H.js.map +0 -1
- package/dist/services/psql-erp-service/internal/psql-config.d.ts +0 -28
- package/dist/services/psql-erp-service/internal/psql-config.d.ts.map +0 -1
- package/dist/services/psql-erp-service/internal/psql-labor-ticket-operations.d.ts +0 -40
- package/dist/services/psql-erp-service/internal/psql-labor-ticket-operations.d.ts.map +0 -1
- package/src/services/psql-erp-service/internal/psql-config.ts +0 -13
- package/src/services/psql-erp-service/internal/psql-labor-ticket-operations.ts +0 -58
package/dist/services/data-sync-service/jobs/clean-up-expired-cache.js
CHANGED
@@ -1,7 +1,7 @@
-import "../../../config-CV-KosWV.js";
-import { H as HashedCacheManager } from "../../../hashed-cache-manager-B6hTDLxU.js";
-import { S as SQLiteCoordinator } from "../../../index-Bg76oouR.js";
-import { l as logger } from "../../../logger-SqdNut1H.js";
+import "../../../config-cB7h4yvc.js";
+import { H as HashedCacheManager } from "../../../hashed-cache-manager-B1hPBNnF.js";
+import { S as SQLiteCoordinator } from "../../../index-DCgheVjV.js";
+import { l as logger } from "../../../logger-CBDNtsMq.js";
 logger.level = process.env.LOG_LEVEL || "info";
 const main = async () => {
   const cacheManager = new HashedCacheManager();
package/dist/services/data-sync-service/jobs/from-erp.js
CHANGED
@@ -1,7 +1,7 @@
-import "../../../config-CV-KosWV.js";
-import { l as logger } from "../../../logger-SqdNut1H.js";
-import { S as SQLiteCoordinator } from "../../../index-Bg76oouR.js";
-import { c as createConnectorFromPath } from "../../../connector-factory-D8v6aQIt.js";
+import "../../../config-cB7h4yvc.js";
+import { l as logger } from "../../../logger-CBDNtsMq.js";
+import { S as SQLiteCoordinator } from "../../../index-DCgheVjV.js";
+import { c as createConnectorFromPath } from "../../../connector-factory-CKm74_WZ.js";
 logger.level = process.env.LOG_LEVEL || "info";
 const main = async () => {
   try {
package/dist/services/data-sync-service/jobs/retry-failed-labor-tickets.js
CHANGED
@@ -1,6 +1,6 @@
-import "../../../config-CV-KosWV.js";
-import { l as logger } from "../../../logger-SqdNut1H.js";
-import { c as createConnectorFromPath } from "../../../connector-factory-D8v6aQIt.js";
+import "../../../config-cB7h4yvc.js";
+import { l as logger } from "../../../logger-CBDNtsMq.js";
+import { c as createConnectorFromPath } from "../../../connector-factory-CKm74_WZ.js";
 logger.level = process.env.LOG_LEVEL || "info";
 const main = async () => {
   try {
package/dist/services/data-sync-service/jobs/run-migrations.js
CHANGED
@@ -1,5 +1,5 @@
 import knex from "knex";
-import { l as logger } from "../../../logger-SqdNut1H.js";
+import { l as logger } from "../../../logger-CBDNtsMq.js";
 import { c as config } from "../../../knexfile-1qKKIORB.js";
 logger.level = process.env.LOG_LEVEL || "info";
 const db = knex(config.local);
package/dist/services/data-sync-service/jobs/to-erp.js
CHANGED
@@ -1,6 +1,6 @@
-import "../../../config-CV-KosWV.js";
-import { l as logger } from "../../../logger-SqdNut1H.js";
-import { c as createConnectorFromPath } from "../../../connector-factory-D8v6aQIt.js";
+import "../../../config-cB7h4yvc.js";
+import { l as logger } from "../../../logger-CBDNtsMq.js";
+import { c as createConnectorFromPath } from "../../../connector-factory-CKm74_WZ.js";
 logger.level = process.env.LOG_LEVEL || "info";
 const main = async () => {
   try {
package/dist/services/psql-erp-service/index.d.ts
CHANGED
@@ -1,13 +1,8 @@
 import { PsqlService } from "./psql-service";
-import { PsqlLaborTicketOperations } from "./internal/psql-labor-ticket-operations";
 /**
  * A class to manage interactions with PSQL (Pervasive) databases via ODBC
  */
 export { PsqlService };
-/**
- * Labor ticket operations for PSQL (Phase 2)
- */
-export { PsqlLaborTicketOperations };
 /**
  * Configuration interface for PSQL connections
  */
package/dist/services/psql-erp-service/index.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/services/psql-erp-service/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,WAAW,EAAE,MAAM,gBAAgB,CAAC;
+{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/services/psql-erp-service/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,WAAW,EAAE,MAAM,gBAAgB,CAAC;AAE7C;;GAEG;AACH,OAAO,EAAE,WAAW,EAAE,CAAC;AAEvB;;GAEG;AACH,YAAY,EAAE,iBAAiB,EAAE,MAAM,iBAAiB,CAAC;AAEzD;;GAEG;AACH,OAAO,EACL,cAAc,EACd,cAAc,EACd,mBAAmB,EACnB,eAAe,EACf,kBAAkB,GACnB,MAAM,gBAAgB,CAAC"}
package/dist/services/psql-erp-service/internal/types/psql-types.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"psql-types.d.ts","sourceRoot":"","sources":["../../../../../src/services/psql-erp-service/internal/types/psql-types.ts"],"names":[],"mappings":"AAAA;;GAEG;AAEH,MAAM,WAAW,
+{"version":3,"file":"psql-types.d.ts","sourceRoot":"","sources":["../../../../../src/services/psql-erp-service/internal/types/psql-types.ts"],"names":[],"mappings":"AAAA;;GAEG;AAEH,MAAM,WAAW,SAAS;IACxB,KAAK,EAAE,MAAM,CAAC;IACd,OAAO,EAAE,MAAM,CAAC;IAChB,IAAI,CAAC,EAAE,MAAM,CAAC;CACf;AAED,MAAM,WAAW,iBAAkB,SAAQ,KAAK;IAC9C,UAAU,CAAC,EAAE,SAAS,EAAE,CAAC;CAC1B"}
package/dist/services/psql-erp-service/psql-service.d.ts
CHANGED
@@ -6,7 +6,14 @@ type PagingParams = {
 };
 export declare class PsqlService {
     private config;
+    private static odbcModule;
+    private static odbcLoadError;
     constructor(config: PsqlConfiguration);
+    /**
+     * Dynamically load the ODBC module with lazy initialization and caching
+     * @throws Error with helpful message if ODBC package is not installed
+     */
+    private static getOdbc;
     /**
      * Build PSQL ODBC connection string
      * CRITICAL: ServerName must use IP.PORT format (e.g., 10.4.0.11.1583)
package/dist/services/psql-erp-service/psql-service.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"psql-service.d.ts","sourceRoot":"","sources":["../../../src/services/psql-erp-service/psql-service.ts"],"names":[],"mappings":"
+{"version":3,"file":"psql-service.d.ts","sourceRoot":"","sources":["../../../src/services/psql-erp-service/psql-service.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,iBAAiB,EAAE,MAAM,iBAAiB,CAAC;AACpD,OAAO,EAAE,WAAW,EAAE,MAAM,uBAAuB,CAAC;AAIpD,KAAK,YAAY,GAAG;IAClB,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,MAAM,CAAC,EAAE,MAAM,CAAC;CACjB,CAAC;AAiBF,qBAAa,WAAW;IACtB,OAAO,CAAC,MAAM,CAAoB;IAClC,OAAO,CAAC,MAAM,CAAC,UAAU,CAA2B;IACpD,OAAO,CAAC,MAAM,CAAC,aAAa,CAAsB;gBAEtC,MAAM,EAAE,iBAAiB;IAIrC;;;OAGG;mBACkB,OAAO;IAsC5B;;;OAGG;IACH,OAAO,CAAC,qBAAqB;IAe7B;;;;;;;;OAQG;IACU,wBAAwB,CACnC,KAAK,EAAE,MAAM,EACb,MAAM,GAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAM,EACnC,MAAM,CAAC,EAAE,YAAY,GACpB,OAAO,CAAC,WAAW,GAAG,SAAS,CAAC;IA8EnC;;;OAGG;WACW,kBAAkB,CAAC,SAAS,EAAE,GAAG,EAAE,GAAG,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,EAAE;IAkB5E;;OAEG;IACH,OAAO,CAAC,eAAe;CA4BxB"}
package/dist/services/reporting-service/logger.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"logger.d.ts","sourceRoot":"","sources":["../../../src/services/reporting-service/logger.ts"],"names":[],"mappings":"AA+DA,QAAA,MAAM,MAAM,0BAaV,CAAC;
+{"version":3,"file":"logger.d.ts","sourceRoot":"","sources":["../../../src/services/reporting-service/logger.ts"],"names":[],"mappings":"AA+DA,QAAA,MAAM,MAAM,0BAaV,CAAC;AA0DH,eAAO,MAAM,eAAe,GAAI,UAAU,MAAM,EAAE,SAAS,MAAM,SAuChE,CAAC;AAKF,eAAe,MAAM,CAAC"}
package/dist/utils/index.d.ts
CHANGED
@@ -11,7 +11,7 @@ export { addNewFieldFromExternalSource, addNewFieldFromLookupField, } from "./da
  * Timezone and time-related utilities
  */
 export { getTimezoneOffsetAndPersist } from "./time-utils";
-export { formatDateWithTZOffset, convertToLocalTime } from "./timezone";
+export { formatDateWithTZOffset, convertToLocalTime, toISOWithOffset } from "./timezone";
 export { applyTimezoneOffsetsToFields } from "./time-utils";
 export * from "./time-utils";
 /**
package/dist/utils/index.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/utils/index.ts"],"names":[],"mappings":"AAAA;;GAEG;AACH,OAAO,EAAE,aAAa,EAAE,MAAM,cAAc,CAAC;AAC7C,OAAO,EAAE,sBAAsB,EAAE,MAAM,0BAA0B,CAAC;AAClE,OAAO,EAAE,wBAAwB,EAAE,MAAM,4BAA4B,CAAC;AACtE,OAAO,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAC;AACtD,OAAO,EAAE,cAAc,EAAE,MAAM,mBAAmB,CAAC;AACnD,OAAO,EACL,6BAA6B,EAC7B,0BAA0B,GAC3B,MAAM,uBAAuB,CAAC;AAE/B;;GAEG;AACH,OAAO,EAAE,2BAA2B,EAAE,MAAM,cAAc,CAAC;AAC3D,OAAO,EAAE,sBAAsB,EAAE,kBAAkB,EAAE,MAAM,YAAY,CAAC;
+{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/utils/index.ts"],"names":[],"mappings":"AAAA;;GAEG;AACH,OAAO,EAAE,aAAa,EAAE,MAAM,cAAc,CAAC;AAC7C,OAAO,EAAE,sBAAsB,EAAE,MAAM,0BAA0B,CAAC;AAClE,OAAO,EAAE,wBAAwB,EAAE,MAAM,4BAA4B,CAAC;AACtE,OAAO,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAC;AACtD,OAAO,EAAE,cAAc,EAAE,MAAM,mBAAmB,CAAC;AACnD,OAAO,EACL,6BAA6B,EAC7B,0BAA0B,GAC3B,MAAM,uBAAuB,CAAC;AAE/B;;GAEG;AACH,OAAO,EAAE,2BAA2B,EAAE,MAAM,cAAc,CAAC;AAC3D,OAAO,EAAE,sBAAsB,EAAE,kBAAkB,EAAE,eAAe,EAAE,MAAM,YAAY,CAAC;AACzF,OAAO,EAAE,4BAA4B,EAAE,MAAM,cAAc,CAAC;AAC5D,cAAc,cAAc,CAAC;AAE7B;;GAEG;AACH,OAAO,EAAE,qBAAqB,EAAE,MAAM,qDAAqD,CAAC;AAC5F,YAAY,EAAE,oBAAoB,EAAE,MAAM,qDAAqD,CAAC;AAChG,OAAO,EAAE,kBAAkB,EAAE,MAAM,kDAAkD,CAAC;AACtF,OAAO,EAAE,iBAAiB,EAAE,MAAM,iDAAiD,CAAC;AAEpF;;GAEG;AACH,OAAO,EAAE,sBAAsB,EAAE,MAAM,6BAA6B,CAAC;AACrE,YAAY,EAAE,uBAAuB,EAAE,MAAM,6BAA6B,CAAC;AAC3E,OAAO,EAAE,sBAAsB,EAAE,MAAM,6BAA6B,CAAC;AACrE,OAAO,EAAE,uBAAuB,EAAE,MAAM,qCAAqC,CAAC;AAG9E,OAAO,EACL,sBAAsB,EACtB,sBAAsB,GACvB,MAAM,qCAAqC,CAAC;AAE7C;;GAEG;AACH,cAAc,oDAAoD,CAAC;AACnE,cAAc,oDAAoD,CAAC;AACnE,OAAO,EAAE,uBAAuB,EAAE,MAAM,wBAAwB,CAAC;AAEjE;;GAEG;AACH,OAAO,EAAE,iBAAiB,EAAE,MAAM,eAAe,CAAC;AAClD,OAAO,EAAE,SAAS,EAAE,MAAM,eAAe,CAAC;AAC1C,YAAY,EAAE,UAAU,EAAE,iBAAiB,EAAE,YAAY,EAAE,MAAM,eAAe,CAAC;AAEjF;;GAEG;AACH,cAAc,2BAA2B,CAAC;AAC1C,OAAO,EAAE,YAAY,EAAE,MAAM,eAAe,CAAC;AAE7C;;GAEG;AACH,OAAO,EAAE,iBAAiB,EAAE,QAAQ,EAAE,KAAK,WAAW,EAAE,KAAK,cAAc,EAAE,MAAM,qCAAqC,CAAC;AACzH,OAAO,EAAE,cAAc,EAAE,MAAM,6BAA6B,CAAC;AAE7D;;GAEG;AACH,cAAc,2BAA2B,CAAC;AAE1C;;GAEG;AACH,OAAO,EAAE,cAAc,EAAE,MAAM,sDAAsD,CAAC;AACtF,OAAO,EAAE,WAAW,EAAE,MAAM,2CAA2C,CAAC;AAExE;;GAEG;AACH,OAAO,EAAE,uBAAuB,EAAE,MAAM,qBAAqB,CAAC"}
package/dist/utils/timezone.d.ts
CHANGED
@@ -18,4 +18,11 @@ export declare const convertToLocalTime: (zuluTime: string | Date | null | undef
  * @returns The formatted date string in format: YYYY-MM-DDTHH:mm:ss+/-HH:MM
  */
 export declare const formatDateWithTZOffset: (date: string | null | undefined, timezoneOffset: number) => string | undefined;
+/**
+ * Formats a Date object as an ISO string with the specified timezone offset
+ * @param date The Date object to format (should be a Date that has been shifted by convertToLocalTime)
+ * @param timezoneOffset The timezone offset in hours
+ * @returns ISO string with offset in format: YYYY-MM-DDTHH:mm:ss.SSS±HH:MM
+ */
+export declare const toISOWithOffset: (date: Date, timezoneOffset: number) => string;
 //# sourceMappingURL=timezone.d.ts.map
package/dist/utils/timezone.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"timezone.d.ts","sourceRoot":"","sources":["../../src/utils/timezone.ts"],"names":[],"mappings":"AAGA;;;;GAIG;AACH,eAAO,MAAM,iBAAiB,QAAa,OAAO,CAAC,MAAM,CA6CxD,CAAC;AAEF;;;;;GAKG;AACH,eAAO,MAAM,kBAAkB,GAC7B,UAAU,MAAM,GAAG,IAAI,GAAG,IAAI,GAAG,SAAS,EAC1C,gBAAgB,MAAM,KACrB,IAAI,GAAG,SAIT,CAAC;AAEF;;;;;GAKG;AACH,eAAO,MAAM,sBAAsB,GACjC,MAAM,MAAM,GAAG,IAAI,GAAG,SAAS,EAC/B,gBAAgB,MAAM,KACrB,MAAM,GAAG,SAgBX,CAAC"}
+{"version":3,"file":"timezone.d.ts","sourceRoot":"","sources":["../../src/utils/timezone.ts"],"names":[],"mappings":"AAGA;;;;GAIG;AACH,eAAO,MAAM,iBAAiB,QAAa,OAAO,CAAC,MAAM,CA6CxD,CAAC;AAEF;;;;;GAKG;AACH,eAAO,MAAM,kBAAkB,GAC7B,UAAU,MAAM,GAAG,IAAI,GAAG,IAAI,GAAG,SAAS,EAC1C,gBAAgB,MAAM,KACrB,IAAI,GAAG,SAIT,CAAC;AAEF;;;;;GAKG;AACH,eAAO,MAAM,sBAAsB,GACjC,MAAM,MAAM,GAAG,IAAI,GAAG,SAAS,EAC/B,gBAAgB,MAAM,KACrB,MAAM,GAAG,SAgBX,CAAC;AAEF;;;;;GAKG;AACH,eAAO,MAAM,eAAe,GAAI,MAAM,IAAI,EAAE,gBAAgB,MAAM,KAAG,MAoBpE,CAAC"}
package/package.json
CHANGED
@@ -1,7 +1,7 @@
 {
   "name": "@machinemetrics/mm-erp-sdk",
   "description": "A library for syncing data between MachineMetrics and ERP systems",
-  "version": "0.1.8-beta.1",
+  "version": "0.1.8-beta.10",
   "license": "MIT",
   "author": "machinemetrics",
   "main": "dist/mm-erp-sdk.js",
@@ -19,7 +19,6 @@
   "dependencies": {
     "@azure/msal-node": "^2.12.0",
     "@ladjs/graceful": "^4.2.0",
-    "aws-sdk": "^2.1691.0",
     "axios": "^1.7.3",
     "axios-retry": "^4.5.0",
     "better-sqlite3": "^11.3.0",
@@ -29,12 +28,14 @@
     "knex": "^3.1.0",
     "lodash": "^4.17.21",
     "mssql": "^11.0.1",
-    "odbc": "^2.4.8",
     "winston": "^3.14.0",
     "winston-daily-rotate-file": "^5.0.0",
     "xxhashjs": "^0.2.2",
     "zod": "^3.24.1"
   },
+  "optionalDependencies": {
+    "odbc": "^2.4.8"
+  },
   "devDependencies": {
     "@types/json-stable-stringify": "^1.1.0",
     "@types/lodash": "^4.17.10",
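Because `odbc` moves from `dependencies` to `optionalDependencies`, an install can now succeed without it (for example when the native build fails, or when optional dependencies are omitted with `npm install --omit=optional`). A minimal sketch of how a consumer might probe for it at runtime, mirroring the dynamic-import guard that `PsqlService` adds further down; the `odbcAvailable` helper name is illustrative only, not part of the package:

```ts
// Sketch only (assumed consumer code): check whether the optional "odbc"
// dependency is actually present before enabling PSQL-backed features.
async function odbcAvailable(): Promise<boolean> {
  try {
    // @ts-ignore - odbc is optional and may be absent at build time
    await import("odbc"); // resolves only if the optional dependency installed successfully
    return true;
  } catch {
    return false; // skipped optional deps or failed native build
  }
}
```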
package/src/services/psql-erp-service/index.ts
CHANGED
@@ -1,16 +1,10 @@
 import { PsqlService } from "./psql-service";
-import { PsqlLaborTicketOperations } from "./internal/psql-labor-ticket-operations";
 
 /**
  * A class to manage interactions with PSQL (Pervasive) databases via ODBC
  */
 export { PsqlService };
 
-/**
- * Labor ticket operations for PSQL (Phase 2)
- */
-export { PsqlLaborTicketOperations };
-
 /**
  * Configuration interface for PSQL connections
  */
package/src/services/psql-erp-service/psql-service.ts
CHANGED
@@ -1,4 +1,3 @@
-import odbc from "odbc";
 import { PsqlConfiguration } from "./configuration";
 import { ERPResponse } from "../../types/erp-types";
 import { OdbcErrorResponse } from "./internal/types/psql-types";
@@ -9,13 +8,67 @@ type PagingParams = {
   offset?: number;
 };
 
+/**
+ * ODBC connection interface for type safety
+ */
+interface OdbcConnection {
+  query(sql: string): Promise<any[]>;
+  close(): Promise<void>;
+}
+
+/**
+ * ODBC module interface
+ */
+interface OdbcModule {
+  connect(connectionString: string): Promise<OdbcConnection>;
+}
+
 export class PsqlService {
   private config: PsqlConfiguration;
+  private static odbcModule: OdbcModule | null = null;
+  private static odbcLoadError: Error | null = null;
 
   constructor(config: PsqlConfiguration) {
     this.config = config;
   }
 
+  /**
+   * Dynamically load the ODBC module with lazy initialization and caching
+   * @throws Error with helpful message if ODBC package is not installed
+   */
+  private static async getOdbc(): Promise<OdbcModule> {
+    // If we've already tried and failed, throw the cached error
+    if (this.odbcLoadError) {
+      throw this.odbcLoadError;
+    }
+
+    // If already loaded, return cached module
+    if (this.odbcModule) {
+      return this.odbcModule;
+    }
+
+    try {
+      // Dynamic import - only loads when actually needed
+      // @ts-ignore - odbc is an optional dependency, may not be installed at build time
+      const odbcImport = await import("odbc");
+      // Handle both default export and named export patterns
+      const odbc = odbcImport.default || odbcImport;
+      this.odbcModule = odbc as OdbcModule;
+      return this.odbcModule;
+    } catch (error) {
+      const errorMessage = error instanceof Error ? error.message : String(error);
+      this.odbcLoadError = new Error(
+        `ODBC package is required for PSQL service but is not installed or failed to load.\n` +
+          `Install it with: npm install odbc\n` +
+          `Also install OS-level dependencies, e.g. on Alpine Linux:\n` +
+          `  apk add --no-cache unixodbc unixodbc-dev python3 make g++\n` +
+          `For other Linux distributions, install unixodbc and unixodbc-dev packages.\n` +
+          `Original error: ${errorMessage}`
+      );
+      throw this.odbcLoadError;
+    }
+  }
+
   // REMOVED: dispose() method - not needed anymore
   // REMOVED: connection property - not needed anymore
   // REMOVED: openConnection() method - not needed anymore
@@ -54,7 +107,9 @@ export class PsqlService {
     params: Record<string, string> = {},
     paging?: PagingParams
   ): Promise<ERPResponse | undefined> {
-
+    // Dynamically load ODBC module (will throw helpful error if not installed)
+    const odbc = await PsqlService.getOdbc();
+    let connection: OdbcConnection | null = null;
 
     try {
       // Create fresh connection for THIS query only
@@ -65,7 +120,7 @@ export class PsqlService {
       if (Object.keys(params).length > 0) {
         logger.warn(
           "PsqlService: Query parameters provided but parameter binding not yet implemented. " +
-            "Using direct query execution.
+            "Using direct query execution."
         );
       }
 
@@ -98,6 +153,13 @@ export class PsqlService {
         },
       };
     } catch (error) {
+      // If this is an ODBC load error (from getOdbc), re-throw it as-is
+      // since it already has a helpful error message
+      if (error instanceof Error && error.message.includes("ODBC package is required")) {
+        throw error;
+      }
+
+      // Otherwise, handle as ODBC runtime error
      const errorInfo = error as OdbcErrorResponse;
      logger.error("Error fetching data from PSQL", {
        error: errorInfo.message,
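Since `getOdbc()` is only invoked on the query path, constructing `PsqlService` never touches ODBC; the "ODBC package is required" error surfaces only when a query actually runs. A hedged consumer sketch; `runQuery` and `psqlConfig` are placeholder names, not the package's actual API:

```ts
// Sketch only: handle the descriptive error raised when the optional "odbc"
// dependency is missing. `runQuery` stands in for whichever public query
// method of PsqlService is called; `psqlConfig` is an assumed PsqlConfiguration.
const service = new PsqlService(psqlConfig); // safe even without "odbc" installed
try {
  await service.runQuery(/* ... */);
} catch (err) {
  if (err instanceof Error && err.message.includes("ODBC package is required")) {
    // Install the optional dependency (plus unixodbc/unixodbc-dev) to enable PSQL support.
  }
  throw err;
}
```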
package/src/services/reporting-service/logger.ts
CHANGED
@@ -76,6 +76,61 @@ const logger = createLogger({
   ],
 });
 
+// Helper function to create a file transport with shared configuration
+const createFileTransport = (): DailyRotateFile => {
+  return new DailyRotateFile({
+    filename: path.join(logDirectory, "%DATE%.log"),
+    datePattern: "YYYY-MM-DD",
+    zippedArchive: true,
+    maxSize: "20m",
+    maxFiles: "14d",
+    format: logFormat,
+  });
+};
+
+// Rotate mitigation helper: attaches rotate handler to transport and recursively attaches to replacements
+function attachRotateMitigation(
+  transport: DailyRotateFile,
+  opts: { logLevel: string; nodeEnv: string }
+) {
+  const { logLevel, nodeEnv } = opts;
+  let isRefreshing = false;
+  transport.on("rotate", (_old: string, _new: string) => {
+    if (isRefreshing) return;
+    isRefreshing = true;
+    let removalTimer: NodeJS.Timeout | null = null;
+
+    // Create replacement first to avoid any logging gap
+    const next = createFileTransport();
+    // When the new file is created, remove the old transport
+    next.on("new", () => {
+      if (removalTimer) {
+        clearTimeout(removalTimer);
+        removalTimer = null;
+      }
+      try {
+        logger.remove(transport);
+      } catch {}
+      isRefreshing = false;
+    });
+    attachRotateMitigation(next, opts);
+    logger.add(next);
+
+    // Fallback: if the "new" event doesn't fire, remove the old transport after a grace period
+    const REMOVAL_GRACE_MS = 30000;
+    removalTimer = setTimeout(() => {
+      try {
+        logger.remove(transport);
+      } catch {}
+      isRefreshing = false;
+      removalTimer = null;
+    }, REMOVAL_GRACE_MS);
+
+    // Keep console and other transports intact; do not silence or clear
+    logger.level = logLevel;
+  });
+}
+
 // Function to reconfigure the logger once CoreConfiguration is available
 export const configureLogger = (logLevel: string, nodeEnv: string) => {
   // Remove existing transports (safely): close any DailyRotateFile streams first
@@ -96,78 +151,10 @@ export const configureLogger = (logLevel: string, nodeEnv: string) => {
   logger.clear();
 
   // Add file transport
-  const fileTransport = new DailyRotateFile({
-    filename: path.join(logDirectory, "%DATE%.log"),
-    datePattern: "YYYY-MM-DD",
-    zippedArchive: true,
-    maxSize: "20m",
-    maxFiles: "14d",
-    format: logFormat,
-  });
+  const fileTransport = createFileTransport();
+  attachRotateMitigation(fileTransport, { logLevel, nodeEnv });
   logger.add(fileTransport);
 
-  // Rotate-time mitigation for long-running single-process apps
-  let isRefreshing = false;
-  fileTransport.on("rotate", (_oldFilename: string, _newFilename: string) => {
-    if (isRefreshing) return;
-    isRefreshing = true;
-    (logger as any).silent = true; // gate writes during refresh to avoid write-after-end
-
-    try {
-      // Close all existing DailyRotateFile streams
-      const existing = (logger.transports || []).filter(
-        (t: any) => t instanceof DailyRotateFile
-      );
-      for (const t of existing) {
-        const s = (t as any).logStream;
-        if (s && typeof s.end === "function") {
-          try {
-            s.end();
-          } catch {}
-        }
-      }
-
-      // Refresh the file transport cleanly
-      logger.clear();
-      const refreshed = new DailyRotateFile({
-        filename: path.join(logDirectory, "%DATE%.log"),
-        datePattern: "YYYY-MM-DD",
-        zippedArchive: true,
-        maxSize: "20m",
-        maxFiles: "14d",
-        format: logFormat,
-      });
-
-      // Once new file stream is ready, resume writes
-      refreshed.on("new", () => {
-        (logger as any).silent = false;
-        isRefreshing = false;
-      });
-
-      logger.add(refreshed);
-
-      // Preserve console transport behavior in non-production
-      if (nodeEnv !== "production") {
-        logger.add(
-          new transports.Console({
-            format: format.combine(
-              format.timestamp(),
-              format.splat(),
-              baseFormat,
-              format.colorize({ all: true })
-            ),
-          })
-        );
-      }
-
-      logger.level = logLevel;
-    } catch {
-      // If anything goes wrong, resume writes to avoid permanent silence
-      (logger as any).silent = false;
-      isRefreshing = false;
-    }
-  });
-
   // Add console transport in non-production environments
   if (nodeEnv !== "production") {
     logger.add(
package/src/utils/index.ts
CHANGED
@@ -15,7 +15,7 @@ export {
  * Timezone and time-related utilities
  */
 export { getTimezoneOffsetAndPersist } from "./time-utils";
-export { formatDateWithTZOffset, convertToLocalTime } from "./timezone";
+export { formatDateWithTZOffset, convertToLocalTime, toISOWithOffset } from "./timezone";
 export { applyTimezoneOffsetsToFields } from "./time-utils";
 export * from "./time-utils";
 
package/src/utils/mm-labor-ticket-helpers.ts
CHANGED
@@ -1,4 +1,4 @@
-import { convertToLocalTime } from "./timezone";
+import { convertToLocalTime, toISOWithOffset } from "./timezone";
 import { MMReceiveLaborTicket } from "../services/mm-api-service/types/receive-types";
 
 /**
@@ -22,7 +22,7 @@ export function convertLaborTicketToLocalTimezone(
 
   timeFields.forEach((field) => {
     const localTime = convertToLocalTime(laborTicket[field], timezoneOffset);
-    laborTicket[field] = localTime
+    laborTicket[field] = localTime ? toISOWithOffset(localTime, timezoneOffset) : null;
   });
   return laborTicket;
 }
package/src/utils/timezone.ts
CHANGED
@@ -94,3 +94,31 @@ export const formatDateWithTZOffset = (
   // Append the timezone offset
   return `${isoDate}${sign}${hours}:${minutes}`;
 };
+
+/**
+ * Formats a Date object as an ISO string with the specified timezone offset
+ * @param date The Date object to format (should be a Date that has been shifted by convertToLocalTime)
+ * @param timezoneOffset The timezone offset in hours
+ * @returns ISO string with offset in format: YYYY-MM-DDTHH:mm:ss.SSS±HH:MM
+ */
+export const toISOWithOffset = (date: Date, timezoneOffset: number): string => {
+  const sign = timezoneOffset >= 0 ? "+" : "-";
+  const abs = Math.abs(timezoneOffset);
+  const hours = Math.floor(abs);
+  const minutes = Math.round((abs - hours) * 60);
+  const pad2 = (n: number) => n.toString().padStart(2, "0");
+  const pad3 = (n: number) => n.toString().padStart(3, "0");
+
+  // Use UTC getters since convertToLocalTime shifts the Date's internal timestamp
+  // The UTC components of the shifted Date represent the local wall time
+  const yyyy = date.getUTCFullYear();
+  const MM = pad2(date.getUTCMonth() + 1);
+  const dd = pad2(date.getUTCDate());
+  const HH = pad2(date.getUTCHours());
+  const mm = pad2(date.getUTCMinutes());
+  const ss = pad2(date.getUTCSeconds());
+  const SSS = pad3(date.getUTCMilliseconds());
+  const off = `${sign}${pad2(hours)}:${pad2(minutes)}`;
+
+  return `${yyyy}-${MM}-${dd}T${HH}:${mm}:${ss}.${SSS}${off}`;
+};
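A worked example against the `toISOWithOffset` implementation added above; the offset values are arbitrary, and the input is assumed to be a Date already shifted by `convertToLocalTime`, so its UTC fields hold the local wall-clock time:

```ts
// Example usage (values chosen for illustration only)
const shifted = new Date(Date.UTC(2024, 0, 15, 8, 30, 0, 0));
toISOWithOffset(shifted, -5);  // "2024-01-15T08:30:00.000-05:00"
toISOWithOffset(shifted, 5.5); // "2024-01-15T08:30:00.000+05:30"
```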