@machinemetrics/mm-erp-sdk 0.1.6-beta.2 → 0.1.7-beta.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/{config-WKwu1mMo.js → config-qat9zgOl.js} +2 -2
- package/dist/{config-WKwu1mMo.js.map → config-qat9zgOl.js.map} +1 -1
- package/dist/{connector-factory-DHmMYsRs.js → connector-factory-C2czCs9v.js} +2 -2
- package/dist/{connector-factory-DHmMYsRs.js.map → connector-factory-C2czCs9v.js.map} +1 -1
- package/dist/{hashed-cache-manager-CtDhFqj6.js → hashed-cache-manager-CzyFSt2B.js} +4 -4
- package/dist/{hashed-cache-manager-CtDhFqj6.js.map → hashed-cache-manager-CzyFSt2B.js.map} +1 -1
- package/dist/{index-aci_wdcn.js → index-B9wo8pld.js} +2 -2
- package/dist/{index-aci_wdcn.js.map → index-B9wo8pld.js.map} +1 -1
- package/dist/index.d.ts +5 -3
- package/dist/index.d.ts.map +1 -1
- package/dist/{logger-hqtl8hFM.js → logger-Db8CkwR6.js} +924 -966
- package/dist/logger-Db8CkwR6.js.map +1 -0
- package/dist/mm-erp-sdk.js +275 -17
- package/dist/mm-erp-sdk.js.map +1 -1
- package/dist/services/data-sync-service/data-sync-service.d.ts.map +1 -1
- package/dist/services/data-sync-service/jobs/clean-up-expired-cache.js +4 -4
- package/dist/services/data-sync-service/jobs/from-erp.d.ts.map +1 -1
- package/dist/services/data-sync-service/jobs/from-erp.js +25 -8
- package/dist/services/data-sync-service/jobs/from-erp.js.map +1 -1
- package/dist/services/data-sync-service/jobs/retry-failed-labor-tickets.js +3 -3
- package/dist/services/data-sync-service/jobs/run-migrations.js +1 -1
- package/dist/services/data-sync-service/jobs/to-erp.d.ts.map +1 -1
- package/dist/services/data-sync-service/jobs/to-erp.js +24 -7
- package/dist/services/data-sync-service/jobs/to-erp.js.map +1 -1
- package/dist/services/psql-erp-service/configuration.d.ts +10 -0
- package/dist/services/psql-erp-service/configuration.d.ts.map +1 -0
- package/dist/services/psql-erp-service/index.d.ts +19 -0
- package/dist/services/psql-erp-service/index.d.ts.map +1 -0
- package/dist/services/psql-erp-service/internal/psql-config.d.ts +28 -0
- package/dist/services/psql-erp-service/internal/psql-config.d.ts.map +1 -0
- package/dist/services/psql-erp-service/internal/psql-labor-ticket-operations.d.ts +40 -0
- package/dist/services/psql-erp-service/internal/psql-labor-ticket-operations.d.ts.map +1 -0
- package/dist/services/psql-erp-service/internal/types/psql-types.d.ts +15 -0
- package/dist/services/psql-erp-service/internal/types/psql-types.d.ts.map +1 -0
- package/dist/services/psql-erp-service/psql-helpers.d.ts +32 -0
- package/dist/services/psql-erp-service/psql-helpers.d.ts.map +1 -0
- package/dist/services/psql-erp-service/psql-service.d.ts +36 -0
- package/dist/services/psql-erp-service/psql-service.d.ts.map +1 -0
- package/dist/types/erp-types.d.ts +2 -1
- package/dist/types/erp-types.d.ts.map +1 -1
- package/dist/utils/standard-process-drivers/labor-ticket-erp-synchronizer.d.ts.map +1 -1
- package/package.json +3 -1
- package/src/index.ts +27 -5
- package/src/services/data-sync-service/data-sync-service.ts +43 -3
- package/src/services/data-sync-service/jobs/from-erp.ts +35 -6
- package/src/services/data-sync-service/jobs/to-erp.ts +35 -5
- package/src/services/psql-erp-service/configuration.ts +9 -0
- package/src/services/psql-erp-service/index.ts +28 -0
- package/src/services/psql-erp-service/internal/psql-config.ts +13 -0
- package/src/services/psql-erp-service/internal/psql-labor-ticket-operations.ts +58 -0
- package/src/services/psql-erp-service/internal/types/psql-types.ts +17 -0
- package/src/services/psql-erp-service/psql-helpers.ts +90 -0
- package/src/services/psql-erp-service/psql-service.ts +178 -0
- package/src/types/erp-types.ts +1 -0
- package/src/utils/standard-process-drivers/labor-ticket-erp-synchronizer.ts +1 -2
- package/dist/logger-hqtl8hFM.js.map +0 -1
package/dist/services/data-sync-service/data-sync-service.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"data-sync-service.d.ts","sourceRoot":"","sources":["../../../src/services/data-sync-service/data-sync-service.ts"],"names":[],"mappings":"AAOA,eAAO,MAAM,kBAAkB,GAAU,eAAe,MAAM,
+
{"version":3,"file":"data-sync-service.d.ts","sourceRoot":"","sources":["../../../src/services/data-sync-service/data-sync-service.ts"],"names":[],"mappings":"AAOA,eAAO,MAAM,kBAAkB,GAAU,eAAe,MAAM,kBA+I7D,CAAC"}
package/dist/services/data-sync-service/jobs/clean-up-expired-cache.js
CHANGED
@@ -1,7 +1,7 @@
-import "../../../config-WKwu1mMo.js";
-import { H as HashedCacheManager } from "../../../hashed-cache-manager-CtDhFqj6.js";
-import { S as SQLiteCoordinator } from "../../../index-aci_wdcn.js";
-import { l as logger } from "../../../logger-hqtl8hFM.js";
+import "../../../config-qat9zgOl.js";
+import { H as HashedCacheManager } from "../../../hashed-cache-manager-CzyFSt2B.js";
+import { S as SQLiteCoordinator } from "../../../index-B9wo8pld.js";
+import { l as logger } from "../../../logger-Db8CkwR6.js";
 logger.level = process.env.LOG_LEVEL || "info";
 const main = async () => {
   const cacheManager = new HashedCacheManager();
package/dist/services/data-sync-service/jobs/from-erp.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"from-erp.d.ts","sourceRoot":"","sources":["../../../../src/services/data-sync-service/jobs/from-erp.ts"],"names":[],"mappings":"AAAA,OAAO,eAAe,CAAC;
+
{"version":3,"file":"from-erp.d.ts","sourceRoot":"","sources":["../../../../src/services/data-sync-service/jobs/from-erp.ts"],"names":[],"mappings":"AAAA,OAAO,eAAe,CAAC;AAqBvB,QAAA,MAAM,IAAI,qBAmDT,CAAC;AAoBF,eAAe,IAAI,CAAC"}
package/dist/services/data-sync-service/jobs/from-erp.js
CHANGED
@@ -1,8 +1,17 @@
-import "../../../config-WKwu1mMo.js";
-import { l as logger } from "../../../logger-hqtl8hFM.js";
-import { S as SQLiteCoordinator } from "../../../index-aci_wdcn.js";
-import { c as createConnectorFromPath } from "../../../connector-factory-DHmMYsRs.js";
+import "../../../config-qat9zgOl.js";
+import { l as logger } from "../../../logger-Db8CkwR6.js";
+import { S as SQLiteCoordinator } from "../../../index-B9wo8pld.js";
+import { c as createConnectorFromPath } from "../../../connector-factory-C2czCs9v.js";
 logger.level = process.env.LOG_LEVEL || "info";
+if (global.gc) {
+  process.on("exit", () => {
+    logger.debug("from-erp: Running garbage collection on exit");
+    try {
+      global.gc?.();
+    } catch (e) {
+    }
+  });
+}
 const main = async () => {
   try {
     logger.info('Worker for job "from-erp" online');
@@ -17,6 +26,17 @@ const main = async () => {
     });
     await connector.syncFromERPCompleted();
     logger.info("==========Completed from-erp job cycle==========");
+    logger.debug("from-erp: Starting cleanup sequence");
+    if (global.gc) {
+      logger.debug("from-erp: Running manual garbage collection");
+      try {
+        global.gc?.();
+      } catch (e) {
+        logger.debug("from-erp: GC not available or failed");
+      }
+    }
+    await new Promise((resolve) => setTimeout(resolve, 100));
+    logger.debug("from-erp: Cleanup sequence completed");
   } catch (error) {
     const errorDetails = {
       message: error instanceof Error ? error.message : String(error),
@@ -32,10 +52,7 @@ const main = async () => {
   }
 };
 const normalizedArgv1 = process.argv[1].replace(/\\/g, "/");
-const fileUrl = normalizedArgv1.startsWith("/") ? `file://${normalizedArgv1}` :
-// Unix: file:// + /path = file:///path
-`file:///${normalizedArgv1}`
-);
+const fileUrl = normalizedArgv1.startsWith("/") ? `file://${normalizedArgv1}` : `file:///${normalizedArgv1}`;
 const isMainModule = import.meta.url === fileUrl;
 if (isMainModule) {
   try {
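The cleanup blocks above only run when global.gc is defined, which in Node.js requires launching the process with --expose-gc (the data-sync-service.ts change later in this diff passes that flag to Bree workers via execArgv). A minimal standalone sketch of the same guard, not taken from the package:

// Sketch: global.gc is only defined when Node was launched with --expose-gc,
// for example: node --expose-gc dist/services/data-sync-service/jobs/from-erp.js
if (global.gc) {
  try {
    global.gc?.(); // request a collection; V8 still decides the actual timing
  } catch {
    // mirror the jobs above: ignore GC errors rather than crash the worker
  }
}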
package/dist/services/data-sync-service/jobs/from-erp.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"from-erp.js","sources":["../../../../src/services/data-sync-service/jobs/from-erp.ts"],"sourcesContent":["import \"dotenv/config\";\n\nimport logger from \"../../../services/reporting-service/logger\";\nimport { SQLiteCoordinator } from \"../../sqlite-service\";\nimport { createConnectorFromPath } from \"../../../utils/connector-factory\";\n\n// Configure the logger with the correct log level\nlogger.level = process.env.LOG_LEVEL || \"info\";\n\nconst main = async () => {\n try {\n logger.info('Worker for job \"from-erp\" online');\n logger.info(\"==========Starting from-erp job cycle==========\");\n\n // Get the connector path from the environment variable\n const connectorPath = process.env.CONNECTOR_PATH;\n
+
{"version":3,"file":"from-erp.js","sources":["../../../../src/services/data-sync-service/jobs/from-erp.ts"],"sourcesContent":["import \"dotenv/config\";\n\nimport logger from \"../../../services/reporting-service/logger\";\nimport { SQLiteCoordinator } from \"../../sqlite-service\";\nimport { createConnectorFromPath } from \"../../../utils/connector-factory\";\n\n// Configure the logger with the correct log level\nlogger.level = process.env.LOG_LEVEL || \"info\";\n\n// Enable garbage collection on exit if available\nif (global.gc) {\n process.on(\"exit\", () => {\n logger.debug(\"from-erp: Running garbage collection on exit\");\n try {\n global.gc?.();\n } catch (e) {\n // Ignore GC errors\n }\n });\n}\n\nconst main = async () => {\n try {\n logger.info('Worker for job \"from-erp\" online');\n logger.info(\"==========Starting from-erp job cycle==========\");\n\n // Get the connector path from the environment variable\n const connectorPath = process.env.CONNECTOR_PATH;\n\n if (!connectorPath) {\n throw new Error(\"Connector path not provided in environment variables\");\n }\n\n // Create a new connector instance for this job\n const connector = await createConnectorFromPath(connectorPath);\n\n await SQLiteCoordinator.executeWithLock(\"from-erp\", async () => {\n await connector.syncFromERP();\n });\n\n await connector.syncFromERPCompleted();\n logger.info(\"==========Completed from-erp job cycle==========\");\n\n // Cleanup before worker exit\n logger.debug(\"from-erp: Starting cleanup sequence\");\n\n // Trigger garbage collection if available\n if (global.gc) {\n logger.debug(\"from-erp: Running manual garbage collection\");\n try {\n global.gc?.();\n } catch (e) {\n logger.debug(\"from-erp: GC not available or failed\");\n }\n }\n\n // Small delay to allow any pending operations to complete\n await new Promise((resolve) => setTimeout(resolve, 100));\n logger.debug(\"from-erp: Cleanup sequence completed\");\n } catch (error) {\n const errorDetails = {\n message: error instanceof Error ? error.message : String(error),\n stack: error instanceof Error ? error.stack : undefined,\n name: error instanceof Error ? error.name : undefined,\n ...(error && typeof error === \"object\" ? error : {}), // Include all enumerable properties if it's an object\n };\n logger.error('Worker for job \"from-erp\" had an error', {\n error: errorDetails,\n });\n\n throw error; // Rethrow so Bree can handle it properly\n }\n};\n\n// Cross-platform module detection fix for Bree compatibility\n// Windows: process.argv[1] uses backslashes, import.meta.url uses forward slashes\n// Linux/Mac: both use forward slashes, so this normalization is safe\nconst normalizedArgv1 = process.argv[1].replace(/\\\\/g, \"/\");\nconst fileUrl = normalizedArgv1.startsWith(\"/\")\n ? 
`file://${normalizedArgv1}` // Unix: file:// + /path = file:///path\n : `file:///${normalizedArgv1}`; // Windows: file:/// + C:/path = file:///C:/path\nconst isMainModule = import.meta.url === fileUrl;\n\nif (isMainModule) {\n // This is called when Bree runs this file as a worker\n try {\n await main();\n } catch {\n process.exitCode = 1; // prefer exitCode so stdout/stderr can flush\n }\n}\n\nexport default main;\n"],"names":[],"mappings":";;;;AAOA,OAAO,QAAQ,QAAQ,IAAI,aAAa;AAGxC,IAAI,OAAO,IAAI;AACb,UAAQ,GAAG,QAAQ,MAAM;AACvB,WAAO,MAAM,8CAA8C;AAC3D,QAAI;AACF,aAAO,KAAA;AAAA,IACT,SAAS,GAAG;AAAA,IAEZ;AAAA,EACF,CAAC;AACH;AAEA,MAAM,OAAO,YAAY;AACvB,MAAI;AACF,WAAO,KAAK,kCAAkC;AAC9C,WAAO,KAAK,iDAAiD;AAG7D,UAAM,gBAAgB,QAAQ,IAAI;AAElC,QAAI,CAAC,eAAe;AAClB,YAAM,IAAI,MAAM,sDAAsD;AAAA,IACxE;AAGA,UAAM,YAAY,MAAM,wBAAwB,aAAa;AAE7D,UAAM,kBAAkB,gBAAgB,YAAY,YAAY;AAC9D,YAAM,UAAU,YAAA;AAAA,IAClB,CAAC;AAED,UAAM,UAAU,qBAAA;AAChB,WAAO,KAAK,kDAAkD;AAG9D,WAAO,MAAM,qCAAqC;AAGlD,QAAI,OAAO,IAAI;AACb,aAAO,MAAM,6CAA6C;AAC1D,UAAI;AACF,eAAO,KAAA;AAAA,MACT,SAAS,GAAG;AACV,eAAO,MAAM,sCAAsC;AAAA,MACrD;AAAA,IACF;AAGA,UAAM,IAAI,QAAQ,CAAC,YAAY,WAAW,SAAS,GAAG,CAAC;AACvD,WAAO,MAAM,sCAAsC;AAAA,EACrD,SAAS,OAAO;AACd,UAAM,eAAe;AAAA,MACnB,SAAS,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,MAC9D,OAAO,iBAAiB,QAAQ,MAAM,QAAQ;AAAA,MAC9C,MAAM,iBAAiB,QAAQ,MAAM,OAAO;AAAA,MAC5C,GAAI,SAAS,OAAO,UAAU,WAAW,QAAQ,CAAA;AAAA;AAAA,IAAC;AAEpD,WAAO,MAAM,0CAA0C;AAAA,MACrD,OAAO;AAAA,IAAA,CACR;AAED,UAAM;AAAA,EACR;AACF;AAKA,MAAM,kBAAkB,QAAQ,KAAK,CAAC,EAAE,QAAQ,OAAO,GAAG;AAC1D,MAAM,UAAU,gBAAgB,WAAW,GAAG,IAC1C,UAAU,eAAe,KACzB,WAAW,eAAe;AAC9B,MAAM,eAAe,YAAY,QAAQ;AAEzC,IAAI,cAAc;AAEhB,MAAI;AACF,UAAM,KAAA;AAAA,EACR,QAAQ;AACN,YAAQ,WAAW;AAAA,EACrB;AACF;"}
package/dist/services/data-sync-service/jobs/retry-failed-labor-tickets.js
CHANGED
@@ -1,6 +1,6 @@
-import "../../../config-WKwu1mMo.js";
-import { l as logger } from "../../../logger-hqtl8hFM.js";
-import { c as createConnectorFromPath } from "../../../connector-factory-DHmMYsRs.js";
+import "../../../config-qat9zgOl.js";
+import { l as logger } from "../../../logger-Db8CkwR6.js";
+import { c as createConnectorFromPath } from "../../../connector-factory-C2czCs9v.js";
 logger.level = process.env.LOG_LEVEL || "info";
 const main = async () => {
   try {
package/dist/services/data-sync-service/jobs/run-migrations.js
CHANGED
@@ -1,5 +1,5 @@
 import knex from "knex";
-import { l as logger } from "../../../logger-hqtl8hFM.js";
+import { l as logger } from "../../../logger-Db8CkwR6.js";
 import { c as config } from "../../../knexfile-1qKKIORB.js";
 logger.level = process.env.LOG_LEVEL || "info";
 const db = knex(config.local);
package/dist/services/data-sync-service/jobs/to-erp.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"to-erp.d.ts","sourceRoot":"","sources":["../../../../src/services/data-sync-service/jobs/to-erp.ts"],"names":[],"mappings":"AAAA,OAAO,eAAe,CAAC;
+
{"version":3,"file":"to-erp.d.ts","sourceRoot":"","sources":["../../../../src/services/data-sync-service/jobs/to-erp.ts"],"names":[],"mappings":"AAAA,OAAO,eAAe,CAAC;AAqBvB,QAAA,MAAM,IAAI,qBAqDT,CAAC;AAoBF,eAAe,IAAI,CAAC"}
package/dist/services/data-sync-service/jobs/to-erp.js
CHANGED
@@ -1,7 +1,16 @@
-import "../../../config-WKwu1mMo.js";
-import { l as logger } from "../../../logger-hqtl8hFM.js";
-import { c as createConnectorFromPath } from "../../../connector-factory-DHmMYsRs.js";
+import "../../../config-qat9zgOl.js";
+import { l as logger } from "../../../logger-Db8CkwR6.js";
+import { c as createConnectorFromPath } from "../../../connector-factory-C2czCs9v.js";
 logger.level = process.env.LOG_LEVEL || "info";
+if (global.gc) {
+  process.on("exit", () => {
+    logger.debug("to-erp: Running garbage collection on exit");
+    try {
+      global.gc?.();
+    } catch (e) {
+    }
+  });
+}
 const main = async () => {
   try {
     logger.info('Worker for job "to-erp" online');
@@ -14,6 +23,17 @@ const main = async () => {
     await connector.syncToERP();
     await connector.syncToERPCompleted();
     logger.info("==========Completed to-erp job cycle==========");
+    logger.debug("to-erp: Starting cleanup sequence");
+    if (global.gc) {
+      logger.debug("to-erp: Running manual garbage collection");
+      try {
+        global.gc?.();
+      } catch (e) {
+        logger.debug("to-erp: GC not available or failed");
+      }
+    }
+    await new Promise((resolve) => setTimeout(resolve, 100));
+    logger.debug("to-erp: Cleanup sequence completed");
   } catch (error) {
     const errorDetails = {
       message: error instanceof Error ? error.message : String(error),
@@ -31,10 +51,7 @@ const main = async () => {
   }
 };
 const normalizedArgv1 = process.argv[1].replace(/\\/g, "/");
-const fileUrl = normalizedArgv1.startsWith("/") ? `file://${normalizedArgv1}` :
-// Unix: file:// + /path = file:///path
-`file:///${normalizedArgv1}`
-);
+const fileUrl = normalizedArgv1.startsWith("/") ? `file://${normalizedArgv1}` : `file:///${normalizedArgv1}`;
 const isMainModule = import.meta.url === fileUrl;
 if (isMainModule) {
   try {
package/dist/services/data-sync-service/jobs/to-erp.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"to-erp.js","sources":["../../../../src/services/data-sync-service/jobs/to-erp.ts"],"sourcesContent":["import \"dotenv/config\";\n\nimport logger from \"
+
{"version":3,"file":"to-erp.js","sources":["../../../../src/services/data-sync-service/jobs/to-erp.ts"],"sourcesContent":["import \"dotenv/config\";\n\nimport logger from \"../../../services/reporting-service/logger\";\nimport { SQLiteCoordinator } from \"../../sqlite-service\";\nimport { createConnectorFromPath } from \"../../../utils/connector-factory\";\n\n// Configure the logger with the correct log level\nlogger.level = process.env.LOG_LEVEL || \"info\";\n\n// Enable garbage collection on exit if available\nif (global.gc) {\n process.on(\"exit\", () => {\n logger.debug(\"to-erp: Running garbage collection on exit\");\n try {\n global.gc?.();\n } catch (e) {\n // Ignore GC errors\n }\n });\n}\n\nconst main = async () => {\n try {\n logger.info('Worker for job \"to-erp\" online');\n logger.info(\"==========Starting to-erp job cycle==========\");\n\n // Get the connector path from the environment variable\n const connectorPath = process.env.CONNECTOR_PATH;\n\n if (!connectorPath) {\n throw new Error(\"Connector path not provided in environment variables\");\n }\n\n // Create a new connector instance for this job\n const connector = await createConnectorFromPath(connectorPath);\n\n await connector.syncToERP();\n await connector.syncToERPCompleted();\n\n logger.info(\"==========Completed to-erp job cycle==========\");\n\n // Cleanup before worker exit\n logger.debug(\"to-erp: Starting cleanup sequence\");\n\n // Trigger garbage collection if available\n if (global.gc) {\n logger.debug(\"to-erp: Running manual garbage collection\");\n try {\n global.gc?.();\n } catch (e) {\n logger.debug(\"to-erp: GC not available or failed\");\n }\n }\n\n // Small delay to allow any pending operations to complete\n await new Promise((resolve) => setTimeout(resolve, 100));\n logger.debug(\"to-erp: Cleanup sequence completed\");\n } catch (error) {\n const errorDetails = {\n message: error instanceof Error ? error.message : String(error),\n stack: error instanceof Error ? error.stack : undefined,\n name: error instanceof Error ? error.name : undefined,\n ...(error && typeof error === \"object\" ? error : {}), // Include all enumerable properties if it's an object\n };\n logger.error('Worker for job \"to-erp\" had an error', {\n error: errorDetails,\n connectorPath: process.env.CONNECTOR_PATH,\n });\n\n // Also log to console for immediate visibility\n console.error(\"to-erp job error:\", error);\n\n throw error; // Rethrow so Bree can handle it properly\n }\n};\n\n// Cross-platform module detection fix for Bree compatibility\n// Windows: process.argv[1] uses backslashes, import.meta.url uses forward slashes\n// Linux/Mac: both use forward slashes, so this normalization is safe\nconst normalizedArgv1 = process.argv[1].replace(/\\\\/g, \"/\");\nconst fileUrl = normalizedArgv1.startsWith(\"/\")\n ? 
`file://${normalizedArgv1}` // Unix: file:// + /path = file:///path\n : `file:///${normalizedArgv1}`; // Windows: file:/// + C:/path = file:///C:/path\nconst isMainModule = import.meta.url === fileUrl;\n\nif (isMainModule) {\n // This is called when Bree runs this file as a worker\n try {\n await main();\n } catch {\n process.exitCode = 1; // prefer exitCode so stdout/stderr can flush\n }\n}\n\nexport default main;\n"],"names":[],"mappings":";;;AAOA,OAAO,QAAQ,QAAQ,IAAI,aAAa;AAGxC,IAAI,OAAO,IAAI;AACb,UAAQ,GAAG,QAAQ,MAAM;AACvB,WAAO,MAAM,4CAA4C;AACzD,QAAI;AACF,aAAO,KAAA;AAAA,IACT,SAAS,GAAG;AAAA,IAEZ;AAAA,EACF,CAAC;AACH;AAEA,MAAM,OAAO,YAAY;AACvB,MAAI;AACF,WAAO,KAAK,gCAAgC;AAC5C,WAAO,KAAK,+CAA+C;AAG3D,UAAM,gBAAgB,QAAQ,IAAI;AAElC,QAAI,CAAC,eAAe;AAClB,YAAM,IAAI,MAAM,sDAAsD;AAAA,IACxE;AAGA,UAAM,YAAY,MAAM,wBAAwB,aAAa;AAE7D,UAAM,UAAU,UAAA;AAChB,UAAM,UAAU,mBAAA;AAEhB,WAAO,KAAK,gDAAgD;AAG5D,WAAO,MAAM,mCAAmC;AAGhD,QAAI,OAAO,IAAI;AACb,aAAO,MAAM,2CAA2C;AACxD,UAAI;AACF,eAAO,KAAA;AAAA,MACT,SAAS,GAAG;AACV,eAAO,MAAM,oCAAoC;AAAA,MACnD;AAAA,IACF;AAGA,UAAM,IAAI,QAAQ,CAAC,YAAY,WAAW,SAAS,GAAG,CAAC;AACvD,WAAO,MAAM,oCAAoC;AAAA,EACnD,SAAS,OAAO;AACd,UAAM,eAAe;AAAA,MACnB,SAAS,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,MAC9D,OAAO,iBAAiB,QAAQ,MAAM,QAAQ;AAAA,MAC9C,MAAM,iBAAiB,QAAQ,MAAM,OAAO;AAAA,MAC5C,GAAI,SAAS,OAAO,UAAU,WAAW,QAAQ,CAAA;AAAA;AAAA,IAAC;AAEpD,WAAO,MAAM,wCAAwC;AAAA,MACnD,OAAO;AAAA,MACP,eAAe,QAAQ,IAAI;AAAA,IAAA,CAC5B;AAGD,YAAQ,MAAM,qBAAqB,KAAK;AAExC,UAAM;AAAA,EACR;AACF;AAKA,MAAM,kBAAkB,QAAQ,KAAK,CAAC,EAAE,QAAQ,OAAO,GAAG;AAC1D,MAAM,UAAU,gBAAgB,WAAW,GAAG,IAC1C,UAAU,eAAe,KACzB,WAAW,eAAe;AAC9B,MAAM,eAAe,YAAY,QAAQ;AAEzC,IAAI,cAAc;AAEhB,MAAI;AACF,UAAM,KAAA;AAAA,EACR,QAAQ;AACN,YAAQ,WAAW;AAAA,EACrB;AACF;"}
package/dist/services/psql-erp-service/configuration.d.ts.map
ADDED
@@ -0,0 +1 @@
+
{"version":3,"file":"configuration.d.ts","sourceRoot":"","sources":["../../../src/services/psql-erp-service/configuration.ts"],"names":[],"mappings":"AAAA,MAAM,WAAW,iBAAiB;IAChC,IAAI,EAAE,MAAM,CAAC;IACb,IAAI,EAAE,MAAM,CAAC;IACb,QAAQ,EAAE,MAAM,CAAC;IACjB,QAAQ,EAAE,MAAM,CAAC;IACjB,QAAQ,EAAE,MAAM,CAAC;IACjB,iBAAiB,CAAC,EAAE,MAAM,CAAC;IAC3B,cAAc,CAAC,EAAE,MAAM,CAAC;CACzB"}
package/dist/services/psql-erp-service/index.d.ts
ADDED
@@ -0,0 +1,19 @@
+import { PsqlService } from "./psql-service";
+import { PsqlLaborTicketOperations } from "./internal/psql-labor-ticket-operations";
+/**
+ * A class to manage interactions with PSQL (Pervasive) databases via ODBC
+ */
+export { PsqlService };
+/**
+ * Labor ticket operations for PSQL (Phase 2)
+ */
+export { PsqlLaborTicketOperations };
+/**
+ * Configuration interface for PSQL connections
+ */
+export type { PsqlConfiguration } from "./configuration";
+/**
+ * Helper functions for PSQL data formatting
+ */
+export { formatPsqlDate, formatPsqlTime, combinePsqlDateTime, isPsqlDateEmpty, cleanPsqlCharField, } from "./psql-helpers";
+//# sourceMappingURL=index.d.ts.map
package/dist/services/psql-erp-service/index.d.ts.map
ADDED
@@ -0,0 +1 @@
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/services/psql-erp-service/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,WAAW,EAAE,MAAM,gBAAgB,CAAC;AAC7C,OAAO,EAAE,yBAAyB,EAAE,MAAM,yCAAyC,CAAC;AAEpF;;GAEG;AACH,OAAO,EAAE,WAAW,EAAE,CAAC;AAEvB;;GAEG;AACH,OAAO,EAAE,yBAAyB,EAAE,CAAC;AAErC;;GAEG;AACH,YAAY,EAAE,iBAAiB,EAAE,MAAM,iBAAiB,CAAC;AAEzD;;GAEG;AACH,OAAO,EACL,cAAc,EACd,cAAc,EACd,mBAAmB,EACnB,eAAe,EACf,kBAAkB,GACnB,MAAM,gBAAgB,CAAC"}
package/dist/services/psql-erp-service/internal/psql-config.d.ts
ADDED
@@ -0,0 +1,28 @@
+import { z } from "zod";
+export declare const PsqlConfigSchema: z.ZodObject<{
+    host: z.ZodString;
+    port: z.ZodString;
+    database: z.ZodString;
+    username: z.ZodString;
+    password: z.ZodString;
+    connectionTimeout: z.ZodDefault<z.ZodOptional<z.ZodString>>;
+    requestTimeout: z.ZodDefault<z.ZodOptional<z.ZodString>>;
+}, "strip", z.ZodTypeAny, {
+    password: string;
+    database: string;
+    port: string;
+    connectionTimeout: string;
+    requestTimeout: string;
+    host: string;
+    username: string;
+}, {
+    password: string;
+    database: string;
+    port: string;
+    host: string;
+    username: string;
+    connectionTimeout?: string | undefined;
+    requestTimeout?: string | undefined;
+}>;
+export type PsqlConfig = z.infer<typeof PsqlConfigSchema>;
+//# sourceMappingURL=psql-config.d.ts.map
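For orientation, a hedged sketch of how a schema with this inferred shape could be parsed from environment variables; the re-declaration, the "30" defaults, and the PSQL_* variable names below are illustrative assumptions, not taken from the package:

import { z } from "zod";

// Illustrative re-declaration matching the PsqlConfigSchema shape above;
// defaults and env var names are assumptions.
const PsqlConfigSchema = z.object({
  host: z.string(),
  port: z.string(),
  database: z.string(),
  username: z.string(),
  password: z.string(),
  connectionTimeout: z.string().optional().default("30"),
  requestTimeout: z.string().optional().default("30"),
});

// Throws a ZodError if any required variable is missing or not a string.
const psqlConfig = PsqlConfigSchema.parse({
  host: process.env.PSQL_HOST,
  port: process.env.PSQL_PORT,
  database: process.env.PSQL_DATABASE,
  username: process.env.PSQL_USERNAME,
  password: process.env.PSQL_PASSWORD,
});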
package/dist/services/psql-erp-service/internal/psql-config.d.ts.map
ADDED
@@ -0,0 +1 @@
+
{"version":3,"file":"psql-config.d.ts","sourceRoot":"","sources":["../../../../src/services/psql-erp-service/internal/psql-config.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,CAAC,EAAE,MAAM,KAAK,CAAC;AAExB,eAAO,MAAM,gBAAgB;;;;;;;;;;;;;;;;;;;;;;;;EAQ3B,CAAC;AAEH,MAAM,MAAM,UAAU,GAAG,CAAC,CAAC,KAAK,CAAC,OAAO,gBAAgB,CAAC,CAAC"}
package/dist/services/psql-erp-service/internal/psql-labor-ticket-operations.d.ts
ADDED
@@ -0,0 +1,40 @@
+/**
+ * PSQL Labor Ticket Operations
+ *
+ * Phase 2: This will handle INSERT/UPDATE/DELETE operations for labor tickets
+ * Phase 1: Placeholder - throws errors if called
+ */
+import { PsqlService } from "../psql-service";
+import { MMReceiveLaborTicket } from "../../../services/mm-api-service";
+export declare class PsqlLaborTicketOperations {
+    private service;
+    constructor(service: PsqlService);
+    /**
+     * Create labor ticket in START_LABOR table
+     *
+     * Phase 2 Implementation Notes:
+     * - Will use prepared statements with parameter binding
+     * - Insert into START_LABOR table
+     * - Return GUID as erpUid
+     *
+     * @param laborTicket Labor ticket from MachineMetrics
+     * @returns Labor ticket and ERP unique ID
+     */
+    createLaborTicket(laborTicket: MMReceiveLaborTicket): Promise<{
+        laborTicket: MMReceiveLaborTicket;
+        erpUid: string;
+    }>;
+    /**
+     * Update labor ticket (move from START_LABOR to COMPLETED_LABOR)
+     *
+     * Phase 2 Implementation Notes:
+     * - Insert into COMPLETED_LABOR
+     * - Delete from START_LABOR
+     * - Should be done in a transaction
+     *
+     * @param laborTicket Labor ticket to update
+     * @returns Updated labor ticket
+     */
+    updateLaborTicket(laborTicket: MMReceiveLaborTicket): Promise<MMReceiveLaborTicket>;
+}
+//# sourceMappingURL=psql-labor-ticket-operations.d.ts.map
package/dist/services/psql-erp-service/internal/psql-labor-ticket-operations.d.ts.map
ADDED
@@ -0,0 +1 @@
+
{"version":3,"file":"psql-labor-ticket-operations.d.ts","sourceRoot":"","sources":["../../../../src/services/psql-erp-service/internal/psql-labor-ticket-operations.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAEH,OAAO,EAAE,WAAW,EAAE,MAAM,iBAAiB,CAAC;AAC9C,OAAO,EAAE,oBAAoB,EAAE,MAAM,kCAAkC,CAAC;AAGxE,qBAAa,yBAAyB;IACxB,OAAO,CAAC,OAAO;gBAAP,OAAO,EAAE,WAAW;IAExC;;;;;;;;;;OAUG;IACG,iBAAiB,CACrB,WAAW,EAAE,oBAAoB,GAChC,OAAO,CAAC;QAAE,WAAW,EAAE,oBAAoB,CAAC;QAAC,MAAM,EAAE,MAAM,CAAA;KAAE,CAAC;IASjE;;;;;;;;;;OAUG;IACG,iBAAiB,CACrB,WAAW,EAAE,oBAAoB,GAChC,OAAO,CAAC,oBAAoB,CAAC;CAQjC"}
package/dist/services/psql-erp-service/internal/types/psql-types.d.ts
ADDED
@@ -0,0 +1,15 @@
+/**
+ * PSQL-specific type definitions
+ */
+export interface PsqlConnectionOptions {
+    connectionString: string;
+}
+export interface OdbcError {
+    state: string;
+    message: string;
+    code?: number;
+}
+export interface OdbcErrorResponse extends Error {
+    odbcErrors?: OdbcError[];
+}
+//# sourceMappingURL=psql-types.d.ts.map
package/dist/services/psql-erp-service/internal/types/psql-types.d.ts.map
ADDED
@@ -0,0 +1 @@
+
{"version":3,"file":"psql-types.d.ts","sourceRoot":"","sources":["../../../../../src/services/psql-erp-service/internal/types/psql-types.ts"],"names":[],"mappings":"AAAA;;GAEG;AAEH,MAAM,WAAW,qBAAqB;IACpC,gBAAgB,EAAE,MAAM,CAAC;CAC1B;AAED,MAAM,WAAW,SAAS;IACxB,KAAK,EAAE,MAAM,CAAC;IACd,OAAO,EAAE,MAAM,CAAC;IAChB,IAAI,CAAC,EAAE,MAAM,CAAC;CACf;AAED,MAAM,WAAW,iBAAkB,SAAQ,KAAK;IAC9C,UAAU,CAAC,EAAE,SAAS,EAAE,CAAC;CAC1B"}
package/dist/services/psql-erp-service/psql-helpers.d.ts
ADDED
@@ -0,0 +1,32 @@
+/**
+ * Helper functions for PSQL/Pervasive database operations
+ */
+/**
+ * Formats a date from PSQL YYMMDD format to ISO date string
+ * @param psqlDate Date in YYMMDD format (e.g., "250105" for Jan 5, 2025)
+ * @returns ISO date string (e.g., "2025-01-05") or null if invalid
+ */
+export declare function formatPsqlDate(psqlDate: string): string | null;
+/**
+ * Formats a time from PSQL HHMM format to HH:MM:SS
+ * @param psqlTime Time in HHMM format (e.g., "1430" for 2:30 PM)
+ * @returns Time string in HH:MM:SS format or null if invalid
+ */
+export declare function formatPsqlTime(psqlTime: string): string | null;
+/**
+ * Combines PSQL date and time into ISO datetime string
+ * @param psqlDate Date in YYMMDD format
+ * @param psqlTime Time in HHMM format
+ * @returns ISO datetime string or null if invalid
+ */
+export declare function combinePsqlDateTime(psqlDate: string, psqlTime: string): string | null;
+/**
+ * Helper to check if a PSQL date is "empty" (000000 or blank)
+ */
+export declare function isPsqlDateEmpty(psqlDate: string): boolean;
+/**
+ * Clean and trim PSQL CHAR field (removes trailing spaces)
+ * PSQL CHAR fields are fixed-width and padded with spaces
+ */
+export declare function cleanPsqlCharField(value: string | null | undefined): string;
+//# sourceMappingURL=psql-helpers.d.ts.map
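The JSDoc above documents the PSQL date/time conventions these helpers expect. A hedged usage sketch, with expected results taken from that documentation rather than verified against the implementation:

import {
  formatPsqlDate,
  formatPsqlTime,
  combinePsqlDateTime,
  isPsqlDateEmpty,
  cleanPsqlCharField,
} from "@machinemetrics/mm-erp-sdk";

// Per the JSDoc: dates are YYMMDD strings, times are HHMM strings.
formatPsqlDate("250105");              // expected "2025-01-05"
formatPsqlTime("1430");                // expected "14:30:00"
combinePsqlDateTime("250105", "1430"); // expected an ISO datetime string, or null if invalid
isPsqlDateEmpty("000000");             // expected true ("empty" date convention)
cleanPsqlCharField("OP10   ");         // expected "OP10" with trailing CHAR padding removed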
package/dist/services/psql-erp-service/psql-helpers.d.ts.map
ADDED
@@ -0,0 +1 @@
+
{"version":3,"file":"psql-helpers.d.ts","sourceRoot":"","sources":["../../../src/services/psql-erp-service/psql-helpers.ts"],"names":[],"mappings":"AAAA;;GAEG;AAEH;;;;GAIG;AACH,wBAAgB,cAAc,CAAC,QAAQ,EAAE,MAAM,GAAG,MAAM,GAAG,IAAI,CAuB9D;AAED;;;;GAIG;AACH,wBAAgB,cAAc,CAAC,QAAQ,EAAE,MAAM,GAAG,MAAM,GAAG,IAAI,CAY9D;AAED;;;;;GAKG;AACH,wBAAgB,mBAAmB,CACjC,QAAQ,EAAE,MAAM,EAChB,QAAQ,EAAE,MAAM,GACf,MAAM,GAAG,IAAI,CASf;AAED;;GAEG;AACH,wBAAgB,eAAe,CAAC,QAAQ,EAAE,MAAM,GAAG,OAAO,CAEzD;AAED;;;GAGG;AACH,wBAAgB,kBAAkB,CAAC,KAAK,EAAE,MAAM,GAAG,IAAI,GAAG,SAAS,GAAG,MAAM,CAK3E"}
package/dist/services/psql-erp-service/psql-service.d.ts
ADDED
@@ -0,0 +1,36 @@
+import { PsqlConfiguration } from "./configuration";
+import { ERPResponse } from "../../types/erp-types";
+type PagingParams = {
+    limit?: number;
+    offset?: number;
+};
+export declare class PsqlService {
+    private config;
+    constructor(config: PsqlConfiguration);
+    /**
+     * Build PSQL ODBC connection string
+     * CRITICAL: ServerName must use IP.PORT format (e.g., 10.4.0.11.1583)
+     */
+    private buildConnectionString;
+    /**
+     * Execute a query and return the results
+     * Creates a fresh connection for each query to avoid handle corruption
+     *
+     * @param query The SQL query to execute
+     * @param params Query parameters (currently unused for PSQL read operations)
+     * @param paging Optional paging parameters
+     * @returns The entities fetched from the database, along with paging information
+     */
+    executePreparedStatement(query: string, params?: Record<string, string>, paging?: PagingParams): Promise<ERPResponse | undefined>;
+    /**
+     * Transform ODBC result set to array of Record<string, string> instances.
+     * IMPORTANT: PSQL CHAR fields are often padded with spaces - we trim them
+     */
+    static recordsetToRecords(recordset: any[]): Record<string, string>[];
+    /**
+     * Handle ODBC errors and provide meaningful messages
+     */
+    private handleOdbcError;
+}
+export {};
+//# sourceMappingURL=psql-service.d.ts.map
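A hedged sketch of wiring this service up through the package's public exports; the configuration values, field names, and query text are placeholders (the exact PsqlConfiguration fields are not shown in this diff), and per the comments above the ODBC ServerName is derived from host and port:

import { PsqlService, type PsqlConfiguration } from "@machinemetrics/mm-erp-sdk";

// Placeholder configuration; field names assumed to mirror PsqlConfigSchema above.
const config: PsqlConfiguration = {
  host: "10.4.0.11",   // combined with port into the ServerName, e.g. 10.4.0.11.1583
  port: "1583",
  database: "ERPDATA",
  username: "reader",
  password: "secret",
};

const service = new PsqlService(config);

// A fresh ODBC connection is opened per query (per the declaration comments);
// the table name below is a placeholder.
const response = await service.executePreparedStatement(
  "SELECT * FROM START_LABOR",
  {},
  { limit: 100, offset: 0 },
);
// response is ERPResponse | undefined per the declaration above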
package/dist/services/psql-erp-service/psql-service.d.ts.map
ADDED
@@ -0,0 +1 @@
+
{"version":3,"file":"psql-service.d.ts","sourceRoot":"","sources":["../../../src/services/psql-erp-service/psql-service.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,iBAAiB,EAAE,MAAM,iBAAiB,CAAC;AACpD,OAAO,EAAE,WAAW,EAAE,MAAM,uBAAuB,CAAC;AAIpD,KAAK,YAAY,GAAG;IAClB,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,MAAM,CAAC,EAAE,MAAM,CAAC;CACjB,CAAC;AAEF,qBAAa,WAAW;IACtB,OAAO,CAAC,MAAM,CAAoB;gBAEtB,MAAM,EAAE,iBAAiB;IASrC;;;OAGG;IACH,OAAO,CAAC,qBAAqB;IAe7B;;;;;;;;OAQG;IACU,wBAAwB,CACnC,KAAK,EAAE,MAAM,EACb,MAAM,GAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAM,EACnC,MAAM,CAAC,EAAE,YAAY,GACpB,OAAO,CAAC,WAAW,GAAG,SAAS,CAAC;IAqEnC;;;OAGG;WACW,kBAAkB,CAAC,SAAS,EAAE,GAAG,EAAE,GAAG,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,EAAE;IAkB5E;;OAEG;IACH,OAAO,CAAC,eAAe;CA4BxB"}
package/dist/types/erp-types.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"erp-types.d.ts","sourceRoot":"","sources":["../../src/types/erp-types.ts"],"names":[],"mappings":"AACA,oBAAY,OAAO;IACjB,OAAO,YAAY;IACnB,IAAI,SAAS;IACb,QAAQ,aAAa;IACrB,SAAS,cAAc;IACvB,OAAO,YAAY;IACnB,QAAQ,aAAa;IACrB,QAAQ,aAAa;
+
{"version":3,"file":"erp-types.d.ts","sourceRoot":"","sources":["../../src/types/erp-types.ts"],"names":[],"mappings":"AACA,oBAAY,OAAO;IACjB,OAAO,YAAY;IACnB,IAAI,SAAS;IACb,QAAQ,aAAa;IACrB,SAAS,cAAc;IACvB,OAAO,YAAY;IACnB,QAAQ,aAAa;IACrB,QAAQ,aAAa;IACrB,UAAU,eAAe;CAC1B;AAED,oBAAY,UAAU;IACpB,SAAS,IAAA;IACT,KAAK,IAAA;IACL,cAAc,IAAA;IACd,WAAW,IAAA;IACX,qBAAqB,IAAA;IACrB,OAAO,IAAA;IACP,OAAO,IAAA;IACP,aAAa,IAAA;CAEd;AAED,MAAM,WAAW,aAAa;IAC5B,KAAK,EAAE,MAAM,CAAC;IACd,KAAK,EAAE,MAAM,CAAC;IACd,QAAQ,EAAE,MAAM,GAAG,SAAS,CAAC;IAC7B,YAAY,EAAE,MAAM,GAAG,SAAS,CAAC;IACjC,MAAM,CAAC,EAAE,MAAM,CAAC;CACjB;AAED,MAAM,WAAW,WAAW;IAC1B,IAAI,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,EAAE,CAAC;IAC/B,MAAM,EAAE,aAAa,CAAC;CACvB;AAED,MAAM,MAAM,SAAS,GAAG,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC"}
package/dist/utils/standard-process-drivers/labor-ticket-erp-synchronizer.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"labor-ticket-erp-synchronizer.d.ts","sourceRoot":"","sources":["../../../src/utils/standard-process-drivers/labor-ticket-erp-synchronizer.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,OAAO,EAAE,MAAM,uBAAuB,CAAC;AAChD,OAAO,EAAE,sBAAsB,EAAE,MAAM,2BAA2B,CAAC;AAOnE;;GAEG;AACH,qBAAa,0BAA0B;IACrC;;OAEG;WACU,SAAS,CACpB,aAAa,EAAE,OAAO,EACtB,SAAS,EAAE,sBAAsB,GAChC,OAAO,CAAC,IAAI,CAAC;
+
{"version":3,"file":"labor-ticket-erp-synchronizer.d.ts","sourceRoot":"","sources":["../../../src/utils/standard-process-drivers/labor-ticket-erp-synchronizer.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,OAAO,EAAE,MAAM,uBAAuB,CAAC;AAChD,OAAO,EAAE,sBAAsB,EAAE,MAAM,2BAA2B,CAAC;AAOnE;;GAEG;AACH,qBAAa,0BAA0B;IACrC;;OAEG;WACU,SAAS,CACpB,aAAa,EAAE,OAAO,EACtB,SAAS,EAAE,sBAAsB,GAChC,OAAO,CAAC,IAAI,CAAC;IA4GhB;;OAEG;WACU,WAAW,CACtB,aAAa,EAAE,OAAO,EACtB,SAAS,EAAE,sBAAsB,GAChC,OAAO,CAAC,IAAI,CAAC;mBA+DK,sBAAsB;mBAetB,kBAAkB;CAkDxC"}
package/package.json
CHANGED
@@ -1,7 +1,7 @@
 {
   "name": "@machinemetrics/mm-erp-sdk",
   "description": "A library for syncing data between MachineMetrics and ERP systems",
-  "version": "0.1.6-beta.2",
+  "version": "0.1.7-beta.1",
   "license": "MIT",
   "author": "machinemetrics",
   "main": "dist/mm-erp-sdk.js",
@@ -19,6 +19,7 @@
   "dependencies": {
     "@azure/msal-node": "^2.12.0",
     "@ladjs/graceful": "^4.2.0",
+    "aws-sdk": "^2.1691.0",
     "axios": "^1.7.3",
     "axios-retry": "^4.5.0",
     "better-sqlite3": "^11.3.0",
@@ -28,6 +29,7 @@
     "knex": "^3.1.0",
     "lodash": "^4.17.21",
     "mssql": "^11.0.1",
+    "odbc": "^2.4.8",
     "winston": "^3.14.0",
     "winston-daily-rotate-file": "^5.0.0",
     "xxhashjs": "^0.2.2",
package/src/index.ts
CHANGED
@@ -17,13 +17,13 @@ export type { ERPApiConfig } from "./services/erp-api-services/types";
 // MM API client and types
 export { MMApiClient } from "./services/mm-api-service";
 export { MMReceiveLaborTicket } from "./services/mm-api-service";
-export type {
+export type {
   MMReceiveLaborTicketReason,
   MMReceiveLaborTicketWorkOrderOperation,
   IToRESTApiObject,
 } from "./services/mm-api-service";

-export {
+export {
   MMSendPerson,
   MMSendResource,
   MMSendPart,
@@ -57,7 +57,11 @@ export { getInitialLoadComplete, setInitialLoadComplete } from "./utils";
 // HTTP client factory and types for custom API integrations
 export { HTTPClientFactory } from "./utils/http-client";
 export { HTTPError } from "./utils/http-client";
-export type {
+export type {
+  HTTPResponse,
+  HTTPClient,
+  HTTPRequestConfig,
+} from "./utils/http-client";

 // Application initialization utilities
 export { ApplicationInitializer } from "./utils/application-initializer";
@@ -77,7 +81,7 @@ export type { WriteEntitiesToMMResult } from "./utils";
 export { MMBatchValidationError } from "./utils";

 // API services
-export type { APIResponse } from
+export type { APIResponse } from "./services/erp-api-services/types";
 export { RestAPIService } from "./services/erp-api-services/rest/rest-api-service";
 export { ErrorHandler, GraphQLError } from "./services/erp-api-services/errors";
 export type {
@@ -105,6 +109,18 @@ export {
 } from "./services/sql-server-erp-service";
 export type { SQLInput } from "./services/sql-server-erp-service";

+// PSQL (Pervasive) services
+export {
+  PsqlService,
+  PsqlLaborTicketOperations,
+  formatPsqlDate,
+  formatPsqlTime,
+  combinePsqlDateTime,
+  isPsqlDateEmpty,
+  cleanPsqlCharField,
+} from "./services/psql-erp-service";
+export type { PsqlConfiguration } from "./services/psql-erp-service";
+
 // Record tracking services
 export { RecordTrackingManager } from "./services/caching-service/record-tracking-manager";
 export type { RecordTrackingObject } from "./services/caching-service/record-tracking-manager";
@@ -113,4 +129,10 @@ export type { RecordTrackingObject } from "./services/caching-service/record-tra
 export { default as knexDatabaseConfig } from "./knexfile";

 // MM Connector Logging
-export {
+export {
+  MMConnectorLogger,
+  FileLogDeduper,
+  LogEntry,
+  type LogLevelString,
+  type LogResponse,
+} from "./utils";
package/src/services/data-sync-service/data-sync-service.ts
CHANGED
@@ -27,6 +27,9 @@ export const runDataSyncService = async (connectorPath: string) => {
   const bree = new Bree({
     root: jobsPath,
     logger,
+    // Enable worker metadata for debugging
+    outputWorkerMetadata: true,
+
     worker: {
       env: {
         CONNECTOR_PATH: connectorPath,
@@ -42,7 +45,13 @@ export const runDataSyncService = async (connectorPath: string) => {
         // Pass through all other environment variables that might be needed
         ...process.env,
       },
+
+      // Add worker options for better isolation and cleanup
+      execArgv: [
+        "--expose-gc", // Allow manual garbage collection
+      ],
     },
+
     jobs: [
       // {
       //   name: 'run-migrations', // Running this once on startup will create the tables in the sqlite database
@@ -51,19 +60,24 @@ export const runDataSyncService = async (connectorPath: string) => {
         name: "from-erp",
         timeout: "10s",
         interval: config.fromErpInterval,
+        // Ensure worker terminates completely after execution
+        closeWorkerAfterMs: 1000,
       },
       {
         name: "to-erp",
         //timeout: '3s', // Use timeout during development to see the job in action quickly
         interval: config.toErpInterval,
+        closeWorkerAfterMs: 1000,
       },
       {
         name: "retry-failed-labor-tickets",
         interval: config.retryLaborTicketsInterval,
+        closeWorkerAfterMs: 1000,
      },
       {
         name: "clean-up-expired-cache",
         interval: config.cacheExpirationCheckInterval,
+        closeWorkerAfterMs: 1000,
       },
     ],
   });
@@ -75,15 +89,41 @@ export const runDataSyncService = async (connectorPath: string) => {
     name: job.name,
     interval: job.interval,
     timeout: job.timeout,
+    closeWorkerAfterMs: job.closeWorkerAfterMs,
   }));
   logger.info("JOBS CONFIGURATION:", { jobs: jobsConfig });

   const graceful = new Graceful({ brees: [bree] });
   graceful.listen();

-
-
-
+  // Enhanced event handlers for debugging worker lifecycle
+  bree.on("worker created", (name) => {
+    logger.debug(`Worker created for job: ${name}`);
+  });
+
+  bree.on("worker online", (name) => {
+    logger.debug(`Worker online for job: ${name}`);
+  });
+
+  bree.on("worker message", (name, message) => {
+    logger.debug(`Worker message from ${name}:`, message);
+  });
+
+  bree.on("worker deleted", (name) => {
+    logger.debug(`Worker deleted for job: ${name}`);
+  });
+
+  bree.on("worker exit", (name, code, signal) => {
+    if (code !== 0) {
+      logger.error(
+        `Worker ${name} exited with code ${code}, signal ${signal}`
+      );
+    } else {
+      logger.debug(`Worker ${name} exited successfully`);
+    }
+  });
+
+  await bree.start();

   bree.on("jobStarted", (job) => {
     console.log("Job " + job.name + " started");