@machinemetrics/mm-erp-sdk 0.1.7-beta.2 → 0.1.8-beta.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (56)
  1. package/dist/{config-qat9zgOl.js → config-CV-KosWV.js} +2 -2
  2. package/dist/{config-qat9zgOl.js.map → config-CV-KosWV.js.map} +1 -1
  3. package/dist/{connector-factory-C2czCs9v.js → connector-factory-D8v6aQIt.js} +2 -2
  4. package/dist/{connector-factory-C2czCs9v.js.map → connector-factory-D8v6aQIt.js.map} +1 -1
  5. package/dist/{hashed-cache-manager-CzyFSt2B.js → hashed-cache-manager-B6hTDLxU.js} +4 -4
  6. package/dist/{hashed-cache-manager-CzyFSt2B.js.map → hashed-cache-manager-B6hTDLxU.js.map} +1 -1
  7. package/dist/{index-B9wo8pld.js → index-Bg76oouR.js} +2 -2
  8. package/dist/{index-B9wo8pld.js.map → index-Bg76oouR.js.map} +1 -1
  9. package/dist/index.d.ts +3 -5
  10. package/dist/index.d.ts.map +1 -1
  11. package/dist/{logger-Db8CkwR6.js → logger-SqdNut1H.js} +1040 -934
  12. package/dist/logger-SqdNut1H.js.map +1 -0
  13. package/dist/mm-erp-sdk.js +14 -268
  14. package/dist/mm-erp-sdk.js.map +1 -1
  15. package/dist/services/data-sync-service/data-sync-service.d.ts.map +1 -1
  16. package/dist/services/data-sync-service/jobs/clean-up-expired-cache.js +4 -4
  17. package/dist/services/data-sync-service/jobs/from-erp.d.ts.map +1 -1
  18. package/dist/services/data-sync-service/jobs/from-erp.js +8 -25
  19. package/dist/services/data-sync-service/jobs/from-erp.js.map +1 -1
  20. package/dist/services/data-sync-service/jobs/retry-failed-labor-tickets.js +3 -3
  21. package/dist/services/data-sync-service/jobs/run-migrations.js +1 -1
  22. package/dist/services/data-sync-service/jobs/to-erp.d.ts.map +1 -1
  23. package/dist/services/data-sync-service/jobs/to-erp.js +7 -24
  24. package/dist/services/data-sync-service/jobs/to-erp.js.map +1 -1
  25. package/dist/services/reporting-service/logger.d.ts.map +1 -1
  26. package/dist/types/erp-types.d.ts +1 -2
  27. package/dist/types/erp-types.d.ts.map +1 -1
  28. package/package.json +1 -3
  29. package/src/index.ts +5 -27
  30. package/src/services/data-sync-service/data-sync-service.ts +3 -38
  31. package/src/services/data-sync-service/jobs/from-erp.ts +6 -35
  32. package/src/services/data-sync-service/jobs/to-erp.ts +5 -35
  33. package/src/services/reporting-service/logger.ts +86 -11
  34. package/src/types/erp-types.ts +0 -1
  35. package/dist/logger-Db8CkwR6.js.map +0 -1
  36. package/dist/services/psql-erp-service/configuration.d.ts +0 -10
  37. package/dist/services/psql-erp-service/configuration.d.ts.map +0 -1
  38. package/dist/services/psql-erp-service/index.d.ts +0 -19
  39. package/dist/services/psql-erp-service/index.d.ts.map +0 -1
  40. package/dist/services/psql-erp-service/internal/psql-config.d.ts +0 -28
  41. package/dist/services/psql-erp-service/internal/psql-config.d.ts.map +0 -1
  42. package/dist/services/psql-erp-service/internal/psql-labor-ticket-operations.d.ts +0 -40
  43. package/dist/services/psql-erp-service/internal/psql-labor-ticket-operations.d.ts.map +0 -1
  44. package/dist/services/psql-erp-service/internal/types/psql-types.d.ts +0 -15
  45. package/dist/services/psql-erp-service/internal/types/psql-types.d.ts.map +0 -1
  46. package/dist/services/psql-erp-service/psql-helpers.d.ts +0 -32
  47. package/dist/services/psql-erp-service/psql-helpers.d.ts.map +0 -1
  48. package/dist/services/psql-erp-service/psql-service.d.ts +0 -36
  49. package/dist/services/psql-erp-service/psql-service.d.ts.map +0 -1
  50. package/src/services/psql-erp-service/configuration.ts +0 -9
  51. package/src/services/psql-erp-service/index.ts +0 -28
  52. package/src/services/psql-erp-service/internal/psql-config.ts +0 -13
  53. package/src/services/psql-erp-service/internal/psql-labor-ticket-operations.ts +0 -58
  54. package/src/services/psql-erp-service/internal/types/psql-types.ts +0 -17
  55. package/src/services/psql-erp-service/psql-helpers.ts +0 -90
  56. package/src/services/psql-erp-service/psql-service.ts +0 -178
package/dist/services/data-sync-service/data-sync-service.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"data-sync-service.d.ts","sourceRoot":"","sources":["../../../src/services/data-sync-service/data-sync-service.ts"],"names":[],"mappings":"AAOA,eAAO,MAAM,kBAAkB,GAAU,eAAe,MAAM,kBA0I7D,CAAC"}
+ {"version":3,"file":"data-sync-service.d.ts","sourceRoot":"","sources":["../../../src/services/data-sync-service/data-sync-service.ts"],"names":[],"mappings":"AAOA,eAAO,MAAM,kBAAkB,GAAU,eAAe,MAAM,kBAuG7D,CAAC"}
package/dist/services/data-sync-service/jobs/clean-up-expired-cache.js CHANGED
@@ -1,7 +1,7 @@
- import "../../../config-qat9zgOl.js";
- import { H as HashedCacheManager } from "../../../hashed-cache-manager-CzyFSt2B.js";
- import { S as SQLiteCoordinator } from "../../../index-B9wo8pld.js";
- import { l as logger } from "../../../logger-Db8CkwR6.js";
+ import "../../../config-CV-KosWV.js";
+ import { H as HashedCacheManager } from "../../../hashed-cache-manager-B6hTDLxU.js";
+ import { S as SQLiteCoordinator } from "../../../index-Bg76oouR.js";
+ import { l as logger } from "../../../logger-SqdNut1H.js";
  logger.level = process.env.LOG_LEVEL || "info";
  const main = async () => {
  const cacheManager = new HashedCacheManager();
package/dist/services/data-sync-service/jobs/from-erp.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"from-erp.d.ts","sourceRoot":"","sources":["../../../../src/services/data-sync-service/jobs/from-erp.ts"],"names":[],"mappings":"AAAA,OAAO,eAAe,CAAC;AAqBvB,QAAA,MAAM,IAAI,qBAmDT,CAAC;AAoBF,eAAe,IAAI,CAAC"}
+ {"version":3,"file":"from-erp.d.ts","sourceRoot":"","sources":["../../../../src/services/data-sync-service/jobs/from-erp.ts"],"names":[],"mappings":"AAAA,OAAO,eAAe,CAAC;AASvB,QAAA,MAAM,IAAI,qBAkCT,CAAC;AAoBF,eAAe,IAAI,CAAC"}
package/dist/services/data-sync-service/jobs/from-erp.js CHANGED
@@ -1,17 +1,8 @@
- import "../../../config-qat9zgOl.js";
- import { l as logger } from "../../../logger-Db8CkwR6.js";
- import { S as SQLiteCoordinator } from "../../../index-B9wo8pld.js";
- import { c as createConnectorFromPath } from "../../../connector-factory-C2czCs9v.js";
+ import "../../../config-CV-KosWV.js";
+ import { l as logger } from "../../../logger-SqdNut1H.js";
+ import { S as SQLiteCoordinator } from "../../../index-Bg76oouR.js";
+ import { c as createConnectorFromPath } from "../../../connector-factory-D8v6aQIt.js";
  logger.level = process.env.LOG_LEVEL || "info";
- if (global.gc) {
- process.on("exit", () => {
- logger.debug("from-erp: Running garbage collection on exit");
- try {
- global.gc?.();
- } catch (e) {
- }
- });
- }
  const main = async () => {
  try {
  logger.info('Worker for job "from-erp" online');
@@ -26,17 +17,6 @@ const main = async () => {
  });
  await connector.syncFromERPCompleted();
  logger.info("==========Completed from-erp job cycle==========");
- logger.debug("from-erp: Starting cleanup sequence");
- if (global.gc) {
- logger.debug("from-erp: Running manual garbage collection");
- try {
- global.gc?.();
- } catch (e) {
- logger.debug("from-erp: GC not available or failed");
- }
- }
- await new Promise((resolve) => setTimeout(resolve, 100));
- logger.debug("from-erp: Cleanup sequence completed");
  } catch (error) {
  const errorDetails = {
  message: error instanceof Error ? error.message : String(error),
@@ -52,7 +32,10 @@ const main = async () => {
  }
  };
  const normalizedArgv1 = process.argv[1].replace(/\\/g, "/");
- const fileUrl = normalizedArgv1.startsWith("/") ? `file://${normalizedArgv1}` : `file:///${normalizedArgv1}`;
+ const fileUrl = normalizedArgv1.startsWith("/") ? `file://${normalizedArgv1}` : (
+ // Unix: file:// + /path = file:///path
+ `file:///${normalizedArgv1}`
+ );
  const isMainModule = import.meta.url === fileUrl;
  if (isMainModule) {
  try {
package/dist/services/data-sync-service/jobs/from-erp.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"from-erp.js","sources":["../../../../src/services/data-sync-service/jobs/from-erp.ts"],"sourcesContent":["import \"dotenv/config\";\n\nimport logger from \"../../../services/reporting-service/logger\";\nimport { SQLiteCoordinator } from \"../../sqlite-service\";\nimport { createConnectorFromPath } from \"../../../utils/connector-factory\";\n\n// Configure the logger with the correct log level\nlogger.level = process.env.LOG_LEVEL || \"info\";\n\n// Enable garbage collection on exit if available\nif (global.gc) {\n process.on(\"exit\", () => {\n logger.debug(\"from-erp: Running garbage collection on exit\");\n try {\n global.gc?.();\n } catch (e) {\n // Ignore GC errors\n }\n });\n}\n\nconst main = async () => {\n try {\n logger.info('Worker for job \"from-erp\" online');\n logger.info(\"==========Starting from-erp job cycle==========\");\n\n // Get the connector path from the environment variable\n const connectorPath = process.env.CONNECTOR_PATH;\n\n if (!connectorPath) {\n throw new Error(\"Connector path not provided in environment variables\");\n }\n\n // Create a new connector instance for this job\n const connector = await createConnectorFromPath(connectorPath);\n\n await SQLiteCoordinator.executeWithLock(\"from-erp\", async () => {\n await connector.syncFromERP();\n });\n\n await connector.syncFromERPCompleted();\n logger.info(\"==========Completed from-erp job cycle==========\");\n\n // Cleanup before worker exit\n logger.debug(\"from-erp: Starting cleanup sequence\");\n\n // Trigger garbage collection if available\n if (global.gc) {\n logger.debug(\"from-erp: Running manual garbage collection\");\n try {\n global.gc?.();\n } catch (e) {\n logger.debug(\"from-erp: GC not available or failed\");\n }\n }\n\n // Small delay to allow any pending operations to complete\n await new Promise((resolve) => setTimeout(resolve, 100));\n logger.debug(\"from-erp: Cleanup sequence completed\");\n } catch (error) {\n const errorDetails = {\n message: error instanceof Error ? error.message : String(error),\n stack: error instanceof Error ? error.stack : undefined,\n name: error instanceof Error ? error.name : undefined,\n ...(error && typeof error === \"object\" ? error : {}), // Include all enumerable properties if it's an object\n };\n logger.error('Worker for job \"from-erp\" had an error', {\n error: errorDetails,\n });\n\n throw error; // Rethrow so Bree can handle it properly\n }\n};\n\n// Cross-platform module detection fix for Bree compatibility\n// Windows: process.argv[1] uses backslashes, import.meta.url uses forward slashes\n// Linux/Mac: both use forward slashes, so this normalization is safe\nconst normalizedArgv1 = process.argv[1].replace(/\\\\/g, \"/\");\nconst fileUrl = normalizedArgv1.startsWith(\"/\")\n ? 
`file://${normalizedArgv1}` // Unix: file:// + /path = file:///path\n : `file:///${normalizedArgv1}`; // Windows: file:/// + C:/path = file:///C:/path\nconst isMainModule = import.meta.url === fileUrl;\n\nif (isMainModule) {\n // This is called when Bree runs this file as a worker\n try {\n await main();\n } catch {\n process.exitCode = 1; // prefer exitCode so stdout/stderr can flush\n }\n}\n\nexport default main;\n"],"names":[],"mappings":";;;;AAOA,OAAO,QAAQ,QAAQ,IAAI,aAAa;AAGxC,IAAI,OAAO,IAAI;AACb,UAAQ,GAAG,QAAQ,MAAM;AACvB,WAAO,MAAM,8CAA8C;AAC3D,QAAI;AACF,aAAO,KAAA;AAAA,IACT,SAAS,GAAG;AAAA,IAEZ;AAAA,EACF,CAAC;AACH;AAEA,MAAM,OAAO,YAAY;AACvB,MAAI;AACF,WAAO,KAAK,kCAAkC;AAC9C,WAAO,KAAK,iDAAiD;AAG7D,UAAM,gBAAgB,QAAQ,IAAI;AAElC,QAAI,CAAC,eAAe;AAClB,YAAM,IAAI,MAAM,sDAAsD;AAAA,IACxE;AAGA,UAAM,YAAY,MAAM,wBAAwB,aAAa;AAE7D,UAAM,kBAAkB,gBAAgB,YAAY,YAAY;AAC9D,YAAM,UAAU,YAAA;AAAA,IAClB,CAAC;AAED,UAAM,UAAU,qBAAA;AAChB,WAAO,KAAK,kDAAkD;AAG9D,WAAO,MAAM,qCAAqC;AAGlD,QAAI,OAAO,IAAI;AACb,aAAO,MAAM,6CAA6C;AAC1D,UAAI;AACF,eAAO,KAAA;AAAA,MACT,SAAS,GAAG;AACV,eAAO,MAAM,sCAAsC;AAAA,MACrD;AAAA,IACF;AAGA,UAAM,IAAI,QAAQ,CAAC,YAAY,WAAW,SAAS,GAAG,CAAC;AACvD,WAAO,MAAM,sCAAsC;AAAA,EACrD,SAAS,OAAO;AACd,UAAM,eAAe;AAAA,MACnB,SAAS,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,MAC9D,OAAO,iBAAiB,QAAQ,MAAM,QAAQ;AAAA,MAC9C,MAAM,iBAAiB,QAAQ,MAAM,OAAO;AAAA,MAC5C,GAAI,SAAS,OAAO,UAAU,WAAW,QAAQ,CAAA;AAAA;AAAA,IAAC;AAEpD,WAAO,MAAM,0CAA0C;AAAA,MACrD,OAAO;AAAA,IAAA,CACR;AAED,UAAM;AAAA,EACR;AACF;AAKA,MAAM,kBAAkB,QAAQ,KAAK,CAAC,EAAE,QAAQ,OAAO,GAAG;AAC1D,MAAM,UAAU,gBAAgB,WAAW,GAAG,IAC1C,UAAU,eAAe,KACzB,WAAW,eAAe;AAC9B,MAAM,eAAe,YAAY,QAAQ;AAEzC,IAAI,cAAc;AAEhB,MAAI;AACF,UAAM,KAAA;AAAA,EACR,QAAQ;AACN,YAAQ,WAAW;AAAA,EACrB;AACF;"}
+ {"version":3,"file":"from-erp.js","sources":["../../../../src/services/data-sync-service/jobs/from-erp.ts"],"sourcesContent":["import \"dotenv/config\";\n\nimport logger from \"../../../services/reporting-service/logger\";\nimport { SQLiteCoordinator } from \"../../sqlite-service\";\nimport { createConnectorFromPath } from \"../../../utils/connector-factory\";\n\n// Configure the logger with the correct log level\nlogger.level = process.env.LOG_LEVEL || \"info\";\n\nconst main = async () => {\n try {\n logger.info('Worker for job \"from-erp\" online');\n logger.info(\"==========Starting from-erp job cycle==========\");\n\n // Get the connector path from the environment variable\n const connectorPath = process.env.CONNECTOR_PATH;\n \n if (!connectorPath) {\n throw new Error(\"Connector path not provided in environment variables\");\n }\n\n // Create a new connector instance for this job\n const connector = await createConnectorFromPath(connectorPath);\n\n await SQLiteCoordinator.executeWithLock(\"from-erp\", async () => {\n await connector.syncFromERP();\n });\n\n await connector.syncFromERPCompleted();\n logger.info(\"==========Completed from-erp job cycle==========\");\n } catch (error) {\n const errorDetails = {\n message: error instanceof Error ? error.message : String(error),\n stack: error instanceof Error ? error.stack : undefined,\n name: error instanceof Error ? error.name : undefined,\n ...(error && typeof error === \"object\" ? error : {}), // Include all enumerable properties if it's an object\n };\n logger.error('Worker for job \"from-erp\" had an error', {\n error: errorDetails,\n });\n\n throw error; // Rethrow so Bree can handle it properly\n }\n};\n\n// Cross-platform module detection fix for Bree compatibility\n// Windows: process.argv[1] uses backslashes, import.meta.url uses forward slashes\n// Linux/Mac: both use forward slashes, so this normalization is safe\nconst normalizedArgv1 = process.argv[1].replace(/\\\\/g, '/');\nconst fileUrl = normalizedArgv1.startsWith('/') ? \n `file://${normalizedArgv1}` : // Unix: file:// + /path = file:///path\n `file:///${normalizedArgv1}`; // Windows: file:/// + C:/path = file:///C:/path\nconst isMainModule = import.meta.url === fileUrl;\n\nif (isMainModule) {\n // This is called when Bree runs this file as a worker\n try {\n await main();\n } catch {\n process.exitCode = 1; // prefer exitCode so stdout/stderr can flush\n }\n}\n\nexport default main;"],"names":[],"mappings":";;;;AAOA,OAAO,QAAQ,QAAQ,IAAI,aAAa;AAExC,MAAM,OAAO,YAAY;AACvB,MAAI;AACF,WAAO,KAAK,kCAAkC;AAC9C,WAAO,KAAK,iDAAiD;AAG7D,UAAM,gBAAgB,QAAQ,IAAI;AAElC,QAAI,CAAC,eAAe;AAClB,YAAM,IAAI,MAAM,sDAAsD;AAAA,IACxE;AAGA,UAAM,YAAY,MAAM,wBAAwB,aAAa;AAE7D,UAAM,kBAAkB,gBAAgB,YAAY,YAAY;AAC9D,YAAM,UAAU,YAAA;AAAA,IAClB,CAAC;AAED,UAAM,UAAU,qBAAA;AAChB,WAAO,KAAK,kDAAkD;AAAA,EAChE,SAAS,OAAO;AACd,UAAM,eAAe;AAAA,MACnB,SAAS,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,MAC9D,OAAO,iBAAiB,QAAQ,MAAM,QAAQ;AAAA,MAC9C,MAAM,iBAAiB,QAAQ,MAAM,OAAO;AAAA,MAC5C,GAAI,SAAS,OAAO,UAAU,WAAW,QAAQ,CAAA;AAAA;AAAA,IAAC;AAEpD,WAAO,MAAM,0CAA0C;AAAA,MACrD,OAAO;AAAA,IAAA,CACR;AAED,UAAM;AAAA,EACR;AACF;AAKA,MAAM,kBAAkB,QAAQ,KAAK,CAAC,EAAE,QAAQ,OAAO,GAAG;AAC1D,MAAM,UAAU,gBAAgB,WAAW,GAAG,IAC5C,UAAU,eAAe;AAAA;AAAA,EACzB,WAAW,eAAe;AAAA;AAC5B,MAAM,eAAe,YAAY,QAAQ;AAEzC,IAAI,cAAc;AAEhB,MAAI;AACF,UAAM,KAAA;AAAA,EACR,QAAQ;AACN,YAAQ,WAAW;AAAA,EACrB;AACF;"}
package/dist/services/data-sync-service/jobs/retry-failed-labor-tickets.js CHANGED
@@ -1,6 +1,6 @@
- import "../../../config-qat9zgOl.js";
- import { l as logger } from "../../../logger-Db8CkwR6.js";
- import { c as createConnectorFromPath } from "../../../connector-factory-C2czCs9v.js";
+ import "../../../config-CV-KosWV.js";
+ import { l as logger } from "../../../logger-SqdNut1H.js";
+ import { c as createConnectorFromPath } from "../../../connector-factory-D8v6aQIt.js";
  logger.level = process.env.LOG_LEVEL || "info";
  const main = async () => {
  try {
package/dist/services/data-sync-service/jobs/run-migrations.js CHANGED
@@ -1,5 +1,5 @@
  import knex from "knex";
- import { l as logger } from "../../../logger-Db8CkwR6.js";
+ import { l as logger } from "../../../logger-SqdNut1H.js";
  import { c as config } from "../../../knexfile-1qKKIORB.js";
  logger.level = process.env.LOG_LEVEL || "info";
  const db = knex(config.local);
package/dist/services/data-sync-service/jobs/to-erp.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"to-erp.d.ts","sourceRoot":"","sources":["../../../../src/services/data-sync-service/jobs/to-erp.ts"],"names":[],"mappings":"AAAA,OAAO,eAAe,CAAC;AAqBvB,QAAA,MAAM,IAAI,qBAqDT,CAAC;AAoBF,eAAe,IAAI,CAAC"}
+ {"version":3,"file":"to-erp.d.ts","sourceRoot":"","sources":["../../../../src/services/data-sync-service/jobs/to-erp.ts"],"names":[],"mappings":"AAAA,OAAO,eAAe,CAAC;AAQvB,QAAA,MAAM,IAAI,qBAoCT,CAAC;AAoBF,eAAe,IAAI,CAAC"}
package/dist/services/data-sync-service/jobs/to-erp.js CHANGED
@@ -1,16 +1,7 @@
- import "../../../config-qat9zgOl.js";
- import { l as logger } from "../../../logger-Db8CkwR6.js";
- import { c as createConnectorFromPath } from "../../../connector-factory-C2czCs9v.js";
+ import "../../../config-CV-KosWV.js";
+ import { l as logger } from "../../../logger-SqdNut1H.js";
+ import { c as createConnectorFromPath } from "../../../connector-factory-D8v6aQIt.js";
  logger.level = process.env.LOG_LEVEL || "info";
- if (global.gc) {
- process.on("exit", () => {
- logger.debug("to-erp: Running garbage collection on exit");
- try {
- global.gc?.();
- } catch (e) {
- }
- });
- }
  const main = async () => {
  try {
  logger.info('Worker for job "to-erp" online');
@@ -23,17 +14,6 @@ const main = async () => {
  await connector.syncToERP();
  await connector.syncToERPCompleted();
  logger.info("==========Completed to-erp job cycle==========");
- logger.debug("to-erp: Starting cleanup sequence");
- if (global.gc) {
- logger.debug("to-erp: Running manual garbage collection");
- try {
- global.gc?.();
- } catch (e) {
- logger.debug("to-erp: GC not available or failed");
- }
- }
- await new Promise((resolve) => setTimeout(resolve, 100));
- logger.debug("to-erp: Cleanup sequence completed");
  } catch (error) {
  const errorDetails = {
  message: error instanceof Error ? error.message : String(error),
@@ -51,7 +31,10 @@ const main = async () => {
  }
  };
  const normalizedArgv1 = process.argv[1].replace(/\\/g, "/");
- const fileUrl = normalizedArgv1.startsWith("/") ? `file://${normalizedArgv1}` : `file:///${normalizedArgv1}`;
+ const fileUrl = normalizedArgv1.startsWith("/") ? `file://${normalizedArgv1}` : (
+ // Unix: file:// + /path = file:///path
+ `file:///${normalizedArgv1}`
+ );
  const isMainModule = import.meta.url === fileUrl;
  if (isMainModule) {
  try {
package/dist/services/data-sync-service/jobs/to-erp.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"to-erp.js","sources":["../../../../src/services/data-sync-service/jobs/to-erp.ts"],"sourcesContent":["import \"dotenv/config\";\n\nimport logger from \"../../../services/reporting-service/logger\";\nimport { SQLiteCoordinator } from \"../../sqlite-service\";\nimport { createConnectorFromPath } from \"../../../utils/connector-factory\";\n\n// Configure the logger with the correct log level\nlogger.level = process.env.LOG_LEVEL || \"info\";\n\n// Enable garbage collection on exit if available\nif (global.gc) {\n process.on(\"exit\", () => {\n logger.debug(\"to-erp: Running garbage collection on exit\");\n try {\n global.gc?.();\n } catch (e) {\n // Ignore GC errors\n }\n });\n}\n\nconst main = async () => {\n try {\n logger.info('Worker for job \"to-erp\" online');\n logger.info(\"==========Starting to-erp job cycle==========\");\n\n // Get the connector path from the environment variable\n const connectorPath = process.env.CONNECTOR_PATH;\n\n if (!connectorPath) {\n throw new Error(\"Connector path not provided in environment variables\");\n }\n\n // Create a new connector instance for this job\n const connector = await createConnectorFromPath(connectorPath);\n\n await connector.syncToERP();\n await connector.syncToERPCompleted();\n\n logger.info(\"==========Completed to-erp job cycle==========\");\n\n // Cleanup before worker exit\n logger.debug(\"to-erp: Starting cleanup sequence\");\n\n // Trigger garbage collection if available\n if (global.gc) {\n logger.debug(\"to-erp: Running manual garbage collection\");\n try {\n global.gc?.();\n } catch (e) {\n logger.debug(\"to-erp: GC not available or failed\");\n }\n }\n\n // Small delay to allow any pending operations to complete\n await new Promise((resolve) => setTimeout(resolve, 100));\n logger.debug(\"to-erp: Cleanup sequence completed\");\n } catch (error) {\n const errorDetails = {\n message: error instanceof Error ? error.message : String(error),\n stack: error instanceof Error ? error.stack : undefined,\n name: error instanceof Error ? error.name : undefined,\n ...(error && typeof error === \"object\" ? error : {}), // Include all enumerable properties if it's an object\n };\n logger.error('Worker for job \"to-erp\" had an error', {\n error: errorDetails,\n connectorPath: process.env.CONNECTOR_PATH,\n });\n\n // Also log to console for immediate visibility\n console.error(\"to-erp job error:\", error);\n\n throw error; // Rethrow so Bree can handle it properly\n }\n};\n\n// Cross-platform module detection fix for Bree compatibility\n// Windows: process.argv[1] uses backslashes, import.meta.url uses forward slashes\n// Linux/Mac: both use forward slashes, so this normalization is safe\nconst normalizedArgv1 = process.argv[1].replace(/\\\\/g, \"/\");\nconst fileUrl = normalizedArgv1.startsWith(\"/\")\n ? 
`file://${normalizedArgv1}` // Unix: file:// + /path = file:///path\n : `file:///${normalizedArgv1}`; // Windows: file:/// + C:/path = file:///C:/path\nconst isMainModule = import.meta.url === fileUrl;\n\nif (isMainModule) {\n // This is called when Bree runs this file as a worker\n try {\n await main();\n } catch {\n process.exitCode = 1; // prefer exitCode so stdout/stderr can flush\n }\n}\n\nexport default main;\n"],"names":[],"mappings":";;;AAOA,OAAO,QAAQ,QAAQ,IAAI,aAAa;AAGxC,IAAI,OAAO,IAAI;AACb,UAAQ,GAAG,QAAQ,MAAM;AACvB,WAAO,MAAM,4CAA4C;AACzD,QAAI;AACF,aAAO,KAAA;AAAA,IACT,SAAS,GAAG;AAAA,IAEZ;AAAA,EACF,CAAC;AACH;AAEA,MAAM,OAAO,YAAY;AACvB,MAAI;AACF,WAAO,KAAK,gCAAgC;AAC5C,WAAO,KAAK,+CAA+C;AAG3D,UAAM,gBAAgB,QAAQ,IAAI;AAElC,QAAI,CAAC,eAAe;AAClB,YAAM,IAAI,MAAM,sDAAsD;AAAA,IACxE;AAGA,UAAM,YAAY,MAAM,wBAAwB,aAAa;AAE7D,UAAM,UAAU,UAAA;AAChB,UAAM,UAAU,mBAAA;AAEhB,WAAO,KAAK,gDAAgD;AAG5D,WAAO,MAAM,mCAAmC;AAGhD,QAAI,OAAO,IAAI;AACb,aAAO,MAAM,2CAA2C;AACxD,UAAI;AACF,eAAO,KAAA;AAAA,MACT,SAAS,GAAG;AACV,eAAO,MAAM,oCAAoC;AAAA,MACnD;AAAA,IACF;AAGA,UAAM,IAAI,QAAQ,CAAC,YAAY,WAAW,SAAS,GAAG,CAAC;AACvD,WAAO,MAAM,oCAAoC;AAAA,EACnD,SAAS,OAAO;AACd,UAAM,eAAe;AAAA,MACnB,SAAS,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,MAC9D,OAAO,iBAAiB,QAAQ,MAAM,QAAQ;AAAA,MAC9C,MAAM,iBAAiB,QAAQ,MAAM,OAAO;AAAA,MAC5C,GAAI,SAAS,OAAO,UAAU,WAAW,QAAQ,CAAA;AAAA;AAAA,IAAC;AAEpD,WAAO,MAAM,wCAAwC;AAAA,MACnD,OAAO;AAAA,MACP,eAAe,QAAQ,IAAI;AAAA,IAAA,CAC5B;AAGD,YAAQ,MAAM,qBAAqB,KAAK;AAExC,UAAM;AAAA,EACR;AACF;AAKA,MAAM,kBAAkB,QAAQ,KAAK,CAAC,EAAE,QAAQ,OAAO,GAAG;AAC1D,MAAM,UAAU,gBAAgB,WAAW,GAAG,IAC1C,UAAU,eAAe,KACzB,WAAW,eAAe;AAC9B,MAAM,eAAe,YAAY,QAAQ;AAEzC,IAAI,cAAc;AAEhB,MAAI;AACF,UAAM,KAAA;AAAA,EACR,QAAQ;AACN,YAAQ,WAAW;AAAA,EACrB;AACF;"}
+ {"version":3,"file":"to-erp.js","sources":["../../../../src/services/data-sync-service/jobs/to-erp.ts"],"sourcesContent":["import \"dotenv/config\";\n\nimport logger from \"../../reporting-service/logger\";\nimport { createConnectorFromPath } from \"../../../utils/connector-factory\";\n\n// Configure the logger with the correct log level\nlogger.level = process.env.LOG_LEVEL || \"info\";\n\nconst main = async () => {\n try {\n logger.info('Worker for job \"to-erp\" online');\n logger.info(\"==========Starting to-erp job cycle==========\");\n\n // Get the connector path from the environment variable\n const connectorPath = process.env.CONNECTOR_PATH;\n\n if (!connectorPath) {\n throw new Error(\"Connector path not provided in environment variables\");\n }\n\n // Create a new connector instance for this job\n const connector = await createConnectorFromPath(connectorPath);\n\n await connector.syncToERP();\n await connector.syncToERPCompleted();\n\n logger.info(\"==========Completed to-erp job cycle==========\");\n } catch (error) {\n const errorDetails = {\n message: error instanceof Error ? error.message : String(error),\n stack: error instanceof Error ? error.stack : undefined,\n name: error instanceof Error ? error.name : undefined,\n ...(error && typeof error === \"object\" ? error : {}), // Include all enumerable properties if it's an object\n };\n logger.error('Worker for job \"to-erp\" had an error', {\n error: errorDetails,\n connectorPath: process.env.CONNECTOR_PATH,\n });\n\n // Also log to console for immediate visibility\n console.error(\"to-erp job error:\", error);\n\n throw error; // Rethrow so Bree can handle it properly\n }\n};\n\n// Cross-platform module detection fix for Bree compatibility\n// Windows: process.argv[1] uses backslashes, import.meta.url uses forward slashes\n// Linux/Mac: both use forward slashes, so this normalization is safe\nconst normalizedArgv1 = process.argv[1].replace(/\\\\/g, '/');\nconst fileUrl = normalizedArgv1.startsWith('/') ? \n `file://${normalizedArgv1}` : // Unix: file:// + /path = file:///path\n `file:///${normalizedArgv1}`; // Windows: file:/// + C:/path = file:///C:/path\nconst isMainModule = import.meta.url === fileUrl;\n\nif (isMainModule) {\n // This is called when Bree runs this file as a worker\n try {\n await main();\n } catch {\n process.exitCode = 1; // prefer exitCode so stdout/stderr can flush\n }\n}\n\nexport default main;\n"],"names":[],"mappings":";;;AAMA,OAAO,QAAQ,QAAQ,IAAI,aAAa;AAExC,MAAM,OAAO,YAAY;AACvB,MAAI;AACF,WAAO,KAAK,gCAAgC;AAC5C,WAAO,KAAK,+CAA+C;AAG3D,UAAM,gBAAgB,QAAQ,IAAI;AAElC,QAAI,CAAC,eAAe;AAClB,YAAM,IAAI,MAAM,sDAAsD;AAAA,IACxE;AAGA,UAAM,YAAY,MAAM,wBAAwB,aAAa;AAE7D,UAAM,UAAU,UAAA;AAChB,UAAM,UAAU,mBAAA;AAEhB,WAAO,KAAK,gDAAgD;AAAA,EAC9D,SAAS,OAAO;AACd,UAAM,eAAe;AAAA,MACnB,SAAS,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,MAC9D,OAAO,iBAAiB,QAAQ,MAAM,QAAQ;AAAA,MAC9C,MAAM,iBAAiB,QAAQ,MAAM,OAAO;AAAA,MAC5C,GAAI,SAAS,OAAO,UAAU,WAAW,QAAQ,CAAA;AAAA;AAAA,IAAC;AAEpD,WAAO,MAAM,wCAAwC;AAAA,MACnD,OAAO;AAAA,MACP,eAAe,QAAQ,IAAI;AAAA,IAAA,CAC5B;AAGD,YAAQ,MAAM,qBAAqB,KAAK;AAExC,UAAM;AAAA,EACR;AACF;AAKA,MAAM,kBAAkB,QAAQ,KAAK,CAAC,EAAE,QAAQ,OAAO,GAAG;AAC1D,MAAM,UAAU,gBAAgB,WAAW,GAAG,IAC5C,UAAU,eAAe;AAAA;AAAA,EACzB,WAAW,eAAe;AAAA;AAC5B,MAAM,eAAe,YAAY,QAAQ;AAEzC,IAAI,cAAc;AAEhB,MAAI;AACF,UAAM,KAAA;AAAA,EACR,QAAQ;AACN,YAAQ,WAAW;AAAA,EACrB;AACF;"}
package/dist/services/reporting-service/logger.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"logger.d.ts","sourceRoot":"","sources":["../../../src/services/reporting-service/logger.ts"],"names":[],"mappings":"AA+DA,QAAA,MAAM,MAAM,0BAaV,CAAC;AAGH,eAAO,MAAM,eAAe,GAAI,UAAU,MAAM,EAAE,SAAS,MAAM,SAgChE,CAAC;AAKF,eAAe,MAAM,CAAC"}
+ {"version":3,"file":"logger.d.ts","sourceRoot":"","sources":["../../../src/services/reporting-service/logger.ts"],"names":[],"mappings":"AA+DA,QAAA,MAAM,MAAM,0BAaV,CAAC;AAGH,eAAO,MAAM,eAAe,GAAI,UAAU,MAAM,EAAE,SAAS,MAAM,SA2GhE,CAAC;AAKF,eAAe,MAAM,CAAC"}
package/dist/types/erp-types.d.ts CHANGED
@@ -5,8 +5,7 @@ export declare enum ERPType {
  PROFITKEY = "PROFITKEY",
  PROSHOP = "PROSHOP",
  SYTELINE = "SYTELINE",
- TEMPLATE = "TEMPLATE",
- GLOBALSHOP = "GLOBALSHOP"
+ TEMPLATE = "TEMPLATE"
  }
  export declare enum ERPObjType {
  RESOURCES = 0,
package/dist/types/erp-types.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"erp-types.d.ts","sourceRoot":"","sources":["../../src/types/erp-types.ts"],"names":[],"mappings":"AACA,oBAAY,OAAO;IACjB,OAAO,YAAY;IACnB,IAAI,SAAS;IACb,QAAQ,aAAa;IACrB,SAAS,cAAc;IACvB,OAAO,YAAY;IACnB,QAAQ,aAAa;IACrB,QAAQ,aAAa;IACrB,UAAU,eAAe;CAC1B;AAED,oBAAY,UAAU;IACpB,SAAS,IAAA;IACT,KAAK,IAAA;IACL,cAAc,IAAA;IACd,WAAW,IAAA;IACX,qBAAqB,IAAA;IACrB,OAAO,IAAA;IACP,OAAO,IAAA;IACP,aAAa,IAAA;CAEd;AAED,MAAM,WAAW,aAAa;IAC5B,KAAK,EAAE,MAAM,CAAC;IACd,KAAK,EAAE,MAAM,CAAC;IACd,QAAQ,EAAE,MAAM,GAAG,SAAS,CAAC;IAC7B,YAAY,EAAE,MAAM,GAAG,SAAS,CAAC;IACjC,MAAM,CAAC,EAAE,MAAM,CAAC;CACjB;AAED,MAAM,WAAW,WAAW;IAC1B,IAAI,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,EAAE,CAAC;IAC/B,MAAM,EAAE,aAAa,CAAC;CACvB;AAED,MAAM,MAAM,SAAS,GAAG,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC"}
+ {"version":3,"file":"erp-types.d.ts","sourceRoot":"","sources":["../../src/types/erp-types.ts"],"names":[],"mappings":"AACA,oBAAY,OAAO;IACjB,OAAO,YAAY;IACnB,IAAI,SAAS;IACb,QAAQ,aAAa;IACrB,SAAS,cAAc;IACvB,OAAO,YAAY;IACnB,QAAQ,aAAa;IACrB,QAAQ,aAAa;CACtB;AAED,oBAAY,UAAU;IACpB,SAAS,IAAA;IACT,KAAK,IAAA;IACL,cAAc,IAAA;IACd,WAAW,IAAA;IACX,qBAAqB,IAAA;IACrB,OAAO,IAAA;IACP,OAAO,IAAA;IACP,aAAa,IAAA;CAEd;AAED,MAAM,WAAW,aAAa;IAC5B,KAAK,EAAE,MAAM,CAAC;IACd,KAAK,EAAE,MAAM,CAAC;IACd,QAAQ,EAAE,MAAM,GAAG,SAAS,CAAC;IAC7B,YAAY,EAAE,MAAM,GAAG,SAAS,CAAC;IACjC,MAAM,CAAC,EAAE,MAAM,CAAC;CACjB;AAED,MAAM,WAAW,WAAW;IAC1B,IAAI,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,EAAE,CAAC;IAC/B,MAAM,EAAE,aAAa,CAAC;CACvB;AAED,MAAM,MAAM,SAAS,GAAG,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC"}
package/package.json CHANGED
@@ -1,7 +1,7 @@
  {
  "name": "@machinemetrics/mm-erp-sdk",
  "description": "A library for syncing data between MachineMetrics and ERP systems",
- "version": "0.1.7-beta.2",
+ "version": "0.1.8-beta.0",
  "license": "MIT",
  "author": "machinemetrics",
  "main": "dist/mm-erp-sdk.js",
@@ -19,7 +19,6 @@
  "dependencies": {
  "@azure/msal-node": "^2.12.0",
  "@ladjs/graceful": "^4.2.0",
- "aws-sdk": "^2.1691.0",
  "axios": "^1.7.3",
  "axios-retry": "^4.5.0",
  "better-sqlite3": "^11.3.0",
@@ -29,7 +28,6 @@
  "knex": "^3.1.0",
  "lodash": "^4.17.21",
  "mssql": "^11.0.1",
- "odbc": "^2.4.8",
  "winston": "^3.14.0",
  "winston-daily-rotate-file": "^5.0.0",
  "xxhashjs": "^0.2.2",
package/src/index.ts CHANGED
@@ -17,13 +17,13 @@ export type { ERPApiConfig } from "./services/erp-api-services/types";
  // MM API client and types
  export { MMApiClient } from "./services/mm-api-service";
  export { MMReceiveLaborTicket } from "./services/mm-api-service";
- export type {
+ export type {
  MMReceiveLaborTicketReason,
  MMReceiveLaborTicketWorkOrderOperation,
  IToRESTApiObject,
  } from "./services/mm-api-service";
 
- export {
+ export {
  MMSendPerson,
  MMSendResource,
  MMSendPart,
@@ -57,11 +57,7 @@ export { getInitialLoadComplete, setInitialLoadComplete } from "./utils";
  // HTTP client factory and types for custom API integrations
  export { HTTPClientFactory } from "./utils/http-client";
  export { HTTPError } from "./utils/http-client";
- export type {
- HTTPResponse,
- HTTPClient,
- HTTPRequestConfig,
- } from "./utils/http-client";
+ export type { HTTPResponse, HTTPClient, HTTPRequestConfig } from "./utils/http-client";
 
  // Application initialization utilities
  export { ApplicationInitializer } from "./utils/application-initializer";
@@ -81,7 +77,7 @@ export type { WriteEntitiesToMMResult } from "./utils";
  export { MMBatchValidationError } from "./utils";
 
  // API services
- export type { APIResponse } from "./services/erp-api-services/types";
+ export type { APIResponse } from './services/erp-api-services/types';
  export { RestAPIService } from "./services/erp-api-services/rest/rest-api-service";
  export { ErrorHandler, GraphQLError } from "./services/erp-api-services/errors";
  export type {
@@ -109,18 +105,6 @@ export {
  } from "./services/sql-server-erp-service";
  export type { SQLInput } from "./services/sql-server-erp-service";
 
- // PSQL (Pervasive) services
- export {
- PsqlService,
- PsqlLaborTicketOperations,
- formatPsqlDate,
- formatPsqlTime,
- combinePsqlDateTime,
- isPsqlDateEmpty,
- cleanPsqlCharField,
- } from "./services/psql-erp-service";
- export type { PsqlConfiguration } from "./services/psql-erp-service";
-
  // Record tracking services
  export { RecordTrackingManager } from "./services/caching-service/record-tracking-manager";
  export type { RecordTrackingObject } from "./services/caching-service/record-tracking-manager";
@@ -129,10 +113,4 @@ export type { RecordTrackingObject } from "./services/caching-service/record-tra
  export { default as knexDatabaseConfig } from "./knexfile";
 
  // MM Connector Logging
- export {
- MMConnectorLogger,
- FileLogDeduper,
- LogEntry,
- type LogLevelString,
- type LogResponse,
- } from "./utils";
+ export { MMConnectorLogger, FileLogDeduper, LogEntry, type LogLevelString, type LogResponse } from './utils';
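For consumers of the SDK, the index.ts changes above mean the PSQL (Pervasive) surface is gone while the remaining root exports are unchanged. A rough usage-level illustration in TypeScript (hypothetical consumer code, not taken from this package's documentation):

// Root imports that 0.1.8-beta.0 still exports (per the context lines in the diff above).
import { MMApiClient, HTTPClientFactory, RestAPIService, MMConnectorLogger } from "@machinemetrics/mm-erp-sdk";

// Imports removed in this release; these no longer resolve and will fail to compile:
// import { PsqlService, PsqlLaborTicketOperations, formatPsqlDate } from "@machinemetrics/mm-erp-sdk";
// import type { PsqlConfiguration } from "@machinemetrics/mm-erp-sdk";

console.log(typeof MMApiClient, typeof HTTPClientFactory, typeof RestAPIService, typeof MMConnectorLogger);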
package/src/services/data-sync-service/data-sync-service.ts CHANGED
@@ -27,9 +27,6 @@ export const runDataSyncService = async (connectorPath: string) => {
  const bree = new Bree({
  root: jobsPath,
  logger,
- // Enable worker metadata for debugging
- outputWorkerMetadata: true,
-
  worker: {
  env: {
  CONNECTOR_PATH: connectorPath,
@@ -46,7 +43,6 @@ export const runDataSyncService = async (connectorPath: string) => {
  ...process.env,
  },
  },
-
  jobs: [
  // {
  // name: 'run-migrations', // Running this once on startup will create the tables in the sqlite database
@@ -55,24 +51,19 @@ export const runDataSyncService = async (connectorPath: string) => {
  name: "from-erp",
  timeout: "10s",
  interval: config.fromErpInterval,
- // Ensure worker terminates completely after execution
- closeWorkerAfterMs: 1000,
  },
  {
  name: "to-erp",
  //timeout: '3s', // Use timeout during development to see the job in action quickly
  interval: config.toErpInterval,
- closeWorkerAfterMs: 1000,
  },
  {
  name: "retry-failed-labor-tickets",
  interval: config.retryLaborTicketsInterval,
- closeWorkerAfterMs: 1000,
  },
  {
  name: "clean-up-expired-cache",
  interval: config.cacheExpirationCheckInterval,
- closeWorkerAfterMs: 1000,
  },
  ],
  });
@@ -84,41 +75,15 @@ export const runDataSyncService = async (connectorPath: string) => {
  name: job.name,
  interval: job.interval,
  timeout: job.timeout,
- closeWorkerAfterMs: job.closeWorkerAfterMs,
  }));
  logger.info("JOBS CONFIGURATION:", { jobs: jobsConfig });
 
  const graceful = new Graceful({ brees: [bree] });
  graceful.listen();
 
- // Enhanced event handlers for debugging worker lifecycle
- bree.on("worker created", (name) => {
- logger.debug(`Worker created for job: ${name}`);
- });
-
- bree.on("worker online", (name) => {
- logger.debug(`Worker online for job: ${name}`);
- });
-
- bree.on("worker message", (name, message) => {
- logger.debug(`Worker message from ${name}:`, message);
- });
-
- bree.on("worker deleted", (name) => {
- logger.debug(`Worker deleted for job: ${name}`);
- });
-
- bree.on("worker exit", (name, code, signal) => {
- if (code !== 0) {
- logger.error(
- `Worker ${name} exited with code ${code}, signal ${signal}`
- );
- } else {
- logger.debug(`Worker ${name} exited successfully`);
- }
- });
-
- await bree.start();
+ (async () => {
+ await bree.start();
+ })();
 
  bree.on("jobStarted", (job) => {
  console.log("Job " + job.name + " started");
package/src/services/data-sync-service/jobs/from-erp.ts CHANGED
@@ -7,18 +7,6 @@ import { createConnectorFromPath } from "../../../utils/connector-factory";
  // Configure the logger with the correct log level
  logger.level = process.env.LOG_LEVEL || "info";
 
- // Enable garbage collection on exit if available
- if (global.gc) {
- process.on("exit", () => {
- logger.debug("from-erp: Running garbage collection on exit");
- try {
- global.gc?.();
- } catch (e) {
- // Ignore GC errors
- }
- });
- }
-
  const main = async () => {
  try {
  logger.info('Worker for job "from-erp" online');
@@ -26,7 +14,7 @@ const main = async () => {
 
  // Get the connector path from the environment variable
  const connectorPath = process.env.CONNECTOR_PATH;
-
+
  if (!connectorPath) {
  throw new Error("Connector path not provided in environment variables");
  }
@@ -40,23 +28,6 @@ const main = async () => {
 
  await connector.syncFromERPCompleted();
  logger.info("==========Completed from-erp job cycle==========");
-
- // Cleanup before worker exit
- logger.debug("from-erp: Starting cleanup sequence");
-
- // Trigger garbage collection if available
- if (global.gc) {
- logger.debug("from-erp: Running manual garbage collection");
- try {
- global.gc?.();
- } catch (e) {
- logger.debug("from-erp: GC not available or failed");
- }
- }
-
- // Small delay to allow any pending operations to complete
- await new Promise((resolve) => setTimeout(resolve, 100));
- logger.debug("from-erp: Cleanup sequence completed");
  } catch (error) {
  const errorDetails = {
  message: error instanceof Error ? error.message : String(error),
@@ -75,10 +46,10 @@ const main = async () => {
  // Cross-platform module detection fix for Bree compatibility
  // Windows: process.argv[1] uses backslashes, import.meta.url uses forward slashes
  // Linux/Mac: both use forward slashes, so this normalization is safe
- const normalizedArgv1 = process.argv[1].replace(/\\/g, "/");
- const fileUrl = normalizedArgv1.startsWith("/")
- ? `file://${normalizedArgv1}` // Unix: file:// + /path = file:///path
- : `file:///${normalizedArgv1}`; // Windows: file:/// + C:/path = file:///C:/path
+ const normalizedArgv1 = process.argv[1].replace(/\\/g, '/');
+ const fileUrl = normalizedArgv1.startsWith('/') ?
+ `file://${normalizedArgv1}` : // Unix: file:// + /path = file:///path
+ `file:///${normalizedArgv1}`; // Windows: file:/// + C:/path = file:///C:/path
  const isMainModule = import.meta.url === fileUrl;
 
  if (isMainModule) {
@@ -90,4 +61,4 @@ if (isMainModule) {
  }
  }
 
- export default main;
+ export default main;
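The from-erp.ts hunks above only reformat the cross-platform main-module check; the logic is unchanged. As a point of reference, here is a self-contained TypeScript sketch of the same idea, with Node's built-in pathToFileURL shown alongside the manual string construction for comparison (an illustration, not code from this package):

import { pathToFileURL } from "node:url";

// Manual construction, mirroring the diff: normalize Windows backslashes, then add the file:// prefix.
const normalizedArgv1 = process.argv[1].replace(/\\/g, "/");
const manualFileUrl = normalizedArgv1.startsWith("/")
  ? `file://${normalizedArgv1}` // Unix: /app/jobs/from-erp.js -> file:///app/jobs/from-erp.js
  : `file:///${normalizedArgv1}`; // Windows: C:/app/jobs/from-erp.js -> file:///C:/app/jobs/from-erp.js

// Node's built-in helper produces an equivalent URL for simple paths (and also percent-encodes special characters).
const builtinFileUrl = pathToFileURL(process.argv[1]).href;

// Comparing against import.meta.url tells an ESM file whether it was run directly (e.g. as a Bree worker).
const isMainModule = import.meta.url === manualFileUrl || import.meta.url === builtinFileUrl;
console.log({ manualFileUrl, builtinFileUrl, isMainModule });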
package/src/services/data-sync-service/jobs/to-erp.ts CHANGED
@@ -1,24 +1,11 @@
  import "dotenv/config";
 
- import logger from "../../../services/reporting-service/logger";
- import { SQLiteCoordinator } from "../../sqlite-service";
+ import logger from "../../reporting-service/logger";
  import { createConnectorFromPath } from "../../../utils/connector-factory";
 
  // Configure the logger with the correct log level
  logger.level = process.env.LOG_LEVEL || "info";
 
- // Enable garbage collection on exit if available
- if (global.gc) {
- process.on("exit", () => {
- logger.debug("to-erp: Running garbage collection on exit");
- try {
- global.gc?.();
- } catch (e) {
- // Ignore GC errors
- }
- });
- }
-
  const main = async () => {
  try {
  logger.info('Worker for job "to-erp" online');
@@ -38,23 +25,6 @@ const main = async () => {
  await connector.syncToERPCompleted();
 
  logger.info("==========Completed to-erp job cycle==========");
-
- // Cleanup before worker exit
- logger.debug("to-erp: Starting cleanup sequence");
-
- // Trigger garbage collection if available
- if (global.gc) {
- logger.debug("to-erp: Running manual garbage collection");
- try {
- global.gc?.();
- } catch (e) {
- logger.debug("to-erp: GC not available or failed");
- }
- }
-
- // Small delay to allow any pending operations to complete
- await new Promise((resolve) => setTimeout(resolve, 100));
- logger.debug("to-erp: Cleanup sequence completed");
  } catch (error) {
  const errorDetails = {
  message: error instanceof Error ? error.message : String(error),
@@ -77,10 +47,10 @@ const main = async () => {
  // Cross-platform module detection fix for Bree compatibility
  // Windows: process.argv[1] uses backslashes, import.meta.url uses forward slashes
  // Linux/Mac: both use forward slashes, so this normalization is safe
- const normalizedArgv1 = process.argv[1].replace(/\\/g, "/");
- const fileUrl = normalizedArgv1.startsWith("/")
- ? `file://${normalizedArgv1}` // Unix: file:// + /path = file:///path
- : `file:///${normalizedArgv1}`; // Windows: file:/// + C:/path = file:///C:/path
+ const normalizedArgv1 = process.argv[1].replace(/\\/g, '/');
+ const fileUrl = normalizedArgv1.startsWith('/') ?
+ `file://${normalizedArgv1}` : // Unix: file:// + /path = file:///path
+ `file:///${normalizedArgv1}`; // Windows: file:/// + C:/path = file:///C:/path
  const isMainModule = import.meta.url === fileUrl;
 
  if (isMainModule) {
package/src/services/reporting-service/logger.ts CHANGED
@@ -78,20 +78,95 @@ const logger = createLogger({
 
  // Function to reconfigure the logger once CoreConfiguration is available
  export const configureLogger = (logLevel: string, nodeEnv: string) => {
- // Remove existing transports
+ // Remove existing transports (safely): close any DailyRotateFile streams first
+ try {
+ const existingFileTransports = (logger.transports || []).filter(
+ (t: any) => t instanceof DailyRotateFile
+ );
+ for (const t of existingFileTransports) {
+ const s = (t as any).logStream;
+ if (s && typeof s.end === "function") {
+ try {
+ s.end();
+ } catch {}
+ }
+ }
+ } catch {}
+
  logger.clear();
 
  // Add file transport
- logger.add(
- new DailyRotateFile({
- filename: path.join(logDirectory, "%DATE%.log"),
- datePattern: "YYYY-MM-DD",
- zippedArchive: true,
- maxSize: "20m",
- maxFiles: "14d",
- format: logFormat,
- })
- );
+ const fileTransport = new DailyRotateFile({
+ filename: path.join(logDirectory, "%DATE%.log"),
+ datePattern: "YYYY-MM-DD",
+ zippedArchive: true,
+ maxSize: "20m",
+ maxFiles: "14d",
+ format: logFormat,
+ });
+ logger.add(fileTransport);
+
+ // Rotate-time mitigation for long-running single-process apps
+ let isRefreshing = false;
+ fileTransport.on("rotate", (_oldFilename: string, _newFilename: string) => {
+ if (isRefreshing) return;
+ isRefreshing = true;
+ (logger as any).silent = true; // gate writes during refresh to avoid write-after-end
+
+ try {
+ // Close all existing DailyRotateFile streams
+ const existing = (logger.transports || []).filter(
+ (t: any) => t instanceof DailyRotateFile
+ );
+ for (const t of existing) {
+ const s = (t as any).logStream;
+ if (s && typeof s.end === "function") {
+ try {
+ s.end();
+ } catch {}
+ }
+ }
+
+ // Refresh the file transport cleanly
+ logger.clear();
+ const refreshed = new DailyRotateFile({
+ filename: path.join(logDirectory, "%DATE%.log"),
+ datePattern: "YYYY-MM-DD",
+ zippedArchive: true,
+ maxSize: "20m",
+ maxFiles: "14d",
+ format: logFormat,
+ });
+
+ // Once new file stream is ready, resume writes
+ refreshed.on("new", () => {
+ (logger as any).silent = false;
+ isRefreshing = false;
+ });
+
+ logger.add(refreshed);
+
+ // Preserve console transport behavior in non-production
+ if (nodeEnv !== "production") {
+ logger.add(
+ new transports.Console({
+ format: format.combine(
+ format.timestamp(),
+ format.splat(),
+ baseFormat,
+ format.colorize({ all: true })
+ ),
+ })
+ );
+ }
+
+ logger.level = logLevel;
+ } catch {
+ // If anything goes wrong, resume writes to avoid permanent silence
+ (logger as any).silent = false;
+ isRefreshing = false;
+ }
+ });
 
  // Add console transport in non-production environments
  if (nodeEnv !== "production") {
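The logger.ts hunk above is the substantive change in this release: configureLogger now closes existing DailyRotateFile streams before clearing transports, and rebuilds the file transport on the transport's "rotate" event, gating writes until the "new" event fires. A minimal, self-contained TypeScript sketch of those winston-daily-rotate-file events (file names, level, and messages here are illustrative, not the package's values):

import { createLogger, format, transports } from "winston";
import DailyRotateFile from "winston-daily-rotate-file";

// A small rotating-file logger; "rotate" and "new" are the events the patched configureLogger listens for.
const fileTransport = new DailyRotateFile({
  filename: "logs/%DATE%.log",
  datePattern: "YYYY-MM-DD",
  zippedArchive: true,
  maxSize: "20m",
  maxFiles: "14d",
  format: format.combine(format.timestamp(), format.json()),
});

// Fired when an old log file is rotated out and a new one is about to be used.
fileTransport.on("rotate", (oldFilename: string, newFilename: string) => {
  console.log(`rotated ${oldFilename} -> ${newFilename}`);
});

// Fired when a new log file (and its write stream) has been created.
fileTransport.on("new", (newFilename: string) => {
  console.log(`writing to ${newFilename}`);
});

const logger = createLogger({
  level: process.env.LOG_LEVEL || "info",
  transports: [fileTransport, new transports.Console()],
});

logger.info("rotation demo online");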