@machinemetrics/mm-erp-sdk 0.1.7-beta.3 → 0.1.8-beta.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/{config-qat9zgOl.js → config-CV-KosWV.js} +2 -2
- package/dist/{config-qat9zgOl.js.map → config-CV-KosWV.js.map} +1 -1
- package/dist/{connector-factory-C2czCs9v.js → connector-factory-D8v6aQIt.js} +2 -2
- package/dist/{connector-factory-C2czCs9v.js.map → connector-factory-D8v6aQIt.js.map} +1 -1
- package/dist/{hashed-cache-manager-CzyFSt2B.js → hashed-cache-manager-B6hTDLxU.js} +4 -4
- package/dist/{hashed-cache-manager-CzyFSt2B.js.map → hashed-cache-manager-B6hTDLxU.js.map} +1 -1
- package/dist/{index-B9wo8pld.js → index-Bg76oouR.js} +2 -2
- package/dist/{index-B9wo8pld.js.map → index-Bg76oouR.js.map} +1 -1
- package/dist/{logger-Db8CkwR6.js → logger-SqdNut1H.js} +1040 -934
- package/dist/logger-SqdNut1H.js.map +1 -0
- package/dist/mm-erp-sdk.js +53 -108
- package/dist/mm-erp-sdk.js.map +1 -1
- package/dist/services/data-sync-service/data-sync-service.d.ts.map +1 -1
- package/dist/services/data-sync-service/jobs/clean-up-expired-cache.js +4 -4
- package/dist/services/data-sync-service/jobs/from-erp.d.ts.map +1 -1
- package/dist/services/data-sync-service/jobs/from-erp.js +8 -25
- package/dist/services/data-sync-service/jobs/from-erp.js.map +1 -1
- package/dist/services/data-sync-service/jobs/retry-failed-labor-tickets.js +3 -3
- package/dist/services/data-sync-service/jobs/run-migrations.js +1 -1
- package/dist/services/data-sync-service/jobs/to-erp.d.ts.map +1 -1
- package/dist/services/data-sync-service/jobs/to-erp.js +7 -24
- package/dist/services/data-sync-service/jobs/to-erp.js.map +1 -1
- package/dist/services/psql-erp-service/psql-service.d.ts +1 -14
- package/dist/services/psql-erp-service/psql-service.d.ts.map +1 -1
- package/dist/services/reporting-service/logger.d.ts.map +1 -1
- package/package.json +1 -1
- package/src/services/data-sync-service/data-sync-service.ts +3 -38
- package/src/services/data-sync-service/jobs/from-erp.ts +6 -35
- package/src/services/data-sync-service/jobs/to-erp.ts +4 -33
- package/src/services/psql-erp-service/psql-service.ts +53 -90
- package/src/services/reporting-service/logger.ts +86 -11
- package/dist/logger-Db8CkwR6.js.map +0 -1
package/dist/mm-erp-sdk.js
CHANGED
@@ -1,14 +1,14 @@
-import { C as CoreConfiguration, H as HashedCacheManager } from "./hashed-cache-manager-CzyFSt2B.js";
-import { E, g, a } from "./hashed-cache-manager-CzyFSt2B.js";
-import { l as logger } from "./logger-Db8CkwR6.js";
-import { g as getCachedMMToken, s as setCachedMMToken, a as setTimezoneOffsetInCache, b as getCachedTimezoneOffset, S as SQLiteCoordinator } from "./index-B9wo8pld.js";
-import { c, d } from "./index-B9wo8pld.js";
+import { C as CoreConfiguration, H as HashedCacheManager } from "./hashed-cache-manager-B6hTDLxU.js";
+import { E, g, a } from "./hashed-cache-manager-B6hTDLxU.js";
+import { l as logger } from "./logger-SqdNut1H.js";
+import { g as getCachedMMToken, s as setCachedMMToken, a as setTimezoneOffsetInCache, b as getCachedTimezoneOffset, S as SQLiteCoordinator } from "./index-Bg76oouR.js";
+import { c, d } from "./index-Bg76oouR.js";
 import axios, { AxiosError } from "axios";
 import knex from "knex";
 import { c as config } from "./knexfile-1qKKIORB.js";
 import fs from "fs";
 import path from "path";
-import "./connector-factory-C2czCs9v.js";
+import "./connector-factory-D8v6aQIt.js";
 import Bree from "bree";
 import Graceful from "@ladjs/graceful";
 import { fileURLToPath } from "url";

@@ -3607,8 +3607,6 @@ const runDataSyncService = async (connectorPath) => {
   const bree = new Bree({
     root: jobsPath,
     logger,
-    // Enable worker metadata for debugging
-    outputWorkerMetadata: true,
     worker: {
       env: {
         CONNECTOR_PATH: connectorPath,

@@ -3632,25 +3630,20 @@ const runDataSyncService = async (connectorPath) => {
       {
         name: "from-erp",
         timeout: "10s",
-        interval: config2.fromErpInterval,
-        // Ensure worker terminates completely after execution
-        closeWorkerAfterMs: 1e3
+        interval: config2.fromErpInterval
       },
       {
         name: "to-erp",
         //timeout: '3s', // Use timeout during development to see the job in action quickly
-        interval: config2.toErpInterval,
-        closeWorkerAfterMs: 1e3
+        interval: config2.toErpInterval
       },
       {
         name: "retry-failed-labor-tickets",
-        interval: config2.retryLaborTicketsInterval,
-        closeWorkerAfterMs: 1e3
+        interval: config2.retryLaborTicketsInterval
       },
       {
         name: "clean-up-expired-cache",
-        interval: config2.cacheExpirationCheckInterval,
-        closeWorkerAfterMs: 1e3
+        interval: config2.cacheExpirationCheckInterval
       }
     ]
   });

@@ -3660,34 +3653,14 @@ const runDataSyncService = async (connectorPath) => {
   const jobsConfig = bree.config.jobs.map((job) => ({
     name: job.name,
     interval: job.interval,
-    timeout: job.timeout,
-    closeWorkerAfterMs: job.closeWorkerAfterMs
+    timeout: job.timeout
   }));
   logger.info("JOBS CONFIGURATION:", { jobs: jobsConfig });
   const graceful = new Graceful({ brees: [bree] });
   graceful.listen();
-
-
-  });
-  bree.on("worker online", (name) => {
-    logger.debug(`Worker online for job: ${name}`);
-  });
-  bree.on("worker message", (name, message) => {
-    logger.debug(`Worker message from ${name}:`, message);
-  });
-  bree.on("worker deleted", (name) => {
-    logger.debug(`Worker deleted for job: ${name}`);
-  });
-  bree.on("worker exit", (name, code, signal) => {
-    if (code !== 0) {
-      logger.error(
-        `Worker ${name} exited with code ${code}, signal ${signal}`
-      );
-    } else {
-      logger.debug(`Worker ${name} exited successfully`);
-    }
-  });
-  await bree.start();
+  (async () => {
+    await bree.start();
+  })();
   bree.on("jobStarted", (job) => {
     console.log("Job " + job.name + " started");
   });

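Net effect of the two hunks above on runDataSyncService: jobs are declared with only a name, an interval, and (for from-erp) a timeout; the outputWorkerMetadata flag, the per-job closeWorkerAfterMs overrides, and the worker online/message/deleted/exit listeners are dropped; and bree.start() now runs inside an async IIFE instead of being awaited inline. A minimal sketch of that shape, keeping the job names from the diff but using placeholder paths and interval values (the real ones come from the SDK's runtime config):

import Bree from "bree";
import Graceful from "@ladjs/graceful";
import path from "path";

// Placeholder jobs directory and intervals; the SDK resolves these from its config.
const jobsPath = path.join(process.cwd(), "jobs");

const bree = new Bree({
  root: jobsPath,
  jobs: [
    // timeout delays a job's first run; interval schedules the repeats.
    // With no closeWorkerAfterMs, Bree is not told to force-terminate a
    // worker a fixed time after it starts.
    { name: "from-erp", timeout: "10s", interval: 30_000 },
    { name: "to-erp", interval: 30_000 },
    { name: "retry-failed-labor-tickets", interval: 60_000 },
    { name: "clean-up-expired-cache", interval: 60_000 },
  ],
});

// Graceful shutdown hooks (SIGTERM/SIGINT) for the Bree instance.
const graceful = new Graceful({ brees: [bree] });
graceful.listen();

// Start the scheduler without blocking the caller, as in the updated bundle.
(async () => {
  await bree.start();
})();

bree.on("worker created", (name) => {
  console.log(`Worker created for job ${name}`);
});

Dropping closeWorkerAfterMs: 1e3 means workers are no longer force-closed one second after they start; each job's worker now exits on its own when its run completes.
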
@@ -3995,14 +3968,14 @@ class SqlServerHelper {
   }
 }
 class PsqlService {
-  connection = null;
   config;
   constructor(config2) {
     this.config = config2;
   }
-
-
-
+  // REMOVED: dispose() method - not needed anymore
+  // REMOVED: connection property - not needed anymore
+  // REMOVED: openConnection() method - not needed anymore
+  // REMOVED: closeConnection() method - not needed anymore
   /**
    * Build PSQL ODBC connection string
    * CRITICAL: ServerName must use IP.PORT format (e.g., 10.4.0.11.1583)

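After this hunk the compiled class keeps no connection state: the connection field is gone, and the inline // REMOVED comments record which lifecycle members were cut. An illustrative TypeScript outline of the surviving shape (the PsqlConfig fields here are assumptions for the sketch, not the SDK's actual config type):

// Outline only; the real config shape and helper implementations live in the SDK.
interface PsqlConfig {
  serverName: string; // must use IP.PORT format, e.g. "10.4.0.11.1583"
  user: string;
  password: string;
}

class PsqlService {
  // The only instance state left is the configuration; no cached ODBC handle.
  private readonly config: PsqlConfig;

  constructor(config: PsqlConfig) {
    this.config = config;
  }

  // Still present per the surrounding hunks: buildConnectionString(),
  // executePreparedStatement(), recordsetToRecords(), handleOdbcError().
  // Gone: connection, openConnection(), closeConnection(), dispose().
}
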
@@ -4020,7 +3993,7 @@ class PsqlService {
   }
   /**
    * Execute a query and return the results
-   *
+   * Creates a fresh connection for each query to avoid handle corruption
    *
    * @param query The SQL query to execute
    * @param params Query parameters (currently unused for PSQL read operations)

@@ -4028,73 +4001,58 @@ class PsqlService {
    * @returns The entities fetched from the database, along with paging information
    */
   async executePreparedStatement(query, params = {}, paging) {
-
-    let records;
+    let connection = null;
     try {
+      const connStr = this.buildConnectionString();
+      logger.debug("Creating fresh PSQL connection for query");
+      connection = await odbc.connect(connStr);
       if (Object.keys(params).length > 0) {
         logger.warn(
           "PsqlService: Query parameters provided but parameter binding not yet implemented. Using direct query execution. This is acceptable for Phase 1 read operations."
         );
       }
-      records = await connection.query(query);
+      const records = await connection.query(query);
+      const allRecords = PsqlService.recordsetToRecords(records);
+      const rowsFetched = allRecords.length;
+      const pagedData = paging?.offset !== void 0 || paging?.limit !== void 0 ? allRecords.slice(
+        paging.offset || 0,
+        (paging.offset || 0) + (paging.limit || allRecords.length)
+      ) : allRecords;
+      return {
+        data: pagedData,
+        paging: {
+          count: rowsFetched,
+          limit: paging?.limit || 0,
+          offset: paging?.offset || 0,
+          nextPage: paging?.limit && (paging.offset || 0) + paging.limit < rowsFetched ? String((paging.offset || 0) + paging.limit) : void 0,
+          previousPage: paging?.offset ? String(Math.max(0, (paging.offset || 0) - (paging.limit || 10))) : void 0
+        }
+      };
     } catch (error) {
       const errorInfo = error;
       logger.error("Error fetching data from PSQL", {
         error: errorInfo.message,
-        odbcErrors: errorInfo.odbcErrors
+        odbcErrors: errorInfo.odbcErrors,
+        query: query.substring(0, 200)
+        // Log first 200 chars of query
       });
       throw this.handleOdbcError(errorInfo);
-    }
-
-
-
-
-
-
-
-
-
-          count: rowsFetched,
-          limit: paging?.limit || 0,
-          offset: paging?.offset || 0,
-          nextPage: paging?.limit && (paging.offset || 0) + paging.limit < rowsFetched ? String((paging.offset || 0) + paging.limit) : void 0,
-          previousPage: paging?.offset ? String(Math.max(0, (paging.offset || 0) - (paging.limit || 10))) : void 0
+    } finally {
+      if (connection) {
+        try {
+          await connection.close();
+          logger.debug("PSQL connection closed successfully");
+        } catch (err) {
+          logger.warn("Error closing PSQL connection (non-fatal)", {
+            error: err
+          });
+        }
       }
-    };
-  }
-  /**
-   * Opens a connection to PSQL database
-   * Caches the connection so that it can be reused.
-   * On failure to connect, throws
-   */
-  async openConnection() {
-    if (this.connection) {
-      logger.debug("Reusing existing PSQL connection");
-      return this.connection;
-    }
-    try {
-      const connStr = this.buildConnectionString();
-      logger.info("Opening new PSQL connection");
-      logger.debug(
-        "Connection string (password hidden):",
-        connStr.replace(/PWD=[^;]+/, "PWD=***")
-      );
-      this.connection = await odbc.connect(connStr);
-      logger.info("Successfully connected to PSQL database");
-      return this.connection;
-    } catch (error) {
-      logger.error("PsqlService>>openConnection>> Connection failed", {
-        error
-      });
-      throw this.handleOdbcError(error);
     }
   }
   /**
    * Transform ODBC result set to array of Record<string, string> instances.
    * IMPORTANT: PSQL CHAR fields are often padded with spaces - we trim them
-   *
-   * @param recordset Result set from ODBC query
-   * @returns array of Record<string, string> instances
    */
   static recordsetToRecords(recordset) {
     if (!Array.isArray(recordset)) {

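The rewritten executePreparedStatement opens a fresh ODBC connection per call, applies offset/limit paging in memory over the full result set, and always closes the connection in a finally block, treating close failures as non-fatal. A condensed sketch of the same flow as a standalone TypeScript function (the odbc package is the one the bundle already imports; the Paging shape and function name are illustrative):

import odbc from "odbc";

interface Paging {
  offset?: number;
  limit?: number;
}

async function queryWithFreshConnection(connectionString: string, sql: string, paging?: Paging) {
  // One connection per query: no long-lived handle shared between calls.
  const connection = await odbc.connect(connectionString);
  try {
    const records = await connection.query(sql);

    // Paging is applied in memory after the full result set has been fetched.
    const offset = paging?.offset ?? 0;
    const limit = paging?.limit ?? records.length;
    const page = records.slice(offset, offset + limit);

    return {
      data: page,
      paging: {
        count: records.length,
        offset,
        limit,
        // A next page exists only if rows remain past the current window.
        nextPage: offset + limit < records.length ? String(offset + limit) : undefined,
      },
    };
  } finally {
    // Always close, even when the query throws; a failed close is non-fatal.
    await connection.close().catch(() => undefined);
  }
}

Note the trade-off the diff accepts: because paging happens after the query returns, every call still pulls the full result set from PSQL; offset and limit only trim what the caller receives.
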
@@ -4136,19 +4094,6 @@ class PsqlService {
       return new Error(`PSQL error (${errorCode || "unknown"}): ${message}`);
     }
   }
-  async closeConnection() {
-    if (this.connection) {
-      logger.info("Closing PSQL connection");
-      try {
-        await this.connection.close();
-      } catch (error) {
-        logger.error("PsqlService::closeConnection: Error closing connection", {
-          error
-        });
-      }
-      this.connection = null;
-    }
-  }
 }
 class PsqlLaborTicketOperations {
   constructor(service) {