@machinemetrics/mm-erp-sdk 0.1.7-beta.3 → 0.1.8-beta.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/{config-qat9zgOl.js → config-CV-KosWV.js} +2 -2
- package/dist/{config-qat9zgOl.js.map → config-CV-KosWV.js.map} +1 -1
- package/dist/{connector-factory-C2czCs9v.js → connector-factory-D8v6aQIt.js} +2 -2
- package/dist/{connector-factory-C2czCs9v.js.map → connector-factory-D8v6aQIt.js.map} +1 -1
- package/dist/{hashed-cache-manager-CzyFSt2B.js → hashed-cache-manager-B6hTDLxU.js} +4 -4
- package/dist/{hashed-cache-manager-CzyFSt2B.js.map → hashed-cache-manager-B6hTDLxU.js.map} +1 -1
- package/dist/{index-B9wo8pld.js → index-Bg76oouR.js} +2 -2
- package/dist/{index-B9wo8pld.js.map → index-Bg76oouR.js.map} +1 -1
- package/dist/{logger-Db8CkwR6.js → logger-SqdNut1H.js} +1040 -934
- package/dist/logger-SqdNut1H.js.map +1 -0
- package/dist/mm-erp-sdk.js +53 -108
- package/dist/mm-erp-sdk.js.map +1 -1
- package/dist/services/data-sync-service/data-sync-service.d.ts.map +1 -1
- package/dist/services/data-sync-service/jobs/clean-up-expired-cache.js +4 -4
- package/dist/services/data-sync-service/jobs/from-erp.d.ts.map +1 -1
- package/dist/services/data-sync-service/jobs/from-erp.js +8 -25
- package/dist/services/data-sync-service/jobs/from-erp.js.map +1 -1
- package/dist/services/data-sync-service/jobs/retry-failed-labor-tickets.js +3 -3
- package/dist/services/data-sync-service/jobs/run-migrations.js +1 -1
- package/dist/services/data-sync-service/jobs/to-erp.d.ts.map +1 -1
- package/dist/services/data-sync-service/jobs/to-erp.js +7 -24
- package/dist/services/data-sync-service/jobs/to-erp.js.map +1 -1
- package/dist/services/psql-erp-service/psql-service.d.ts +1 -14
- package/dist/services/psql-erp-service/psql-service.d.ts.map +1 -1
- package/dist/services/reporting-service/logger.d.ts.map +1 -1
- package/package.json +1 -1
- package/src/services/data-sync-service/data-sync-service.ts +3 -38
- package/src/services/data-sync-service/jobs/from-erp.ts +6 -35
- package/src/services/data-sync-service/jobs/to-erp.ts +4 -33
- package/src/services/psql-erp-service/psql-service.ts +53 -90
- package/src/services/reporting-service/logger.ts +86 -11
- package/dist/logger-Db8CkwR6.js.map +0 -1
package/src/services/psql-erp-service/psql-service.ts

@@ -10,23 +10,22 @@ type PagingParams = {
 };
 
 export class PsqlService {
-  private connection: odbc.Connection | null = null;
   private config: PsqlConfiguration;
 
   constructor(config: PsqlConfiguration) {
     this.config = config;
   }
 
-
-
-
+  // REMOVED: dispose() method - not needed anymore
+  // REMOVED: connection property - not needed anymore
+  // REMOVED: openConnection() method - not needed anymore
+  // REMOVED: closeConnection() method - not needed anymore
 
   /**
    * Build PSQL ODBC connection string
    * CRITICAL: ServerName must use IP.PORT format (e.g., 10.4.0.11.1583)
    */
   private buildConnectionString(): string {
-    // PSQL requires ServerName in format IP.PORT (not IP:PORT)
     const serverName = `${this.config.host}.${this.config.port}`;
 
     return (
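
The IP.PORT detail is the one thing the comment flags as critical, so here is a hedged sketch of the string this method plausibly assembles. The tail of `buildConnectionString()` is truncated in this diff, so every key except `ServerName` is an assumption based on common PSQL/Actian Zen ODBC configurations, not the SDK's actual keys:

```ts
// Hypothetical sketch - only the ServerName IP.PORT rule is confirmed by the diff.
const host = "10.4.0.11";
const port = 1583;

// PSQL expects ServerName as IP.PORT (dot), not IP:PORT (colon):
const serverName = `${host}.${port}`; // "10.4.0.11.1583"

const connStr =
  `ServerName=${serverName};` +
  `DBQ=DEMODATA;UID=user;PWD=secret;`; // assumed keys, placeholder values
```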

@@ -43,7 +42,7 @@ export class PsqlService {
 
   /**
    * Execute a query and return the results
-   *
+   * Creates a fresh connection for each query to avoid handle corruption
    *
    * @param query The SQL query to execute
    * @param params Query parameters (currently unused for PSQL read operations)

@@ -55,12 +54,14 @@
     params: Record<string, string> = {},
     paging?: PagingParams
   ): Promise<ERPResponse | undefined> {
-
+    let connection: odbc.Connection | null = null;
 
-    let records;
     try {
-      //
-
+      // Create fresh connection for THIS query only
+      const connStr = this.buildConnectionString();
+      logger.debug("Creating fresh PSQL connection for query");
+      connection = await odbc.connect(connStr);
+
       if (Object.keys(params).length > 0) {
         logger.warn(
           "PsqlService: Query parameters provided but parameter binding not yet implemented. " +

@@ -68,85 +69,62 @@
         );
       }
 
-      records = await connection.query(query);
+      const records = await connection.query(query);
+      const allRecords = PsqlService.recordsetToRecords(records);
+      const rowsFetched = allRecords.length;
+
+      // Apply paging if requested
+      const pagedData =
+        paging?.offset !== undefined || paging?.limit !== undefined
+          ? allRecords.slice(
+              paging.offset || 0,
+              (paging.offset || 0) + (paging.limit || allRecords.length)
+            )
+          : allRecords;
+
+      return {
+        data: pagedData,
+        paging: {
+          count: rowsFetched,
+          limit: paging?.limit || 0,
+          offset: paging?.offset || 0,
+          nextPage:
+            paging?.limit && (paging.offset || 0) + paging.limit < rowsFetched
+              ? String((paging.offset || 0) + paging.limit)
+              : undefined,
+          previousPage: paging?.offset
+            ? String(Math.max(0, (paging.offset || 0) - (paging.limit || 10)))
+            : undefined,
+        },
+      };
     } catch (error) {
       const errorInfo = error as OdbcErrorResponse;
       logger.error("Error fetching data from PSQL", {
         error: errorInfo.message,
         odbcErrors: errorInfo.odbcErrors,
+        query: query.substring(0, 200), // Log first 200 chars of query
       });
 
       throw this.handleOdbcError(errorInfo);
-    }
-
-
-
-
-
-
-
-
-
-
-
-
-
-    return {
-      data: pagedData,
-      paging: {
-        count: rowsFetched,
-        limit: paging?.limit || 0,
-        offset: paging?.offset || 0,
-        nextPage:
-          paging?.limit && (paging.offset || 0) + paging.limit < rowsFetched
-            ? String((paging.offset || 0) + paging.limit)
-            : undefined,
-        previousPage: paging?.offset
-          ? String(Math.max(0, (paging.offset || 0) - (paging.limit || 10)))
-          : undefined,
-      },
-    };
-  }
-
-  /**
-   * Opens a connection to PSQL database
-   * Caches the connection so that it can be reused.
-   * On failure to connect, throws
-   */
-  async openConnection(): Promise<odbc.Connection> {
-    // If we have a connection, reuse it
-    // Note: ODBC connections don't have a .connected property like SQL Server
-    // We'll keep it simple and reuse if not null
-    if (this.connection) {
-      logger.debug("Reusing existing PSQL connection");
-      return this.connection;
-    }
-
-    try {
-      const connStr = this.buildConnectionString();
-      logger.info("Opening new PSQL connection");
-      logger.debug(
-        "Connection string (password hidden):",
-        connStr.replace(/PWD=[^;]+/, "PWD=***")
-      );
-
-      this.connection = await odbc.connect(connStr);
-      logger.info("Successfully connected to PSQL database");
-      return this.connection;
-    } catch (error) {
-      logger.error("PsqlService>>openConnection>> Connection failed", {
-        error,
-      });
-      throw this.handleOdbcError(error as OdbcErrorResponse);
+    } finally {
+      // CRITICAL: Always close connection, even on error
+      if (connection) {
+        try {
+          await connection.close();
+          logger.debug("PSQL connection closed successfully");
+        } catch (err) {
+          // Don't throw on close errors, just log
+          logger.warn("Error closing PSQL connection (non-fatal)", {
+            error: err,
+          });
+        }
+      }
     }
   }
 
   /**
    * Transform ODBC result set to array of Record<string, string> instances.
    * IMPORTANT: PSQL CHAR fields are often padded with spaces - we trim them
-   *
-   * @param recordset Result set from ODBC query
-   * @returns array of Record<string, string> instances
    */
   public static recordsetToRecords(recordset: any[]): Record<string, string>[] {
     if (!Array.isArray(recordset)) {

@@ -157,7 +135,6 @@
       const transformedRow: Record<string, string> = {};
       Object.keys(row).forEach((key) => {
         const value = row[key];
-        // Convert to string and trim (PSQL CHAR fields have trailing spaces)
         transformedRow[key] =
           value !== null && value !== undefined ? String(value).trim() : "";
       });

@@ -198,18 +175,4 @@
       return new Error(`PSQL error (${errorCode || "unknown"}): ${message}`);
     }
   }
-
-  private async closeConnection(): Promise<void> {
-    if (this.connection) {
-      logger.info("Closing PSQL connection");
-      try {
-        await this.connection.close();
-      } catch (error) {
-        logger.error("PsqlService::closeConnection: Error closing connection", {
-          error,
-        });
-      }
-      this.connection = null;
-    }
-  }
 }

package/src/services/reporting-service/logger.ts

@@ -78,20 +78,95 @@ const logger = createLogger({
 
 // Function to reconfigure the logger once CoreConfiguration is available
 export const configureLogger = (logLevel: string, nodeEnv: string) => {
-  // Remove existing transports
+  // Remove existing transports (safely): close any DailyRotateFile streams first
+  try {
+    const existingFileTransports = (logger.transports || []).filter(
+      (t: any) => t instanceof DailyRotateFile
+    );
+    for (const t of existingFileTransports) {
+      const s = (t as any).logStream;
+      if (s && typeof s.end === "function") {
+        try {
+          s.end();
+        } catch {}
+      }
+    }
+  } catch {}
+
   logger.clear();
 
   // Add file transport
-
-
-
-
-
-
-
-
-
-
+  const fileTransport = new DailyRotateFile({
+    filename: path.join(logDirectory, "%DATE%.log"),
+    datePattern: "YYYY-MM-DD",
+    zippedArchive: true,
+    maxSize: "20m",
+    maxFiles: "14d",
+    format: logFormat,
+  });
+  logger.add(fileTransport);
+
+  // Rotate-time mitigation for long-running single-process apps
+  let isRefreshing = false;
+  fileTransport.on("rotate", (_oldFilename: string, _newFilename: string) => {
+    if (isRefreshing) return;
+    isRefreshing = true;
+    (logger as any).silent = true; // gate writes during refresh to avoid write-after-end
+
+    try {
+      // Close all existing DailyRotateFile streams
+      const existing = (logger.transports || []).filter(
+        (t: any) => t instanceof DailyRotateFile
+      );
+      for (const t of existing) {
+        const s = (t as any).logStream;
+        if (s && typeof s.end === "function") {
+          try {
+            s.end();
+          } catch {}
+        }
+      }
+
+      // Refresh the file transport cleanly
+      logger.clear();
+      const refreshed = new DailyRotateFile({
+        filename: path.join(logDirectory, "%DATE%.log"),
+        datePattern: "YYYY-MM-DD",
+        zippedArchive: true,
+        maxSize: "20m",
+        maxFiles: "14d",
+        format: logFormat,
+      });
+
+      // Once new file stream is ready, resume writes
+      refreshed.on("new", () => {
+        (logger as any).silent = false;
+        isRefreshing = false;
+      });
+
+      logger.add(refreshed);
+
+      // Preserve console transport behavior in non-production
+      if (nodeEnv !== "production") {
+        logger.add(
+          new transports.Console({
+            format: format.combine(
+              format.timestamp(),
+              format.splat(),
+              baseFormat,
+              format.colorize({ all: true })
+            ),
+          })
+        );
+      }
+
+      logger.level = logLevel;
+    } catch {
+      // If anything goes wrong, resume writes to avoid permanent silence
+      (logger as any).silent = false;
+      isRefreshing = false;
+    }
+  });
 
   // Add console transport in non-production environments
   if (nodeEnv !== "production") {