@machinemetrics/mm-erp-sdk 0.1.8-beta.0 → 0.1.8-beta.10

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (52)
  1. package/dist/{config-CV-KosWV.js → config-cB7h4yvc.js} +2 -2
  2. package/dist/{config-CV-KosWV.js.map → config-cB7h4yvc.js.map} +1 -1
  3. package/dist/{connector-factory-D8v6aQIt.js → connector-factory-CKm74_WZ.js} +2 -2
  4. package/dist/{connector-factory-D8v6aQIt.js.map → connector-factory-CKm74_WZ.js.map} +1 -1
  5. package/dist/{hashed-cache-manager-B6hTDLxU.js → hashed-cache-manager-B1hPBNnF.js} +4 -4
  6. package/dist/{hashed-cache-manager-B6hTDLxU.js.map → hashed-cache-manager-B1hPBNnF.js.map} +1 -1
  7. package/dist/{index-Bg76oouR.js → index-DCgheVjV.js} +2 -2
  8. package/dist/{index-Bg76oouR.js.map → index-DCgheVjV.js.map} +1 -1
  9. package/dist/index.d.ts +5 -3
  10. package/dist/index.d.ts.map +1 -1
  11. package/dist/{logger-SqdNut1H.js → logger-CBDNtsMq.js} +969 -1024
  12. package/dist/logger-CBDNtsMq.js.map +1 -0
  13. package/dist/mm-erp-sdk.js +241 -7
  14. package/dist/mm-erp-sdk.js.map +1 -1
  15. package/dist/services/data-sync-service/jobs/clean-up-expired-cache.js +4 -4
  16. package/dist/services/data-sync-service/jobs/from-erp.js +4 -4
  17. package/dist/services/data-sync-service/jobs/retry-failed-labor-tickets.js +3 -3
  18. package/dist/services/data-sync-service/jobs/run-migrations.js +1 -1
  19. package/dist/services/data-sync-service/jobs/to-erp.d.ts.map +1 -1
  20. package/dist/services/data-sync-service/jobs/to-erp.js +3 -3
  21. package/dist/services/data-sync-service/jobs/to-erp.js.map +1 -1
  22. package/dist/services/psql-erp-service/configuration.d.ts +10 -0
  23. package/dist/services/psql-erp-service/configuration.d.ts.map +1 -0
  24. package/dist/services/psql-erp-service/index.d.ts +14 -0
  25. package/dist/services/psql-erp-service/index.d.ts.map +1 -0
  26. package/dist/services/psql-erp-service/internal/types/psql-types.d.ts +12 -0
  27. package/dist/services/psql-erp-service/internal/types/psql-types.d.ts.map +1 -0
  28. package/dist/services/psql-erp-service/psql-helpers.d.ts +32 -0
  29. package/dist/services/psql-erp-service/psql-helpers.d.ts.map +1 -0
  30. package/dist/services/psql-erp-service/psql-service.d.ts +43 -0
  31. package/dist/services/psql-erp-service/psql-service.d.ts.map +1 -0
  32. package/dist/services/reporting-service/logger.d.ts.map +1 -1
  33. package/dist/types/erp-types.d.ts +2 -1
  34. package/dist/types/erp-types.d.ts.map +1 -1
  35. package/dist/utils/index.d.ts +1 -1
  36. package/dist/utils/index.d.ts.map +1 -1
  37. package/dist/utils/timezone.d.ts +7 -0
  38. package/dist/utils/timezone.d.ts.map +1 -1
  39. package/package.json +4 -1
  40. package/src/index.ts +26 -5
  41. package/src/services/data-sync-service/jobs/to-erp.ts +2 -1
  42. package/src/services/psql-erp-service/configuration.ts +9 -0
  43. package/src/services/psql-erp-service/index.ts +22 -0
  44. package/src/services/psql-erp-service/internal/types/psql-types.ts +13 -0
  45. package/src/services/psql-erp-service/psql-helpers.ts +90 -0
  46. package/src/services/psql-erp-service/psql-service.ts +240 -0
  47. package/src/services/reporting-service/logger.ts +57 -70
  48. package/src/types/erp-types.ts +1 -0
  49. package/src/utils/index.ts +1 -1
  50. package/src/utils/mm-labor-ticket-helpers.ts +2 -2
  51. package/src/utils/timezone.ts +28 -0
  52. package/dist/logger-SqdNut1H.js.map +0 -1
package/src/services/psql-erp-service/psql-service.ts
@@ -0,0 +1,240 @@
+import { PsqlConfiguration } from "./configuration";
+import { ERPResponse } from "../../types/erp-types";
+import { OdbcErrorResponse } from "./internal/types/psql-types";
+import logger from "../reporting-service/logger";
+
+type PagingParams = {
+  limit?: number;
+  offset?: number;
+};
+
+/**
+ * ODBC connection interface for type safety
+ */
+interface OdbcConnection {
+  query(sql: string): Promise<any[]>;
+  close(): Promise<void>;
+}
+
+/**
+ * ODBC module interface
+ */
+interface OdbcModule {
+  connect(connectionString: string): Promise<OdbcConnection>;
+}
+
+export class PsqlService {
+  private config: PsqlConfiguration;
+  private static odbcModule: OdbcModule | null = null;
+  private static odbcLoadError: Error | null = null;
+
+  constructor(config: PsqlConfiguration) {
+    this.config = config;
+  }
+
+  /**
+   * Dynamically load the ODBC module with lazy initialization and caching
+   * @throws Error with helpful message if ODBC package is not installed
+   */
+  private static async getOdbc(): Promise<OdbcModule> {
+    // If we've already tried and failed, throw the cached error
+    if (this.odbcLoadError) {
+      throw this.odbcLoadError;
+    }
+
+    // If already loaded, return cached module
+    if (this.odbcModule) {
+      return this.odbcModule;
+    }
+
+    try {
+      // Dynamic import - only loads when actually needed
+      // @ts-ignore - odbc is an optional dependency, may not be installed at build time
+      const odbcImport = await import("odbc");
+      // Handle both default export and named export patterns
+      const odbc = odbcImport.default || odbcImport;
+      this.odbcModule = odbc as OdbcModule;
+      return this.odbcModule;
+    } catch (error) {
+      const errorMessage = error instanceof Error ? error.message : String(error);
+      this.odbcLoadError = new Error(
+        `ODBC package is required for PSQL service but is not installed or failed to load.\n` +
+          `Install it with: npm install odbc\n` +
+          `Also install OS-level dependencies, e.g. on Alpine Linux:\n` +
+          `  apk add --no-cache unixodbc unixodbc-dev python3 make g++\n` +
+          `For other Linux distributions, install unixodbc and unixodbc-dev packages.\n` +
+          `Original error: ${errorMessage}`
+      );
+      throw this.odbcLoadError;
+    }
+  }
+
+  // REMOVED: dispose() method - not needed anymore
+  // REMOVED: connection property - not needed anymore
+  // REMOVED: openConnection() method - not needed anymore
+  // REMOVED: closeConnection() method - not needed anymore
+
+  /**
+   * Build PSQL ODBC connection string
+   * CRITICAL: ServerName must use IP.PORT format (e.g., 10.4.0.11.1583)
+   */
+  private buildConnectionString(): string {
+    const serverName = `${this.config.host}.${this.config.port}`;
+
+    return (
+      [
+        "Driver={Pervasive ODBC Interface}",
+        `ServerName=${serverName}`,
+        `DBQ=${this.config.database}`,
+        `UID=${this.config.username}`,
+        `PWD=${this.config.password}`,
+        "AutoDoubleQuote=0",
+      ].join(";") + ";"
+    );
+  }
+
+  /**
+   * Execute a query and return the results
+   * Creates a fresh connection for each query to avoid handle corruption
+   *
+   * @param query The SQL query to execute
+   * @param params Query parameters (currently unused for PSQL read operations)
+   * @param paging Optional paging parameters
+   * @returns The entities fetched from the database, along with paging information
+   */
+  public async executePreparedStatement(
+    query: string,
+    params: Record<string, string> = {},
+    paging?: PagingParams
+  ): Promise<ERPResponse | undefined> {
+    // Dynamically load ODBC module (will throw helpful error if not installed)
+    const odbc = await PsqlService.getOdbc();
+    let connection: OdbcConnection | null = null;
+
+    try {
+      // Create fresh connection for THIS query only
+      const connStr = this.buildConnectionString();
+      logger.debug("Creating fresh PSQL connection for query");
+      connection = await odbc.connect(connStr);
+
+      if (Object.keys(params).length > 0) {
+        logger.warn(
+          "PsqlService: Query parameters provided but parameter binding not yet implemented. " +
+            "Using direct query execution."
+        );
+      }
+
+      const records = await connection.query(query);
+      const allRecords = PsqlService.recordsetToRecords(records);
+      const rowsFetched = allRecords.length;
+
+      // Apply paging if requested
+      const pagedData =
+        paging?.offset !== undefined || paging?.limit !== undefined
+          ? allRecords.slice(
+              paging.offset || 0,
+              (paging.offset || 0) + (paging.limit || allRecords.length)
+            )
+          : allRecords;
+
+      return {
+        data: pagedData,
+        paging: {
+          count: rowsFetched,
+          limit: paging?.limit || 0,
+          offset: paging?.offset || 0,
+          nextPage:
+            paging?.limit && (paging.offset || 0) + paging.limit < rowsFetched
+              ? String((paging.offset || 0) + paging.limit)
+              : undefined,
+          previousPage: paging?.offset
+            ? String(Math.max(0, (paging.offset || 0) - (paging.limit || 10)))
+            : undefined,
+        },
+      };
+    } catch (error) {
+      // If this is an ODBC load error (from getOdbc), re-throw it as-is
+      // since it already has a helpful error message
+      if (error instanceof Error && error.message.includes("ODBC package is required")) {
+        throw error;
+      }
+
+      // Otherwise, handle as ODBC runtime error
+      const errorInfo = error as OdbcErrorResponse;
+      logger.error("Error fetching data from PSQL", {
+        error: errorInfo.message,
+        odbcErrors: errorInfo.odbcErrors,
+        query: query.substring(0, 200), // Log first 200 chars of query
+      });
+
+      throw this.handleOdbcError(errorInfo);
+    } finally {
+      // CRITICAL: Always close connection, even on error
+      if (connection) {
+        try {
+          await connection.close();
+          logger.debug("PSQL connection closed successfully");
+        } catch (err) {
+          // Don't throw on close errors, just log
+          logger.warn("Error closing PSQL connection (non-fatal)", {
+            error: err,
+          });
+        }
+      }
+    }
+  }
+
+  /**
+   * Transform ODBC result set to array of Record<string, string> instances.
+   * IMPORTANT: PSQL CHAR fields are often padded with spaces - we trim them
+   */
+  public static recordsetToRecords(recordset: any[]): Record<string, string>[] {
+    if (!Array.isArray(recordset)) {
+      return [];
+    }
+
+    const data: Record<string, string>[] = recordset.map((row) => {
+      const transformedRow: Record<string, string> = {};
+      Object.keys(row).forEach((key) => {
+        const value = row[key];
+        transformedRow[key] =
+          value !== null && value !== undefined ? String(value).trim() : "";
+      });
+      return transformedRow;
+    });
+
+    return data;
+  }
+
+  /**
+   * Handle ODBC errors and provide meaningful messages
+   */
+  private handleOdbcError(error: OdbcErrorResponse): Error {
+    const odbcError = error.odbcErrors?.[0];
+    const errorCode = odbcError?.state;
+    const message = odbcError?.message || error.message;
+
+    switch (errorCode) {
+      case "08S01":
+        return new Error(
+          "PSQL connection failed. Check: " +
+            "1) PVSW environment variable set to /usr/local/psql/etc/pvsw.ini, " +
+            "2) Network connectivity to ports 1583/3351, " +
+            "3) ODBC configuration files in /usr/local/psql/etc/ and /etc/. " +
+            `Original error: ${message}`
+        );
+      case "28000":
+        return new Error(
+          `PSQL authentication failed. Check username/password. Original error: ${message}`
+        );
+      case "42000":
+        return new Error(`PSQL SQL syntax error. Original error: ${message}`);
+      case "42S02":
+        return new Error(
+          `PSQL table or view not found. Check table names in query. Original error: ${message}`
+        );
+      default:
+        return new Error(`PSQL error (${errorCode || "unknown"}): ${message}`);
+    }
+  }
+}
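
For orientation, a minimal consumer-side sketch of the new service follows. This is illustrative only: the import path, the assumption that PsqlConfiguration exposes exactly the host/port/database/username/password fields read in buildConnectionString(), and all values (table name, credentials) are hypothetical, and the optional odbc package plus unixODBC must be installed at runtime.

import { PsqlService } from "./services/psql-erp-service/psql-service";

async function example(): Promise<void> {
  // Hypothetical configuration values; buildConnectionString() joins host and port as IP.PORT
  const service = new PsqlService({
    host: "10.4.0.11",
    port: 1583,
    database: "DEMODATA",
    username: "Master",
    password: "secret",
  });

  // Each call opens a fresh ODBC connection and closes it in the finally block
  const response = await service.executePreparedStatement(
    "SELECT * FROM Billing",
    {},
    { limit: 100, offset: 0 }
  );
  console.log(response?.data.length, response?.paging);
}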
package/src/services/reporting-service/logger.ts
@@ -76,6 +76,61 @@ const logger = createLogger({
   ],
 });
 
+// Helper function to create a file transport with shared configuration
+const createFileTransport = (): DailyRotateFile => {
+  return new DailyRotateFile({
+    filename: path.join(logDirectory, "%DATE%.log"),
+    datePattern: "YYYY-MM-DD",
+    zippedArchive: true,
+    maxSize: "20m",
+    maxFiles: "14d",
+    format: logFormat,
+  });
+};
+
+// Rotate mitigation helper: attaches rotate handler to transport and recursively attaches to replacements
+function attachRotateMitigation(
+  transport: DailyRotateFile,
+  opts: { logLevel: string; nodeEnv: string }
+) {
+  const { logLevel, nodeEnv } = opts;
+  let isRefreshing = false;
+  transport.on("rotate", (_old: string, _new: string) => {
+    if (isRefreshing) return;
+    isRefreshing = true;
+    let removalTimer: NodeJS.Timeout | null = null;
+
+    // Create replacement first to avoid any logging gap
+    const next = createFileTransport();
+    // When the new file is created, remove the old transport
+    next.on("new", () => {
+      if (removalTimer) {
+        clearTimeout(removalTimer);
+        removalTimer = null;
+      }
+      try {
+        logger.remove(transport);
+      } catch {}
+      isRefreshing = false;
+    });
+    attachRotateMitigation(next, opts);
+    logger.add(next);
+
+    // Fallback: if the "new" event doesn't fire, remove the old transport after a grace period
+    const REMOVAL_GRACE_MS = 30000;
+    removalTimer = setTimeout(() => {
+      try {
+        logger.remove(transport);
+      } catch {}
+      isRefreshing = false;
+      removalTimer = null;
+    }, REMOVAL_GRACE_MS);
+
+    // Keep console and other transports intact; do not silence or clear
+    logger.level = logLevel;
+  });
+}
+
 // Function to reconfigure the logger once CoreConfiguration is available
 export const configureLogger = (logLevel: string, nodeEnv: string) => {
   // Remove existing transports (safely): close any DailyRotateFile streams first
@@ -96,78 +151,10 @@ export const configureLogger = (logLevel: string, nodeEnv: string) => {
   logger.clear();
 
   // Add file transport
-  const fileTransport = new DailyRotateFile({
-    filename: path.join(logDirectory, "%DATE%.log"),
-    datePattern: "YYYY-MM-DD",
-    zippedArchive: true,
-    maxSize: "20m",
-    maxFiles: "14d",
-    format: logFormat,
-  });
+  const fileTransport = createFileTransport();
+  attachRotateMitigation(fileTransport, { logLevel, nodeEnv });
   logger.add(fileTransport);
 
-  // Rotate-time mitigation for long-running single-process apps
-  let isRefreshing = false;
-  fileTransport.on("rotate", (_oldFilename: string, _newFilename: string) => {
-    if (isRefreshing) return;
-    isRefreshing = true;
-    (logger as any).silent = true; // gate writes during refresh to avoid write-after-end
-
-    try {
-      // Close all existing DailyRotateFile streams
-      const existing = (logger.transports || []).filter(
-        (t: any) => t instanceof DailyRotateFile
-      );
-      for (const t of existing) {
-        const s = (t as any).logStream;
-        if (s && typeof s.end === "function") {
-          try {
-            s.end();
-          } catch {}
-        }
-      }
-
-      // Refresh the file transport cleanly
-      logger.clear();
-      const refreshed = new DailyRotateFile({
-        filename: path.join(logDirectory, "%DATE%.log"),
-        datePattern: "YYYY-MM-DD",
-        zippedArchive: true,
-        maxSize: "20m",
-        maxFiles: "14d",
-        format: logFormat,
-      });
-
-      // Once new file stream is ready, resume writes
-      refreshed.on("new", () => {
-        (logger as any).silent = false;
-        isRefreshing = false;
-      });
-
-      logger.add(refreshed);
-
-      // Preserve console transport behavior in non-production
-      if (nodeEnv !== "production") {
-        logger.add(
-          new transports.Console({
-            format: format.combine(
-              format.timestamp(),
-              format.splat(),
-              baseFormat,
-              format.colorize({ all: true })
-            ),
-          })
-        );
-      }
-
-      logger.level = logLevel;
-    } catch {
-      // If anything goes wrong, resume writes to avoid permanent silence
-      (logger as any).silent = false;
-      isRefreshing = false;
-    }
-  });
-
   // Add console transport in non-production environments
   if (nodeEnv !== "production") {
     logger.add(
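
Net effect of this refactor: configureLogger now installs a rotate handler that adds a replacement file transport before removing the old one, instead of silencing the logger and rebuilding every transport. A minimal usage sketch (the signature comes from the diff; the argument values are examples only):

import { configureLogger } from "./services/reporting-service/logger";

// Reconfigure once core configuration is available; illustrative values
configureLogger("debug", process.env.NODE_ENV ?? "development");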
package/src/types/erp-types.ts
@@ -7,6 +7,7 @@ export enum ERPType {
   PROSHOP = "PROSHOP",
   SYTELINE = "SYTELINE",
   TEMPLATE = "TEMPLATE",
+  GLOBALSHOP = "GLOBALSHOP",
 }
 
 export enum ERPObjType {
package/src/utils/index.ts
@@ -15,7 +15,7 @@ export {
  * Timezone and time-related utilities
  */
 export { getTimezoneOffsetAndPersist } from "./time-utils";
-export { formatDateWithTZOffset, convertToLocalTime } from "./timezone";
+export { formatDateWithTZOffset, convertToLocalTime, toISOWithOffset } from "./timezone";
 export { applyTimezoneOffsetsToFields } from "./time-utils";
 export * from "./time-utils";
 
package/src/utils/mm-labor-ticket-helpers.ts
@@ -1,4 +1,4 @@
-import { convertToLocalTime } from "./timezone";
+import { convertToLocalTime, toISOWithOffset } from "./timezone";
 import { MMReceiveLaborTicket } from "../services/mm-api-service/types/receive-types";
 
 /**
@@ -22,7 +22,7 @@ export function convertLaborTicketToLocalTimezone(
 
   timeFields.forEach((field) => {
     const localTime = convertToLocalTime(laborTicket[field], timezoneOffset);
-    laborTicket[field] = localTime?.toISOString() || null;
+    laborTicket[field] = localTime ? toISOWithOffset(localTime, timezoneOffset) : null;
   });
   return laborTicket;
 }
package/src/utils/timezone.ts
@@ -94,3 +94,31 @@ export const formatDateWithTZOffset = (
   // Append the timezone offset
   return `${isoDate}${sign}${hours}:${minutes}`;
 };
+
+/**
+ * Formats a Date object as an ISO string with the specified timezone offset
+ * @param date The Date object to format (should be a Date that has been shifted by convertToLocalTime)
+ * @param timezoneOffset The timezone offset in hours
+ * @returns ISO string with offset in format: YYYY-MM-DDTHH:mm:ss.SSS±HH:MM
+ */
+export const toISOWithOffset = (date: Date, timezoneOffset: number): string => {
+  const sign = timezoneOffset >= 0 ? "+" : "-";
+  const abs = Math.abs(timezoneOffset);
+  const hours = Math.floor(abs);
+  const minutes = Math.round((abs - hours) * 60);
+  const pad2 = (n: number) => n.toString().padStart(2, "0");
+  const pad3 = (n: number) => n.toString().padStart(3, "0");
+
+  // Use UTC getters since convertToLocalTime shifts the Date's internal timestamp
+  // The UTC components of the shifted Date represent the local wall time
+  const yyyy = date.getUTCFullYear();
+  const MM = pad2(date.getUTCMonth() + 1);
+  const dd = pad2(date.getUTCDate());
+  const HH = pad2(date.getUTCHours());
+  const mm = pad2(date.getUTCMinutes());
+  const ss = pad2(date.getUTCSeconds());
+  const SSS = pad3(date.getUTCMilliseconds());
+  const off = `${sign}${pad2(hours)}:${pad2(minutes)}`;
+
+  return `${yyyy}-${MM}-${dd}T${HH}:${mm}:${ss}.${SSS}${off}`;
+};
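
A quick worked example of the new helper, using a Date whose UTC components already represent local wall time (the shape convertToLocalTime is documented above to produce); the values are illustrative:

import { toISOWithOffset } from "./timezone";

// UTC components stand in for the already-shifted local wall time
const shifted = new Date(Date.UTC(2024, 0, 15, 8, 45, 0, 0));

toISOWithOffset(shifted, -5);  // "2024-01-15T08:45:00.000-05:00"
toISOWithOffset(shifted, 5.5); // "2024-01-15T08:45:00.000+05:30" (fractional offsets round to minutes)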