@machinemetrics/mm-erp-sdk 0.1.5-beta.0 → 0.1.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -1
- package/dist/{config-2l5vnNkA.js → config-qat9zgOl.js} +6 -6
- package/dist/{config-2l5vnNkA.js.map → config-qat9zgOl.js.map} +1 -1
- package/dist/{connector-factory-CQ8e7Tae.js → connector-factory-C2czCs9v.js} +12 -3
- package/dist/connector-factory-C2czCs9v.js.map +1 -0
- package/dist/{hashed-cache-manager-Ci59eC75.js → hashed-cache-manager-CzyFSt2B.js} +5 -4
- package/dist/{hashed-cache-manager-Ci59eC75.js.map → hashed-cache-manager-CzyFSt2B.js.map} +1 -1
- package/dist/{index-CXbOvFyf.js → index-B9wo8pld.js} +7 -7
- package/dist/{index-CXbOvFyf.js.map → index-B9wo8pld.js.map} +1 -1
- package/dist/index.d.ts +14 -6
- package/dist/index.d.ts.map +1 -1
- package/dist/{logger-QG73MndU.js → logger-Db8CkwR6.js} +929 -971
- package/dist/logger-Db8CkwR6.js.map +1 -0
- package/dist/mm-erp-sdk.js +417 -10
- package/dist/mm-erp-sdk.js.map +1 -1
- package/dist/services/data-sync-service/index.d.ts +1 -1
- package/dist/services/data-sync-service/index.d.ts.map +1 -1
- package/dist/services/data-sync-service/jobs/clean-up-expired-cache.d.ts +2 -0
- package/dist/services/data-sync-service/jobs/clean-up-expired-cache.d.ts.map +1 -1
- package/dist/services/data-sync-service/jobs/clean-up-expired-cache.js +42 -41
- package/dist/services/data-sync-service/jobs/clean-up-expired-cache.js.map +1 -1
- package/dist/services/data-sync-service/jobs/from-erp.d.ts.map +1 -1
- package/dist/services/data-sync-service/jobs/from-erp.js +11 -5
- package/dist/services/data-sync-service/jobs/from-erp.js.map +1 -1
- package/dist/services/data-sync-service/jobs/retry-failed-labor-tickets.d.ts +2 -0
- package/dist/services/data-sync-service/jobs/retry-failed-labor-tickets.d.ts.map +1 -1
- package/dist/services/data-sync-service/jobs/retry-failed-labor-tickets.js +39 -40
- package/dist/services/data-sync-service/jobs/retry-failed-labor-tickets.js.map +1 -1
- package/dist/services/data-sync-service/jobs/run-migrations.d.ts.map +1 -1
- package/dist/services/data-sync-service/jobs/run-migrations.js +4 -3
- package/dist/services/data-sync-service/jobs/run-migrations.js.map +1 -1
- package/dist/services/data-sync-service/jobs/to-erp.d.ts.map +1 -1
- package/dist/services/data-sync-service/jobs/to-erp.js +15 -5
- package/dist/services/data-sync-service/jobs/to-erp.js.map +1 -1
- package/dist/services/erp-api-services/index.d.ts +5 -1
- package/dist/services/erp-api-services/index.d.ts.map +1 -1
- package/dist/services/mm-api-service/index.d.ts +3 -2
- package/dist/services/mm-api-service/index.d.ts.map +1 -1
- package/dist/services/mm-api-service/mm-api-service.d.ts +20 -0
- package/dist/services/mm-api-service/mm-api-service.d.ts.map +1 -1
- package/dist/types/erp-types.d.ts +1 -2
- package/dist/types/erp-types.d.ts.map +1 -1
- package/dist/utils/connector-factory.d.ts.map +1 -1
- package/dist/utils/connector-log/log-deduper.d.ts +56 -0
- package/dist/utils/connector-log/log-deduper.d.ts.map +1 -0
- package/dist/utils/connector-log/mm-connector-logger-example.d.ts +1 -0
- package/dist/utils/connector-log/mm-connector-logger-example.d.ts.map +1 -0
- package/dist/utils/connector-log/mm-connector-logger.d.ts +74 -0
- package/dist/utils/connector-log/mm-connector-logger.d.ts.map +1 -0
- package/dist/utils/error-utils.d.ts +2 -0
- package/dist/utils/error-utils.d.ts.map +1 -0
- package/dist/utils/index.d.ts +11 -2
- package/dist/utils/index.d.ts.map +1 -1
- package/dist/utils/standard-process-drivers/index.d.ts +2 -1
- package/dist/utils/standard-process-drivers/index.d.ts.map +1 -1
- package/dist/utils/timezone.d.ts +7 -0
- package/dist/utils/timezone.d.ts.map +1 -1
- package/package.json +1 -1
- package/src/index.ts +19 -5
- package/src/services/data-sync-service/index.ts +1 -4
- package/src/services/data-sync-service/jobs/clean-up-expired-cache.ts +19 -7
- package/src/services/data-sync-service/jobs/from-erp.ts +12 -5
- package/src/services/data-sync-service/jobs/retry-failed-labor-tickets.ts +15 -5
- package/src/services/data-sync-service/jobs/run-migrations.ts +5 -2
- package/src/services/data-sync-service/jobs/to-erp.ts +17 -5
- package/src/services/erp-api-services/index.ts +9 -1
- package/src/services/mm-api-service/index.ts +1 -1
- package/src/services/mm-api-service/mm-api-service.ts +28 -0
- package/src/types/erp-types.ts +0 -1
- package/src/utils/application-initializer.ts +1 -1
- package/src/utils/connector-factory.ts +14 -3
- package/src/utils/connector-log/log-deduper.ts +284 -0
- package/src/utils/connector-log/mm-connector-logger-example.ts +97 -0
- package/src/utils/connector-log/mm-connector-logger.ts +177 -0
- package/src/utils/error-utils.ts +18 -0
- package/src/utils/index.ts +12 -5
- package/src/utils/mm-labor-ticket-helpers.ts +2 -2
- package/src/utils/standard-process-drivers/index.ts +2 -4
- package/src/utils/timezone.ts +28 -0
- package/dist/connector-factory-CQ8e7Tae.js.map +0 -1
- package/dist/logger-QG73MndU.js.map +0 -1
package/src/utils/connector-log/log-deduper.ts
ADDED
@@ -0,0 +1,284 @@
+import fs from 'fs';
+import path from 'path';
+import logger from '../../services/reporting-service/logger';
+import type { LogEntry, LogLevelString } from './mm-connector-logger';
+
+/**
+ * Implement a custom LogDeduper or use the provided FileLogDeduper
+ */
+export interface LogDeduper {
+  decide(entry: LogEntry, now: number): Promise<string | null>;
+  onSuccess(entry: LogEntry, now: number): Promise<void>;
+  retryFailedTransmissions?(send: (entry: LogEntry, message: string) => Promise<void>): Promise<void>;
+}
+
+interface LogDedupeEntry {
+  lastTransmitted: number;
+  suppressedCount: number;
+  firstUnsentEventTs: number; // Earliest unsent occurrence time for the current unsent batch (if any)
+  lastEventTs: number; // Most recent occurrence time (allowed or suppressed)
+  level: LogLevelString;
+  message: string;
+}
+
+/**
+ * LogDedupeStore is the store for of the most recent log entries for each dedupeKey
+ */
+interface LogDedupeStore {
+  [key: string]: LogDedupeEntry;
+}
+
+/**
+ * FileLogDeduper implements the LogDeduper interface to deduplicate
+ * logs by storing them in a file; this allows deduping across jobs.
+ */
+export class FileLogDeduper implements LogDeduper {
+  private readonly storeFilePath: string;
+  private readonly windowMs: number;
+  private readonly ttlMs: number;
+  private readonly sweepIntervalMs: number;
+  private lastSweepTsMs: number;
+
+  private readonly DEFAULT_WINDOW_TEN_MINS = 600;
+  private readonly DEFAULT_TTL_ONE_HOUR = 3600;
+  private readonly DEFAULT_SWEEP_INTERVAL_FIVE_MINS = 300;
+  private readonly DEFAULT_STORE_FILE_PATH = path.join('/tmp', 'log-deduplication.json');
+
+  /**
+   * Ctor.
+   * @param storeFilePath: The path to the file where the deduplication store is stored; recommended is to use the default
+   * @param windowSeconds: Suppression window. Duplicates within this period are suppressed.
+   * @param ttlSeconds: Eviction TTL. Store entries for keys inactive beyond this are removed. Enforced to be ≥ windowSeconds.
+   * @param sweepIntervalSeconds: Efficiency parameter. How often (min interval) to run opportunistic eviction; retry always sweeps
+   *                              The sweep is lazy, used only when the store is accessed
+   */
+  constructor({
+    storeFilePath = this.DEFAULT_STORE_FILE_PATH,
+    windowSeconds = this.DEFAULT_WINDOW_TEN_MINS,
+    ttlSeconds = this.DEFAULT_TTL_ONE_HOUR,
+    sweepIntervalSeconds = this.DEFAULT_SWEEP_INTERVAL_FIVE_MINS
+  } : {
+    storeFilePath?: string;
+    windowSeconds?: number;
+    ttlSeconds?: number;
+    sweepIntervalSeconds?: number;
+  } = {}) {
+
+    this.storeFilePath = storeFilePath;
+    this.windowMs = Math.max(1, windowSeconds) * 1000;
+    this.ttlMs = Math.max(this.windowMs, Math.max(1, ttlSeconds) * 1000);
+    this.sweepIntervalMs = Math.max(1, sweepIntervalSeconds) * 1000;
+    this.lastSweepTsMs = 0;
+
+    this.ensureStoreFileExists();
+  }
+
+  /**
+   * Deduplication gating function
+   * Returns the formatted message to send, or null to suppress
+   * Decision is based on the dedupeKey and the time of the entry
+   */
+  async decide(entry: LogEntry, now: number): Promise<string | null> {
+    if (!entry.dedupeKey || typeof entry.dedupeKey !== 'string' || entry.dedupeKey.trim().length === 0) {
+      throw new Error('dedupeKey is required and must be a non-empty string');
+    }
+    const key = entry.dedupeKey;
+
+    return this.withLock(async () => {
+      const store = this.readStore();
+
+      // Opportunistic eviction gated by sweep interval
+      if (now - this.lastSweepTsMs >= this.sweepIntervalMs) {
+        this.evictExpiredInStore(store, now);
+        this.lastSweepTsMs = now;
+        this.writeStore(store);
+      }
+      const existing = store[key];
+
+      if (existing) {
+        const withinWindow = existing.lastTransmitted > 0 && (existing.lastTransmitted + this.windowMs) > now;
+        if (withinWindow) {
+          // Suppress and increment count
+          store[key] = {
+            ...existing,
+            suppressedCount: existing.suppressedCount + 1,
+            firstUnsentEventTs: existing.suppressedCount === 0 ? (entry.eventTime ?? now) : existing.firstUnsentEventTs,
+            lastEventTs: entry.eventTime ?? now,
+            level: entry.level,
+            message: entry.message
+          };
+          this.writeStore(store);
+          return null;
+        }
+
+        // Outside window: allow, include timestamp and prior suppression count
+        const messageToSend = this.formatMessage(entry.message, entry.eventTime ?? now, existing.suppressedCount, existing.firstUnsentEventTs);
+        store[key] = {
+          ...existing,
+          suppressedCount: 0,
+          firstUnsentEventTs: 0,
+          lastEventTs: entry.eventTime ?? now,
+          level: entry.level,
+          message: entry.message
+        };
+        this.writeStore(store);
+        return messageToSend;
+      }
+
+      // New entry: allow, timestamp, no suppression count
+      const messageToSend = this.formatMessage(entry.message, entry.eventTime ?? now, 0);
+      store[key] = {
+        lastTransmitted: 0,
+        suppressedCount: 0,
+        firstUnsentEventTs: entry.eventTime ?? now,
+        lastEventTs: entry.eventTime ?? now,
+        level: entry.level,
+        message: entry.message
+      };
+      this.writeStore(store);
+      return messageToSend;
+    });
+  }
+
+  async onSuccess(entry: LogEntry, now: number): Promise<void> {
+    if (!entry.dedupeKey || typeof entry.dedupeKey !== 'string' || entry.dedupeKey.trim().length === 0) {
+      throw new Error('dedupeKey is required and must be a non-empty string');
+    }
+    const key = entry.dedupeKey;
+
+    await this.withLock(async () => {
+      const store = this.readStore();
+      const existing = store[key];
+      if (existing) {
+        store[key] = {
+          ...existing,
+          lastTransmitted: now,
+          firstUnsentEventTs: 0,
+          suppressedCount: 0
+        };
+        this.writeStore(store);
+      }
+    });
+  }
+
+  async retryFailedTransmissions(send: (entry: LogEntry, message: string) => Promise<void>): Promise<void> {
+    // Best-effort retry: first sweep expired entries, then iterate over entries with lastTransmitted === 0
+    const now = Date.now();
+    const entries = await this.withLock(async () => {
+      const store = this.readStore();
+      // Always sweep on retry
+      this.evictExpiredInStore(store, now);
+      this.lastSweepTsMs = now;
+      this.writeStore(store);
+      return Object.entries(store)
+        .filter(([, rec]) => rec.lastTransmitted === 0)
+        .map(([key, rec]) => ({ key, rec }));
+    });
+
+    for (const { key, rec } of entries) {
+      try {
+        const message = this.formatMessage(rec.message, rec.lastEventTs, rec.suppressedCount, rec.firstUnsentEventTs);
+        await send({ level: rec.level, message: rec.message, dedupeKey: key, eventTime: rec.lastEventTs }, message);
+        // Mark success
+        await this.withLock(async () => {
+          const store = this.readStore();
+          const current = store[key];
+          if (current) {
+            store[key] = {
+              ...current,
+              lastTransmitted: Date.now(),
+              suppressedCount: 0
+            };
+            this.writeStore(store);
+          }
+        });
+      } catch (err) {
+        // Leave entry for future retries; do not throw
+        logger.error('Failed to retry failed transmission', { key, rec, error: err });
+        return;
+      }
+    }
+  }
+
+  // --- Internals ---
+
+  private ensureStoreFileExists(): void {
+    try {
+      if (!fs.existsSync(this.storeFilePath)) {
+        fs.writeFileSync(this.storeFilePath, JSON.stringify({}), 'utf-8');
+      }
+    } catch {
+      // ignore
+    }
+  }
+
+  private readStore(): LogDedupeStore {
+    try {
+      if (!fs.existsSync(this.storeFilePath)) return {};
+      const content = fs.readFileSync(this.storeFilePath, 'utf-8');
+      return content ? (JSON.parse(content) as LogDedupeStore) : {};
+    } catch {
+      return {};
+    }
+  }
+
+  private writeStore(store: LogDedupeStore): void {
+    try {
+      fs.writeFileSync(this.storeFilePath, JSON.stringify(store, null, 2), 'utf-8');
+    } catch {
+      // ignore
+    }
+  }
+
+  private formatMessage(message: string, eventTs: number, suppressedCount: number, firstUnsentEventTs?: number): string {
+    const timestamp = new Date(eventTs).toISOString();
+    const base = `${timestamp} | ${message}`;
+    if (suppressedCount > 0) {
+      const since = firstUnsentEventTs && firstUnsentEventTs > 0 ? ` since ${new Date(firstUnsentEventTs).toISOString()}` : '';
+      return `${base} (${suppressedCount} suppressed${since})`;
+    }
+    return base;
+  }
+
+  private async withLock<T>(fn: () => Promise<T>): Promise<T> {
+    const lockPath = `${this.storeFilePath}.lock`;
+    const start = Date.now();
+
+    // Try to acquire the lock with retries (best effort)
+    // Avoid infinite waits; cap at ~3 seconds
+    while (true) {
+      try {
+        const fd = fs.openSync(lockPath, 'wx');
+        try {
+          const result = await fn();
+          return result;
+        } finally {
+          try { fs.closeSync(fd); } catch {/* ignore */}
+          try { fs.unlinkSync(lockPath); } catch {/* ignore */}
+        }
+      } catch {
+        // Lock exists
+        if (Date.now() - start > 3000) {
+          // Timed out; proceed without lock to avoid deadlock
+          return await fn();
+        }
+        await new Promise(resolve => setTimeout(resolve, 50));
+      }
+    }
+  }
+
+  /**
+   * Evict expired entries from the store based on the TTL and the key's last transmitted time
+   */
+  private evictExpiredInStore(store: LogDedupeStore, now: number): void {
+    const keys = Object.keys(store);
+    if (keys.length === 0) return;
+    for (const key of keys) {
+      const rec = store[key];
+      const referenceTs = rec.lastTransmitted > 0 ? rec.lastTransmitted : rec.lastEventTs;
+      if (now - referenceTs > this.ttlMs) {
+        delete store[key];
+      }
+    }
+  }
+}
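Note on the new LogDeduper interface above: FileLogDeduper is the file-backed implementation bundled with the SDK, while the interface itself is the extension point. As a rough sketch only (this is not part of the package), a process-local deduper compatible with that shape could look like:

// Minimal sketch: an in-memory deduper matching the LogDeduper shape above.
// It suppresses repeats of a dedupeKey inside a fixed window, but keeps state
// only in process memory, so unlike FileLogDeduper it does not dedupe across jobs.
class InMemoryLogDeduper {
  private lastSent = new Map<string, number>(); // dedupeKey -> last transmit time (ms)

  constructor(private readonly windowMs = 10 * 60 * 1000) {}

  // Return the message to send, or null to suppress this occurrence.
  async decide(entry: { dedupeKey: string; message: string }, now: number): Promise<string | null> {
    const last = this.lastSent.get(entry.dedupeKey) ?? 0;
    return now - last < this.windowMs ? null : entry.message;
  }

  // Record a successful transmission so later duplicates are suppressed.
  async onSuccess(entry: { dedupeKey: string }, now: number): Promise<void> {
    this.lastSent.set(entry.dedupeKey, now);
  }
}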
package/src/utils/connector-log/mm-connector-logger-example.ts
ADDED
@@ -0,0 +1,97 @@
+// import { FileLogDeduper } from './log-deduper';
+// import { MMConnectorLogger, LogEntry } from './mm-connector-logger';
+// import { HTTPError } from '../http-client';
+// import { getErrorType } from '../error-utils';
+// /**
+//  * Example usage of the MM Connector Logger with deduplication
+//  *
+//  * This example demonstrates how to use the MMConnectorLogger class to send
+//  * progress updates and error logs to the MM cloud. Deduplication is configured
+//  * via environment variables and applied consistently to all log messages.
+//  */
+
+// async function exampleUsage() {
+//   // Initialize logger with source identifier and deduplication settings
+//   const logger = new MMConnectorLogger('proshop', new FileLogDeduper({
+//     windowSeconds: 300, // 5 minute deduplication window
+//     ttlSeconds: 3600, // 1 hour TTL for store cleanup
+//     sweepIntervalSeconds: 600 // 10 minute sweep interval
+//   }));
+
+//   try {
+//     // Sends initial sync start message
+//     await logger.sendLog(new LogEntry({
+//       level: 'info',
+//       dedupeKey: 'SyncStart',
+//       message: 'Starting sync process'
+//     }));
+
+//     // Simulates some work with progress updates
+//     for (let i = 1; i <= 3; i++) {
+//       await logger.sendLog(new LogEntry({
+//         level: 'info',
+//         dedupeKey: 'SyncProgress',
+//         message: `Processing batch ${i}/3`
+//       }));
+//       await new Promise(resolve => setTimeout(resolve, 100)); // Simulate work
+//     }
+
+//     // Sends completion message
+//     await logger.sendLog(new LogEntry({
+//       level: 'info',
+//       dedupeKey: 'SyncComplete',
+//       message: 'Sync process completed successfully'
+//     }));
+
+//     // Demonstrates error logging with error type extraction
+//     try {
+//       throw new Error('Simulated database connection failure');
+//     } catch (error) {
+//       const errorType = getErrorType(error);
+//       await logger.sendLog(new LogEntry({
+//         level: 'error',
+//         dedupeKey: `DatabaseError_${errorType}`,
+//         message: `Database operation failed: ${error instanceof Error ? error.message : 'Unknown error'}`
+//       }));
+//     }
+
+//     // Demonstrates getErrorType with different error types
+//     const httpError = new HTTPError('Bad Request', 400, 'INVALID_REQUEST');
+//     const errorTypeFromCode = getErrorType(httpError); // Returns 'INVALID_REQUEST'
+
+//     const typeError = new TypeError('Cannot read property of undefined');
+//     const errorTypeFromName = getErrorType(typeError); // Returns 'TypeError'
+
+//     await logger.sendLog(new LogEntry({
+//       level: 'error',
+//       dedupeKey: `HTTPError_${errorTypeFromCode}`,
+//       message: `HTTP request failed with code: ${errorTypeFromCode}`
+//     }));
+//     await logger.sendLog(new LogEntry({
+//       level: 'error',
+//       dedupeKey: `TypeError_${errorTypeFromName}`,
+//       message: `Type error occurred: ${errorTypeFromName}`
+//     }));
+
+//     // Demonstrate deduplication - this message will be suppressed if sent within 5 minutes
+//     await logger.sendLog(new LogEntry({
+//       level: 'warn',
+//       dedupeKey: 'RateLimit',
+//       message: 'API rate limit approaching'
+//     }));
+//     await logger.sendLog(new LogEntry({
+//       level: 'warn',
+//       dedupeKey: 'RateLimit',
+//       message: 'API rate limit approaching'
+//     })); // This will be deduplicated
+
+//     // Retry any failed transmissions
+//     await logger.retryFailedTransmissions();
+
+//   } catch (error) {
+//     console.error('Failed to send logs:', error);
+//   } finally {
+//     // Clean up resources
+//     await logger.destroy();
+//   }
+// }
package/src/utils/connector-log/mm-connector-logger.ts
ADDED
@@ -0,0 +1,177 @@
+import { MMApiClient } from '../../services/mm-api-service/mm-api-service';
+import logger from '../../services/reporting-service/logger';
+import type { LogDeduper } from './log-deduper';
+
+export type LogLevelString = 'info' | 'warn' | 'error';
+
+/**
+ * dedupeKey - key used to deduplicate log entries when a LogDeduper is injected into the MMConnectorLogger
+ *
+ * A recommended practice is to use a meaningful unique identifier for this log entry type.
+ * Examples: SyncFromERPCompleted
+ *           SyncFromERPError:HTTPError
+ */
+export class LogEntry {
+  level: LogLevelString;
+  message: string;
+  dedupeKey: string;
+  eventTime: number;
+
+  constructor(params: { level: LogLevelString; message: string; dedupeKey: string}) {
+    this.level = params.level;
+    this.message = params.message;
+    this.dedupeKey = params.dedupeKey;
+    this.eventTime = Date.now();
+  }
+}
+
+/**
+ * The response from the MM connector logging API
+ */
+export interface LogResponse {
+  message: string;
+  processed?: number;
+}
+
+/**
+ * Helper function to check if a value is a LogResponse
+ */
+function isLogResponse(value: unknown): value is LogResponse {
+  if (value === null || typeof value !== 'object') return false;
+  const v = value as Record<string, unknown>;
+  if (typeof v.message !== 'string') return false;
+  if ('processed' in v && typeof v.processed !== 'number') return false;
+  return true;
+}
+
+/**
+ * MM Connector Logger for sending progress and error updates to the MM cloud
+ *
+ * This class provides helper methods to send individual logs to the
+ * MM connector logging API, providing support for validation, retries, and error handling
+ * according to the API specification.
+ *
+ * By injecting a LogDeduper, the log entries are deduplicated across jobs over a specified time window
+ */
+export class MMConnectorLogger {
+  private readonly MAX_MSG_LEN = 2000;
+
+  private mmApiClient: MMApiClient;
+  private readonly deduper?: LogDeduper;
+  private readonly source: string;
+
+  constructor(source: string, deduper?: LogDeduper) {
+    if (source.length < 1 || source.length > 64) {
+      throw new Error('source must be 1-64 characters');
+    }
+
+    this.mmApiClient = new MMApiClient();
+    this.deduper = deduper;
+    this.source = source;
+  }
+
+  // Deduplication helpers are delegated to injected FileLogDeduper
+
+  /**
+   * Send a single log entry to the MM cloud with deduplication.
+   *
+   * The deduplication is handled by the injected LogDeduper.
+   * If no deduper is injected, the log entry is sent without deduplication.
+   *
+   * The standard deduper, FileLogDeduper, stores the deduplication state in a file,
+   * allowing deduplication across jobs,
+   *
+   * @param logEntry - The log entry to send
+   * @returns Promise resolving to the API response or null if suppressed
+   * @throws HTTPError if the request fails or Error if the log entry is invalid
+   */
+  async sendLog(logEntry: LogEntry): Promise<LogResponse | null> {
+    this.validateLogEntry(logEntry);
+
+    const now = Date.now();
+    let messageToSend = logEntry.message;
+    if (this.deduper) {
+      const decision = await this.deduper.decide(logEntry, now);
+      if (decision === null) return null; // suppressed
+      messageToSend = decision;
+    }
+
+    try {
+      const logEntryToSend = {
+        source: this.source,
+        level: logEntry.level,
+        message: messageToSend
+      };
+
+      const response = await this.mmApiClient.sendConnectorLog(logEntryToSend);
+      if (this.deduper) {
+        await this.deduper.onSuccess(logEntry, now);
+      }
+
+      if (!isLogResponse(response)) {
+        logger.warn('Unexpected success response format from MM API for connector log', { response });
+        return { message: 'Unexpected success response format when sending log' };
+      }
+      return { message: response.message };
+
+    } catch (error) {
+      logger.error('Failed to send log to MM cloud', {
+        level: logEntry.level,
+        error: error instanceof Error ? error.message : 'Unknown error'
+      });
+      throw error;
+    }
+  }
+
+  /**
+   * @throws Error if validation fails
+   */
+  private validateLogEntry(logEntry: LogEntry): void {
+    const allowedLevels: ReadonlyArray<LogLevelString> = ['info', 'warn', 'error'];
+    if (!logEntry.level || !allowedLevels.includes(logEntry.level)) {
+      throw new Error(`level must be one of: ${allowedLevels.join(', ')}`);
+    }
+
+    if (!logEntry.message || typeof logEntry.message !== 'string') {
+      throw new Error('message is required and must be a string');
+    }
+
+    // Truncate message to MAX_MSG_LEN characters
+    logEntry.message = logEntry.message.slice(0, this.MAX_MSG_LEN);
+
+    if (!logEntry.dedupeKey || typeof logEntry.dedupeKey !== 'string') {
+      throw new Error('dedupeKey is required and must be a string');
+    }
+
+    if (logEntry.dedupeKey.trim().length < 1) {
+      throw new Error('dedupeKey must be a non-empty string');
+    }
+  }
+
+  /**
+   * Retry all failed transmissions silently
+   * This method attempts to retry all messages that failed to transmit
+   * and removes them from the failed list if successful, else leaves them for the client to retry
+   *
+   * Expected usage is by a client to call this as part of its own retry mechanism
+   */
+  async retryFailedTransmissions(): Promise<void> {
+    if (!this.deduper || !this.deduper.retryFailedTransmissions) {
+      return;
+    }
+    await this.deduper.retryFailedTransmissions(async (entry, message) => {
+      await this.mmApiClient.sendConnectorLog({
+        source: this.source,
+        level: entry.level,
+        message
+      });
+    });
+  }
+
+  /**
+   * Clean up resources
+   */
+  async destroy(): Promise<void> {
+    await this.mmApiClient.destroy();
+  }
+}
package/src/utils/error-utils.ts
ADDED
@@ -0,0 +1,18 @@
+/** A utility to get the type of an error, particularly
+ * useful for the purpose of error deduplication
+ *
+ * Note: prefers 'code' over 'name' in the expectation that this better
+ * represents the error type
+ */
+const isNonEmptyString = (v: unknown): v is string => typeof v === 'string' && v.trim().length > 0;
+
+export function getErrorType(error: unknown): string {
+  if (error && typeof error === 'object') {
+    const o = error as { code?: unknown; name?: unknown; constructor?: { name?: unknown } };
+    if (isNonEmptyString(o.code)) return o.code;
+    if (isNonEmptyString(o.name)) return o.name;
+    const ctorName = o.constructor?.name;
+    if (isNonEmptyString(ctorName) && ctorName !== 'Object') return ctorName;
+  }
+  return 'Error';
+}
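For reference only (not part of this diff), the precedence getErrorType applies above — 'code' first, then 'name', then the constructor name, with 'Error' as the fallback — works out as follows; the input values are made up for illustration:

import { getErrorType } from './error-utils'; // relative path, as used in the example file above

// Made-up inputs purely to show the precedence order.
getErrorType({ code: 'ECONNREFUSED', name: 'Error' }); // 'ECONNREFUSED' - a non-empty code wins over name
getErrorType(new RangeError('value out of range'));    // 'RangeError'   - no code, so the error name is used
getErrorType({});                                      // 'Error'        - plain objects fall through to the default
getErrorType('boom');                                  // 'Error'        - non-object inputs also get the default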
package/src/utils/index.ts
CHANGED
@@ -15,7 +15,7 @@ export {
  * Timezone and time-related utilities
  */
 export { getTimezoneOffsetAndPersist } from "./time-utils";
-export { formatDateWithTZOffset, convertToLocalTime } from "./timezone";
+export { formatDateWithTZOffset, convertToLocalTime, toISOWithOffset } from "./timezone";
 export { applyTimezoneOffsetsToFields } from "./time-utils";
 export * from "./time-utils";

@@ -31,10 +31,8 @@ export { BatchCacheManager } from "../services/caching-service/batch-cache-manag
  * Process and data transformation utilities
  */
 export { StandardProcessDrivers } from "./standard-process-drivers/";
-export type {
-
-  MMBatchValidationError,
-} from "./standard-process-drivers/";
+export type { WriteEntitiesToMMResult } from "./standard-process-drivers/";
+export { MMBatchValidationError } from "./standard-process-drivers/";
 export { getCachedTimezoneOffset } from "./local-data-store/jobs-shared-data";

 // Local data store
@@ -54,11 +52,20 @@ export { getERPAPITypeFromEntity } from "./erp-type-from-entity";
  * HTTP Client utilities
  */
 export { HTTPClientFactory } from "./http-client";
+export { HTTPError } from "./http-client";
+export type { HTTPClient, HTTPRequestConfig, HTTPResponse } from "./http-client";

 /**
  * MM Labor Ticket utilities
  */
 export * from "./mm-labor-ticket-helpers";
+export { getErrorType } from './error-utils';
+
+/**
+ * MM Connector Logger utilities
+ */
+export { MMConnectorLogger, LogEntry, type LogResponse, type LogLevelString } from './connector-log/mm-connector-logger';
+export { FileLogDeduper } from './connector-log/log-deduper';

 /**
  * Application initializer

package/src/utils/mm-labor-ticket-helpers.ts
CHANGED
@@ -1,4 +1,4 @@
-import { convertToLocalTime } from "./timezone";
+import { convertToLocalTime, toISOWithOffset } from "./timezone";
 import { MMReceiveLaborTicket } from "../services/mm-api-service/types/receive-types";

 /**
@@ -22,7 +22,7 @@ export function convertLaborTicketToLocalTimezone(

   timeFields.forEach((field) => {
     const localTime = convertToLocalTime(laborTicket[field], timezoneOffset);
-    laborTicket[field] = localTime
+    laborTicket[field] = localTime ? toISOWithOffset(localTime, timezoneOffset) : null;
   });
   return laborTicket;
 }

package/src/utils/standard-process-drivers/index.ts
CHANGED
@@ -1,6 +1,4 @@
 // Public exports for standard-process-drivers
 export { StandardProcessDrivers } from "./standard-process-drivers";
-export type {
-
-  MMBatchValidationError,
-} from "./standard-process-drivers";
+export type { WriteEntitiesToMMResult } from "./standard-process-drivers";
+export { MMBatchValidationError } from "./standard-process-drivers";
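Illustration only, not part of the diff: with the re-exports above, a connector would typically pull the logging utilities from the package entry point. This sketch assumes the package root re-exports these names (src/index.ts is also updated in this release, +19 -5, but its hunk is not expanded here); the 'my-erp' source name and the function name are made up, while the dedupeKey follows the SyncFromERPCompleted example from the doc comment:

import { MMConnectorLogger, LogEntry, FileLogDeduper } from '@machinemetrics/mm-erp-sdk';

// Hypothetical consumer-side wiring of the logger with the file-backed deduper.
async function reportSyncComplete(): Promise<void> {
  const connectorLogger = new MMConnectorLogger('my-erp', new FileLogDeduper());
  try {
    await connectorLogger.sendLog(new LogEntry({
      level: 'info',
      dedupeKey: 'SyncFromERPCompleted',
      message: 'Sync from ERP completed'
    }));
  } finally {
    await connectorLogger.destroy();
  }
}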
package/src/utils/timezone.ts
CHANGED
@@ -94,3 +94,31 @@ export const formatDateWithTZOffset = (
   // Append the timezone offset
   return `${isoDate}${sign}${hours}:${minutes}`;
 };
+
+/**
+ * Formats a Date object as an ISO string with the specified timezone offset
+ * @param date The Date object to format (should be a Date that has been shifted by convertToLocalTime)
+ * @param timezoneOffset The timezone offset in hours
+ * @returns ISO string with offset in format: YYYY-MM-DDTHH:mm:ss.SSS±HH:MM
+ */
+export const toISOWithOffset = (date: Date, timezoneOffset: number): string => {
+  const sign = timezoneOffset >= 0 ? "+" : "-";
+  const abs = Math.abs(timezoneOffset);
+  const hours = Math.floor(abs);
+  const minutes = Math.round((abs - hours) * 60);
+  const pad2 = (n: number) => n.toString().padStart(2, "0");
+  const pad3 = (n: number) => n.toString().padStart(3, "0");
+
+  // Use UTC getters since convertToLocalTime shifts the Date's internal timestamp
+  // The UTC components of the shifted Date represent the local wall time
+  const yyyy = date.getUTCFullYear();
+  const MM = pad2(date.getUTCMonth() + 1);
+  const dd = pad2(date.getUTCDate());
+  const HH = pad2(date.getUTCHours());
+  const mm = pad2(date.getUTCMinutes());
+  const ss = pad2(date.getUTCSeconds());
+  const SSS = pad3(date.getUTCMilliseconds());
+  const off = `${sign}${pad2(hours)}:${pad2(minutes)}`;
+
+  return `${yyyy}-${MM}-${dd}T${HH}:${mm}:${ss}.${SSS}${off}`;
+};
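Illustration only, not part of the diff: toISOWithOffset expects a Date that convertToLocalTime has already shifted, so its UTC fields read as the local wall time, and the helper simply appends the offset rather than converting again. With made-up values:

import { toISOWithOffset } from "./timezone"; // path as used elsewhere in src/utils

// A UTC timestamp shifted by convertToLocalTime(ts, -5) ends up with UTC fields
// reading as the local wall time (here 13:30); the offset is then appended as-is.
const shifted = new Date(Date.UTC(2024, 2, 5, 13, 30, 0, 0));
const withWholeOffset = toISOWithOffset(shifted, -5);     // "2024-03-05T13:30:00.000-05:00"
const withHalfHourOffset = toISOWithOffset(shifted, 5.5); // "2024-03-05T13:30:00.000+05:30" (fractional hours become minutes)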
package/dist/connector-factory-CQ8e7Tae.js.map
DELETED
@@ -1 +0,0 @@
-{"version":3,"file":"connector-factory-CQ8e7Tae.js","sources":["../src/utils/connector-factory.ts"],"sourcesContent":["import { IERPConnector } from \"../types/erp-connector\";\nimport logger from \"../services/reporting-service/logger\";\n\n/**\n * Helper function to dynamically import and create connector instance from a file path\n * @param connectorPath - The file path to the connector module\n * @returns A new instance of the IERPConnector\n */\nexport const createConnectorFromPath = async (\n connectorPath: string\n): Promise<IERPConnector> => {\n try {\n // Dynamic import the connector module\n const connectorModule = await import(connectorPath);\n\n // Get the default export or named export\n const ConnectorClass =\n connectorModule.default ||\n connectorModule[Object.keys(connectorModule)[0]];\n\n if (!ConnectorClass) {\n throw new Error(`No connector class found in module: ${connectorPath}`);\n }\n\n // Create new instance of the connector\n return new ConnectorClass();\n } catch (error) {\n logger.error(\n `Failed to create connector instance from path: ${connectorPath}`,\n { error }\n );\n throw error;\n }\n};\n"],"names":[],"mappings":";AAQO,MAAM,0BAA0B,OACrC,kBAC2B;AAC3B,MAAI;AAEF,UAAM,kBAAkB,MAAM,OAAO;AAGrC,UAAM,iBACJ,gBAAgB,WAChB,gBAAgB,OAAO,KAAK,eAAe,EAAE,CAAC,CAAC;AAEjD,QAAI,CAAC,gBAAgB;AACnB,YAAM,IAAI,MAAM,uCAAuC,aAAa,EAAE;AAAA,IACxE;AAGA,WAAO,IAAI,eAAA;AAAA,EACb,SAAS,OAAO;AACd,WAAO;AAAA,MACL,kDAAkD,aAAa;AAAA,MAC/D,EAAE,MAAA;AAAA,IAAM;AAEV,UAAM;AAAA,EACR;AACF;"}