@machinemetrics/mm-erp-sdk 0.1.5-beta.0 → 0.1.6-beta.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -1
- package/dist/{config-2l5vnNkA.js → config-WKwu1mMo.js} +6 -6
- package/dist/{config-2l5vnNkA.js.map → config-WKwu1mMo.js.map} +1 -1
- package/dist/{connector-factory-CQ8e7Tae.js → connector-factory-DHmMYsRs.js} +12 -3
- package/dist/connector-factory-DHmMYsRs.js.map +1 -0
- package/dist/{hashed-cache-manager-Ci59eC75.js → hashed-cache-manager-CtDhFqj6.js} +5 -4
- package/dist/{hashed-cache-manager-Ci59eC75.js.map → hashed-cache-manager-CtDhFqj6.js.map} +1 -1
- package/dist/{index-CXbOvFyf.js → index-aci_wdcn.js} +7 -7
- package/dist/{index-CXbOvFyf.js.map → index-aci_wdcn.js.map} +1 -1
- package/dist/index.d.ts +16 -6
- package/dist/index.d.ts.map +1 -1
- package/dist/{logger-QG73MndU.js → logger-hqtl8hFM.js} +6 -6
- package/dist/{logger-QG73MndU.js.map → logger-hqtl8hFM.js.map} +1 -1
- package/dist/mm-erp-sdk.js +653 -8
- package/dist/mm-erp-sdk.js.map +1 -1
- package/dist/services/data-sync-service/index.d.ts +1 -1
- package/dist/services/data-sync-service/index.d.ts.map +1 -1
- package/dist/services/data-sync-service/jobs/clean-up-expired-cache.d.ts +2 -0
- package/dist/services/data-sync-service/jobs/clean-up-expired-cache.d.ts.map +1 -1
- package/dist/services/data-sync-service/jobs/clean-up-expired-cache.js +42 -41
- package/dist/services/data-sync-service/jobs/clean-up-expired-cache.js.map +1 -1
- package/dist/services/data-sync-service/jobs/from-erp.d.ts.map +1 -1
- package/dist/services/data-sync-service/jobs/from-erp.js +11 -5
- package/dist/services/data-sync-service/jobs/from-erp.js.map +1 -1
- package/dist/services/data-sync-service/jobs/retry-failed-labor-tickets.d.ts +2 -0
- package/dist/services/data-sync-service/jobs/retry-failed-labor-tickets.d.ts.map +1 -1
- package/dist/services/data-sync-service/jobs/retry-failed-labor-tickets.js +39 -40
- package/dist/services/data-sync-service/jobs/retry-failed-labor-tickets.js.map +1 -1
- package/dist/services/data-sync-service/jobs/run-migrations.d.ts.map +1 -1
- package/dist/services/data-sync-service/jobs/run-migrations.js +4 -3
- package/dist/services/data-sync-service/jobs/run-migrations.js.map +1 -1
- package/dist/services/data-sync-service/jobs/to-erp.d.ts.map +1 -1
- package/dist/services/data-sync-service/jobs/to-erp.js +15 -5
- package/dist/services/data-sync-service/jobs/to-erp.js.map +1 -1
- package/dist/services/erp-api-services/index.d.ts +5 -1
- package/dist/services/erp-api-services/index.d.ts.map +1 -1
- package/dist/services/mm-api-service/index.d.ts +3 -2
- package/dist/services/mm-api-service/index.d.ts.map +1 -1
- package/dist/services/mm-api-service/mm-api-service.d.ts +20 -0
- package/dist/services/mm-api-service/mm-api-service.d.ts.map +1 -1
- package/dist/services/psql-erp-service/configuration.d.ts +10 -0
- package/dist/services/psql-erp-service/configuration.d.ts.map +1 -0
- package/dist/services/psql-erp-service/index.d.ts +19 -0
- package/dist/services/psql-erp-service/index.d.ts.map +1 -0
- package/dist/services/psql-erp-service/internal/psql-config.d.ts +28 -0
- package/dist/services/psql-erp-service/internal/psql-config.d.ts.map +1 -0
- package/dist/services/psql-erp-service/internal/psql-labor-ticket-operations.d.ts +40 -0
- package/dist/services/psql-erp-service/internal/psql-labor-ticket-operations.d.ts.map +1 -0
- package/dist/services/psql-erp-service/internal/types/psql-types.d.ts +15 -0
- package/dist/services/psql-erp-service/internal/types/psql-types.d.ts.map +1 -0
- package/dist/services/psql-erp-service/psql-helpers.d.ts +32 -0
- package/dist/services/psql-erp-service/psql-helpers.d.ts.map +1 -0
- package/dist/services/psql-erp-service/psql-service.d.ts +49 -0
- package/dist/services/psql-erp-service/psql-service.d.ts.map +1 -0
- package/dist/utils/connector-factory.d.ts.map +1 -1
- package/dist/utils/connector-log/log-deduper.d.ts +56 -0
- package/dist/utils/connector-log/log-deduper.d.ts.map +1 -0
- package/dist/utils/connector-log/mm-connector-logger-example.d.ts +1 -0
- package/dist/utils/connector-log/mm-connector-logger-example.d.ts.map +1 -0
- package/dist/utils/connector-log/mm-connector-logger.d.ts +74 -0
- package/dist/utils/connector-log/mm-connector-logger.d.ts.map +1 -0
- package/dist/utils/error-utils.d.ts +2 -0
- package/dist/utils/error-utils.d.ts.map +1 -0
- package/dist/utils/index.d.ts +10 -1
- package/dist/utils/index.d.ts.map +1 -1
- package/dist/utils/standard-process-drivers/index.d.ts +2 -1
- package/dist/utils/standard-process-drivers/index.d.ts.map +1 -1
- package/package.json +3 -1
- package/src/index.ts +43 -7
- package/src/services/data-sync-service/index.ts +1 -4
- package/src/services/data-sync-service/jobs/clean-up-expired-cache.ts +19 -7
- package/src/services/data-sync-service/jobs/from-erp.ts +12 -5
- package/src/services/data-sync-service/jobs/retry-failed-labor-tickets.ts +15 -5
- package/src/services/data-sync-service/jobs/run-migrations.ts +5 -2
- package/src/services/data-sync-service/jobs/to-erp.ts +17 -5
- package/src/services/erp-api-services/index.ts +9 -1
- package/src/services/mm-api-service/index.ts +1 -1
- package/src/services/mm-api-service/mm-api-service.ts +28 -0
- package/src/services/psql-erp-service/configuration.ts +9 -0
- package/src/services/psql-erp-service/index.ts +28 -0
- package/src/services/psql-erp-service/internal/psql-config.ts +13 -0
- package/src/services/psql-erp-service/internal/psql-labor-ticket-operations.ts +58 -0
- package/src/services/psql-erp-service/internal/types/psql-types.ts +17 -0
- package/src/services/psql-erp-service/psql-helpers.ts +90 -0
- package/src/services/psql-erp-service/psql-service.ts +215 -0
- package/src/utils/application-initializer.ts +1 -1
- package/src/utils/connector-factory.ts +14 -3
- package/src/utils/connector-log/log-deduper.ts +284 -0
- package/src/utils/connector-log/mm-connector-logger-example.ts +97 -0
- package/src/utils/connector-log/mm-connector-logger.ts +177 -0
- package/src/utils/error-utils.ts +18 -0
- package/src/utils/index.ts +11 -4
- package/src/utils/standard-process-drivers/index.ts +2 -4
- package/dist/connector-factory-CQ8e7Tae.js.map +0 -1
package/dist/mm-erp-sdk.js
CHANGED
|
@@ -1,18 +1,20 @@
|
|
|
1
|
-
import { C as CoreConfiguration, H as HashedCacheManager } from "./hashed-cache-manager-
|
|
2
|
-
import { g, a } from "./hashed-cache-manager-
|
|
3
|
-
import { l as logger } from "./logger-
|
|
4
|
-
import { g as getCachedMMToken, s as setCachedMMToken, a as setTimezoneOffsetInCache, b as getCachedTimezoneOffset, S as SQLiteCoordinator } from "./index-
|
|
5
|
-
import { c, d } from "./index-
|
|
1
|
+
import { C as CoreConfiguration, H as HashedCacheManager } from "./hashed-cache-manager-CtDhFqj6.js";
|
|
2
|
+
import { E, g, a } from "./hashed-cache-manager-CtDhFqj6.js";
|
|
3
|
+
import { l as logger } from "./logger-hqtl8hFM.js";
|
|
4
|
+
import { g as getCachedMMToken, s as setCachedMMToken, a as setTimezoneOffsetInCache, b as getCachedTimezoneOffset, S as SQLiteCoordinator } from "./index-aci_wdcn.js";
|
|
5
|
+
import { c, d } from "./index-aci_wdcn.js";
|
|
6
6
|
import axios, { AxiosError } from "axios";
|
|
7
7
|
import knex from "knex";
|
|
8
8
|
import { c as config } from "./knexfile-1qKKIORB.js";
|
|
9
|
-
import "
|
|
9
|
+
import fs from "fs";
|
|
10
|
+
import path from "path";
|
|
11
|
+
import "./connector-factory-DHmMYsRs.js";
|
|
10
12
|
import Bree from "bree";
|
|
11
13
|
import Graceful from "@ladjs/graceful";
|
|
12
|
-
import path from "path";
|
|
13
14
|
import { fileURLToPath } from "url";
|
|
14
15
|
import sql from "mssql";
|
|
15
16
|
import { z } from "zod";
|
|
17
|
+
import odbc from "odbc";
|
|
16
18
|
var ERPType = /* @__PURE__ */ ((ERPType2) => {
|
|
17
19
|
ERPType2["INVALID"] = "INVALID";
|
|
18
20
|
ERPType2["D365"] = "D365";
|
|
@@ -929,6 +931,38 @@ class MMApiClient {
|
|
|
929
931
|
(ticket) => new MMReceiveLaborTicket(ticket)
|
|
930
932
|
);
|
|
931
933
|
}
|
|
934
|
+
/**
|
|
935
|
+
* Send connector logs to the MM API
|
|
936
|
+
* @param logEntry Single log entry to send
|
|
937
|
+
* @returns Promise with the API response
|
|
938
|
+
*/
|
|
939
|
+
async sendConnectorLog(logEntry) {
|
|
940
|
+
return await this.postData(
|
|
941
|
+
"/connector/logs",
|
|
942
|
+
logEntry,
|
|
943
|
+
{},
|
|
944
|
+
{
|
|
945
|
+
baseUrl: "ApiBase"
|
|
946
|
+
/* ApiBase */
|
|
947
|
+
}
|
|
948
|
+
);
|
|
949
|
+
}
|
|
950
|
+
/**
|
|
951
|
+
* Send bulk connector logs to the MM API
|
|
952
|
+
* @param logs Array of log entries to send
|
|
953
|
+
* @returns Promise with the API response
|
|
954
|
+
*/
|
|
955
|
+
async sendBulkConnectorLogs(logs) {
|
|
956
|
+
return await this.postData(
|
|
957
|
+
"/connector/logs",
|
|
958
|
+
{ logs },
|
|
959
|
+
{},
|
|
960
|
+
{
|
|
961
|
+
baseUrl: "ApiBase"
|
|
962
|
+
/* ApiBase */
|
|
963
|
+
}
|
|
964
|
+
);
|
|
965
|
+
}
|
|
932
966
|
async deleteFailedLaborTicketIds(system, laborTicketRefs) {
|
|
933
967
|
return await this.postData(
|
|
934
968
|
`${this.resourceURLs[ERPObjType.LABOR_TICKETS]}/failed/remove`,
|
|
@@ -3042,6 +3076,353 @@ function getERPAPITypeFromEntity(entity, entityMap) {
|
|
|
3042
3076
|
);
|
|
3043
3077
|
return entry ? Number(entry[0]) : void 0;
|
|
3044
3078
|
}
|
|
3079
|
+
const isNonEmptyString = (v) => typeof v === "string" && v.trim().length > 0;
|
|
3080
|
+
function getErrorType(error) {
|
|
3081
|
+
if (error && typeof error === "object") {
|
|
3082
|
+
const o = error;
|
|
3083
|
+
if (isNonEmptyString(o.code)) return o.code;
|
|
3084
|
+
if (isNonEmptyString(o.name)) return o.name;
|
|
3085
|
+
const ctorName = o.constructor?.name;
|
|
3086
|
+
if (isNonEmptyString(ctorName) && ctorName !== "Object") return ctorName;
|
|
3087
|
+
}
|
|
3088
|
+
return "Error";
|
|
3089
|
+
}
|
|
3090
|
+
class LogEntry {
|
|
3091
|
+
level;
|
|
3092
|
+
message;
|
|
3093
|
+
dedupeKey;
|
|
3094
|
+
eventTime;
|
|
3095
|
+
constructor(params) {
|
|
3096
|
+
this.level = params.level;
|
|
3097
|
+
this.message = params.message;
|
|
3098
|
+
this.dedupeKey = params.dedupeKey;
|
|
3099
|
+
this.eventTime = Date.now();
|
|
3100
|
+
}
|
|
3101
|
+
}
|
|
3102
|
+
function isLogResponse(value) {
|
|
3103
|
+
if (value === null || typeof value !== "object") return false;
|
|
3104
|
+
const v = value;
|
|
3105
|
+
if (typeof v.message !== "string") return false;
|
|
3106
|
+
if ("processed" in v && typeof v.processed !== "number") return false;
|
|
3107
|
+
return true;
|
|
3108
|
+
}
|
|
3109
|
+
class MMConnectorLogger {
|
|
3110
|
+
MAX_MSG_LEN = 2e3;
|
|
3111
|
+
mmApiClient;
|
|
3112
|
+
deduper;
|
|
3113
|
+
source;
|
|
3114
|
+
constructor(source, deduper) {
|
|
3115
|
+
if (source.length < 1 || source.length > 64) {
|
|
3116
|
+
throw new Error("source must be 1-64 characters");
|
|
3117
|
+
}
|
|
3118
|
+
this.mmApiClient = new MMApiClient();
|
|
3119
|
+
this.deduper = deduper;
|
|
3120
|
+
this.source = source;
|
|
3121
|
+
}
|
|
3122
|
+
// Deduplication helpers are delegated to injected FileLogDeduper
|
|
3123
|
+
/**
|
|
3124
|
+
* Send a single log entry to the MM cloud with deduplication.
|
|
3125
|
+
*
|
|
3126
|
+
* The deduplication is handled by the injected LogDeduper.
|
|
3127
|
+
* If no deduper is injected, the log entry is sent without deduplication.
|
|
3128
|
+
*
|
|
3129
|
+
* The standard deduper, FileLogDeduper, stores the deduplication state in a file,
|
|
3130
|
+
* allowing deduplication across jobs,
|
|
3131
|
+
*
|
|
3132
|
+
* @param logEntry - The log entry to send
|
|
3133
|
+
* @returns Promise resolving to the API response or null if suppressed
|
|
3134
|
+
* @throws HTTPError if the request fails or Error if the log entry is invalid
|
|
3135
|
+
*/
|
|
3136
|
+
async sendLog(logEntry) {
|
|
3137
|
+
this.validateLogEntry(logEntry);
|
|
3138
|
+
const now = Date.now();
|
|
3139
|
+
let messageToSend = logEntry.message;
|
|
3140
|
+
if (this.deduper) {
|
|
3141
|
+
const decision = await this.deduper.decide(logEntry, now);
|
|
3142
|
+
if (decision === null) return null;
|
|
3143
|
+
messageToSend = decision;
|
|
3144
|
+
}
|
|
3145
|
+
try {
|
|
3146
|
+
const logEntryToSend = {
|
|
3147
|
+
source: this.source,
|
|
3148
|
+
level: logEntry.level,
|
|
3149
|
+
message: messageToSend
|
|
3150
|
+
};
|
|
3151
|
+
const response = await this.mmApiClient.sendConnectorLog(logEntryToSend);
|
|
3152
|
+
if (this.deduper) {
|
|
3153
|
+
await this.deduper.onSuccess(logEntry, now);
|
|
3154
|
+
}
|
|
3155
|
+
if (!isLogResponse(response)) {
|
|
3156
|
+
logger.warn("Unexpected success response format from MM API for connector log", { response });
|
|
3157
|
+
return { message: "Unexpected success response format when sending log" };
|
|
3158
|
+
}
|
|
3159
|
+
return { message: response.message };
|
|
3160
|
+
} catch (error) {
|
|
3161
|
+
logger.error("Failed to send log to MM cloud", {
|
|
3162
|
+
level: logEntry.level,
|
|
3163
|
+
error: error instanceof Error ? error.message : "Unknown error"
|
|
3164
|
+
});
|
|
3165
|
+
throw error;
|
|
3166
|
+
}
|
|
3167
|
+
}
|
|
3168
|
+
/**
|
|
3169
|
+
* @throws Error if validation fails
|
|
3170
|
+
*/
|
|
3171
|
+
validateLogEntry(logEntry) {
|
|
3172
|
+
const allowedLevels = ["info", "warn", "error"];
|
|
3173
|
+
if (!logEntry.level || !allowedLevels.includes(logEntry.level)) {
|
|
3174
|
+
throw new Error(`level must be one of: ${allowedLevels.join(", ")}`);
|
|
3175
|
+
}
|
|
3176
|
+
if (!logEntry.message || typeof logEntry.message !== "string") {
|
|
3177
|
+
throw new Error("message is required and must be a string");
|
|
3178
|
+
}
|
|
3179
|
+
logEntry.message = logEntry.message.slice(0, this.MAX_MSG_LEN);
|
|
3180
|
+
if (!logEntry.dedupeKey || typeof logEntry.dedupeKey !== "string") {
|
|
3181
|
+
throw new Error("dedupeKey is required and must be a string");
|
|
3182
|
+
}
|
|
3183
|
+
if (logEntry.dedupeKey.trim().length < 1) {
|
|
3184
|
+
throw new Error("dedupeKey must be a non-empty string");
|
|
3185
|
+
}
|
|
3186
|
+
}
|
|
3187
|
+
/**
|
|
3188
|
+
* Retry all failed transmissions silently
|
|
3189
|
+
* This method attempts to retry all messages that failed to transmit
|
|
3190
|
+
* and removes them from the failed list if successful, else leaves them for the client to retry
|
|
3191
|
+
*
|
|
3192
|
+
* Expected usage is by a client to call this as part of its own retry mechanism
|
|
3193
|
+
*/
|
|
3194
|
+
async retryFailedTransmissions() {
|
|
3195
|
+
if (!this.deduper || !this.deduper.retryFailedTransmissions) {
|
|
3196
|
+
return;
|
|
3197
|
+
}
|
|
3198
|
+
await this.deduper.retryFailedTransmissions(async (entry, message) => {
|
|
3199
|
+
await this.mmApiClient.sendConnectorLog({
|
|
3200
|
+
source: this.source,
|
|
3201
|
+
level: entry.level,
|
|
3202
|
+
message
|
|
3203
|
+
});
|
|
3204
|
+
});
|
|
3205
|
+
}
|
|
3206
|
+
/**
|
|
3207
|
+
* Clean up resources
|
|
3208
|
+
*/
|
|
3209
|
+
async destroy() {
|
|
3210
|
+
await this.mmApiClient.destroy();
|
|
3211
|
+
}
|
|
3212
|
+
}
|
|
3213
|
+
class FileLogDeduper {
|
|
3214
|
+
storeFilePath;
|
|
3215
|
+
windowMs;
|
|
3216
|
+
ttlMs;
|
|
3217
|
+
sweepIntervalMs;
|
|
3218
|
+
lastSweepTsMs;
|
|
3219
|
+
DEFAULT_WINDOW_TEN_MINS = 600;
|
|
3220
|
+
DEFAULT_TTL_ONE_HOUR = 3600;
|
|
3221
|
+
DEFAULT_SWEEP_INTERVAL_FIVE_MINS = 300;
|
|
3222
|
+
DEFAULT_STORE_FILE_PATH = path.join("/tmp", "log-deduplication.json");
|
|
3223
|
+
/**
|
|
3224
|
+
* Ctor.
|
|
3225
|
+
* @param storeFilePath: The path to the file where the deduplication store is stored; recommended is to use the default
|
|
3226
|
+
* @param windowSeconds: Suppression window. Duplicates within this period are suppressed.
|
|
3227
|
+
* @param ttlSeconds: Eviction TTL. Store entries for keys inactive beyond this are removed. Enforced to be ≥ windowSeconds.
|
|
3228
|
+
* @param sweepIntervalSeconds: Efficiency parameter. How often (min interval) to run opportunistic eviction; retry always sweeps
|
|
3229
|
+
* The sweep is lazy, used only when the store is accessed
|
|
3230
|
+
*/
|
|
3231
|
+
constructor({
|
|
3232
|
+
storeFilePath = this.DEFAULT_STORE_FILE_PATH,
|
|
3233
|
+
windowSeconds = this.DEFAULT_WINDOW_TEN_MINS,
|
|
3234
|
+
ttlSeconds = this.DEFAULT_TTL_ONE_HOUR,
|
|
3235
|
+
sweepIntervalSeconds = this.DEFAULT_SWEEP_INTERVAL_FIVE_MINS
|
|
3236
|
+
} = {}) {
|
|
3237
|
+
this.storeFilePath = storeFilePath;
|
|
3238
|
+
this.windowMs = Math.max(1, windowSeconds) * 1e3;
|
|
3239
|
+
this.ttlMs = Math.max(this.windowMs, Math.max(1, ttlSeconds) * 1e3);
|
|
3240
|
+
this.sweepIntervalMs = Math.max(1, sweepIntervalSeconds) * 1e3;
|
|
3241
|
+
this.lastSweepTsMs = 0;
|
|
3242
|
+
this.ensureStoreFileExists();
|
|
3243
|
+
}
|
|
3244
|
+
/**
|
|
3245
|
+
* Deduplication gating function
|
|
3246
|
+
* Returns the formatted message to send, or null to suppress
|
|
3247
|
+
* Decision is based on the dedupeKey and the time of the entry
|
|
3248
|
+
*/
|
|
3249
|
+
async decide(entry, now) {
|
|
3250
|
+
if (!entry.dedupeKey || typeof entry.dedupeKey !== "string" || entry.dedupeKey.trim().length === 0) {
|
|
3251
|
+
throw new Error("dedupeKey is required and must be a non-empty string");
|
|
3252
|
+
}
|
|
3253
|
+
const key = entry.dedupeKey;
|
|
3254
|
+
return this.withLock(async () => {
|
|
3255
|
+
const store = this.readStore();
|
|
3256
|
+
if (now - this.lastSweepTsMs >= this.sweepIntervalMs) {
|
|
3257
|
+
this.evictExpiredInStore(store, now);
|
|
3258
|
+
this.lastSweepTsMs = now;
|
|
3259
|
+
this.writeStore(store);
|
|
3260
|
+
}
|
|
3261
|
+
const existing = store[key];
|
|
3262
|
+
if (existing) {
|
|
3263
|
+
const withinWindow = existing.lastTransmitted > 0 && existing.lastTransmitted + this.windowMs > now;
|
|
3264
|
+
if (withinWindow) {
|
|
3265
|
+
store[key] = {
|
|
3266
|
+
...existing,
|
|
3267
|
+
suppressedCount: existing.suppressedCount + 1,
|
|
3268
|
+
firstUnsentEventTs: existing.suppressedCount === 0 ? entry.eventTime ?? now : existing.firstUnsentEventTs,
|
|
3269
|
+
lastEventTs: entry.eventTime ?? now,
|
|
3270
|
+
level: entry.level,
|
|
3271
|
+
message: entry.message
|
|
3272
|
+
};
|
|
3273
|
+
this.writeStore(store);
|
|
3274
|
+
return null;
|
|
3275
|
+
}
|
|
3276
|
+
const messageToSend2 = this.formatMessage(entry.message, entry.eventTime ?? now, existing.suppressedCount, existing.firstUnsentEventTs);
|
|
3277
|
+
store[key] = {
|
|
3278
|
+
...existing,
|
|
3279
|
+
suppressedCount: 0,
|
|
3280
|
+
firstUnsentEventTs: 0,
|
|
3281
|
+
lastEventTs: entry.eventTime ?? now,
|
|
3282
|
+
level: entry.level,
|
|
3283
|
+
message: entry.message
|
|
3284
|
+
};
|
|
3285
|
+
this.writeStore(store);
|
|
3286
|
+
return messageToSend2;
|
|
3287
|
+
}
|
|
3288
|
+
const messageToSend = this.formatMessage(entry.message, entry.eventTime ?? now, 0);
|
|
3289
|
+
store[key] = {
|
|
3290
|
+
lastTransmitted: 0,
|
|
3291
|
+
suppressedCount: 0,
|
|
3292
|
+
firstUnsentEventTs: entry.eventTime ?? now,
|
|
3293
|
+
lastEventTs: entry.eventTime ?? now,
|
|
3294
|
+
level: entry.level,
|
|
3295
|
+
message: entry.message
|
|
3296
|
+
};
|
|
3297
|
+
this.writeStore(store);
|
|
3298
|
+
return messageToSend;
|
|
3299
|
+
});
|
|
3300
|
+
}
|
|
3301
|
+
async onSuccess(entry, now) {
|
|
3302
|
+
if (!entry.dedupeKey || typeof entry.dedupeKey !== "string" || entry.dedupeKey.trim().length === 0) {
|
|
3303
|
+
throw new Error("dedupeKey is required and must be a non-empty string");
|
|
3304
|
+
}
|
|
3305
|
+
const key = entry.dedupeKey;
|
|
3306
|
+
await this.withLock(async () => {
|
|
3307
|
+
const store = this.readStore();
|
|
3308
|
+
const existing = store[key];
|
|
3309
|
+
if (existing) {
|
|
3310
|
+
store[key] = {
|
|
3311
|
+
...existing,
|
|
3312
|
+
lastTransmitted: now,
|
|
3313
|
+
firstUnsentEventTs: 0,
|
|
3314
|
+
suppressedCount: 0
|
|
3315
|
+
};
|
|
3316
|
+
this.writeStore(store);
|
|
3317
|
+
}
|
|
3318
|
+
});
|
|
3319
|
+
}
|
|
3320
|
+
async retryFailedTransmissions(send) {
|
|
3321
|
+
const now = Date.now();
|
|
3322
|
+
const entries = await this.withLock(async () => {
|
|
3323
|
+
const store = this.readStore();
|
|
3324
|
+
this.evictExpiredInStore(store, now);
|
|
3325
|
+
this.lastSweepTsMs = now;
|
|
3326
|
+
this.writeStore(store);
|
|
3327
|
+
return Object.entries(store).filter(([, rec]) => rec.lastTransmitted === 0).map(([key, rec]) => ({ key, rec }));
|
|
3328
|
+
});
|
|
3329
|
+
for (const { key, rec } of entries) {
|
|
3330
|
+
try {
|
|
3331
|
+
const message = this.formatMessage(rec.message, rec.lastEventTs, rec.suppressedCount, rec.firstUnsentEventTs);
|
|
3332
|
+
await send({ level: rec.level, message: rec.message, dedupeKey: key, eventTime: rec.lastEventTs }, message);
|
|
3333
|
+
await this.withLock(async () => {
|
|
3334
|
+
const store = this.readStore();
|
|
3335
|
+
const current = store[key];
|
|
3336
|
+
if (current) {
|
|
3337
|
+
store[key] = {
|
|
3338
|
+
...current,
|
|
3339
|
+
lastTransmitted: Date.now(),
|
|
3340
|
+
suppressedCount: 0
|
|
3341
|
+
};
|
|
3342
|
+
this.writeStore(store);
|
|
3343
|
+
}
|
|
3344
|
+
});
|
|
3345
|
+
} catch (err) {
|
|
3346
|
+
logger.error("Failed to retry failed transmission", { key, rec, error: err });
|
|
3347
|
+
return;
|
|
3348
|
+
}
|
|
3349
|
+
}
|
|
3350
|
+
}
|
|
3351
|
+
// --- Internals ---
|
|
3352
|
+
ensureStoreFileExists() {
|
|
3353
|
+
try {
|
|
3354
|
+
if (!fs.existsSync(this.storeFilePath)) {
|
|
3355
|
+
fs.writeFileSync(this.storeFilePath, JSON.stringify({}), "utf-8");
|
|
3356
|
+
}
|
|
3357
|
+
} catch {
|
|
3358
|
+
}
|
|
3359
|
+
}
|
|
3360
|
+
readStore() {
|
|
3361
|
+
try {
|
|
3362
|
+
if (!fs.existsSync(this.storeFilePath)) return {};
|
|
3363
|
+
const content = fs.readFileSync(this.storeFilePath, "utf-8");
|
|
3364
|
+
return content ? JSON.parse(content) : {};
|
|
3365
|
+
} catch {
|
|
3366
|
+
return {};
|
|
3367
|
+
}
|
|
3368
|
+
}
|
|
3369
|
+
writeStore(store) {
|
|
3370
|
+
try {
|
|
3371
|
+
fs.writeFileSync(this.storeFilePath, JSON.stringify(store, null, 2), "utf-8");
|
|
3372
|
+
} catch {
|
|
3373
|
+
}
|
|
3374
|
+
}
|
|
3375
|
+
formatMessage(message, eventTs, suppressedCount, firstUnsentEventTs) {
|
|
3376
|
+
const timestamp = new Date(eventTs).toISOString();
|
|
3377
|
+
const base = `${timestamp} | ${message}`;
|
|
3378
|
+
if (suppressedCount > 0) {
|
|
3379
|
+
const since = firstUnsentEventTs && firstUnsentEventTs > 0 ? ` since ${new Date(firstUnsentEventTs).toISOString()}` : "";
|
|
3380
|
+
return `${base} (${suppressedCount} suppressed${since})`;
|
|
3381
|
+
}
|
|
3382
|
+
return base;
|
|
3383
|
+
}
|
|
3384
|
+
async withLock(fn) {
|
|
3385
|
+
const lockPath = `${this.storeFilePath}.lock`;
|
|
3386
|
+
const start = Date.now();
|
|
3387
|
+
while (true) {
|
|
3388
|
+
try {
|
|
3389
|
+
const fd = fs.openSync(lockPath, "wx");
|
|
3390
|
+
try {
|
|
3391
|
+
const result = await fn();
|
|
3392
|
+
return result;
|
|
3393
|
+
} finally {
|
|
3394
|
+
try {
|
|
3395
|
+
fs.closeSync(fd);
|
|
3396
|
+
} catch {
|
|
3397
|
+
}
|
|
3398
|
+
try {
|
|
3399
|
+
fs.unlinkSync(lockPath);
|
|
3400
|
+
} catch {
|
|
3401
|
+
}
|
|
3402
|
+
}
|
|
3403
|
+
} catch {
|
|
3404
|
+
if (Date.now() - start > 3e3) {
|
|
3405
|
+
return await fn();
|
|
3406
|
+
}
|
|
3407
|
+
await new Promise((resolve) => setTimeout(resolve, 50));
|
|
3408
|
+
}
|
|
3409
|
+
}
|
|
3410
|
+
}
|
|
3411
|
+
/**
|
|
3412
|
+
* Evict expired entries from the store based on the TTL and the key's last transmitted time
|
|
3413
|
+
*/
|
|
3414
|
+
evictExpiredInStore(store, now) {
|
|
3415
|
+
const keys = Object.keys(store);
|
|
3416
|
+
if (keys.length === 0) return;
|
|
3417
|
+
for (const key of keys) {
|
|
3418
|
+
const rec = store[key];
|
|
3419
|
+
const referenceTs = rec.lastTransmitted > 0 ? rec.lastTransmitted : rec.lastEventTs;
|
|
3420
|
+
if (now - referenceTs > this.ttlMs) {
|
|
3421
|
+
delete store[key];
|
|
3422
|
+
}
|
|
3423
|
+
}
|
|
3424
|
+
}
|
|
3425
|
+
}
|
|
3045
3426
|
class ApplicationInitializer {
|
|
3046
3427
|
/**
|
|
3047
3428
|
* Performs all necessary application initialization tasks
|
|
@@ -3066,7 +3447,7 @@ class ApplicationInitializer {
|
|
|
3066
3447
|
);
|
|
3067
3448
|
} catch (error) {
|
|
3068
3449
|
logger.error("Critical initialization failure. Exiting.", error);
|
|
3069
|
-
process.
|
|
3450
|
+
process.exitCode = 1;
|
|
3070
3451
|
}
|
|
3071
3452
|
}
|
|
3072
3453
|
/**
|
|
@@ -3586,16 +3967,270 @@ class SqlServerHelper {
|
|
|
3586
3967
|
logger.info("----------------------------------------");
|
|
3587
3968
|
}
|
|
3588
3969
|
}
|
|
3970
|
+
class PsqlService {
|
|
3971
|
+
connection = null;
|
|
3972
|
+
config;
|
|
3973
|
+
constructor(config2) {
|
|
3974
|
+
this.config = config2;
|
|
3975
|
+
}
|
|
3976
|
+
async dispose() {
|
|
3977
|
+
await this.closeConnection();
|
|
3978
|
+
}
|
|
3979
|
+
/**
|
|
3980
|
+
* Build PSQL ODBC connection string
|
|
3981
|
+
* CRITICAL: ServerName must use IP.PORT format (e.g., 10.4.0.11.1583)
|
|
3982
|
+
*/
|
|
3983
|
+
buildConnectionString() {
|
|
3984
|
+
const serverName = `${this.config.host}.${this.config.port}`;
|
|
3985
|
+
return [
|
|
3986
|
+
"Driver={Pervasive ODBC Interface}",
|
|
3987
|
+
`ServerName=${serverName}`,
|
|
3988
|
+
`DBQ=${this.config.database}`,
|
|
3989
|
+
`UID=${this.config.username}`,
|
|
3990
|
+
`PWD=${this.config.password}`,
|
|
3991
|
+
"AutoDoubleQuote=0"
|
|
3992
|
+
].join(";") + ";";
|
|
3993
|
+
}
|
|
3994
|
+
/**
|
|
3995
|
+
* Execute a query and return the results
|
|
3996
|
+
* Interface matches SqlServerService for consistency
|
|
3997
|
+
*
|
|
3998
|
+
* @param query The SQL query to execute
|
|
3999
|
+
* @param params Query parameters (currently unused for PSQL read operations)
|
|
4000
|
+
* @param paging Optional paging parameters
|
|
4001
|
+
* @returns The entities fetched from the database, along with paging information
|
|
4002
|
+
*/
|
|
4003
|
+
async executePreparedStatement(query, params = {}, paging) {
|
|
4004
|
+
const connection = await this.openConnection();
|
|
4005
|
+
let records;
|
|
4006
|
+
try {
|
|
4007
|
+
if (Object.keys(params).length > 0) {
|
|
4008
|
+
logger.warn(
|
|
4009
|
+
"PsqlService: Query parameters provided but parameter binding not yet implemented. Using direct query execution. This is acceptable for Phase 1 read operations."
|
|
4010
|
+
);
|
|
4011
|
+
}
|
|
4012
|
+
records = await connection.query(query);
|
|
4013
|
+
} catch (error) {
|
|
4014
|
+
const errorInfo = error;
|
|
4015
|
+
logger.error("Error fetching data from PSQL", {
|
|
4016
|
+
error: errorInfo.message,
|
|
4017
|
+
odbcErrors: errorInfo.odbcErrors
|
|
4018
|
+
});
|
|
4019
|
+
throw this.handleOdbcError(errorInfo);
|
|
4020
|
+
}
|
|
4021
|
+
const allRecords = PsqlService.recordsetToRecords(records);
|
|
4022
|
+
const rowsFetched = allRecords.length;
|
|
4023
|
+
const pagedData = paging?.offset !== void 0 || paging?.limit !== void 0 ? allRecords.slice(
|
|
4024
|
+
paging.offset || 0,
|
|
4025
|
+
(paging.offset || 0) + (paging.limit || allRecords.length)
|
|
4026
|
+
) : allRecords;
|
|
4027
|
+
return {
|
|
4028
|
+
data: pagedData,
|
|
4029
|
+
paging: {
|
|
4030
|
+
count: rowsFetched,
|
|
4031
|
+
limit: paging?.limit || 0,
|
|
4032
|
+
offset: paging?.offset || 0,
|
|
4033
|
+
nextPage: paging?.limit && (paging.offset || 0) + paging.limit < rowsFetched ? String((paging.offset || 0) + paging.limit) : void 0,
|
|
4034
|
+
previousPage: paging?.offset ? String(Math.max(0, (paging.offset || 0) - (paging.limit || 10))) : void 0
|
|
4035
|
+
}
|
|
4036
|
+
};
|
|
4037
|
+
}
|
|
4038
|
+
/**
|
|
4039
|
+
* Opens a connection to PSQL database
|
|
4040
|
+
* Caches the connection so that it can be reused.
|
|
4041
|
+
* On failure to connect, throws
|
|
4042
|
+
*/
|
|
4043
|
+
async openConnection() {
|
|
4044
|
+
if (this.connection) {
|
|
4045
|
+
logger.debug("Reusing existing PSQL connection");
|
|
4046
|
+
return this.connection;
|
|
4047
|
+
}
|
|
4048
|
+
try {
|
|
4049
|
+
const connStr = this.buildConnectionString();
|
|
4050
|
+
logger.info("Opening new PSQL connection");
|
|
4051
|
+
logger.debug(
|
|
4052
|
+
"Connection string (password hidden):",
|
|
4053
|
+
connStr.replace(/PWD=[^;]+/, "PWD=***")
|
|
4054
|
+
);
|
|
4055
|
+
this.connection = await odbc.connect(connStr);
|
|
4056
|
+
logger.info("Successfully connected to PSQL database");
|
|
4057
|
+
return this.connection;
|
|
4058
|
+
} catch (error) {
|
|
4059
|
+
logger.error("PsqlService>>openConnection>> Connection failed", {
|
|
4060
|
+
error
|
|
4061
|
+
});
|
|
4062
|
+
throw this.handleOdbcError(error);
|
|
4063
|
+
}
|
|
4064
|
+
}
|
|
4065
|
+
/**
|
|
4066
|
+
* Transform ODBC result set to array of Record<string, string> instances.
|
|
4067
|
+
* IMPORTANT: PSQL CHAR fields are often padded with spaces - we trim them
|
|
4068
|
+
*
|
|
4069
|
+
* @param recordset Result set from ODBC query
|
|
4070
|
+
* @returns array of Record<string, string> instances
|
|
4071
|
+
*/
|
|
4072
|
+
static recordsetToRecords(recordset) {
|
|
4073
|
+
if (!Array.isArray(recordset)) {
|
|
4074
|
+
return [];
|
|
4075
|
+
}
|
|
4076
|
+
const data = recordset.map((row) => {
|
|
4077
|
+
const transformedRow = {};
|
|
4078
|
+
Object.keys(row).forEach((key) => {
|
|
4079
|
+
const value = row[key];
|
|
4080
|
+
transformedRow[key] = value !== null && value !== void 0 ? String(value).trim() : "";
|
|
4081
|
+
});
|
|
4082
|
+
return transformedRow;
|
|
4083
|
+
});
|
|
4084
|
+
return data;
|
|
4085
|
+
}
|
|
4086
|
+
/**
|
|
4087
|
+
* Handle ODBC errors and provide meaningful messages
|
|
4088
|
+
*/
|
|
4089
|
+
handleOdbcError(error) {
|
|
4090
|
+
const odbcError = error.odbcErrors?.[0];
|
|
4091
|
+
const errorCode = odbcError?.state;
|
|
4092
|
+
const message = odbcError?.message || error.message;
|
|
4093
|
+
switch (errorCode) {
|
|
4094
|
+
case "08S01":
|
|
4095
|
+
return new Error(
|
|
4096
|
+
`PSQL connection failed. Check: 1) PVSW environment variable set to /usr/local/psql/etc/pvsw.ini, 2) Network connectivity to ports 1583/3351, 3) ODBC configuration files in /usr/local/psql/etc/ and /etc/. Original error: ${message}`
|
|
4097
|
+
);
|
|
4098
|
+
case "28000":
|
|
4099
|
+
return new Error(
|
|
4100
|
+
`PSQL authentication failed. Check username/password. Original error: ${message}`
|
|
4101
|
+
);
|
|
4102
|
+
case "42000":
|
|
4103
|
+
return new Error(`PSQL SQL syntax error. Original error: ${message}`);
|
|
4104
|
+
case "42S02":
|
|
4105
|
+
return new Error(
|
|
4106
|
+
`PSQL table or view not found. Check table names in query. Original error: ${message}`
|
|
4107
|
+
);
|
|
4108
|
+
default:
|
|
4109
|
+
return new Error(`PSQL error (${errorCode || "unknown"}): ${message}`);
|
|
4110
|
+
}
|
|
4111
|
+
}
|
|
4112
|
+
async closeConnection() {
|
|
4113
|
+
if (this.connection) {
|
|
4114
|
+
logger.info("Closing PSQL connection");
|
|
4115
|
+
try {
|
|
4116
|
+
await this.connection.close();
|
|
4117
|
+
} catch (error) {
|
|
4118
|
+
logger.error("PsqlService::closeConnection: Error closing connection", {
|
|
4119
|
+
error
|
|
4120
|
+
});
|
|
4121
|
+
}
|
|
4122
|
+
this.connection = null;
|
|
4123
|
+
}
|
|
4124
|
+
}
|
|
4125
|
+
}
|
|
4126
|
+
/**
 * Labor-ticket write operations for the PSQL backend.
 *
 * Both operations are Phase 2 placeholders: they log a warning and throw.
 * Planned Phase 2 behavior (kept here for the implementer):
 * - createLaborTicket: prepared-statement INSERT into START_LABOR, returning
 *   the row GUID as erpUid.
 * - updateLaborTicket: move the row from START_LABOR to COMPLETED_LABOR
 *   (insert + delete) inside a single transaction.
 */
class PsqlLaborTicketOperations {
  /** @param service PSQL service used for database access (held for Phase 2). */
  constructor(service) {
    this.service = service;
  }

  /**
   * Create a labor ticket in the START_LABOR table (not yet implemented).
   * @param laborTicket Labor ticket from MachineMetrics
   * @returns Labor ticket and ERP unique ID (Phase 2)
   * @throws Always — Phase 2 feature.
   */
  async createLaborTicket(laborTicket) {
    logger.warn("PsqlLaborTicketOperations.createLaborTicket not yet implemented (Phase 2)");
    throw new Error("Labor ticket creation not implemented for PSQL. This is a Phase 2 feature.");
  }

  /**
   * Update a labor ticket, moving it from START_LABOR to COMPLETED_LABOR
   * (not yet implemented).
   * @param laborTicket Labor ticket to update
   * @returns Updated labor ticket (Phase 2)
   * @throws Always — Phase 2 feature.
   */
  async updateLaborTicket(laborTicket) {
    logger.warn("PsqlLaborTicketOperations.updateLaborTicket not yet implemented (Phase 2)");
    throw new Error("Labor ticket update not implemented for PSQL. This is a Phase 2 feature.");
  }
}
|
|
4169
|
+
/**
 * Convert a 6-digit PSQL date string ("YYMMDD", years assumed 2000-2099)
 * to an ISO "YYYY-MM-DD" string.
 *
 * Fixes two defects in the previous implementation:
 * - It built `new Date(y, m-1, d)` in *local* time and then called
 *   `toISOString()` (UTC), which shifted the date back one day in any
 *   timezone ahead of UTC.
 * - It only checked day <= 31, so impossible dates like "240230" silently
 *   rolled over to the next month instead of being rejected.
 *
 * @param psqlDate Raw PSQL date field ("YYMMDD"); "000000"/blank mean empty.
 * @returns "YYYY-MM-DD", or null for empty, non-numeric, or invalid dates.
 */
function formatPsqlDate(psqlDate) {
  if (!psqlDate || psqlDate === "000000" || psqlDate.trim() === "") {
    return null;
  }
  const year = Number.parseInt(psqlDate.substring(0, 2), 10);
  const month = Number.parseInt(psqlDate.substring(2, 4), 10);
  const day = Number.parseInt(psqlDate.substring(4, 6), 10);
  if (Number.isNaN(year) || Number.isNaN(month) || Number.isNaN(day)) {
    return null;
  }
  if (month < 1 || month > 12 || day < 1) {
    return null;
  }
  const fullYear = year + 2e3;
  // Day 0 of the *next* month yields the last day of `month` (leap-aware).
  const daysInMonth = new Date(fullYear, month, 0).getDate();
  if (day > daysInMonth) {
    return null;
  }
  // Format directly — no Date round-trip, so no timezone skew.
  const mm = String(month).padStart(2, "0");
  const dd = String(day).padStart(2, "0");
  return `${fullYear}-${mm}-${dd}`;
}
|
|
4187
|
+
/**
 * Convert a PSQL time string ("HHMM" or "HHMMSS"; seconds are ignored)
 * to "HH:MM:00".
 *
 * Fixes a defect in the previous implementation: it performed no
 * validation, so garbage like "abcd" became "ab:cd:00" and "2569"
 * became "25:69:00". (Its try/catch was dead code — substring never
 * throws.) Invalid input now returns null, consistent with
 * formatPsqlDate.
 *
 * @param psqlTime Raw PSQL time field; blank means empty.
 * @returns "HH:MM:00", or null for empty or invalid input.
 */
function formatPsqlTime(psqlTime) {
  if (!psqlTime || psqlTime.trim() === "") {
    return null;
  }
  // Accept exactly 4 (HHMM) or 6 (HHMMSS) digits.
  if (!/^\d{4}(\d{2})?$/.test(psqlTime)) {
    return null;
  }
  const hours = Number.parseInt(psqlTime.substring(0, 2), 10);
  const minutes = Number.parseInt(psqlTime.substring(2, 4), 10);
  if (hours > 23 || minutes > 59) {
    return null;
  }
  return `${psqlTime.substring(0, 2)}:${psqlTime.substring(2, 4)}:00`;
}
|
|
4199
|
+
/**
 * Combine PSQL date ("YYMMDD") and time ("HHMM") fields into a single
 * ISO-style "YYYY-MM-DDTHH:MM:00" string.
 *
 * @param psqlDate Raw PSQL date field
 * @param psqlTime Raw PSQL time field
 * @returns Combined timestamp, or null if either part is empty/invalid.
 */
function combinePsqlDateTime(psqlDate, psqlTime) {
  const datePart = formatPsqlDate(psqlDate);
  if (!datePart) {
    return null;
  }
  const timePart = formatPsqlTime(psqlTime);
  if (!timePart) {
    return null;
  }
  return `${datePart}T${timePart}`;
}
|
|
4207
|
+
/**
 * Report whether a PSQL date field is empty: null/undefined, the sentinel
 * "000000", or all whitespace.
 *
 * @param psqlDate Raw PSQL date field
 * @returns true when the field carries no date value.
 */
function isPsqlDateEmpty(psqlDate) {
  if (!psqlDate) {
    return true;
  }
  if (psqlDate === "000000") {
    return true;
  }
  return psqlDate.trim() === "";
}
|
|
4210
|
+
/**
 * Normalize a PSQL CHAR field: null/undefined become "", everything else
 * is stringified and stripped of the fixed-width padding whitespace.
 *
 * @param value Raw field value of any type
 * @returns Trimmed string ("" for null/undefined).
 */
function cleanPsqlCharField(value) {
  // `== null` matches both null and undefined.
  if (value == null) {
    return "";
  }
  return String(value).trim();
}
|
|
3589
4216
|
export {
|
|
3590
4217
|
ApplicationInitializer,
|
|
3591
4218
|
BatchCacheManager,
|
|
3592
4219
|
CoreConfiguration,
|
|
3593
4220
|
ERPObjType,
|
|
3594
4221
|
ERPType,
|
|
4222
|
+
E as ErpApiConnectionParams,
|
|
3595
4223
|
ErrorHandler,
|
|
4224
|
+
FileLogDeduper,
|
|
4225
|
+
GraphQLError,
|
|
3596
4226
|
GraphQLService,
|
|
3597
4227
|
HTTPClientFactory,
|
|
4228
|
+
HTTPError,
|
|
4229
|
+
LogEntry,
|
|
3598
4230
|
MMApiClient,
|
|
4231
|
+
MMBatchValidationError,
|
|
4232
|
+
MMConnectorLogger,
|
|
4233
|
+
MMReceiveLaborTicket,
|
|
3599
4234
|
MMSendLaborTicket,
|
|
3600
4235
|
MMSendPart,
|
|
3601
4236
|
MMSendPartOperation,
|
|
@@ -3605,6 +4240,9 @@ export {
|
|
|
3605
4240
|
MMSendWorkOrder,
|
|
3606
4241
|
MMSendWorkOrderOperation,
|
|
3607
4242
|
OAuthClient,
|
|
4243
|
+
PsqlLaborTicketOperations,
|
|
4244
|
+
PsqlService,
|
|
4245
|
+
RecordTrackingManager,
|
|
3608
4246
|
RestAPIService,
|
|
3609
4247
|
SqlServerHelper,
|
|
3610
4248
|
SqlServerService,
|
|
@@ -3613,15 +4251,22 @@ export {
|
|
|
3613
4251
|
addNewFieldFromLookupField,
|
|
3614
4252
|
applyTimezoneOffsetsToFields,
|
|
3615
4253
|
buildLogicalCondition,
|
|
4254
|
+
cleanPsqlCharField,
|
|
3616
4255
|
cleanupNumbers,
|
|
4256
|
+
combinePsqlDateTime,
|
|
3617
4257
|
convertToLocalTime,
|
|
3618
4258
|
formatDateWithTZOffset,
|
|
4259
|
+
formatPsqlDate,
|
|
4260
|
+
formatPsqlTime,
|
|
3619
4261
|
getCachedTimezoneOffset,
|
|
3620
4262
|
g as getErpApiConnectionParams,
|
|
4263
|
+
getErrorType,
|
|
3621
4264
|
c as getInitialLoadComplete,
|
|
3622
4265
|
getPayloadWithoutIDField,
|
|
3623
4266
|
a as getSQLServerConfiguration,
|
|
3624
4267
|
getUniqueRows,
|
|
4268
|
+
isPsqlDateEmpty,
|
|
4269
|
+
config as knexDatabaseConfig,
|
|
3625
4270
|
logger,
|
|
3626
4271
|
removeExtraneousFields,
|
|
3627
4272
|
runDataSyncService,
|