@machinemetrics/mm-erp-sdk 0.1.9 → 0.2.0-beta.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +0 -5
- package/dist/config-CvA-mFWF.js +418 -0
- package/dist/config-CvA-mFWF.js.map +1 -0
- package/dist/connector-factory-BPm2GVVF.js +30 -0
- package/dist/connector-factory-BPm2GVVF.js.map +1 -0
- package/dist/hashed-cache-manager-B15NN8hK.js +322 -0
- package/dist/hashed-cache-manager-B15NN8hK.js.map +1 -0
- package/dist/index-D8qO1NyK.js +192 -0
- package/dist/index-D8qO1NyK.js.map +1 -0
- package/dist/index.d.ts +43 -42
- package/dist/index.d.ts.map +1 -1
- package/dist/knexfile-Bng2Ru9c.js +20 -0
- package/dist/knexfile-Bng2Ru9c.js.map +1 -0
- package/dist/knexfile.d.ts.map +1 -1
- package/dist/logger-BWw0_z9q.js +17557 -0
- package/dist/logger-BWw0_z9q.js.map +1 -0
- package/dist/migrations/20241015162631_create_cache_table.js +15 -11
- package/dist/migrations/20241015162631_create_cache_table.js.map +1 -1
- package/dist/migrations/20241015162632_create_sdk_cache_table.js +15 -11
- package/dist/migrations/20241015162632_create_sdk_cache_table.js.map +1 -1
- package/dist/migrations/20250103162631_create_record_tracking_table.js +15 -14
- package/dist/migrations/20250103162631_create_record_tracking_table.js.map +1 -1
- package/dist/mm-erp-sdk.js +4978 -0
- package/dist/mm-erp-sdk.js.map +1 -0
- package/dist/services/caching-service/batch-cache-manager.d.ts +1 -1
- package/dist/services/caching-service/batch-cache-manager.d.ts.map +1 -1
- package/dist/services/caching-service/hashed-cache-manager.d.ts +2 -2
- package/dist/services/caching-service/hashed-cache-manager.d.ts.map +1 -1
- package/dist/services/caching-service/index.d.ts +1 -1
- package/dist/services/caching-service/index.d.ts.map +1 -1
- package/dist/services/caching-service/record-tracking-manager.d.ts +1 -1
- package/dist/services/caching-service/record-tracking-manager.d.ts.map +1 -1
- package/dist/services/data-sync-service/configuration-manager.d.ts +1 -1
- package/dist/services/data-sync-service/configuration-manager.d.ts.map +1 -1
- package/dist/services/data-sync-service/data-sync-service.d.ts.map +1 -1
- package/dist/services/data-sync-service/index.d.ts +3 -3
- package/dist/services/data-sync-service/index.d.ts.map +1 -1
- package/dist/services/data-sync-service/jobs/clean-up-expired-cache.js +39 -40
- package/dist/services/data-sync-service/jobs/clean-up-expired-cache.js.map +1 -1
- package/dist/services/data-sync-service/jobs/from-erp.d.ts.map +1 -1
- package/dist/services/data-sync-service/jobs/from-erp.js +36 -48
- package/dist/services/data-sync-service/jobs/from-erp.js.map +1 -1
- package/dist/services/data-sync-service/jobs/retry-failed-labor-tickets.js +35 -36
- package/dist/services/data-sync-service/jobs/retry-failed-labor-tickets.js.map +1 -1
- package/dist/services/data-sync-service/jobs/run-migrations.js +21 -22
- package/dist/services/data-sync-service/jobs/run-migrations.js.map +1 -1
- package/dist/services/data-sync-service/jobs/to-erp.d.ts.map +1 -1
- package/dist/services/data-sync-service/jobs/to-erp.js +45 -48
- package/dist/services/data-sync-service/jobs/to-erp.js.map +1 -1
- package/dist/services/data-sync-service/nats-labor-ticket-listener.d.ts +30 -0
- package/dist/services/data-sync-service/nats-labor-ticket-listener.d.ts.map +1 -0
- package/dist/services/erp-api-services/errors.d.ts +1 -1
- package/dist/services/erp-api-services/errors.d.ts.map +1 -1
- package/dist/services/erp-api-services/graphql/graphql-service.d.ts +2 -2
- package/dist/services/erp-api-services/graphql/graphql-service.d.ts.map +1 -1
- package/dist/services/erp-api-services/index.d.ts +8 -8
- package/dist/services/erp-api-services/index.d.ts.map +1 -1
- package/dist/services/erp-api-services/rest/rest-api-service.d.ts +2 -2
- package/dist/services/erp-api-services/rest/rest-api-service.d.ts.map +1 -1
- package/dist/services/erp-api-services/types.d.ts +2 -2
- package/dist/services/erp-api-services/types.d.ts.map +1 -1
- package/dist/services/mm-api-service/company-info.d.ts +13 -0
- package/dist/services/mm-api-service/company-info.d.ts.map +1 -0
- package/dist/services/mm-api-service/index.d.ts +13 -6
- package/dist/services/mm-api-service/index.d.ts.map +1 -1
- package/dist/services/mm-api-service/mm-api-service.d.ts +13 -7
- package/dist/services/mm-api-service/mm-api-service.d.ts.map +1 -1
- package/dist/services/mm-api-service/types/entity-transformer.d.ts +2 -2
- package/dist/services/mm-api-service/types/entity-transformer.d.ts.map +1 -1
- package/dist/services/mm-api-service/types/receive-types.d.ts +3 -0
- package/dist/services/mm-api-service/types/receive-types.d.ts.map +1 -1
- package/dist/services/nats-service/nats-service.d.ts +114 -0
- package/dist/services/nats-service/nats-service.d.ts.map +1 -0
- package/dist/services/nats-service/test-nats-subscriber.d.ts +6 -0
- package/dist/services/nats-service/test-nats-subscriber.d.ts.map +1 -0
- package/dist/services/psql-erp-service/index.d.ts +3 -3
- package/dist/services/psql-erp-service/index.d.ts.map +1 -1
- package/dist/services/psql-erp-service/psql-service.d.ts +2 -2
- package/dist/services/psql-erp-service/psql-service.d.ts.map +1 -1
- package/dist/services/reporting-service/index.d.ts +1 -1
- package/dist/services/reporting-service/index.d.ts.map +1 -1
- package/dist/services/reporting-service/logger.d.ts.map +1 -1
- package/dist/services/sql-server-erp-service/index.d.ts +3 -3
- package/dist/services/sql-server-erp-service/index.d.ts.map +1 -1
- package/dist/services/sql-server-erp-service/internal/sql-labor-ticket-operations.d.ts +2 -2
- package/dist/services/sql-server-erp-service/internal/sql-labor-ticket-operations.d.ts.map +1 -1
- package/dist/services/sql-server-erp-service/sql-server-helpers.d.ts +3 -3
- package/dist/services/sql-server-erp-service/sql-server-helpers.d.ts.map +1 -1
- package/dist/services/sql-server-erp-service/sql-server-service.d.ts +2 -2
- package/dist/services/sql-server-erp-service/sql-server-service.d.ts.map +1 -1
- package/dist/services/sqlite-service/index.d.ts +1 -1
- package/dist/services/sqlite-service/index.d.ts.map +1 -1
- package/dist/types/erp-connector.d.ts +1 -1
- package/dist/types/erp-connector.d.ts.map +1 -1
- package/dist/types/index.d.ts +2 -2
- package/dist/types/index.d.ts.map +1 -1
- package/dist/utils/connector-factory.d.ts +1 -1
- package/dist/utils/connector-factory.d.ts.map +1 -1
- package/dist/utils/connector-log/log-deduper.d.ts +1 -1
- package/dist/utils/connector-log/log-deduper.d.ts.map +1 -1
- package/dist/utils/connector-log/mm-connector-logger-example.d.ts +0 -1
- package/dist/utils/connector-log/mm-connector-logger.d.ts +1 -1
- package/dist/utils/connector-log/mm-connector-logger.d.ts.map +1 -1
- package/dist/utils/erp-type-from-entity.d.ts +1 -1
- package/dist/utils/erp-type-from-entity.d.ts.map +1 -1
- package/dist/utils/error-formatter.d.ts +19 -0
- package/dist/utils/error-formatter.d.ts.map +1 -0
- package/dist/utils/index.d.ts +38 -33
- package/dist/utils/index.d.ts.map +1 -1
- package/dist/utils/mm-labor-ticket-helpers.d.ts +1 -1
- package/dist/utils/mm-labor-ticket-helpers.d.ts.map +1 -1
- package/dist/utils/removeExtraneousFields.d.ts +1 -1
- package/dist/utils/removeExtraneousFields.d.ts.map +1 -1
- package/dist/utils/removeIdFieldFromPayload.d.ts +1 -1
- package/dist/utils/removeIdFieldFromPayload.d.ts.map +1 -1
- package/dist/utils/resource-group.d.ts +1 -1
- package/dist/utils/resource-group.d.ts.map +1 -1
- package/dist/utils/standard-process-drivers/error-processor.d.ts +3 -3
- package/dist/utils/standard-process-drivers/error-processor.d.ts.map +1 -1
- package/dist/utils/standard-process-drivers/index.d.ts +3 -3
- package/dist/utils/standard-process-drivers/index.d.ts.map +1 -1
- package/dist/utils/standard-process-drivers/labor-ticket-erp-synchronizer.d.ts +1 -1
- package/dist/utils/standard-process-drivers/labor-ticket-erp-synchronizer.d.ts.map +1 -1
- package/dist/utils/standard-process-drivers/mm-entity-processor.d.ts +4 -4
- package/dist/utils/standard-process-drivers/mm-entity-processor.d.ts.map +1 -1
- package/dist/utils/standard-process-drivers/standard-process-drivers.d.ts +4 -4
- package/dist/utils/standard-process-drivers/standard-process-drivers.d.ts.map +1 -1
- package/package.json +10 -6
- package/src/index.ts +45 -42
- package/src/knexfile.ts +0 -1
- package/src/services/caching-service/batch-cache-manager.ts +2 -2
- package/src/services/caching-service/hashed-cache-manager.ts +5 -5
- package/src/services/caching-service/index.ts +1 -1
- package/src/services/caching-service/record-tracking-manager.ts +2 -2
- package/src/services/data-sync-service/configuration-manager.ts +2 -2
- package/src/services/data-sync-service/data-sync-service.ts +19 -10
- package/src/services/data-sync-service/index.ts +3 -3
- package/src/services/data-sync-service/jobs/clean-up-expired-cache.ts +3 -3
- package/src/services/data-sync-service/jobs/from-erp.ts +5 -10
- package/src/services/data-sync-service/jobs/retry-failed-labor-tickets.ts +2 -2
- package/src/services/data-sync-service/jobs/run-migrations.ts +2 -2
- package/src/services/data-sync-service/jobs/to-erp.ts +7 -3
- package/src/services/data-sync-service/nats-labor-ticket-listener.ts +341 -0
- package/src/services/erp-api-services/errors.ts +3 -3
- package/src/services/erp-api-services/graphql/graphql-service.ts +5 -5
- package/src/services/erp-api-services/index.ts +8 -8
- package/src/services/erp-api-services/rest/rest-api-service.ts +4 -4
- package/src/services/erp-api-services/types.ts +2 -2
- package/src/services/mm-api-service/company-info.ts +87 -0
- package/src/services/mm-api-service/index.ts +14 -6
- package/src/services/mm-api-service/mm-api-service.ts +29 -12
- package/src/services/mm-api-service/token-mgr.ts +4 -4
- package/src/services/mm-api-service/types/entity-transformer.ts +3 -3
- package/src/services/mm-api-service/types/receive-types.ts +1 -0
- package/src/services/nats-service/nats-service.ts +351 -0
- package/src/services/nats-service/test-nats-subscriber.ts +96 -0
- package/src/services/psql-erp-service/index.ts +3 -3
- package/src/services/psql-erp-service/psql-service.ts +4 -4
- package/src/services/reporting-service/index.ts +1 -1
- package/src/services/reporting-service/logger.ts +81 -111
- package/src/services/sql-server-erp-service/index.ts +3 -3
- package/src/services/sql-server-erp-service/internal/sql-labor-ticket-operations.ts +2 -2
- package/src/services/sql-server-erp-service/internal/sql-transaction-manager.ts +1 -1
- package/src/services/sql-server-erp-service/sql-server-helpers.ts +6 -6
- package/src/services/sql-server-erp-service/sql-server-service.ts +4 -4
- package/src/services/sqlite-service/index.ts +1 -1
- package/src/services/sqlite-service/sqlite-coordinator.ts +2 -2
- package/src/types/erp-connector.ts +1 -1
- package/src/types/index.ts +2 -2
- package/src/utils/application-initializer.ts +5 -5
- package/src/utils/connector-factory.ts +2 -2
- package/src/utils/connector-log/log-deduper.ts +2 -2
- package/src/utils/connector-log/mm-connector-logger.ts +3 -3
- package/src/utils/erp-type-from-entity.ts +1 -1
- package/src/utils/error-formatter.ts +205 -0
- package/src/utils/http-client.ts +3 -2
- package/src/utils/index.ts +39 -33
- package/src/utils/local-data-store/database-lock.ts +1 -1
- package/src/utils/mm-labor-ticket-helpers.ts +2 -2
- package/src/utils/removeExtraneousFields.ts +1 -1
- package/src/utils/removeIdFieldFromPayload.ts +1 -1
- package/src/utils/resource-group.ts +2 -2
- package/src/utils/standard-process-drivers/error-processor.ts +5 -5
- package/src/utils/standard-process-drivers/index.ts +3 -3
- package/src/utils/standard-process-drivers/labor-ticket-erp-synchronizer.ts +6 -6
- package/src/utils/standard-process-drivers/mm-entity-processor.ts +7 -7
- package/src/utils/standard-process-drivers/standard-process-drivers.ts +6 -6
- package/src/utils/time-utils.ts +3 -3
- package/src/utils/timezone.ts +2 -2
- package/dist/index.js +0 -48
- package/dist/index.js.map +0 -1
- package/dist/knexfile.js +0 -19
- package/dist/knexfile.js.map +0 -1
- package/dist/migrations/20241015162631_create_cache_table.d.ts +0 -4
- package/dist/migrations/20241015162631_create_cache_table.d.ts.map +0 -1
- package/dist/migrations/20241015162632_create_sdk_cache_table.d.ts +0 -4
- package/dist/migrations/20241015162632_create_sdk_cache_table.d.ts.map +0 -1
- package/dist/migrations/20250103162631_create_record_tracking_table.d.ts +0 -4
- package/dist/migrations/20250103162631_create_record_tracking_table.d.ts.map +0 -1
- package/dist/services/caching-service/batch-cache-manager.js +0 -84
- package/dist/services/caching-service/batch-cache-manager.js.map +0 -1
- package/dist/services/caching-service/hashed-cache-manager.js +0 -223
- package/dist/services/caching-service/hashed-cache-manager.js.map +0 -1
- package/dist/services/caching-service/index.js +0 -2
- package/dist/services/caching-service/index.js.map +0 -1
- package/dist/services/caching-service/record-tracking-manager.js +0 -28
- package/dist/services/caching-service/record-tracking-manager.js.map +0 -1
- package/dist/services/data-sync-service/configuration-manager.js +0 -163
- package/dist/services/data-sync-service/configuration-manager.js.map +0 -1
- package/dist/services/data-sync-service/data-sync-service.js +0 -95
- package/dist/services/data-sync-service/data-sync-service.js.map +0 -1
- package/dist/services/data-sync-service/index.js +0 -10
- package/dist/services/data-sync-service/index.js.map +0 -1
- package/dist/services/erp-api-services/errors.js +0 -83
- package/dist/services/erp-api-services/errors.js.map +0 -1
- package/dist/services/erp-api-services/graphql/graphql-service.js +0 -102
- package/dist/services/erp-api-services/graphql/graphql-service.js.map +0 -1
- package/dist/services/erp-api-services/graphql/types.js +0 -6
- package/dist/services/erp-api-services/graphql/types.js.map +0 -1
- package/dist/services/erp-api-services/index.js +0 -13
- package/dist/services/erp-api-services/index.js.map +0 -1
- package/dist/services/erp-api-services/oauth-client.js +0 -41
- package/dist/services/erp-api-services/oauth-client.js.map +0 -1
- package/dist/services/erp-api-services/rest/get-query-params.js +0 -23
- package/dist/services/erp-api-services/rest/get-query-params.js.map +0 -1
- package/dist/services/erp-api-services/rest/rest-api-service.js +0 -163
- package/dist/services/erp-api-services/rest/rest-api-service.js.map +0 -1
- package/dist/services/erp-api-services/types.js +0 -2
- package/dist/services/erp-api-services/types.js.map +0 -1
- package/dist/services/mm-api-service/index.js +0 -15
- package/dist/services/mm-api-service/index.js.map +0 -1
- package/dist/services/mm-api-service/mm-api-service.js +0 -519
- package/dist/services/mm-api-service/mm-api-service.js.map +0 -1
- package/dist/services/mm-api-service/token-mgr.js +0 -113
- package/dist/services/mm-api-service/token-mgr.js.map +0 -1
- package/dist/services/mm-api-service/types/checkpoint.js +0 -2
- package/dist/services/mm-api-service/types/checkpoint.js.map +0 -1
- package/dist/services/mm-api-service/types/entity-transformer.js +0 -186
- package/dist/services/mm-api-service/types/entity-transformer.js.map +0 -1
- package/dist/services/mm-api-service/types/mm-response-interfaces.js +0 -34
- package/dist/services/mm-api-service/types/mm-response-interfaces.js.map +0 -1
- package/dist/services/mm-api-service/types/receive-types.js +0 -55
- package/dist/services/mm-api-service/types/receive-types.js.map +0 -1
- package/dist/services/mm-api-service/types/send-types.js +0 -337
- package/dist/services/mm-api-service/types/send-types.js.map +0 -1
- package/dist/services/psql-erp-service/configuration.js +0 -2
- package/dist/services/psql-erp-service/configuration.js.map +0 -1
- package/dist/services/psql-erp-service/index.js +0 -10
- package/dist/services/psql-erp-service/index.js.map +0 -1
- package/dist/services/psql-erp-service/internal/types/psql-types.js +0 -5
- package/dist/services/psql-erp-service/internal/types/psql-types.js.map +0 -1
- package/dist/services/psql-erp-service/psql-helpers.js +0 -99
- package/dist/services/psql-erp-service/psql-helpers.js.map +0 -1
- package/dist/services/psql-erp-service/psql-service.js +0 -187
- package/dist/services/psql-erp-service/psql-service.js.map +0 -1
- package/dist/services/reporting-service/index.js +0 -5
- package/dist/services/reporting-service/index.js.map +0 -1
- package/dist/services/reporting-service/logger.js +0 -217
- package/dist/services/reporting-service/logger.js.map +0 -1
- package/dist/services/sql-server-erp-service/configuration.js +0 -2
- package/dist/services/sql-server-erp-service/configuration.js.map +0 -1
- package/dist/services/sql-server-erp-service/index.js +0 -11
- package/dist/services/sql-server-erp-service/index.js.map +0 -1
- package/dist/services/sql-server-erp-service/internal/sql-labor-ticket-operations.js +0 -50
- package/dist/services/sql-server-erp-service/internal/sql-labor-ticket-operations.js.map +0 -1
- package/dist/services/sql-server-erp-service/internal/sql-server-config.js +0 -40
- package/dist/services/sql-server-erp-service/internal/sql-server-config.js.map +0 -1
- package/dist/services/sql-server-erp-service/internal/sql-transaction-manager.js +0 -36
- package/dist/services/sql-server-erp-service/internal/sql-transaction-manager.js.map +0 -1
- package/dist/services/sql-server-erp-service/internal/types/sql-server-types.js +0 -2
- package/dist/services/sql-server-erp-service/internal/types/sql-server-types.js.map +0 -1
- package/dist/services/sql-server-erp-service/sql-server-helpers.js +0 -66
- package/dist/services/sql-server-erp-service/sql-server-helpers.js.map +0 -1
- package/dist/services/sql-server-erp-service/sql-server-service.js +0 -154
- package/dist/services/sql-server-erp-service/sql-server-service.js.map +0 -1
- package/dist/services/sql-server-erp-service/types/sql-input-param.js +0 -2
- package/dist/services/sql-server-erp-service/types/sql-input-param.js.map +0 -1
- package/dist/services/sqlite-service/index.js +0 -2
- package/dist/services/sqlite-service/index.js.map +0 -1
- package/dist/services/sqlite-service/sqlite-coordinator.js +0 -60
- package/dist/services/sqlite-service/sqlite-coordinator.js.map +0 -1
- package/dist/types/erp-connector.js +0 -2
- package/dist/types/erp-connector.js.map +0 -1
- package/dist/types/erp-types.js +0 -13
- package/dist/types/erp-types.js.map +0 -1
- package/dist/types/index.js +0 -6
- package/dist/types/index.js.map +0 -1
- package/dist/utils/application-initializer.js +0 -55
- package/dist/utils/application-initializer.js.map +0 -1
- package/dist/utils/cleanup-numbers.js +0 -6
- package/dist/utils/cleanup-numbers.js.map +0 -1
- package/dist/utils/connector-factory.js +0 -34
- package/dist/utils/connector-factory.js.map +0 -1
- package/dist/utils/connector-log/log-deduper.js +0 -240
- package/dist/utils/connector-log/log-deduper.js.map +0 -1
- package/dist/utils/connector-log/mm-connector-logger-example.js +0 -88
- package/dist/utils/connector-log/mm-connector-logger-example.js.map +0 -1
- package/dist/utils/connector-log/mm-connector-logger.js +0 -151
- package/dist/utils/connector-log/mm-connector-logger.js.map +0 -1
- package/dist/utils/data-transformation.js +0 -38
- package/dist/utils/data-transformation.js.map +0 -1
- package/dist/utils/erp-type-from-entity.js +0 -6
- package/dist/utils/erp-type-from-entity.js.map +0 -1
- package/dist/utils/error-utils.js +0 -21
- package/dist/utils/error-utils.js.map +0 -1
- package/dist/utils/http-client.js +0 -186
- package/dist/utils/http-client.js.map +0 -1
- package/dist/utils/index.js +0 -65
- package/dist/utils/index.js.map +0 -1
- package/dist/utils/local-data-store/database-lock.js +0 -68
- package/dist/utils/local-data-store/database-lock.js.map +0 -1
- package/dist/utils/local-data-store/jobs-shared-data.js +0 -116
- package/dist/utils/local-data-store/jobs-shared-data.js.map +0 -1
- package/dist/utils/mm-labor-ticket-helpers.js +0 -23
- package/dist/utils/mm-labor-ticket-helpers.js.map +0 -1
- package/dist/utils/removeExtraneousFields.js +0 -16
- package/dist/utils/removeExtraneousFields.js.map +0 -1
- package/dist/utils/removeIdFieldFromPayload.js +0 -16
- package/dist/utils/removeIdFieldFromPayload.js.map +0 -1
- package/dist/utils/resource-group.js +0 -59
- package/dist/utils/resource-group.js.map +0 -1
- package/dist/utils/standard-process-drivers/error-processor.js +0 -262
- package/dist/utils/standard-process-drivers/error-processor.js.map +0 -1
- package/dist/utils/standard-process-drivers/index.js +0 -4
- package/dist/utils/standard-process-drivers/index.js.map +0 -1
- package/dist/utils/standard-process-drivers/labor-ticket-erp-synchronizer.js +0 -164
- package/dist/utils/standard-process-drivers/labor-ticket-erp-synchronizer.js.map +0 -1
- package/dist/utils/standard-process-drivers/mm-entity-processor.js +0 -168
- package/dist/utils/standard-process-drivers/mm-entity-processor.js.map +0 -1
- package/dist/utils/standard-process-drivers/standard-process-drivers.js +0 -324
- package/dist/utils/standard-process-drivers/standard-process-drivers.js.map +0 -1
- package/dist/utils/time-utils.js +0 -96
- package/dist/utils/time-utils.js.map +0 -1
- package/dist/utils/timezone.js +0 -105
- package/dist/utils/timezone.js.map +0 -1
- package/dist/utils/trimObjectValues.js +0 -11
- package/dist/utils/trimObjectValues.js.map +0 -1
- package/dist/utils/uniqueRows.js +0 -35
- package/dist/utils/uniqueRows.js.map +0 -1
|
@@ -0,0 +1,4978 @@
|
|
|
1
|
+
import { C as CoreConfiguration, H as HashedCacheManager } from "./hashed-cache-manager-B15NN8hK.js";
|
|
2
|
+
import { E, g, a } from "./hashed-cache-manager-B15NN8hK.js";
|
|
3
|
+
import { l as logger } from "./logger-BWw0_z9q.js";
|
|
4
|
+
import { g as getCachedMMToken, s as setCachedMMToken, a as setTimezoneOffsetInCache, b as setTimezoneNameInCache, c as getCachedTimezoneOffset, S as SQLiteCoordinator } from "./index-D8qO1NyK.js";
|
|
5
|
+
import { f, d, e } from "./index-D8qO1NyK.js";
|
|
6
|
+
import axios, { AxiosError } from "axios";
|
|
7
|
+
import knex from "knex";
|
|
8
|
+
import { c as config } from "./knexfile-Bng2Ru9c.js";
|
|
9
|
+
import fs from "fs";
|
|
10
|
+
import path from "path";
|
|
11
|
+
import { c as createConnectorFromPath } from "./connector-factory-BPm2GVVF.js";
|
|
12
|
+
import Bree from "bree";
|
|
13
|
+
import Graceful from "@ladjs/graceful";
|
|
14
|
+
import { fileURLToPath } from "url";
|
|
15
|
+
import { StringCodec, connect } from "nats";
|
|
16
|
+
import sql from "mssql";
|
|
17
|
+
import { z } from "zod";
|
|
18
|
+
var ERPObjType = /* @__PURE__ */ ((ERPObjType2) => {
|
|
19
|
+
ERPObjType2[ERPObjType2["RESOURCES"] = 0] = "RESOURCES";
|
|
20
|
+
ERPObjType2[ERPObjType2["PARTS"] = 1] = "PARTS";
|
|
21
|
+
ERPObjType2[ERPObjType2["PART_OPERATION"] = 2] = "PART_OPERATION";
|
|
22
|
+
ERPObjType2[ERPObjType2["WORK_ORDERS"] = 3] = "WORK_ORDERS";
|
|
23
|
+
ERPObjType2[ERPObjType2["WORK_ORDER_OPERATIONS"] = 4] = "WORK_ORDER_OPERATIONS";
|
|
24
|
+
ERPObjType2[ERPObjType2["PERSONS"] = 5] = "PERSONS";
|
|
25
|
+
ERPObjType2[ERPObjType2["REASONS"] = 6] = "REASONS";
|
|
26
|
+
ERPObjType2[ERPObjType2["LABOR_TICKETS"] = 7] = "LABOR_TICKETS";
|
|
27
|
+
return ERPObjType2;
|
|
28
|
+
})(ERPObjType || {});
|
|
29
|
+
const erpTypes = /* @__PURE__ */ Object.freeze(/* @__PURE__ */ Object.defineProperty({
|
|
30
|
+
__proto__: null,
|
|
31
|
+
ERPObjType
|
|
32
|
+
}, Symbol.toStringTag, { value: "Module" }));
|
|
33
|
+
var Operator = /* @__PURE__ */ ((Operator2) => {
|
|
34
|
+
Operator2["eq"] = "eq";
|
|
35
|
+
Operator2["lt"] = "lt";
|
|
36
|
+
Operator2["lte"] = "lte";
|
|
37
|
+
Operator2["gt"] = "gt";
|
|
38
|
+
Operator2["gte"] = "gte";
|
|
39
|
+
return Operator2;
|
|
40
|
+
})(Operator || {});
|
|
41
|
+
const buildLogicalCondition = (conditions, operator) => {
|
|
42
|
+
return conditions.map(
|
|
43
|
+
(condition) => `${condition.field} ${condition.operator} '${condition.value}'`
|
|
44
|
+
).join(` ${operator} `);
|
|
45
|
+
};
|
|
46
|
+
class MMReceiveLaborTicket {
|
|
47
|
+
constructor(data) {
|
|
48
|
+
Object.assign(this, data);
|
|
49
|
+
}
|
|
50
|
+
// Returns the closed date if it is a valid date, otherwise returns null
|
|
51
|
+
get workOrderOperationClosedDate() {
|
|
52
|
+
const closedDate = this.workOrderOperation?.closedDate;
|
|
53
|
+
return typeof closedDate === "string" && closedDate !== "" && !isNaN(Date.parse(closedDate)) ? closedDate : null;
|
|
54
|
+
}
|
|
55
|
+
set workOrderOperationClosedDate(value) {
|
|
56
|
+
if (this.workOrderOperation) {
|
|
57
|
+
this.workOrderOperation.closedDate = value;
|
|
58
|
+
}
|
|
59
|
+
}
|
|
60
|
+
// Returns 1 if the work order operation is closed (has a valid closed date), 0 otherwise
|
|
61
|
+
get workOrderOpIsClosed() {
|
|
62
|
+
return this.workOrderOperationClosedDate !== null ? 1 : 0;
|
|
63
|
+
}
|
|
64
|
+
/*Descriptions from machinemetrics.atlassian.net/wiki/spaces/DEVE/pages/842661901/MM+ERP+Schema+Descriptions*/
|
|
65
|
+
badParts;
|
|
66
|
+
// The number of parts produced that do not meet quality standards
|
|
67
|
+
clockIn;
|
|
68
|
+
// The timestamp when the work on this ticket started
|
|
69
|
+
clockOut;
|
|
70
|
+
// The timestamp when the work on this ticket ended
|
|
71
|
+
comment;
|
|
72
|
+
createdAt;
|
|
73
|
+
goodParts;
|
|
74
|
+
// The number of parts produced that meet quality standards
|
|
75
|
+
laborTicketId;
|
|
76
|
+
// The unique identifier for the labor ticket in the ERP
|
|
77
|
+
laborTicketRef;
|
|
78
|
+
// The unique identifier for the labor ticket in MM
|
|
79
|
+
lot;
|
|
80
|
+
// Refers to a material batch. If not available in the ERP, default to an empty string
|
|
81
|
+
personId;
|
|
82
|
+
// The person who did the work
|
|
83
|
+
reasons;
|
|
84
|
+
resourceId;
|
|
85
|
+
// The physical resource that did the work
|
|
86
|
+
resource;
|
|
87
|
+
// The resource name (available from export/updates endpoint)
|
|
88
|
+
sequenceNumber;
|
|
89
|
+
// Which sequence this operation is for in the order, associates back to a part_operation with the part_number from the work_order, and sequence_number from the op
|
|
90
|
+
split;
|
|
91
|
+
// Batch number if order split for staggered delivery. If not available in the ERP, default to an empty string
|
|
92
|
+
state;
|
|
93
|
+
// The current state of the labor ticket (e.g., "CLOSED/OPEN")
|
|
94
|
+
type;
|
|
95
|
+
// If the labor is production, setup, or indirect
|
|
96
|
+
syncedAt;
|
|
97
|
+
updatedAt;
|
|
98
|
+
workOrderId;
|
|
99
|
+
// The order this labor ticket corresponds to
|
|
100
|
+
workOrderOperation;
|
|
101
|
+
sub;
|
|
102
|
+
// Batch number for parts that had to be rerun. If not available in the ERP, default to an empty string
|
|
103
|
+
transactionDate;
|
|
104
|
+
}
|
|
105
|
+
class HTTPError extends Error {
|
|
106
|
+
constructor(message, status, code, data) {
|
|
107
|
+
super(message);
|
|
108
|
+
this.status = status;
|
|
109
|
+
this.code = code;
|
|
110
|
+
this.data = data;
|
|
111
|
+
this.name = "HTTPError";
|
|
112
|
+
}
|
|
113
|
+
}
|
|
114
|
+
class HTTPClientFactory {
|
|
115
|
+
static getInstance(config2) {
|
|
116
|
+
return new AxiosClient(config2.baseUrl, config2.retryAttempts);
|
|
117
|
+
}
|
|
118
|
+
}
|
|
119
|
+
class AxiosClient {
|
|
120
|
+
client = null;
|
|
121
|
+
retryAttempts;
|
|
122
|
+
isDestroyed = false;
|
|
123
|
+
inFlightControllers = /* @__PURE__ */ new Set();
|
|
124
|
+
pendingTimeouts = /* @__PURE__ */ new Set();
|
|
125
|
+
pendingSleepResolvers = /* @__PURE__ */ new Set();
|
|
126
|
+
/**
|
|
127
|
+
* Note regarding baseURL, from https://github.com/axios/axios
|
|
128
|
+
* `baseURL` will be prepended to `url` unless `url` is absolute and option `allowAbsoluteUrls` is set to true (the default).
|
|
129
|
+
* It can be convenient to set `baseURL` for an instance of axios to pass relative URLs to methods of that instance.
|
|
130
|
+
*/
|
|
131
|
+
constructor(baseUrl, retryAttempts) {
|
|
132
|
+
const timeout = parseInt(process.env.MM_API_TIMEOUT || "30000");
|
|
133
|
+
this.client = axios.create({
|
|
134
|
+
baseURL: baseUrl,
|
|
135
|
+
timeout,
|
|
136
|
+
headers: {
|
|
137
|
+
"Content-Type": "application/json"
|
|
138
|
+
}
|
|
139
|
+
});
|
|
140
|
+
this.retryAttempts = retryAttempts;
|
|
141
|
+
}
|
|
142
|
+
sleep(ms) {
|
|
143
|
+
return new Promise((resolve) => {
|
|
144
|
+
if (this.isDestroyed) {
|
|
145
|
+
resolve();
|
|
146
|
+
return;
|
|
147
|
+
}
|
|
148
|
+
const timeout = setTimeout(() => {
|
|
149
|
+
this.pendingTimeouts.delete(timeout);
|
|
150
|
+
this.pendingSleepResolvers.delete(resolve);
|
|
151
|
+
resolve();
|
|
152
|
+
}, ms);
|
|
153
|
+
this.pendingTimeouts.add(timeout);
|
|
154
|
+
this.pendingSleepResolvers.add(resolve);
|
|
155
|
+
});
|
|
156
|
+
}
|
|
157
|
+
async request(config2) {
|
|
158
|
+
if (this.isDestroyed || !this.client) {
|
|
159
|
+
throw new HTTPError("HTTP client has been destroyed", 500);
|
|
160
|
+
}
|
|
161
|
+
const controller = new AbortController();
|
|
162
|
+
this.inFlightControllers.add(controller);
|
|
163
|
+
const axiosConfig = {
|
|
164
|
+
method: config2.method,
|
|
165
|
+
url: config2.url,
|
|
166
|
+
headers: config2.headers,
|
|
167
|
+
data: config2.data,
|
|
168
|
+
params: config2.params,
|
|
169
|
+
signal: controller.signal
|
|
170
|
+
};
|
|
171
|
+
logger.info("HTTP request starting", {
|
|
172
|
+
url: config2.url,
|
|
173
|
+
method: config2.method,
|
|
174
|
+
baseURL: this.client.defaults.baseURL,
|
|
175
|
+
retryAttempts: this.retryAttempts
|
|
176
|
+
});
|
|
177
|
+
console.log("=== FULL URL DEBUG ===");
|
|
178
|
+
console.log("baseURL:", this.client.defaults.baseURL);
|
|
179
|
+
console.log("relative url:", config2.url);
|
|
180
|
+
console.log("full constructed URL:", (this.client.defaults.baseURL || "") + config2.url);
|
|
181
|
+
console.log("method:", config2.method);
|
|
182
|
+
let lastError;
|
|
183
|
+
try {
|
|
184
|
+
for (let attempt = 0; attempt <= this.retryAttempts; attempt++) {
|
|
185
|
+
try {
|
|
186
|
+
logger.info(`HTTP request attempt ${attempt + 1}/${this.retryAttempts + 1}`);
|
|
187
|
+
const response = await this.client.request(axiosConfig);
|
|
188
|
+
logger.info("HTTP request succeeded", { status: response.status });
|
|
189
|
+
return {
|
|
190
|
+
data: response.data,
|
|
191
|
+
status: response.status,
|
|
192
|
+
headers: response.headers
|
|
193
|
+
};
|
|
194
|
+
} catch (error) {
|
|
195
|
+
lastError = error;
|
|
196
|
+
const isAxiosErr = error instanceof AxiosError;
|
|
197
|
+
const code = isAxiosErr ? error.code : void 0;
|
|
198
|
+
const status = isAxiosErr ? error.response?.status : void 0;
|
|
199
|
+
const errorConstructor = error instanceof Error ? error.constructor.name : void 0;
|
|
200
|
+
const message = error instanceof Error ? error.message : String(error);
|
|
201
|
+
logger.info(`HTTP request attempt ${attempt + 1} failed`, {
|
|
202
|
+
errorType: typeof error,
|
|
203
|
+
errorConstructor,
|
|
204
|
+
isAxiosError: isAxiosErr,
|
|
205
|
+
message,
|
|
206
|
+
code,
|
|
207
|
+
status
|
|
208
|
+
});
|
|
209
|
+
if (error instanceof AxiosError && error.response?.status && error.response.status >= 400 && error.response.status < 500) {
|
|
210
|
+
logger.info("Not retrying due to 4xx client error");
|
|
211
|
+
break;
|
|
212
|
+
}
|
|
213
|
+
if (error instanceof AxiosError && error.code === "ERR_CANCELED") {
|
|
214
|
+
break;
|
|
215
|
+
}
|
|
216
|
+
if (attempt < this.retryAttempts) {
|
|
217
|
+
const waitTime = Math.pow(2, attempt) * 1e3;
|
|
218
|
+
logger.info(`Waiting ${waitTime}ms before retry`);
|
|
219
|
+
await this.sleep(waitTime);
|
|
220
|
+
if (this.isDestroyed) {
|
|
221
|
+
throw new HTTPError("HTTP client has been destroyed", 500);
|
|
222
|
+
}
|
|
223
|
+
}
|
|
224
|
+
}
|
|
225
|
+
}
|
|
226
|
+
} finally {
|
|
227
|
+
this.inFlightControllers.delete(controller);
|
|
228
|
+
}
|
|
229
|
+
logger.info("HTTP request failed after all retries, throwing error");
|
|
230
|
+
throw this.handleError(lastError, config2);
|
|
231
|
+
}
|
|
232
|
+
/**
 * Normalizes any thrown value into an HTTPError.
 *
 * Axios failures are enriched with the request method and the fully
 * constructed URL so log lines identify the failing endpoint; anything
 * else is wrapped as a generic 500 HTTPError.
 *
 * @param error Arbitrary thrown value (AxiosError, Error, or other).
 * @param requestConfig The request that failed, if known; used only to
 *   build a descriptive message.
 * @returns {HTTPError} Never throws — always returns the normalized error.
 */
handleError(error, requestConfig) {
  if (!(error instanceof AxiosError)) {
    return new HTTPError(
      error instanceof Error ? error.message : "Unknown error occurred",
      500
    );
  }
  // this.client may already be null if destroy() ran — hence the optional chain.
  const base = this.client?.defaults.baseURL || "";
  const targetUrl = requestConfig ? `${base}${requestConfig.url}` : "Unknown URL";
  const verb = requestConfig?.method || "Unknown method";
  return new HTTPError(
    `${error.message} (${verb} ${targetUrl})`,
    error.response?.status || 500,
    error.code,
    error.response?.data
  );
}
|
|
250
|
+
async destroy() {
|
|
251
|
+
if (this.isDestroyed) return;
|
|
252
|
+
this.isDestroyed = true;
|
|
253
|
+
for (const c of this.inFlightControllers) {
|
|
254
|
+
try {
|
|
255
|
+
c.abort();
|
|
256
|
+
} catch {
|
|
257
|
+
}
|
|
258
|
+
}
|
|
259
|
+
this.inFlightControllers.clear();
|
|
260
|
+
for (const t of this.pendingTimeouts) {
|
|
261
|
+
clearTimeout(t);
|
|
262
|
+
}
|
|
263
|
+
this.pendingTimeouts.clear();
|
|
264
|
+
for (const resolve of this.pendingSleepResolvers) {
|
|
265
|
+
try {
|
|
266
|
+
resolve();
|
|
267
|
+
} catch {
|
|
268
|
+
}
|
|
269
|
+
}
|
|
270
|
+
this.pendingSleepResolvers.clear();
|
|
271
|
+
this.client = null;
|
|
272
|
+
}
|
|
273
|
+
}
|
|
274
|
+
/**
 * Manages the MachineMetrics API bearer token: loads a cached token on
 * construction, lazily refreshes it via the `/user-token` endpoint when
 * missing or near expiry, and persists refreshed tokens back to the cache.
 */
class MMTokenManager {
  // Current JWT, or "" when no valid token is held.
  token;
  // Parsed expiration of `token`, or null when unknown.
  tokenExpiration;
  // MM API base URL from CoreConfiguration.
  baseUrl;
  // Shared HTTP client used to call the token endpoint.
  api;
  constructor() {
    const cached = getCachedMMToken();
    this.token = cached?.token || "";
    this.tokenExpiration = cached?.expiration ? new Date(cached.expiration) : null;
    // NOTE: removed leftover console.log debug block that dumped the MM API
    // base URL and the full set of configuration keys on every construction.
    const config2 = CoreConfiguration.inst();
    this.baseUrl = config2.mmApiBaseUrl;
    this.api = HTTPClientFactory.getInstance({
      baseUrl: this.baseUrl,
      retryAttempts: config2.mmApiRetryAttempts
    });
  }
  /**
   * Checks if the current token is expired or will expire within the next minute
   * @returns boolean indicating if token needs refresh
   */
  isTokenExpired() {
    if (!this.tokenExpiration) return true;
    // 60s buffer so a token about to lapse is treated as already expired.
    const bufferTimeMS = 6e4;
    const isExpired = this.tokenExpiration.getTime() - bufferTimeMS <= Date.now();
    if (isExpired) {
      logger.info("isTokenExpired: the api token from MM has expired");
    }
    return isExpired;
  }
  /**
   * Safely extracts expiration time from JWT token
   * @param token JWT token string
   * @returns Date object representing token expiration, or null if cannot be determined
   */
  getTokenExpiration(token) {
    try {
      const parts = token.split(".");
      if (parts.length !== 3) {
        // logger (not console) for consistency with the rest of this module.
        logger.warn("Invalid JWT format: token does not have 3 parts");
        return null;
      }
      const payload = Buffer.from(parts[1], "base64").toString("utf-8");
      const decodedPayload = JSON.parse(payload);
      if (typeof decodedPayload.exp !== "number") {
        logger.warn("JWT does not contain expiration time (exp claim)");
        return null;
      }
      // `exp` is seconds since epoch; Date expects milliseconds.
      return new Date(decodedPayload.exp * 1e3);
    } catch (error) {
      logger.warn("Error parsing JWT token:", { error });
      return null;
    }
  }
  /**
   * Gets a valid token, either from cache or by requesting a new one
   * @returns Promise resolving to a valid token string
   */
  async getToken() {
    if (this.token && !this.isTokenExpired()) {
      return this.token;
    }
    const config2 = CoreConfiguration.inst();
    const serviceToken = config2.mmApiAuthToken;
    const tokenResponse = await this.api.request({
      url: "/user-token",
      method: "GET",
      headers: {
        Authorization: `Bearer ${serviceToken}`
      }
    });
    this.token = tokenResponse.data;
    this.tokenExpiration = this.getTokenExpiration(this.token);
    setCachedMMToken({
      token: this.token,
      expiration: this.tokenExpiration?.getTime() || null
    });
    logger.info("getToken: the api token from MM has been refreshed");
    return this.token;
  }
  /**
   * Invalidates the current token, forcing a fresh fetch on next getToken() call
   */
  invalidateToken() {
    this.token = "";
    this.tokenExpiration = null;
    setCachedMMToken({ token: "", expiration: null });
  }
}
|
|
367
|
+
/**
 * Error type carrying the raw `errors` array returned by a GraphQL response.
 */
class GraphQLError extends Error {
  /**
   * @param {string} message Human-readable summary.
   * @param {unknown[]} errors Raw GraphQL error objects from the response body.
   */
  constructor(message, errors) {
    super(message);
    this.name = "GraphQLError";
    this.errors = errors;
  }
}
|
|
374
|
+
/**
 * Central error normalizer for MM API calls.
 */
class ErrorHandler {
  /**
   * Transforms any error into a standardized HTTPError or GraphQLError and throws it.
   * Never returns - always throws an error.
   *
   * @throws {HTTPError|GraphQLError} Standardized error with appropriate details
   */
  static handle(error) {
    if (error instanceof HTTPError) {
      // An HTTP 200 proxying a GraphQL failure carries an `errors` array in
      // the body — surface it as a GraphQLError instead.
      if (error.data && typeof error.data === "object" && "errors" in error.data) {
        ErrorHandler.throwGraphQLError(error.data.errors);
      }
      logger.error("Error>>HTTP", {
        status: error.status,
        message: error.message,
        code: error.code,
        data: error.data
      });
      throw error;
    }
    if (error instanceof AxiosError) {
      if (error.response?.data?.errors) {
        ErrorHandler.throwGraphQLError(error.response.data.errors);
      }
      const httpError2 = new HTTPError(
        error.message,
        error.response?.status || 500,
        error.code,
        error.response?.data
      );
      logger.error("Error>>HTTP", {
        status: httpError2.status,
        message: httpError2.message,
        code: httpError2.code,
        data: httpError2.data
      });
      throw httpError2;
    }
    // Anything else (plain Error, string, etc.) becomes a generic 500.
    const httpError = new HTTPError(
      error instanceof Error ? error.message : "An unknown error occurred",
      500
    );
    logger.error("Error>>Unknown", {
      message: httpError.message,
      status: httpError.status
    });
    throw httpError;
  }
  /**
   * Builds, logs, and throws a GraphQLError for the given `errors` payload.
   * Extracted because the HTTPError and AxiosError branches previously
   * duplicated this sequence verbatim.
   * @throws {GraphQLError} always
   */
  static throwGraphQLError(errors) {
    const graphqlError = new GraphQLError("GraphQL Error", errors);
    logger.error("Error>>GraphQL", {
      message: graphqlError.message,
      errors: graphqlError.errors
    });
    throw graphqlError;
  }
}
|
|
439
|
+
// Extra keys (alongside ERPObjType values) used to index
// MMApiClient.resourceURLs for endpoints that are not ERP object types.
const MAPPING = "MAPPING";
const CHECKPOINTS = "CHECKPOINTS";
const MACHINE_GROUPS = "MACHINE_GROUPS";
const GRAPHQL = "GRAPHQL";
|
|
443
|
+
// String-valued enum of the base-URL families a request can target:
// the ERP API service or the general MM API.
var UrlBase = /* @__PURE__ */ (function (UrlBase2) {
  UrlBase2["ErpApiSvcBase"] = "ErpApiSvcBase";
  UrlBase2["ApiBase"] = "ApiBase";
  return UrlBase2;
})(UrlBase || {});
|
|
448
|
+
/**
 * Client for the MachineMetrics API (both the ERP API service and the
 * general MM API). All verbs share one authenticated request pipeline with
 * a single token-refresh retry on auth failures.
 */
class MMApiClient {
  // Manages acquisition/refresh of the MM API bearer token.
  tokenMgr;
  // Shared HTTP client; URLs are fully qualified per request (empty base).
  api;
  // Resolved base URLs keyed by UrlBase.
  baseUrls;
  // Maps ERP object types (plus a few extra keys) to MM API resource paths.
  resourceURLs = {
    [ERPObjType.RESOURCES]: "/erp/v1/resources",
    [ERPObjType.PARTS]: "/erp/v1/parts",
    [ERPObjType.PART_OPERATION]: "/erp/v1/part-operations",
    [ERPObjType.WORK_ORDERS]: "/erp/v1/production-orders",
    [ERPObjType.WORK_ORDER_OPERATIONS]: "/erp/v1/production-order-operations",
    [ERPObjType.PERSONS]: "/erp/v1/persons",
    [ERPObjType.REASONS]: "/erp/v1/reasons",
    [ERPObjType.LABOR_TICKETS]: "/erp/v1/labor-tickets",
    [MAPPING]: "/erp/v1/mapping",
    [CHECKPOINTS]: "/erp/v1/checkpoints",
    [MACHINE_GROUPS]: "/machine-groups",
    [GRAPHQL]: "/proxy/graphql"
  };
  constructor() {
    this.tokenMgr = new MMTokenManager();
    this.baseUrls = {
      [UrlBase.ErpApiSvcBase]: CoreConfiguration.inst().mmERPSvcApiBaseUrl,
      [UrlBase.ApiBase]: CoreConfiguration.inst().mmApiBaseUrl
    };
    // The base URL is chosen per request from this.baseUrls, so the shared
    // client is created with an empty base.
    this.api = HTTPClientFactory.getInstance({
      baseUrl: "",
      retryAttempts: CoreConfiguration.inst().mmApiRetryAttempts
    });
  }
  //#region Private Methods
  /**
   * Runs `operation` with a valid token. On 401/403 — or a 500 whose body
   * mentions a JWT problem — the cached token is invalidated and the
   * operation retried once with a fresh token. When a caller supplies an
   * explicit `options.token`, the retry is skipped (refreshing would not
   * change the token used).
   */
  async executeWithAuthRetry(operation, options = {}) {
    let token = options.token || await this.tokenMgr.getToken();
    try {
      return await operation(token);
    } catch (error) {
      const hasStatus = (err) => {
        return typeof err === "object" && err !== null && "status" in err;
      };
      const isAuthError = hasStatus(error) && (error.status === 401 || error.status === 403) || hasStatus(error) && error.status === 500 && typeof error.data?.error === "string" && error.data.error.includes("JWT");
      if (isAuthError && !options.token) {
        logger.info("Retrying request with fresh token due to auth error");
        this.tokenMgr.invalidateToken();
        token = await this.tokenMgr.getToken();
        return await operation(token);
      }
      throw error;
    }
  }
  /**
   * Builds the fully-qualified URL for a request. For the ERP API service
   * base, `erpParams` (raw/dedupe flags) are appended to the query string.
   */
  buildRequestUrl(path2, queryParams, baseUrl, erpParams) {
    const params = new URLSearchParams({
      ...queryParams,
      ...baseUrl === UrlBase.ErpApiSvcBase ? erpParams : {}
    });
    const url = params.size > 0 ? `${path2}?${params.toString()}` : path2;
    return this.baseUrls[baseUrl] + url;
  }
  /**
   * Shared pipeline for all verbs: resolves the base URL, attaches bearer
   * auth, merges caller headers, runs the auth-retry wrapper, and
   * normalizes failures via ErrorHandler (which always throws).
   *
   * Previously this logic was duplicated verbatim across getData, postData,
   * deleteData, and patchData.
   *
   * @param method HTTP verb ("GET" | "POST" | "DELETE" | "PATCH").
   * @param path2 Resource path relative to the chosen base.
   * @param data Optional request body; omitted from the request when undefined.
   * @param queryParams Caller-supplied query parameters.
   * @param erpParams Flags appended only for the ERP API service base.
   * @param options May carry baseUrl, token, and extra headers.
   * @returns Response body spread together with `httpStatus`.
   */
  async sendRequest(method, path2, { data, queryParams = {}, erpParams = {}, options = {} }) {
    try {
      return await this.executeWithAuthRetry(async (token) => {
        const baseUrl = options.baseUrl || UrlBase.ErpApiSvcBase;
        const result = await this.api.request({
          url: this.buildRequestUrl(path2, queryParams, baseUrl, erpParams),
          method,
          ...data !== void 0 ? { data } : {},
          headers: {
            Authorization: `Bearer ${token}`,
            ...options.headers
          }
        });
        return {
          ...result.data,
          httpStatus: result.status
        };
      }, options);
    } catch (error) {
      ErrorHandler.handle(error);
    }
  }
  /**
   * Fetch data from the MM API, with an auth retry mechanism
   * Auth Retry Level (executeWithAuthRetry)
   * - 401 (Unauthorized) and 403 (Forbidden) get special treatment
   * - These trigger a token refresh and one retry attempt
   * - If the retry still fails, the error is re-thrown
   *
   * HTTP Status Codes behavior:
   * - 2xx - Success, returns normally
   * - 4xx - Client errors, throws immediately
   * - 5xx - Server errors, retries then throws
   *
   * Flags:
   * - 'raw' = true will be set for MM's ERP API url
   */
  async getData(path2, queryParams = {}, options = {}) {
    return await this.sendRequest("GET", path2, {
      queryParams,
      erpParams: { raw: "true" },
      options
    });
  }
  /**
   * Send data to the MM API, with an auth retry mechanism.
   * Retry/status behavior matches getData.
   *
   * Flags:
   * - 'raw' = true and 'dedupe' = true will be set for MM's ERP API url
   */
  async postData(path2, data = {}, queryParams = {}, options = {}) {
    return await this.sendRequest("POST", path2, {
      // Body is serialized explicitly, matching the original behavior.
      data: JSON.stringify(data),
      queryParams,
      erpParams: { dedupe: "true", raw: "true" },
      options
    });
  }
  /**
   * Delete data from the MM API, with an auth retry mechanism.
   * Retry/status behavior matches getData.
   */
  async deleteData(path2, data = {}, options = {}) {
    return await this.sendRequest("DELETE", path2, {
      data,
      erpParams: { raw: "true" },
      options
    });
  }
  /**
   * Patch data to the MM API, with an auth retry mechanism.
   * Retry/status behavior matches getData.
   */
  async patchData(path2, data = {}, options = {}) {
    return await this.sendRequest("PATCH", path2, {
      data,
      erpParams: { raw: "true" },
      options
    });
  }
  /**
   * Executes a GraphQL operation via the MM API proxy.
   *
   * FIX: the previous implementation wrapped postData in a second
   * try/catch + ErrorHandler.handle. Since postData already normalizes
   * failures (and throws GraphQLError for GraphQL failures), the outer
   * handler re-processed a GraphQLError — which is neither an HTTPError
   * nor an AxiosError — and wrapped it into a generic 500 HTTPError,
   * discarding the `errors` payload. Errors now propagate unchanged.
   */
  async graphqlRequest(operationName, query) {
    return await this.postData(
      this.resourceURLs[GRAPHQL],
      { operationName, query },
      {},
      {
        baseUrl: UrlBase.ApiBase,
        token: CoreConfiguration.inst().mmApiAuthToken,
        headers: { "Content-Type": "application/json" }
      }
    );
  }
  /**
   * Fetch a checkpoint for a specific system, table, and checkpoint type
   * @param checkpoint The checkpoint to fetch
   * @returns Promise with the checkpoint data
   */
  async fetchCheckpoint(checkpoint) {
    const { system, table, checkpointType } = checkpoint;
    return this.getData(this.resourceURLs[CHECKPOINTS], {
      system,
      table,
      checkpointType
    });
  }
  //#endregion private methods
  //#region Public Methods
  /**
   * Send resources to the MM API
   * @param resources Array of resources to send
   * @returns Promise with the API response
   */
  async sendResourcesToMM(resources) {
    return await this.postData(this.resourceURLs[ERPObjType.RESOURCES], {
      data: resources.map((resource) => resource.toRESTApiObject())
    });
  }
  /**
   * Send parts to the MM API
   * @param parts Array of parts to send
   * @returns Promise with the API response
   */
  async sendPartsToMM(parts) {
    return await this.postData(this.resourceURLs[ERPObjType.PARTS], {
      data: parts.map((part) => part.toRESTApiObject())
    });
  }
  /**
   * Send part operations to the MM API
   * @param operations Array of part operations to send
   * @returns Promise with the API response
   */
  async sendPartOperationsToMM(operations) {
    return await this.postData(this.resourceURLs[ERPObjType.PART_OPERATION], {
      data: operations.map((operation) => operation.toRESTApiObject())
    });
  }
  /**
   * Send work orders to the MM API
   * @param workOrders Array of work orders to send
   * @returns Promise with the API response
   */
  async sendWorkOrdersToMM(workOrders) {
    return await this.postData(this.resourceURLs[ERPObjType.WORK_ORDERS], {
      data: workOrders.map((workOrder) => workOrder.toRESTApiObject())
    });
  }
  /**
   * Send work order operations to the MM API
   * @param workOrderOperations Array of work order operations to send
   * @returns Promise with the API response
   */
  async sendWorkOrderOperationsToMM(workOrderOperations) {
    return await this.postData(
      this.resourceURLs[ERPObjType.WORK_ORDER_OPERATIONS],
      {
        data: workOrderOperations.map(
          (operation) => operation.toRESTApiObject()
        )
      }
    );
  }
  /**
   * Send persons to the MM API
   * @param persons Array of persons to send
   * @returns Promise with the API response
   */
  async sendPersonsToMM(persons) {
    return await this.postData(this.resourceURLs[ERPObjType.PERSONS], {
      data: persons.map((person) => person.toRESTApiObject())
    });
  }
  /**
   * Send reasons to the MM API
   * @param reasons Array of reasons to send
   * @returns Promise with the API response
   */
  async sendReasonsToMM(reasons) {
    return await this.postData(this.resourceURLs[ERPObjType.REASONS], {
      data: reasons.map((reason) => reason.toRESTApiObject())
    });
  }
  /**
   * Send labor tickets to the MM API
   * @param laborTickets Array of labor tickets to send
   * @returns Promise with the API response
   */
  async sendLaborTicketsToMM(laborTickets) {
    return await this.postData(this.resourceURLs[ERPObjType.LABOR_TICKETS], {
      data: laborTickets.map((ticket) => ticket.toRESTApiObject())
    });
  }
  /**
   * Delete labor tickets from the MM API
   * @param laborTickets Array of labor ticket records to delete
   * @returns Promise with the API response
   */
  async deleteLaborTickets(laborTickets) {
    logger.debug("deleteLaborTickets:", { laborTickets });
    return await this.deleteData(this.resourceURLs[ERPObjType.LABOR_TICKETS], {
      laborTickets
    });
  }
  /**
   * Fetch transformed labor tickets from the MM API
   * @returns Promise with the transformed labor tickets
   */
  async fetchTransformedLaborTickets() {
    return await this.getData(
      `${this.resourceURLs[ERPObjType.LABOR_TICKETS]}/export`
    );
  }
  /**
   * Fetch labor ticket updates from the MM API
   * @param query Query parameters for filtering updates
   * @returns Promise with an array of labor ticket updates
   */
  async fetchLaborTicketUpdates(query) {
    const updates = await this.getData(
      `${this.resourceURLs[ERPObjType.LABOR_TICKETS]}/export/updates`,
      query
    );
    return updates.data.map((ticket) => new MMReceiveLaborTicket(ticket));
  }
  /**
   * Fetch a single labor ticket by reference from the MM API
   * @param laborTicketRef The labor ticket reference to fetch
   * @returns Promise with the labor ticket data
   */
  async fetchLaborTicketByRef(laborTicketRef) {
    const response = await this.getData(
      `${this.resourceURLs[ERPObjType.LABOR_TICKETS]}/${laborTicketRef}`
    );
    return new MMReceiveLaborTicket(response.data);
  }
  /**
   * Fetch a checkpoint for a specific system, table, and checkpoint type
   * @param checkpoint The checkpoint to fetch
   * @returns Promise with the checkpoint data
   */
  async getCheckpoint(checkpoint) {
    return await this.fetchCheckpoint(checkpoint);
  }
  /**
   * Save a checkpoint to the MM API
   * @param checkpoint The checkpoint to save
   * @returns Promise with the API response
   */
  async saveCheckpoint(checkpoint) {
    return await this.postData(this.resourceURLs[CHECKPOINTS], checkpoint);
  }
  /**
   * Initialize a checkpoint for a specific system, table, and checkpoint type:
   * if no checkpoint with a timestamp exists yet, the supplied one is saved.
   * @param checkpoint The checkpoint to initialize
   */
  async initializeCheckpoint(checkpoint) {
    const currentCheckpoint = await this.fetchCheckpoint(checkpoint);
    logger.info("currentCheckpoint:", { currentCheckpoint });
    logger.info("currentCheckpoint keys:", {
      keys: Object.keys(currentCheckpoint || {})
    });
    const checkpointResponse = currentCheckpoint;
    const hasCheckpointData = checkpointResponse && checkpointResponse.timestamp;
    if (!hasCheckpointData) {
      logger.info("No checkpoint data found, initializing checkpoint");
      const saveCheckpoint = await this.saveCheckpoint(checkpoint);
      logger.info("saveCheckpoint:", { saveCheckpoint });
    } else {
      logger.info("Existing checkpoint found:", {
        timestamp: checkpointResponse.timestamp
      });
    }
  }
  /**
   * Send data to MM API based on the object type
   * @param type The type of ERP object to send
   * @param objects Array of objects to send
   * @returns Promise with the API response including affected rows count
   * @throws {Error} When `type` is not a supported ERPObjType.
   */
  async sendDataToMMByType(type, objects) {
    switch (type) {
      case ERPObjType.PERSONS:
        return await this.sendPersonsToMM(objects);
      case ERPObjType.RESOURCES:
        return await this.sendResourcesToMM(objects);
      case ERPObjType.PARTS:
        return await this.sendPartsToMM(objects);
      case ERPObjType.PART_OPERATION:
        return await this.sendPartOperationsToMM(objects);
      case ERPObjType.WORK_ORDERS:
        return await this.sendWorkOrdersToMM(objects);
      case ERPObjType.WORK_ORDER_OPERATIONS:
        return await this.sendWorkOrderOperationsToMM(objects);
      case ERPObjType.REASONS:
        return await this.sendReasonsToMM(objects);
      case ERPObjType.LABOR_TICKETS:
        return await this.sendLaborTicketsToMM(objects);
      default:
        throw new Error(`Unsupported type: ${ERPObjType[type]}`);
    }
  }
  /**
   * Update the labor ticket ERP ID by laborTicketRef
   * @param laborTicketRef The MM API labor ticket unique ID
   * @param laborTicketId The ERP labor ticket ID to update
   * @returns The updated labor ticket
   */
  async updateLaborTicketIdByRef(laborTicketRef, laborTicketId) {
    return await this.patchData(
      `${this.resourceURLs[ERPObjType.LABOR_TICKETS]}/${laborTicketRef}`,
      { laborTicketId }
    );
  }
  /**
   * Add failed labor ticket refs to the MM API
   * @param system The source ERP system identifier
   * @param failedTicketRefs The failed labor ticket references
   * @returns The response from the MM API
   */
  async addFailedLaborTicketRefs(system, failedTicketRefs) {
    return await this.postData(
      `${this.resourceURLs[ERPObjType.LABOR_TICKETS]}/failed`,
      { failedTicketRefs },
      { system }
    );
  }
  /**
   * Fetch the failed labor tickets recorded for a system.
   * NOTE(review): this endpoint's payload is read from `erpLaborTickets`,
   * unlike the other fetchers which read `data` — confirm against the API.
   */
  async fetchFailedLaborTickets(system) {
    const failedTickets = await this.getData(
      `${this.resourceURLs[ERPObjType.LABOR_TICKETS]}/failed`,
      { system }
    );
    return failedTickets.erpLaborTickets.map(
      (ticket) => new MMReceiveLaborTicket(ticket)
    );
  }
  /**
   * Send connector logs to the MM API
   * @param logEntry Single log entry to send
   * @returns Promise with the API response
   */
  async sendConnectorLog(logEntry) {
    return await this.postData(
      "/connector/logs",
      logEntry,
      {},
      { baseUrl: UrlBase.ApiBase }
    );
  }
  /**
   * Send bulk connector logs to the MM API
   * @param logs Array of log entries to send
   * @returns Promise with the API response
   */
  async sendBulkConnectorLogs(logs) {
    return await this.postData(
      "/connector/logs",
      { logs },
      {},
      { baseUrl: UrlBase.ApiBase }
    );
  }
  /**
   * Remove previously-recorded failed labor ticket refs for a system.
   */
  async deleteFailedLaborTicketIds(system, laborTicketRefs) {
    return await this.postData(
      `${this.resourceURLs[ERPObjType.LABOR_TICKETS]}/failed/remove`,
      { laborTicketRefs },
      { system }
    );
  }
  /**
   * Fetch machine groups from the MM API
   * @returns Promise with the machine groups data
   */
  async fetchMachineGroups() {
    return await this.getData(
      this.resourceURLs[MACHINE_GROUPS],
      {},
      { baseUrl: UrlBase.ApiBase }
    );
  }
  /**
   * Fetch resources using GraphQL
   * @returns Promise with the resources data
   */
  async fetchResourcesFromGraphQL() {
    return await this.graphqlRequest(
      "getErpResources",
      "query getErpResources { machineResource: erpResources { machineRef machineGroupId resourceId }}"
    );
  }
  /**
   * Cleanup all HTTP connections and resources
   * Call this when the service is no longer needed
   */
  async destroy() {
    // NOTE(review): this.api comes from HTTPClientFactory.getInstance and may
    // be shared with other consumers — confirm destroy semantics.
    await this.api.destroy();
  }
  //#endregion public methods
}
|
|
1013
|
+
/**
 * DTO representing a person record to be sent to the MM REST API.
 */
class MMSendPerson {
  /**
   * @param {string} personId Unique person identifier.
   * @param {string} firstName Given name.
   * @param {string} lastName Family name.
   * @param {boolean} isActive Whether the person is active.
   */
  constructor(personId, firstName, lastName, isActive) {
    this.personId = personId;
    this.firstName = firstName;
    this.lastName = lastName;
    this.isActive = isActive;
  }
  /**
   * Serializes to the REST wire format; `isActive` becomes a "1"/"0" flag.
   */
  toRESTApiObject() {
    const { personId, firstName, lastName, isActive } = this;
    return {
      personId,
      firstName,
      lastName,
      isActive: isActive ? "1" : "0"
    };
  }
  /**
   * Rehydrates a person from a plain object (e.g. parsed JSON); falsy
   * string fields default to "" and isActive is true only for "1".
   */
  static fromPlainObject(data) {
    const personId = data.personId || "";
    const firstName = data.firstName || "";
    const lastName = data.lastName || "";
    return new MMSendPerson(personId, firstName, lastName, data.isActive === "1");
  }
}
|
|
1037
|
+
/**
 * DTO for a resource (machine or resource group) sent to the MM REST API.
 * Burden rates are held as numbers and serialized as strings; the
 * `isResourceGroup` flag is serialized as "1"/"0".
 */
class MMSendResource {
  constructor(resourceId, name, isResourceGroup, description, type, productionBurdenRateHourly, setupBurdenRateHourly) {
    Object.assign(this, {
      resourceId,
      name,
      isResourceGroup,
      description,
      type,
      productionBurdenRateHourly,
      setupBurdenRateHourly
    });
  }
  /** Serialize to the REST wire shape (numeric rates stringified). */
  toRESTApiObject() {
    return {
      resourceId: this.resourceId,
      name: this.name,
      isResourceGroup: this.isResourceGroup ? "1" : "0",
      description: this.description,
      type: this.type,
      productionBurdenRateHourly: String(this.productionBurdenRateHourly),
      setupBurdenRateHourly: String(this.setupBurdenRateHourly)
    };
  }
  /** Build an instance from a plain object, parsing numeric strings. */
  static fromPlainObject(data) {
    return new MMSendResource(
      data.resourceId || "",
      data.name || "",
      data.isResourceGroup === "1",
      data.description || "",
      data.type || "",
      Number.parseFloat(data.productionBurdenRateHourly || "0"),
      Number.parseFloat(data.setupBurdenRateHourly || "0")
    );
  }
}
|
|
1070
|
+
/**
 * DTO for a part identity (number / revision / method) sent to the MM REST API.
 */
class MMSendPart {
  constructor(partNumber, partRevision, method) {
    Object.assign(this, { partNumber, partRevision, method });
  }
  /** Serialize to the REST wire shape (fields pass through unchanged). */
  toRESTApiObject() {
    const { partNumber, partRevision, method } = this;
    return { partNumber, partRevision, method };
  }
  /** Build an instance from a plain object, defaulting missing fields to "". */
  static fromPlainObject(data) {
    return new MMSendPart(
      data.partNumber || "",
      data.partRevision || "",
      data.method || ""
    );
  }
}
|
|
1091
|
+
/**
 * DTO for a part operation (routing step) sent to the MM REST API.
 * Times are held as integer milliseconds, quantityPerPart as a number;
 * all numerics are stringified on the wire.
 */
class MMSendPartOperation {
  constructor(partNumber, partRevision, method, sequenceNumber, resourceId, cycleTimeMs, setupTimeMs, description, quantityPerPart) {
    Object.assign(this, {
      partNumber,
      partRevision,
      method,
      sequenceNumber,
      resourceId,
      cycleTimeMs,
      setupTimeMs,
      description,
      quantityPerPart
    });
  }
  /** Serialize to the REST wire shape (numeric fields stringified). */
  toRESTApiObject() {
    return {
      partNumber: this.partNumber,
      partRevision: this.partRevision,
      method: this.method,
      sequenceNumber: this.sequenceNumber,
      resourceId: this.resourceId,
      cycleTimeMs: String(this.cycleTimeMs),
      setupTimeMs: String(this.setupTimeMs),
      description: this.description,
      quantityPerPart: String(this.quantityPerPart)
    };
  }
  /** Build an instance from a plain object; times default to 0, quantityPerPart to 1. */
  static fromPlainObject(data) {
    return new MMSendPartOperation(
      data.partNumber || "",
      data.partRevision || "",
      data.method || "",
      data.sequenceNumber || "",
      data.resourceId || "",
      Number.parseInt(data.cycleTimeMs || "0"),
      Number.parseInt(data.setupTimeMs || "0"),
      data.description || "",
      Number.parseFloat(data.quantityPerPart || "1")
    );
  }
}
|
|
1130
|
+
/**
 * DTO for a work order sent to the MM REST API.
 * Date fields may be null (missing dates are normalized to null on
 * construction from plain objects); quantityRequired is numeric and
 * stringified on the wire.
 */
class MMSendWorkOrder {
  constructor(workOrderId, lot, split, sub, status, dueDate, description, scheduledStartDate, scheduledEndDate, closedDate, quantityRequired, partNumber, partRevision, method) {
    Object.assign(this, {
      workOrderId,
      lot,
      split,
      sub,
      status,
      dueDate,
      description,
      scheduledStartDate,
      scheduledEndDate,
      closedDate,
      quantityRequired,
      partNumber,
      partRevision,
      method
    });
  }
  /** Serialize to the REST wire shape (quantityRequired stringified). */
  toRESTApiObject() {
    return {
      workOrderId: this.workOrderId,
      lot: this.lot,
      split: this.split,
      sub: this.sub,
      status: this.status,
      dueDate: this.dueDate,
      description: this.description,
      scheduledStartDate: this.scheduledStartDate,
      scheduledEndDate: this.scheduledEndDate,
      closedDate: this.closedDate,
      quantityRequired: String(this.quantityRequired),
      partNumber: this.partNumber,
      partRevision: this.partRevision,
      method: this.method
    };
  }
  /** Build an instance from a plain object; dates default to null, strings to "". */
  static fromPlainObject(data) {
    return new MMSendWorkOrder(
      data.workOrderId || "",
      data.lot || "",
      data.split || "",
      data.sub || "",
      data.status || "",
      data.dueDate ?? null,
      data.description || "",
      data.scheduledStartDate ?? null,
      data.scheduledEndDate ?? null,
      data.closedDate ?? null,
      Number.parseFloat(data.quantityRequired || "0"),
      data.partNumber || "",
      data.partRevision || "",
      data.method || ""
    );
  }
}
|
|
1184
|
+
/**
 * DTO for a work-order operation sent to the MM REST API.
 * Quantities/rates/times are numeric in memory and stringified on the wire;
 * date fields may be null. (The lower-cased `productionburdenRateHourly` /
 * `setupburdenRatehourly` spellings are part of the existing wire contract
 * and are preserved as-is.)
 */
class MMSendWorkOrderOperation {
  constructor(workOrderId, lot, split, sub, sequenceNumber, resourceId, startQuantity, finishQuantity, expectedRejectRate, scheduledStartDate, scheduledFinishDate, closedDate, cycleTimeMs, setupTimeMs, productionburdenRateHourly, setupburdenRatehourly, operationType, quantityPerPart, status) {
    Object.assign(this, {
      workOrderId,
      lot,
      split,
      sub,
      sequenceNumber,
      resourceId,
      startQuantity,
      finishQuantity,
      expectedRejectRate,
      scheduledStartDate,
      scheduledFinishDate,
      closedDate,
      cycleTimeMs,
      setupTimeMs,
      productionburdenRateHourly,
      setupburdenRatehourly,
      operationType,
      quantityPerPart,
      status
    });
  }
  /** Serialize to the REST wire shape (numeric fields stringified). */
  toRESTApiObject() {
    return {
      workOrderId: this.workOrderId,
      lot: this.lot,
      split: this.split,
      sub: this.sub,
      sequenceNumber: this.sequenceNumber,
      resourceId: this.resourceId,
      startQuantity: String(this.startQuantity),
      finishQuantity: String(this.finishQuantity),
      expectedRejectRate: String(this.expectedRejectRate),
      scheduledStartDate: this.scheduledStartDate,
      scheduledFinishDate: this.scheduledFinishDate,
      closedDate: this.closedDate,
      cycleTimeMs: String(this.cycleTimeMs),
      setupTimeMs: String(this.setupTimeMs),
      productionburdenRateHourly: String(this.productionburdenRateHourly),
      setupburdenRatehourly: String(this.setupburdenRatehourly),
      operationType: this.operationType,
      quantityPerPart: String(this.quantityPerPart),
      status: this.status
    };
  }
  /** Build an instance from a plain object; numerics default to 0 (quantityPerPart to 1), dates to null. */
  static fromPlainObject(data) {
    return new MMSendWorkOrderOperation(
      data.workOrderId || "",
      data.lot || "",
      data.split || "",
      data.sub || "",
      data.sequenceNumber || "",
      data.resourceId || "",
      Number.parseFloat(data.startQuantity || "0"),
      Number.parseFloat(data.finishQuantity || "0"),
      Number.parseFloat(data.expectedRejectRate || "0"),
      data.scheduledStartDate ?? null,
      data.scheduledFinishDate ?? null,
      data.closedDate ?? null,
      Number.parseInt(data.cycleTimeMs || "0"),
      Number.parseInt(data.setupTimeMs || "0"),
      Number.parseFloat(data.productionburdenRateHourly || "0"),
      Number.parseFloat(data.setupburdenRatehourly || "0"),
      data.operationType || "",
      Number.parseFloat(data.quantityPerPart || "1"),
      data.status || ""
    );
  }
}
|
|
1253
|
+
/**
 * DTO for a reason code (e.g. downtime/scrap categorization) sent to the
 * MM REST API. All fields are plain strings and pass through unchanged.
 */
class MMSendReason {
  constructor(reasonId, category, code, description, entityType) {
    Object.assign(this, { reasonId, category, code, description, entityType });
  }
  /** Serialize to the REST wire shape. */
  toRESTApiObject() {
    const { reasonId, category, code, description, entityType } = this;
    return { reasonId, category, code, description, entityType };
  }
  /** Build an instance from a plain object, defaulting missing fields to "". */
  static fromPlainObject(data) {
    return new MMSendReason(
      data.reasonId || "",
      data.category || "",
      data.code || "",
      data.description || "",
      data.entityType || ""
    );
  }
}
|
|
1280
|
+
/**
 * DTO for a labor ticket sent to the MM REST API.
 * Part counts are integers in memory and stringified on the wire;
 * timestamp fields (transactionDate/clockIn/clockOut) may be null.
 */
class MMSendLaborTicket {
  constructor(workOrderId, lot, split, sub, sequenceNumber, resourceId, personId, laborTicketId, transactionDate, clockIn, clockOut, goodParts, badParts, type, comment, state) {
    Object.assign(this, {
      workOrderId,
      lot,
      split,
      sub,
      sequenceNumber,
      resourceId,
      personId,
      laborTicketId,
      transactionDate,
      clockIn,
      clockOut,
      goodParts,
      badParts,
      type,
      comment,
      state
    });
  }
  /** Serialize to the REST wire shape (part counts stringified). */
  toRESTApiObject() {
    return {
      workOrderId: this.workOrderId,
      lot: this.lot,
      split: this.split,
      sub: this.sub,
      sequenceNumber: this.sequenceNumber,
      resourceId: this.resourceId,
      personId: this.personId,
      laborTicketId: this.laborTicketId,
      transactionDate: this.transactionDate,
      clockIn: this.clockIn,
      clockOut: this.clockOut,
      goodParts: String(this.goodParts),
      badParts: String(this.badParts),
      type: this.type,
      comment: this.comment,
      state: this.state
    };
  }
  /** Build an instance from a plain object; counts default to 0, timestamps to null. */
  static fromPlainObject(data) {
    return new MMSendLaborTicket(
      data.workOrderId || "",
      data.lot || "",
      data.split || "",
      data.sub || "",
      data.sequenceNumber || "",
      data.resourceId || "",
      data.personId || "",
      data.laborTicketId || "",
      data.transactionDate ?? null,
      data.clockIn ?? null,
      data.clockOut ?? null,
      Number.parseInt(data.goodParts || "0"),
      Number.parseInt(data.badParts || "0"),
      data.type || "",
      data.comment || "",
      data.state || ""
    );
  }
}
|
|
1340
|
+
// Module-level memo so the accounts endpoint is hit at most once per process.
let companyInfoCache = null;
/**
 * Fetch (and memoize) the current account's company info from the MM API.
 * @returns {Promise<{timezone: *, locationRef: string, companyId: *}>}
 * @throws {Error} when configuration is missing, the API call fails, or the
 *   response carries no company data
 */
const getCompanyInfo = async () => {
  if (companyInfoCache) {
    return companyInfoCache;
  }
  try {
    const cfg = CoreConfiguration.inst();
    const apiUrl = cfg.mmApiBaseUrl;
    const authToken = cfg.mmApiAuthToken;
    if (!apiUrl || !authToken) {
      throw new Error("Missing required configuration for company info fetch");
    }
    const client = HTTPClientFactory.getInstance({
      baseUrl: apiUrl,
      retryAttempts: cfg.mmApiRetryAttempts
    });
    const response = await client.request({
      url: "/accounts/current?includeLocation=true",
      method: "GET",
      headers: { Authorization: `Bearer ${authToken}` }
    });
    const userInfo = response.data;
    if (!userInfo?.company) {
      throw new Error("Unable to retrieve company information from API");
    }
    logger.info("Fetched company info from /accounts/current", {
      locationRef: userInfo.locationRef,
      companyId: userInfo.company.id,
      timezone: userInfo.company.timezone
    });
    companyInfoCache = {
      timezone: userInfo.company.timezone,
      // Numeric location ref normalized to a string for downstream consumers.
      locationRef: String(userInfo.locationRef),
      companyId: userInfo.company.id
    };
    return companyInfoCache;
  } catch (error) {
    // Re-wrap so callers get a uniform message; original error text is preserved.
    throw new Error(
      `Failed to get company info: ${error instanceof Error ? error.message : "Unknown error"}`
    );
  }
};
|
|
1385
|
+
// Bundler-generated namespace record for this chunk: a frozen, null-prototype
// object tagged "Module" so it behaves like an ES-module namespace when the
// chunk is consumed via dynamic import interop. Key order is preserved as
// emitted; do not reorder.
const index = /* @__PURE__ */ Object.freeze(/* @__PURE__ */ Object.defineProperty({
  __proto__: null,
  MMApiClient,
  MMReceiveLaborTicket,
  MMSendLaborTicket,
  MMSendPart,
  MMSendPartOperation,
  MMSendPerson,
  MMSendReason,
  MMSendResource,
  MMSendWorkOrder,
  MMSendWorkOrderOperation,
  getCompanyInfo
}, Symbol.toStringTag, { value: "Module" }));
|
|
1399
|
+
/**
 * De-duplicate rows by a composite key of the given fields (first occurrence
 * wins), optionally sorting the result ascending by `sortFields` in order.
 * @param {Array<Object>} data rows to de-duplicate
 * @param {string[]} fields field names forming the uniqueness key
 * @param {string[]} [sortFields] optional field names to sort by
 * @returns {Array<Object>} unique (and possibly sorted) rows
 */
function getUniqueRows(data, fields, sortFields) {
  const compositeKeyOf = (row) => fields.map((field) => String(row[field])).join("|");
  const seenKeys = new Set();
  const uniqueRows = [];
  for (const row of data) {
    const key = compositeKeyOf(row);
    if (seenKeys.has(key)) continue;
    seenKeys.add(key);
    uniqueRows.push(row);
  }
  if (sortFields && sortFields.length > 0) {
    uniqueRows.sort((left, right) => {
      for (const field of sortFields) {
        if (left[field] < right[field]) return -1;
        if (left[field] > right[field]) return 1;
      }
      return 0;
    });
  }
  return uniqueRows;
}
|
|
1424
|
+
/**
 * Project each row down to only the retained fields, stringifying values
 * (null/undefined values become "").
 * @param {Array<Object>} recordset rows to transform
 * @param {string[]} retainFields field names to keep
 * @returns {Array<Object>} transformed rows
 */
const removeExtraneousFields = (recordset, retainFields) => {
  return recordset.map((row) => {
    const kept = {};
    for (const key of Object.keys(row)) {
      if (retainFields.includes(key)) {
        kept[key] = row[key]?.toString() || "";
      }
    }
    return kept;
  });
};
|
|
1436
|
+
/**
 * Strip the given id field(s) from each row and stringify the remaining
 * values (null/undefined values become "").
 * @param {Array<Object>} recordset rows to transform
 * @param {string[]} idField field names to drop
 * @returns {Array<Object>} transformed rows
 */
const getPayloadWithoutIDField = (recordset, idField) => {
  return recordset.map((row) => {
    const payload = {};
    for (const key of Object.keys(row)) {
      if (idField.includes(key)) continue;
      payload[key] = row[key]?.toString() || "";
    }
    return payload;
  });
};
|
|
1448
|
+
/**
 * Return a shallow copy of `obj` with every string value trimmed;
 * non-string values pass through unchanged.
 * @param {Object} obj source object
 * @returns {Object} new object with trimmed string values
 */
const trimObjectValues = (obj) => {
  const trimmed = {};
  for (const [key, value] of Object.entries(obj)) {
    trimmed[key] = typeof value === "string" ? value.trim() : value;
  }
  return trimmed;
};
|
|
1455
|
+
/**
 * Strip every character except digits and "." from a value's string form
 * (e.g. "$1,234.56" -> "1234.56"). Throws on null/undefined input, matching
 * the original `toString()` behavior.
 * @param {*} data value to sanitize
 * @returns {string} digits-and-dots-only string
 */
const cleanupNumbers = (data) => data.toString().replace(/[^0-9.]+/g, "");
|
|
1459
|
+
/**
 * Enrich each row with `newFieldName` looked up from `externalSource`,
 * matching rows on `externalSourceFieldName`; falls back to `defaultValue`
 * when no match (or a falsy value) is found.
 * @param {Array<Object>} data rows to enrich
 * @param {Array<Object>} externalSource lookup records
 * @param {string} externalSourceFieldName join field present on both sides
 * @param {string} newFieldName field to copy from the matched record
 * @param {string} [defaultValue="-"] fallback value
 * @returns {Array<Object>} new rows with the extra field
 */
const addNewFieldFromExternalSource = (data, externalSource, externalSourceFieldName, newFieldName, defaultValue = "-") => {
  return data.map((current) => {
    const match = externalSource.find(
      (candidate) => candidate[externalSourceFieldName] === current[externalSourceFieldName]
    );
    return { ...current, [newFieldName]: match?.[newFieldName] || defaultValue };
  });
};
|
|
1471
|
+
/**
 * Enrich each row with `newFieldName`, matching `externalSource[lookupField]`
 * against the row's `currentField`; falls back to `defaultValue` when no
 * match (or a falsy value) is found.
 * NOTE(review): the copied value is `match[currentField]` — not
 * `match[newFieldName]` as in addNewFieldFromExternalSource. This may be
 * intentional (copying the matched record's own value of the join-source
 * field) but looks asymmetric; verify against callers before changing.
 * @param {Array<Object>} data rows to enrich
 * @param {Array<Object>} externalSource lookup records
 * @param {string} lookupField field on the lookup records to match against
 * @param {string} currentField field on the rows providing the match value
 * @param {string} newFieldName name of the field added to each row
 * @param {string} [defaultValue="-"] fallback value
 * @returns {Array<Object>} new rows with the extra field
 */
const addNewFieldFromLookupField = (data, externalSource, lookupField, currentField, newFieldName, defaultValue = "-") => {
  return data.map((current) => {
    const match = externalSource.find(
      (candidate) => candidate[lookupField] === current[currentField]
    );
    return { ...current, [newFieldName]: match?.[currentField] || defaultValue };
  });
};
|
|
1483
|
+
/**
 * Fetch the company timezone from the MM API and compute its current UTC
 * offset in hours.
 * @returns {Promise<{offset: number, timezone: string}>}
 * @throws {Error} when configuration is missing, the request fails, or the
 *   response lacks a company timezone
 */
const getTimezoneOffset = async () => {
  try {
    const cfg = CoreConfiguration.inst();
    const apiUrl = cfg.mmApiBaseUrl;
    const authToken = cfg.mmApiAuthToken;
    if (!apiUrl || !authToken) {
      throw new Error("Missing required configuration for timezone fetch");
    }
    const client = HTTPClientFactory.getInstance({
      baseUrl: apiUrl,
      retryAttempts: cfg.mmApiRetryAttempts
    });
    const response = await client.request({
      url: "/accounts/current",
      method: "GET",
      headers: { Authorization: `Bearer ${authToken}` }
    });
    const userInfo = response.data;
    if (!userInfo?.company?.timezone) {
      throw new Error("Unable to retrieve company timezone from API");
    }
    const timezone = userInfo.company.timezone;
    // Derive the current UTC offset (hours) by formatting "now" in both UTC
    // and the company timezone, re-parsing the wall-clock strings, and
    // diffing. Note: the offset reflects DST at the time of the call.
    const now = new Date();
    const utcWallClock = new Date(now.toLocaleString("en-US", { timeZone: "UTC" }));
    const tzWallClock = new Date(now.toLocaleString("en-US", { timeZone: timezone }));
    const offset = (tzWallClock.getTime() - utcWallClock.getTime()) / 3600000;
    return { offset, timezone };
  } catch (error) {
    throw new Error(
      `Failed to get timezone offset: ${error instanceof Error ? error.message : "Unknown error"}`
    );
  }
};
|
|
1520
|
+
/**
 * Shift a UTC timestamp by a fixed offset in hours, producing a Date whose
 * UTC fields read as the local wall-clock time.
 * @param {string|Date|number} zuluTime source timestamp (falsy -> undefined)
 * @param {number} timezoneOffset offset in hours (may be fractional/negative)
 * @returns {Date|undefined} shifted Date, or undefined for falsy input
 */
const convertToLocalTime = (zuluTime, timezoneOffset) => {
  if (!zuluTime) return void 0;
  const base = new Date(zuluTime);
  const offsetMs = timezoneOffset * 60 * 60 * 1000;
  return new Date(base.getTime() + offsetMs);
};
|
|
1525
|
+
/**
 * Turn a "YYYY-MM-DD HH:mm:ss" string into ISO-like form with an explicit
 * numeric offset suffix, e.g. ("2024-01-01 10:00:00", -5) ->
 * "2024-01-01T10:00:00-05:00". Only the first space is replaced with "T".
 * @param {string} date source date string (falsy -> undefined)
 * @param {number} timezoneOffset offset in hours (fractional supported)
 * @returns {string|undefined} formatted string, or undefined for falsy input
 */
const formatDateWithTZOffset = (date, timezoneOffset) => {
  if (!date) return void 0;
  const isoDate = date.replace(" ", "T");
  const sign = timezoneOffset < 0 ? "-" : "+";
  const magnitude = Math.abs(timezoneOffset);
  const hh = String(Math.floor(magnitude)).padStart(2, "0");
  const mm = String(Math.floor((magnitude % 1) * 60)).padStart(2, "0");
  return `${isoDate}${sign}${hh}:${mm}`;
};
|
|
1534
|
+
/**
 * Format a Date's UTC components as an ISO-8601 string with the supplied
 * offset appended. NOTE: the components are NOT shifted — callers are
 * expected to pass a Date already shifted (e.g. via convertToLocalTime).
 * @param {Date} date source Date (UTC fields are read)
 * @param {number} timezoneOffset offset in hours to render as "+HH:MM"/"-HH:MM"
 * @returns {string} e.g. "2024-01-02T03:04:05.006+05:30"
 */
const toISOWithOffset = (date, timezoneOffset) => {
  const pad = (value, width = 2) => value.toString().padStart(width, "0");
  const sign = timezoneOffset >= 0 ? "+" : "-";
  const magnitude = Math.abs(timezoneOffset);
  const offsetHours = Math.floor(magnitude);
  const offsetMinutes = Math.round((magnitude - offsetHours) * 60);
  const datePart = `${date.getUTCFullYear()}-${pad(date.getUTCMonth() + 1)}-${pad(date.getUTCDate())}`;
  const timePart = `${pad(date.getUTCHours())}:${pad(date.getUTCMinutes())}:${pad(date.getUTCSeconds())}.${pad(date.getUTCMilliseconds(), 3)}`;
  return `${datePart}T${timePart}${sign}${pad(offsetHours)}:${pad(offsetMinutes)}`;
};
|
|
1551
|
+
/**
 * Difference between two timestamps in hours, computed from wall-clock
 * time-of-day only (dates are ignored) after shifting both into the target
 * timezone. Spans where the end time-of-day is not after the start
 * (including spans crossing midnight) return 0 — this mirrors the original
 * implementation's behavior.
 * @param {string} startTime UTC timestamp (falsy -> 0)
 * @param {string} endTime UTC timestamp (falsy -> 0)
 * @param {number} timezoneOffset offset in hours
 * @returns {number} hours rounded to 3 decimals, or 0
 */
function calculateTimeDifferenceInHours(startTime, endTime, timezoneOffset) {
  if (!startTime || !endTime) return 0;
  const localStart = convertToLocalTime(startTime, timezoneOffset);
  const localEnd = convertToLocalTime(endTime, timezoneOffset);
  if (!localStart || !localEnd) return 0;
  // Seconds elapsed since midnight per the host's local clock fields.
  const secondsOfDay = (d) => d.getHours() * 3600 + d.getMinutes() * 60 + d.getSeconds();
  const startSeconds = secondsOfDay(localStart);
  const endSeconds = secondsOfDay(localEnd);
  if (endSeconds <= startSeconds) return 0;
  return Number(((endSeconds - startSeconds) / 3600).toFixed(3));
}
|
|
1569
|
+
/**
 * Seconds elapsed since midnight UTC for the given timestamp, as a string.
 * @param {string|number|Date} timestamp source (falsy -> null)
 * @returns {string|null} e.g. "3723" for 01:02:03 UTC
 */
const getSecondsOfDay = (timestamp) => {
  if (!timestamp) return null;
  const when = new Date(timestamp);
  const totalSeconds =
    when.getUTCHours() * 3600 + when.getUTCMinutes() * 60 + when.getUTCSeconds();
  return totalSeconds.toString();
};
|
|
1577
|
+
/**
 * Return a copy of `item` with each listed date field rewritten via
 * formatDateWithTZOffset; empty/whitespace-only/missing fields become "".
 * @param {Object} item source record (not mutated)
 * @param {string[]} fields names of date-string fields to rewrite
 * @param {number} timezoneOffset offset in hours
 * @returns {Object} transformed copy
 */
const applyTimezoneOffsetsToFields = (item, fields, timezoneOffset) => {
  const result = { ...item };
  for (const field of fields) {
    const raw = result[field];
    if (raw && raw.trim()) {
      result[field] = formatDateWithTZOffset(raw, timezoneOffset) || "";
    } else {
      result[field] = "";
    }
  }
  return result;
};
|
|
1592
|
+
/**
 * Fetch the company timezone offset/name from MachineMetrics and persist
 * both in the cache, retrying on failure until success or `maxRetries`.
 * @param {{maxRetries: number, retryIntervalMs: number}} [params]
 * @throws {Error} when every attempt fails
 */
const getTimezoneOffsetAndPersist = async (params = {
  maxRetries: 36e3,
  // Retry for 10 hours before giving up.
  retryIntervalMs: 1e4
}) => {
  logger.info(
    "Acquiring the timezone offset and timezone name from MachineMetrics and storing in cache"
  );
  let attempts = 0;
  let succeeded = false;
  while (!succeeded && attempts < params.maxRetries) {
    try {
      const { offset, timezone } = await getTimezoneOffset();
      logger.info(`Timezone offset: ${offset} hours, timezone: ${timezone}`);
      setTimezoneOffsetInCache(offset);
      setTimezoneNameInCache(timezone);
      succeeded = true;
    } catch (error) {
      logger.error("Error getting timezone offset:", error);
      logger.info(`Retrying in ${params.retryIntervalMs / 1e3} seconds...`);
      await new Promise((resolve) => setTimeout(resolve, params.retryIntervalMs));
      attempts++;
    }
  }
  if (!succeeded) {
    throw new Error(
      `Failed to get the timezone offset after ${params.maxRetries} retries`
    );
  }
};
|
|
1624
|
+
const DEFAULT_RECORD_TRACKING_TABLE_NAME = "record_tracking";
/**
 * Thin knex-backed persistence layer for per-entity-type tracking records
 * (last-seen values used to resume incremental syncs).
 */
class RecordTrackingManager {
  db;
  constructor() {
    // Connects using this package's local knex configuration.
    this.db = knex(config.local);
  }
  /**
   * Upsert a tracking record keyed by entityType. No-op unless both
   * lastValue and recordId are set (truthy) on the record.
   * @param {Object} record tracking row to persist
   */
  async updateRecord(record) {
    if (!record.lastValue || !record.recordId) return;
    const updatedCount = await this.db(DEFAULT_RECORD_TRACKING_TABLE_NAME)
      .where({ entityType: record.entityType })
      .update(record);
    if (updatedCount < 1) {
      // Nothing matched — fall back to insert (poor-man's upsert).
      await this.db(DEFAULT_RECORD_TRACKING_TABLE_NAME).insert(record);
    }
  }
  /**
   * Fetch all tracking rows for the given entity type.
   * @param {*} type entity type (stringified for the query)
   */
  async getTrackingRecord(type) {
    return await this.db(DEFAULT_RECORD_TRACKING_TABLE_NAME)
      .select("*")
      .where({ entityType: type.toString() });
  }
  /** Release the underlying knex connection pool. */
  async destroy() {
    return this.db.destroy();
  }
}
|
|
1645
|
+
/**
 * Batch-oriented facade over HashedCacheManager: dedupe, store, and remove
 * collections of objects by type. Operations iterate sequentially so the
 * underlying cache sees one request at a time.
 */
class BatchCacheManager {
  cacheManager;
  /**
   * @param {{ttl?: number, tableName?: string}} [options] passed through to
   *   the underlying HashedCacheManager
   */
  constructor(options) {
    this.cacheManager = new HashedCacheManager({
      ttl: options?.ttl,
      tableName: options?.tableName
    });
  }
  /**
   * Check a batch of objects against the cache and split them into
   * duplicates (unchanged since last seen) and non-duplicates (changed/new).
   * @param {*} type the type of objects
   * @param {Array<Object>} objects objects to check
   * @returns {Promise<{duplicates: Array, nonDuplicates: Array}>}
   */
  async dedupeBatch(type, objects) {
    const duplicates = [];
    const nonDuplicates = [];
    for (const object of objects) {
      const changed = await this.cacheManager.hasChanged(type, object);
      (changed ? nonDuplicates : duplicates).push(object);
    }
    return { duplicates, nonDuplicates };
  }
  /**
   * Store a batch of objects in the cache.
   * @param {*} type the type of objects
   * @param {Array<Object>} objects objects to store
   * @returns {Promise<{totalInserted: number}>} count of newly inserted entries
   */
  async storeBatch(type, objects) {
    let totalInserted = 0;
    for (const object of objects) {
      const inserted = await this.cacheManager.store(type, object);
      if (inserted) totalInserted++;
    }
    return { totalInserted };
  }
  /**
   * Remove the given objects from the cache.
   * @param {*} type the type of objects
   * @param {Array<Object>} objects objects to remove
   */
  async removeObjects(type, objects) {
    for (const object of objects) {
      await this.cacheManager.removeRecord(type, object);
    }
  }
  /** Gets all records of a specific type. */
  async getRecordsByType(type) {
    return this.cacheManager.getRecordsByType(type);
  }
  /** Removes all records of a specific type. */
  async removeRecordsByType(type) {
    return this.cacheManager.removeRecordsByType(type);
  }
  /** Clears all records from the cache. */
  async clear() {
    return this.cacheManager.clear();
  }
  /** Cleans up underlying cache resources. */
  async destroy() {
    return this.cacheManager.destroy();
  }
}
|
|
1723
|
+
/**
 * Rewrite every timestamp field on a labor ticket into local time with an
 * explicit offset suffix; missing/falsy timestamps become null.
 * NOTE: mutates the passed ticket in place (and also returns it).
 * @param {Object} laborTicket ticket whose time fields are rewritten
 * @param {number} timezoneOffset offset in hours
 * @returns {Object} the same (mutated) ticket
 */
function convertLaborTicketToLocalTimezone(laborTicket, timezoneOffset) {
  const TIME_FIELDS = [
    "clockIn",
    "clockOut",
    "transactionDate",
    "createdAt",
    "updatedAt",
    "workOrderOperationClosedDate"
  ];
  for (const field of TIME_FIELDS) {
    const shifted = convertToLocalTime(laborTicket[field], timezoneOffset);
    laborTicket[field] = shifted ? toISOWithOffset(shifted, timezoneOffset) : null;
  }
  return laborTicket;
}
|
|
1738
|
+
class LaborTicketERPSynchronizer {
|
|
1739
|
+
/**
|
|
1740
|
+
* Synchronizes updated labor tickets from MachineMetrics to an ERP system
|
|
1741
|
+
*/
|
|
1742
|
+
static async syncToERP(connectorType, connector) {
|
|
1743
|
+
try {
|
|
1744
|
+
const mmApiClient = new MMApiClient();
|
|
1745
|
+
const failedLaborTicketRefs = [];
|
|
1746
|
+
await mmApiClient.initializeCheckpoint({
|
|
1747
|
+
system: connectorType,
|
|
1748
|
+
table: "labor_tickets",
|
|
1749
|
+
checkpointType: "export",
|
|
1750
|
+
checkpointValue: {
|
|
1751
|
+
timestamp: (/* @__PURE__ */ new Date()).toISOString()
|
|
1752
|
+
}
|
|
1753
|
+
});
|
|
1754
|
+
const fallbackTimestamp = (/* @__PURE__ */ new Date()).toISOString();
|
|
1755
|
+
const laborTicketsUpdates = await mmApiClient.fetchLaborTicketUpdates({
|
|
1756
|
+
system: connectorType,
|
|
1757
|
+
checkpointType: "export"
|
|
1758
|
+
});
|
|
1759
|
+
if (laborTicketsUpdates.length === 0) {
|
|
1760
|
+
logger.info("syncLaborTicketsToERP:No updated labor tickets found");
|
|
1761
|
+
return;
|
|
1762
|
+
}
|
|
1763
|
+
logger.info(
|
|
1764
|
+
`ToERP: Found ${laborTicketsUpdates.length} Labor Ticket Ids and Refs to process`,
|
|
1765
|
+
{
|
|
1766
|
+
laborTickets: laborTicketsUpdates.map(
|
|
1767
|
+
(ticket) => ({
|
|
1768
|
+
ref: ticket.laborTicketRef,
|
|
1769
|
+
id: ticket.laborTicketId
|
|
1770
|
+
})
|
|
1771
|
+
)
|
|
1772
|
+
}
|
|
1773
|
+
);
|
|
1774
|
+
const mostRecentUpdate = laborTicketsUpdates.reduce(
|
|
1775
|
+
(latest, ticket) => {
|
|
1776
|
+
if (!latest || !ticket.updatedAt) return latest;
|
|
1777
|
+
return new Date(ticket.updatedAt) > new Date(latest) ? ticket.updatedAt : latest;
|
|
1778
|
+
},
|
|
1779
|
+
null
|
|
1780
|
+
);
|
|
1781
|
+
await Promise.all(
|
|
1782
|
+
laborTicketsUpdates.map(async (laborTicket) => {
|
|
1783
|
+
if (!laborTicket.laborTicketRef) {
|
|
1784
|
+
logger.error(
|
|
1785
|
+
"syncLaborTicketsToERP: laborTicketRef is not set for laborTicket pulled from MM:",
|
|
1786
|
+
{ laborTicket }
|
|
1787
|
+
);
|
|
1788
|
+
return void 0;
|
|
1789
|
+
}
|
|
1790
|
+
try {
|
|
1791
|
+
return await this.processLaborTicket(
|
|
1792
|
+
connector,
|
|
1793
|
+
mmApiClient,
|
|
1794
|
+
laborTicket
|
|
1795
|
+
);
|
|
1796
|
+
} catch (error) {
|
|
1797
|
+
failedLaborTicketRefs.push(laborTicket.laborTicketRef);
|
|
1798
|
+
logger.error(
|
|
1799
|
+
`syncLaborTicketsToERP: Error processing laborTicketRef ${laborTicket.laborTicketRef}:`,
|
|
1800
|
+
{ error }
|
|
1801
|
+
);
|
|
1802
|
+
return void 0;
|
|
1803
|
+
}
|
|
1804
|
+
})
|
|
1805
|
+
);
|
|
1806
|
+
logger.info(
|
|
1807
|
+
`syncLaborTicketsToERP: ${failedLaborTicketRefs.length} failed labor ticket ids`
|
|
1808
|
+
);
|
|
1809
|
+
if (failedLaborTicketRefs.length > 0) {
|
|
1810
|
+
logger.info(
|
|
1811
|
+
`syncLaborTicketsToERP: Reporting ${failedLaborTicketRefs.length} labor ticket failures:`,
|
|
1812
|
+
{
|
|
1813
|
+
failedLaborTicketRefs
|
|
1814
|
+
}
|
|
1815
|
+
);
|
|
1816
|
+
const addFailedResult = await mmApiClient.addFailedLaborTicketRefs(
|
|
1817
|
+
connectorType,
|
|
1818
|
+
failedLaborTicketRefs
|
|
1819
|
+
);
|
|
1820
|
+
logger.info("syncLaborTicketsToERP: addFailedResult:", {
|
|
1821
|
+
addFailedResult
|
|
1822
|
+
});
|
|
1823
|
+
}
|
|
1824
|
+
mmApiClient.saveCheckpoint({
|
|
1825
|
+
system: connectorType,
|
|
1826
|
+
table: "labor_tickets",
|
|
1827
|
+
checkpointType: "export",
|
|
1828
|
+
checkpointValue: {
|
|
1829
|
+
timestamp: mostRecentUpdate || fallbackTimestamp
|
|
1830
|
+
}
|
|
1831
|
+
});
|
|
1832
|
+
} catch (error) {
|
|
1833
|
+
logger.error("syncLaborTicketsToERP: Error:", error);
|
|
1834
|
+
}
|
|
1835
|
+
}
|
|
1836
|
+
/**
|
|
1837
|
+
* Retries labor tickets that have failed to be created or updated in the ERP during the sync
|
|
1838
|
+
*/
|
|
1839
|
+
static async retryFailed(connectorType, connector) {
|
|
1840
|
+
try {
|
|
1841
|
+
const mmApiClient = new MMApiClient();
|
|
1842
|
+
const successLaborTicketIds = [];
|
|
1843
|
+
const laborTickets = await mmApiClient.fetchFailedLaborTickets(connectorType);
|
|
1844
|
+
if (laborTickets.length === 0) {
|
|
1845
|
+
logger.info("retryFailedLaborTickets: No failed labor tickets found");
|
|
1846
|
+
return;
|
|
1847
|
+
}
|
|
1848
|
+
logger.info(
|
|
1849
|
+
"retryFailedLaborTickets: Failed Labor Tickets count:" + laborTickets.length
|
|
1850
|
+
);
|
|
1851
|
+
await Promise.all(
|
|
1852
|
+
laborTickets.map(async (laborTicket) => {
|
|
1853
|
+
if (!laborTicket.laborTicketRef) {
|
|
1854
|
+
logger.error(
|
|
1855
|
+
"retryFailedLaborTickets: laborTicketRef is not set for laborTicket pulled from MM:",
|
|
1856
|
+
{ laborTicket }
|
|
1857
|
+
);
|
|
1858
|
+
return void 0;
|
|
1859
|
+
}
|
|
1860
|
+
try {
|
|
1861
|
+
const laborTicketResult = await this.processLaborTicket(
|
|
1862
|
+
connector,
|
|
1863
|
+
mmApiClient,
|
|
1864
|
+
laborTicket
|
|
1865
|
+
);
|
|
1866
|
+
successLaborTicketIds.push(laborTicket.laborTicketRef);
|
|
1867
|
+
return laborTicketResult;
|
|
1868
|
+
} catch (error) {
|
|
1869
|
+
logger.error(
|
|
1870
|
+
"retryFailedLaborTickets: Error processing laborTicketRef:",
|
|
1871
|
+
{ laborTicketRef: laborTicket.laborTicketRef, error }
|
|
1872
|
+
);
|
|
1873
|
+
return void 0;
|
|
1874
|
+
}
|
|
1875
|
+
})
|
|
1876
|
+
);
|
|
1877
|
+
if (successLaborTicketIds.length > 0) {
|
|
1878
|
+
logger.info("Deleting failed labor ticket ids:", {
|
|
1879
|
+
successLaborTicketIds
|
|
1880
|
+
});
|
|
1881
|
+
const deleteFailedResult = await mmApiClient.deleteFailedLaborTicketIds(
|
|
1882
|
+
connectorType,
|
|
1883
|
+
successLaborTicketIds
|
|
1884
|
+
);
|
|
1885
|
+
logger.info("deleteFailedResult:", { deleteFailedResult });
|
|
1886
|
+
}
|
|
1887
|
+
} catch (error) {
|
|
1888
|
+
logger.error("retryFailedLaborTickets: Error:", error);
|
|
1889
|
+
}
|
|
1890
|
+
}
|
|
1891
|
+
// ============================================================================
|
|
1892
|
+
// PRIVATE HELPER METHODS
|
|
1893
|
+
// ============================================================================
|
|
1894
|
+
static async writeLaborTicketIdToMM(mmApiClient, laborTicket, laborTicketResult) {
|
|
1895
|
+
const updateRefAPIResponse = await mmApiClient.updateLaborTicketIdByRef(
|
|
1896
|
+
laborTicket.laborTicketRef,
|
|
1897
|
+
laborTicketResult.laborTicketId
|
|
1898
|
+
);
|
|
1899
|
+
logger.info(
|
|
1900
|
+
`Updated laborTicketId ${laborTicketResult.laborTicketId} for laborTicketRef ${laborTicket.laborTicketRef} in MM:`,
|
|
1901
|
+
{ updateRefAPIResponse }
|
|
1902
|
+
);
|
|
1903
|
+
}
|
|
1904
|
+
/**
 * Creates or updates a single labor ticket in the ERP.
 * Decides the operation by presence of an ERP id: no laborTicketId means
 * "create" (and the new ERP uid is written back to MM), otherwise "update".
 *
 * @param connector ERP connector exposing createLaborTicketInERP / updateLaborTicketInERP
 * @param mmApiClient MM API client used to persist the new ERP id on create
 * @param laborTicket the ticket record pulled from MM
 * @returns the update result on the update path; on the create path, the
 *          timezone-converted ticket assigned below (see review notes)
 */
static async processLaborTicket(connector, mmApiClient, laborTicket) {
  let laborTicketResult;
  // NOTE(review): the timezone-converted value is stored in laborTicketResult,
  // but the ORIGINAL laborTicket object is what gets passed to the ERP
  // create/update calls below. This only makes sense if
  // convertLaborTicketToLocalTimezone mutates its argument in place — confirm;
  // otherwise the conversion is effectively discarded.
  laborTicketResult = convertLaborTicketToLocalTimezone(
    laborTicket,
    getCachedTimezoneOffset()
  );
  logger.info(
    `processing laborTicket, id=${laborTicket.laborTicketId}, ref=${laborTicket.laborTicketRef}`
  );
  logger.debug({ laborTicket });
  if (!laborTicket.laborTicketId) {
    // Create path: ERP returns the created ticket plus its ERP uid; the uid is
    // attached and written back to MM so future syncs take the update path.
    const { laborTicket: laborTicketResult2, erpUid } = await connector.createLaborTicketInERP(laborTicket);
    laborTicketResult2.laborTicketId = erpUid;
    await this.writeLaborTicketIdToMM(
      mmApiClient,
      laborTicket,
      laborTicketResult2
    );
    // NOTE(review): laborTicketResult is NOT reassigned here, so the value
    // returned on the create path is the converted input, not the ticket the
    // ERP actually created (laborTicketResult2) — confirm this is intended.
  } else {
    laborTicketResult = await connector.updateLaborTicketInERP(laborTicket);
  }
  logger.info("ToERP: laborTicket update result:", {
    laborTicketResult: laborTicketResult || "Failed to create/update labor ticket",
    laborTicketRef: laborTicket.laborTicketRef,
    // laborTicket.laborTicketId is still unset right after the create branch
    // (only the laborTicketResult2 copy was stamped), so this reports "create".
    operation: laborTicket.laborTicketId ? "update" : "create"
  });
  return laborTicketResult;
}
|
|
1932
|
+
}
|
|
1933
|
+
class EntityTransformer {
  // ============================================================================
  // PUBLIC INTERFACE METHODS
  // ============================================================================
  /**
   * Extracts the primary key from a record based on entity type
   * @param entityType The type of entity being processed
   * @param record The record to extract primary key from
   * @returns A string representation of the primary key
   * @throws Error if primary key fields are missing or invalid
   */
  static extractPrimaryKey(entityType, record) {
    switch (entityType) {
      case ERPObjType.PERSONS:
        return this.extractPersonKey(record);
      case ERPObjType.RESOURCES:
        return this.extractResourceKey(record);
      case ERPObjType.PARTS:
        return this.extractPartKey(record);
      case ERPObjType.PART_OPERATION:
        return this.extractPartOperationKey(record);
      case ERPObjType.WORK_ORDERS:
        return this.extractWorkOrderKey(record);
      case ERPObjType.WORK_ORDER_OPERATIONS:
        return this.extractWorkOrderOperationKey(record);
      case ERPObjType.LABOR_TICKETS:
        return this.extractLaborTicketKey(record);
      case ERPObjType.REASONS:
        return this.extractReasonKey(record);
      default:
        throw new Error(
          `Unsupported entity type for primary key extraction: ${entityType}`
        );
    }
  }
  /**
   * Reconstructs a typed object from plain data based on entity type
   * @param entityType The type of entity being processed
   * @param plainData The plain data object containing entity fields
   * @returns A properly typed IToRESTApiObject instance
   * @throws Error if the entity type is not supported
   */
  static reconstructFromPlainData(entityType, plainData) {
    switch (entityType) {
      case ERPObjType.PERSONS:
        return this.reconstructPerson(plainData);
      case ERPObjType.RESOURCES:
        return this.reconstructResource(plainData);
      case ERPObjType.PARTS:
        return this.reconstructPart(plainData);
      case ERPObjType.PART_OPERATION:
        return this.reconstructPartOperation(plainData);
      case ERPObjType.WORK_ORDERS:
        return this.reconstructWorkOrder(plainData);
      case ERPObjType.WORK_ORDER_OPERATIONS:
        return this.reconstructWorkOrderOperation(plainData);
      case ERPObjType.LABOR_TICKETS:
        return this.reconstructLaborTicket(plainData);
      case ERPObjType.REASONS:
        return this.reconstructReason(plainData);
      default:
        throw new Error(
          `Unsupported entity type for reconstruction: ${entityType}`
        );
    }
  }
  /**
   * Reconstructs multiple typed objects from plain data based on entity type
   * @param entityType The type of entity being processed
   * @param plainDataArray Array of plain data objects containing entity fields
   * @returns Array of properly typed IToRESTApiObject instances
   */
  static reconstructMultipleFromPlainData(entityType, plainDataArray) {
    return plainDataArray.map(
      (plainData) => this.reconstructFromPlainData(entityType, plainData)
    );
  }
  // ============================================================================
  // PRIVATE ENTITY-SPECIFIC KEY EXTRACTION METHODS
  // ============================================================================
  static extractPersonKey(record) {
    const restApiObject = record.toRESTApiObject();
    return this.validateAndExtractKey(restApiObject, ["personId"], "PERSONS", [
      "personId"
    ]);
  }
  static extractResourceKey(record) {
    const restApiObject = record.toRESTApiObject();
    return this.validateAndExtractKey(
      restApiObject,
      ["resourceId"],
      "RESOURCES",
      ["resourceId"]
    );
  }
  static extractPartKey(record) {
    const restApiObject = record.toRESTApiObject();
    return this.validateAndExtractKey(
      restApiObject,
      ["partNumber", "partRevision", "method"],
      "PARTS",
      ["partNumber"]
    );
  }
  static extractPartOperationKey(record) {
    const restApiObject = record.toRESTApiObject();
    return this.validateAndExtractKey(
      restApiObject,
      ["partNumber", "partRevision", "method", "sequenceNumber"],
      "PART_OPERATION",
      ["partNumber", "sequenceNumber"]
    );
  }
  static extractWorkOrderKey(record) {
    const restApiObject = record.toRESTApiObject();
    return this.validateAndExtractKey(
      restApiObject,
      ["workOrderId", "lot", "split", "sub"],
      "WORK_ORDERS",
      ["workOrderId"]
    );
  }
  static extractWorkOrderOperationKey(record) {
    const restApiObject = record.toRESTApiObject();
    return this.validateAndExtractKey(
      restApiObject,
      ["workOrderId", "lot", "split", "sub", "sequenceNumber"],
      "WORK_ORDER_OPERATIONS",
      ["workOrderId", "sequenceNumber"]
    );
  }
  static extractLaborTicketKey(record) {
    const restApiObject = record.toRESTApiObject();
    // Prefer the ERP-assigned id when present; otherwise fall back to the
    // natural composite key of the ticket.
    if (restApiObject.laborTicketId) {
      return this.validateAndExtractKey(
        restApiObject,
        ["laborTicketId"],
        "LABOR_TICKETS",
        ["laborTicketId"]
      );
    } else {
      return this.validateAndExtractKey(
        restApiObject,
        [
          "workOrderId",
          "lot",
          "split",
          "sub",
          "sequenceNumber",
          "resourceId",
          "personId"
        ],
        "LABOR_TICKETS",
        ["workOrderId", "sequenceNumber", "resourceId", "personId"]
      );
    }
  }
  static extractReasonKey(record) {
    const restApiObject = record.toRESTApiObject();
    return this.validateAndExtractKey(restApiObject, ["reasonId"], "REASONS", [
      "reasonId"
    ]);
  }
  // ============================================================================
  // PRIVATE ENTITY-SPECIFIC RECONSTRUCTION METHODS
  // ============================================================================
  static reconstructPerson(plainData) {
    return MMSendPerson.fromPlainObject(plainData);
  }
  static reconstructResource(plainData) {
    return MMSendResource.fromPlainObject(plainData);
  }
  static reconstructPart(plainData) {
    return MMSendPart.fromPlainObject(plainData);
  }
  static reconstructPartOperation(plainData) {
    return MMSendPartOperation.fromPlainObject(plainData);
  }
  static reconstructWorkOrder(plainData) {
    return MMSendWorkOrder.fromPlainObject(plainData);
  }
  static reconstructWorkOrderOperation(plainData) {
    return MMSendWorkOrderOperation.fromPlainObject(plainData);
  }
  static reconstructLaborTicket(plainData) {
    return MMSendLaborTicket.fromPlainObject(plainData);
  }
  static reconstructReason(plainData) {
    return MMSendReason.fromPlainObject(plainData);
  }
  // ============================================================================
  // SHARED UTILITY METHODS
  // ============================================================================
  /**
   * Helper method to validate and extract primary key components
   * @param restApiObject The REST API object representation
   * @param keyFields The fields that make up the primary key
   * @param entityTypeName The entity type name for error messages
   * @param requiredFields The fields that must be non-empty (others can be empty)
   * @returns Combined primary key string ("|"-joined field values)
   * @throws Error if any required field is missing or null
   */
  static validateAndExtractKey(restApiObject, keyFields, entityTypeName, requiredFields) {
    const keyValues = [];
    for (const field of keyFields) {
      const value = restApiObject[field];
      if (requiredFields.includes(field)) {
        if (value === null || value === void 0 || value === "") {
          throw new Error(
            `Primary key field '${field}' is missing or empty for entity type ${entityTypeName}`
          );
        }
      }
      // BUGFIX: use ?? instead of || so legitimate falsy key values (e.g. a
      // sequenceNumber of 0, which passes the required-field check above) are
      // kept instead of being collapsed to "" and colliding with other keys.
      // Only null/undefined become the empty-string placeholder.
      keyValues.push(value ?? "");
    }
    return keyValues.join("|");
  }
}
|
|
2151
|
+
// Parses MM API partial-failure responses (HTTP 207) and structured 500
// exceptions into a uniform { errorCount, batchErrors } shape, and helps
// cache the records that did succeed.
class ErrorProcessor {
  /**
   * Creates a set of primary keys for all failed records from batch errors
   * @param entityType The type of entity being processed
   * @param batchErrors Array of batch errors containing failed entities
   * @returns Set of primary keys for failed records
   */
  static createFailedRecordKeySet(entityType, batchErrors) {
    const failedKeySet = /* @__PURE__ */ new Set();
    batchErrors.forEach((batchError) => {
      batchError.affectedEntities.forEach((affectedEntity) => {
        try {
          const primaryKey = EntityTransformer.extractPrimaryKey(
            entityType,
            affectedEntity
          );
          failedKeySet.add(primaryKey);
        } catch (error) {
          // Best-effort: an unkeyable error entity is logged and skipped so
          // the rest of the failed set can still be built.
          logger.warn(
            `Failed to extract primary key from error entity: ${error}`
          );
        }
      });
    });
    return failedKeySet;
  }
  /**
   * Filters out failed records, returning only successful ones
   * @param entityType The type of entity being processed
   * @param allRecords All records (typed objects) that were sent to the API
   * @param failedKeySet Set of primary keys for records that failed
   * @returns Array of records that succeeded
   */
  static filterSuccessfulRecords(entityType, allRecords, failedKeySet) {
    const successfulRecords = [];
    allRecords.forEach((record) => {
      try {
        const primaryKey = EntityTransformer.extractPrimaryKey(
          entityType,
          record
        );
        if (!failedKeySet.has(primaryKey)) {
          successfulRecords.push(record);
        }
      } catch (error) {
        // A record whose key cannot be extracted is treated as NOT successful
        // (it is dropped from the result), erring on the side of re-sending.
        logger.warn(
          `Failed to extract primary key from record during filtering: ${error}`
        );
      }
    });
    return successfulRecords;
  }
  /**
   * Orchestrates the caching of successful records on partial failure
   * @param entityType The type of entity being processed
   * @param toProcess All records that were sent to the API (all are now guaranteed to be typed objects)
   * @param batchErrors Array of batch errors containing failed entities
   * @param batchCacheManager The cache manager instance
   */
  static async cacheSuccessfulRecordsOnPartialFailure(entityType, toProcess, batchErrors, batchCacheManager) {
    const failedKeySet = this.createFailedRecordKeySet(entityType, batchErrors);
    const successfulRecords = this.filterSuccessfulRecords(
      entityType,
      toProcess,
      failedKeySet
    );
    logger.info(
      `Caching ${successfulRecords.length} successful records out of ${toProcess.length} total records`
    );
    if (successfulRecords.length > 0) {
      await batchCacheManager.storeBatch(entityType, successfulRecords);
    }
  }
  /**
   * Extracts error count and batch errors from MM API response for partial failures (HTTP 207)
   * This supports all entities, including the slightly different format for labor tickets.
   * In case of labor tickets, the updateErrors and insertErrors arrays are combined into affectedEntities.
   * @param mmApiResponse The full MM API response object
   * @param entityType The type of entity being processed (determines response structure)
   * @returns Object containing errorCount and batchErrors
   * See MM207NonLaborTicketResponse and MM207LaborTicketResponse for response structure details
   */
  static extractErrorDetails(mmApiResponse, entityType) {
    const data = mmApiResponse.data;
    let allErrors = [];
    if (entityType === ERPObjType.LABOR_TICKETS) {
      // Labor tickets report update and insert failures in separate arrays.
      const updateErrors = data?.updateErrors || [];
      const insertErrors = data?.insertErrors || [];
      if (!data?.updateErrors && !data?.insertErrors) {
        logger.warn(
          "Labor tickets partial success response missing both updateErrors and insertErrors arrays"
        );
      }
      allErrors = [...updateErrors, ...insertErrors];
    } else {
      const errors = data?.errors || [];
      if (!data?.errors) {
        logger.warn(
          `${entityType} partial success response missing errors array`
        );
      }
      allErrors = errors;
    }
    // Keep only errors that carry affected entities, and normalize each
    // entity to a typed object (reconstructing from plain data if needed).
    const batchErrors = allErrors.filter(
      (error) => {
        return error.batchData && error.batchData.length > 0;
      }
    ).map(
      (error) => {
        const typedErrorEntities = (error.batchData || []).map((entity) => {
          // Duck-type check: already a typed object if it exposes toRESTApiObject().
          if (typeof entity === "object" && entity !== null && "toRESTApiObject" in entity && typeof entity.toRESTApiObject === "function") {
            return entity;
          } else {
            return EntityTransformer.reconstructFromPlainData(
              entityType,
              entity
            );
          }
        });
        return {
          message: error.message,
          affectedEntities: typedErrorEntities
        };
      }
    );
    // Total failed-entity count across all batch errors.
    const errorCount = batchErrors.reduce((total, batchError) => {
      return total + batchError.affectedEntities.length;
    }, 0);
    return {
      errorCount,
      batchErrors
    };
  }
  /**
   * Extracts error details from a 500 HTTP exception when it contains structured error data
   * @param exception The caught exception from MM API call
   * @param entityType The type of entity being processed
   * @returns Object containing errorCount and batchErrors, or null if not a structured 500 error
   * See MM500NonLaborTicketException and MM500LaborTicketException for exception structure details
   */
  static extractErrorDetailsFrom500Exception(exception, entityType) {
    try {
      const ex = exception;
      const data = ex?.data;
      logger.info(
        "writeEntitiesToMM: Analyzing 500 exception structure for diagnostic purposes",
        {
          status: ex?.status,
          code: ex?.code,
          hasResponseData: !!data,
          responseDataKeys: data ? Object.keys(data) : [],
          errorMessage: data?.error,
          hasMessageObject: !!data?.message,
          messageObjectKeys: data?.message ? Object.keys(data.message) : [],
          entityType,
          exceptionType: typeof exception,
          exceptionKeys: ex ? Object.keys(ex) : []
        }
      );
      // A "structured" 500 is recognized by status 500 plus an error string
      // starting with "Failed to import"; anything else is left to the caller.
      if (ex?.status !== 500 || typeof data?.error !== "string" || !data.error.startsWith("Failed to import")) {
        logger.info(
          "writeEntitiesToMM: Not a structured 500 error - will re-throw exception as-is",
          {
            status: ex?.status,
            errorMessage: data?.error,
            expectedStatus: 500,
            expectedMessagePrefix: "Failed to import"
          }
        );
        return null;
      }
      logger.info(
        "writeEntitiesToMM: Detected structured 500 error - extracting error details"
      );
      const messageObject = data?.message;
      if (!messageObject) {
        logger.warn(
          "writeEntitiesToMM: Structured 500 error missing message object"
        );
        return null;
      }
      let allErrors = [];
      if (entityType === ERPObjType.LABOR_TICKETS) {
        // Defensive Array.isArray checks: 500 payload shapes are less trusted
        // than 207 responses.
        const updateErrors = Array.isArray(messageObject?.updateErrors) ? messageObject.updateErrors : [];
        const insertErrors = Array.isArray(messageObject?.insertErrors) ? messageObject.insertErrors : [];
        logger.info("writeEntitiesToMM: Processing labor tickets 500 error", {
          updateErrorsCount: updateErrors.length,
          insertErrorsCount: insertErrors.length
        });
        if (updateErrors.length === 0 && insertErrors.length === 0) {
          logger.warn(
            "writeEntitiesToMM: Labor tickets 500 error missing both updateErrors and insertErrors arrays"
          );
        }
        allErrors = [...updateErrors, ...insertErrors];
      } else {
        const errors = Array.isArray(messageObject?.errors) ? messageObject.errors : [];
        logger.info("writeEntitiesToMM: Processing regular entity 500 error", {
          errorsCount: errors.length
        });
        if (errors.length === 0) {
          logger.warn(
            `writeEntitiesToMM: ${entityType} 500 error missing errors array`
          );
        }
        allErrors = errors;
      }
      // Same normalization as extractErrorDetails: keep errors with batchData,
      // coerce each affected entity into a typed object.
      const batchErrors = allErrors.filter((error) => {
        const err = error;
        return Array.isArray(err?.batchData) && err.batchData.length > 0;
      }).map((error) => {
        const err = error;
        const batchData = err?.batchData;
        const typedErrorEntities = (batchData || []).map((entity) => {
          if (typeof entity === "object" && entity !== null && "toRESTApiObject" in entity && typeof entity.toRESTApiObject === "function") {
            return entity;
          } else {
            return EntityTransformer.reconstructFromPlainData(
              entityType,
              entity
            );
          }
        });
        return {
          message: typeof err?.message === "string" ? err.message : "Unknown error",
          affectedEntities: typedErrorEntities
        };
      });
      const errorCount = batchErrors.reduce((total, batchError) => {
        return total + batchError.affectedEntities.length;
      }, 0);
      logger.info("writeEntitiesToMM: Extracted 500 error details", {
        batchErrorsCount: batchErrors.length,
        totalErrorCount: errorCount,
        entityType
      });
      return {
        errorCount,
        batchErrors
      };
    } catch (error) {
      // Parsing the exception must never itself throw; fall back to null so
      // the caller re-throws the original exception unchanged.
      logger.error(
        "writeEntitiesToMM: Failed to parse 500 exception structure safely",
        {
          error: error instanceof Error ? error.message : String(error),
          exceptionType: typeof exception,
          entityType
        }
      );
      return null;
    }
  }
}
|
|
2404
|
+
// Orchestrates sending batches of ERP entities to the MM API: local dedup
// against a cache, dispatch to the entity-specific MM endpoint, and
// translation of partial failures into MMBatchValidationError.
class MMEntityProcessor {
  /**
   * Writes entities to MM API with deduplication and caching
   * @param entityType ERPObjType value selecting the MM endpoint
   * @param mmRecords typed records to send
   * @param batchCacheManager optional cache manager; when absent no dedup/caching occurs
   * @returns result summary { message, upsertedEntities, localDedupeCount, apiDedupeCount }
   * @throws MMBatchValidationError on partial failure (207) or structured 500
   */
  static async writeEntities(entityType, mmRecords, batchCacheManager) {
    const { toProcess, result } = await this._prepareAndDedupe(
      entityType,
      mmRecords,
      batchCacheManager
    );
    if (toProcess.length === 0) {
      result.message = "All records were deduplicated locally - no records sent to MM API";
      return result;
    }
    let mmApiResponse;
    try {
      mmApiResponse = await this._sendToAPI(entityType, toProcess);
    } catch (exception) {
      // _handleException always throws, so execution never continues past
      // this catch with mmApiResponse undefined.
      this._handleException(exception, entityType, result);
    }
    return await this._handleResponse(
      mmApiResponse,
      entityType,
      toProcess,
      result,
      batchCacheManager
    );
  }
  // ============================================================================
  // PRIVATE HELPER METHODS
  // ============================================================================
  /**
   * Deduplicates records against cache
   * Returns the records to process and the result object
   * If the batchCacheManager is provided, the records are deduplicated against the cache
   * If the batchCacheManager is not provided, the records are not deduplicated
   */
  static async _prepareAndDedupe(entityType, mmRecords, batchCacheManager) {
    let toProcess = [];
    const result = {
      message: "",
      upsertedEntities: 0,
      localDedupeCount: 0,
      apiDedupeCount: 0
    };
    if (batchCacheManager) {
      const { nonDuplicates, duplicates } = await batchCacheManager.dedupeBatch(entityType, mmRecords);
      toProcess = nonDuplicates;
      result.localDedupeCount = duplicates.length;
    } else {
      toProcess = mmRecords;
    }
    return { toProcess, result };
  }
  /**
   * Sends records to MM API
   * Dispatches to the entity-specific MMApiClient method.
   * @throws Error for an unrecognized entity type
   */
  static async _sendToAPI(entityType, toProcess) {
    const mmApiClient = new MMApiClient();
    switch (entityType) {
      case ERPObjType.PERSONS:
        return await mmApiClient.sendPersonsToMM(toProcess);
      case ERPObjType.RESOURCES:
        return await mmApiClient.sendResourcesToMM(
          toProcess
        );
      case ERPObjType.PARTS:
        return await mmApiClient.sendPartsToMM(toProcess);
      case ERPObjType.PART_OPERATION:
        return await mmApiClient.sendPartOperationsToMM(
          toProcess
        );
      case ERPObjType.WORK_ORDERS:
        return await mmApiClient.sendWorkOrdersToMM(
          toProcess
        );
      case ERPObjType.WORK_ORDER_OPERATIONS:
        return await mmApiClient.sendWorkOrderOperationsToMM(
          toProcess
        );
      case ERPObjType.LABOR_TICKETS:
        return await mmApiClient.sendLaborTicketsToMM(
          toProcess
        );
      case ERPObjType.REASONS:
        return await mmApiClient.sendReasonsToMM(toProcess);
      default:
        throw new Error(`Unknown entity type: ${entityType}`);
    }
  }
  /**
   * Handles exceptions from API calls, converting structured 500 errors to MMBatchValidationError
   * @throws MMBatchValidationError for structured 500 errors
   * @throws The original exception for all other errors
   */
  static _handleException(exception, entityType, result) {
    const structuredCompleteErrorSet = ErrorProcessor.extractErrorDetailsFrom500Exception(exception, entityType);
    if (structuredCompleteErrorSet) {
      const ex = exception;
      const data = ex?.data;
      const errorMessage = typeof data?.error === "string" ? data.error : "All entities failed to import";
      // Structured 500 => every entity failed: zero upserts, zero API dedup.
      throw new MMBatchValidationError({
        message: errorMessage,
        upsertedEntities: 0,
        localDedupeCount: result.localDedupeCount,
        apiDedupeCount: 0,
        errorCount: structuredCompleteErrorSet.errorCount,
        httpStatus: exception.status,
        batchErrors: structuredCompleteErrorSet.batchErrors
      });
    }
    throw exception;
  }
  /**
   * Handles the MM API response based on HTTP status code, caching successful records
   * and throwing exceptions for partial failures
   *
   * @returns The final result for success cases
   * @throws MMBatchValidationError for partial success cases
   * @throws Error for unknown status codes
   */
  static async _handleResponse(mmApiResponse, entityType, toProcess, result, batchCacheManager) {
    if (mmApiResponse.httpStatus === 200) {
      if (batchCacheManager) {
        await batchCacheManager.storeBatch(entityType, toProcess);
      }
      // Labor tickets report updated/inserted counts; other entities report
      // a single affectedRows count.
      if (entityType === ERPObjType.LABOR_TICKETS) {
        const success = mmApiResponse;
        result.message = success.message || "Entities processed successfully";
        result.upsertedEntities = (success.updated || 0) + (success.inserted || 0);
      } else {
        const success = mmApiResponse;
        result.message = success.message || "Entities processed successfully";
        result.upsertedEntities = success.affectedRows || 0;
      }
      // Whatever was sent but not upserted was deduplicated server-side.
      result.apiDedupeCount = toProcess.length - result.upsertedEntities;
      return result;
    } else if (mmApiResponse.httpStatus === 207) {
      const partialResponse = mmApiResponse;
      const { errorCount, batchErrors } = ErrorProcessor.extractErrorDetails(
        partialResponse,
        entityType
      );
      if (batchCacheManager) {
        await ErrorProcessor.cacheSuccessfulRecordsOnPartialFailure(
          entityType,
          toProcess,
          batchErrors,
          batchCacheManager
        );
      }
      let upsertedEntities = 0;
      if (entityType === ERPObjType.LABOR_TICKETS) {
        const partial = partialResponse;
        upsertedEntities = (partial.data.updated || 0) + (partial.data.inserted || 0);
      } else {
        const partial = partialResponse;
        upsertedEntities = partial.data.affectedRows || 0;
      }
      // Partial success is surfaced as an exception carrying the full tally.
      throw new MMBatchValidationError({
        message: partialResponse.message || "Entities processed with partial failures",
        upsertedEntities,
        localDedupeCount: result.localDedupeCount,
        apiDedupeCount: toProcess.length - upsertedEntities - errorCount,
        errorCount,
        httpStatus: mmApiResponse.httpStatus,
        batchErrors
      });
    } else {
      throw new Error(
        `writeEntitiesToMM: Unknown HTTP status code: ${mmApiResponse.httpStatus}. An exception was expected.`
      );
    }
  }
}
|
|
2579
|
+
// Bundler-generated ES-module namespace object for the MMEntityProcessor chunk:
// null prototype, frozen, and tagged as "Module" so it behaves like a real
// `import * as ns` namespace at runtime.
const mmEntityProcessor = /* @__PURE__ */ Object.freeze(/* @__PURE__ */ Object.defineProperty({
  __proto__: null,
  MMEntityProcessor
}, Symbol.toStringTag, { value: "Module" }));
|
|
2583
|
+
/**
 * Error thrown when an MM batch write fails validation, either partially
 * (HTTP 207) or completely (HTTP 500). Carries the processing metrics and
 * per-batch error details so callers can report progress and decide on retry.
 */
class MMBatchValidationError extends Error {
  upsertedEntities;
  localDedupeCount;
  apiDedupeCount;
  errorCount;
  httpStatus;
  batchErrors;
  /**
   * @param options Object with message plus the metric fields copied onto the instance:
   *   upsertedEntities, localDedupeCount, apiDedupeCount, errorCount, httpStatus, batchErrors.
   */
  constructor(options) {
    const {
      message,
      upsertedEntities,
      localDedupeCount,
      apiDedupeCount,
      errorCount,
      httpStatus,
      batchErrors
    } = options;
    super(message);
    this.name = "MMBatchValidationError";
    this.upsertedEntities = upsertedEntities;
    this.localDedupeCount = localDedupeCount;
    this.apiDedupeCount = apiDedupeCount;
    this.errorCount = errorCount;
    this.httpStatus = httpStatus;
    this.batchErrors = batchErrors;
  }
}
|
|
2601
|
+
/**
 * High-level, connector-agnostic drivers for the standard ERP <-> MM sync flows.
 * Each public method delegates to a specialized processor/synchronizer; this
 * class is the stable entry point connector implementations call into.
 */
class StandardProcessDrivers {
  /**
   * Synchronizes updated labor tickets from MachineMetrics to an ERP system:
   *
   * Initializes a checkpoint that tracks the last time labor tickets were synced,
   * fetches labor tickets from MM and converts them to the company timezone,
   * creates or updates the labor ticket in the ERP via connector-specific implementations,
   * updates the labor ticket id in MM on newly created labor tickets,
   * and saves the checkpoint
   */
  static async syncLaborTicketsToERP(connectorType, connector) {
    return LaborTicketERPSynchronizer.syncToERP(connectorType, connector);
  }
  /**
   * Retries labor tickets that have failed to be created or updated in the ERP during the sync:
   *
   * Fetches failed labor tickets from MM, processes them, and updates the labor ticket id in MM.
   * If the labor ticket is successfully created or updated, it is added to the list of successful
   * labor ticket ids, which are then deleted from the list of failed labor ticket ids.
   */
  static async retryFailedLaborTickets(connectorType, connector) {
    return LaborTicketERPSynchronizer.retryFailed(connectorType, connector);
  }
  /**
   * Writes a batch of records to MM for a specific entity type by:
   * 1. Deduplicating against cached records (localDedupeCount)
   * 2. Sending non-duplicate records to the MM API (apiDedupeCount)
   * 3. Storing successful records in cache
   *
   * Error-handling contract:
   * - HTTP 200 resolves with a WriteEntitiesToMMResult (message plus upsertedEntities,
   *   localDedupeCount, apiDedupeCount metrics).
   * - HTTP 207 (partial success) and validation-driven HTTP 500 (complete failure)
   *   reject with MMBatchValidationError carrying the same metrics plus errorCount,
   *   httpStatus, and batchErrors. Note: each batch error contains ALL entities from
   *   the failing batch, not necessarily just the failed ones.
   * - Other failures (network, authentication, ...) reject with the underlying Error.
   *
   * Example usage:
   * ```
   * try {
   *   const result = await StandardProcessDrivers.writeEntitiesToMM('WorkOrders', records, cacheMgr);
   *   console.log(`Success: ${result.message} (${result.upsertedEntities} upserted)`);
   * } catch (error) {
   *   if (error instanceof MMBatchValidationError) {
   *     // Inspect error.httpStatus (207 = partial, 500 = complete validation failure),
   *     // error.errorCount, and error.batchErrors[i].affectedEntities for retry/logging.
   *   } else {
   *     throw error; // network/auth/etc. - re-throw for upstream handling
   *   }
   * }
   * ```
   *
   * @param entityType The type of entity being processed
   * @param mmRecords The records to process
   * @param batchCacheManager The batch cache manager instance; pass in null if caching is not desired
   * @returns WriteEntitiesToMMResult on complete success (HTTP 200)
   * @throws MMBatchValidationError on partial success (HTTP 207) or complete failure (HTTP 500) with structured error details
   * @throws Error on other underlying issues (network, authentication, etc.)
   */
  static async writeEntitiesToMM(entityType, mmRecords, batchCacheManager) {
    return MMEntityProcessor.writeEntities(entityType, mmRecords, batchCacheManager);
  }
  /**
   * Writes a batch of Work Order related entities to MM maintaining referential integrity by:
   * 1. Processing parts first (base entities)
   * 2. Processing part operations (depends on parts)
   * 3. Processing work orders (depends on parts)
   * 4. Processing work order operations (depends on work orders and part operations)
   *
   * This ensures all foreign key constraints are satisfied and prevents referential integrity errors.
   *
   * @param workOrderBatch The batch containing all related entities
   * @param batchCacheManager The batch cache manager instance; pass in null if caching is not desired
   * @returns Combined results from all entity processing
   * @throws MMBatchValidationError on partial success or complete failure with structured error details
   * @throws Error on other underlying issues (network, authentication, etc.)
   */
  static async writeWorkOrderBatchToMM(workOrderBatch, batchCacheManager) {
    // Writes are intentionally sequential: each stage must be fully persisted
    // before its dependents are written, or foreign-key constraints would fail.
    const parts = await MMEntityProcessor.writeEntities(
      ERPObjType.PARTS,
      workOrderBatch.parts,
      batchCacheManager
    );
    const partOperations = await MMEntityProcessor.writeEntities(
      ERPObjType.PART_OPERATION,
      workOrderBatch.partOperations,
      batchCacheManager
    );
    const workOrders = await MMEntityProcessor.writeEntities(
      ERPObjType.WORK_ORDERS,
      workOrderBatch.workOrders,
      batchCacheManager
    );
    const workOrderOperations = await MMEntityProcessor.writeEntities(
      ERPObjType.WORK_ORDER_OPERATIONS,
      workOrderBatch.workOrderOperations,
      batchCacheManager
    );
    return {
      parts,
      partOperations,
      workOrders,
      workOrderOperations
    };
  }
  /**
   * Processes flattened work order data and syncs it to MM maintaining referential integrity.
   *
   * This method expects flattened data where each row contains both work order and operation
   * information with camelCase field names matching the GraphQL schema. It automatically
   * extracts and deduplicates parts, part operations, work orders, and work order operations,
   * then delegates to writeWorkOrderBatchToMM which processes them in dependency order.
   *
   * @param flattenedData Array of flattened rows containing both work order and operation data (camelCase fields)
   * @param batchCacheManager The batch cache manager instance; pass in null if caching is not desired
   * @returns Combined results from all entity processing
   * @throws Error when flattenedData is missing or empty
   * @throws MMBatchValidationError on partial success or complete failure with structured error details
   * @throws Error on other underlying issues (network, authentication, etc.)
   */
  static async syncWorkOrderBatchFromFlattened(flattenedData, batchCacheManager) {
    if (!flattenedData || flattenedData.length === 0) {
      throw new Error("No flattened work order data provided");
    }
    const workOrderBatch = StandardProcessDrivers._extractEntitiesFromFlattened(flattenedData);
    // Delegate the ordered four-step write (and result assembly) to
    // writeWorkOrderBatchToMM instead of duplicating that logic here.
    return StandardProcessDrivers.writeWorkOrderBatchToMM(workOrderBatch, batchCacheManager);
  }
  /**
   * Deduplicates and transforms flattened rows into MMSend* payloads.
   * Parts are keyed by partNumber|partRevision|method, part operations add the
   * sequenceNumber, and work orders are keyed by workOrderId; every row yields
   * one work order operation.
   *
   * @param flattenedData Non-empty array of flattened camelCase rows
   * @returns { parts, partOperations, workOrders, workOrderOperations } ready for writeWorkOrderBatchToMM
   */
  static _extractEntitiesFromFlattened(flattenedData) {
    const uniqueParts = new Map();
    const uniquePartOperations = new Map();
    const uniqueWorkOrders = new Map();
    const rawWorkOrderOperations = [];
    for (const row of flattenedData) {
      const partKey = `${row.partNumber}|${row.partRevision}|${row.method}`;
      if (!uniqueParts.has(partKey)) {
        uniqueParts.set(partKey, {
          partNumber: row.partNumber,
          partRevision: row.partRevision,
          method: row.method
        });
      }
      const partOpKey = `${row.partNumber}|${row.partRevision}|${row.method}|${row.sequenceNumber}`;
      if (!uniquePartOperations.has(partOpKey)) {
        uniquePartOperations.set(partOpKey, {
          partNumber: row.partNumber,
          partRevision: row.partRevision,
          method: row.method,
          sequenceNumber: row.sequenceNumber,
          resourceId: row.resourceId,
          cycleTimeMs: row.cycleTimeMs,
          setupTimeMs: row.setupTimeMs,
          description: row.operationDescription || "",
          quantityPerPart: row.quantityPerPart || 1
        });
      }
      if (!uniqueWorkOrders.has(row.workOrderId)) {
        uniqueWorkOrders.set(row.workOrderId, {
          workOrderId: row.workOrderId,
          lot: row.lot,
          split: row.split,
          sub: row.sub,
          status: row.status,
          dueDate: row.dueDate,
          description: row.description,
          scheduledStartDate: row.scheduledStartDate,
          scheduledEndDate: row.scheduledEndDate,
          closedDate: row.closedDate,
          quantityRequired: row.quantityRequired,
          partNumber: row.partNumber,
          partRevision: row.partRevision,
          method: row.method
        });
      }
      // Operation-level date/status fields come from the op-prefixed columns,
      // since the flattened row carries both work-order and operation values.
      rawWorkOrderOperations.push({
        workOrderId: row.workOrderId,
        lot: row.lot,
        split: row.split,
        sub: row.sub,
        sequenceNumber: row.sequenceNumber,
        resourceId: row.resourceId,
        startQuantity: row.startQuantity,
        finishQuantity: row.finishQuantity,
        expectedRejectRate: row.expectedRejectRate,
        scheduledStartDate: row.opScheduledStartDate,
        scheduledFinishDate: row.opScheduledFinishDate,
        closedDate: row.opClosedDate,
        cycleTimeMs: row.cycleTimeMs,
        setupTimeMs: row.setupTimeMs,
        productionburdenRateHourly: row.productionburdenRateHourly,
        setupburdenRatehourly: row.setupburdenRatehourly,
        operationType: row.operationType,
        quantityPerPart: row.quantityPerPart,
        status: row.opStatus || row.status
      });
    }
    const parts = [...uniqueParts.values()].map(
      (item) => new MMSendPart(
        item.partNumber || "",
        item.partRevision || "",
        item.method || "Standard"
      )
    );
    const partOperations = [...uniquePartOperations.values()].map(
      (item) => new MMSendPartOperation(
        item.partNumber || "",
        item.partRevision || "",
        item.method || "Standard",
        item.sequenceNumber?.toString() || "",
        item.resourceId?.toString() || "",
        item.cycleTimeMs || 0,
        item.setupTimeMs || 0,
        item.description || "",
        item.quantityPerPart || 1
      )
    );
    const workOrders = [...uniqueWorkOrders.values()].map(
      (item) => new MMSendWorkOrder(
        item.workOrderId?.toString() || "",
        item.lot || "",
        item.split || "",
        item.sub || "",
        item.status || "Open",
        StandardProcessDrivers._isoOrNull(item.dueDate),
        item.description || "",
        StandardProcessDrivers._isoOrNull(item.scheduledStartDate),
        StandardProcessDrivers._isoOrNull(item.scheduledEndDate),
        StandardProcessDrivers._isoOrNull(item.closedDate),
        item.quantityRequired || 0,
        item.partNumber || "",
        item.partRevision || "",
        item.method || "Standard"
      )
    );
    const workOrderOperations = rawWorkOrderOperations.map(
      (item) => new MMSendWorkOrderOperation(
        item.workOrderId?.toString() || "",
        item.lot || "",
        item.split || "",
        item.sub || "",
        item.sequenceNumber?.toString() || "",
        item.resourceId?.toString() || "",
        item.startQuantity || 0,
        item.finishQuantity || 0,
        item.expectedRejectRate || 0,
        StandardProcessDrivers._isoOrNull(item.scheduledStartDate),
        StandardProcessDrivers._isoOrNull(item.scheduledFinishDate),
        StandardProcessDrivers._isoOrNull(item.closedDate),
        item.cycleTimeMs || 0,
        item.setupTimeMs || 0,
        parseFloat(item.productionburdenRateHourly || "0"),
        parseFloat(item.setupburdenRatehourly || "0"),
        item.operationType || "Production",
        item.quantityPerPart || 1,
        item.status || "Open"
      )
    );
    return { parts, partOperations, workOrders, workOrderOperations };
  }
  /**
   * Converts a date-like value to an ISO-8601 string, or null when the value is
   * absent/falsy. Centralizes the `value ? new Date(value).toISOString() : null`
   * pattern used for every scheduled/closed/due date field.
   */
  static _isoOrNull(value) {
    return value ? new Date(value).toISOString() : null;
  }
}
|
|
3014
|
+
// Default number of HTTP retry attempts when the caller does not configure one.
const DEFAULT_RETRY_ATTEMPTS$1 = 0;
// Default page size applied when a request does not specify an explicit limit.
const DEFAULT_PAGINATION_LIMIT = 100;
|
|
3016
|
+
/**
 * Thin REST client used by ERP connectors. Centralizes auth headers,
 * query-string construction, pagination metadata, and uniform error handling
 * for GET/PATCH/POST requests against the configured ERP API.
 */
class RestAPIService {
  // HTTP client instance obtained from HTTPClientFactory.
  client;
  // Connector configuration; this class reads config.apiUrl and config.getAuthToken().
  config;
  // Default page size applied when a request does not specify a limit.
  paginationLimit;
  /**
   * @param config2 Connector configuration exposing apiUrl and getAuthToken()
   * @param retryAttempts Optional retry count for the HTTP client (defaults to DEFAULT_RETRY_ATTEMPTS$1)
   * @param paginationLimit Optional default page size (defaults to DEFAULT_PAGINATION_LIMIT)
   */
  constructor(config2, retryAttempts, paginationLimit) {
    this.config = config2;
    this.paginationLimit = paginationLimit ?? DEFAULT_PAGINATION_LIMIT;
    this.client = HTTPClientFactory.getInstance({
      baseUrl: config2.apiUrl,
      retryAttempts: retryAttempts ?? DEFAULT_RETRY_ATTEMPTS$1
    });
  }
  //#region Get Request Helpers
  // Builds the standard JSON headers; the bearer token is included only when
  // config.getAuthToken() resolves to a truthy value.
  async getHeaders() {
    const token = await this.config.getAuthToken();
    return {
      "Content-Type": "application/json",
      Accept: "application/json",
      ...token ? { Authorization: `Bearer ${token}` } : {}
    };
  }
  // Translates the query object into ERP-style URL parameters:
  // limit -> take, nextPage -> skip, plus filter[in]/filter[and]/filter[or].
  // NOTE(review): `if (limit)` treats a limit of 0 as "no limit" - presumably
  // intentional since 0 rows is never requested, but worth confirming.
  buildQueryParams(query = {}) {
    const { limit, nextPage, filter } = query;
    const queryParams = {};
    if (limit) {
      queryParams.take = limit.toString();
    }
    if (nextPage) {
      // nextPage is an integer skip offset encoded as a string (see buildPagination).
      queryParams.skip = nextPage;
    }
    if (filter) {
      if (filter.in) {
        // Each "in" filter becomes `field[in]=v1|v2|...` with pipe-joined values.
        filter.in.forEach((filterIn) => {
          const query2 = filterIn.value.join("|");
          queryParams[`${filterIn.field}[in]`] = query2;
        });
      }
      if (filter.and) {
        queryParams["filter[and]"] = buildLogicalCondition(filter.and, "and");
      }
      if (filter.or) {
        queryParams["filter[or]"] = buildLogicalCondition(filter.or, "or");
      }
    }
    return new URLSearchParams(queryParams);
  }
  /**
   * Builds pagination metadata for API responses
   * @param limit - Optional limit for this specific request. If not provided, uses the service's default pagination limit
   * @param nextPage - Optional next page token (an integer skip offset encoded as a string)
   * @param previousPage - Optional previous page token
   * @param dataLength - Number of items in the current response
   * @param totalCount - Total number of items available
   * @returns Pagination metadata including count, limit, and page tokens
   */
  buildPagination(limit, nextPage, previousPage, dataLength = 0, totalCount) {
    const paging = {
      count: totalCount || 0,
      limit: limit || this.paginationLimit,
      nextPage: void 0,
      previousPage
    };
    if (limit) {
      paging.limit = limit;
      // Advance the skip offset by one page; emit a nextPage token only when the
      // current page returned data and (when known) more rows remain.
      const currentSkip = parseInt(nextPage || "0");
      if (dataLength > 0 && (!totalCount || currentSkip + limit < totalCount)) {
        paging.nextPage = `${currentSkip + limit}`;
      }
    }
    return paging;
  }
  // Joins apiUrl and endpoint with "/" (empty segments dropped) and appends the
  // query string when provided.
  // NOTE(review): apiUrl is also passed as baseUrl to HTTPClientFactory in the
  // constructor - confirm the client does not prepend it a second time.
  buildUrl(endpoint, queryParams) {
    const pathParts = [this.config.apiUrl || ""];
    pathParts.push(endpoint);
    const path2 = pathParts.filter(Boolean).join("/");
    return queryParams ? `${path2}?${queryParams.toString()}` : path2;
  }
  //#endregion get request helpers
  // Performs a GET and unwraps the ERP's PascalCase envelope (Data/TotalCount)
  // into { data, metadata.pagination }.
  // NOTE(review): on failure this delegates to ErrorHandler.handle - assumes it
  // rethrows/normalizes; otherwise this method resolves to undefined. Confirm.
  async get(endpoint, query = {}) {
    try {
      const params = this.buildQueryParams(query);
      const url = this.buildUrl(endpoint, params);
      const headers = await this.getHeaders();
      const response = await this.client.request({
        method: "GET",
        url,
        headers
      });
      const paging = this.buildPagination(
        query.limit,
        query.nextPage,
        query.previousPage,
        response.data.Data?.length,
        response.data.TotalCount
      );
      return {
        data: response.data.Data || [],
        metadata: {
          pagination: paging
        }
      };
    } catch (error) {
      ErrorHandler.handle(error);
    }
  }
  // Performs a PATCH against `endpoint/id` with the given body; returns the raw
  // response data (no envelope unwrapping). Same ErrorHandler caveat as get().
  async patch(endpoint, id, data) {
    try {
      const url = this.buildUrl(`${endpoint}/${id}`);
      const headers = await this.getHeaders();
      const response = await this.client.request({
        method: "PATCH",
        url,
        data,
        headers
      });
      return {
        data: response.data
      };
    } catch (error) {
      ErrorHandler.handle(error);
    }
  }
  // Performs a POST with the given body; returns the raw response data.
  // Same ErrorHandler caveat as get().
  async post(endpoint, data) {
    try {
      const url = this.buildUrl(endpoint);
      const headers = await this.getHeaders();
      const response = await this.client.request({
        method: "POST",
        url,
        data,
        headers
      });
      return {
        data: response.data
      };
    } catch (error) {
      ErrorHandler.handle(error);
    }
  }
  /**
   * Cleanup all HTTP connections and resources
   * Call this when the service is no longer needed
   */
  async destroy() {
    await this.client.destroy();
  }
}
|
|
3163
|
+
/**
 * Reverse-looks-up the numeric ERP API type key for a given entity.
 *
 * @param entity The entity value to search for
 * @param entityMap Map of numeric-string keys to `{ entity, ... }` descriptors
 * @returns The matching key as a number, or undefined when no entry matches
 */
function getERPAPITypeFromEntity(entity, entityMap) {
  for (const [apiType, descriptor] of Object.entries(entityMap)) {
    if (descriptor.entity === entity) {
      return Number(apiType);
    }
  }
  return undefined;
}
|
|
3169
|
+
// True when the value is a string containing at least one non-whitespace character.
const isNonEmptyString = (v) => typeof v === "string" && v.trim().length > 0;
/**
 * Derives a short type label for an arbitrary thrown value.
 * Preference order: `code`, then `name`, then the constructor name (unless it
 * is the uninformative "Object"), falling back to the generic label "Error".
 */
function getErrorType(error) {
  if (error === null || typeof error !== "object") {
    return "Error";
  }
  const candidate = error;
  if (isNonEmptyString(candidate.code)) {
    return candidate.code;
  }
  if (isNonEmptyString(candidate.name)) {
    return candidate.name;
  }
  const ctorName = candidate.constructor?.name;
  return isNonEmptyString(ctorName) && ctorName !== "Object" ? ctorName : "Error";
}
|
|
3180
|
+
/**
 * Normalizes any thrown value into a `{ message, code, ... }` record.
 * Axios errors are delegated to formatAxiosError; values that already look
 * formatted (non-empty message/code strings and no axios `config`) pass
 * through untouched; Error instances, strings, and everything else are
 * converted with sensible fallbacks.
 */
function formatError(error) {
  if (!error) {
    return {
      message: "Unknown error occurred",
      code: "UNKNOWN_ERROR"
    };
  }
  // Axios errors carry request/response context; use the specialized formatter.
  if (error.isAxiosError || error.name === "AxiosError") {
    return formatAxiosError(error);
  }
  const alreadyFormatted =
    typeof error.message === "string" &&
    error.message &&
    typeof error.code === "string" &&
    error.code &&
    !error.config;
  if (alreadyFormatted) {
    return error;
  }
  if (error instanceof Error) {
    return {
      message: error.message || "An error occurred",
      code: "ERROR",
      metadata: {
        name: error.name
      }
    };
  }
  if (typeof error === "string") {
    return {
      message: error,
      code: "ERROR"
    };
  }
  // Last resort: stringify whatever we were given.
  const rawMessage = error.message || error.toString?.() || "Unknown error occurred";
  return {
    message: typeof rawMessage === "string" ? rawMessage : JSON.stringify(rawMessage),
    code: error.code || "UNKNOWN_ERROR"
  };
}
|
|
3214
|
+
/**
 * Converts an axios error into the normalized `{ message, code, httpStatus, metadata }`
 * shape. The message is taken from a pre-extracted `_extractedMessage` when
 * present, otherwise mined from the response body / statusText / error message.
 * metadata carries the request method and URL, plus a remediation hint for
 * common auth/not-found/server-error statuses.
 */
function formatAxiosError(error) {
  const httpStatus = error.response?.status;
  let message = error._extractedMessage;
  if (!message) {
    message =
      extractErrorMessage(error.response?.data) ||
      error.response?.statusText ||
      error.message ||
      "Request failed";
  }
  const metadata = {
    method: error.config?.method?.toUpperCase(),
    url: error.config?.url
  };
  // Attach a remediation hint only for the statuses we can say something useful about.
  let hint;
  if (httpStatus === 401 || httpStatus === 403) {
    hint = "Check authentication credentials";
  } else if (httpStatus === 404) {
    hint = "Resource not found - check endpoint URL";
  } else if (httpStatus && httpStatus >= 500) {
    hint = "ERP system may be temporarily unavailable";
  }
  if (hint !== undefined) {
    metadata.hint = hint;
  }
  return {
    message,
    code: categorizeHttpError(httpStatus),
    httpStatus,
    metadata
  };
}
|
|
3242
|
+
/**
 * Pulls a human-readable error message out of a response body by probing the
 * common field locations used by various ERP/REST conventions, in priority
 * order. String bodies are parsed as JSON when possible, otherwise returned
 * verbatim. Returns null when no message can be found.
 */
function extractErrorMessage(data) {
  if (!data) return null;
  // Known message locations, highest priority first:
  // Epicor, nested REST, simple, .NET, JS, camel/snake case, error arrays,
  // and RFC-7807 problem-details fields.
  const possibleFields = [
    "ErrorMessage",
    "error.message",
    "error",
    "Message",
    "message",
    "errorMessage",
    "error_message",
    "errors[0].message",
    "title",
    "detail"
  ];
  for (const fieldPath of possibleFields) {
    const candidate = getNestedValue(data, fieldPath);
    if (typeof candidate === "string" && candidate) {
      return candidate;
    }
  }
  if (typeof data === "string") {
    // A string body may itself be serialized JSON; fall back to the raw string.
    try {
      return extractErrorMessage(JSON.parse(data));
    } catch {
      return data;
    }
  }
  return null;
}
|
|
3282
|
+
/**
 * Resolves a dotted path (with optional `key[index]` array segments) against an
 * object, e.g. "errors[0].message". Returns null as soon as any intermediate
 * value is null/undefined; otherwise returns the resolved value.
 */
function getNestedValue(obj, path2) {
  let cursor = obj;
  for (const segment of path2.split(".")) {
    const indexed = segment.match(/(\w+)\[(\d+)\]/);
    if (indexed) {
      const [, key, idx] = indexed;
      cursor = cursor?.[key]?.[parseInt(idx, 10)];
    } else {
      cursor = cursor?.[segment];
    }
    if (cursor === undefined || cursor === null) {
      return null;
    }
  }
  return cursor;
}
|
|
3299
|
+
/**
 * Map an HTTP status code to a coarse error category string.
 * A falsy status (request never completed) is treated as a network error.
 */
function categorizeHttpError(status) {
  if (!status) return "NETWORK_ERROR";
  // Exact status codes with a dedicated category. A Map is used so matching
  // stays strict on numeric keys (no string-key coercion).
  const exactCategories = new Map([
    [400, "VALIDATION_ERROR"],
    [401, "AUTHENTICATION_ERROR"],
    [403, "AUTHORIZATION_ERROR"],
    [404, "NOT_FOUND"],
    [409, "CONFLICT"],
    [422, "VALIDATION_ERROR"],
    [429, "RATE_LIMIT"]
  ]);
  const category = exactCategories.get(status);
  if (category) return category;
  // Fall back to range-based buckets.
  if (status >= 500) return "ERP_SERVER_ERROR";
  if (status >= 400) return "CLIENT_ERROR";
  return "HTTP_ERROR";
}
|
|
3312
|
+
/**
 * Render an error as a single log line: "[httpStatus] [code] message",
 * omitting the status prefix when the formatted error has no httpStatus.
 */
function formatErrorForLogging(error) {
  const details = formatError(error);
  const codeAndMessage = `[${details.code}] ${details.message}`;
  return details.httpStatus ? `[${details.httpStatus}] ${codeAndMessage}` : codeAndMessage;
}
|
|
3320
|
+
/**
 * Immutable-by-convention value object describing one connector log event.
 * The event time is stamped at construction.
 */
class LogEntry {
  level;
  message;
  dedupeKey;
  eventTime;
  /**
   * @param params - { level, message, dedupeKey }
   */
  constructor(params) {
    const { level, message, dedupeKey } = params;
    this.level = level;
    this.message = message;
    this.dedupeKey = dedupeKey;
    // Stamp creation time so dedupe/suppression math has a reference point.
    this.eventTime = Date.now();
  }
}
|
|
3332
|
+
/**
 * Type guard for the MM API connector-log response shape:
 * an object with a string `message`, and — if `processed` is present —
 * a numeric `processed` count.
 */
function isLogResponse(value) {
  if (typeof value !== "object" || value === null) return false;
  const candidate = value;
  const hasStringMessage = typeof candidate.message === "string";
  // `processed` is optional, but when present it must be a number.
  const processedOk = !("processed" in candidate) || typeof candidate.processed === "number";
  return hasStringMessage && processedOk;
}
|
|
3339
|
+
/**
 * Sends connector log entries to the MachineMetrics cloud through MMApiClient,
 * optionally deduplicating them via an injected deduper (e.g. FileLogDeduper).
 */
class MMConnectorLogger {
  // Messages longer than this are truncated before transmission.
  MAX_MSG_LEN = 2e3;
  mmApiClient;
  deduper;
  source;
  /**
   * @param source - Identifier attached to every log entry; must be 1-64 characters.
   * @param deduper - Optional deduplication strategy; when absent, every entry is sent.
   * @throws Error when source length is out of range.
   */
  constructor(source, deduper) {
    if (source.length < 1 || source.length > 64) {
      throw new Error("source must be 1-64 characters");
    }
    this.mmApiClient = new MMApiClient();
    this.deduper = deduper;
    this.source = source;
  }
  // Deduplication helpers are delegated to injected FileLogDeduper
  /**
   * Send a single log entry to the MM cloud with deduplication.
   *
   * The deduplication is handled by the injected LogDeduper.
   * If no deduper is injected, the log entry is sent without deduplication.
   *
   * The standard deduper, FileLogDeduper, stores the deduplication state in a file,
   * allowing deduplication across jobs,
   *
   * @param logEntry - The log entry to send
   * @returns Promise resolving to the API response or null if suppressed
   * @throws HTTPError if the request fails or Error if the log entry is invalid
   */
  async sendLog(logEntry) {
    this.validateLogEntry(logEntry);
    const now = Date.now();
    let messageToSend = logEntry.message;
    if (this.deduper) {
      // decide() returns null to suppress, or the (possibly annotated,
      // e.g. "N suppressed") message text to transmit instead.
      const decision = await this.deduper.decide(logEntry, now);
      if (decision === null) return null;
      messageToSend = decision;
    }
    try {
      const logEntryToSend = {
        source: this.source,
        level: logEntry.level,
        message: messageToSend
      };
      const response = await this.mmApiClient.sendConnectorLog(logEntryToSend);
      // Record the successful transmission so the dedupe window starts now.
      if (this.deduper) {
        await this.deduper.onSuccess(logEntry, now);
      }
      // A malformed-but-successful response is logged and normalized rather
      // than treated as a failure.
      if (!isLogResponse(response)) {
        logger.warn("Unexpected success response format from MM API for connector log", { response });
        return { message: "Unexpected success response format when sending log" };
      }
      return { message: response.message };
    } catch (error) {
      // On failure the deduper was never marked successful, so the entry
      // remains eligible for retryFailedTransmissions().
      logger.error("Failed to send log to MM cloud", {
        level: logEntry.level,
        error: error instanceof Error ? error.message : "Unknown error"
      });
      throw error;
    }
  }
  /**
   * Validates level, message and dedupeKey.
   * NOTE(review): mutates the caller's entry — message is truncated in place
   * to MAX_MSG_LEN before any dedupe decision is made.
   * @throws Error if validation fails
   */
  validateLogEntry(logEntry) {
    const allowedLevels = ["info", "warn", "error"];
    if (!logEntry.level || !allowedLevels.includes(logEntry.level)) {
      throw new Error(`level must be one of: ${allowedLevels.join(", ")}`);
    }
    if (!logEntry.message || typeof logEntry.message !== "string") {
      throw new Error("message is required and must be a string");
    }
    logEntry.message = logEntry.message.slice(0, this.MAX_MSG_LEN);
    if (!logEntry.dedupeKey || typeof logEntry.dedupeKey !== "string") {
      throw new Error("dedupeKey is required and must be a string");
    }
    if (logEntry.dedupeKey.trim().length < 1) {
      throw new Error("dedupeKey must be a non-empty string");
    }
  }
  /**
   * Retry all failed transmissions silently
   * This method attempts to retry all messages that failed to transmit
   * and removes them from the failed list if successful, else leaves them for the client to retry
   *
   * Expected usage is by a client to call this as part of its own retry mechanism
   */
  async retryFailedTransmissions() {
    // No-op when there is no deduper or it does not support retry.
    if (!this.deduper || !this.deduper.retryFailedTransmissions) {
      return;
    }
    await this.deduper.retryFailedTransmissions(async (entry, message) => {
      await this.mmApiClient.sendConnectorLog({
        source: this.source,
        level: entry.level,
        message
      });
    });
  }
  /**
   * Clean up resources
   */
  async destroy() {
    await this.mmApiClient.destroy();
  }
}
|
|
3443
|
+
/**
 * File-backed log deduplicator. Keeps a JSON store keyed by dedupeKey; each
 * record tracks { lastTransmitted, suppressedCount, firstUnsentEventTs,
 * lastEventTs, level, message }. Access to the store is serialized through a
 * best-effort lock file (storeFilePath + ".lock").
 */
class FileLogDeduper {
  storeFilePath;
  windowMs;
  ttlMs;
  sweepIntervalMs;
  lastSweepTsMs;
  DEFAULT_WINDOW_TEN_MINS = 600;
  DEFAULT_TTL_ONE_HOUR = 3600;
  DEFAULT_SWEEP_INTERVAL_FIVE_MINS = 300;
  DEFAULT_STORE_FILE_PATH = path.join("/tmp", "log-deduplication.json");
  /**
   * Ctor.
   * @param storeFilePath: The path to the file where the deduplication store is stored; recommended is to use the default
   * @param windowSeconds: Suppression window. Duplicates within this period are suppressed.
   * @param ttlSeconds: Eviction TTL. Store entries for keys inactive beyond this are removed. Enforced to be ≥ windowSeconds.
   * @param sweepIntervalSeconds: Efficiency parameter. How often (min interval) to run opportunistic eviction; retry always sweeps
   * The sweep is lazy, used only when the store is accessed
   */
  constructor({
    storeFilePath = this.DEFAULT_STORE_FILE_PATH,
    windowSeconds = this.DEFAULT_WINDOW_TEN_MINS,
    ttlSeconds = this.DEFAULT_TTL_ONE_HOUR,
    sweepIntervalSeconds = this.DEFAULT_SWEEP_INTERVAL_FIVE_MINS
  } = {}) {
    this.storeFilePath = storeFilePath;
    // All intervals are clamped to at least 1 second, then converted to ms.
    this.windowMs = Math.max(1, windowSeconds) * 1e3;
    this.ttlMs = Math.max(this.windowMs, Math.max(1, ttlSeconds) * 1e3);
    this.sweepIntervalMs = Math.max(1, sweepIntervalSeconds) * 1e3;
    this.lastSweepTsMs = 0;
    this.ensureStoreFileExists();
  }
  /**
   * Deduplication gating function
   * Returns the formatted message to send, or null to suppress
   * Decision is based on the dedupeKey and the time of the entry
   */
  async decide(entry, now) {
    if (!entry.dedupeKey || typeof entry.dedupeKey !== "string" || entry.dedupeKey.trim().length === 0) {
      throw new Error("dedupeKey is required and must be a non-empty string");
    }
    const key = entry.dedupeKey;
    return this.withLock(async () => {
      const store = this.readStore();
      // Opportunistic eviction, rate-limited by sweepIntervalMs.
      if (now - this.lastSweepTsMs >= this.sweepIntervalMs) {
        this.evictExpiredInStore(store, now);
        this.lastSweepTsMs = now;
        this.writeStore(store);
      }
      const existing = store[key];
      if (existing) {
        // Only a *successful* prior transmission (lastTransmitted > 0) opens
        // a suppression window.
        const withinWindow = existing.lastTransmitted > 0 && existing.lastTransmitted + this.windowMs > now;
        if (withinWindow) {
          // Suppress: bump the counter and remember when suppression began.
          store[key] = {
            ...existing,
            suppressedCount: existing.suppressedCount + 1,
            firstUnsentEventTs: existing.suppressedCount === 0 ? entry.eventTime ?? now : existing.firstUnsentEventTs,
            lastEventTs: entry.eventTime ?? now,
            level: entry.level,
            message: entry.message
          };
          this.writeStore(store);
          return null;
        }
        // Window expired: send, annotating with how many events were suppressed.
        const messageToSend2 = this.formatMessage(entry.message, entry.eventTime ?? now, existing.suppressedCount, existing.firstUnsentEventTs);
        store[key] = {
          ...existing,
          suppressedCount: 0,
          firstUnsentEventTs: 0,
          lastEventTs: entry.eventTime ?? now,
          level: entry.level,
          message: entry.message
        };
        this.writeStore(store);
        return messageToSend2;
      }
      // First occurrence of this key: always send. lastTransmitted stays 0
      // until onSuccess() confirms delivery.
      const messageToSend = this.formatMessage(entry.message, entry.eventTime ?? now, 0);
      store[key] = {
        lastTransmitted: 0,
        suppressedCount: 0,
        firstUnsentEventTs: entry.eventTime ?? now,
        lastEventTs: entry.eventTime ?? now,
        level: entry.level,
        message: entry.message
      };
      this.writeStore(store);
      return messageToSend;
    });
  }
  /**
   * Mark a key as successfully transmitted at `now`, resetting suppression
   * state and starting the dedupe window.
   * @throws Error when the entry has no valid dedupeKey.
   */
  async onSuccess(entry, now) {
    if (!entry.dedupeKey || typeof entry.dedupeKey !== "string" || entry.dedupeKey.trim().length === 0) {
      throw new Error("dedupeKey is required and must be a non-empty string");
    }
    const key = entry.dedupeKey;
    await this.withLock(async () => {
      const store = this.readStore();
      const existing = store[key];
      if (existing) {
        store[key] = {
          ...existing,
          lastTransmitted: now,
          firstUnsentEventTs: 0,
          suppressedCount: 0
        };
        this.writeStore(store);
      }
    });
  }
  /**
   * Re-attempt delivery for every record that was never successfully
   * transmitted (lastTransmitted === 0). Stops at the first failing send so
   * remaining records stay queued for the next retry cycle.
   * @param send - Callback receiving the reconstructed entry and the formatted message.
   */
  async retryFailedTransmissions(send) {
    const now = Date.now();
    const entries = await this.withLock(async () => {
      const store = this.readStore();
      // Retry always sweeps, regardless of sweepIntervalMs.
      this.evictExpiredInStore(store, now);
      this.lastSweepTsMs = now;
      this.writeStore(store);
      return Object.entries(store).filter(([, rec]) => rec.lastTransmitted === 0).map(([key, rec]) => ({ key, rec }));
    });
    for (const { key, rec } of entries) {
      try {
        const message = this.formatMessage(rec.message, rec.lastEventTs, rec.suppressedCount, rec.firstUnsentEventTs);
        await send({ level: rec.level, message: rec.message, dedupeKey: key, eventTime: rec.lastEventTs }, message);
        // Send succeeded: record transmission under the lock.
        await this.withLock(async () => {
          const store = this.readStore();
          const current = store[key];
          if (current) {
            store[key] = {
              ...current,
              lastTransmitted: Date.now(),
              suppressedCount: 0
            };
            this.writeStore(store);
          }
        });
      } catch (err) {
        logger.error("Failed to retry failed transmission", { key, rec, error: err });
        return;
      }
    }
  }
  // --- Internals ---
  // Best-effort: creation failures are swallowed; readStore() tolerates a
  // missing file anyway.
  ensureStoreFileExists() {
    try {
      if (!fs.existsSync(this.storeFilePath)) {
        fs.writeFileSync(this.storeFilePath, JSON.stringify({}), "utf-8");
      }
    } catch {
    }
  }
  // Returns the parsed store, or an empty store on any read/parse failure.
  readStore() {
    try {
      if (!fs.existsSync(this.storeFilePath)) return {};
      const content = fs.readFileSync(this.storeFilePath, "utf-8");
      return content ? JSON.parse(content) : {};
    } catch {
      return {};
    }
  }
  // Best-effort write; failures are swallowed (dedup degrades gracefully).
  writeStore(store) {
    try {
      fs.writeFileSync(this.storeFilePath, JSON.stringify(store, null, 2), "utf-8");
    } catch {
    }
  }
  // Builds "ISO-timestamp | message", appending "(N suppressed since ISO)"
  // when prior events were suppressed.
  formatMessage(message, eventTs, suppressedCount, firstUnsentEventTs) {
    const timestamp = new Date(eventTs).toISOString();
    const base = `${timestamp} | ${message}`;
    if (suppressedCount > 0) {
      const since = firstUnsentEventTs && firstUnsentEventTs > 0 ? ` since ${new Date(firstUnsentEventTs).toISOString()}` : "";
      return `${base} (${suppressedCount} suppressed${since})`;
    }
    return base;
  }
  // Serializes store access via an exclusive lock file ("wx" open fails if it
  // exists). After ~3s of contention the callback runs WITHOUT the lock as a
  // last resort.
  // NOTE(review): because fn() runs inside the outer try, an exception thrown
  // by fn while holding the lock is caught by the same catch as lock
  // contention and causes fn to be re-run — confirm fn callbacks cannot throw.
  async withLock(fn) {
    const lockPath = `${this.storeFilePath}.lock`;
    const start = Date.now();
    while (true) {
      try {
        const fd = fs.openSync(lockPath, "wx");
        try {
          const result = await fn();
          return result;
        } finally {
          try {
            fs.closeSync(fd);
          } catch {
          }
          try {
            fs.unlinkSync(lockPath);
          } catch {
          }
        }
      } catch {
        if (Date.now() - start > 3e3) {
          return await fn();
        }
        await new Promise((resolve) => setTimeout(resolve, 50));
      }
    }
  }
  /**
   * Evict expired entries from the store based on the TTL and the key's last transmitted time
   */
  evictExpiredInStore(store, now) {
    const keys = Object.keys(store);
    if (keys.length === 0) return;
    for (const key of keys) {
      const rec = store[key];
      // Never-transmitted records age from their last event instead.
      const referenceTs = rec.lastTransmitted > 0 ? rec.lastTransmitted : rec.lastEventTs;
      if (now - referenceTs > this.ttlMs) {
        delete store[key];
      }
    }
  }
}
|
|
3656
|
+
/**
 * One-shot application bootstrap: loads core configuration, verifies the
 * SQLite database, runs Knex migrations, and persists the timezone offset.
 */
class ApplicationInitializer {
  /**
   * Performs all necessary application initialization tasks
   * This should be called before starting any services or jobs
   *
   * NOTE(review): on failure this sets process.exitCode = 1 and returns
   * normally rather than throwing — callers cannot detect failure from the
   * returned promise; confirm that is intended.
   */
  static async initialize() {
    try {
      logger.info(
        "\n================================INITIALIZING APPLICATION================================\n"
      );
      const coreConfig = CoreConfiguration.inst();
      // toSafeLogObject() presumably redacts secrets before logging — verify.
      logger.info("Core Configuration loaded:", coreConfig.toSafeLogObject());
      logger.info("Performing database startup checks...");
      await SQLiteCoordinator.performStartupCheck();
      logger.info("Database startup checks completed successfully");
      logger.info("Running database migrations...");
      await ApplicationInitializer.runMigrations();
      logger.info("Database migrations completed successfully");
      await getTimezoneOffsetAndPersist();
      logger.info(
        "\n================================APPLICATION INITIALIZATION COMPLETED================================\n"
      );
    } catch (error) {
      logger.error("Critical initialization failure. Exiting.", error);
      process.exitCode = 1;
    }
  }
  /**
   * Runs database migrations to ensure all required tables exist
   * Creates a short-lived Knex connection and always tears it down,
   * rethrowing any migration error to the caller.
   */
  static async runMigrations() {
    const db = knex(config.local);
    try {
      await db.migrate.latest();
    } catch (error) {
      logger.error("Error running migrations:", error);
      throw error;
    } finally {
      // Always release the connection pool, success or failure.
      await db.destroy();
    }
  }
}
|
|
3698
|
+
// By default GraphQL requests are not retried at the HTTP layer.
const DEFAULT_RETRY_ATTEMPTS = 0;
/**
 * Thin GraphQL-over-HTTP client. Normalizes the endpoint path, attaches a
 * bearer token per request, and unwraps the GraphQL envelope into
 * { data, metadata: { errors, extensions } }.
 */
class GraphQLService {
  client;
  config;
  endpoint;
  /**
   * @param config2 - Provides apiUrl and getAuthToken().
   * @param endpoint - Optional path; a leading "/" is added when missing. Empty when omitted.
   * @param retryAttempts - HTTP-layer retries; defaults to DEFAULT_RETRY_ATTEMPTS.
   */
  constructor(config2, endpoint, retryAttempts) {
    this.config = config2;
    this.endpoint = endpoint ? endpoint.startsWith("/") ? endpoint : `/${endpoint}` : "";
    this.client = HTTPClientFactory.getInstance({
      baseUrl: config2.apiUrl,
      retryAttempts: retryAttempts ?? DEFAULT_RETRY_ATTEMPTS
    });
  }
  // Builds JSON headers, adding Authorization only when a token is available.
  async getHeaders() {
    const token = await this.config.getAuthToken();
    return {
      "Content-Type": "application/json",
      Accept: "application/json",
      ...token ? { Authorization: `Bearer ${token}` } : {}
    };
  }
  /**
   * Handles a GraphQL response, properly handling both successful responses
   * and GraphQL-level errors (which come in a 200 HTTP response)
   *
   * From the GraphQL specification (https://spec.graphql.org/draft/#sec-Errors):
   * > "When a GraphQL server encounters an error, it should return a response with a
   * > top-level "errors" field containing the error information. The response may still contain
   * > a partial result in the "data" field if the error occurred after some data was already resolved."
   *
   * And specifically about HTTP status codes:
   * "A server should return a 200 status code when a GraphQL operation successfully executes, including when
   * the operation returns errors. A server should return a 400 status code when a GraphQL operation fails to execute."
   */
  handleGraphQLResponse(response) {
    return {
      data: response.data.data,
      metadata: {
        errors: response.data.errors,
        extensions: response.data.extensions
      }
    };
  }
  /**
   * Executes a GraphQL query
   * @param query The GraphQL query string
   * @param variables Optional variables for the query
   * @returns The query result
   * @throws {HTTPError} For HTTP/network errors only
   *
   * NOTE(review): if ErrorHandler.handle() ever returns instead of throwing,
   * this resolves to undefined — presumably handle() always rethrows; confirm.
   */
  async query(query, variables) {
    try {
      const headers = await this.getHeaders();
      const response = await this.client.request({
        method: "POST",
        url: this.endpoint,
        data: { query, variables },
        headers
      });
      return this.handleGraphQLResponse(response);
    } catch (error) {
      ErrorHandler.handle(error);
    }
  }
  /**
   * Executes a GraphQL mutation
   * @param mutation The GraphQL mutation string
   * @param variables Optional variables for the mutation
   * @returns The mutation result
   * @throws {HTTPError} For HTTP/network errors only
   *
   * NOTE(review): same undefined-on-return caveat as query() above.
   */
  async mutate(mutation, variables) {
    try {
      const headers = await this.getHeaders();
      const response = await this.client.request({
        method: "POST",
        url: this.endpoint,
        data: { query: mutation, variables },
        headers
      });
      return this.handleGraphQLResponse(response);
    } catch (error) {
      ErrorHandler.handle(error);
    }
  }
  /**
   * Cleanup all HTTP connections and resources
   * Call this when the service is no longer needed
   */
  async destroy() {
    await this.client.destroy();
  }
}
|
|
3791
|
+
/**
 * Minimal OAuth 2.0 client-credentials client.
 * Config must provide: authUrl, clientId, clientSecret, scope.
 */
class OAuthClient {
  constructor(config2) {
    this.config = config2;
  }
  /**
   * Requests an OAuth token using the client credentials flow.
   * The request is sent as application/x-www-form-urlencoded.
   * @returns A promise that resolves to the token response containing access_token and expires_in
   * @throws Error if the token request fails
   */
  async getToken() {
    const { clientId, clientSecret, scope, authUrl } = this.config;
    const body = new URLSearchParams({
      grant_type: "client_credentials",
      client_id: clientId,
      client_secret: clientSecret,
      scope
    });
    const tokenResponse = await fetch(authUrl, {
      method: "POST",
      headers: { "Content-Type": "application/x-www-form-urlencoded" },
      body
    });
    if (!tokenResponse.ok) {
      throw new Error(`OAuth token request failed: ${tokenResponse.statusText}`);
    }
    const tokenPayload = await tokenResponse.json();
    // Only the Bearer scheme is supported downstream.
    if (tokenPayload.token_type !== "Bearer") {
      throw new Error(`Unexpected token type: ${tokenPayload.token_type}`);
    }
    return tokenPayload;
  }
}
|
|
3825
|
+
// Shared string codec for encoding/decoding NATS message payloads.
const sc = StringCodec();
/**
 * Manages a single NATS connection: handler registration/subscription,
 * pub/sub, request-reply, periodic status publishing, and graceful shutdown.
 */
class NatsService {
  connection = null;
  // subject -> Subscription, populated by startHandler().
  subscriptions = /* @__PURE__ */ new Map();
  config;
  // Handler registrations collected before (or between) connects.
  handlers = [];
  statusPublishTimer = null;
  constructor(config2) {
    this.config = config2;
  }
  /**
   * Register a handler for a specific subject pattern
   */
  registerHandler(registration) {
    logger.info("Registering NATS handler", {
      subject: registration.subject,
      description: registration.description
    });
    this.handlers.push(registration);
  }
  /**
   * Connect to NATS and start all registered handlers
   * No-op when config.enabled is false; rethrows connection failures.
   *
   * NOTE(review): setupShutdown() adds SIGINT/SIGTERM listeners on every
   * connect — repeated connect cycles would accumulate listeners; confirm
   * connect() is only called once per process.
   */
  async connect() {
    if (!this.config.enabled) {
      logger.info("NATS is disabled, skipping connection");
      return;
    }
    try {
      logger.info("Connecting to NATS...", {
        servers: this.config.servers,
        name: this.config.name
      });
      // Defaults: reconnect forever (-1) with a 2s wait between attempts.
      this.connection = await connect({
        servers: this.config.servers,
        name: this.config.name,
        reconnect: this.config.reconnect ?? true,
        maxReconnectAttempts: this.config.maxReconnectAttempts ?? -1,
        reconnectTimeWait: this.config.reconnectTimeWait ?? 2e3
      });
      logger.info("Connected to NATS", {
        server: this.connection.getServer(),
        clientId: this.connection.info?.client_id
      });
      for (const registration of this.handlers) {
        await this.startHandler(registration);
      }
      this.startStatusPublishing();
      this.monitorConnection();
      this.setupShutdown();
    } catch (error) {
      logger.error("Failed to connect to NATS", { error });
      throw error;
    }
  }
  /**
   * Start a single handler (subscribe to its subject)
   * Spawns a fire-and-forget async loop that decodes each message, attempts
   * JSON parsing (falling back to the raw string), dispatches to the handler,
   * and replies when the message carries a reply subject.
   */
  async startHandler(registration) {
    if (!this.connection) {
      throw new Error("NATS connection not established");
    }
    const sub = this.connection.subscribe(registration.subject);
    this.subscriptions.set(registration.subject, sub);
    logger.info("Started NATS handler", {
      subject: registration.subject,
      description: registration.description
    });
    // Intentionally not awaited: the loop runs for the life of the subscription.
    (async () => {
      for await (const msg of sub) {
        try {
          const data = sc.decode(msg.data);
          logger.info("Received NATS message", {
            subject: msg.subject,
            hasReply: !!msg.reply
          });
          let parsedData;
          try {
            parsedData = JSON.parse(data);
          } catch {
            // Non-JSON payloads are passed through as raw strings.
            parsedData = data;
          }
          const response = await registration.handler.handle(parsedData, msg.subject);
          // Only reply when the requester expects one and the handler
          // produced a value.
          if (msg.reply && response !== void 0) {
            const responseStr = JSON.stringify(response);
            msg.respond(sc.encode(responseStr));
            logger.info("Sent reply", { replySubject: msg.reply });
          }
        } catch (error) {
          logger.error("Error handling NATS message", {
            subject: msg.subject,
            error
          });
          // Surface handler failures to the requester as a structured error.
          if (msg.reply) {
            const errorResponse = {
              status: "error",
              error: {
                message: error instanceof Error ? error.message : "Unknown error",
                code: "HANDLER_ERROR"
              }
            };
            msg.respond(sc.encode(JSON.stringify(errorResponse)));
          }
        }
      }
    })();
  }
  /**
   * Publish a message to a subject (for pub/sub)
   * Strings are sent as-is; anything else is JSON-stringified.
   * @throws Error when not connected.
   */
  async publish(subject, data) {
    if (!this.connection) {
      throw new Error("NATS connection not established");
    }
    const message = typeof data === "string" ? data : JSON.stringify(data);
    this.connection.publish(subject, sc.encode(message));
    logger.info("Published NATS message", { subject });
  }
  /**
   * Send a request and wait for reply (for request-reply)
   * @param timeoutMs - Reply timeout, default 30s.
   * @returns The reply parsed as JSON when possible, otherwise the raw string.
   * @throws Error when not connected; request timeouts propagate from the client.
   */
  async request(subject, data, timeoutMs = 3e4) {
    if (!this.connection) {
      throw new Error("NATS connection not established");
    }
    const message = typeof data === "string" ? data : JSON.stringify(data);
    const response = await this.connection.request(
      subject,
      sc.encode(message),
      { timeout: timeoutMs }
    );
    const responseData = sc.decode(response.data);
    try {
      return JSON.parse(responseData);
    } catch {
      return responseData;
    }
  }
  /**
   * Check if connected to NATS
   */
  isConnected() {
    return this.connection !== null && !this.connection.isClosed();
  }
  /**
   * Start automatic status publishing (every 30 seconds)
   * Publishes immediately, then on an interval. publishStatus() never throws,
   * so the unawaited calls here cannot produce unhandled rejections.
   */
  startStatusPublishing() {
    logger.info("Starting status publishing (every 30 seconds)");
    this.publishStatus();
    this.statusPublishTimer = setInterval(() => {
      this.publishStatus();
    }, 3e4);
  }
  /**
   * Publish connector status
   * Best-effort: failures are logged, never thrown.
   */
  async publishStatus() {
    try {
      const status = {
        timestamp: (/* @__PURE__ */ new Date()).toISOString(),
        locationRef: this.config.locationRef,
        erpType: this.config.erpType,
        natsConnected: this.isConnected()
      };
      await this.publish(
        `mm.14.${this.config.locationRef}.erp.status`,
        status
      );
      logger.debug("Published connector status");
    } catch (error) {
      logger.error("Failed to publish status", { error });
    }
  }
  /**
   * Monitor connection status
   * Fire-and-forget loop that logs every status event except routine
   * pingTimer ticks.
   */
  monitorConnection() {
    if (!this.connection) return;
    (async () => {
      for await (const status of this.connection.status()) {
        if (status.type !== "pingTimer") {
          logger.info("NATS connection status", {
            type: status.type,
            data: status.data
          });
        }
      }
    })();
  }
  /**
   * Setup graceful shutdown
   * Drains the connection then exits the process on SIGINT/SIGTERM.
   */
  setupShutdown() {
    const shutdown = async () => {
      logger.info("Shutting down NATS service...");
      await this.disconnect();
      process.exit(0);
    };
    process.on("SIGINT", shutdown);
    process.on("SIGTERM", shutdown);
  }
  /**
   * Disconnect from NATS
   * Stops status publishing, drains in-flight messages, and clears state.
   * Safe to call when already disconnected.
   */
  async disconnect() {
    if (this.statusPublishTimer) {
      clearInterval(this.statusPublishTimer);
      this.statusPublishTimer = null;
    }
    if (this.connection) {
      // drain() flushes pending messages before closing.
      await this.connection.drain();
      this.connection = null;
      this.subscriptions.clear();
      logger.info("Disconnected from NATS");
    }
  }
  /**
   * Get the location reference
   */
  getLocationRef() {
    return this.config.locationRef;
  }
}
|
|
4049
|
+
// Bridges NATS request/reply traffic to the ERP connector: listens for labor
// ticket events, writes them to the ERP, then mirrors the result back to MM.
class NatsLaborTicketListener {
  // The ERP connector implementation (create/update labor tickets in the ERP).
  connector;
  // Lazily created NatsService instance; undefined until start() succeeds.
  natsService;
  constructor(connector) {
    this.connector = connector;
  }
  /**
   * Start listening for labor ticket events via NATS.
   * Resolves company info, builds the NatsService, registers the health-check
   * and labor-ticket handlers, then connects. Failures are logged, not thrown,
   * so a missing NATS server does not bring down the rest of the service.
   */
  async start() {
    try {
      const companyInfo = await getCompanyInfo();
      const erpType = this.connector.type || "unknown";
      logger.info("Starting NATS listener for labor tickets", {
        locationRef: companyInfo.locationRef,
        companyId: companyInfo.companyId,
        erpType,
        servers: process.env.NATS_SERVERS || "nats://localhost:4222"
      });
      this.natsService = new NatsService({
        servers: process.env.NATS_SERVERS || "nats://localhost:4222",
        name: `${erpType}-connector`,
        locationRef: companyInfo.locationRef,
        erpType,
        enabled: true,
        // Reconnect forever: -1 means unlimited attempts, retried every 2s.
        reconnect: true,
        maxReconnectAttempts: -1,
        reconnectTimeWait: 2e3
      });
      // Handlers must be registered before connect() so subscriptions are
      // established as part of connection setup.
      this.registerHealthCheckHandler(companyInfo.locationRef, erpType);
      this.registerLaborTicketHandler(companyInfo.locationRef, erpType);
      await this.natsService.connect();
      logger.info("NATS listener started successfully", {
        subject: `mm.14.${companyInfo.locationRef}.labor-ticket.*`
      });
    } catch (error) {
      logger.error("Failed to start NATS listener", { error });
    }
  }
  /**
   * Register health check handler - responds immediately to let Dashboard know
   * connector is online.
   */
  registerHealthCheckHandler(locationRef, erpType) {
    if (!this.natsService) return;
    this.natsService.registerHandler({
      subject: `mm.14.${locationRef}.erp.health`,
      description: "Health check - responds immediately to indicate connector is online",
      handler: {
        handle: async () => {
          logger.debug("Health check received, sending pong");
          return {
            status: "online",
            timestamp: (/* @__PURE__ */ new Date()).toISOString(),
            locationRef,
            erpType
          };
        }
      }
    });
  }
  /**
   * Register labor ticket processing handler.
   * The trailing wildcard in the subject carries the action ("create",
   * "update", ...); processing is serialized against the to-erp polling job
   * via SQLiteCoordinator so both never write to the ERP concurrently.
   * Errors are returned as structured { status: "error" } replies rather
   * than thrown, so the NATS requester always receives a response.
   */
  registerLaborTicketHandler(locationRef, erpType) {
    if (!this.natsService) return;
    this.natsService.registerHandler({
      subject: `mm.14.${locationRef}.labor-ticket.*`,
      description: "Process labor tickets in real-time from NATS",
      handler: {
        handle: async ({ data }, subject) => {
          // The last subject token is the action verb (e.g. "create").
          const action = subject.split(".").pop();
          const { actionPayload } = data;
          const startTime = Date.now();
          const { laborTicketRef } = actionPayload;
          logger.info("Received labor ticket via NATS", {
            action,
            requestId: data.requestId,
            laborTicketRef
          });
          return await SQLiteCoordinator.executeWithLock("to-erp", async () => {
            try {
              let laborTicketData;
              if (laborTicketRef) {
                // Hydrate the full ticket from the MM API, then let the
                // event payload's fields take precedence over fetched values.
                const mmApiClient = new MMApiClient();
                const laborTicket = await mmApiClient.fetchLaborTicketByRef(laborTicketRef);
                logger.info("Fetched labor ticket data from MM API", {
                  laborTicketRef,
                  laborTicketId: laborTicket.laborTicketId
                });
                laborTicketData = {
                  ...laborTicket,
                  ...actionPayload
                };
              } else {
                logger.info("No laborTicketRef provided, using actionPayload directly", {
                  requestId: data.requestId
                });
                laborTicketData = actionPayload;
              }
              const mergedLaborTicket = new MMReceiveLaborTicket(laborTicketData);
              const result = await this.processLaborTicket(
                mergedLaborTicket,
                action || "unknown"
              );
              // Advance the export checkpoint so the to-erp polling job does
              // not reprocess this same ticket.
              await this.updateCheckpoint(erpType, result);
              return {
                status: "success",
                requestId: data.requestId,
                action,
                timestamp: (/* @__PURE__ */ new Date()).toISOString(),
                processingTimeMs: Date.now() - startTime,
                laborTicketRef: result.laborTicketRef,
                laborTicket: result.laborTicket
              };
            } catch (error) {
              // processLaborTicket attaches `_formatted`; fall back to
              // formatting here for errors raised before that point.
              const formattedError = error?._formatted || formatError(error);
              logger.debug("Error details", {
                hasFormatted: !!error?._formatted,
                isAxiosError: error?.isAxiosError,
                errorMessage: error?.message,
                responseStatus: error?.response?.status,
                responseData: error?.response?.data
              });
              logger.error("Error handling labor ticket from NATS", {
                error: formattedError.message,
                code: formattedError.code,
                requestId: data.requestId,
                laborTicketRef
              });
              return {
                status: "error",
                requestId: data.requestId,
                action,
                timestamp: (/* @__PURE__ */ new Date()).toISOString(),
                processingTimeMs: Date.now() - startTime,
                error: {
                  message: formattedError.message,
                  code: formattedError.code,
                  httpStatus: formattedError.httpStatus,
                  metadata: formattedError.metadata,
                  laborTicketRef
                }
              };
            }
          });
        }
      }
    });
  }
  /**
   * Process labor ticket in ERP and then create MM entities with ERP ID attached.
   * @param laborTicket Merged ticket (MM data overlaid with the event payload).
   * @param action The subject action verb; "create" (or a missing ERP id)
   *               triggers an ERP insert, anything else an ERP update.
   * @returns { laborTicketRef, laborTicket } for checkpointing and the reply.
   * @throws The original error with `_formatted` attached for the caller.
   */
  async processLaborTicket(laborTicket, action) {
    // Lazy (bundler-rewritten dynamic) imports to avoid circular dependencies.
    const { MMEntityProcessor: MMEntityProcessor2 } = await Promise.resolve().then(() => mmEntityProcessor);
    const { ERPObjType: ERPObjType2 } = await Promise.resolve().then(() => erpTypes);
    const { MMSendLaborTicket: MMSendLaborTicket2 } = await Promise.resolve().then(() => index);
    logger.info("Processing labor ticket: ERP first, then MM creation", {
      laborTicketRef: laborTicket.laborTicketRef,
      action,
      hasLaborTicketId: !!laborTicket.laborTicketId
    });
    try {
      let erpResult;
      if (action === "create" || !laborTicket.laborTicketId) {
        erpResult = await this.connector.createLaborTicketInERP(laborTicket);
        logger.info("Successfully created labor ticket in ERP", {
          laborTicketRef: laborTicket.laborTicketRef,
          erpUid: erpResult.erpUid
        });
      } else {
        // Updates return only the ticket; no new ERP uid is produced.
        erpResult = { laborTicket: await this.connector.updateLaborTicketInERP(laborTicket) };
        logger.info("Successfully updated labor ticket in ERP", {
          laborTicketRef: laborTicket.laborTicketRef
        });
      }
      const laborTicketForMM = { ...laborTicket };
      if (erpResult.erpUid) {
        laborTicketForMM.laborTicketId = erpResult.erpUid;
        // If MM already holds this ticket (by ref) but without an ERP id,
        // patch the id back so future events route as updates.
        if (!laborTicket.laborTicketId && laborTicketForMM.laborTicketRef) {
          const mmApiClient = new MMApiClient();
          await mmApiClient.updateLaborTicketIdByRef(
            laborTicketForMM.laborTicketRef,
            erpResult.erpUid
          );
          logger.info("Patched existing MM labor ticket with new ERP ID", {
            laborTicketRef: laborTicketForMM.laborTicketRef,
            laborTicketId: erpResult.erpUid
          });
        }
      }
      // State is derived purely from clock-out presence.
      laborTicketForMM.state = laborTicketForMM.clockOut ? "CLOSED" : "OPEN";
      const mmLaborTicket = MMSendLaborTicket2.fromPlainObject(laborTicketForMM);
      const mmResult = await MMEntityProcessor2.writeEntities(
        ERPObjType2.LABOR_TICKETS,
        [mmLaborTicket],
        null
        // No caching for real-time operations
      );
      logger.info("Successfully updated MM entities after ERP operation", {
        laborTicketRef: laborTicketForMM.laborTicketRef,
        laborTicketId: laborTicketForMM.laborTicketId,
        entitiesCreated: mmResult.upsertedEntities
      });
      return {
        laborTicketRef: laborTicketForMM.laborTicketRef,
        laborTicket: laborTicketForMM
      };
    } catch (error) {
      const formattedError = formatError(error);
      logger.error("Failed to process labor ticket with MM creation", {
        laborTicketRef: laborTicket.laborTicketRef,
        action,
        error: formattedError.message,
        code: formattedError.code
      });
      // Attach the formatted view so the NATS handler can reuse it without
      // re-formatting (and without losing the original stack).
      const enhancedError = error;
      enhancedError._formatted = formattedError;
      throw enhancedError;
    }
  }
  /**
   * Update checkpoint to prevent to-erp polling job from reprocessing this ticket.
   * Only updates if the new timestamp is later than the current checkpoint
   * (prevents moving backwards).
   */
  async updateCheckpoint(erpType, result) {
    const mmApiClient = new MMApiClient();
    // checkpointValue.timestamp is sent empty here; it identifies the
    // checkpoint shape being read, not a value to store.
    const currentCheckpoint = await mmApiClient.getCheckpoint({
      system: erpType,
      table: "labor_tickets",
      checkpointType: "export",
      checkpointValue: {
        timestamp: ""
      }
    });
    const currentTimestamp = currentCheckpoint?.timestamp;
    const newTimestamp = result.laborTicket.updatedAt || (/* @__PURE__ */ new Date()).toISOString();
    if (!currentTimestamp || new Date(newTimestamp) > new Date(currentTimestamp)) {
      await mmApiClient.saveCheckpoint({
        system: erpType,
        table: "labor_tickets",
        checkpointType: "export",
        checkpointValue: {
          timestamp: newTimestamp
        }
      });
      logger.debug("Updated export checkpoint after NATS processing", {
        laborTicketRef: result.laborTicketRef,
        previousCheckpoint: currentTimestamp,
        newCheckpoint: newTimestamp
      });
    } else {
      logger.debug("Skipped checkpoint update (timestamp not newer)", {
        laborTicketRef: result.laborTicketRef,
        currentCheckpoint: currentTimestamp,
        ticketTimestamp: newTimestamp
      });
    }
  }
}
|
|
4308
|
+
/**
 * Boot the data sync service: optionally start the real-time NATS listener,
 * then schedule the recurring sync jobs (from-erp, to-erp, retries, cache
 * cleanup) with Bree. Errors during startup are logged, not rethrown.
 * @param connectorPath Path to the connector module to load.
 */
const runDataSyncService = async (connectorPath) => {
  const config2 = CoreConfiguration.inst();
  try {
    const connector = await createConnectorFromPath(connectorPath);
    // Real-time path is opt-in via env flag; polling jobs run regardless.
    if (process.env.NATS_ENABLED === "true") {
      const natsListener = new NatsLaborTicketListener(connector);
      await natsListener.start();
    }
    // Resolve the jobs directory relative to this bundled file.
    const currentFileUrl = import.meta.url;
    const currentFilePath = fileURLToPath(currentFileUrl);
    const sdkDistPath = path.dirname(currentFilePath);
    const jobsPath = path.join(
      sdkDistPath,
      "services",
      "data-sync-service",
      "jobs"
    );
    const bree = new Bree({
      root: jobsPath,
      logger,
      worker: {
        env: {
          CONNECTOR_PATH: connectorPath,
          // Pass through all required environment variables
          MM_MAPPING_SERVICE_URL: process.env.MM_MAPPING_SERVICE_URL,
          MM_MAPPING_AUTH_SERVICE_URL: process.env.MM_MAPPING_AUTH_SERVICE_URL,
          MM_MAPPING_SERVICE_TOKEN: process.env.MM_MAPPING_SERVICE_TOKEN,
          ERP_SYSTEM: process.env.ERP_SYSTEM,
          LOG_LEVEL: process.env.LOG_LEVEL,
          NODE_ENV: process.env.NODE_ENV,
          RETRY_ATTEMPTS: process.env.RETRY_ATTEMPTS,
          CACHE_TTL: process.env.CACHE_TTL,
          // Pass through all other environment variables that might be needed
          ...process.env
        }
      },
      jobs: [
        // {
        //   name: 'run-migrations', // Running this once on startup will create the tables in the sqlite database
        // },
        {
          name: "from-erp",
          timeout: "10s",
          interval: config2.fromErpInterval
        },
        {
          name: "to-erp",
          //timeout: '3s', // Use timeout during development to see the job in action quickly
          interval: config2.toErpInterval
        },
        {
          name: "retry-failed-labor-tickets",
          interval: config2.retryLaborTicketsInterval
        },
        {
          name: "clean-up-expired-cache",
          interval: config2.cacheExpirationCheckInterval
        }
      ]
    });
    logger.info(
      "\n================================INITIATING DATA SYNC CYCLES (Bree)================================\n"
    );
    const jobsConfig = bree.config.jobs.map((job) => ({
      name: job.name,
      interval: job.interval,
      timeout: job.timeout
    }));
    logger.info("JOBS CONFIGURATION:", { jobs: jobsConfig });
    // Wire graceful shutdown before any job can run.
    const graceful = new Graceful({ brees: [bree] });
    graceful.listen();
    // FIX: attach event listeners BEFORE starting so early events (e.g. the
    // from-erp job's 10s timeout) cannot fire unobserved. Also use `logger`
    // consistently instead of the previous raw console.* calls.
    bree.on("jobStarted", (job) => {
      logger.info("Job " + job.name + " started");
    });
    bree.on("jobCompleted", (job) => {
      if (job.error) {
        logger.error("Job " + job.name + " failed:", job.error);
      } else {
        logger.info("Job " + job.name + " completed successfully!");
      }
    });
    bree.on("error", (error) => {
      logger.error("Bree error:", error);
    });
    // FIX: previously wrapped in an un-awaited async IIFE, so a rejection
    // from bree.start() escaped the surrounding try/catch as an unhandled
    // promise rejection. Keep it non-blocking but handle the failure.
    bree.start().catch((error) => {
      logger.error("startUp: Error starting data sync jobs:", error);
    });
    logger.info(
      "\n================================DATA SYNC CYCLES INITIATION COMPLETED================================\n"
    );
  } catch (error) {
    logger.error("startUp: Error initiating data sync cycles:", error);
  }
};
|
|
4402
|
+
// Zod schema validating/coercing SQL Server (mssql) connection settings.
// `z.coerce` allows env-style string inputs for numeric/boolean fields.
const SQLServerConfigSchema = z.object({
  user: z.string().nonempty("User is required."),
  password: z.string().nonempty("Password is required."),
  database: z.string().nonempty("Database name is required."),
  server: z.string().nonempty("Server is required."),
  // Defaults to the standard SQL Server port.
  port: z.coerce.number().int().positive("Port must be a positive integer.").default(1433),
  // Milliseconds; defaults: 30s to connect, 60s per request.
  connectionTimeout: z.coerce.number().int().positive("Connection timeout must be a positive integer.").default(3e4),
  requestTimeout: z.coerce.number().int().positive("Request timeout must be a positive integer.").default(6e4),
  pool: z.object({
    max: z.coerce.number().int().positive("Max pool size must be a positive integer.").default(10),
    min: z.coerce.number().int().nonnegative("Min pool size must be a non-negative integer.").default(0),
    idleTimeoutMillis: z.coerce.number().int().nonnegative("Idle timeout must be a non-negative integer.").default(3e4)
  }),
  options: z.object({
    // NOTE(review): z.coerce.boolean() treats any non-empty string
    // (including "false") as true — confirm inputs are real booleans.
    encrypt: z.coerce.boolean().default(false),
    trustServerCertificate: z.coerce.boolean().default(false)
  })
});
|
|
4420
|
+
// Thin wrapper over the `mssql` package: cached connection, prepared-statement
// execution with optional paging, and recordset normalization to string maps.
class SqlServerService {
  // Cached ConnectionPool; reused while `connected`, recreated otherwise.
  connection = null;
  config;
  /**
   * @param config2 Raw connection settings (host, port, credentials, pool
   *                sizing, encryption flags) — validated lazily on connect.
   */
  constructor(config2) {
    this.config = config2;
  }
  /** Release the cached connection, if any. */
  async dispose() {
    await this.closeConnection();
  }
  /**
   * Execute a prepared statement and return the results
   *
   * @param statementToPrepare The SQL query to prepare
   * @param params The parameters to bind to the prepared statement
   * @param paging Optional paging parameters
   * @returns The entities fetched from the database, along with paging information
   */
  async executePreparedStatement(statementToPrepare, params, paging) {
    const connection = await this.openConnection();
    const preparedStatement = new sql.PreparedStatement(connection);
    // All parameters are declared as VarChar; callers pass string values.
    Object.keys(params).forEach((key) => {
      preparedStatement.input(key, sql.VarChar);
    });
    // Page on the server when a limit is given. NOTE(review): T-SQL
    // OFFSET/FETCH requires the statement to end with ORDER BY — assumed to
    // hold for all callers; confirm.
    const finalQuery = paging?.limit !== void 0 ? `${statementToPrepare} OFFSET ${paging.offset || 0} ROWS FETCH NEXT ${paging.limit} ROWS ONLY` : statementToPrepare;
    await preparedStatement.prepare(finalQuery);
    let records;
    try {
      records = await preparedStatement.execute(params);
    } catch (error) {
      const errorInfo = error;
      const errorMessage = "Error fetching data from ERP -> " + (errorInfo.cause ? `Cause: ${errorInfo.cause}. ` : "") + `Name: ${errorInfo.name}. Message: ${errorInfo.message}. Stack: ${errorInfo.stack}`;
      throw new Error(errorMessage);
    } finally {
      // Always release the prepared handle; failures here are logged only.
      try {
        await preparedStatement.unprepare();
      } catch (unprepareError) {
        logger.error("Error during unprepare:", unprepareError);
      }
    }
    const allRecords = SqlServerService.recordsetToRecords(records?.recordset);
    const rowsFetched = records?.rowsAffected[0] || 0;
    // FIX: when `limit` is set, the recordset was ALREADY paged by the
    // OFFSET/FETCH clause above; the previous code sliced it again by
    // `paging.offset`, indexing into the page itself and returning an
    // empty/truncated result for any non-zero offset. Client-side slicing is
    // now applied only when SQL-side paging was skipped (offset, no limit).
    const pagedData = paging?.limit === void 0 && paging?.offset !== void 0 ? allRecords.slice(paging.offset) : allRecords;
    return {
      data: pagedData,
      paging: {
        count: rowsFetched,
        limit: paging?.limit || 0,
        offset: paging?.offset || 0,
        // NOTE(review): with SQL-side paging, rowsFetched is the page size,
        // so nextPage is only set when a full extra page beyond offset+limit
        // came back — preserved as-is from the original logic.
        nextPage: (() => {
          const currentPageEnd = (paging?.offset || 0) + (paging?.limit || 0);
          return paging?.limit && currentPageEnd < rowsFetched ? String(currentPageEnd) : void 0;
        })(),
        previousPage: paging?.offset ? String(Math.max(0, (paging.offset || 0) - (paging.limit || 10))) : void 0
      }
    };
  }
  /**
   * Opens a connection to SQL Server
   * Caches the connection so that it can be reused.
   * On failure to connect, throws
   */
  async openConnection() {
    if (this.connection?.connected) {
      logger.debug("Reusing existing SQL Server connection");
      return this.connection;
    }
    if (this.connection) {
      logger.debug("Clearing disconnected SQL Server connection");
      this.connection = null;
    }
    try {
      logger.info("Opening new SQL Server connection");
      // Validate + coerce config before handing it to the driver.
      const sqlConfig = SQLServerConfigSchema.parse({
        user: this.config.username,
        password: this.config.password,
        database: this.config.database,
        server: this.config.host,
        port: parseInt(this.config.port),
        connectionTimeout: parseInt(this.config.connectionTimeout),
        requestTimeout: parseInt(this.config.requestTimeout),
        pool: {
          max: parseInt(this.config.poolMax),
          min: parseInt(this.config.poolMin),
          idleTimeoutMillis: parseInt(this.config.idleTimeoutMillis)
        },
        options: {
          encrypt: this.config.encrypt,
          trustServerCertificate: this.config.trustServer
        }
      });
      this.connection = await sql.connect(sqlConfig);
      return this.connection;
    } catch (error) {
      logger.error("SqlServerService>>openConnection>> Connection failed", {
        error
      });
      // Surface a generic error; details (possibly credentials) stay in logs.
      throw new Error("SQL Server connection failed");
    }
  }
  /**
   * Transform a IRecordSet from a SQL query into an array of Record<string, string> instances.
   * @param recordset IRecordSet from a SQL query
   * @returns array of Record<string, string> instances
   */
  static recordsetToRecords(recordset) {
    const data = recordset?.map((row) => {
      const transformedRow = {};
      Object.keys(row).forEach((key) => {
        // Every column value is stringified; null/undefined become "".
        transformedRow[key] = row[key]?.toString() || "";
      });
      return transformedRow;
    }) || [];
    return data;
  }
  /** Close and clear the cached connection; close errors are logged only. */
  async closeConnection() {
    if (this.connection?.connected) {
      logger.info("Closing SQL Server connection");
      try {
        await this.connection.close();
      } catch (error) {
        logger.error(
          "SqlServerService::closeConnection: Error closing connection",
          {
            error
          }
        );
      }
      this.connection = null;
    }
  }
}
|
|
4554
|
+
// Runs SQLTransactionOperation instances inside an mssql transaction,
// committing on success and rolling back (best-effort) on failure.
class SQLTransactionManager {
  constructor(connection) {
    this.connection = connection;
  }
  /**
   * Executes a transaction with the given operation and data, manages rollbacks
   * @param operation The SQLTransactionOperation to execute
   * @param data The sql input parameters to pass to the operation
   * @param config2 The configuration for the transaction
   * @returns The result of the operation; throws exceptions on failures
   */
  async executeTransaction(operation, data, config2 = {}) {
    const tx = new sql.Transaction(this.connection);
    // Default to READ COMMITTED unless the caller asks for something else.
    const level = config2.isolationLevel || sql.ISOLATION_LEVEL.READ_COMMITTED;
    try {
      await tx.begin(level);
      const outcome = await operation.execute(tx, data);
      await tx.commit();
      return outcome;
    } catch (error) {
      logger.error("Transaction error:", error);
      // Rollback is best-effort; its own failure must not mask the original.
      try {
        await tx.rollback();
      } catch (rollbackError) {
        logger.error("Rollback error:", rollbackError);
      }
      throw error;
    }
  }
}
|
|
4584
|
+
// Transaction operation: inserts a labor ticket and returns the generated
// NewRowUniqueId produced by the insert query.
class SQLLaborTicketInsertOperation {
  constructor(sqlQuery) {
    this.sqlQuery = sqlQuery;
  }
  /**
   * Bind the given inputs, run the insert, and return the new row's id.
   * @throws Error when the query yields no NewRowUniqueId.
   */
  async execute(transaction, inputs) {
    const request = transaction.request();
    for (const { name, type, value } of inputs) {
      request.input(name, type, value);
    }
    const result = await request.query(this.sqlQuery);
    const newId = result.recordset[0].NewRowUniqueId;
    if (!newId) {
      throw new Error(
        `No LaborTicketId generated by the labor ticket insert query: ${JSON.stringify(request.parameters, null, 2)}`
      );
    }
    return newId;
  }
}
|
|
4602
|
+
// Transaction operation: updates a labor ticket and reports how many rows
// the update touched.
class SQLLaborTicketUpdateOperation {
  constructor(sqlQuery) {
    this.sqlQuery = sqlQuery;
  }
  /**
   * Bind the given inputs, run the update, and return the affected row count.
   */
  async execute(transaction, inputs) {
    const request = transaction.request();
    for (const { name, type, value } of inputs) {
      request.input(name, type, value);
    }
    const result = await request.query(this.sqlQuery);
    return result.rowsAffected[0];
  }
}
|
|
4615
|
+
// Static helpers that wrap labor-ticket insert/update queries in transactions
// against a SqlServerService-provided connection.
class SqlServerHelper {
  /**
   * Creates a new labor ticket in the SQL Server database
   * Opens but does not close the connection
   *
   * @param sqlServerService The SQL Server service to use for connections
   * @param laborTicket The labor ticket to create
   * @param sqlInputs The SQL query parameters
   * @param insertQuery The SQL query to execute for insertion
   *
   * @returns The processed labor ticket, with the ERP's laborTicketId set
   * Throws exceptions on failures, including if the query does not return a laborTicketId
   */
  static async createLaborTicket(sqlServerService, laborTicket, sqlInputs, insertQuery) {
    const connection = await sqlServerService.openConnection();
    if (!connection) {
      throw new Error(
        "Unable to establish database connection to create labor ticket."
      );
    }
    const manager = new SQLTransactionManager(connection);
    const newTicketId = await manager.executeTransaction(
      new SQLLaborTicketInsertOperation(insertQuery),
      sqlInputs
    );
    // Guard clause: surface a descriptive failure if no id came back.
    if (!newTicketId) {
      throw new Error(
        `The query did not return the labor ticket id for labor ticket ref: ${laborTicket.laborTicketRef}`
      );
    }
    laborTicket.laborTicketId = newTicketId.toString();
    return laborTicket;
  }
  /**
   * Updates an existing labor ticket in the SQL Server database
   * Opens but does not close the connection
   * @param sqlServerService The SQL Server service to use for connections
   * @param laborTicket The labor ticket to update
   * @param sqlInputs The SQL query parameters
   * @param updateQuery The SQL query to execute for update
   *
   * @returns The processed labor ticket
   * Throws exceptions on failures
   */
  static async updateLaborTicket(sqlServerService, laborTicket, sqlInputs, updateQuery) {
    const connection = await sqlServerService.openConnection();
    if (!connection) {
      throw new Error(
        "Unable to establish database connection to update labor ticket."
      );
    }
    const manager = new SQLTransactionManager(connection);
    await manager.executeTransaction(
      new SQLLaborTicketUpdateOperation(updateQuery),
      sqlInputs
    );
    return laborTicket;
  }
  /**
   * Logs the SQL input parameters for debugging
   */
  static logQueryInputs(inputs) {
    const divider = "----------------------------------------";
    logger.info("SQL Input Variables:");
    logger.info(divider);
    logger.info("Variable Name | Value");
    logger.info(divider);
    for (const input of inputs) {
      logger.info(`${input.name.padEnd(20)} | ${input.value}`);
    }
    logger.info(divider);
  }
}
|
|
4688
|
+
class PsqlService {
|
|
4689
|
+
config;
|
|
4690
|
+
static odbcModule = null;
|
|
4691
|
+
static odbcLoadError = null;
|
|
4692
|
+
// Store the PSQL connection settings (host, port, database, credentials).
constructor(config2) {
  this.config = config2;
}
|
|
4695
|
+
/**
|
|
4696
|
+
* Dynamically load the ODBC module with lazy initialization and caching
|
|
4697
|
+
* @throws Error with helpful message if ODBC package is not installed
|
|
4698
|
+
*/
|
|
4699
|
+
static async getOdbc() {
  // A previous failed load is cached: fail fast instead of retrying the
  // dynamic import (and native-module load) on every call.
  if (this.odbcLoadError) {
    throw this.odbcLoadError;
  }
  // Subsequent calls return the cached module.
  if (this.odbcModule) {
    return this.odbcModule;
  }
  try {
    const odbcImport = await import("odbc");
    // Accept both CommonJS (`module.exports`) and ESM (`default`) shapes.
    const odbc = odbcImport.default || odbcImport;
    this.odbcModule = odbc;
    return this.odbcModule;
  } catch (error) {
    const errorMessage = error instanceof Error ? error.message : String(error);
    // Cache a descriptive install-instructions error for all future callers.
    this.odbcLoadError = new Error(
      `ODBC package is required for PSQL service but is not installed or failed to load.
Install it with: npm install odbc
Also install OS-level dependencies, e.g. on Alpine Linux:
apk add --no-cache unixodbc unixodbc-dev python3 make g++
For other Linux distributions, install unixodbc and unixodbc-dev packages.
Original error: ${errorMessage}`
    );
    throw this.odbcLoadError;
  }
}
|
|
4724
|
+
// REMOVED: dispose() method - not needed anymore
|
|
4725
|
+
// REMOVED: connection property - not needed anymore
|
|
4726
|
+
// REMOVED: openConnection() method - not needed anymore
|
|
4727
|
+
// REMOVED: closeConnection() method - not needed anymore
|
|
4728
|
+
/**
|
|
4729
|
+
* Build PSQL ODBC connection string
|
|
4730
|
+
* CRITICAL: ServerName must use IP.PORT format (e.g., 10.4.0.11.1583)
|
|
4731
|
+
*/
|
|
4732
|
+
buildConnectionString() {
|
|
4733
|
+
const serverName = `${this.config.host}.${this.config.port}`;
|
|
4734
|
+
return [
|
|
4735
|
+
"Driver={Pervasive ODBC Interface}",
|
|
4736
|
+
`ServerName=${serverName}`,
|
|
4737
|
+
`DBQ=${this.config.database}`,
|
|
4738
|
+
`UID=${this.config.username}`,
|
|
4739
|
+
`PWD=${this.config.password}`,
|
|
4740
|
+
"AutoDoubleQuote=0"
|
|
4741
|
+
].join(";") + ";";
|
|
4742
|
+
}
|
|
4743
|
+
/**
|
|
4744
|
+
* Execute a query and return the results
|
|
4745
|
+
* Creates a fresh connection for each query to avoid handle corruption
|
|
4746
|
+
*
|
|
4747
|
+
* SECURITY WARNING: This method executes the provided SQL string as-is.
|
|
4748
|
+
* - Parameter binding is NOT implemented; the `params` argument is currently ignored.
|
|
4749
|
+
* - Never concatenate untrusted/user-supplied input into `query`.
|
|
4750
|
+
* - Doing so can result in SQL injection vulnerabilities and data exposure.
|
|
4751
|
+
* If dynamic values are required, ensure they are strictly validated/escaped
|
|
4752
|
+
* or implement proper parameterized execution before accepting untrusted input.
|
|
4753
|
+
*
|
|
4754
|
+
* @param query The SQL query to execute
|
|
4755
|
+
* @param params Query parameters (currently unused for PSQL read operations)
|
|
4756
|
+
* @param paging Optional paging parameters
|
|
4757
|
+
* @returns The entities fetched from the database, along with paging information
|
|
4758
|
+
*/
|
|
4759
|
+
async executePreparedStatement(query, params = {}, paging) {
  // ODBC is loaded lazily; this throws a descriptive error if unavailable.
  const odbc = await PsqlService.getOdbc();
  let connection = null;
  try {
    // A fresh connection per query avoids ODBC handle corruption (see the
    // method-level doc comment above).
    const connStr = this.buildConnectionString();
    logger.debug("Creating fresh PSQL connection for query");
    connection = await odbc.connect(connStr);
    // Parameter binding is not implemented: `params` is ignored and the
    // query string runs as-is (see SECURITY WARNING above).
    if (Object.keys(params).length > 0) {
      logger.warn(
        "PsqlService: Query parameters provided but parameter binding not yet implemented. Using direct query execution."
      );
    }
    const records = await connection.query(query);
    const allRecords = PsqlService.recordsetToRecords(records);
    const rowsFetched = allRecords.length;
    // Paging is purely client-side: the full result set is fetched, then
    // sliced by offset/limit when either is provided.
    const pagedData = paging?.offset !== void 0 || paging?.limit !== void 0 ? allRecords.slice(
      paging.offset || 0,
      (paging.offset || 0) + (paging.limit || allRecords.length)
    ) : allRecords;
    return {
      data: pagedData,
      paging: {
        // `count` is the total rows fetched, not the page size.
        count: rowsFetched,
        limit: paging?.limit || 0,
        offset: paging?.offset || 0,
        nextPage: paging?.limit && (paging.offset || 0) + paging.limit < rowsFetched ? String((paging.offset || 0) + paging.limit) : void 0,
        // NOTE(review): previousPage falls back to a step of 10 when no
        // limit is set — presumably a default page size; confirm.
        previousPage: paging?.offset ? String(Math.max(0, (paging.offset || 0) - (paging.limit || 10))) : void 0
      }
    };
  } catch (error) {
    // Re-throw the (already descriptive) missing-ODBC error untouched.
    if (error instanceof Error && error.message.includes("ODBC package is required")) {
      throw error;
    }
    const errorInfo = error;
    logger.error("Error fetching data from PSQL", {
      error: errorInfo.message,
      odbcErrors: errorInfo.odbcErrors,
      query: query.substring(0, 200)
      // Log first 200 chars of query
    });
    throw this.handleOdbcError(errorInfo);
  } finally {
    // Always close the per-query connection; close failures are non-fatal.
    if (connection) {
      try {
        await connection.close();
        logger.debug("PSQL connection closed successfully");
      } catch (err) {
        logger.warn("Error closing PSQL connection (non-fatal)", {
          error: err
        });
      }
    }
  }
}
|
|
4813
|
+
/**
|
|
4814
|
+
* Transform ODBC result set to array of Record<string, string> instances.
|
|
4815
|
+
* IMPORTANT: PSQL CHAR fields are often padded with spaces - we trim them
|
|
4816
|
+
*/
|
|
4817
|
+
static recordsetToRecords(recordset) {
|
|
4818
|
+
if (!Array.isArray(recordset)) {
|
|
4819
|
+
return [];
|
|
4820
|
+
}
|
|
4821
|
+
const data = recordset.map((row) => {
|
|
4822
|
+
const transformedRow = {};
|
|
4823
|
+
Object.keys(row).forEach((key) => {
|
|
4824
|
+
const value = row[key];
|
|
4825
|
+
transformedRow[key] = value !== null && value !== void 0 ? String(value).trim() : "";
|
|
4826
|
+
});
|
|
4827
|
+
return transformedRow;
|
|
4828
|
+
});
|
|
4829
|
+
return data;
|
|
4830
|
+
}
|
|
4831
|
+
/**
|
|
4832
|
+
* Handle ODBC errors and provide meaningful messages
|
|
4833
|
+
*/
|
|
4834
|
+
handleOdbcError(error) {
|
|
4835
|
+
const odbcError = error.odbcErrors?.[0];
|
|
4836
|
+
const errorCode = odbcError?.state;
|
|
4837
|
+
const message = odbcError?.message || error.message;
|
|
4838
|
+
switch (errorCode) {
|
|
4839
|
+
case "08S01":
|
|
4840
|
+
return new Error(
|
|
4841
|
+
`PSQL connection failed. Check: 1) PVSW environment variable set to /usr/local/psql/etc/pvsw.ini, 2) Network connectivity to ports 1583/3351, 3) ODBC configuration files in /usr/local/psql/etc/ and /etc/. Original error: ${message}`
|
|
4842
|
+
);
|
|
4843
|
+
case "28000":
|
|
4844
|
+
return new Error(
|
|
4845
|
+
`PSQL authentication failed. Check username/password. Original error: ${message}`
|
|
4846
|
+
);
|
|
4847
|
+
case "42000":
|
|
4848
|
+
return new Error(`PSQL SQL syntax error. Original error: ${message}`);
|
|
4849
|
+
case "42S02":
|
|
4850
|
+
return new Error(
|
|
4851
|
+
`PSQL table or view not found. Check table names in query. Original error: ${message}`
|
|
4852
|
+
);
|
|
4853
|
+
default:
|
|
4854
|
+
return new Error(`PSQL error (${errorCode || "unknown"}): ${message}`);
|
|
4855
|
+
}
|
|
4856
|
+
}
|
|
4857
|
+
}
|
|
4858
|
+
/**
 * Convert a 6-character PSQL date string (YYMMDD) to an ISO date (YYYY-MM-DD).
 *
 * Two-digit years are interpreted as 20YY. Returns null for blank/sentinel
 * ("000000") input, non-numeric input, or calendar-invalid dates (e.g. Feb 30).
 *
 * Fixes over the previous implementation:
 * - The result is built directly from the parsed components instead of
 *   round-tripping through `Date#toISOString()`. The old path constructed a
 *   LOCAL-time Date and then formatted it in UTC, which shifted the result by
 *   one day for hosts in timezones west of UTC.
 * - Parsing now slices the TRIMMED input, matching the trimmed length check
 *   (previously space-padded input passed the length check but was sliced
 *   untrimmed and rejected).
 *
 * @param psqlDate Raw PSQL date field, e.g. "250101"
 * @returns ISO date string ("YYYY-MM-DD") or null when empty/invalid
 */
function formatPsqlDate(psqlDate) {
  if (!psqlDate) {
    return null;
  }
  const trimmed = psqlDate.trim();
  if (trimmed.length !== 6 || trimmed === "000000") {
    return null;
  }
  const year = parseInt(trimmed.substring(0, 2), 10);
  const month = parseInt(trimmed.substring(2, 4), 10);
  const day = parseInt(trimmed.substring(4, 6), 10);
  if (isNaN(year) || isNaN(month) || isNaN(day)) {
    return null;
  }
  const fullYear = year + 2e3;
  if (month < 1 || month > 12 || day < 1 || day > 31) {
    return null;
  }
  // Round-trip through a UTC Date to validate against the real calendar
  // (catches Feb 30, Apr 31, non-leap Feb 29) without any host-TZ dependence.
  const date = new Date(Date.UTC(fullYear, month - 1, day));
  if (date.getUTCFullYear() !== fullYear || date.getUTCMonth() !== month - 1 || date.getUTCDate() !== day) {
    return null;
  }
  return `${fullYear}-${String(month).padStart(2, "0")}-${String(day).padStart(2, "0")}`;
}
|
|
4879
|
+
/**
 * Convert a 4-character PSQL time string (HHMM) to "HH:MM:00".
 * Returns null for empty input, for trimmed input that is not exactly four
 * characters, or when the parsed hours/minutes fall outside 00-23 / 00-59.
 */
function formatPsqlTime(psqlTime) {
  if (!psqlTime) {
    return null;
  }
  const raw = psqlTime.trim();
  if (raw.length !== 4) {
    return null;
  }
  // parseInt (not Number) on purpose: it tolerates the same prefix-parse
  // behavior the original relied on for loosely-formatted fields.
  const hours = parseInt(raw.slice(0, 2), 10);
  const minutes = parseInt(raw.slice(2), 10);
  const inRange = !isNaN(hours) && !isNaN(minutes) && hours >= 0 && hours <= 23 && minutes >= 0 && minutes <= 59;
  if (!inRange) {
    return null;
  }
  const hh = String(hours).padStart(2, "0");
  const mm = String(minutes).padStart(2, "0");
  return `${hh}:${mm}:00`;
}
|
|
4900
|
+
/**
 * Join PSQL date (YYMMDD) and time (HHMM) fields into a local ISO-8601
 * timestamp "YYYY-MM-DDTHH:MM:SS". Returns null unless BOTH parts format
 * successfully.
 */
function combinePsqlDateTime(psqlDate, psqlTime) {
  const datePart = formatPsqlDate(psqlDate);
  if (!datePart) {
    return null;
  }
  const timePart = formatPsqlTime(psqlTime);
  if (!timePart) {
    return null;
  }
  return `${datePart}T${timePart}`;
}
|
|
4908
|
+
/**
 * True when a raw PSQL date field carries no usable value:
 * null/undefined/empty string, whitespace-only, or the "000000" sentinel.
 */
function isPsqlDateEmpty(psqlDate) {
  if (!psqlDate) {
    return true;
  }
  return psqlDate === "000000" || psqlDate.trim() === "";
}
|
|
4911
|
+
/**
 * Normalize a raw PSQL CHAR field value: null/undefined become "", anything
 * else is stringified and stripped of surrounding whitespace (PSQL CHAR
 * columns are space-padded).
 */
function cleanPsqlCharField(value) {
  // `== null` deliberately matches both null and undefined.
  if (value == null) {
    return "";
  }
  return String(value).trim();
}
|
|
4917
|
+
// Public API surface of the bundled @machinemetrics/mm-erp-sdk entry point.
// NOTE(review): the single-letter bindings (E, a, d, e, f, g) and `config`
// appear to be bundler-renamed internals re-exported under stable public
// aliases — confirm against the package's index.d.ts before relying on them.
export {
  ApplicationInitializer,
  BatchCacheManager,
  CoreConfiguration,
  ERPObjType,
  E as ErpApiConnectionParams,
  ErrorHandler,
  FileLogDeduper,
  GraphQLError,
  GraphQLService,
  HTTPClientFactory,
  HTTPError,
  LogEntry,
  MMApiClient,
  MMBatchValidationError,
  MMConnectorLogger,
  MMReceiveLaborTicket,
  MMSendLaborTicket,
  MMSendPart,
  MMSendPartOperation,
  MMSendPerson,
  MMSendReason,
  MMSendResource,
  MMSendWorkOrder,
  MMSendWorkOrderOperation,
  OAuthClient,
  PsqlService,
  RecordTrackingManager,
  RestAPIService,
  SqlServerHelper,
  SqlServerService,
  StandardProcessDrivers,
  addNewFieldFromExternalSource,
  addNewFieldFromLookupField,
  applyTimezoneOffsetsToFields,
  buildLogicalCondition,
  cleanPsqlCharField,
  cleanupNumbers,
  combinePsqlDateTime,
  convertToLocalTime,
  formatDateWithTZOffset,
  formatError,
  formatErrorForLogging,
  formatPsqlDate,
  formatPsqlTime,
  f as getCachedTimezoneName,
  getCachedTimezoneOffset,
  g as getErpApiConnectionParams,
  getErrorType,
  d as getInitialLoadComplete,
  getPayloadWithoutIDField,
  a as getSQLServerConfiguration,
  getUniqueRows,
  isPsqlDateEmpty,
  config as knexDatabaseConfig,
  logger,
  removeExtraneousFields,
  runDataSyncService,
  e as setInitialLoadComplete,
  trimObjectValues
};
//# sourceMappingURL=mm-erp-sdk.js.map
|