@machinemetrics/mm-erp-sdk 0.1.6-beta.1 → 0.1.7-beta.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (25)
  1. package/dist/{config-WKwu1mMo.js → config-qat9zgOl.js} +2 -2
  2. package/dist/{config-WKwu1mMo.js.map → config-qat9zgOl.js.map} +1 -1
  3. package/dist/{connector-factory-DHmMYsRs.js → connector-factory-C2czCs9v.js} +2 -2
  4. package/dist/{connector-factory-DHmMYsRs.js.map → connector-factory-C2czCs9v.js.map} +1 -1
  5. package/dist/{hashed-cache-manager-CtDhFqj6.js → hashed-cache-manager-CzyFSt2B.js} +4 -4
  6. package/dist/{hashed-cache-manager-CtDhFqj6.js.map → hashed-cache-manager-CzyFSt2B.js.map} +1 -1
  7. package/dist/{index-aci_wdcn.js → index-B9wo8pld.js} +2 -2
  8. package/dist/{index-aci_wdcn.js.map → index-B9wo8pld.js.map} +1 -1
  9. package/dist/{logger-hqtl8hFM.js → logger-Db8CkwR6.js} +924 -966
  10. package/dist/logger-Db8CkwR6.js.map +1 -0
  11. package/dist/mm-erp-sdk.js +45 -73
  12. package/dist/mm-erp-sdk.js.map +1 -1
  13. package/dist/services/data-sync-service/jobs/clean-up-expired-cache.js +4 -4
  14. package/dist/services/data-sync-service/jobs/from-erp.js +4 -4
  15. package/dist/services/data-sync-service/jobs/retry-failed-labor-tickets.js +3 -3
  16. package/dist/services/data-sync-service/jobs/run-migrations.js +1 -1
  17. package/dist/services/data-sync-service/jobs/to-erp.d.ts.map +1 -1
  18. package/dist/services/data-sync-service/jobs/to-erp.js +3 -3
  19. package/dist/services/data-sync-service/jobs/to-erp.js.map +1 -1
  20. package/dist/services/psql-erp-service/psql-service.d.ts +1 -14
  21. package/dist/services/psql-erp-service/psql-service.d.ts.map +1 -1
  22. package/package.json +1 -1
  23. package/src/services/data-sync-service/jobs/to-erp.ts +2 -1
  24. package/src/services/psql-erp-service/psql-service.ts +53 -90
  25. package/dist/logger-hqtl8hFM.js.map +0 -1
package/dist/services/data-sync-service/jobs/clean-up-expired-cache.js CHANGED
@@ -1,7 +1,7 @@
- import "../../../config-WKwu1mMo.js";
- import { H as HashedCacheManager } from "../../../hashed-cache-manager-CtDhFqj6.js";
- import { S as SQLiteCoordinator } from "../../../index-aci_wdcn.js";
- import { l as logger } from "../../../logger-hqtl8hFM.js";
+ import "../../../config-qat9zgOl.js";
+ import { H as HashedCacheManager } from "../../../hashed-cache-manager-CzyFSt2B.js";
+ import { S as SQLiteCoordinator } from "../../../index-B9wo8pld.js";
+ import { l as logger } from "../../../logger-Db8CkwR6.js";
  logger.level = process.env.LOG_LEVEL || "info";
  const main = async () => {
  const cacheManager = new HashedCacheManager();
package/dist/services/data-sync-service/jobs/from-erp.js CHANGED
@@ -1,7 +1,7 @@
- import "../../../config-WKwu1mMo.js";
- import { l as logger } from "../../../logger-hqtl8hFM.js";
- import { S as SQLiteCoordinator } from "../../../index-aci_wdcn.js";
- import { c as createConnectorFromPath } from "../../../connector-factory-DHmMYsRs.js";
+ import "../../../config-qat9zgOl.js";
+ import { l as logger } from "../../../logger-Db8CkwR6.js";
+ import { S as SQLiteCoordinator } from "../../../index-B9wo8pld.js";
+ import { c as createConnectorFromPath } from "../../../connector-factory-C2czCs9v.js";
  logger.level = process.env.LOG_LEVEL || "info";
  const main = async () => {
  try {
package/dist/services/data-sync-service/jobs/retry-failed-labor-tickets.js CHANGED
@@ -1,6 +1,6 @@
- import "../../../config-WKwu1mMo.js";
- import { l as logger } from "../../../logger-hqtl8hFM.js";
- import { c as createConnectorFromPath } from "../../../connector-factory-DHmMYsRs.js";
+ import "../../../config-qat9zgOl.js";
+ import { l as logger } from "../../../logger-Db8CkwR6.js";
+ import { c as createConnectorFromPath } from "../../../connector-factory-C2czCs9v.js";
  logger.level = process.env.LOG_LEVEL || "info";
  const main = async () => {
  try {
package/dist/services/data-sync-service/jobs/run-migrations.js CHANGED
@@ -1,5 +1,5 @@
  import knex from "knex";
- import { l as logger } from "../../../logger-hqtl8hFM.js";
+ import { l as logger } from "../../../logger-Db8CkwR6.js";
  import { c as config } from "../../../knexfile-1qKKIORB.js";
  logger.level = process.env.LOG_LEVEL || "info";
  const db = knex(config.local);
package/dist/services/data-sync-service/jobs/to-erp.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"to-erp.d.ts","sourceRoot":"","sources":["../../../../src/services/data-sync-service/jobs/to-erp.ts"],"names":[],"mappings":"AAAA,OAAO,eAAe,CAAC;AAQvB,QAAA,MAAM,IAAI,qBAoCT,CAAC;AAoBF,eAAe,IAAI,CAAC"}
+ {"version":3,"file":"to-erp.d.ts","sourceRoot":"","sources":["../../../../src/services/data-sync-service/jobs/to-erp.ts"],"names":[],"mappings":"AAAA,OAAO,eAAe,CAAC;AASvB,QAAA,MAAM,IAAI,qBAoCT,CAAC;AAoBF,eAAe,IAAI,CAAC"}
package/dist/services/data-sync-service/jobs/to-erp.js CHANGED
@@ -1,6 +1,6 @@
- import "../../../config-WKwu1mMo.js";
- import { l as logger } from "../../../logger-hqtl8hFM.js";
- import { c as createConnectorFromPath } from "../../../connector-factory-DHmMYsRs.js";
+ import "../../../config-qat9zgOl.js";
+ import { l as logger } from "../../../logger-Db8CkwR6.js";
+ import { c as createConnectorFromPath } from "../../../connector-factory-C2czCs9v.js";
  logger.level = process.env.LOG_LEVEL || "info";
  const main = async () => {
  try {
package/dist/services/data-sync-service/jobs/to-erp.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"to-erp.js","sources":["../../../../src/services/data-sync-service/jobs/to-erp.ts"],"sourcesContent":["import \"dotenv/config\";\n\nimport logger from \"../../reporting-service/logger\";\nimport { createConnectorFromPath } from \"../../../utils/connector-factory\";\n\n// Configure the logger with the correct log level\nlogger.level = process.env.LOG_LEVEL || \"info\";\n\nconst main = async () => {\n try {\n logger.info('Worker for job \"to-erp\" online');\n logger.info(\"==========Starting to-erp job cycle==========\");\n\n // Get the connector path from the environment variable\n const connectorPath = process.env.CONNECTOR_PATH;\n\n if (!connectorPath) {\n throw new Error(\"Connector path not provided in environment variables\");\n }\n\n // Create a new connector instance for this job\n const connector = await createConnectorFromPath(connectorPath);\n\n await connector.syncToERP();\n await connector.syncToERPCompleted();\n\n logger.info(\"==========Completed to-erp job cycle==========\");\n } catch (error) {\n const errorDetails = {\n message: error instanceof Error ? error.message : String(error),\n stack: error instanceof Error ? error.stack : undefined,\n name: error instanceof Error ? error.name : undefined,\n ...(error && typeof error === \"object\" ? error : {}), // Include all enumerable properties if it's an object\n };\n logger.error('Worker for job \"to-erp\" had an error', {\n error: errorDetails,\n connectorPath: process.env.CONNECTOR_PATH,\n });\n\n // Also log to console for immediate visibility\n console.error(\"to-erp job error:\", error);\n\n throw error; // Rethrow so Bree can handle it properly\n }\n};\n\n// Cross-platform module detection fix for Bree compatibility\n// Windows: process.argv[1] uses backslashes, import.meta.url uses forward slashes\n// Linux/Mac: both use forward slashes, so this normalization is safe\nconst normalizedArgv1 = process.argv[1].replace(/\\\\/g, '/');\nconst fileUrl = normalizedArgv1.startsWith('/') ? \n `file://${normalizedArgv1}` : // Unix: file:// + /path = file:///path\n `file:///${normalizedArgv1}`; // Windows: file:/// + C:/path = file:///C:/path\nconst isMainModule = import.meta.url === fileUrl;\n\nif (isMainModule) {\n // This is called when Bree runs this file as a worker\n try {\n await main();\n } catch {\n process.exitCode = 1; // prefer exitCode so stdout/stderr can flush\n }\n}\n\nexport default main;\n"],"names":[],"mappings":";;;AAMA,OAAO,QAAQ,QAAQ,IAAI,aAAa;AAExC,MAAM,OAAO,YAAY;AACvB,MAAI;AACF,WAAO,KAAK,gCAAgC;AAC5C,WAAO,KAAK,+CAA+C;AAG3D,UAAM,gBAAgB,QAAQ,IAAI;AAElC,QAAI,CAAC,eAAe;AAClB,YAAM,IAAI,MAAM,sDAAsD;AAAA,IACxE;AAGA,UAAM,YAAY,MAAM,wBAAwB,aAAa;AAE7D,UAAM,UAAU,UAAA;AAChB,UAAM,UAAU,mBAAA;AAEhB,WAAO,KAAK,gDAAgD;AAAA,EAC9D,SAAS,OAAO;AACd,UAAM,eAAe;AAAA,MACnB,SAAS,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,MAC9D,OAAO,iBAAiB,QAAQ,MAAM,QAAQ;AAAA,MAC9C,MAAM,iBAAiB,QAAQ,MAAM,OAAO;AAAA,MAC5C,GAAI,SAAS,OAAO,UAAU,WAAW,QAAQ,CAAA;AAAA;AAAA,IAAC;AAEpD,WAAO,MAAM,wCAAwC;AAAA,MACnD,OAAO;AAAA,MACP,eAAe,QAAQ,IAAI;AAAA,IAAA,CAC5B;AAGD,YAAQ,MAAM,qBAAqB,KAAK;AAExC,UAAM;AAAA,EACR;AACF;AAKA,MAAM,kBAAkB,QAAQ,KAAK,CAAC,EAAE,QAAQ,OAAO,GAAG;AAC1D,MAAM,UAAU,gBAAgB,WAAW,GAAG,IAC5C,UAAU,eAAe;AAAA;AAAA,EACzB,WAAW,eAAe;AAAA;AAC5B,MAAM,eAAe,YAAY,QAAQ;AAEzC,IAAI,cAAc;AAEhB,MAAI;AACF,UAAM,KAAA;AAAA,EACR,QAAQ;AACN,YAAQ,WAAW;AAAA,EACrB;AACF;"}
+ {"version":3,"file":"to-erp.js","sources":["../../../../src/services/data-sync-service/jobs/to-erp.ts"],"sourcesContent":["import \"dotenv/config\";\n\nimport logger from \"../../../services/reporting-service/logger\";\nimport { SQLiteCoordinator } from \"../../sqlite-service\";\nimport { createConnectorFromPath } from \"../../../utils/connector-factory\";\n\n// Configure the logger with the correct log level\nlogger.level = process.env.LOG_LEVEL || \"info\";\n\nconst main = async () => {\n try {\n logger.info('Worker for job \"to-erp\" online');\n logger.info(\"==========Starting to-erp job cycle==========\");\n\n // Get the connector path from the environment variable\n const connectorPath = process.env.CONNECTOR_PATH;\n\n if (!connectorPath) {\n throw new Error(\"Connector path not provided in environment variables\");\n }\n\n // Create a new connector instance for this job\n const connector = await createConnectorFromPath(connectorPath);\n\n await connector.syncToERP();\n await connector.syncToERPCompleted();\n\n logger.info(\"==========Completed to-erp job cycle==========\");\n } catch (error) {\n const errorDetails = {\n message: error instanceof Error ? error.message : String(error),\n stack: error instanceof Error ? error.stack : undefined,\n name: error instanceof Error ? error.name : undefined,\n ...(error && typeof error === \"object\" ? error : {}), // Include all enumerable properties if it's an object\n };\n logger.error('Worker for job \"to-erp\" had an error', {\n error: errorDetails,\n connectorPath: process.env.CONNECTOR_PATH,\n });\n\n // Also log to console for immediate visibility\n console.error(\"to-erp job error:\", error);\n\n throw error; // Rethrow so Bree can handle it properly\n }\n};\n\n// Cross-platform module detection fix for Bree compatibility\n// Windows: process.argv[1] uses backslashes, import.meta.url uses forward slashes\n// Linux/Mac: both use forward slashes, so this normalization is safe\nconst normalizedArgv1 = process.argv[1].replace(/\\\\/g, '/');\nconst fileUrl = normalizedArgv1.startsWith('/') ? \n `file://${normalizedArgv1}` : // Unix: file:// + /path = file:///path\n `file:///${normalizedArgv1}`; // Windows: file:/// + C:/path = file:///C:/path\nconst isMainModule = import.meta.url === fileUrl;\n\nif (isMainModule) {\n // This is called when Bree runs this file as a worker\n try {\n await main();\n } catch {\n process.exitCode = 1; // prefer exitCode so stdout/stderr can flush\n }\n}\n\nexport default main;\n"],"names":[],"mappings":";;;AAOA,OAAO,QAAQ,QAAQ,IAAI,aAAa;AAExC,MAAM,OAAO,YAAY;AACvB,MAAI;AACF,WAAO,KAAK,gCAAgC;AAC5C,WAAO,KAAK,+CAA+C;AAG3D,UAAM,gBAAgB,QAAQ,IAAI;AAElC,QAAI,CAAC,eAAe;AAClB,YAAM,IAAI,MAAM,sDAAsD;AAAA,IACxE;AAGA,UAAM,YAAY,MAAM,wBAAwB,aAAa;AAE7D,UAAM,UAAU,UAAA;AAChB,UAAM,UAAU,mBAAA;AAEhB,WAAO,KAAK,gDAAgD;AAAA,EAC9D,SAAS,OAAO;AACd,UAAM,eAAe;AAAA,MACnB,SAAS,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,MAC9D,OAAO,iBAAiB,QAAQ,MAAM,QAAQ;AAAA,MAC9C,MAAM,iBAAiB,QAAQ,MAAM,OAAO;AAAA,MAC5C,GAAI,SAAS,OAAO,UAAU,WAAW,QAAQ,CAAA;AAAA;AAAA,IAAC;AAEpD,WAAO,MAAM,wCAAwC;AAAA,MACnD,OAAO;AAAA,MACP,eAAe,QAAQ,IAAI;AAAA,IAAA,CAC5B;AAGD,YAAQ,MAAM,qBAAqB,KAAK;AAExC,UAAM;AAAA,EACR;AACF;AAKA,MAAM,kBAAkB,QAAQ,KAAK,CAAC,EAAE,QAAQ,OAAO,GAAG;AAC1D,MAAM,UAAU,gBAAgB,WAAW,GAAG,IAC5C,UAAU,eAAe;AAAA;AAAA,EACzB,WAAW,eAAe;AAAA;AAC5B,MAAM,eAAe,YAAY,QAAQ;AAEzC,IAAI,cAAc;AAEhB,MAAI;AACF,UAAM,KAAA;AAAA,EACR,QAAQ;AACN,YAAQ,WAAW;AAAA,EACrB;AACF;"}
package/dist/services/psql-erp-service/psql-service.d.ts CHANGED
@@ -1,4 +1,3 @@
- import odbc from "odbc";
  import { PsqlConfiguration } from "./configuration";
  import { ERPResponse } from "../../types/erp-types";
  type PagingParams = {
@@ -6,10 +5,8 @@ type PagingParams = {
  offset?: number;
  };
  export declare class PsqlService {
- private connection;
  private config;
  constructor(config: PsqlConfiguration);
- dispose(): Promise<void>;
  /**
  * Build PSQL ODBC connection string
  * CRITICAL: ServerName must use IP.PORT format (e.g., 10.4.0.11.1583)
@@ -17,7 +14,7 @@ export declare class PsqlService {
  private buildConnectionString;
  /**
  * Execute a query and return the results
- * Interface matches SqlServerService for consistency
+ * Creates a fresh connection for each query to avoid handle corruption
  *
  * @param query The SQL query to execute
  * @param params Query parameters (currently unused for PSQL read operations)
@@ -25,25 +22,15 @@
  * @returns The entities fetched from the database, along with paging information
  */
  executePreparedStatement(query: string, params?: Record<string, string>, paging?: PagingParams): Promise<ERPResponse | undefined>;
- /**
- * Opens a connection to PSQL database
- * Caches the connection so that it can be reused.
- * On failure to connect, throws
- */
- openConnection(): Promise<odbc.Connection>;
  /**
  * Transform ODBC result set to array of Record<string, string> instances.
  * IMPORTANT: PSQL CHAR fields are often padded with spaces - we trim them
- *
- * @param recordset Result set from ODBC query
- * @returns array of Record<string, string> instances
  */
  static recordsetToRecords(recordset: any[]): Record<string, string>[];
  /**
  * Handle ODBC errors and provide meaningful messages
  */
  private handleOdbcError;
- private closeConnection;
  }
  export {};
  //# sourceMappingURL=psql-service.d.ts.map
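With `dispose()`, `openConnection()`, and the cached connection removed from the declaration above, a consumer now only constructs the service and calls `executePreparedStatement()`. A usage sketch against this declaration follows; the import paths, the table name, and any `PsqlConfiguration` fields beyond `host` and `port` are assumptions for illustration, not taken from the package documentation.

```ts
// Hypothetical usage sketch. Import paths and config fields other than
// host/port are assumptions; see ./configuration for the real shape.
import { PsqlService } from "./services/psql-erp-service/psql-service";
import type { PsqlConfiguration } from "./services/psql-erp-service/configuration";

const config = {
  host: "10.4.0.11", // ServerName becomes "10.4.0.11.1583" (IP.PORT, not IP:PORT)
  port: 1583,
} as PsqlConfiguration; // remaining fields omitted in this sketch

const service = new PsqlService(config);

// No openConnection()/dispose() lifecycle anymore: each call opens its own
// ODBC connection and closes it when the query finishes.
const response = await service.executePreparedStatement(
  "SELECT * FROM INVENTORY", // hypothetical table
  {},                        // parameter binding not yet implemented
  { limit: 100, offset: 0 }
);

console.log(response?.data?.length, "rows in this page");
```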
package/dist/services/psql-erp-service/psql-service.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"psql-service.d.ts","sourceRoot":"","sources":["../../../src/services/psql-erp-service/psql-service.ts"],"names":[],"mappings":"AAAA,OAAO,IAAI,MAAM,MAAM,CAAC;AACxB,OAAO,EAAE,iBAAiB,EAAE,MAAM,iBAAiB,CAAC;AACpD,OAAO,EAAE,WAAW,EAAE,MAAM,uBAAuB,CAAC;AAIpD,KAAK,YAAY,GAAG;IAClB,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,MAAM,CAAC,EAAE,MAAM,CAAC;CACjB,CAAC;AAEF,qBAAa,WAAW;IACtB,OAAO,CAAC,UAAU,CAAgC;IAClD,OAAO,CAAC,MAAM,CAAoB;gBAEtB,MAAM,EAAE,iBAAiB;IAI/B,OAAO;IAIb;;;OAGG;IACH,OAAO,CAAC,qBAAqB;IAgB7B;;;;;;;;OAQG;IACU,wBAAwB,CACnC,KAAK,EAAE,MAAM,EACb,MAAM,GAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAM,EACnC,MAAM,CAAC,EAAE,YAAY,GACpB,OAAO,CAAC,WAAW,GAAG,SAAS,CAAC;IAsDnC;;;;OAIG;IACG,cAAc,IAAI,OAAO,CAAC,IAAI,CAAC,UAAU,CAAC;IA4BhD;;;;;;OAMG;WACW,kBAAkB,CAAC,SAAS,EAAE,GAAG,EAAE,GAAG,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,EAAE;IAmB5E;;OAEG;IACH,OAAO,CAAC,eAAe;YA6BT,eAAe;CAa9B"}
+ {"version":3,"file":"psql-service.d.ts","sourceRoot":"","sources":["../../../src/services/psql-erp-service/psql-service.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,iBAAiB,EAAE,MAAM,iBAAiB,CAAC;AACpD,OAAO,EAAE,WAAW,EAAE,MAAM,uBAAuB,CAAC;AAIpD,KAAK,YAAY,GAAG;IAClB,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,MAAM,CAAC,EAAE,MAAM,CAAC;CACjB,CAAC;AAEF,qBAAa,WAAW;IACtB,OAAO,CAAC,MAAM,CAAoB;gBAEtB,MAAM,EAAE,iBAAiB;IASrC;;;OAGG;IACH,OAAO,CAAC,qBAAqB;IAe7B;;;;;;;;OAQG;IACU,wBAAwB,CACnC,KAAK,EAAE,MAAM,EACb,MAAM,GAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAM,EACnC,MAAM,CAAC,EAAE,YAAY,GACpB,OAAO,CAAC,WAAW,GAAG,SAAS,CAAC;IAqEnC;;;OAGG;WACW,kBAAkB,CAAC,SAAS,EAAE,GAAG,EAAE,GAAG,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,EAAE;IAkB5E;;OAEG;IACH,OAAO,CAAC,eAAe;CA4BxB"}
package/package.json CHANGED
@@ -1,7 +1,7 @@
  {
  "name": "@machinemetrics/mm-erp-sdk",
  "description": "A library for syncing data between MachineMetrics and ERP systems",
- "version": "0.1.6-beta.1",
+ "version": "0.1.7-beta.0",
  "license": "MIT",
  "author": "machinemetrics",
  "main": "dist/mm-erp-sdk.js",
package/src/services/data-sync-service/jobs/to-erp.ts CHANGED
@@ -1,6 +1,7 @@
  import "dotenv/config";

- import logger from "../../reporting-service/logger";
+ import logger from "../../../services/reporting-service/logger";
+ import { SQLiteCoordinator } from "../../sqlite-service";
  import { createConnectorFromPath } from "../../../utils/connector-factory";

  // Configure the logger with the correct log level
package/src/services/psql-erp-service/psql-service.ts CHANGED
@@ -10,23 +10,22 @@ type PagingParams = {
  };

  export class PsqlService {
- private connection: odbc.Connection | null = null;
  private config: PsqlConfiguration;

  constructor(config: PsqlConfiguration) {
  this.config = config;
  }

- async dispose() {
- await this.closeConnection();
- }
+ // REMOVED: dispose() method - not needed anymore
+ // REMOVED: connection property - not needed anymore
+ // REMOVED: openConnection() method - not needed anymore
+ // REMOVED: closeConnection() method - not needed anymore

  /**
  * Build PSQL ODBC connection string
  * CRITICAL: ServerName must use IP.PORT format (e.g., 10.4.0.11.1583)
  */
  private buildConnectionString(): string {
- // PSQL requires ServerName in format IP.PORT (not IP:PORT)
  const serverName = `${this.config.host}.${this.config.port}`;

  return (
@@ -43,7 +42,7 @@ export class PsqlService {

  /**
  * Execute a query and return the results
- * Interface matches SqlServerService for consistency
+ * Creates a fresh connection for each query to avoid handle corruption
  *
  * @param query The SQL query to execute
  * @param params Query parameters (currently unused for PSQL read operations)
@@ -55,12 +54,14 @@ export class PsqlService {
  params: Record<string, string> = {},
  paging?: PagingParams
  ): Promise<ERPResponse | undefined> {
- const connection = await this.openConnection();
+ let connection: odbc.Connection | null = null;

- let records;
  try {
- // For Phase 1 (read-only), we execute queries directly
- // Phase 2 will add proper parameter binding for INSERT/UPDATE/DELETE
+ // Create fresh connection for THIS query only
+ const connStr = this.buildConnectionString();
+ logger.debug("Creating fresh PSQL connection for query");
+ connection = await odbc.connect(connStr);
+
  if (Object.keys(params).length > 0) {
  logger.warn(
  "PsqlService: Query parameters provided but parameter binding not yet implemented. " +
@@ -68,85 +69,62 @@ export class PsqlService {
  );
  }

- records = await connection.query(query);
+ const records = await connection.query(query);
+ const allRecords = PsqlService.recordsetToRecords(records);
+ const rowsFetched = allRecords.length;
+
+ // Apply paging if requested
+ const pagedData =
+ paging?.offset !== undefined || paging?.limit !== undefined
+ ? allRecords.slice(
+ paging.offset || 0,
+ (paging.offset || 0) + (paging.limit || allRecords.length)
+ )
+ : allRecords;
+
+ return {
+ data: pagedData,
+ paging: {
+ count: rowsFetched,
+ limit: paging?.limit || 0,
+ offset: paging?.offset || 0,
+ nextPage:
+ paging?.limit && (paging.offset || 0) + paging.limit < rowsFetched
+ ? String((paging.offset || 0) + paging.limit)
+ : undefined,
+ previousPage: paging?.offset
+ ? String(Math.max(0, (paging.offset || 0) - (paging.limit || 10)))
+ : undefined,
+ },
+ };
  } catch (error) {
  const errorInfo = error as OdbcErrorResponse;
  logger.error("Error fetching data from PSQL", {
  error: errorInfo.message,
  odbcErrors: errorInfo.odbcErrors,
+ query: query.substring(0, 200), // Log first 200 chars of query
  });

  throw this.handleOdbcError(errorInfo);
- }
-
- const allRecords = PsqlService.recordsetToRecords(records);
- const rowsFetched = allRecords.length;
-
- // Apply paging if requested
- const pagedData =
- paging?.offset !== undefined || paging?.limit !== undefined
- ? allRecords.slice(
- paging.offset || 0,
- (paging.offset || 0) + (paging.limit || allRecords.length)
- )
- : allRecords;
-
- return {
- data: pagedData,
- paging: {
- count: rowsFetched,
- limit: paging?.limit || 0,
- offset: paging?.offset || 0,
- nextPage:
- paging?.limit && (paging.offset || 0) + paging.limit < rowsFetched
- ? String((paging.offset || 0) + paging.limit)
- : undefined,
- previousPage: paging?.offset
- ? String(Math.max(0, (paging.offset || 0) - (paging.limit || 10)))
- : undefined,
- },
- };
- }
-
- /**
- * Opens a connection to PSQL database
- * Caches the connection so that it can be reused.
- * On failure to connect, throws
- */
- async openConnection(): Promise<odbc.Connection> {
- // If we have a connection, reuse it
- // Note: ODBC connections don't have a .connected property like SQL Server
- // We'll keep it simple and reuse if not null
- if (this.connection) {
- logger.debug("Reusing existing PSQL connection");
- return this.connection;
- }
-
- try {
- const connStr = this.buildConnectionString();
- logger.info("Opening new PSQL connection");
- logger.debug(
- "Connection string (password hidden):",
- connStr.replace(/PWD=[^;]+/, "PWD=***")
- );
-
- this.connection = await odbc.connect(connStr);
- logger.info("Successfully connected to PSQL database");
- return this.connection;
- } catch (error) {
- logger.error("PsqlService>>openConnection>> Connection failed", {
- error,
- });
- throw this.handleOdbcError(error as OdbcErrorResponse);
+ } finally {
+ // CRITICAL: Always close connection, even on error
+ if (connection) {
+ try {
+ await connection.close();
+ logger.debug("PSQL connection closed successfully");
+ } catch (err) {
+ // Don't throw on close errors, just log
+ logger.warn("Error closing PSQL connection (non-fatal)", {
+ error: err,
+ });
+ }
+ }
  }
  }

  /**
  * Transform ODBC result set to array of Record<string, string> instances.
  * IMPORTANT: PSQL CHAR fields are often padded with spaces - we trim them
- *
- * @param recordset Result set from ODBC query
- * @returns array of Record<string, string> instances
  */
  public static recordsetToRecords(recordset: any[]): Record<string, string>[] {
  if (!Array.isArray(recordset)) {
@@ -157,7 +135,6 @@ export class PsqlService {
  const transformedRow: Record<string, string> = {};
  Object.keys(row).forEach((key) => {
  const value = row[key];
- // Convert to string and trim (PSQL CHAR fields have trailing spaces)
  transformedRow[key] =
  value !== null && value !== undefined ? String(value).trim() : "";
  });
@@ -198,18 +175,4 @@ export class PsqlService {
  return new Error(`PSQL error (${errorCode || "unknown"}): ${message}`);
  }
  }
-
- private async closeConnection(): Promise<void> {
- if (this.connection) {
- logger.info("Closing PSQL connection");
- try {
- await this.connection.close();
- } catch (error) {
- logger.error("PsqlService::closeConnection: Error closing connection", {
- error,
- });
- }
- this.connection = null;
- }
- }
  }
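Net effect of the psql-service.ts diff above: the cached connection, `openConnection()`, `closeConnection()`, and `dispose()` are gone, and `executePreparedStatement()` now owns the entire connection lifecycle. A condensed sketch of that per-query pattern, simplified from the diff (paging, the parameter-binding warning, and ODBC error translation omitted):

```ts
// Condensed from the new executePreparedStatement() above; a simplified
// sketch, not the package's exported API.
import odbc from "odbc";

async function queryWithFreshConnection(
  connStr: string,
  query: string
): Promise<Record<string, string>[]> {
  let connection: odbc.Connection | null = null;
  try {
    // Open a fresh connection for this query only, avoiding reuse of a
    // possibly corrupted handle.
    connection = await odbc.connect(connStr);
    const records = await connection.query(query);
    // PSQL CHAR fields are space-padded, so every value is trimmed.
    return (records as any[]).map((row) => {
      const out: Record<string, string> = {};
      for (const key of Object.keys(row)) {
        const value = row[key];
        out[key] = value !== null && value !== undefined ? String(value).trim() : "";
      }
      return out;
    });
  } finally {
    // Always close, even on error; close failures are logged as non-fatal
    // in the real implementation and simply swallowed here.
    if (connection) {
      try {
        await connection.close();
      } catch {
        /* ignore close errors */
      }
    }
  }
}
```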