@nsshunt/stsdatamanagement 1.17.65 → 1.17.66

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (47)
  1. package/dist/cliworker-old.js +914 -0
  2. package/dist/cliworker.js +929 -0
  3. package/dist/databaseutils.js +1281 -0
  4. package/dist/datagenerator-old.js +277 -0
  5. package/dist/datagenerator.js +1156 -0
  6. package/dist/dbaccess.js +1377 -0
  7. package/dist/dbaccess.test.js +10 -0
  8. package/dist/dbbuild.test.js +1298 -0
  9. package/dist/dberrors.js +104 -0
  10. package/dist/pgaccesslayer.js +751 -0
  11. package/dist/pgpoolmanager.js +144 -0
  12. package/dist/pgutils.js +920 -0
  13. package/dist/setupdb.js +1288 -0
  14. package/dist/stsDataManagementTypes.js +18 -0
  15. package/package.json +3 -2
  16. package/dist/stsdatamanagement.mjs +0 -21142
  17. package/dist/stsdatamanagement.mjs.map +0 -1
  18. package/dist/stsdatamanagement.umd.js +0 -21149
  19. package/dist/stsdatamanagement.umd.js.map +0 -1
  20. package/types/cliworker-old.d.ts +0 -2
  21. package/types/cliworker-old.d.ts.map +0 -1
  22. package/types/cliworker.d.ts +0 -20
  23. package/types/cliworker.d.ts.map +0 -1
  24. package/types/databaseutils.d.ts +0 -20
  25. package/types/databaseutils.d.ts.map +0 -1
  26. package/types/datagenerator-old.d.ts +0 -12
  27. package/types/datagenerator-old.d.ts.map +0 -1
  28. package/types/datagenerator.d.ts +0 -12
  29. package/types/datagenerator.d.ts.map +0 -1
  30. package/types/dbaccess.d.ts +0 -9
  31. package/types/dbaccess.d.ts.map +0 -1
  32. package/types/dbaccess.test.d.ts +0 -2
  33. package/types/dbaccess.test.d.ts.map +0 -1
  34. package/types/dbbuild.test.d.ts +0 -2
  35. package/types/dbbuild.test.d.ts.map +0 -1
  36. package/types/dberrors.d.ts +0 -22
  37. package/types/dberrors.d.ts.map +0 -1
  38. package/types/pgaccesslayer.d.ts +0 -200
  39. package/types/pgaccesslayer.d.ts.map +0 -1
  40. package/types/pgpoolmanager.d.ts +0 -14
  41. package/types/pgpoolmanager.d.ts.map +0 -1
  42. package/types/pgutils.d.ts +0 -6
  43. package/types/pgutils.d.ts.map +0 -1
  44. package/types/setupdb.d.ts +0 -3
  45. package/types/setupdb.d.ts.map +0 -1
  46. package/types/stsDataManagementTypes.d.ts +0 -10
  47. package/types/stsDataManagementTypes.d.ts.map +0 -1
@@ -0,0 +1,914 @@
1
+ "use strict";
2
+ var __create = Object.create;
3
+ var __defProp = Object.defineProperty;
4
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
5
+ var __getOwnPropNames = Object.getOwnPropertyNames;
6
+ var __getProtoOf = Object.getPrototypeOf;
7
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
8
+ var __copyProps = (to, from, except, desc) => {
9
+ if (from && typeof from === "object" || typeof from === "function") {
10
+ for (let key of __getOwnPropNames(from))
11
+ if (!__hasOwnProp.call(to, key) && key !== except)
12
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
13
+ }
14
+ return to;
15
+ };
16
+ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
17
+ // If the importer is in node compatibility mode or this is not an ESM
18
+ // file that has been converted to a CommonJS file using a Babel-
19
+ // compatible transform (i.e. "__esModule" has not been set), then set
20
+ // "default" to the CommonJS "module.exports" for node compatibility.
21
+ isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
22
+ mod
23
+ ));
24
+
25
// src/cliworker-old.ts
var import_crypto = __toESM(require("crypto"));

// src/pgpoolmanager.ts
// Bundled imports for the pool-manager module. @nsshunt/* packages are
// project-internal; `debug`/`colors` drive the logging below.
var import_stsconfig = require("@nsshunt/stsconfig");
var import_stsutils = require("@nsshunt/stsutils");
var import_debug = __toESM(require("debug"));
var import_colors = require("colors");
var import_cluster = __toESM(require("cluster"));
var import_pg = require("pg");
// Process-wide connection settings resolved once at module load from the
// shared configuration service.
var { databaseUrl, connectionString, defaultDatabaseConnectionString, isProduction, poolSize } = (0, import_stsconfig.$Options)();
// Debug channel namespaced by PID so multi-worker logs stay attributable.
var debug = (0, import_debug.default)(`proc:${process.pid}`);
37
// Owns one node-postgres Pool per process/worker and (optionally) attaches
// a periodic telemetry observer to pool statistics.
var PGPoolManager = class {
  #options;          // construction options ({ usedefaultdb })
  #observer = null;  // interval handle created by AttachInstruments()
  #pool;             // the underlying pg.Pool instance
  // options.usedefaultdb selects defaultDatabaseConnectionString outside
  // production; in production `databaseUrl` always wins.
  constructor(options) {
    if (!options) {
      options = {
        usedefaultdb: false
      };
    }
    this.#options = options;
    const connectionStringURI = isProduction ? databaseUrl : options.usedefaultdb === true ? defaultDatabaseConnectionString : connectionString;
    this.#pool = new import_pg.Pool({
      connectionString: connectionStringURI,
      ssl: isProduction,
      max: poolSize
      // Default is 10 - This will be the number in a pool per thread (worker)
      // Consideration needs to be given for the max number of connections at the database.
      // Max connections calculation = max pool size * max workers (threads) per instance/node * max instances in a cluster (k3s, k8s, swarm mode etc.)
      /*
          ssl: {
            // rejectUnauthorized: true, // Make this true for PROD
            rejectUnauthorized: false,
            //ca: fs.readFileSync('/path/to/server-certificates/root.crt').toString(),
            //key: fs.readFileSync('/path/to/client-key/postgresql.key').toString(),
            //cert: fs.readFileSync('/path/to/client-certificates/postgresql.crt').toString(),
          }
      */
    });
    // Mask the password portion of the URI before logging it.
    // NOTE(review): `sep[1]` is undefined when the URI has no ":" in the
    // credentials, and replace() masks only the first occurrence — if the
    // password is also a substring of the username the masking is partial.
    // Verify the logged output for unusual credentials.
    const re = new RegExp("//(.*)@").exec(connectionStringURI);
    let usernamepassword = "";
    if (re) {
      usernamepassword = re[1];
    }
    const sep = usernamepassword.split(":");
    const usernamepasswordReplacement = usernamepassword.replace(sep[1], "*****************************");
    const safeConnectionStringURI = connectionStringURI.replace(usernamepassword, usernamepasswordReplacement);
    if (import_cluster.default.isPrimary) {
      debug(`Created Database Pool with Master Thread, PID: [${process.pid}]`.yellow);
    } else {
      debug(`Created Database Pool with Worker Thread, PID: [${process.pid}]`.yellow);
    }
    // NOTE(review): this line logs "Master Thread" unconditionally, even on
    // workers — looks like a leftover from before the isPrimary branch above.
    debug(`Created Database Pool: Master Thread`.yellow);
    debug(` Connection URI: [${safeConnectionStringURI}]`.yellow);
    debug(` Maximum Pool Size: [${poolSize}]`.yellow);
    debug(` SSL Connection: [${isProduction}]`.yellow);
    // An error on an idle client is treated as fatal for the whole process.
    this.#pool.on("error", (error) => {
      console.error(`Unexpected error on idle client: [${error}]`.red);
      process.exit(-1);
    });
    // Refresh telemetry whenever pool membership changes.
    this.#pool.on("connect", () => {
      this.UpdateInstruments();
    });
    this.#pool.on("acquire", () => {
      this.UpdateInstruments();
    });
    this.#pool.on("remove", () => {
      this.UpdateInstruments();
    });
    this.AttachInstruments();
  }
  // The raw pg.Pool, used directly by PGAccessLayer.
  get pool() {
    return this.#pool;
  }
  // Telemetry hook; currently a no-op (see the commented-out gauge
  // implementation below for the intended behavior).
  // eslint-disable-next-line @typescript-eslint/no-empty-function
  UpdateInstruments() {
  }
  /*
    UpdateInstruments()
    {
      if ($stsgd.app) {
        $stsgd.app.UpdateInstrument(Gauge.CONNECTION_POOL_TOTAL_GAUGE, {
          val: this.pool.totalCount
        } as InstrumentGaugeTelemetry);

        $stsgd.app.UpdateInstrument(Gauge.CONNECTION_POOL_IDLE_GAUGE, {
          val: this.pool.idleCount
        } as InstrumentGaugeTelemetry);

        $stsgd.app.UpdateInstrument(Gauge.CONNECTION_POOL_WAITING_GAUGE, {
          val: this.pool.waitingCount
        } as InstrumentGaugeTelemetry);
      }
    }
  */
  // Poll pool stats every `interval` ms (default 1s) when the global app is
  // present. unref() keeps the timer from holding the process open.
  AttachInstruments(interval = 1e3) {
    if (import_stsutils.$stsgd.app) {
      this.#observer = setInterval(() => {
        this.UpdateInstruments();
      }, interval).unref();
    }
  }
  // Stop polling. clearInterval(null) is a harmless no-op, so this is safe
  // to call even when AttachInstruments never armed the timer.
  DetachInstruments() {
    clearInterval(this.#observer);
    this.#observer = null;
  }
};
134
+
135
// src/pgaccesslayer.ts
// Bundled imports and module-level configuration for the access layer.
var import_http_status_codes = require("http-status-codes");
var import_ioredis = __toESM(require("ioredis"));
var import_stsconfig2 = require("@nsshunt/stsconfig");
var import_pg_copy_streams = __toESM(require("pg-copy-streams"));
var streamex = __toESM(require("node:stream"));
var fs = __toESM(require("node:fs/promises"));
var import_debug2 = __toESM(require("debug"));
var goptions = (0, import_stsconfig2.$Options)();
// COPY FROM stream factory for bulk loads (used elsewhere in this bundle).
var copyFrom = import_pg_copy_streams.default.from;
// Well-known resource name for the shared default configuration document.
var defaultConfigId = "defaultconfig";
// Synthetic actor id used when the system itself (not a user) writes rows.
var systemUserId = "SystemUserID";
var debug2 = (0, import_debug2.default)(`proc:${process.pid}:pgaccesslayer`);
// Redis cache settings and error-logging switch pulled from global options.
var { redisPort, redisServer, useRedis, endFlush, consoleLogErrors } = goptions;
149
// Data-access layer over the shared PG pool, with an optional Redis
// read-through cache for latest-version resource lookups.
var PGAccessLayer = class {
  #cache = null;        // ioredis client, present only when useRedis === true
  #pgpool = null;       // pg.Pool borrowed from the pool manager
  #poolManager = null;  // owning PGPoolManager (kept for lifecycle access)
  // NOTE(review): parseInt(redisPort) has no radix — presumably redisPort is
  // a plain decimal string; confirm and prefer parseInt(redisPort, 10).
  constructor(poolManager) {
    this.#poolManager = poolManager;
    this.#pgpool = poolManager.pool;
    if (useRedis === true) {
      this.#cache = new import_ioredis.default(parseInt(redisPort), redisServer);
    }
  }
160
+ #latestResourceVersion(inputfilter) {
161
+ const { resname, entname, vnum, filter, incdel, startdate, enddate, dbaction, dbactionuser } = inputfilter;
162
+ return resname !== void 0 && entname === void 0 && vnum === void 0 && filter === void 0 && incdel === void 0 && startdate === void 0 && enddate === void 0 && dbaction === void 0 && dbactionuser === void 0;
163
+ }
164
  // Translate an API filter object into a parameterized SQL WHERE clause.
  // `restype` is the table alias that version/date/dbaction predicates bind
  // to: "r" (stsresource) or "e" (stsentity). All values are bound through
  // $n placeholders — never string-interpolated into the SQL.
  // Returns { qstring, qparams }, or null when no parameterized predicate
  // was produced.
  #buildQueryString(filters, restype) {
    const { resname, entname, vnum, filter, incdel, startdate, enddate, dbaction, dbactionuser } = filters;
    const qparams = [];
    let qstring = " where ";
    let sep = "";
    let qnum = 0;
    if (resname !== void 0) {
      qparams.push(resname);
      qstring += sep + "r.resname = $" + ++qnum;
      sep = " and ";
    }
    if (entname !== void 0) {
      qparams.push(entname);
      qstring += sep + "e.entname = $" + ++qnum;
      sep = " and ";
    }
    if (vnum !== void 0) {
      qparams.push(vnum);
      qstring += sep + restype + ".vnum = $" + ++qnum;
      sep = " and ";
    } else {
      // No explicit version and no end-date window: restrict to the current
      // (open-ended, validto IS NULL) version.
      if (enddate === void 0) {
        qstring += sep + restype + ".validto is null";
        sep = " and ";
      }
    }
    if (filter !== void 0) {
      qparams.push(filter);
      // The LIKE pattern targets the name column of whichever table we query.
      if (restype === "r") {
        qstring += sep + restype + ".resname like $" + ++qnum;
      } else {
        qstring += sep + restype + ".entname like $" + ++qnum;
      }
      sep = " and ";
    }
    if (startdate !== void 0) {
      qparams.push(startdate);
      qstring += sep + restype + ".validfrom >= $" + ++qnum;
      sep = " and ";
    }
    if (enddate !== void 0) {
      qparams.push(enddate);
      qstring += sep + restype + ".validto <= $" + ++qnum;
      sep = " and ";
    }
    // Unless deleted rows are explicitly included, exclude dbaction = 3
    // (evidently the delete marker, given the `incdel` flag semantics).
    if (incdel === void 0 || incdel.toLowerCase() === "n") {
      qparams.push(3);
      qstring += sep + restype + ".dbaction != $" + ++qnum;
      sep = " and ";
    }
    if (dbaction !== void 0) {
      qparams.push(dbaction);
      qstring += sep + restype + ".dbaction = $" + ++qnum;
      sep = " and ";
    }
    if (dbactionuser !== void 0) {
      qparams.push(dbactionuser);
      qstring += sep + restype + ".dbactionuser = $" + ++qnum;
      sep = " and ";
    }
    // qnum counts only parameterized predicates. NOTE(review): when
    // incdel === "y" and no other filter is set, the un-parameterized
    // "validto is null" clause is built but discarded here (qnum === 0) —
    // confirm that is intended.
    if (qnum === 0) {
      return null;
    } else {
      return { qstring, qparams };
    }
  }
230
+ async getLatestResource(resid) {
231
+ return await this.getResource({ resname: resid });
232
+ }
233
+ async saveResource(userid2, resid, data) {
234
+ const retVal = await this.getLatestResource(resid);
235
+ if (retVal.status !== import_http_status_codes.StatusCodes.OK) {
236
+ return await this.createResource(userid2, resid, data);
237
+ } else {
238
+ const vnum = retVal.detail.vnum;
239
+ return await this.updateResource(userid2, resid, vnum, data);
240
+ }
241
+ }
242
+ async getDefaultConfig() {
243
+ return await this.getLatestResource(defaultConfigId);
244
+ }
245
+ async saveDefaultConfig(config) {
246
+ await this.saveResource(systemUserId, defaultConfigId, config);
247
+ }
248
  // Count stsresource rows, optionally restricted by a raw SQL predicate.
  // SECURITY(review): `filter` is concatenated directly into the SQL text
  // with no parameterization — it must NEVER come from untrusted input.
  // Returns { status: OK, detail: { count } } or an error descriptor.
  async getResourceCount(filter = null) {
    const fname = "getResourceCount";
    try {
      let createQuery = `select count(*) from stsresource r`;
      if (filter !== null) {
        createQuery += " where " + filter;
      }
      const client = await this.#pgpool?.connect();
      try {
        const { rows } = await client.query(createQuery);
        const dbResponse = rows[0];
        return { status: import_http_status_codes.StatusCodes.OK, detail: dbResponse };
      } catch (error) {
        return { status: import_http_status_codes.StatusCodes.INTERNAL_SERVER_ERROR, error: `[${fname}]: Operation was not successful`, detail: error };
      } finally {
        client.release();
      }
    } catch (error) {
      if (consoleLogErrors)
        console.error(`[${fname}]: Could not get client from pool: ${error}`);
      return { status: import_http_status_codes.StatusCodes.INTERNAL_SERVER_ERROR, error: `[${fname}]: Operation was not successful`, detail: error };
    }
  }
271
  // Create a new resource via the create_stsresource() stored function,
  // inside its own transaction.
  // Returns { status: CREATED, detail } on success; CONFLICT when the
  // unique constraint fires; INTERNAL_SERVER_ERROR otherwise.
  async createResource(userid2, resourceid, payload) {
    const fname = "createResource";
    try {
      let createQuery = null;
      createQuery = `select * from create_stsresource($1, $2, $3)`;
      const values = [
        userid2,
        resourceid,
        payload
      ];
      const client = await this.#pgpool?.connect();
      try {
        await client.query("BEGIN");
        const { rows } = await client.query(createQuery, values);
        const dbResponse = rows[0];
        await client.query("COMMIT");
        return { status: import_http_status_codes.StatusCodes.CREATED, detail: dbResponse };
      } catch (error) {
        await client.query("ROLLBACK");
        let err = { status: import_http_status_codes.StatusCodes.INTERNAL_SERVER_ERROR, error: `[${fname}]: Operation was not successful`, detail: error };
        // 23505 = PostgreSQL unique_violation: the resource already exists.
        if (error.code === "23505") {
          err = { status: import_http_status_codes.StatusCodes.CONFLICT, error: `[${fname}: resourceid [${resourceid}] already exists`, detail: error };
        }
        debug2(`Error: ${JSON.stringify(error)}`.red);
        return err;
      } finally {
        client.release();
      }
    } catch (error) {
      if (consoleLogErrors)
        console.error(`[${fname}]: Could not get client from pool: ${error}`);
      return { status: import_http_status_codes.StatusCodes.INTERNAL_SERVER_ERROR, error: `[${fname}]: Operation was not successful`, detail: error };
    }
  }
305
+ async beginX() {
306
+ const fname = "beginX";
307
+ try {
308
+ const client = await this.#pgpool?.connect();
309
+ await client.query("BEGIN");
310
+ return client;
311
+ } catch (error) {
312
+ if (consoleLogErrors)
313
+ console.error(`[${fname}]: Could not get client from pool: ${error}`);
314
+ return { status: import_http_status_codes.StatusCodes.INTERNAL_SERVER_ERROR, error: `[${fname}]: Operation was not successful`, detail: error };
315
+ }
316
+ }
317
+ async commitX(client) {
318
+ const fname = "commitX";
319
+ try {
320
+ await client.query("COMMIT");
321
+ client.release();
322
+ } catch (error) {
323
+ if (consoleLogErrors)
324
+ console.error(`[${fname}]: Could not get client from pool: ${error}`);
325
+ return { status: import_http_status_codes.StatusCodes.INTERNAL_SERVER_ERROR, error: `[${fname}]: Operation was not successful`, detail: error };
326
+ }
327
+ }
328
+ async rollbackX(client) {
329
+ const fname = "rollbackX";
330
+ try {
331
+ await client.query("ROLLBACK");
332
+ client.release();
333
+ } catch (error) {
334
+ if (consoleLogErrors)
335
+ console.error(`[${fname}]: Could not get client from pool: ${error}`);
336
+ return { status: import_http_status_codes.StatusCodes.INTERNAL_SERVER_ERROR, error: `[${fname}]: Operation was not successful`, detail: error };
337
+ }
338
+ }
339
+ async createResourceX(client, userid2, resourceid, payload) {
340
+ const fname = "createResourceX";
341
+ const createQuery = `select * from create_stsresource($1, $2, $3)`;
342
+ const values = [
343
+ userid2,
344
+ resourceid,
345
+ payload
346
+ ];
347
+ try {
348
+ const { rows } = await client.query(createQuery, values);
349
+ const dbResponse = rows[0];
350
+ return { status: import_http_status_codes.StatusCodes.CREATED, detail: dbResponse };
351
+ } catch (error) {
352
+ let err = { status: import_http_status_codes.StatusCodes.INTERNAL_SERVER_ERROR, error: `[${fname}]: Operation was not successful`, detail: error };
353
+ if (error.code === "23505") {
354
+ err = { status: import_http_status_codes.StatusCodes.CONFLICT, error: `[${fname}: resourceid [${resourceid}] already exists`, detail: error };
355
+ }
356
+ debug2(`Error: [${JSON.stringify(err)}]`.red);
357
+ return err;
358
+ }
359
+ }
360
  // Update a resource (optimistic-concurrency: the caller must supply the
  // current vnum) via update_stsresource(), in its own transaction, and
  // invalidate the Redis cache entry on success.
  // Returns { status: OK, detail }; NOT_FOUND on P0002 (no_data_found, i.e.
  // id/vnum missing or stale); INTERNAL_SERVER_ERROR otherwise.
  async updateResource(userid2, resourceid, resourcevnum, payload) {
    const fname = "updateResource";
    try {
      const createQuery = `select * from update_stsresource($1, $2, $3, $4)`;
      const values = [
        userid2,
        resourceid,
        resourcevnum,
        payload
      ];
      const client = await this.#pgpool?.connect();
      try {
        await client.query("BEGIN");
        const { rows } = await client.query(createQuery, values);
        const dbResponse = rows[0];
        await client.query("COMMIT");
        // Drop any cached copy so the next latest-version read refetches.
        if (this.#cache !== null) {
          await this.#cache.del(resourceid);
        }
        return { status: import_http_status_codes.StatusCodes.OK, detail: dbResponse };
      } catch (error) {
        await client.query("ROLLBACK");
        let err = { status: import_http_status_codes.StatusCodes.INTERNAL_SERVER_ERROR, error: `[${fname}]: Operation was not successful`, detail: error };
        if (error.code === "P0002") {
          err = { status: import_http_status_codes.StatusCodes.NOT_FOUND, error: `[${fname}: resourceid [${resourceid}] and version [${resourcevnum}] not found or not current`, detail: error };
        }
        debug2(`Error: [${JSON.stringify(err)}]`.red);
        return err;
      } finally {
        client.release();
      }
    } catch (error) {
      if (consoleLogErrors)
        console.error(`[${fname}]: Could not get client from pool: ${error}`);
      return { status: import_http_status_codes.StatusCodes.INTERNAL_SERVER_ERROR, error: `[${fname}]: Operation was not successful`, detail: error };
    }
  }
397
+ async deleteLatestResource(userid2, resid) {
398
+ const fname = "deleteLatestResource";
399
+ const retVal = await this.getLatestResource(resid);
400
+ if (retVal.status !== import_http_status_codes.StatusCodes.OK) {
401
+ return { status: retVal.status, error: `[${fname}]: Operation was not successful: [${resid}]`, detail: retVal };
402
+ } else {
403
+ const vnum = retVal.detail.vnum;
404
+ await this.deleteResource(userid2, resid, vnum);
405
+ }
406
+ }
407
  // Soft/hard delete a specific resource version via delete_stsresource(),
  // in its own transaction, invalidating the Redis cache entry on success.
  // Returns { status: OK, detail }; NOT_FOUND on P0002 (id/vnum missing or
  // stale); INTERNAL_SERVER_ERROR otherwise.
  async deleteResource(userid2, resourceid, resourcevnum) {
    const fname = "deleteResource";
    try {
      const createQuery = `select * from delete_stsresource($1, $2, $3)`;
      const values = [
        userid2,
        resourceid,
        resourcevnum
      ];
      const client = await this.#pgpool?.connect();
      try {
        await client.query("BEGIN");
        const { rows } = await client.query(createQuery, values);
        const dbResponse = rows[0];
        await client.query("COMMIT");
        // Drop any cached copy so stale data is not served after delete.
        if (this.#cache !== null) {
          await this.#cache.del(resourceid);
        }
        return { status: import_http_status_codes.StatusCodes.OK, detail: dbResponse };
      } catch (error) {
        await client.query("ROLLBACK");
        let err = { status: import_http_status_codes.StatusCodes.INTERNAL_SERVER_ERROR, error: `[${fname}]: Operation was not successful`, detail: error };
        if (error.code === "P0002") {
          err = { status: import_http_status_codes.StatusCodes.NOT_FOUND, error: `[${fname}: resourceid [${resourceid}] and version [${resourcevnum}] not found or not current`, detail: error };
        }
        debug2(`Error: [${JSON.stringify(err)}]`.red);
        return err;
      } finally {
        client.release();
      }
    } catch (error) {
      if (consoleLogErrors)
        console.error(`[${fname}]: Could not get client from pool: ${error}`);
      return { status: import_http_status_codes.StatusCodes.INTERNAL_SERVER_ERROR, error: `[${fname}]: Operation was not successful`, detail: error };
    }
  }
443
  // Create an entity attached to a specific resource version via
  // create_stsentity(), in its own transaction.
  // Returns { status: CREATED, detail }; CONFLICT on unique_violation
  // 23505; INTERNAL_SERVER_ERROR otherwise.
  async createEntity(userid2, resourceid, resourcevnum, entityid, payload) {
    const fname = "createEntity";
    try {
      const createQuery = `select * from create_stsentity($1, $2, $3, $4, $5)`;
      const values = [
        userid2,
        resourceid,
        resourcevnum,
        entityid,
        payload
      ];
      const client = await this.#pgpool?.connect();
      try {
        await client.query("BEGIN");
        const { rows } = await client.query(createQuery, values);
        const dbResponse = rows[0];
        await client.query("COMMIT");
        return { status: import_http_status_codes.StatusCodes.CREATED, detail: dbResponse };
      } catch (error) {
        await client.query("ROLLBACK");
        let err = { status: import_http_status_codes.StatusCodes.INTERNAL_SERVER_ERROR, error: `[${fname}]: Operation was not successful`, detail: error };
        if (error.code === "23505") {
          err = { status: import_http_status_codes.StatusCodes.CONFLICT, error: `[${fname}: entityid [${entityid}] already exists`, detail: error };
        }
        debug2(`Error: [${JSON.stringify(err)}]`.red);
        return err;
      } finally {
        client.release();
      }
    } catch (error) {
      if (consoleLogErrors)
        console.error(`[${fname}]: Could not get client from pool: ${error}`);
      return { status: import_http_status_codes.StatusCodes.INTERNAL_SERVER_ERROR, error: `[${fname}]: Operation was not successful`, detail: error };
    }
  }
478
  // Update an entity (optimistic concurrency on both resource and entity
  // vnum) via update_stsentity(), in its own transaction.
  // Returns { status: OK, detail }; NOT_FOUND on P0002 (ids/vnums missing
  // or stale); INTERNAL_SERVER_ERROR otherwise.
  async updateEntity(userid2, resourceid, resourcevnum, entityid, entityvnum, payload) {
    const fname = "updateEntity";
    try {
      const createQuery = `select * from update_stsentity($1, $2, $3, $4, $5, $6)`;
      const values = [
        userid2,
        resourceid,
        resourcevnum,
        entityid,
        entityvnum,
        payload
      ];
      const client = await this.#pgpool?.connect();
      try {
        await client.query("BEGIN");
        const { rows } = await client.query(createQuery, values);
        const dbResponse = rows[0];
        await client.query("COMMIT");
        return { status: import_http_status_codes.StatusCodes.OK, detail: dbResponse };
      } catch (error) {
        await client.query("ROLLBACK");
        let err = { status: import_http_status_codes.StatusCodes.INTERNAL_SERVER_ERROR, error: `[${fname}]: Operation was not successful`, detail: error };
        if (error.code === "P0002") {
          err = { status: import_http_status_codes.StatusCodes.NOT_FOUND, error: `[${fname}: resourceid [${resourceid}:${resourcevnum}], entityid [${entityid}:${entityvnum}] not found or not current`, detail: error };
        }
        debug2(`Error: [${JSON.stringify(err)}]`.red);
        return err;
      } finally {
        client.release();
      }
    } catch (error) {
      if (consoleLogErrors)
        console.error(`[${fname}]: Could not get client from pool: ${error}`);
      return { status: import_http_status_codes.StatusCodes.INTERNAL_SERVER_ERROR, error: `[${fname}]: Operation was not successful`, detail: error };
    }
  }
514
  // Delete an entity version via delete_stsentity(), in its own
  // transaction.
  // Returns { status: OK, detail }; NOT_FOUND on P0002 (ids/vnums missing
  // or stale); INTERNAL_SERVER_ERROR otherwise.
  async deleteEntity(userid2, resourceid, resourcevnum, entityid, entityvnum) {
    const fname = "deleteEntity";
    try {
      const createQuery = `select * from delete_stsentity($1, $2, $3, $4, $5)`;
      const values = [
        userid2,
        resourceid,
        resourcevnum,
        entityid,
        entityvnum
      ];
      const client = await this.#pgpool?.connect();
      try {
        await client.query("BEGIN");
        const { rows } = await client.query(createQuery, values);
        const dbResponse = rows[0];
        await client.query("COMMIT");
        return { status: import_http_status_codes.StatusCodes.OK, detail: dbResponse };
      } catch (error) {
        await client.query("ROLLBACK");
        let err = { status: import_http_status_codes.StatusCodes.INTERNAL_SERVER_ERROR, error: `[${fname}]: Operation was not successful`, detail: error };
        if (error.code === "P0002") {
          err = { status: import_http_status_codes.StatusCodes.NOT_FOUND, error: `[${fname}: resourceid [${resourceid}:${resourcevnum}], entityid [${entityid}:${entityvnum}] not found or not current`, detail: error };
        }
        debug2(`Error: [${JSON.stringify(err)}]`.red);
        return err;
      } finally {
        client.release();
      }
    } catch (error) {
      if (consoleLogErrors)
        console.error(`[${fname}]: Could not get client from pool: ${error}`);
      return { status: import_http_status_codes.StatusCodes.INTERNAL_SERVER_ERROR, error: `[${fname}]: Operation was not successful`, detail: error };
    }
  }
549
+ // get single resource and all associated entities for this resource
550
+ async getResource(filters) {
551
+ const fname = "getResource";
552
+ try {
553
+ let query = "select r.oid::bigint, r.resname, r.resdesc, r.vnum::bigint, r.validfrom, r.validto, r.dbaction, r.dbactionuser from stsresource r";
554
+ let values = [];
555
+ const resourceid = filters.resname;
556
+ const qs = this.#buildQueryString(filters, "r");
557
+ if (qs !== null) {
558
+ query += qs.qstring;
559
+ values = qs.qparams;
560
+ }
561
+ if (this.#cache !== null) {
562
+ if (this.#latestResourceVersion(filters)) {
563
+ const cacheVal = await this.#cache.get(resourceid);
564
+ if (cacheVal !== null) {
565
+ const cacheResponse = JSON.parse(cacheVal);
566
+ return { status: import_http_status_codes.StatusCodes.OK, usedcache: true, detail: cacheResponse };
567
+ }
568
+ }
569
+ }
570
+ query += " limit 2";
571
+ const client = await this.#pgpool?.connect();
572
+ try {
573
+ const { rows } = await client.query(query, values);
574
+ if (rows.length === 0) {
575
+ return { status: import_http_status_codes.StatusCodes.NOT_FOUND, error: `[${fname}: resourceid [${resourceid}] (current version) not found`, detail: null };
576
+ }
577
+ if (rows.length > 1) {
578
+ return { status: import_http_status_codes.StatusCodes.NOT_FOUND, error: `[${fname}: Multiple resourceid [${resourceid}](current version) exist within database. Invalid database state.`, detail: null };
579
+ }
580
+ const dbResponse = rows[0];
581
+ if (this.#cache !== null) {
582
+ this.#cache.set(resourceid, JSON.stringify(dbResponse));
583
+ }
584
+ return { status: import_http_status_codes.StatusCodes.OK, usedcache: false, detail: dbResponse };
585
+ } catch (error) {
586
+ return { status: import_http_status_codes.StatusCodes.INTERNAL_SERVER_ERROR, error: `[${fname}]: Operation was not successful`, detail: error };
587
+ } finally {
588
+ client.release();
589
+ }
590
+ } catch (error) {
591
+ if (consoleLogErrors)
592
+ console.error(`[${fname}]: Could not get client from pool: ${error}`);
593
+ return { status: import_http_status_codes.StatusCodes.INTERNAL_SERVER_ERROR, error: `[${fname}]: Operation was not successful`, detail: error };
594
+ }
595
+ }
596
  // Fetch exactly one entity row (joined to its owning resource) matching
  // `filters` (see #buildQueryString, alias "e").
  // Returns { status: OK, detail }; NOT_FOUND for zero or ambiguous
  // matches; INTERNAL_SERVER_ERROR otherwise.
  async getEntity(filters) {
    const fname = "getEntity";
    try {
      let query = "select e.* from stsentity e join stsresource r on e.stsresourceoid = r.oid";
      let values = [];
      const qs = this.#buildQueryString(filters, "e");
      const resourceid = filters.resname;
      if (qs !== null) {
        query += qs.qstring;
        values = qs.qparams;
      }
      // limit 2 lets us detect (and reject) ambiguous matches cheaply.
      query += " limit 2";
      const client = await this.#pgpool?.connect();
      try {
        const { rows } = await client.query(query, values);
        if (rows.length === 0) {
          return { status: import_http_status_codes.StatusCodes.NOT_FOUND, error: `[${fname}: resourceid [${resourceid}] (current version) not found`, detail: null };
        }
        if (rows.length > 1) {
          return { status: import_http_status_codes.StatusCodes.NOT_FOUND, error: `[${fname}: Multiple resourceid [${resourceid}](current version) exist within database. Invalid database state.`, detail: null };
        }
        const dbResponse = rows[0];
        return { status: import_http_status_codes.StatusCodes.OK, detail: dbResponse };
      } catch (error) {
        return { status: import_http_status_codes.StatusCodes.INTERNAL_SERVER_ERROR, error: `[${fname}]: Operation was not successful`, detail: error };
      } finally {
        client.release();
      }
    } catch (error) {
      if (consoleLogErrors)
        console.error(`[${fname}]: Could not get client from pool: ${error}`);
      return { status: import_http_status_codes.StatusCodes.INTERNAL_SERVER_ERROR, error: `[${fname}]: Operation was not successful`, detail: error };
    }
  }
631
  // List entity rows (joined to their owning resource) matching `filters`.
  // NOTE(review): results are hard-capped at 10 rows with no paging —
  // confirm callers expect truncation.
  async getEntities(filters) {
    const fname = "getEntities";
    try {
      let query = `select * from stsentity e
                   join stsresource r on
                   e.stsresourceoid = r.oid`;
      let values = [];
      const qs = this.#buildQueryString(filters, "e");
      if (qs !== null) {
        query += qs.qstring;
        values = qs.qparams;
      }
      query += " limit 10";
      const client = await this.#pgpool?.connect();
      try {
        const { rows } = await client.query(query, values);
        const dbResponse = rows;
        return { status: import_http_status_codes.StatusCodes.OK, detail: dbResponse };
      } catch (error) {
        return { status: import_http_status_codes.StatusCodes.INTERNAL_SERVER_ERROR, error: `[${fname}]: Operation was not successful`, detail: error };
      } finally {
        client.release();
      }
    } catch (error) {
      if (consoleLogErrors)
        console.error(`[${fname}]: Could not get client from pool: ${error}`);
      return { status: import_http_status_codes.StatusCodes.INTERNAL_SERVER_ERROR, error: `[${fname}]: Operation was not successful`, detail: error };
    }
  }
661
  // List resource rows matching `filters`.
  // NOTE(review): results are hard-capped at 10 rows with no paging —
  // confirm callers expect truncation.
  async getResources(filters) {
    const fname = "getResources";
    try {
      let query = "select * from stsresource r";
      let values = [];
      const qs = this.#buildQueryString(filters, "r");
      if (qs !== null) {
        query += qs.qstring;
        values = qs.qparams;
      }
      query += " limit 10";
      const client = await this.#pgpool?.connect();
      try {
        const { rows } = await client.query(query, values);
        const dbResponse = rows;
        return { status: import_http_status_codes.StatusCodes.OK, detail: dbResponse };
      } catch (error) {
        return { status: import_http_status_codes.StatusCodes.INTERNAL_SERVER_ERROR, error: `[${fname}]: Operation was not successful`, detail: error };
      } finally {
        client.release();
      }
    } catch (error) {
      if (consoleLogErrors)
        console.error(`[${fname}]: Could not get client from pool: ${error}`);
      return { status: import_http_status_codes.StatusCodes.INTERNAL_SERVER_ERROR, error: `[${fname}]: Operation was not successful`, detail: error };
    }
  }
689
  // Hard-delete all resource rows whose resname matches the LIKE pattern
  // AND whose dbactionuser equals `actionuser`, in one transaction.
  // Note: this method does not invalidate the Redis cache for the deleted
  // names — NOTE(review): confirm whether stale cache entries matter here.
  async deleteResources(resname, actionuser) {
    const fname = "deleteResources";
    try {
      const client = await this.#pgpool?.connect();
      try {
        const query = "delete from stsresource where resname like $1 and dbactionuser = $2";
        const values = [resname, actionuser];
        await client.query("BEGIN");
        await client.query(query, values);
        await client.query("COMMIT");
        return { status: import_http_status_codes.StatusCodes.OK };
      } catch (error) {
        if (consoleLogErrors)
          console.error(`[${fname}]: Failed to delete resources: ${error}`);
        await client.query("ROLLBACK");
        return { status: import_http_status_codes.StatusCodes.INTERNAL_SERVER_ERROR, error: `[${fname}]: Operation was not successful`, detail: error };
      } finally {
        client.release();
      }
    } catch (error) {
      if (consoleLogErrors)
        console.error(`[${fname}]: Could not get client from pool: ${error}`);
      return { status: import_http_status_codes.StatusCodes.INTERNAL_SERVER_ERROR, error: `[${fname}]: Operation was not successful`, detail: error };
    }
  }
714
+ async deleteAllResources() {
715
+ const fname = "createResource";
716
+ try {
717
+ const client = await this.#pgpool?.connect();
718
+ try {
719
+ const query = "delete from stsresource";
720
+ await client.query("BEGIN");
721
+ await client.query(query);
722
+ await client.query("COMMIT");
723
+ return { status: import_http_status_codes.StatusCodes.OK };
724
+ } catch (error) {
725
+ if (consoleLogErrors)
726
+ console.error(`[${fname}]: Failed to truncate table(s): ${error}`);
727
+ await client.query("ROLLBACK");
728
+ return { status: import_http_status_codes.StatusCodes.INTERNAL_SERVER_ERROR, error: `[${fname}]: Operation was not successful`, detail: error };
729
+ } finally {
730
+ client.release();
731
+ }
732
+ } catch (error) {
733
+ if (consoleLogErrors)
734
+ console.error(`[${fname}]: Could not get client from pool: ${error}`);
735
+ return { status: import_http_status_codes.StatusCodes.INTERNAL_SERVER_ERROR, error: `[${fname}]: Operation was not successful`, detail: error };
736
+ }
737
+ }
738
+ async getTotalResources() {
739
+ const fname = "getTotalResources";
740
+ try {
741
+ const client = await this.#pgpool?.connect();
742
+ try {
743
+ const query = "select count(*) from stsresource";
744
+ const { rows } = await client.query(query);
745
+ const dbResponse = rows[0];
746
+ return { status: import_http_status_codes.StatusCodes.OK, detail: dbResponse };
747
+ } catch (error) {
748
+ if (consoleLogErrors)
749
+ console.error(`[${fname}]: Could not get count from stsresource table: ${error}`);
750
+ await client.query("ROLLBACK");
751
+ return { status: import_http_status_codes.StatusCodes.INTERNAL_SERVER_ERROR, error: `[${fname}]: Operation was not successful`, detail: error };
752
+ } finally {
753
+ client.release();
754
+ }
755
+ } catch (error) {
756
+ if (consoleLogErrors)
757
+ console.error(`[${fname}]: Could not get client from pool: ${error}`);
758
+ return { status: import_http_status_codes.StatusCodes.INTERNAL_SERVER_ERROR, error: `[${fname}]: Operation was not successful`, detail: error };
759
+ }
760
+ }
761
/**
 * Read a SQL script file from disk and execute its entire contents as one
 * multi-statement query inside a single transaction.
 *
 * Error-path ordering is deliberate:
 *  - query failures roll back, log UNCONDITIONALLY, and rethrow;
 *  - pool-connect failures log only when consoleLogErrors is set, then rethrow;
 *  - the outermost catch (covers readFile failures and everything rethrown
 *    above) logs the script name, gated on consoleLogErrors, then rethrows.
 *
 * @param scriptfile - path to the SQL script file.
 * @returns a completion message string on success.
 * @throws the underlying fs or query error on failure.
 */
async executedbscript(scriptfile) {
  try {
    const fname = "executedbscript";
    // The whole script is loaded up front and sent as one query text.
    const queryText = await fs.readFile(scriptfile, "utf-8");
    try {
      const client = await this.#pgpool?.connect();
      try {
        await client.query("BEGIN");
        await client.query(queryText);
        await client.query("COMMIT");
        return `Completed executing file: ${scriptfile}`;
      } catch (e) {
        // Undo any partially applied statements before propagating.
        await client.query("ROLLBACK");
        console.error(`Error executing script: [${scriptfile}]`);
        console.error(e);
        throw e;
      } finally {
        client.release();
      }
    } catch (error) {
      if (consoleLogErrors)
        console.error(`[${fname}]: Could not get client from pool: ${error}`);
      throw error;
    }
  } catch (error) {
    if (consoleLogErrors)
      console.error(`Error executing script: [${scriptfile}]`);
    throw error;
  }
}
+ // Method only used for testing. Ensure that if redis is being used that the cache is always flushed.
792
+ async startdatabase() {
793
+ if (this.#cache !== null) {
794
+ await this.#cache.flushall();
795
+ }
796
+ }
797
+ async enddatabase() {
798
+ debug2(`Ending database for process PID: ${process.pid}`);
799
+ await this.#pgpool?.end();
800
+ if (this.#cache !== null) {
801
+ if (endFlush === true) {
802
+ await this.#cache.flushall();
803
+ debug2(`Redis cache flushed.`.magenta);
804
+ }
805
+ await this.#cache.quit();
806
+ this.#cache = null;
807
+ }
808
+ this.#pgpool = null;
809
+ this.#poolManager?.DetachInstruments();
810
+ this.#poolManager = null;
811
+ }
812
+ bulkInsert(fakerdata) {
813
+ return new Promise((resolve, reject) => {
814
+ this.#pgpool?.connect().then((client) => {
815
+ const done = () => {
816
+ client.release();
817
+ resolve("Completed");
818
+ };
819
+ const stream = client.query(copyFrom("COPY stsresource (resname,resdesc,vnum,validfrom,dbaction,dbactionuser) FROM STDIN"));
820
+ const rs = new streamex.Readable();
821
+ let currentIndex = 0;
822
+ rs._read = function() {
823
+ if (currentIndex === fakerdata.length) {
824
+ rs.push(null);
825
+ } else {
826
+ const data = fakerdata[currentIndex];
827
+ rs.push(
828
+ //data.oid
829
+ data.resname + " " + data.resdesc + " " + data.vnum + " " + data.validfrom + " " + data.dbaction + " " + data.dbactionuser + "\n"
830
+ );
831
+ currentIndex++;
832
+ }
833
+ };
834
+ const onError = (strErr) => {
835
+ client.release();
836
+ reject(strErr);
837
+ };
838
+ rs.on("error", onError);
839
+ stream.on("error", onError);
840
+ stream.on("finish", done);
841
+ rs.pipe(stream);
842
+ }).catch((error) => {
843
+ console.log(error);
844
+ });
845
+ });
846
+ }
847
+ };
848
+
849
// src/cliworker-old.ts

// Value written to the dbactionuser column for every generated row.
var userid = "dbinitrunner";
// resname prefixes distinguishing primary faker rows from their copies.
var resfakerprefix = "RESFK-";
var rescopyprefix = "RESCP-";
var CliWorker = class {
  // Pending rows for the primary (RESFK-) and copy (RESCP-) streams;
  // the two arrays are always pushed to in lockstep.
  fakerdataarray = [];
  fakerdataarray2 = [];
  // Running totals tracking the average generated payload size.
  totalDataSize = 0;
  avgDataSize = 0;
  // Rows accumulated before each pair of bulkInsert COPY batches.
  copybatchsize = 250;
  /**
   * Generate `iterations` pairs of fake resource rows (original + copy)
   * numbered from `start`, appending extra random padding sized between
   * datamin and datamax when datamax > 0, and bulk-insert them in batches
   * of copybatchsize.
   */
  run = async (data) => {
    const { start, iterations, datamin } = data.data;
    let { datamax } = data.data;
    let count = start;
    // NOTE(review): this access layer's pool is never explicitly ended
    // here — presumably worker shutdown cleans it up; confirm.
    const accesslayer = new PGAccessLayer(new PGPoolManager());
    if (datamax > 0) {
      // Keep the random range valid: max must strictly exceed min.
      datamax = datamax <= datamin ? datamin + 1 : datamax;
    }
    for (let i = 0; i < iterations; i++) {
      // Core payload: randomInt(128,192)/2 random bytes, hex-encoded.
      const coreBytes = Math.floor(import_crypto.default.randomInt(128, 192) / 2);
      let payload = import_crypto.default.randomBytes(coreBytes).toString("hex");
      if (datamax > 0) {
        // Optional extra padding sized within [datamin, datamax).
        const extraBytes = Math.floor(import_crypto.default.randomInt(datamin, datamax) / 2);
        payload += import_crypto.default.randomBytes(extraBytes).toString("hex");
      }
      this.totalDataSize += payload.length;
      this.avgDataSize = Math.floor(this.totalDataSize / (i + 1));
      const seq = ("" + count++).padStart(10, "0");
      const nowIso = (/* @__PURE__ */ new Date()).toISOString();
      const record = {
        //oid: i
        resname: resfakerprefix + seq,
        resdesc: payload,
        vnum: 1,
        validfrom: nowIso,
        validto: null,
        dbaction: 1,
        dbactionuser: userid
      };
      this.fakerdataarray.push(record);
      // Copy row: identical payload under the RESCP- prefix.
      const copyRecord = Object.assign({}, record, { resname: rescopyprefix + seq });
      this.fakerdataarray2.push(copyRecord);
      if ((i + 1) % this.copybatchsize === 0) {
        await accesslayer.bulkInsert(this.fakerdataarray);
        await accesslayer.bulkInsert(this.fakerdataarray2);
        this.fakerdataarray = [];
        this.fakerdataarray2 = [];
      }
    }
    // Flush any remainder smaller than a full batch (arrays stay in
    // lockstep, so checking one length covers both).
    if (this.fakerdataarray.length > 0) {
      await accesslayer.bulkInsert(this.fakerdataarray);
      await accesslayer.bulkInsert(this.fakerdataarray2);
    }
  };
};
var worker = new CliWorker();
// Worker message entry point.
// BUG FIX: worker.run(data) returned a promise that was neither awaited nor
// error-handled, so any failure became a silent unhandled rejection. It is
// now awaited and failures are logged.
onmessage = async function(data) {
  try {
    await worker.run(data);
  } catch (error) {
    console.error(`[onmessage]: worker run failed: ${error}`);
  }
};