@cumulus/db 14.1.0 → 15.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/config.js CHANGED
@@ -16,7 +16,6 @@ exports.localStackConnectionEnv = {
 const isKnexDebugEnabled = (env = {}) => env.KNEX_DEBUG === 'true';
 exports.isKnexDebugEnabled = isKnexDebugEnabled;
 const getSecretConnectionConfig = async (SecretId, secretsManager) => {
-    var _a;
     const response = await secretsManager.getSecretValue({ SecretId }).promise();
     if (response.SecretString === undefined) {
         throw new Error(`AWS Secret did not contain a stored value: ${SecretId}`);
@@ -32,20 +31,17 @@ const getSecretConnectionConfig = async (SecretId, secretsManager) => {
         user: dbAccessMeta.username,
         password: dbAccessMeta.password,
         database: dbAccessMeta.database,
-        port: (_a = dbAccessMeta.port) !== null && _a !== void 0 ? _a : 5432,
+        port: dbAccessMeta.port ?? 5432,
     };
 };
 exports.getSecretConnectionConfig = getSecretConnectionConfig;
-const getConnectionConfigEnv = (env) => {
-    var _a;
-    return ({
-        host: common_1.envUtils.getRequiredEnvVar('PG_HOST', env),
-        user: common_1.envUtils.getRequiredEnvVar('PG_USER', env),
-        password: common_1.envUtils.getRequiredEnvVar('PG_PASSWORD', env),
-        database: common_1.envUtils.getRequiredEnvVar('PG_DATABASE', env),
-        port: Number.parseInt((_a = env.PG_PORT) !== null && _a !== void 0 ? _a : '5432', 10),
-    });
-};
+const getConnectionConfigEnv = (env) => ({
+    host: common_1.envUtils.getRequiredEnvVar('PG_HOST', env),
+    user: common_1.envUtils.getRequiredEnvVar('PG_USER', env),
+    password: common_1.envUtils.getRequiredEnvVar('PG_PASSWORD', env),
+    database: common_1.envUtils.getRequiredEnvVar('PG_DATABASE', env),
+    port: Number.parseInt(env.PG_PORT ?? '5432', 10),
+});
 exports.getConnectionConfigEnv = getConnectionConfigEnv;
 /**
  * Return configuration to make a database connection.
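The removed `var _a` helper and the verbose `!== null && !== void 0` fallbacks are what TypeScript emits when nullish coalescing is downleveled; the compiled output now uses the native `??` operator directly. A minimal sketch of how the environment-based port default behaves (`parsePort` is an illustrative name, not package code):

```ts
// Unlike `||`, nullish coalescing only falls back when the value is null or undefined.
const parsePort = (env: { PG_PORT?: string }): number =>
  Number.parseInt(env.PG_PORT ?? '5432', 10);

console.log(parsePort({}));                  // 5432 (PG_PORT unset)
console.log(parsePort({ PG_PORT: '5433' })); // 5433
```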
@@ -108,7 +104,6 @@ exports.getConnectionConfig = getConnectionConfig;
  * @returns {Promise<Knex.Config>} a Knex configuration object
  */
 const getKnexConfig = async ({ env = process.env, secretsManager = new aws_sdk_1.default.SecretsManager(), } = {}) => {
-    var _a, _b, _c, _d, _e, _f, _g;
     const knexConfig = {
         client: 'pg',
         connection: await (0, exports.getConnectionConfig)({ env, secretsManager }),
@@ -116,16 +111,16 @@ const getKnexConfig = async ({ env = process.env, secretsManager = new aws_sdk_1
         asyncStackTraces: env.KNEX_ASYNC_STACK_TRACES === 'true',
         pool: {
             min: 0,
-            max: Number.parseInt((_a = env.dbMaxPool) !== null && _a !== void 0 ? _a : '2', 10),
-            idleTimeoutMillis: Number.parseInt((_b = env.idleTimeoutMillis) !== null && _b !== void 0 ? _b : '1000', 10),
+            max: Number.parseInt(env.dbMaxPool ?? '2', 10),
+            idleTimeoutMillis: Number.parseInt(env.idleTimeoutMillis ?? '1000', 10),
             // ts-ignore as https://github.com/knex/knex/blob/master/types/index.d.ts#L1886
             // is improperly typed.
             //@ts-ignore
-            acquireTimeoutMillis: Number.parseInt((_c = env.acquireTimeoutMillis) !== null && _c !== void 0 ? _c : '90000', 10),
-            createRetryIntervalMillis: Number.parseInt((_d = env.createRetryIntervalMillis) !== null && _d !== void 0 ? _d : '30000', 10),
-            createTimeoutMillis: Number.parseInt((_e = env.createTimeoutMillis) !== null && _e !== void 0 ? _e : '20000', 10),
-            destroyTimeoutMillis: Number.parseInt((_f = env.destroyTimeoutMillis) !== null && _f !== void 0 ? _f : '5000', 10),
-            reapIntervalMillis: Number.parseInt((_g = env.reapIntervalMillis) !== null && _g !== void 0 ? _g : '1000', 10),
+            acquireTimeoutMillis: Number.parseInt(env.acquireTimeoutMillis ?? '90000', 10),
+            createRetryIntervalMillis: Number.parseInt(env.createRetryIntervalMillis ?? '30000', 10),
+            createTimeoutMillis: Number.parseInt(env.createTimeoutMillis ?? '20000', 10),
+            destroyTimeoutMillis: Number.parseInt(env.destroyTimeoutMillis ?? '5000', 10),
+            reapIntervalMillis: Number.parseInt(env.reapIntervalMillis ?? '1000', 10),
             propagateCreateError: false,
         },
     };
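The pool tuning values remain environment-driven; only the fallback expressions changed. A hedged usage sketch follows (the import path and env override are illustrative, and the PG_* connection variables are assumed to be present):

```ts
// Illustrative sketch: building a Knex config with one pool override.
// Assumes PG_HOST/PG_USER/PG_PASSWORD/PG_DATABASE are set in process.env.
import { getKnexConfig } from '@cumulus/db/dist/config';

const buildConfig = async () => {
  const knexConfig = await getKnexConfig({
    env: { ...process.env, dbMaxPool: '10' }, // pool.max becomes 10 instead of the default 2
  });
  // Untouched settings keep their defaults, e.g. knexConfig.pool.idleTimeoutMillis === 1000
  return knexConfig;
};
```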
@@ -87,7 +87,7 @@ exports.executionArnsFromGranuleIdsAndWorkflowNames = executionArnsFromGranuleId
  */
 const newestExecutionArnFromGranuleIdWorkflowName = async (granuleId, workflowName, testKnex) => {
     try {
-        const knex = testKnex !== null && testKnex !== void 0 ? testKnex : await getKnexClient({ env: process.env });
+        const knex = testKnex ?? await getKnexClient({ env: process.env });
         const executions = await (0, exports.executionArnsFromGranuleIdsAndWorkflowNames)(knex, [granuleId], [workflowName]);
         if (executions.length === 0) {
             throw new errors_1.RecordDoesNotExist(`No executionArns found for granuleId:${granuleId} running workflow:${workflowName}`);
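A hedged sketch of the optional-client pattern in this hunk (the import paths are illustrative and not verified against the package's public exports): callers can inject a Knex client, typically in tests, or omit the argument and let the function build one from `process.env`.

```ts
// Illustrative only: the third argument is optional; when omitted, the function
// falls back to getKnexClient({ env: process.env }) via the new `??`.
import { getKnexClient } from '@cumulus/db';
import { newestExecutionArnFromGranuleIdWorkflowName } from '@cumulus/db/dist/lib/execution';

const latestArn = async (granuleId: string, workflowName: string) => {
  const injectedKnex = await getKnexClient({ env: process.env }); // e.g. a test client
  return newestExecutionArnFromGranuleIdWorkflowName(granuleId, workflowName, injectedKnex);
};
```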
@@ -17,12 +17,11 @@ class BasePgModel {
      */
     async searchWithUpdatedAtRange(knexOrTransaction, params, updatedAtParams) {
         const records = await knexOrTransaction(this.tableName).where((builder) => {
-            var _a, _b;
             builder.where(params);
             if (updatedAtParams.updatedAtFrom || updatedAtParams.updatedAtTo) {
                 builder.whereBetween('updated_at', [
-                    (_a = updatedAtParams === null || updatedAtParams === void 0 ? void 0 : updatedAtParams.updatedAtFrom) !== null && _a !== void 0 ? _a : new Date(0),
-                    (_b = updatedAtParams === null || updatedAtParams === void 0 ? void 0 : updatedAtParams.updatedAtTo) !== null && _b !== void 0 ? _b : new Date(),
+                    updatedAtParams?.updatedAtFrom ?? new Date(0),
+                    updatedAtParams?.updatedAtTo ?? new Date(),
                 ]);
             }
         });
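The defaults are unchanged: a missing `updatedAtFrom` falls back to the epoch and a missing `updatedAtTo` to the current time, and the `whereBetween` clause is skipped when neither bound is given. A hedged call sketch (model choice, filter fields, and client setup are illustrative):

```ts
// Illustrative only: search completed granules updated in the last 24 hours;
// the upper bound of the range defaults to new Date().
import { GranulePgModel, getKnexClient } from '@cumulus/db';

const recentGranules = async () => {
  const knex = await getKnexClient({ env: process.env });
  const model = new GranulePgModel();
  return model.searchWithUpdatedAtRange(
    knex,
    { status: 'completed' },                                    // plain equality filters
    { updatedAtFrom: new Date(Date.now() - 24 * 3600 * 1000) }  // updatedAtTo defaults to now
  );
};
```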
@@ -3,7 +3,7 @@ import { BasePgModel } from './base';
 import { PostgresCollection, PostgresCollectionRecord } from '../types/collection';
 declare class CollectionPgModel extends BasePgModel<PostgresCollection, PostgresCollectionRecord> {
     constructor();
-    create(knexOrTransaction: Knex | Knex.Transaction, item: PostgresCollection): Promise<Object[] | unknown[]>;
+    create(knexOrTransaction: Knex | Knex.Transaction, item: PostgresCollection): Promise<unknown[] | Object[]>;
     upsert(knexOrTransaction: Knex | Knex.Transaction, collection: PostgresCollection): Knex.QueryBuilder<any, {
         _base: any;
         _hasSelection: false;
@@ -4,7 +4,7 @@ import { PostgresExecution, PostgresExecutionRecord } from '../types/execution';
 declare class ExecutionPgModel extends BasePgModel<PostgresExecution, PostgresExecutionRecord> {
     constructor();
     static nonActiveStatuses: string[];
-    create(knexOrTransaction: Knex | Knex.Transaction, item: PostgresExecution): Promise<Object[] | unknown[]>;
+    create(knexOrTransaction: Knex | Knex.Transaction, item: PostgresExecution): Promise<unknown[] | Object[]>;
     upsert(knexOrTrx: Knex | Knex.Transaction, execution: PostgresExecution): Promise<any[]>;
     /**
      * Get executions from the execution cumulus_id
@@ -7,7 +7,7 @@ interface RecordSelect {
 }
 export default class GranulePgModel extends BasePgModel<PostgresGranule, PostgresGranuleRecord> {
     constructor();
-    create(knexOrTransaction: Knex | Knex.Transaction, item: PostgresGranule): Promise<Object[] | unknown[]>;
+    create(knexOrTransaction: Knex | Knex.Transaction, item: PostgresGranule): Promise<unknown[] | Object[]>;
     /**
      * Deletes the item from Postgres
      *
@@ -12,7 +12,6 @@ const execution_1 = require("../models/execution");
 const collection_1 = require("../models/collection");
 const async_operation_1 = require("../models/async_operation");
 const translatePostgresExecutionToApiExecution = async (executionRecord, knex, collectionPgModel = new collection_1.CollectionPgModel(), asyncOperationPgModel = new async_operation_1.AsyncOperationPgModel(), executionPgModel = new execution_1.ExecutionPgModel()) => {
-    var _a;
     let parentArn;
     let collectionId;
     let asyncOperationId;
@@ -55,7 +54,7 @@ const translatePostgresExecutionToApiExecution = async (executionRecord, knex, c
         parentArn,
         createdAt: executionRecord.created_at.getTime(),
         updatedAt: executionRecord.updated_at.getTime(),
-        timestamp: (_a = executionRecord.timestamp) === null || _a === void 0 ? void 0 : _a.getTime(),
+        timestamp: executionRecord.timestamp?.getTime(),
     };
     return (0, util_1.removeNilProperties)(translatedRecord);
 };
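The `var _a` declarations removed throughout these hunks are TypeScript's downlevel helpers for optional chaining; their disappearance indicates the compiled output now relies on native ES2020 syntax, consistent with the Node engines bump in package.json below. A minimal equivalence sketch, not taken from the package:

```ts
// Both forms yield the same result: epoch milliseconds for a Date, undefined otherwise.
const toEpoch = (timestamp: Date | null | undefined): number | undefined =>
  timestamp?.getTime();
// Pre-15.x downleveled equivalent (removed in this diff):
//   var _a;
//   (_a = timestamp) === null || _a === void 0 ? void 0 : _a.getTime();

toEpoch(new Date()); // e.g. 1700000000000
toEpoch(null);       // undefined, which removeNilProperties later strips
```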
@@ -25,7 +25,7 @@ import { PostgresProviderRecord } from '../types/provider';
 export declare const translatePostgresGranuleToApiGranule: ({ granulePgRecord, collectionPgRecord, knexOrTransaction, providerPgRecord, collectionPgModel, pdrPgModel, providerPgModel, filePgModel, }: {
     granulePgRecord: PostgresGranuleRecord;
     knexOrTransaction: Knex | Knex.Transaction;
-    collectionPgRecord?: Pick<PostgresCollectionRecord, "name" | "version" | "cumulus_id"> | undefined;
+    collectionPgRecord?: Pick<PostgresCollectionRecord, "version" | "name" | "cumulus_id"> | undefined;
     providerPgRecord?: Pick<PostgresProviderRecord, "name"> | undefined;
     collectionPgModel?: CollectionPgModel | undefined;
     pdrPgModel?: PdrPgModel | undefined;
@@ -30,7 +30,6 @@ const file_2 = require("./file");
  * @returns {Object} An API Granule with associated Files
  */
 const translatePostgresGranuleToApiGranule = async ({ granulePgRecord, collectionPgRecord, knexOrTransaction, providerPgRecord, collectionPgModel = new collection_1.CollectionPgModel(), pdrPgModel = new pdr_1.PdrPgModel(), providerPgModel = new provider_1.ProviderPgModel(), filePgModel = new file_1.FilePgModel(), }) => {
-    var _a, _b, _c, _d, _e, _f, _g, _h, _j;
     const collection = collectionPgRecord || await collectionPgModel.get(knexOrTransaction, { cumulus_id: granulePgRecord.collection_cumulus_id });
     if (granulePgRecord.collection_cumulus_id !== collection.cumulus_id) {
         throw new errors_1.ValidationError(`Input collection.cumulus_id: ${collection.cumulus_id} does not match the granule PG record collection_cumulus_id: ${granulePgRecord.collection_cumulus_id}`);
@@ -54,30 +53,30 @@ const translatePostgresGranuleToApiGranule = async ({ granulePgRecord, collectio
         provider = await providerPgModel.get(knexOrTransaction, { cumulus_id: granulePgRecord.provider_cumulus_id });
     }
     const apiGranule = (0, util_1.removeNilProperties)({
-        beginningDateTime: (_a = granulePgRecord.beginning_date_time) === null || _a === void 0 ? void 0 : _a.toISOString(),
+        beginningDateTime: granulePgRecord.beginning_date_time?.toISOString(),
         cmrLink: granulePgRecord.cmr_link,
         collectionId: (0, Collections_1.constructCollectionId)(collection.name, collection.version),
-        createdAt: (_b = granulePgRecord.created_at) === null || _b === void 0 ? void 0 : _b.getTime(),
+        createdAt: granulePgRecord.created_at?.getTime(),
         duration: granulePgRecord.duration,
-        endingDateTime: (_c = granulePgRecord.ending_date_time) === null || _c === void 0 ? void 0 : _c.toISOString(),
+        endingDateTime: granulePgRecord.ending_date_time?.toISOString(),
         error: granulePgRecord.error,
         execution: executionUrls[0] ? executionUrls[0].url : undefined,
         files: files.length > 0 ? files.map((file) => (0, file_2.translatePostgresFileToApiFile)(file)) : [],
         granuleId: granulePgRecord.granule_id,
-        lastUpdateDateTime: (_d = granulePgRecord.last_update_date_time) === null || _d === void 0 ? void 0 : _d.toISOString(),
+        lastUpdateDateTime: granulePgRecord.last_update_date_time?.toISOString(),
         pdrName: pdr ? pdr.name : undefined,
-        processingEndDateTime: (_e = granulePgRecord.processing_end_date_time) === null || _e === void 0 ? void 0 : _e.toISOString(),
-        processingStartDateTime: (_f = granulePgRecord.processing_start_date_time) === null || _f === void 0 ? void 0 : _f.toISOString(),
-        productionDateTime: (_g = granulePgRecord.production_date_time) === null || _g === void 0 ? void 0 : _g.toISOString(),
+        processingEndDateTime: granulePgRecord.processing_end_date_time?.toISOString(),
+        processingStartDateTime: granulePgRecord.processing_start_date_time?.toISOString(),
+        productionDateTime: granulePgRecord.production_date_time?.toISOString(),
         productVolume: granulePgRecord.product_volume,
         provider: provider ? provider.name : undefined,
         published: granulePgRecord.published,
         queryFields: granulePgRecord.query_fields,
         status: granulePgRecord.status,
-        timestamp: (_h = granulePgRecord.timestamp) === null || _h === void 0 ? void 0 : _h.getTime(),
+        timestamp: granulePgRecord.timestamp?.getTime(),
         timeToArchive: granulePgRecord.time_to_archive,
         timeToPreprocess: granulePgRecord.time_to_process,
-        updatedAt: (_j = granulePgRecord.updated_at) === null || _j === void 0 ? void 0 : _j.getTime(),
+        updatedAt: granulePgRecord.updated_at?.getTime(),
     });
     return apiGranule;
 };
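Because every optional date column now goes through `?.`, a null value yields `undefined`, which `removeNilProperties` then drops from the API granule. A minimal sketch of that interaction; the helper below is a stand-in with the same semantics as the one imported via `util_1` (the real helper comes from @cumulus/common):

```ts
// Stand-in for removeNilProperties: drop keys whose values are null or undefined.
const removeNilProperties = (obj: Record<string, unknown>) =>
  Object.fromEntries(
    Object.entries(obj).filter(([, value]) => value !== null && value !== undefined)
  );

const granulePgRecord = { granule_id: 'g-1', timestamp: null as Date | null };

const apiGranule = removeNilProperties({
  granuleId: granulePgRecord.granule_id,
  timestamp: granulePgRecord.timestamp?.getTime(), // undefined: the column is null
});
// => { granuleId: 'g-1' } — the timestamp key is omitted entirely
```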
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@cumulus/db",
-  "version": "14.1.0",
+  "version": "15.0.1",
   "description": "Utilities for working with the Cumulus DB",
   "license": "Apache-2.0",
   "main": "./dist/index.js",
@@ -26,15 +26,15 @@
     "timeout": "5m"
   },
   "engines": {
-    "node": ">=14.19.1"
+    "node": ">=16.19.0"
   },
   "dependencies": {
-    "@cumulus/aws-client": "14.1.0",
-    "@cumulus/common": "14.1.0",
-    "@cumulus/errors": "14.1.0",
-    "@cumulus/logger": "14.1.0",
-    "@cumulus/message": "14.1.0",
-    "@cumulus/types": "14.1.0",
+    "@cumulus/aws-client": "15.0.1",
+    "@cumulus/common": "15.0.1",
+    "@cumulus/errors": "15.0.1",
+    "@cumulus/logger": "15.0.1",
+    "@cumulus/message": "15.0.1",
+    "@cumulus/types": "15.0.1",
     "crypto-random-string": "^3.2.0",
     "is-valid-hostname": "1.0.2",
     "knex": "2.4.1",
@@ -46,5 +46,5 @@
   "devDependencies": {
     "@types/uuid": "^8.0.0"
   },
-  "gitHead": "d97b5b37913944c0f0ecf958f2a567ec3714816c"
+  "gitHead": "fb7304717073c7be4dd8f40342e038bcab6f751c"
 }
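The engines field now requires Node >=16.19.0, matching the native `??`/`?.` output in the dist files above. A consumer-side guard like the following is purely illustrative (not part of the package) but shows how to fail fast on an older runtime:

```ts
// Illustrative only: reject runtimes older than the engines requirement.
const [major, minor] = process.versions.node.split('.').map(Number);

if (major < 16 || (major === 16 && minor < 19)) {
  throw new Error(
    `@cumulus/db 15.x requires Node >=16.19.0; running ${process.versions.node}`
  );
}
```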