@cumulus/db 20.2.1 → 21.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.d.ts CHANGED
@@ -29,7 +29,7 @@ export { translateApiPdrToPostgresPdr, translatePostgresPdrToApiPdr, } from './t
  export { translateApiReconReportToPostgresReconReport, translatePostgresReconReportToApiReconReport, } from './translate/reconciliation_reports';
  export { getCollectionsByGranuleIds, getUniqueCollectionsByGranuleFilter, } from './lib/collection';
  export { batchDeleteExecutionFromDatabaseByCumulusCollectionId, executionArnsFromGranuleIdsAndWorkflowNames, getApiExecutionCumulusIds, getApiGranuleExecutionCumulusIdsByExecution, getExecutionInfoByGranuleCumulusId, getWorkflowNameIntersectFromGranuleIds, newestExecutionArnFromGranuleIdWorkflowName, } from './lib/execution';
- export { getFilesAndGranuleInfoQuery, } from './lib/file';
+ export { getFilesAndGranuleInfoQuery, getGranuleIdAndCollectionIdFromFile, } from './lib/file';
  export { getApiGranuleCumulusIds, getApiGranuleExecutionCumulusIds, getGranuleCollectionId, getUniqueGranuleByGranuleId, getGranuleByUniqueColumns, upsertGranuleWithExecutionJoinRecord, getGranulesByApiPropertiesQuery, getGranulesByGranuleId, getGranuleAndCollection, updateBatchGranulesCollection, } from './lib/granule';
  export { QuerySearchClient, } from './lib/QuerySearchClient';
  export { AsyncOperationSearch, } from './search/AsyncOperationSearch';
package/dist/index.js CHANGED
@@ -24,7 +24,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
  };
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.getApiExecutionCumulusIds = exports.executionArnsFromGranuleIdsAndWorkflowNames = exports.batchDeleteExecutionFromDatabaseByCumulusCollectionId = exports.getUniqueCollectionsByGranuleFilter = exports.getCollectionsByGranuleIds = exports.translatePostgresReconReportToApiReconReport = exports.translateApiReconReportToPostgresReconReport = exports.translatePostgresPdrToApiPdr = exports.translateApiPdrToPostgresPdr = exports.translatePostgresGranuleResultToApiGranule = exports.translatePostgresGranuleToApiGranule = exports.translateApiGranuleToPostgresGranuleWithoutNilsRemoved = exports.translateApiGranuleToPostgresGranule = exports.translatePostgresExecutionToApiExecution = exports.translateApiExecutionToPostgresExecutionWithoutNilsRemoved = exports.translateApiExecutionToPostgresExecution = exports.translateApiRuleToPostgresRuleRaw = exports.translateApiRuleToPostgresRule = exports.translatePostgresRuleToApiRule = exports.translatePostgresProviderToApiProvider = exports.translateApiProviderToPostgresProvider = exports.translatePostgresCollectionToApiCollection = exports.translateApiCollectionToPostgresCollection = exports.translatePostgresFileToApiFile = exports.translateApiFiletoPostgresFile = exports.translatePostgresAsyncOperationToApiAsyncOperation = exports.translateApiAsyncOperationToPostgresAsyncOperation = exports.nullifyUndefinedProviderValues = exports.validateProviderHost = exports.migrationDir = exports.TableNames = exports.createRejectableTransaction = exports.localStackConnectionEnv = exports.getKnexConfig = exports.getKnexClient = exports.isCollisionError = exports.generateLocalTestDb = exports.fakeRuleRecordFactory = exports.fakeReconciliationReportRecordFactory = exports.fakeProviderRecordFactory = exports.fakePdrRecordFactory = exports.fakeGranuleRecordFactory = exports.fakeFileRecordFactory = exports.fakeExecutionRecordFactory = exports.fakeCollectionRecordFactory = exports.fakeAsyncOperationRecordFactory = exports.destroyLocalTestDb = exports.deleteTestDatabase = exports.createTestDatabase = exports.Knex = void 0;
- exports.RulePgModel = exports.ReconciliationReportPgModel = exports.ProviderPgModel = exports.PdrPgModel = exports.GranulesExecutionsPgModel = exports.GranulePgModel = exports.FilePgModel = exports.ExecutionPgModel = exports.CollectionPgModel = exports.BasePgModel = exports.AsyncOperationPgModel = exports.ReconciliationReportSearch = exports.StatsSearch = exports.RuleSearch = exports.ProviderSearch = exports.PdrSearch = exports.GranuleSearch = exports.ExecutionSearch = exports.CollectionSearch = exports.AsyncOperationSearch = exports.QuerySearchClient = exports.updateBatchGranulesCollection = exports.getGranuleAndCollection = exports.getGranulesByGranuleId = exports.getGranulesByApiPropertiesQuery = exports.upsertGranuleWithExecutionJoinRecord = exports.getGranuleByUniqueColumns = exports.getUniqueGranuleByGranuleId = exports.getGranuleCollectionId = exports.getApiGranuleExecutionCumulusIds = exports.getApiGranuleCumulusIds = exports.getFilesAndGranuleInfoQuery = exports.newestExecutionArnFromGranuleIdWorkflowName = exports.getWorkflowNameIntersectFromGranuleIds = exports.getExecutionInfoByGranuleCumulusId = exports.getApiGranuleExecutionCumulusIdsByExecution = void 0;
+ exports.RulePgModel = exports.ReconciliationReportPgModel = exports.ProviderPgModel = exports.PdrPgModel = exports.GranulesExecutionsPgModel = exports.GranulePgModel = exports.FilePgModel = exports.ExecutionPgModel = exports.CollectionPgModel = exports.BasePgModel = exports.AsyncOperationPgModel = exports.ReconciliationReportSearch = exports.StatsSearch = exports.RuleSearch = exports.ProviderSearch = exports.PdrSearch = exports.GranuleSearch = exports.ExecutionSearch = exports.CollectionSearch = exports.AsyncOperationSearch = exports.QuerySearchClient = exports.updateBatchGranulesCollection = exports.getGranuleAndCollection = exports.getGranulesByGranuleId = exports.getGranulesByApiPropertiesQuery = exports.upsertGranuleWithExecutionJoinRecord = exports.getGranuleByUniqueColumns = exports.getUniqueGranuleByGranuleId = exports.getGranuleCollectionId = exports.getApiGranuleExecutionCumulusIds = exports.getApiGranuleCumulusIds = exports.getGranuleIdAndCollectionIdFromFile = exports.getFilesAndGranuleInfoQuery = exports.newestExecutionArnFromGranuleIdWorkflowName = exports.getWorkflowNameIntersectFromGranuleIds = exports.getExecutionInfoByGranuleCumulusId = exports.getApiGranuleExecutionCumulusIdsByExecution = void 0;
  const path = __importStar(require("path"));
  var knex_1 = require("knex");
  Object.defineProperty(exports, "Knex", { enumerable: true, get: function () { return knex_1.Knex; } });
@@ -101,6 +101,7 @@ Object.defineProperty(exports, "getWorkflowNameIntersectFromGranuleIds", { enume
  Object.defineProperty(exports, "newestExecutionArnFromGranuleIdWorkflowName", { enumerable: true, get: function () { return execution_1.newestExecutionArnFromGranuleIdWorkflowName; } });
  var file_2 = require("./lib/file");
  Object.defineProperty(exports, "getFilesAndGranuleInfoQuery", { enumerable: true, get: function () { return file_2.getFilesAndGranuleInfoQuery; } });
+ Object.defineProperty(exports, "getGranuleIdAndCollectionIdFromFile", { enumerable: true, get: function () { return file_2.getGranuleIdAndCollectionIdFromFile; } });
  var granule_1 = require("./lib/granule");
  Object.defineProperty(exports, "getApiGranuleCumulusIds", { enumerable: true, get: function () { return granule_1.getApiGranuleCumulusIds; } });
  Object.defineProperty(exports, "getApiGranuleExecutionCumulusIds", { enumerable: true, get: function () { return granule_1.getApiGranuleExecutionCumulusIds; } });
@@ -1,6 +1,31 @@
  import { Knex } from 'knex';
  import { PostgresFileRecord } from '../types/file';
  import { PostgresGranuleRecord } from '../types/granule';
+ /**
+ * Retrieves the granule ID, collection name, and collection version associated
+ * with a specific file by joining data across the files, granules, and collections tables.
+ *
+ * @param {Object} params - The parameters for the query.
+ * @param {Knex} params.knex - The Knex client object for database interaction.
+ * @param {string} params.bucket - The S3 bucket of the file.
+ * @param {string} params.key - The S3 key (path) of the file.
+ * @returns {Knex.QueryBuilder} A Knex query builder object that, when executed,
+ * will return the granule_cumulus_id, collection_name, and collection_version
+ * for the specified file.
+ */
+ export declare const getGranuleIdAndCollectionIdFromFile: ({ knex, bucket, key, }: {
+ knex: Knex;
+ bucket: string;
+ key: string;
+ }) => Knex.QueryBuilder<any, {
+ _base: any;
+ _hasSelection: true;
+ _keys: "granules.granule_id" | "collections.name as collection_name" | "collections.version as collection_version";
+ _aliases: {};
+ _single: false;
+ _intersectProps: {};
+ _unionProps: undefined;
+ }>;
  /**
  * Helper to build a query that returns records from the files table with data
  * joined in from the granules table optionally filtered by collectionIds,
package/dist/lib/file.js CHANGED
@@ -1,8 +1,31 @@
  "use strict";
  Object.defineProperty(exports, "__esModule", { value: true });
- exports.getFilesAndGranuleInfoQuery = void 0;
+ exports.getFilesAndGranuleInfoQuery = exports.getGranuleIdAndCollectionIdFromFile = void 0;
  const Collections_1 = require("@cumulus/message/Collections");
  const tables_1 = require("../tables");
+ /**
+ * Retrieves the granule ID, collection name, and collection version associated
+ * with a specific file by joining data across the files, granules, and collections tables.
+ *
+ * @param {Object} params - The parameters for the query.
+ * @param {Knex} params.knex - The Knex client object for database interaction.
+ * @param {string} params.bucket - The S3 bucket of the file.
+ * @param {string} params.key - The S3 key (path) of the file.
+ * @returns {Knex.QueryBuilder} A Knex query builder object that, when executed,
+ * will return the granule_cumulus_id, collection_name, and collection_version
+ * for the specified file.
+ */
+ const getGranuleIdAndCollectionIdFromFile = ({ knex, bucket, key, }) => {
+ const { files: filesTable, granules: granulesTable, collections: collectionsTable } = tables_1.TableNames;
+ return knex(filesTable)
+ .select(`${granulesTable}.granule_id`, `${collectionsTable}.name as collection_name`, `${collectionsTable}.version as collection_version`)
+ .innerJoin(granulesTable, `${filesTable}.granule_cumulus_id`, `${granulesTable}.cumulus_id`)
+ .innerJoin(collectionsTable, `${granulesTable}.collection_cumulus_id`, `${collectionsTable}.cumulus_id`)
+ .where(`${filesTable}.bucket`, bucket)
+ .andWhere(`${filesTable}.key`, key)
+ .first();
+ };
+ exports.getGranuleIdAndCollectionIdFromFile = getGranuleIdAndCollectionIdFromFile;
  /**
  * Helper to build a query that returns records from the files table with data
  * joined in from the granules table optionally filtered by collectionIds,
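The new getGranuleIdAndCollectionIdFromFile export builds a single joined query from a file's bucket and key to the owning granule and collection. A minimal usage sketch (the bucket and key values are hypothetical placeholders, and getKnexClient is assumed to be configured through the environment):

    // Sketch: look up which granule and collection own a given file.
    import { getKnexClient, getGranuleIdAndCollectionIdFromFile } from '@cumulus/db';

    const lookupFileOwner = async (bucket: string, key: string) => {
      const knex = await getKnexClient({ env: process.env });
      // Awaiting the returned query builder executes it; .first() yields one row or undefined.
      const row = await getGranuleIdAndCollectionIdFromFile({ knex, bucket, key });
      return row; // { granule_id, collection_name, collection_version } when the file exists
    };

    // e.g. lookupFileOwner('example-protected-bucket', 'MOD09GQ/example-file.hdf')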
@@ -0,0 +1,4 @@
+ import { Knex } from 'knex';
+ export declare const up: (knex: Knex) => Promise<void>;
+ export declare const down: (knex: Knex) => Promise<void>;
+ //# sourceMappingURL=20250425134823_granules_add_producer_granule_id.d.ts.map
@@ -0,0 +1,30 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.down = exports.up = void 0;
+ const up = async (knex) => {
+ if (!await knex.schema.hasColumn('granules', 'producer_granule_id')) {
+ await knex.schema.table('granules', (table) => {
+ table
+ .text('producer_granule_id')
+ .comment('Producer Granule Id');
+ });
+ await knex('granules').update('producer_granule_id', knex.raw('granule_id'));
+ await knex.schema.table('granules', (table) => {
+ table.text('producer_granule_id').notNullable().alter();
+ });
+ await knex.raw('CREATE INDEX CONCURRENTLY IF NOT EXISTS granules_producer_granule_id_index ON granules(producer_granule_id)');
+ }
+ };
+ exports.up = up;
+ const down = async (knex) => {
+ if (await knex.schema.hasColumn('granules', 'producer_granule_id')) {
+ await knex.schema.table('granules', (table) => {
+ table.dropColumn('producer_granule_id');
+ });
+ }
+ };
+ exports.down = down;
+ exports.config = {
+ transaction: false,
+ };
+ //# sourceMappingURL=20250425134823_granules_add_producer_granule_id.js.map
@@ -0,0 +1,4 @@
+ import { Knex } from 'knex';
+ export declare const up: (knex: Knex) => Promise<void>;
+ export declare const down: (knex: Knex) => Promise<void>;
+ //# sourceMappingURL=20250617190412_add_archived_and_index.d.ts.map
@@ -0,0 +1,43 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.down = exports.up = void 0;
+ const up = async (knex) => {
+ if (!(await knex.schema.hasColumn('granules', 'archived'))) {
+ await knex.schema.table('granules', (table) => {
+ table.boolean('archived')
+ .comment('granule has been "archived"')
+ .defaultTo('false')
+ .notNullable();
+ });
+ }
+ if (!(await knex.schema.hasColumn('executions', 'archived'))) {
+ await knex.schema.table('executions', (table) => {
+ table.boolean('archived')
+ .comment('execution has been "archived"')
+ .defaultTo('false')
+ .notNullable();
+ });
+ }
+ await knex.raw('CREATE INDEX CONCURRENTLY IF NOT EXISTS executions_archived_index ON executions (archived)');
+ await knex.raw('CREATE INDEX CONCURRENTLY IF NOT EXISTS granules_archived_index ON granules (archived)');
+ };
+ exports.up = up;
+ const down = async (knex) => {
+ await knex.raw('DROP INDEX CONCURRENTLY IF EXISTS granules_archived_index');
+ await knex.raw('DROP INDEX CONCURRENTLY IF EXISTS executions_archived_index');
+ if (await knex.schema.hasColumn('granules', 'archived')) {
+ await knex.schema.table('granules', (table) => {
+ table.dropColumn('archived');
+ });
+ }
+ if (await knex.schema.hasColumn('executions', 'archived')) {
+ await knex.schema.table('executions', (table) => {
+ table.dropColumn('archived');
+ });
+ }
+ };
+ exports.down = down;
+ exports.config = {
+ transaction: false,
+ };
+ //# sourceMappingURL=20250617190412_add_archived_and_index.js.map
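Both new migrations create their indexes with CREATE INDEX CONCURRENTLY, which PostgreSQL cannot run inside a transaction block; that is why each migration also exports config = { transaction: false }. A minimal sketch of applying the bundled migrations programmatically, assuming the package's exported migrationDir and getKnexClient helpers and an environment-based connection:

    // Sketch: run the packaged knex migrations (including the two added above).
    import { getKnexClient, migrationDir } from '@cumulus/db';

    const runMigrations = async () => {
      const knex = await getKnexClient({ env: process.env });
      try {
        // Point knex at the compiled migrations shipped with this package.
        await knex.migrate.latest({ directory: migrationDir });
      } finally {
        await knex.destroy();
      }
    };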
@@ -9,6 +9,9 @@ const logger_1 = __importDefault(require("@cumulus/logger"));
  const log = new logger_1.default({ sender: '@cumulus/db/field-mapping' });
  // functions to map the api search string field name and value to postgres db field
  const granuleMapping = {
+ archived: (value) => ({
+ archived: value,
+ }),
  beginningDateTime: (value) => ({
  beginning_date_time: value,
  }),
@@ -39,6 +42,9 @@ const granuleMapping = {
  processingStartDateTime: (value) => ({
  processing_start_date_time: value,
  }),
+ producerGranuleId: (value) => ({
+ producer_granule_id: value,
+ }),
  productionDateTime: (value) => ({
  production_date_time: value,
  }),
@@ -205,6 +211,9 @@ const executionMapping = {
  collectionVersion: version,
  };
  },
+ archived: (value) => ({
+ archived: value,
+ }),
  };
  const pdrMapping = {
  address: (value) => ({
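The field-mapping additions above translate the API-style search fields archived and producerGranuleId into the new archived and producer_granule_id columns. A hedged sketch of how that surfaces through the exported GranuleSearch class, assuming its event-style constructor and query(knex) method; the query parameter values are hypothetical:

    // Sketch: search granules on the newly mapped fields.
    import { getKnexClient, GranuleSearch } from '@cumulus/db';

    const findArchivedGranules = async () => {
      const knex = await getKnexClient({ env: process.env });
      const search = new GranuleSearch({
        queryStringParameters: {
          archived: 'true', // mapped to the archived column
          producerGranuleId: 'MOD09GQ.A2017025.h21v00.006.2017034065104', // mapped to producer_granule_id
        },
      });
      return await search.query(knex);
    };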
@@ -69,6 +69,7 @@ const fakeExecutionRecordFactory = (params) => {
  const executionId = (0, uuid_1.v4)();
  const executionARN = `arn:aws:states:us-east-1:12345:execution:test-TestExecution:${executionId}`;
  return {
+ archived: false,
  arn: executionARN,
  url: (0, Executions_1.getExecutionUrlFromArn)(executionARN),
  status: 'running',
@@ -90,6 +91,7 @@ const fakeProviderRecordFactory = (params) => ({
  exports.fakeProviderRecordFactory = fakeProviderRecordFactory;
  const fakeGranuleRecordFactory = (params) => ({
  granule_id: (0, crypto_random_string_1.default)({ length: 5 }),
+ producer_granule_id: (0, crypto_random_string_1.default)({ length: 5 }),
  status: 'completed',
  created_at: new Date(),
  ...params,
@@ -34,6 +34,7 @@ const translatePostgresExecutionToApiExecutionWithoutDbQuery = ({ executionRecor
  asyncOperationId,
  collectionId,
  parentArn,
+ archived: executionRecord.archived,
  createdAt: executionRecord.created_at.getTime(),
  updatedAt: executionRecord.updated_at.getTime(),
  timestamp: executionRecord.timestamp?.getTime(),
@@ -111,6 +112,7 @@ const translateApiExecutionToPostgresExecutionWithoutNilsRemoved = async (apiRec
  // Map old record to new schema.
  const translatedRecord = {
  async_operation_cumulus_id: (apiRecord.asyncOperationId ? await asyncOperationPgModel.getRecordCumulusId(knex, { id: apiRecord.asyncOperationId }) : ((0, isNull_1.default)(apiRecord.asyncOperationId) ? null : undefined)),
+ archived: apiRecord.archived,
  status: apiRecord.status,
  arn: apiRecord.arn,
  duration: apiRecord.duration,
@@ -29,6 +29,7 @@ const file_2 = require("./file");
  * @returns An API Granule with associated Files
  */
  const translatePostgresGranuleToApiGranuleWithoutDbQuery = ({ granulePgRecord, collectionPgRecord, executionUrls = [], files = [], pdr, providerPgRecord, }) => (0, util_1.removeNilProperties)({
+ archived: granulePgRecord.archived,
  beginningDateTime: granulePgRecord.beginning_date_time?.toISOString(),
  cmrLink: granulePgRecord.cmr_link,
  collectionId: (0, Collections_1.constructCollectionId)(collectionPgRecord.name, collectionPgRecord.version),
@@ -43,6 +44,7 @@ const translatePostgresGranuleToApiGranuleWithoutDbQuery = ({ granulePgRecord, c
  pdrName: pdr ? pdr.name : undefined,
  processingEndDateTime: granulePgRecord.processing_end_date_time?.toISOString(),
  processingStartDateTime: granulePgRecord.processing_start_date_time?.toISOString(),
+ producerGranuleId: granulePgRecord.producer_granule_id,
  productionDateTime: granulePgRecord.production_date_time?.toISOString(),
  productVolume: granulePgRecord.product_volume,
  provider: providerPgRecord ? providerPgRecord.name : undefined,
@@ -118,8 +120,11 @@ const validateApiToPostgresGranuleObject = (apiGranule) => {
  if ((0, isNil_1.default)(apiGranule.granuleId)) {
  throw new errors_1.ValidationError('granuleId cannot be undefined on a granule, granules must have a collection and a granule ID');
  }
+ if ((0, isNil_1.default)(apiGranule.producerGranuleId)) {
+ throw new errors_1.ValidationError('producerGranuleId cannot be undefined on a granule, granules must have a producerGranuleId');
+ }
  if ((0, isNull_1.default)(apiGranule.status)) {
- throw new errors_1.ValidationError('status cannot be null on a granule, granules must have a collection and a granule ID');
+ throw new errors_1.ValidationError('status cannot be null on a granule, granules must have a status');
  }
  };
  /**
@@ -159,9 +164,11 @@ const translateApiGranuleToPostgresGranuleWithoutNilsRemoved = async ({ dynamoRe
  provider_cumulus_id = await providerPgModel.getRecordCumulusId(knexOrTransaction, { name: dynamoRecord.provider });
  }
  const granuleRecord = {
+ archived: dynamoRecord.archived,
  granule_id: dynamoRecord.granuleId,
  status: dynamoRecord.status,
  collection_cumulus_id: await collectionPgModel.getRecordCumulusId(knexOrTransaction, { name, version }),
+ producer_granule_id: dynamoRecord.producerGranuleId,
  published: dynamoRecord.published,
  duration: dynamoRecord.duration,
  time_to_archive: dynamoRecord.timeToArchive,
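Two behavioral changes stand out in the granule translation above: translated API granules now carry archived and producerGranuleId, and the validation step rejects API granules without a producerGranuleId. A minimal sketch of an API granule shape that satisfies the tightened validation (all field values are hypothetical):

    // Sketch: API granules passed through the translate layer now need producerGranuleId
    // alongside granuleId, collectionId, and status.
    const apiGranule = {
      granuleId: 'MOD09GQ.A2017025.h21v00.006.2017034065104.v1',
      producerGranuleId: 'MOD09GQ.A2017025.h21v00.006.2017034065104',
      collectionId: 'MOD09GQ___006',
      status: 'completed',
      archived: false,
    };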
@@ -16,10 +16,12 @@ export interface PostgresExecution {
  updated_at?: Date | null;
  url?: string | null;
  workflow_name?: string | null;
+ archived: boolean;
  }
  export interface PostgresExecutionRecord extends PostgresExecution {
  created_at: Date;
  cumulus_id: number;
  updated_at: Date;
+ archived: boolean;
  }
  //# sourceMappingURL=execution.d.ts.map
@@ -4,6 +4,8 @@ export interface PostgresGranuleUniqueColumns {
  collection_cumulus_id: number;
  }
  export interface PostgresGranule extends PostgresGranuleUniqueColumns {
+ archived: boolean;
+ producer_granule_id: string;
  status?: GranuleStatus;
  cmr_link?: string | null;
  error?: object | null;
@@ -26,7 +28,9 @@ export interface PostgresGranule extends PostgresGranuleUniqueColumns {
  query_fields?: unknown | null;
  }
  export interface PostgresGranuleRecord extends Omit<PostgresGranule, 'product_volume'> {
+ archived: boolean;
  cumulus_id: number;
+ producer_granule_id: string;
  product_volume?: string;
  created_at: Date;
  updated_at: Date;
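The new required fields on PostgresExecution, PostgresGranule, and their Record variants are breaking for any code that constructs these records directly, consistent with the version jump from 20.2.1 to 21.0.0. A small sketch, assuming the type is re-exported from the package root (field values are hypothetical):

    // Sketch: a minimal PostgresGranule under the 21.0.0 types now needs archived and
    // producer_granule_id in addition to the unique-column fields.
    import type { PostgresGranule } from '@cumulus/db';

    const pgGranule: PostgresGranule = {
      granule_id: 'MOD09GQ.A2017025.h21v00.006.2017034065104.v1',
      producer_granule_id: 'MOD09GQ.A2017025.h21v00.006.2017034065104',
      collection_cumulus_id: 1,
      archived: false,
      status: 'completed',
    };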
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@cumulus/db",
- "version": "20.2.1",
+ "version": "21.0.0",
  "description": "Utilities for working with the Cumulus DB",
  "license": "Apache-2.0",
  "main": "./dist/index.js",
@@ -33,12 +33,12 @@
  },
  "dependencies": {
  "@aws-sdk/client-secrets-manager": "^3.621.0",
- "@cumulus/aws-client": "20.2.1",
- "@cumulus/common": "20.2.1",
- "@cumulus/errors": "20.2.1",
- "@cumulus/logger": "20.2.1",
- "@cumulus/message": "20.2.1",
- "@cumulus/types": "20.2.1",
+ "@cumulus/aws-client": "21.0.0",
+ "@cumulus/common": "21.0.0",
+ "@cumulus/errors": "21.0.0",
+ "@cumulus/logger": "21.0.0",
+ "@cumulus/message": "21.0.0",
+ "@cumulus/types": "21.0.0",
  "crypto-random-string": "^3.2.0",
  "is-valid-hostname": "1.0.2",
  "knex": "2.4.1",
@@ -50,5 +50,5 @@
  "devDependencies": {
  "@types/uuid": "^8.0.0"
  },
- "gitHead": "8aeab83b5fd9f7e1818d4631e1e36535e443ae90"
+ "gitHead": "19bb3477969662a9e0b300f10f6df23b6c0654db"
  }