@cumulus/db 21.3.1-alpha.0 → 21.3.2-testlerna.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.ts +1 -0
- package/dist/index.js +3 -1
- package/dist/s3search/AsyncOperationS3Search.d.ts +20 -0
- package/dist/s3search/AsyncOperationS3Search.js +29 -0
- package/dist/s3search/CollectionS3Search.d.ts +39 -0
- package/dist/s3search/CollectionS3Search.js +113 -0
- package/dist/s3search/DuckDBSearchExecutor.d.ts +36 -0
- package/dist/s3search/DuckDBSearchExecutor.js +57 -0
- package/dist/s3search/ExecutionS3Search.d.ts +20 -0
- package/dist/s3search/ExecutionS3Search.js +29 -0
- package/dist/s3search/GranuleS3Search.d.ts +31 -0
- package/dist/s3search/GranuleS3Search.js +100 -0
- package/dist/s3search/PdrS3Search.d.ts +20 -0
- package/dist/s3search/PdrS3Search.js +29 -0
- package/dist/s3search/ProviderS3Search.d.ts +20 -0
- package/dist/s3search/ProviderS3Search.js +29 -0
- package/dist/s3search/ReconciliationReportS3Search.d.ts +20 -0
- package/dist/s3search/ReconciliationReportS3Search.js +29 -0
- package/dist/s3search/RuleS3Search.d.ts +20 -0
- package/dist/s3search/RuleS3Search.js +29 -0
- package/dist/s3search/StatsS3Search.d.ts +25 -0
- package/dist/s3search/StatsS3Search.js +51 -0
- package/dist/s3search/duckdbHelpers.d.ts +43 -0
- package/dist/s3search/duckdbHelpers.js +83 -0
- package/dist/s3search/s3TableSchemas.d.ts +11 -0
- package/dist/s3search/s3TableSchemas.js +272 -0
- package/dist/search/BaseSearch.d.ts +46 -2
- package/dist/search/BaseSearch.js +84 -22
- package/dist/search/CollectionSearch.d.ts +6 -4
- package/dist/search/CollectionSearch.js +2 -3
- package/dist/search/ExecutionSearch.d.ts +1 -1
- package/dist/search/ExecutionSearch.js +3 -3
- package/dist/search/GranuleSearch.d.ts +2 -3
- package/dist/search/GranuleSearch.js +3 -3
- package/dist/search/PdrSearch.js +1 -1
- package/dist/search/ReconciliationReportSearch.js +1 -1
- package/dist/search/RuleSearch.js +4 -4
- package/dist/search/StatsSearch.d.ts +15 -4
- package/dist/search/StatsSearch.js +12 -6
- package/dist/search/field-mapping.d.ts +1 -3
- package/dist/search/field-mapping.js +40 -19
- package/dist/test-duckdb-utils.d.ts +31 -0
- package/dist/test-duckdb-utils.js +125 -0
- package/dist/test-utils.js +6 -0
- package/dist/translate/async_operations.js +7 -3
- package/dist/translate/collections.js +6 -6
- package/dist/translate/executions.js +7 -7
- package/dist/translate/granules.js +16 -11
- package/dist/translate/pdr.js +4 -4
- package/dist/translate/providers.js +2 -2
- package/dist/translate/reconciliation_reports.js +5 -4
- package/dist/translate/rules.d.ts +1 -1
- package/dist/translate/rules.js +6 -6
- package/dist/types/file.d.ts +2 -0
- package/package.json +12 -11
|
@@ -0,0 +1,125 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.createDuckDBTables = exports.stageAndLoadDuckDBTableFromData = exports.setupDuckDBWithS3ForTesting = void 0;
|
|
4
|
+
const node_api_1 = require("@duckdb/node-api");
|
|
5
|
+
const s3TableSchemas_1 = require("./s3search/s3TableSchemas");
|
|
6
|
+
const duckdbHelpers_1 = require("./s3search/duckdbHelpers");
|
|
7
|
+
/**
|
|
8
|
+
* Creates a DuckDB in-memory instance and sets up S3/httpfs for testing.
|
|
9
|
+
* Configures S3-related settings on the DuckDB instance.
|
|
10
|
+
*
|
|
11
|
+
* @param {string} dbFilePath - The path to the DuckDB database file. Defaults to in-memory
|
|
12
|
+
* @returns {Promise<{ instance: DuckDBInstance, connection: DuckDBConnection }>}
|
|
13
|
+
* - The created DuckDB instance and the connection object for interacting with the database.
|
|
14
|
+
* - The connection is configured with HTTPFS for S3.
|
|
15
|
+
*/
|
|
16
|
+
async function setupDuckDBWithS3ForTesting(dbFilePath = ':memory:') {
|
|
17
|
+
const instance = await node_api_1.DuckDBInstance.create(dbFilePath);
|
|
18
|
+
const connection = await instance.connect();
|
|
19
|
+
// Configure DuckDB HTTPFS for S3
|
|
20
|
+
await connection.run(`
|
|
21
|
+
INSTALL httpfs;
|
|
22
|
+
LOAD httpfs;
|
|
23
|
+
SET s3_region='us-east-1';
|
|
24
|
+
SET s3_access_key_id='test';
|
|
25
|
+
SET s3_secret_access_key='test';
|
|
26
|
+
SET s3_endpoint='localhost:4566';
|
|
27
|
+
SET s3_use_ssl=false;
|
|
28
|
+
SET s3_url_style='path';
|
|
29
|
+
`);
|
|
30
|
+
return { instance, connection };
|
|
31
|
+
}
|
|
32
|
+
exports.setupDuckDBWithS3ForTesting = setupDuckDBWithS3ForTesting;
|
|
33
|
+
/**
 * Stages data into a temporary DuckDB table, exports it to Parquet (S3),
 * and then loads it into the target table.
 *
 * For the `executions` table, rows whose `parent_cumulus_id` is null or
 * undefined are inserted before rows that reference a parent, so the
 * self-referencing relationship resolves in order (both while staging and
 * while loading from the staging table).
 *
 * @template T - Shape of the row object being inserted.
 * @param connection - Active DuckDB connection.
 * @param knexBuilder - Knex instance used to generate SQL insert statements.
 * @param tableName - Name of the destination table.
 * @param tableSql - Function that returns the CREATE TABLE SQL for a given table name.
 * @param data - A single row or array of rows to insert.
 * @param s3Path - Destination S3 path where the staged data will be exported as Parquet.
 * @returns Promise that resolves when the staging, export, and load process completes.
 */
async function stageAndLoadDuckDBTableFromData(connection, knexBuilder, tableName, tableSql, data, s3Path) {
    if (!data || (Array.isArray(data) && data.length === 0))
        return;
    const rows = Array.isArray(data) ? data : [data];
    const tmpTableName = `${tableName}_tmp`;
    // Builds a parameterized INSERT via knex and runs it against DuckDB.
    const insertRows = async (batch) => {
        if (batch.length === 0)
            return;
        const insertQuery = knexBuilder(tmpTableName)
            .insert(batch)
            .toSQL()
            .toNative();
        await connection.run(insertQuery.sql, (0, duckdbHelpers_1.prepareBindings)(insertQuery.bindings));
    };
    // Create temporary staging table
    await connection.run(tableSql(tmpTableName));
    // Insert into staging table
    if (tableName === 'executions') {
        // Use `== null` rather than a falsy check so a legitimate id of 0 is
        // classified as a child, matching the `IS NULL` / `IS NOT NULL`
        // split used when loading from the staging table below.
        const parentRows = rows.filter((r) => r.parent_cumulus_id == null);
        const childRows = rows.filter((r) => r.parent_cumulus_id != null);
        // Parents first, then children.
        await insertRows(parentRows);
        await insertRows(childRows);
    }
    else {
        // Generic insert for other tables
        await insertRows(rows);
    }
    // Export staging table to Parquet (S3)
    await connection.run(`
    COPY ${tmpTableName}
    TO '${s3Path}'
    (FORMAT PARQUET);
  `);
    // Load from staging table into final table
    if (tableName === 'executions') {
        // Insert parents first
        await connection.run(`
      INSERT INTO ${tableName}
      SELECT * FROM ${tmpTableName}
      WHERE parent_cumulus_id IS NULL;
    `);
        // Insert children next
        await connection.run(`
      INSERT INTO ${tableName}
      SELECT * FROM ${tmpTableName}
      WHERE parent_cumulus_id IS NOT NULL;
    `);
    }
    else {
        await connection.run(`
      INSERT INTO ${tableName}
      SELECT * FROM ${tmpTableName};
    `);
    }
    // Drop staging table
    await connection.run(`DROP TABLE IF EXISTS ${tmpTableName};`);
}
exports.stageAndLoadDuckDBTableFromData = stageAndLoadDuckDBTableFromData;
|
|
112
|
+
/**
 * Creates all Cumulus S3-search tables on the given DuckDB connection.
 *
 * Runs each table's CREATE TABLE statement sequentially, in the same
 * fixed order as before (presumably referenced tables before the tables
 * that point at them — e.g. granules before files — TODO confirm against
 * the schema definitions).
 *
 * @param connection - Active DuckDB connection.
 * @returns Promise that resolves once every table has been created.
 */
async function createDuckDBTables(connection) {
    const tableSqlGenerators = [
        s3TableSchemas_1.asyncOperationsS3TableSql,
        s3TableSchemas_1.collectionsS3TableSql,
        s3TableSchemas_1.providersS3TableSql,
        s3TableSchemas_1.granulesS3TableSql,
        s3TableSchemas_1.filesS3TableSql,
        s3TableSchemas_1.executionsS3TableSql,
        s3TableSchemas_1.granulesExecutionsS3TableSql,
        s3TableSchemas_1.pdrsS3TableSql,
        s3TableSchemas_1.reconciliationReportsS3TableSql,
        s3TableSchemas_1.rulesS3TableSql,
    ];
    for (const generateTableSql of tableSqlGenerators) {
        // Sequential on purpose: preserves the original creation order.
        // eslint-disable-next-line no-await-in-loop
        await connection.run(generateTableSql());
    }
}
exports.createDuckDBTables = createDuckDBTables;
|
|
125
|
+
//# sourceMappingURL=test-duckdb-utils.js.map
|
package/dist/test-utils.js
CHANGED
|
@@ -94,12 +94,15 @@ const fakeGranuleRecordFactory = (params) => ({
|
|
|
94
94
|
producer_granule_id: (0, crypto_random_string_1.default)({ length: 5 }),
|
|
95
95
|
status: 'completed',
|
|
96
96
|
created_at: new Date(),
|
|
97
|
+
updated_at: new Date(),
|
|
97
98
|
...params,
|
|
98
99
|
});
|
|
99
100
|
exports.fakeGranuleRecordFactory = fakeGranuleRecordFactory;
|
|
100
101
|
const fakeFileRecordFactory = (params) => ({
|
|
101
102
|
bucket: (0, crypto_random_string_1.default)({ length: 3 }),
|
|
102
103
|
key: (0, crypto_random_string_1.default)({ length: 3 }),
|
|
104
|
+
created_at: new Date(),
|
|
105
|
+
updated_at: new Date(),
|
|
103
106
|
...params,
|
|
104
107
|
});
|
|
105
108
|
exports.fakeFileRecordFactory = fakeFileRecordFactory;
|
|
@@ -110,6 +113,8 @@ const fakeAsyncOperationRecordFactory = (params) => ({
|
|
|
110
113
|
status: 'RUNNING',
|
|
111
114
|
output: { test: 'output' },
|
|
112
115
|
task_arn: (0, crypto_random_string_1.default)({ length: 3 }),
|
|
116
|
+
created_at: new Date(),
|
|
117
|
+
updated_at: new Date(),
|
|
113
118
|
...params,
|
|
114
119
|
});
|
|
115
120
|
exports.fakeAsyncOperationRecordFactory = fakeAsyncOperationRecordFactory;
|
|
@@ -117,6 +122,7 @@ const fakePdrRecordFactory = (params) => ({
|
|
|
117
122
|
name: `pdr${(0, crypto_random_string_1.default)({ length: 10 })}`,
|
|
118
123
|
status: 'running',
|
|
119
124
|
created_at: new Date(),
|
|
125
|
+
updated_at: new Date(),
|
|
120
126
|
...params,
|
|
121
127
|
});
|
|
122
128
|
exports.fakePdrRecordFactory = fakePdrRecordFactory;
|
|
@@ -4,8 +4,10 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
|
4
4
|
};
|
|
5
5
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
6
|
exports.translatePostgresAsyncOperationToApiAsyncOperation = exports.translateApiAsyncOperationToPostgresAsyncOperation = void 0;
|
|
7
|
+
const isObject_1 = __importDefault(require("lodash/isObject"));
|
|
7
8
|
const omit_1 = __importDefault(require("lodash/omit"));
|
|
8
9
|
const snake_camel_1 = require("snake-camel");
|
|
10
|
+
const util_1 = require("@cumulus/common/util");
|
|
9
11
|
const logger_1 = __importDefault(require("@cumulus/logger"));
|
|
10
12
|
const log = new logger_1.default({ sender: '@cumulus/db/translate/async-operations' });
|
|
11
13
|
/**
|
|
@@ -67,10 +69,12 @@ const translatePostgresAsyncOperationToApiAsyncOperation = (pgAsyncOperation) =>
|
|
|
67
69
|
description: pgAsyncOperation.description,
|
|
68
70
|
operationType: pgAsyncOperation.operation_type,
|
|
69
71
|
status: pgAsyncOperation.status,
|
|
70
|
-
output:
|
|
72
|
+
output: (0, isObject_1.default)(pgAsyncOperation.output)
|
|
73
|
+
? JSON.stringify(pgAsyncOperation.output)
|
|
74
|
+
: pgAsyncOperation.output,
|
|
71
75
|
taskArn: pgAsyncOperation.task_arn,
|
|
72
|
-
createdAt:
|
|
73
|
-
updatedAt:
|
|
76
|
+
createdAt: (0, util_1.returnNullOrUndefinedOrDate)(pgAsyncOperation.created_at)?.getTime(),
|
|
77
|
+
updatedAt: (0, util_1.returnNullOrUndefinedOrDate)(pgAsyncOperation.created_at)?.getTime(),
|
|
74
78
|
};
|
|
75
79
|
return apiAsyncOperation;
|
|
76
80
|
};
|
|
@@ -1,15 +1,15 @@
|
|
|
1
1
|
"use strict";
|
|
2
2
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
3
|
exports.translateApiCollectionToPostgresCollection = exports.translatePostgresCollectionToApiCollection = void 0;
|
|
4
|
-
const { removeNilProperties } = require('@cumulus/common/util');
|
|
4
|
+
const { parseIfJson, returnNullOrUndefinedOrDate, removeNilProperties } = require('@cumulus/common/util');
|
|
5
5
|
/**
|
|
6
6
|
* Translates a PostgresCollectionRecord object to a `CollectionRecord` API collection object
|
|
7
7
|
* @param {PostgresCollectionRecord} collectionRecord - PostgreSQL collection record to translate
|
|
8
8
|
* @returns {CollectionRecord} - Translated record
|
|
9
9
|
*/
|
|
10
10
|
const translatePostgresCollectionToApiCollection = (collectionRecord) => removeNilProperties(({
|
|
11
|
-
createdAt: collectionRecord.created_at.getTime(),
|
|
12
|
-
updatedAt: collectionRecord.updated_at.getTime(),
|
|
11
|
+
createdAt: returnNullOrUndefinedOrDate(collectionRecord.created_at).getTime(),
|
|
12
|
+
updatedAt: returnNullOrUndefinedOrDate(collectionRecord.updated_at).getTime(),
|
|
13
13
|
name: collectionRecord.name,
|
|
14
14
|
version: collectionRecord.version,
|
|
15
15
|
process: collectionRecord.process,
|
|
@@ -17,12 +17,12 @@ const translatePostgresCollectionToApiCollection = (collectionRecord) => removeN
|
|
|
17
17
|
duplicateHandling: collectionRecord.duplicate_handling,
|
|
18
18
|
granuleId: collectionRecord.granule_id_validation_regex,
|
|
19
19
|
granuleIdExtraction: collectionRecord.granule_id_extraction_regex,
|
|
20
|
-
files: collectionRecord.files,
|
|
20
|
+
files: parseIfJson(collectionRecord.files),
|
|
21
21
|
reportToEms: collectionRecord.report_to_ems,
|
|
22
22
|
sampleFileName: collectionRecord.sample_file_name,
|
|
23
23
|
ignoreFilesConfigForDiscovery: collectionRecord.ignore_files_config_for_discovery,
|
|
24
|
-
meta: collectionRecord.meta,
|
|
25
|
-
tags: collectionRecord.tags,
|
|
24
|
+
meta: parseIfJson(collectionRecord.meta),
|
|
25
|
+
tags: parseIfJson(collectionRecord.tags),
|
|
26
26
|
}));
|
|
27
27
|
exports.translatePostgresCollectionToApiCollection = translatePostgresCollectionToApiCollection;
|
|
28
28
|
/**
|
|
@@ -24,10 +24,10 @@ const translatePostgresExecutionToApiExecutionWithoutDbQuery = ({ executionRecor
|
|
|
24
24
|
status: executionRecord.status,
|
|
25
25
|
arn: executionRecord.arn,
|
|
26
26
|
duration: executionRecord.duration,
|
|
27
|
-
error: executionRecord.error,
|
|
28
|
-
tasks: executionRecord.tasks,
|
|
29
|
-
originalPayload: executionRecord.original_payload,
|
|
30
|
-
finalPayload: executionRecord.final_payload,
|
|
27
|
+
error: (0, util_1.parseIfJson)(executionRecord.error),
|
|
28
|
+
tasks: (0, util_1.parseIfJson)(executionRecord.tasks),
|
|
29
|
+
originalPayload: (0, util_1.parseIfJson)(executionRecord.original_payload),
|
|
30
|
+
finalPayload: (0, util_1.parseIfJson)(executionRecord.final_payload),
|
|
31
31
|
type: executionRecord.workflow_name,
|
|
32
32
|
execution: executionRecord.url,
|
|
33
33
|
cumulusVersion: executionRecord.cumulus_version,
|
|
@@ -35,9 +35,9 @@ const translatePostgresExecutionToApiExecutionWithoutDbQuery = ({ executionRecor
|
|
|
35
35
|
collectionId,
|
|
36
36
|
parentArn,
|
|
37
37
|
archived: executionRecord.archived,
|
|
38
|
-
createdAt: executionRecord.created_at
|
|
39
|
-
updatedAt: executionRecord.updated_at
|
|
40
|
-
timestamp: executionRecord.timestamp?.getTime(),
|
|
38
|
+
createdAt: (0, util_1.returnNullOrUndefinedOrDate)(executionRecord.created_at)?.getTime(),
|
|
39
|
+
updatedAt: (0, util_1.returnNullOrUndefinedOrDate)(executionRecord.updated_at)?.getTime(),
|
|
40
|
+
timestamp: (0, util_1.returnNullOrUndefinedOrDate)(executionRecord.timestamp)?.getTime(),
|
|
41
41
|
};
|
|
42
42
|
return (0, util_1.removeNilProperties)(translatedRecord);
|
|
43
43
|
};
|
|
@@ -30,31 +30,36 @@ const file_2 = require("./file");
|
|
|
30
30
|
*/
|
|
31
31
|
const translatePostgresGranuleToApiGranuleWithoutDbQuery = ({ granulePgRecord, collectionPgRecord, executionUrls = [], files = [], pdr, providerPgRecord, }) => (0, util_1.removeNilProperties)({
|
|
32
32
|
archived: granulePgRecord.archived,
|
|
33
|
-
beginningDateTime: granulePgRecord.beginning_date_time
|
|
33
|
+
beginningDateTime: (0, util_1.returnNullOrUndefinedOrDate)(granulePgRecord.beginning_date_time)
|
|
34
|
+
?.toISOString(),
|
|
34
35
|
cmrLink: granulePgRecord.cmr_link,
|
|
35
36
|
collectionId: (0, Collections_1.constructCollectionId)(collectionPgRecord.name, collectionPgRecord.version),
|
|
36
|
-
createdAt: granulePgRecord.created_at?.getTime(),
|
|
37
|
+
createdAt: new Date(granulePgRecord.created_at)?.getTime(),
|
|
37
38
|
duration: granulePgRecord.duration,
|
|
38
|
-
endingDateTime: granulePgRecord.ending_date_time?.toISOString(),
|
|
39
|
-
error: granulePgRecord.error,
|
|
39
|
+
endingDateTime: (0, util_1.returnNullOrUndefinedOrDate)(granulePgRecord.ending_date_time)?.toISOString(),
|
|
40
|
+
error: (0, util_1.parseIfJson)(granulePgRecord.error),
|
|
40
41
|
execution: executionUrls[0] ? executionUrls[0].url : undefined,
|
|
41
42
|
files: files.length > 0 ? files.map((file) => (0, file_2.translatePostgresFileToApiFile)(file)) : [],
|
|
42
43
|
granuleId: granulePgRecord.granule_id,
|
|
43
|
-
lastUpdateDateTime: granulePgRecord.last_update_date_time
|
|
44
|
+
lastUpdateDateTime: (0, util_1.returnNullOrUndefinedOrDate)(granulePgRecord.last_update_date_time)
|
|
45
|
+
?.toISOString(),
|
|
44
46
|
pdrName: pdr ? pdr.name : undefined,
|
|
45
|
-
processingEndDateTime: granulePgRecord.processing_end_date_time
|
|
46
|
-
|
|
47
|
+
processingEndDateTime: (0, util_1.returnNullOrUndefinedOrDate)(granulePgRecord.processing_end_date_time)
|
|
48
|
+
?.toISOString(),
|
|
49
|
+
processingStartDateTime: (0, util_1.returnNullOrUndefinedOrDate)(granulePgRecord.processing_start_date_time)
|
|
50
|
+
?.toISOString(),
|
|
47
51
|
producerGranuleId: granulePgRecord.producer_granule_id,
|
|
48
|
-
productionDateTime: granulePgRecord.production_date_time
|
|
52
|
+
productionDateTime: (0, util_1.returnNullOrUndefinedOrDate)(granulePgRecord.production_date_time)
|
|
53
|
+
?.toISOString(),
|
|
49
54
|
productVolume: granulePgRecord.product_volume,
|
|
50
55
|
provider: providerPgRecord ? providerPgRecord.name : undefined,
|
|
51
56
|
published: granulePgRecord.published,
|
|
52
|
-
queryFields: granulePgRecord.query_fields,
|
|
57
|
+
queryFields: (0, util_1.parseIfJson)(granulePgRecord.query_fields),
|
|
53
58
|
status: granulePgRecord.status,
|
|
54
|
-
timestamp: granulePgRecord.timestamp?.getTime(),
|
|
59
|
+
timestamp: (0, util_1.returnNullOrUndefinedOrDate)(granulePgRecord.timestamp)?.getTime(),
|
|
55
60
|
timeToArchive: granulePgRecord.time_to_archive,
|
|
56
61
|
timeToPreprocess: granulePgRecord.time_to_process,
|
|
57
|
-
updatedAt: granulePgRecord.updated_at?.getTime(),
|
|
62
|
+
updatedAt: new Date(granulePgRecord.updated_at)?.getTime(),
|
|
58
63
|
});
|
|
59
64
|
exports.translatePostgresGranuleToApiGranuleWithoutDbQuery = translatePostgresGranuleToApiGranuleWithoutDbQuery;
|
|
60
65
|
/**
|
package/dist/translate/pdr.js
CHANGED
|
@@ -55,17 +55,17 @@ const translatePostgresPdrToApiPdrWithoutDbQuery = ({ pdrPgRecord, collectionPgR
|
|
|
55
55
|
provider: providerPgRecord?.name,
|
|
56
56
|
collectionId: (0, Collections_1.constructCollectionId)(collectionPgRecord.name, collectionPgRecord.version),
|
|
57
57
|
status: pdrPgRecord.status,
|
|
58
|
-
createdAt: pdrPgRecord.created_at
|
|
58
|
+
createdAt: (0, util_1.returnNullOrUndefinedOrDate)(pdrPgRecord.created_at)?.getTime(),
|
|
59
59
|
progress: pdrPgRecord.progress,
|
|
60
60
|
execution: executionArn ? (0, Executions_1.getExecutionUrlFromArn)(executionArn) : undefined,
|
|
61
61
|
PANSent: pdrPgRecord.pan_sent,
|
|
62
62
|
PANmessage: pdrPgRecord.pan_message,
|
|
63
|
-
stats: pdrPgRecord.stats,
|
|
63
|
+
stats: (0, util_1.parseIfJson)(pdrPgRecord.stats),
|
|
64
64
|
address: pdrPgRecord.address,
|
|
65
65
|
originalUrl: pdrPgRecord.original_url,
|
|
66
|
-
timestamp: (
|
|
66
|
+
timestamp: (0, util_1.returnNullOrUndefinedOrDate)(pdrPgRecord.timestamp)?.getTime(),
|
|
67
67
|
duration: pdrPgRecord.duration,
|
|
68
|
-
updatedAt: pdrPgRecord.updated_at
|
|
68
|
+
updatedAt: (0, util_1.returnNullOrUndefinedOrDate)(pdrPgRecord.updated_at)?.getTime(),
|
|
69
69
|
});
|
|
70
70
|
exports.translatePostgresPdrToApiPdrWithoutDbQuery = translatePostgresPdrToApiPdrWithoutDbQuery;
|
|
71
71
|
/**
|
|
@@ -20,8 +20,8 @@ const translatePostgresProviderToApiProvider = (record) => {
|
|
|
20
20
|
port: record.port,
|
|
21
21
|
host: record.host,
|
|
22
22
|
protocol: record.protocol,
|
|
23
|
-
createdAt: record.created_at
|
|
24
|
-
updatedAt: record.updated_at
|
|
23
|
+
createdAt: (0, util_1.returnNullOrUndefinedOrDate)(record.created_at)?.getTime(),
|
|
24
|
+
updatedAt: (0, util_1.returnNullOrUndefinedOrDate)(record.updated_at)?.getTime(),
|
|
25
25
|
username: record.username,
|
|
26
26
|
password: record.password,
|
|
27
27
|
allowedRedirects: record.allowed_redirects,
|
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
"use strict";
|
|
2
2
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
3
|
exports.translatePostgresReconReportToApiReconReport = exports.translateApiReconReportToPostgresReconReport = void 0;
|
|
4
|
-
const { removeNilProperties } = require('@cumulus/common/util');
|
|
4
|
+
const { parseIfJson, removeNilProperties, returnNullOrUndefinedOrDate } = require('@cumulus/common/util');
|
|
5
5
|
const pick = require('lodash/pick');
|
|
6
6
|
/**
|
|
7
7
|
* Generate a PostgreSQL Reconciliation Report from an API record.
|
|
@@ -26,9 +26,10 @@ exports.translateApiReconReportToPostgresReconReport = translateApiReconReportTo
|
|
|
26
26
|
*/
|
|
27
27
|
const translatePostgresReconReportToApiReconReport = (pgReconciliationReport) => {
|
|
28
28
|
const apiReconciliationReport = removeNilProperties({
|
|
29
|
-
...pick(pgReconciliationReport, ['name', 'type', 'status', 'location'
|
|
30
|
-
|
|
31
|
-
|
|
29
|
+
...pick(pgReconciliationReport, ['name', 'type', 'status', 'location']),
|
|
30
|
+
error: parseIfJson(pgReconciliationReport.error),
|
|
31
|
+
createdAt: returnNullOrUndefinedOrDate(pgReconciliationReport.created_at)?.getTime(),
|
|
32
|
+
updatedAt: returnNullOrUndefinedOrDate(pgReconciliationReport.updated_at)?.getTime(),
|
|
32
33
|
});
|
|
33
34
|
return apiReconciliationReport;
|
|
34
35
|
};
|
|
@@ -5,7 +5,7 @@ import { ProviderPgModel } from '../models/provider';
|
|
|
5
5
|
import { PostgresRule, PostgresRuleRecord } from '../types/rule';
|
|
6
6
|
import { PostgresProviderRecord } from '../types/provider';
|
|
7
7
|
import { PostgresCollectionRecord } from '../types/collection';
|
|
8
|
-
export declare const translatePostgresRuleToApiRuleWithoutDbQuery: (pgRule: PostgresRuleRecord, collectionPgRecord?: Pick<PostgresCollectionRecord, 'name' | 'version'>, providerPgRecord?: Partial<PostgresProviderRecord>) =>
|
|
8
|
+
export declare const translatePostgresRuleToApiRuleWithoutDbQuery: (pgRule: PostgresRuleRecord, collectionPgRecord?: Pick<PostgresCollectionRecord, 'name' | 'version'>, providerPgRecord?: Partial<PostgresProviderRecord>) => RuleRecord;
|
|
9
9
|
export declare const translatePostgresRuleToApiRule: (pgRule: PostgresRuleRecord, knex: Knex | Knex.Transaction, collectionPgModel?: CollectionPgModel, providerPgModel?: ProviderPgModel) => Promise<RuleRecord>;
|
|
10
10
|
/**
|
|
11
11
|
* Generate a Postgres rule record from a DynamoDB record.
|
package/dist/translate/rules.js
CHANGED
|
@@ -4,7 +4,7 @@ exports.translateApiRuleToPostgresRule = exports.translateApiRuleToPostgresRuleR
|
|
|
4
4
|
const util_1 = require("@cumulus/common/util");
|
|
5
5
|
const collection_1 = require("../models/collection");
|
|
6
6
|
const provider_1 = require("../models/provider");
|
|
7
|
-
const translatePostgresRuleToApiRuleWithoutDbQuery =
|
|
7
|
+
const translatePostgresRuleToApiRuleWithoutDbQuery = (pgRule, collectionPgRecord, providerPgRecord) => {
|
|
8
8
|
const apiRule = {
|
|
9
9
|
name: pgRule.name,
|
|
10
10
|
workflow: pgRule.workflow,
|
|
@@ -20,13 +20,13 @@ const translatePostgresRuleToApiRuleWithoutDbQuery = async (pgRule, collectionPg
|
|
|
20
20
|
value: pgRule.value,
|
|
21
21
|
}),
|
|
22
22
|
state: pgRule.enabled ? 'ENABLED' : 'DISABLED',
|
|
23
|
-
meta: pgRule.meta,
|
|
24
|
-
payload: pgRule.payload,
|
|
23
|
+
meta: (0, util_1.parseIfJson)(pgRule.meta),
|
|
24
|
+
payload: (0, util_1.parseIfJson)(pgRule.payload),
|
|
25
25
|
executionNamePrefix: pgRule.execution_name_prefix,
|
|
26
26
|
queueUrl: pgRule.queue_url,
|
|
27
|
-
tags: pgRule.tags,
|
|
28
|
-
createdAt: pgRule.created_at.getTime(),
|
|
29
|
-
updatedAt: pgRule.updated_at.getTime(),
|
|
27
|
+
tags: (0, util_1.parseIfJson)(pgRule.tags),
|
|
28
|
+
createdAt: new Date(pgRule.created_at).getTime(),
|
|
29
|
+
updatedAt: new Date(pgRule.updated_at).getTime(),
|
|
30
30
|
};
|
|
31
31
|
return (0, util_1.removeNilProperties)(apiRule);
|
|
32
32
|
};
|
package/dist/types/file.d.ts
CHANGED
|
@@ -4,11 +4,13 @@ export interface PostgresFile {
|
|
|
4
4
|
granule_cumulus_id: number;
|
|
5
5
|
checksum_type?: string;
|
|
6
6
|
checksum_value?: string;
|
|
7
|
+
created_at?: Date | null;
|
|
7
8
|
file_name?: string;
|
|
8
9
|
file_size?: number;
|
|
9
10
|
path?: string;
|
|
10
11
|
source?: string;
|
|
11
12
|
type?: string;
|
|
13
|
+
updated_at?: Date | null;
|
|
12
14
|
}
|
|
13
15
|
export interface PostgresFileRecord extends Omit<PostgresFile, 'file_size'> {
|
|
14
16
|
bucket: string;
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@cumulus/db",
|
|
3
|
-
"version": "21.3.
|
|
3
|
+
"version": "21.3.2-testlerna.0",
|
|
4
4
|
"description": "Utilities for working with the Cumulus DB",
|
|
5
5
|
"license": "Apache-2.0",
|
|
6
6
|
"main": "./dist/index.js",
|
|
@@ -11,7 +11,7 @@
|
|
|
11
11
|
],
|
|
12
12
|
"scripts": {
|
|
13
13
|
"clean": "rm -rf dist",
|
|
14
|
-
"
|
|
14
|
+
"build": "npm run tsc",
|
|
15
15
|
"tsc": "../../node_modules/.bin/tsc",
|
|
16
16
|
"tsc:listEmittedFiles": "../../node_modules/.bin/tsc --listEmittedFiles",
|
|
17
17
|
"test": "../../node_modules/.bin/ava",
|
|
@@ -32,23 +32,24 @@
|
|
|
32
32
|
"node": ">=22.21.1"
|
|
33
33
|
},
|
|
34
34
|
"dependencies": {
|
|
35
|
-
"@aws-sdk/client-secrets-manager": "^3.
|
|
36
|
-
"@cumulus/aws-client": "21.3.
|
|
37
|
-
"@cumulus/common": "21.3.
|
|
38
|
-
"@cumulus/errors": "21.3.
|
|
39
|
-
"@cumulus/logger": "21.3.
|
|
40
|
-
"@cumulus/message": "21.3.
|
|
41
|
-
"@cumulus/types": "21.3.
|
|
35
|
+
"@aws-sdk/client-secrets-manager": "^3.993.0",
|
|
36
|
+
"@cumulus/aws-client": "21.3.2-testlerna.0",
|
|
37
|
+
"@cumulus/common": "21.3.2-testlerna.0",
|
|
38
|
+
"@cumulus/errors": "21.3.2-testlerna.0",
|
|
39
|
+
"@cumulus/logger": "21.3.2-testlerna.0",
|
|
40
|
+
"@cumulus/message": "21.3.2-testlerna.0",
|
|
41
|
+
"@cumulus/types": "21.3.2-testlerna.0",
|
|
42
|
+
"@duckdb/node-api": "^1.4.4-r.1",
|
|
42
43
|
"crypto-random-string": "^3.2.0",
|
|
43
44
|
"is-valid-hostname": "1.0.2",
|
|
44
45
|
"knex": "2.4.1",
|
|
45
46
|
"lodash": "^4.17.21",
|
|
46
47
|
"pg": "~8.13",
|
|
47
48
|
"snake-camel": "^1.0.6",
|
|
48
|
-
"uuid": "8.
|
|
49
|
+
"uuid": "^8.2.0"
|
|
49
50
|
},
|
|
50
51
|
"devDependencies": {
|
|
51
52
|
"@types/uuid": "^8.0.0"
|
|
52
53
|
},
|
|
53
|
-
"gitHead": "
|
|
54
|
+
"gitHead": "ef6577b6caec96a20df8ccff293a9436db32a482"
|
|
54
55
|
}
|