@cumulus/db 18.4.0 → 19.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.ts +5 -0
- package/dist/index.js +11 -1
- package/dist/migrations/20240728101230_add_table_indexes.d.ts +4 -0
- package/dist/migrations/20240728101230_add_table_indexes.js +53 -0
- package/dist/models/execution.d.ts +2 -2
- package/dist/models/execution.js +1 -1
- package/dist/search/BaseSearch.d.ts +187 -0
- package/dist/search/BaseSearch.js +416 -0
- package/dist/search/CollectionSearch.d.ts +79 -0
- package/dist/search/CollectionSearch.js +162 -0
- package/dist/search/ExecutionSearch.d.ts +62 -0
- package/dist/search/ExecutionSearch.js +133 -0
- package/dist/search/GranuleSearch.d.ts +55 -0
- package/dist/search/GranuleSearch.js +109 -0
- package/dist/search/StatsSearch.d.ts +111 -0
- package/dist/search/StatsSearch.js +214 -0
- package/dist/search/field-mapping.d.ts +16 -0
- package/dist/search/field-mapping.js +304 -0
- package/dist/search/queries.d.ts +10 -0
- package/dist/search/queries.js +235 -0
- package/dist/translate/executions.d.ts +6 -0
- package/dist/translate/executions.js +32 -23
- package/dist/translate/granules.d.ts +24 -0
- package/dist/translate/granules.js +48 -27
- package/dist/types/search.d.ts +52 -0
- package/dist/types/search.js +3 -0
- package/package.json +8 -8
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Map query string field to db field
|
|
3
|
+
*
|
|
4
|
+
* @param type - query record type
|
|
5
|
+
* @param queryField - query field
|
|
6
|
+
* @param queryField.name - query field name
|
|
7
|
+
* @param [queryField.value] - query field value
|
|
8
|
+
* @returns db field
|
|
9
|
+
*/
|
|
10
|
+
export declare const mapQueryStringFieldToDbField: (type: string, queryField: {
|
|
11
|
+
name: string;
|
|
12
|
+
value?: string;
|
|
13
|
+
}) => {
|
|
14
|
+
[key: string]: any;
|
|
15
|
+
} | undefined;
|
|
16
|
+
//# sourceMappingURL=field-mapping.d.ts.map
|
|
@@ -0,0 +1,304 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.mapQueryStringFieldToDbField = void 0;
|
|
7
|
+
const Collections_1 = require("@cumulus/message/Collections");
|
|
8
|
+
const logger_1 = __importDefault(require("@cumulus/logger"));
|
|
9
|
+
const log = new logger_1.default({ sender: '@cumulus/db/field-mapping' });
|
|
10
|
+
// functions to map the api search string field name and value to postgres db field
|
|
11
|
+
const granuleMapping = {
|
|
12
|
+
beginningDateTime: (value) => ({
|
|
13
|
+
beginning_date_time: value,
|
|
14
|
+
}),
|
|
15
|
+
cmrLink: (value) => ({
|
|
16
|
+
cmr_link: value,
|
|
17
|
+
}),
|
|
18
|
+
createdAt: (value) => ({
|
|
19
|
+
created_at: value && new Date(Number(value)),
|
|
20
|
+
}),
|
|
21
|
+
duration: (value) => ({
|
|
22
|
+
duration: value && Number(value),
|
|
23
|
+
}),
|
|
24
|
+
endingDateTime: (value) => ({
|
|
25
|
+
ending_date_time: value,
|
|
26
|
+
}),
|
|
27
|
+
granuleId: (value) => ({
|
|
28
|
+
granule_id: value,
|
|
29
|
+
}),
|
|
30
|
+
_id: (value) => ({
|
|
31
|
+
granule_id: value,
|
|
32
|
+
}),
|
|
33
|
+
lastUpdateDateTime: (value) => ({
|
|
34
|
+
last_update_date_time: value,
|
|
35
|
+
}),
|
|
36
|
+
processingEndDateTime: (value) => ({
|
|
37
|
+
processing_end_date_time: value,
|
|
38
|
+
}),
|
|
39
|
+
processingStartDateTime: (value) => ({
|
|
40
|
+
processing_start_date_time: value,
|
|
41
|
+
}),
|
|
42
|
+
productionDateTime: (value) => ({
|
|
43
|
+
production_date_time: value,
|
|
44
|
+
}),
|
|
45
|
+
productVolume: (value) => ({
|
|
46
|
+
product_volume: value,
|
|
47
|
+
}),
|
|
48
|
+
published: (value) => ({
|
|
49
|
+
published: (value === 'true'),
|
|
50
|
+
}),
|
|
51
|
+
status: (value) => ({
|
|
52
|
+
status: value,
|
|
53
|
+
}),
|
|
54
|
+
timestamp: (value) => ({
|
|
55
|
+
updated_at: value && new Date(Number(value)),
|
|
56
|
+
}),
|
|
57
|
+
timeToArchive: (value) => ({
|
|
58
|
+
time_to_archive: Number(value),
|
|
59
|
+
}),
|
|
60
|
+
timeToPreprocess: (value) => ({
|
|
61
|
+
time_to_process: Number(value),
|
|
62
|
+
}),
|
|
63
|
+
updatedAt: (value) => ({
|
|
64
|
+
updated_at: value && new Date(Number(value)),
|
|
65
|
+
}),
|
|
66
|
+
error: (value) => ({
|
|
67
|
+
error: value,
|
|
68
|
+
}),
|
|
69
|
+
// nested error field
|
|
70
|
+
'error.Error': (value) => ({
|
|
71
|
+
'error.Error': value,
|
|
72
|
+
}),
|
|
73
|
+
'error.Error.keyword': (value) => ({
|
|
74
|
+
'error.Error': value,
|
|
75
|
+
}),
|
|
76
|
+
// The following fields require querying other tables
|
|
77
|
+
collectionId: (value) => {
|
|
78
|
+
const { name, version } = (value && (0, Collections_1.deconstructCollectionId)(value)) || {};
|
|
79
|
+
return {
|
|
80
|
+
collectionName: name,
|
|
81
|
+
collectionVersion: version,
|
|
82
|
+
};
|
|
83
|
+
},
|
|
84
|
+
provider: (value) => ({
|
|
85
|
+
providerName: value,
|
|
86
|
+
}),
|
|
87
|
+
pdrName: (value) => ({
|
|
88
|
+
pdrName: value,
|
|
89
|
+
}),
|
|
90
|
+
};
|
|
91
|
+
const asyncOperationMapping = {
|
|
92
|
+
createdAt: (value) => ({
|
|
93
|
+
created_at: value && new Date(Number(value)),
|
|
94
|
+
}),
|
|
95
|
+
id: (value) => ({
|
|
96
|
+
id: value,
|
|
97
|
+
}),
|
|
98
|
+
operationType: (value) => ({
|
|
99
|
+
operation_type: value,
|
|
100
|
+
}),
|
|
101
|
+
status: (value) => ({
|
|
102
|
+
status: value,
|
|
103
|
+
}),
|
|
104
|
+
taskArn: (value) => ({
|
|
105
|
+
task_arn: value,
|
|
106
|
+
}),
|
|
107
|
+
timestamp: (value) => ({
|
|
108
|
+
updated_at: value && new Date(Number(value)),
|
|
109
|
+
}),
|
|
110
|
+
updatedAt: (value) => ({
|
|
111
|
+
updated_at: value && new Date(Number(value)),
|
|
112
|
+
}),
|
|
113
|
+
};
|
|
114
|
+
const collectionMapping = {
|
|
115
|
+
createdAt: (value) => ({
|
|
116
|
+
created_at: value && new Date(Number(value)),
|
|
117
|
+
}),
|
|
118
|
+
name: (value) => ({
|
|
119
|
+
name: value,
|
|
120
|
+
}),
|
|
121
|
+
version: (value) => ({
|
|
122
|
+
version: value,
|
|
123
|
+
}),
|
|
124
|
+
_id: (value) => {
|
|
125
|
+
const { name, version } = (value && (0, Collections_1.deconstructCollectionId)(value)) || {};
|
|
126
|
+
return {
|
|
127
|
+
collectionName: name,
|
|
128
|
+
collectionVersion: version,
|
|
129
|
+
};
|
|
130
|
+
},
|
|
131
|
+
duplicateHandling: (value) => ({
|
|
132
|
+
duplicate_handling: value,
|
|
133
|
+
}),
|
|
134
|
+
granuleId: (value) => ({
|
|
135
|
+
granule_id_validation_regex: value,
|
|
136
|
+
}),
|
|
137
|
+
granuleIdExtraction: (value) => ({
|
|
138
|
+
granule_id_extraction_regex: value,
|
|
139
|
+
}),
|
|
140
|
+
timestamp: (value) => ({
|
|
141
|
+
updated_at: value && new Date(Number(value)),
|
|
142
|
+
}),
|
|
143
|
+
updatedAt: (value) => ({
|
|
144
|
+
updated_at: value && new Date(Number(value)),
|
|
145
|
+
}),
|
|
146
|
+
reportToEms: (value) => ({
|
|
147
|
+
report_to_ems: (value === 'true'),
|
|
148
|
+
}),
|
|
149
|
+
process: (value) => ({
|
|
150
|
+
process: value,
|
|
151
|
+
}),
|
|
152
|
+
sampleFileName: (value) => ({
|
|
153
|
+
sample_file_name: value,
|
|
154
|
+
}),
|
|
155
|
+
url_path: (value) => ({
|
|
156
|
+
url_path: value,
|
|
157
|
+
}),
|
|
158
|
+
};
|
|
159
|
+
// TODO add and verify all queryable fields for the following record types
|
|
160
|
+
const executionMapping = {
|
|
161
|
+
arn: (value) => ({
|
|
162
|
+
arn: value,
|
|
163
|
+
}),
|
|
164
|
+
createdAt: (value) => ({
|
|
165
|
+
created_at: value && new Date(Number(value)),
|
|
166
|
+
}),
|
|
167
|
+
duration: (value) => ({
|
|
168
|
+
duration: value && Number(value),
|
|
169
|
+
}),
|
|
170
|
+
// nested error field
|
|
171
|
+
'error.Error': (value) => ({
|
|
172
|
+
'error.Error': value,
|
|
173
|
+
}),
|
|
174
|
+
'error.Error.keyword': (value) => ({
|
|
175
|
+
'error.Error': value,
|
|
176
|
+
}),
|
|
177
|
+
execution: (value) => ({
|
|
178
|
+
url: value,
|
|
179
|
+
}),
|
|
180
|
+
type: (value) => ({
|
|
181
|
+
workflow_name: value,
|
|
182
|
+
}),
|
|
183
|
+
status: (value) => ({
|
|
184
|
+
status: value,
|
|
185
|
+
}),
|
|
186
|
+
timestamp: (value) => ({
|
|
187
|
+
updated_at: value && new Date(Number(value)),
|
|
188
|
+
}),
|
|
189
|
+
updatedAt: (value) => ({
|
|
190
|
+
updated_at: value && new Date(Number(value)),
|
|
191
|
+
}),
|
|
192
|
+
// The following fields require querying other tables
|
|
193
|
+
asyncOperationId: (value) => ({
|
|
194
|
+
asyncOperationId: value,
|
|
195
|
+
}),
|
|
196
|
+
parentArn: (value) => ({
|
|
197
|
+
parentArn: value,
|
|
198
|
+
}),
|
|
199
|
+
collectionId: (value) => {
|
|
200
|
+
const { name, version } = (value && (0, Collections_1.deconstructCollectionId)(value)) || {};
|
|
201
|
+
return {
|
|
202
|
+
collectionName: name,
|
|
203
|
+
collectionVersion: version,
|
|
204
|
+
};
|
|
205
|
+
},
|
|
206
|
+
};
|
|
207
|
+
const pdrMapping = {
|
|
208
|
+
createdAt: (value) => ({
|
|
209
|
+
created_at: value && new Date(Number(value)),
|
|
210
|
+
}),
|
|
211
|
+
pdrName: (value) => ({
|
|
212
|
+
name: value,
|
|
213
|
+
}),
|
|
214
|
+
status: (value) => ({
|
|
215
|
+
status: value,
|
|
216
|
+
}),
|
|
217
|
+
timestamp: (value) => ({
|
|
218
|
+
updated_at: value && new Date(Number(value)),
|
|
219
|
+
}),
|
|
220
|
+
updatedAt: (value) => ({
|
|
221
|
+
updated_at: value && new Date(Number(value)),
|
|
222
|
+
}),
|
|
223
|
+
// The following fields require querying other tables
|
|
224
|
+
collectionId: (value) => {
|
|
225
|
+
const { name, version } = (value && (0, Collections_1.deconstructCollectionId)(value)) || {};
|
|
226
|
+
return {
|
|
227
|
+
collectionName: name,
|
|
228
|
+
collectionVersion: version,
|
|
229
|
+
};
|
|
230
|
+
},
|
|
231
|
+
provider: (value) => ({
|
|
232
|
+
providerName: value,
|
|
233
|
+
}),
|
|
234
|
+
};
|
|
235
|
+
const providerMapping = {
|
|
236
|
+
createdAt: (value) => ({
|
|
237
|
+
created_at: value && new Date(Number(value)),
|
|
238
|
+
}),
|
|
239
|
+
id: (value) => ({
|
|
240
|
+
name: value,
|
|
241
|
+
}),
|
|
242
|
+
timestamp: (value) => ({
|
|
243
|
+
updated_at: value && new Date(Number(value)),
|
|
244
|
+
}),
|
|
245
|
+
updatedAt: (value) => ({
|
|
246
|
+
updated_at: value && new Date(Number(value)),
|
|
247
|
+
}),
|
|
248
|
+
};
|
|
249
|
+
const ruleMapping = {
|
|
250
|
+
createdAt: (value) => ({
|
|
251
|
+
created_at: value && new Date(Number(value)),
|
|
252
|
+
}),
|
|
253
|
+
name: (value) => ({
|
|
254
|
+
name: value,
|
|
255
|
+
}),
|
|
256
|
+
state: (value) => ({
|
|
257
|
+
enabled: (value === 'ENABLED'),
|
|
258
|
+
}),
|
|
259
|
+
timestamp: (value) => ({
|
|
260
|
+
updated_at: value && new Date(Number(value)),
|
|
261
|
+
}),
|
|
262
|
+
updatedAt: (value) => ({
|
|
263
|
+
updated_at: value && new Date(Number(value)),
|
|
264
|
+
}),
|
|
265
|
+
// The following fields require querying other tables
|
|
266
|
+
collectionId: (value) => {
|
|
267
|
+
const { name, version } = (value && (0, Collections_1.deconstructCollectionId)(value)) || {};
|
|
268
|
+
return {
|
|
269
|
+
collectionName: name,
|
|
270
|
+
collectionVersion: version,
|
|
271
|
+
};
|
|
272
|
+
},
|
|
273
|
+
provider: (value) => ({
|
|
274
|
+
providerName: value,
|
|
275
|
+
}),
|
|
276
|
+
};
|
|
277
|
+
// type and its mapping
|
|
278
|
+
const supportedMappings = {
|
|
279
|
+
granule: granuleMapping,
|
|
280
|
+
asyncOperation: asyncOperationMapping,
|
|
281
|
+
collection: collectionMapping,
|
|
282
|
+
execution: executionMapping,
|
|
283
|
+
pdr: pdrMapping,
|
|
284
|
+
provider: providerMapping,
|
|
285
|
+
rule: ruleMapping,
|
|
286
|
+
};
|
|
287
|
+
/**
|
|
288
|
+
* Map query string field to db field
|
|
289
|
+
*
|
|
290
|
+
* @param type - query record type
|
|
291
|
+
* @param queryField - query field
|
|
292
|
+
* @param queryField.name - query field name
|
|
293
|
+
* @param [queryField.value] - query field value
|
|
294
|
+
* @returns db field
|
|
295
|
+
*/
|
|
296
|
+
const mapQueryStringFieldToDbField = (type, queryField) => {
|
|
297
|
+
if (!(supportedMappings[type] && supportedMappings[type][queryField.name])) {
|
|
298
|
+
log.warn(`No db mapping field found for type: ${type}, field ${JSON.stringify(queryField)}`);
|
|
299
|
+
return undefined;
|
|
300
|
+
}
|
|
301
|
+
return supportedMappings[type] && supportedMappings[type][queryField.name](queryField.value);
|
|
302
|
+
};
|
|
303
|
+
exports.mapQueryStringFieldToDbField = mapQueryStringFieldToDbField;
|
|
304
|
+
//# sourceMappingURL=field-mapping.js.map
|
|
@@ -0,0 +1,10 @@
|
|
|
1
|
+
import { DbQueryParameters, QueryStringParameters } from '../types/search';
|
|
2
|
+
/**
|
|
3
|
+
* Convert api query string parameters to db query parameters
|
|
4
|
+
*
|
|
5
|
+
* @param type - query record type
|
|
6
|
+
* @param queryStringParameters - query string parameters
|
|
7
|
+
* @returns db query parameters
|
|
8
|
+
*/
|
|
9
|
+
export declare const convertQueryStringToDbQueryParameters: (type: string, queryStringParameters: QueryStringParameters) => DbQueryParameters;
|
|
10
|
+
//# sourceMappingURL=queries.d.ts.map
|
|
@@ -0,0 +1,235 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.convertQueryStringToDbQueryParameters = void 0;
|
|
7
|
+
const omit_1 = __importDefault(require("lodash/omit"));
|
|
8
|
+
const logger_1 = __importDefault(require("@cumulus/logger"));
|
|
9
|
+
const field_mapping_1 = require("./field-mapping");
|
|
10
|
+
const log = new logger_1.default({ sender: '@cumulus/db/queries' });
|
|
11
|
+
// reserved words which are not record fields
|
|
12
|
+
const reservedWords = [
|
|
13
|
+
'limit',
|
|
14
|
+
'page',
|
|
15
|
+
'skip',
|
|
16
|
+
'sort_by',
|
|
17
|
+
'sort_key',
|
|
18
|
+
'order',
|
|
19
|
+
'prefix',
|
|
20
|
+
'infix',
|
|
21
|
+
'estimateTableRowCount',
|
|
22
|
+
'fields',
|
|
23
|
+
'includeFullRecord',
|
|
24
|
+
'searchContext',
|
|
25
|
+
];
|
|
26
|
+
/**
|
|
27
|
+
* regexp for matching api query string parameter to query type
|
|
28
|
+
*/
|
|
29
|
+
const regexes = {
|
|
30
|
+
terms: /^(.*)__in$/,
|
|
31
|
+
term: /^((?!__).)*$/,
|
|
32
|
+
not: /^(.*)__not$/,
|
|
33
|
+
exists: /^(.*)__exists$/,
|
|
34
|
+
range: /^(.*)__(from|to)$/,
|
|
35
|
+
};
|
|
36
|
+
/**
|
|
37
|
+
* Convert 'exists' query fields to db query parameters from api query string fields
|
|
38
|
+
*
|
|
39
|
+
* @param type - query record type
|
|
40
|
+
* @param queryStringFields - api query fields
|
|
41
|
+
* @returns 'exists' query parameter
|
|
42
|
+
*/
|
|
43
|
+
const convertExists = (type, queryStringFields) => {
|
|
44
|
+
const exists = queryStringFields.reduce((acc, queryField) => {
|
|
45
|
+
const match = queryField.name.match(regexes.exists);
|
|
46
|
+
if (!match)
|
|
47
|
+
return acc;
|
|
48
|
+
// get corresponding db field name, e.g. granuleId => granule_id
|
|
49
|
+
const dbField = (0, field_mapping_1.mapQueryStringFieldToDbField)(type, { name: match[1] });
|
|
50
|
+
if (!dbField)
|
|
51
|
+
return acc;
|
|
52
|
+
Object.keys(dbField).forEach((key) => { dbField[key] = (queryField.value === 'true'); });
|
|
53
|
+
return { ...acc, ...dbField };
|
|
54
|
+
}, {});
|
|
55
|
+
return { exists };
|
|
56
|
+
};
|
|
57
|
+
/**
|
|
58
|
+
* Convert 'not' query fields to db query parameters from api query string fields
|
|
59
|
+
*
|
|
60
|
+
* @param type - query record type
|
|
61
|
+
* @param queryStringFields - api query fields
|
|
62
|
+
* @returns 'not' query parameter
|
|
63
|
+
*/
|
|
64
|
+
const convertNotMatch = (type, queryStringFields) => {
|
|
65
|
+
const not = queryStringFields.reduce((acc, queryField) => {
|
|
66
|
+
const match = queryField.name.match(regexes.not);
|
|
67
|
+
if (!match)
|
|
68
|
+
return acc;
|
|
69
|
+
// get corresponding db field name, e.g. granuleId => granule_id
|
|
70
|
+
const queryParam = (0, field_mapping_1.mapQueryStringFieldToDbField)(type, { ...queryField, name: match[1] });
|
|
71
|
+
return { ...acc, ...queryParam };
|
|
72
|
+
}, {});
|
|
73
|
+
return { not };
|
|
74
|
+
};
|
|
75
|
+
/**
|
|
76
|
+
* Convert range query fields to db query parameters from api query string fields
|
|
77
|
+
*
|
|
78
|
+
* @param type - query record type
|
|
79
|
+
* @param queryStringFields - api query fields
|
|
80
|
+
* @returns range query parameter
|
|
81
|
+
*/
|
|
82
|
+
const convertRange = (type, queryStringFields) => {
|
|
83
|
+
const range = queryStringFields.reduce((acc, queryField) => {
|
|
84
|
+
const match = queryField.name.match(regexes.range);
|
|
85
|
+
if (!match)
|
|
86
|
+
return acc;
|
|
87
|
+
// get corresponding db field name, e.g. timestamp => updated_at
|
|
88
|
+
const dbField = (0, field_mapping_1.mapQueryStringFieldToDbField)(type, { ...queryField, name: match[1] });
|
|
89
|
+
if (!dbField)
|
|
90
|
+
return acc;
|
|
91
|
+
const dbFieldName = Object.keys(dbField)[0];
|
|
92
|
+
// build a range field, e.g.
|
|
93
|
+
// { timestamp__from: '1712708508310', timestamp__to: '1712712108310' } =>
|
|
94
|
+
// { updated_at: {
|
|
95
|
+
// gte: new Date(1712708508310),
|
|
96
|
+
// lte: new Date(1712712108310),
|
|
97
|
+
// },
|
|
98
|
+
// }
|
|
99
|
+
const rangeField = { [dbFieldName]: acc[dbFieldName] || {} };
|
|
100
|
+
if (match[2] === 'from') {
|
|
101
|
+
rangeField[dbFieldName].gte = dbField[dbFieldName];
|
|
102
|
+
}
|
|
103
|
+
if (match[2] === 'to') {
|
|
104
|
+
rangeField[dbFieldName].lte = dbField[dbFieldName];
|
|
105
|
+
}
|
|
106
|
+
return { ...acc, ...rangeField };
|
|
107
|
+
}, {});
|
|
108
|
+
return { range };
|
|
109
|
+
};
|
|
110
|
+
/**
|
|
111
|
+
* Convert term query fields to db query parameters from api query string fields
|
|
112
|
+
*
|
|
113
|
+
* @param type - query record type
|
|
114
|
+
* @param queryStringFields - api query fields
|
|
115
|
+
* @returns term query parameter
|
|
116
|
+
*/
|
|
117
|
+
const convertTerm = (type, queryStringFields) => {
|
|
118
|
+
const term = queryStringFields.reduce((acc, queryField) => {
|
|
119
|
+
const queryParam = (0, field_mapping_1.mapQueryStringFieldToDbField)(type, queryField);
|
|
120
|
+
return { ...acc, ...queryParam };
|
|
121
|
+
}, {});
|
|
122
|
+
return { term };
|
|
123
|
+
};
|
|
124
|
+
/**
|
|
125
|
+
* Convert terms query fields to db query parameters from api query string fields
|
|
126
|
+
*
|
|
127
|
+
* @param type - query record type
|
|
128
|
+
* @param queryStringFields - api query fields
|
|
129
|
+
* @returns terms query parameter
|
|
130
|
+
*/
|
|
131
|
+
const convertTerms = (type, queryStringFields) => {
|
|
132
|
+
const terms = queryStringFields.reduce((acc, queryField) => {
|
|
133
|
+
const match = queryField.name.match(regexes.terms);
|
|
134
|
+
if (!match)
|
|
135
|
+
return acc;
|
|
136
|
+
// build a terms field, e.g.
|
|
137
|
+
// { granuleId__in: 'granuleId1,granuleId2' } =>
|
|
138
|
+
// [[granule_id, granuleId1], [granule_id, granuleId2]] =>
|
|
139
|
+
// { granule_id: [granuleId1, granuleId2] }
|
|
140
|
+
// this converts collectionId into name and version fields
|
|
141
|
+
const name = match[1];
|
|
142
|
+
const values = queryField.value.split(',');
|
|
143
|
+
const dbFieldValues = values
|
|
144
|
+
.map((value) => {
|
|
145
|
+
const dbField = (0, field_mapping_1.mapQueryStringFieldToDbField)(type, { name, value });
|
|
146
|
+
return Object.entries(dbField ?? {});
|
|
147
|
+
})
|
|
148
|
+
.filter(Boolean)
|
|
149
|
+
.flat();
|
|
150
|
+
if (dbFieldValues.length === 0)
|
|
151
|
+
return acc;
|
|
152
|
+
dbFieldValues.forEach(([field, value]) => {
|
|
153
|
+
acc[field] = acc[field] ?? [];
|
|
154
|
+
acc[field].push(value);
|
|
155
|
+
});
|
|
156
|
+
return acc;
|
|
157
|
+
}, {});
|
|
158
|
+
return { terms };
|
|
159
|
+
};
|
|
160
|
+
/**
|
|
161
|
+
* Convert sort query fields to db query parameters from api query string fields
|
|
162
|
+
*
|
|
163
|
+
* @param type - query record type
|
|
164
|
+
* @param queryStringParameters - query string parameters
|
|
165
|
+
* @returns sort query parameter
|
|
166
|
+
*/
|
|
167
|
+
const convertSort = (type, queryStringParameters) => {
|
|
168
|
+
const sortArray = [];
|
|
169
|
+
const { sort_by: sortBy, sort_key: sortKey } = queryStringParameters;
|
|
170
|
+
let { order } = queryStringParameters;
|
|
171
|
+
if (sortBy) {
|
|
172
|
+
order = order ?? 'asc';
|
|
173
|
+
const queryParam = (0, field_mapping_1.mapQueryStringFieldToDbField)(type, { name: sortBy });
|
|
174
|
+
Object.keys(queryParam ?? {}).map((key) => sortArray.push({ column: key, order }));
|
|
175
|
+
}
|
|
176
|
+
else if (sortKey) {
|
|
177
|
+
sortKey.map((item) => {
|
|
178
|
+
order = item.startsWith('-') ? 'desc' : 'asc';
|
|
179
|
+
const queryParam = (0, field_mapping_1.mapQueryStringFieldToDbField)(type, { name: item.replace(/^[+-]/, '') });
|
|
180
|
+
return Object.keys(queryParam ?? {}).map((key) => sortArray.push({ column: key, order }));
|
|
181
|
+
});
|
|
182
|
+
}
|
|
183
|
+
return sortArray;
|
|
184
|
+
};
|
|
185
|
+
/**
|
|
186
|
+
* functions for converting from api query string parameters to db query parameters
|
|
187
|
+
* for each type of query
|
|
188
|
+
*/
|
|
189
|
+
const convert = {
|
|
190
|
+
exists: convertExists,
|
|
191
|
+
not: convertNotMatch,
|
|
192
|
+
range: convertRange,
|
|
193
|
+
term: convertTerm,
|
|
194
|
+
terms: convertTerms,
|
|
195
|
+
};
|
|
196
|
+
/**
|
|
197
|
+
* Convert api query string parameters to db query parameters
|
|
198
|
+
*
|
|
199
|
+
* @param type - query record type
|
|
200
|
+
* @param queryStringParameters - query string parameters
|
|
201
|
+
* @returns db query parameters
|
|
202
|
+
*/
|
|
203
|
+
const convertQueryStringToDbQueryParameters = (type, queryStringParameters) => {
|
|
204
|
+
const { limit, page, prefix, infix, fields, estimateTableRowCount, includeFullRecord, } = queryStringParameters;
|
|
205
|
+
const dbQueryParameters = {};
|
|
206
|
+
dbQueryParameters.page = Number.parseInt(page ?? '1', 10);
|
|
207
|
+
dbQueryParameters.limit = Number.parseInt(limit ?? '10', 10);
|
|
208
|
+
dbQueryParameters.offset = (dbQueryParameters.page - 1) * dbQueryParameters.limit;
|
|
209
|
+
if (typeof infix === 'string')
|
|
210
|
+
dbQueryParameters.infix = infix;
|
|
211
|
+
if (typeof prefix === 'string')
|
|
212
|
+
dbQueryParameters.prefix = prefix;
|
|
213
|
+
if (typeof fields === 'string')
|
|
214
|
+
dbQueryParameters.fields = fields.split(',');
|
|
215
|
+
dbQueryParameters.estimateTableRowCount = (estimateTableRowCount === 'true');
|
|
216
|
+
dbQueryParameters.includeFullRecord = (includeFullRecord === 'true');
|
|
217
|
+
dbQueryParameters.sort = convertSort(type, queryStringParameters);
|
|
218
|
+
// remove reserved words (that are not fields)
|
|
219
|
+
const fieldParams = (0, omit_1.default)(queryStringParameters, reservedWords);
|
|
220
|
+
// determine which search strategy should be applied
|
|
221
|
+
// options are term, terms, range, exists and not
|
|
222
|
+
const fieldsList = Object.entries(fieldParams).map(([name, value]) => ({ name, value }));
|
|
223
|
+
// for each search strategy, get all parameters and convert them to db parameters
|
|
224
|
+
Object.keys(regexes).forEach((k) => {
|
|
225
|
+
const matchedFields = fieldsList.filter((f) => f.name.match(regexes[k]));
|
|
226
|
+
if (matchedFields && matchedFields.length > 0 && convert[k]) {
|
|
227
|
+
const queryParams = convert[k](type, matchedFields, regexes[k]);
|
|
228
|
+
Object.assign(dbQueryParameters, queryParams);
|
|
229
|
+
}
|
|
230
|
+
});
|
|
231
|
+
log.debug(`convertQueryStringToDbQueryParameters returns ${JSON.stringify(dbQueryParameters)}`);
|
|
232
|
+
return dbQueryParameters;
|
|
233
|
+
};
|
|
234
|
+
exports.convertQueryStringToDbQueryParameters = convertQueryStringToDbQueryParameters;
|
|
235
|
+
//# sourceMappingURL=queries.js.map
|
|
@@ -4,6 +4,12 @@ import { PostgresExecution, PostgresExecutionRecord } from '../types/execution';
|
|
|
4
4
|
import { ExecutionPgModel } from '../models/execution';
|
|
5
5
|
import { CollectionPgModel } from '../models/collection';
|
|
6
6
|
import { AsyncOperationPgModel } from '../models/async_operation';
|
|
7
|
+
export declare const translatePostgresExecutionToApiExecutionWithoutDbQuery: ({ executionRecord, collectionId, asyncOperationId, parentArn, }: {
|
|
8
|
+
executionRecord: PostgresExecutionRecord;
|
|
9
|
+
collectionId: string | undefined;
|
|
10
|
+
asyncOperationId: string | undefined;
|
|
11
|
+
parentArn: string | undefined;
|
|
12
|
+
}) => ApiExecutionRecord;
|
|
7
13
|
export declare const translatePostgresExecutionToApiExecution: (executionRecord: PostgresExecutionRecord, knex: Knex, collectionPgModel?: CollectionPgModel, asyncOperationPgModel?: AsyncOperationPgModel, executionPgModel?: ExecutionPgModel) => Promise<ApiExecutionRecord>;
|
|
8
14
|
/**
|
|
9
15
|
* Translate execution record from API to RDS.
|
|
@@ -3,7 +3,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
|
3
3
|
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
4
|
};
|
|
5
5
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
-
exports.translateApiExecutionToPostgresExecution = exports.translateApiExecutionToPostgresExecutionWithoutNilsRemoved = exports.translatePostgresExecutionToApiExecution = void 0;
|
|
6
|
+
exports.translateApiExecutionToPostgresExecution = exports.translateApiExecutionToPostgresExecutionWithoutNilsRemoved = exports.translatePostgresExecutionToApiExecution = exports.translatePostgresExecutionToApiExecutionWithoutDbQuery = void 0;
|
|
7
7
|
const isNil_1 = __importDefault(require("lodash/isNil"));
|
|
8
8
|
const isNull_1 = __importDefault(require("lodash/isNull"));
|
|
9
9
|
const errors_1 = require("@cumulus/errors");
|
|
@@ -14,28 +14,7 @@ const Collections_1 = require("@cumulus/message/Collections");
|
|
|
14
14
|
const execution_1 = require("../models/execution");
|
|
15
15
|
const collection_1 = require("../models/collection");
|
|
16
16
|
const async_operation_1 = require("../models/async_operation");
|
|
17
|
-
const
|
|
18
|
-
let parentArn;
|
|
19
|
-
let collectionId;
|
|
20
|
-
let asyncOperationId;
|
|
21
|
-
if (executionRecord.collection_cumulus_id) {
|
|
22
|
-
const collection = await collectionPgModel.get(knex, {
|
|
23
|
-
cumulus_id: executionRecord.collection_cumulus_id,
|
|
24
|
-
});
|
|
25
|
-
collectionId = (0, Collections_1.constructCollectionId)(collection.name, collection.version);
|
|
26
|
-
}
|
|
27
|
-
if (executionRecord.async_operation_cumulus_id) {
|
|
28
|
-
const asyncOperation = await asyncOperationPgModel.get(knex, {
|
|
29
|
-
cumulus_id: executionRecord.async_operation_cumulus_id,
|
|
30
|
-
});
|
|
31
|
-
asyncOperationId = asyncOperation.id;
|
|
32
|
-
}
|
|
33
|
-
if (executionRecord.parent_cumulus_id) {
|
|
34
|
-
const parentExecution = await executionPgModel.get(knex, {
|
|
35
|
-
cumulus_id: executionRecord.parent_cumulus_id,
|
|
36
|
-
});
|
|
37
|
-
parentArn = parentExecution.arn;
|
|
38
|
-
}
|
|
17
|
+
const translatePostgresExecutionToApiExecutionWithoutDbQuery = ({ executionRecord, collectionId, asyncOperationId, parentArn, }) => {
|
|
39
18
|
const postfix = executionRecord.arn.split(':').pop();
|
|
40
19
|
if (!postfix) {
|
|
41
20
|
throw new Error(`Execution ARN record ${executionRecord.arn} has an invalid postfix and API cannot generate the required 'name' field`);
|
|
@@ -61,6 +40,36 @@ const translatePostgresExecutionToApiExecution = async (executionRecord, knex, c
|
|
|
61
40
|
};
|
|
62
41
|
return (0, util_1.removeNilProperties)(translatedRecord);
|
|
63
42
|
};
|
|
43
|
+
exports.translatePostgresExecutionToApiExecutionWithoutDbQuery = translatePostgresExecutionToApiExecutionWithoutDbQuery;
|
|
44
|
+
const translatePostgresExecutionToApiExecution = async (executionRecord, knex, collectionPgModel = new collection_1.CollectionPgModel(), asyncOperationPgModel = new async_operation_1.AsyncOperationPgModel(), executionPgModel = new execution_1.ExecutionPgModel()) => {
|
|
45
|
+
let collectionId;
|
|
46
|
+
let asyncOperationId;
|
|
47
|
+
let parentArn;
|
|
48
|
+
if (executionRecord.collection_cumulus_id) {
|
|
49
|
+
const collection = await collectionPgModel.get(knex, {
|
|
50
|
+
cumulus_id: executionRecord.collection_cumulus_id,
|
|
51
|
+
});
|
|
52
|
+
collectionId = (0, Collections_1.constructCollectionId)(collection.name, collection.version);
|
|
53
|
+
}
|
|
54
|
+
if (executionRecord.async_operation_cumulus_id) {
|
|
55
|
+
const asyncOperation = await asyncOperationPgModel.get(knex, {
|
|
56
|
+
cumulus_id: executionRecord.async_operation_cumulus_id,
|
|
57
|
+
});
|
|
58
|
+
asyncOperationId = asyncOperation.id;
|
|
59
|
+
}
|
|
60
|
+
if (executionRecord.parent_cumulus_id) {
|
|
61
|
+
const parentExecution = await executionPgModel.get(knex, {
|
|
62
|
+
cumulus_id: executionRecord.parent_cumulus_id,
|
|
63
|
+
});
|
|
64
|
+
parentArn = parentExecution.arn;
|
|
65
|
+
}
|
|
66
|
+
return (0, exports.translatePostgresExecutionToApiExecutionWithoutDbQuery)({
|
|
67
|
+
executionRecord,
|
|
68
|
+
collectionId,
|
|
69
|
+
asyncOperationId,
|
|
70
|
+
parentArn,
|
|
71
|
+
});
|
|
72
|
+
};
|
|
64
73
|
exports.translatePostgresExecutionToApiExecution = translatePostgresExecutionToApiExecution;
|
|
65
74
|
/**
|
|
66
75
|
* Validate translation api record doesn't contain invalid null/undefined values based
|
|
@@ -5,9 +5,33 @@ import { PdrPgModel } from '../models/pdr';
|
|
|
5
5
|
import { ProviderPgModel } from '../models/provider';
|
|
6
6
|
import { FilePgModel } from '../models/file';
|
|
7
7
|
import { PostgresCollectionRecord } from '../types/collection';
|
|
8
|
+
import { PostgresExecutionRecord } from '../types/execution';
|
|
8
9
|
import { PostgresGranule, PostgresGranuleRecord } from '../types/granule';
|
|
10
|
+
import { PostgresFileRecord } from '../types/file';
|
|
11
|
+
import { PostgresPdrRecord } from '../types/pdr';
|
|
9
12
|
import { GranuleWithProviderAndCollectionInfo } from '../types/query';
|
|
10
13
|
import { PostgresProviderRecord } from '../types/provider';
|
|
14
|
+
/**
|
|
15
|
+
* Generate an API Granule object from the granule and associated Postgres objects without
|
|
16
|
+
* querying the database
|
|
17
|
+
*
|
|
18
|
+
* @param params - params
|
|
19
|
+
* @param params.granulePgRecord - Granule from Postgres
|
|
20
|
+
* @param params.collectionPgRecord - Collection from Postgres
|
|
21
|
+
* @param [params.executionUrls] - executionUrls from Postgres
|
|
22
|
+
* @param [params.files] - granule files from Postgres
|
|
23
|
+
* @param [params.pdr] - pdr from Postgres
|
|
24
|
+
* @param [params.providerPgRecord] - provider from Postgres
|
|
25
|
+
* @returns An API Granule with associated Files
|
|
26
|
+
*/
|
|
27
|
+
export declare const translatePostgresGranuleToApiGranuleWithoutDbQuery: ({ granulePgRecord, collectionPgRecord, executionUrls, files, pdr, providerPgRecord, }: {
|
|
28
|
+
granulePgRecord: PostgresGranuleRecord;
|
|
29
|
+
collectionPgRecord: Pick<PostgresCollectionRecord, 'cumulus_id' | 'name' | 'version'>;
|
|
30
|
+
executionUrls?: Partial<PostgresExecutionRecord>[] | undefined;
|
|
31
|
+
files?: PostgresFileRecord[] | undefined;
|
|
32
|
+
pdr?: Pick<PostgresPdrRecord, "name"> | undefined;
|
|
33
|
+
providerPgRecord?: Pick<PostgresProviderRecord, "name"> | undefined;
|
|
34
|
+
}) => ApiGranuleRecord;
|
|
11
35
|
/**
|
|
12
36
|
* Generate an API Granule object from a Postgres Granule with associated Files.
|
|
13
37
|
*
|