@cumulus/db 21.3.1-alpha.0 → 21.3.2-testlerna.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (55)
  1. package/dist/index.d.ts +1 -0
  2. package/dist/index.js +3 -1
  3. package/dist/s3search/AsyncOperationS3Search.d.ts +20 -0
  4. package/dist/s3search/AsyncOperationS3Search.js +29 -0
  5. package/dist/s3search/CollectionS3Search.d.ts +39 -0
  6. package/dist/s3search/CollectionS3Search.js +113 -0
  7. package/dist/s3search/DuckDBSearchExecutor.d.ts +36 -0
  8. package/dist/s3search/DuckDBSearchExecutor.js +57 -0
  9. package/dist/s3search/ExecutionS3Search.d.ts +20 -0
  10. package/dist/s3search/ExecutionS3Search.js +29 -0
  11. package/dist/s3search/GranuleS3Search.d.ts +31 -0
  12. package/dist/s3search/GranuleS3Search.js +100 -0
  13. package/dist/s3search/PdrS3Search.d.ts +20 -0
  14. package/dist/s3search/PdrS3Search.js +29 -0
  15. package/dist/s3search/ProviderS3Search.d.ts +20 -0
  16. package/dist/s3search/ProviderS3Search.js +29 -0
  17. package/dist/s3search/ReconciliationReportS3Search.d.ts +20 -0
  18. package/dist/s3search/ReconciliationReportS3Search.js +29 -0
  19. package/dist/s3search/RuleS3Search.d.ts +20 -0
  20. package/dist/s3search/RuleS3Search.js +29 -0
  21. package/dist/s3search/StatsS3Search.d.ts +25 -0
  22. package/dist/s3search/StatsS3Search.js +51 -0
  23. package/dist/s3search/duckdbHelpers.d.ts +43 -0
  24. package/dist/s3search/duckdbHelpers.js +83 -0
  25. package/dist/s3search/s3TableSchemas.d.ts +11 -0
  26. package/dist/s3search/s3TableSchemas.js +272 -0
  27. package/dist/search/BaseSearch.d.ts +46 -2
  28. package/dist/search/BaseSearch.js +84 -22
  29. package/dist/search/CollectionSearch.d.ts +6 -4
  30. package/dist/search/CollectionSearch.js +2 -3
  31. package/dist/search/ExecutionSearch.d.ts +1 -1
  32. package/dist/search/ExecutionSearch.js +3 -3
  33. package/dist/search/GranuleSearch.d.ts +2 -3
  34. package/dist/search/GranuleSearch.js +3 -3
  35. package/dist/search/PdrSearch.js +1 -1
  36. package/dist/search/ReconciliationReportSearch.js +1 -1
  37. package/dist/search/RuleSearch.js +4 -4
  38. package/dist/search/StatsSearch.d.ts +15 -4
  39. package/dist/search/StatsSearch.js +12 -6
  40. package/dist/search/field-mapping.d.ts +1 -3
  41. package/dist/search/field-mapping.js +40 -19
  42. package/dist/test-duckdb-utils.d.ts +31 -0
  43. package/dist/test-duckdb-utils.js +125 -0
  44. package/dist/test-utils.js +6 -0
  45. package/dist/translate/async_operations.js +7 -3
  46. package/dist/translate/collections.js +6 -6
  47. package/dist/translate/executions.js +7 -7
  48. package/dist/translate/granules.js +16 -11
  49. package/dist/translate/pdr.js +4 -4
  50. package/dist/translate/providers.js +2 -2
  51. package/dist/translate/reconciliation_reports.js +5 -4
  52. package/dist/translate/rules.d.ts +1 -1
  53. package/dist/translate/rules.js +6 -6
  54. package/dist/types/file.d.ts +2 -0
  55. package/package.json +12 -11
@@ -0,0 +1,29 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.ReconciliationReportS3Search = void 0;
4
+ const DuckDBSearchExecutor_1 = require("./DuckDBSearchExecutor");
5
+ const ReconciliationReportSearch_1 = require("../search/ReconciliationReportSearch");
6
/**
 * DuckDB-backed search for reconciliation reports.
 *
 * Reuses the knex query-building logic inherited from
 * ReconciliationReportSearch and delegates execution to a
 * DuckDBSearchExecutor bound to the supplied connection.
 */
class ReconciliationReportS3Search extends ReconciliationReportSearch_1.ReconciliationReportSearch {
    /**
     * @param event - query event carrying the search parameters
     * @param dbConnection - active DuckDB connection used to run the query
     */
    constructor(event, dbConnection) {
        super(event);
        // The executor receives the parsed query parameters plus callbacks
        // bound to this instance for meta generation and record translation.
        const executorConfig = {
            dbConnection,
            dbQueryParameters: this.dbQueryParameters,
            getMetaTemplate: this._metaTemplate.bind(this),
            translateRecords: this.translatePostgresRecordsToApiRecords.bind(this),
        };
        this.duckDBSearchExecutor = new DuckDBSearchExecutor_1.DuckDBSearchExecutor(executorConfig);
    }
    /**
     * Build and execute the search query.
     *
     * @returns search result
     */
    async query() {
        const buildQuery = (knexBuilder) => this.buildSearch(knexBuilder);
        return this.duckDBSearchExecutor.query(buildQuery);
    }
}
exports.ReconciliationReportS3Search = ReconciliationReportS3Search;
29
+ //# sourceMappingURL=ReconciliationReportS3Search.js.map
@@ -0,0 +1,20 @@
1
+ import { DuckDBConnection } from '@duckdb/node-api';
2
+ import { RuleSearch } from '../search/RuleSearch';
3
+ import { QueryEvent } from '../types/search';
4
/**
 * Class to build and execute a DuckDB search query for rules.
 *
 * Inherits the query-building behavior of RuleSearch; execution is delegated
 * to a DuckDB search executor over the provided connection.
 */
export declare class RuleS3Search extends RuleSearch {
    private duckDBSearchExecutor;
    /**
     * @param event - query event carrying the search parameters
     * @param dbConnection - active DuckDB connection used to run the query
     */
    constructor(event: QueryEvent, dbConnection: DuckDBConnection);
    /**
     * Build and execute search query.
     *
     * @returns search result containing response metadata and matching records
     */
    query(): Promise<{
        meta: import("../search/BaseSearch").Meta;
        results: any[];
    }>;
}
20
+ //# sourceMappingURL=RuleS3Search.d.ts.map
@@ -0,0 +1,29 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.RuleS3Search = void 0;
4
+ const DuckDBSearchExecutor_1 = require("./DuckDBSearchExecutor");
5
+ const RuleSearch_1 = require("../search/RuleSearch");
6
/**
 * DuckDB-backed search for rules.
 *
 * Reuses the knex query-building logic inherited from RuleSearch and
 * delegates execution to a DuckDBSearchExecutor bound to the supplied
 * connection.
 */
class RuleS3Search extends RuleSearch_1.RuleSearch {
    /**
     * @param event - query event carrying the search parameters
     * @param dbConnection - active DuckDB connection used to run the query
     */
    constructor(event, dbConnection) {
        super(event);
        // The executor receives the parsed query parameters plus callbacks
        // bound to this instance for meta generation and record translation.
        const executorConfig = {
            dbConnection,
            dbQueryParameters: this.dbQueryParameters,
            getMetaTemplate: this._metaTemplate.bind(this),
            translateRecords: this.translatePostgresRecordsToApiRecords.bind(this),
        };
        this.duckDBSearchExecutor = new DuckDBSearchExecutor_1.DuckDBSearchExecutor(executorConfig);
    }
    /**
     * Build and execute the search query.
     *
     * @returns search result
     */
    async query() {
        const buildQuery = (knexBuilder) => this.buildSearch(knexBuilder);
        return this.duckDBSearchExecutor.query(buildQuery);
    }
}
exports.RuleS3Search = RuleS3Search;
29
+ //# sourceMappingURL=RuleS3Search.js.map
@@ -0,0 +1,25 @@
1
+ import { DuckDBConnection } from '@duckdb/node-api';
2
+ import { QueryEvent } from '../types/search';
3
+ import { ApiAggregateResult, StatsSearch, SummaryResult } from '../search/StatsSearch';
4
/**
 * A class to serve the STATS and STATS/AGGREGATE endpoints by running the
 * shared StatsSearch queries against a DuckDB connection (not Postgres).
 */
declare class StatsS3Search extends StatsSearch {
    private dbConnection;
    private knexBuilder;
    /**
     * @param event - query event carrying the search parameters
     * @param type - record type to aggregate over
     * @param dbConnection - active DuckDB connection used to run the queries
     */
    constructor(event: QueryEvent, type: string, dbConnection: DuckDBConnection);
    /**
     * Queries DuckDB for a summary of statistics around the granules in the system
     *
     * @returns the aggregations based on the query
     */
    summary(): Promise<SummaryResult>;
    /**
     * Executes the aggregate search query
     *
     * @returns the aggregate query results in api format
     */
    aggregate(): Promise<ApiAggregateResult>;
}
export { StatsS3Search };
25
+ //# sourceMappingURL=StatsS3Search.d.ts.map
@@ -0,0 +1,51 @@
1
+ "use strict";
2
+ var __importDefault = (this && this.__importDefault) || function (mod) {
3
+ return (mod && mod.__esModule) ? mod : { "default": mod };
4
+ };
5
+ Object.defineProperty(exports, "__esModule", { value: true });
6
+ exports.StatsS3Search = void 0;
7
+ const knex_1 = require("knex");
8
+ const logger_1 = __importDefault(require("@cumulus/logger"));
9
+ const StatsSearch_1 = require("../search/StatsSearch");
10
+ const duckdbHelpers_1 = require("./duckdbHelpers");
11
+ const log = new logger_1.default({ sender: '@cumulus/db/StatsSearch' });
12
/**
 * A class to serve the STATS and STATS/AGGREGATE endpoints by running the
 * shared StatsSearch knex queries against a DuckDB connection.
 */
class StatsS3Search extends StatsSearch_1.StatsSearch {
    /**
     * @param event - query event carrying the search parameters
     * @param type - record type to aggregate over
     * @param dbConnection - active DuckDB connection used to run the queries
     */
    constructor(event, type, dbConnection) {
        super(event, type);
        this.dbConnection = dbConnection;
        // Use 'pg' dialect to generate DuckDB-compatible SQL ($1, $2, etc.)
        this.knexBuilder = (0, knex_1.knex)({ client: 'pg' });
    }
    /**
     * Queries DuckDB for a summary of statistics around the granules in the system
     *
     * @returns the aggregations based on the query
     */
    async summary() {
        const aggregateQuery = this.buildSummaryQuery(this.knexBuilder);
        // Convert to native ($1-style) SQL once and log the statement that is
        // actually executed, rather than rendering the SQL a second time.
        const { sql, bindings } = aggregateQuery.toSQL().toNative();
        log.debug(`summary about to execute query: ${sql}`);
        const reader = await this.dbConnection.runAndReadAll(sql, (0, duckdbHelpers_1.prepareBindings)([...bindings]));
        const aggregateQueryRes = reader.getRowObjectsJson();
        // Summary queries produce a single aggregate row
        return this.formatSummaryResult(aggregateQueryRes[0]);
    }
    /**
     * Executes the aggregate search query
     *
     * @returns the aggregate query results in api format
     */
    async aggregate() {
        const { searchQuery } = this.buildSearch(this.knexBuilder);
        const { sql, bindings } = searchQuery.toSQL().toNative();
        log.debug(`aggregate about to execute query: ${sql}`);
        const reader = await this.dbConnection.runAndReadAll(sql, (0, duckdbHelpers_1.prepareBindings)([...bindings]));
        const records = reader.getRowObjectsJson();
        return this.formatAggregateResult(records);
    }
}
exports.StatsS3Search = StatsS3Search;
51
+ //# sourceMappingURL=StatsS3Search.js.map
@@ -0,0 +1,43 @@
1
+ import { DuckDBValue, DuckDBConnection } from '@duckdb/node-api';
2
+ import { Knex } from 'knex';
3
+ import { PostgresFileRecord } from '../types/file';
4
/**
 * Convert knex query bindings into values DuckDB can bind directly:
 * Dates become ISO-8601 strings, non-null objects become JSON strings,
 * and everything else passes through unchanged.
 */
export declare function prepareBindings(bindings: ReadonlyArray<any>): DuckDBValue[];
/**
 * Returns execution records sorted by most recent first for an input
 * set of Granule Cumulus IDs.
 *
 * @param {object} params - The function parameters.
 * @param {DuckDBConnection} params.connection - The active DuckDB connection.
 * @param {number[]} params.granuleCumulusIds - Array of granule IDs to filter by.
 * @param {Knex} [params.knexBuilder] - Optional Knex instance (defaults to 'pg' client).
 * @param {number} [params.limit] - Optional limit for the number of records returned.
 * @returns {Promise<{ granule_cumulus_id: number, url: string }[]>}
 * Array of objects containing granule_cumulus_id and execution url, sorted by timestamp desc.
 */
export declare const getExecutionInfoByGranuleCumulusIds: ({ connection, granuleCumulusIds, knexBuilder, limit, }: {
    connection: DuckDBConnection;
    granuleCumulusIds: number[];
    // NOTE(review): the implementation supplies a default knexBuilder, so this
    // property should presumably be optional (knexBuilder?: Knex) — confirm.
    knexBuilder: Knex;
    limit?: number | undefined;
}) => Promise<{
    granule_cumulus_id: number;
    url: string;
}[]>;
/**
 * Searches for file records by granule cumulus IDs using a DuckDB connection.
 *
 * @param params - Function parameters
 * @param params.connection - Active DuckDB connection used to execute the query
 * @param params.granuleCumulusIds - Array of granule Cumulus IDs to filter by
 * @param [params.columns='*'] - Columns to select (string or string array)
 * @param [params.knexBuilder] - Optional Knex instance (defaults to PostgreSQL dialect)
 * @returns Promise resolving to an array of normalized `PostgresFileRecord` objects
 * @throws If the DuckDB query execution fails
 */
export declare const getFilesByGranuleCumulusIds: ({ connection, granuleCumulusIds, columns, knexBuilder, }: {
    connection: DuckDBConnection;
    granuleCumulusIds: number[];
    columns?: string | string[] | undefined;
    knexBuilder?: Knex<any, any[]> | undefined;
}) => Promise<PostgresFileRecord[]>;
43
+ //# sourceMappingURL=duckdbHelpers.d.ts.map
@@ -0,0 +1,83 @@
1
+ "use strict";
2
+ var __importDefault = (this && this.__importDefault) || function (mod) {
3
+ return (mod && mod.__esModule) ? mod : { "default": mod };
4
+ };
5
+ Object.defineProperty(exports, "__esModule", { value: true });
6
+ exports.getFilesByGranuleCumulusIds = exports.getExecutionInfoByGranuleCumulusIds = exports.prepareBindings = void 0;
7
+ const knex_1 = require("knex");
8
+ const logger_1 = __importDefault(require("@cumulus/logger"));
9
+ const tables_1 = require("../tables");
10
+ const log = new logger_1.default({ sender: '@cumulus/db/duckdbHelpers' });
11
/**
 * Normalize knex bindings into values DuckDB can bind directly.
 *
 * Dates are serialized to ISO-8601 strings and non-null objects to JSON
 * strings; every other value (numbers, strings, booleans, null, undefined)
 * is passed through untouched.
 *
 * @param bindings - the bindings produced by knex's toSQL().toNative()
 * @returns an array of DuckDB-bindable values, in the same order
 */
function prepareBindings(bindings) {
    const toDuckDBValue = (value) => {
        if (value instanceof Date) {
            return value.toISOString();
        }
        if (value !== null && typeof value === 'object') {
            return JSON.stringify(value);
        }
        return value;
    };
    return Array.from(bindings, toDuckDBValue);
}
exports.prepareBindings = prepareBindings;
21
/**
 * Look up execution URLs for a set of granule cumulus_ids, most recent first.
 *
 * @param {object} params - The function parameters.
 * @param {DuckDBConnection} params.connection - The active DuckDB connection.
 * @param {number[]} params.granuleCumulusIds - Array of granule IDs to filter by.
 * @param {Knex} [params.knexBuilder] - Optional Knex instance (defaults to 'pg' client).
 * @param {number} [params.limit] - Optional cap on the number of records returned.
 * @returns {Promise<{ granule_cumulus_id: number, url: string }[]>}
 * granule_cumulus_id / execution url pairs ordered by execution timestamp desc.
 */
const getExecutionInfoByGranuleCumulusIds = async ({ connection, granuleCumulusIds, knexBuilder = (0, knex_1.knex)({ client: 'pg' }), limit, }) => {
    const executionsTable = tables_1.TableNames.executions;
    const joinTable = tables_1.TableNames.granulesExecutions;
    let builder = knexBuilder(executionsTable)
        .select([
        `${executionsTable}.url`,
        `${joinTable}.granule_cumulus_id`,
    ])
        .join(joinTable, `${executionsTable}.cumulus_id`, `${joinTable}.execution_cumulus_id`)
        .whereIn(`${joinTable}.granule_cumulus_id`, granuleCumulusIds)
        .orderBy(`${executionsTable}.timestamp`, 'desc');
    if (limit) {
        builder = builder.limit(limit);
    }
    const { sql, bindings } = builder.toSQL().toNative();
    log.debug(`getExecutionInfoByGranuleCumulusIds query: ${sql}`);
    // bindings is a ReadonlyArray; spread into a mutable array for DuckDB
    const reader = await connection.runAndReadAll(sql, prepareBindings([...bindings]));
    return reader.getRowObjectsJson();
};
exports.getExecutionInfoByGranuleCumulusIds = getExecutionInfoByGranuleCumulusIds;
51
/**
 * Fetch file records for a set of granule cumulus_ids via a DuckDB connection.
 *
 * @param params - Function parameters
 * @param params.connection - Active DuckDB connection used to execute the query
 * @param params.granuleCumulusIds - Array of granule Cumulus IDs to filter by
 * @param [params.columns='*'] - Columns to select (string or string array)
 * @param [params.knexBuilder] - Optional Knex instance (defaults to PostgreSQL dialect)
 * @returns Promise resolving to an array of normalized `PostgresFileRecord` objects
 * @throws If the DuckDB query execution fails
 */
const getFilesByGranuleCumulusIds = async ({ connection, granuleCumulusIds, columns = '*', knexBuilder = (0, knex_1.knex)({ client: 'pg' }), }) => {
    const fileQuery = knexBuilder(tables_1.TableNames.files)
        .select(columns)
        .whereIn('granule_cumulus_id', granuleCumulusIds);
    const { sql, bindings } = fileQuery.toSQL().toNative();
    const reader = await connection.runAndReadAll(sql, prepareBindings([...bindings]));
    // DuckDB may return IDs as BigInt/strings and timestamps as ISO strings;
    // normalize each row back into PostgresFileRecord-shaped values.
    const normalizeRow = (row) => ({
        ...row,
        cumulus_id: Number(row.cumulus_id),
        granule_cumulus_id: Number(row.granule_cumulus_id),
        created_at: row.created_at ? new Date(row.created_at) : undefined,
        updated_at: row.updated_at ? new Date(row.updated_at) : undefined,
    });
    return reader.getRowObjectsJson().map(normalizeRow);
};
exports.getFilesByGranuleCumulusIds = getFilesByGranuleCumulusIds;
83
+ //# sourceMappingURL=duckdbHelpers.js.map
@@ -0,0 +1,11 @@
1
/**
 * DuckDB DDL builders for the S3-hosted copies of the Cumulus tables.
 * Each function accepts an optional table-name override and returns a
 * `CREATE TABLE IF NOT EXISTS` statement as a string.
 */
export declare const asyncOperationsS3TableSql: (tableName?: string) => string;
export declare const collectionsS3TableSql: (tableName?: string) => string;
export declare const executionsS3TableSql: (tableName?: string) => string;
export declare const filesS3TableSql: (tableName?: string) => string;
export declare const granulesS3TableSql: (tableName?: string) => string;
export declare const granulesExecutionsS3TableSql: (tableName?: string) => string;
export declare const pdrsS3TableSql: (tableName?: string) => string;
export declare const providersS3TableSql: (tableName?: string) => string;
export declare const reconciliationReportsS3TableSql: (tableName?: string) => string;
export declare const rulesS3TableSql: (tableName?: string) => string;
11
+ //# sourceMappingURL=s3TableSchemas.d.ts.map
@@ -0,0 +1,272 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.rulesS3TableSql = exports.reconciliationReportsS3TableSql = exports.providersS3TableSql = exports.pdrsS3TableSql = exports.granulesExecutionsS3TableSql = exports.granulesS3TableSql = exports.filesS3TableSql = exports.executionsS3TableSql = exports.collectionsS3TableSql = exports.asyncOperationsS3TableSql = void 0;
4
/**
 * Build DDL for the async_operations table (unique UUID id plus status and
 * operation_type CHECK constraints).
 *
 * @param tableName - target table name; defaults to 'async_operations'
 * @returns a CREATE TABLE IF NOT EXISTS statement
 */
const asyncOperationsS3TableSql = (tableName = 'async_operations') => `
CREATE TABLE IF NOT EXISTS ${tableName} (
cumulus_id INTEGER PRIMARY KEY,
id UUID NOT NULL,
description TEXT NOT NULL,
operation_type TEXT NOT NULL,
output JSON,
status TEXT NOT NULL,
task_arn TEXT,
created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
CONSTRAINT async_operations_id_unique UNIQUE (id),
CONSTRAINT async_operations_status_check
CHECK (status IN (
'RUNNING',
'SUCCEEDED',
'RUNNER_FAILED',
'TASK_FAILED'
)),
CONSTRAINT async_operations_operation_type_check
CHECK (operation_type IN (
'Bulk Execution Archive',
'Bulk Execution Delete',
'Bulk Granules',
'Bulk Granule Archive',
'Bulk Granule Delete',
'Bulk Granule Reingest',
'Data Migration',
'Dead-Letter Processing',
'DLA Migration',
'ES Index',
'Kinesis Replay',
'Migration Count Report',
'Reconciliation Report',
'SQS Replay'
))
);`;
exports.asyncOperationsS3TableSql = asyncOperationsS3TableSql;
42
/**
 * Build DDL for the collections table (unique on name+version, with a
 * duplicate_handling CHECK constraint).
 *
 * @param tableName - target table name; defaults to 'collections'
 * @returns a CREATE TABLE IF NOT EXISTS statement
 */
const collectionsS3TableSql = (tableName = 'collections') => `
CREATE TABLE IF NOT EXISTS ${tableName} (
cumulus_id INTEGER PRIMARY KEY,
name TEXT NOT NULL,
version TEXT NOT NULL,
sample_file_name TEXT NOT NULL,
granule_id_validation_regex TEXT NOT NULL,
granule_id_extraction_regex TEXT NOT NULL,
files JSON NOT NULL,
process TEXT,
url_path TEXT,
duplicate_handling TEXT,
report_to_ems BOOLEAN,
ignore_files_config_for_discovery BOOLEAN,
meta JSON,
tags JSON,
created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
UNIQUE (name, version),
CHECK (duplicate_handling IN ('error', 'replace', 'skip', 'version'))
);`;
exports.collectionsS3TableSql = collectionsS3TableSql;
64
/**
 * Build DDL for the executions table. References async_operations and
 * collections, and is self-referencing via parent_cumulus_id.
 *
 * @param tableName - target table name; defaults to 'executions'
 * @returns a CREATE TABLE IF NOT EXISTS statement
 */
const executionsS3TableSql = (tableName = 'executions') => `
CREATE TABLE IF NOT EXISTS ${tableName} (
cumulus_id BIGINT PRIMARY KEY,
arn TEXT NOT NULL,
async_operation_cumulus_id INTEGER,
collection_cumulus_id INTEGER,
parent_cumulus_id BIGINT,
cumulus_version TEXT,
url TEXT,
status TEXT NOT NULL,
tasks JSON,
error JSON,
workflow_name TEXT,
duration REAL,
original_payload JSON,
final_payload JSON,
"timestamp" TIMESTAMPTZ,
created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
archived BOOLEAN NOT NULL DEFAULT FALSE,
CONSTRAINT executions_arn_unique UNIQUE (arn),
CONSTRAINT executions_url_unique UNIQUE (url),
CONSTRAINT executions_async_operation_cumulus_id_foreign
FOREIGN KEY (async_operation_cumulus_id)
REFERENCES async_operations (cumulus_id),
CONSTRAINT executions_collection_cumulus_id_foreign
FOREIGN KEY (collection_cumulus_id)
REFERENCES collections (cumulus_id),
CONSTRAINT executions_parent_cumulus_id_foreign
FOREIGN KEY (parent_cumulus_id)
REFERENCES ${tableName} (cumulus_id),
CONSTRAINT executions_status_check
CHECK (status IN ('running', 'completed', 'failed', 'unknown'))
);`;
exports.executionsS3TableSql = executionsS3TableSql;
99
/**
 * Build DDL for the files table (unique on bucket+key, foreign key to
 * granules).
 *
 * @param tableName - target table name; defaults to 'files'
 * @returns a CREATE TABLE IF NOT EXISTS statement
 */
const filesS3TableSql = (tableName = 'files') => `
CREATE TABLE IF NOT EXISTS ${tableName} (
cumulus_id BIGINT PRIMARY KEY,
granule_cumulus_id BIGINT NOT NULL,
created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
file_size BIGINT,
bucket TEXT NOT NULL,
checksum_type TEXT,
checksum_value TEXT,
file_name TEXT,
key TEXT NOT NULL,
path TEXT,
source TEXT,
type TEXT,
CONSTRAINT files_bucket_key_unique UNIQUE (bucket, key),
CONSTRAINT files_granule_cumulus_id_foreign
FOREIGN KEY (granule_cumulus_id)
REFERENCES granules (cumulus_id)
);`;
exports.filesS3TableSql = filesS3TableSql;
120
/**
 * Build DDL for the granules table (unique on collection_cumulus_id +
 * granule_id, with a status CHECK constraint).
 *
 * @param tableName - target table name; defaults to 'granules'
 * @returns a CREATE TABLE IF NOT EXISTS statement
 */
const granulesS3TableSql = (tableName = 'granules') => `
CREATE TABLE IF NOT EXISTS ${tableName} (
cumulus_id BIGINT PRIMARY KEY,
granule_id TEXT NOT NULL,
status TEXT NOT NULL,
collection_cumulus_id INTEGER NOT NULL,
created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
published BOOLEAN,
duration REAL,
time_to_archive REAL,
time_to_process REAL,
product_volume BIGINT,
error JSON,
cmr_link TEXT,
pdr_cumulus_id INTEGER,
provider_cumulus_id INTEGER,
beginning_date_time TIMESTAMPTZ,
ending_date_time TIMESTAMPTZ,
last_update_date_time TIMESTAMPTZ,
processing_end_date_time TIMESTAMPTZ,
processing_start_date_time TIMESTAMPTZ,
production_date_time TIMESTAMPTZ,
query_fields JSON,
"timestamp" TIMESTAMPTZ,
producer_granule_id TEXT NOT NULL,
archived BOOLEAN NOT NULL DEFAULT FALSE,
UNIQUE (collection_cumulus_id, granule_id),
CHECK (status IN ('running', 'completed', 'failed', 'queued'))
);`;
exports.granulesS3TableSql = granulesS3TableSql;
151
/**
 * Build DDL for the granules_executions join table linking granules to
 * executions (unique on the id pair, foreign keys to both tables).
 *
 * @param tableName - target table name; defaults to 'granules_executions'
 * @returns a CREATE TABLE IF NOT EXISTS statement
 */
const granulesExecutionsS3TableSql = (tableName = 'granules_executions') => `
CREATE TABLE IF NOT EXISTS ${tableName} (
granule_cumulus_id BIGINT NOT NULL,
execution_cumulus_id BIGINT NOT NULL,
CONSTRAINT granules_executions_granule_execution_unique
UNIQUE (granule_cumulus_id, execution_cumulus_id),
CONSTRAINT granules_executions_execution_cumulus_id_foreign
FOREIGN KEY (execution_cumulus_id)
REFERENCES executions (cumulus_id),
CONSTRAINT granules_executions_granule_cumulus_id_foreign
FOREIGN KEY (granule_cumulus_id)
REFERENCES granules (cumulus_id)
);`;
exports.granulesExecutionsS3TableSql = granulesExecutionsS3TableSql;
165
/**
 * Build DDL for the pdrs table (unique name, foreign keys to collections,
 * executions, and providers, status CHECK constraint).
 *
 * @param tableName - target table name; defaults to 'pdrs'
 * @returns a CREATE TABLE IF NOT EXISTS statement
 */
const pdrsS3TableSql = (tableName = 'pdrs') => `
CREATE TABLE IF NOT EXISTS ${tableName} (
cumulus_id INTEGER PRIMARY KEY,
collection_cumulus_id INTEGER NOT NULL,
provider_cumulus_id INTEGER NOT NULL,
execution_cumulus_id BIGINT,
status TEXT NOT NULL,
name TEXT NOT NULL,
progress REAL,
pan_sent BOOLEAN,
pan_message TEXT,
stats JSON,
address TEXT,
original_url TEXT,
duration REAL,
"timestamp" TIMESTAMPTZ,
created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
CONSTRAINT pdrs_name_unique UNIQUE (name),
CONSTRAINT pdrs_collection_cumulus_id_foreign
FOREIGN KEY (collection_cumulus_id)
REFERENCES collections (cumulus_id),
CONSTRAINT pdrs_execution_cumulus_id_foreign
FOREIGN KEY (execution_cumulus_id)
REFERENCES executions (cumulus_id),
CONSTRAINT pdrs_provider_cumulus_id_foreign
FOREIGN KEY (provider_cumulus_id)
REFERENCES providers (cumulus_id),
CONSTRAINT pdrs_status_check
CHECK (status IN ('running', 'failed', 'completed'))
);`;
exports.pdrsS3TableSql = pdrsS3TableSql;
197
/**
 * Build DDL for the providers table (unique name, protocol CHECK constraint
 * with 'http' as the default protocol).
 *
 * @param tableName - target table name; defaults to 'providers'
 * @returns a CREATE TABLE IF NOT EXISTS statement
 */
const providersS3TableSql = (tableName = 'providers') => `
CREATE TABLE IF NOT EXISTS ${tableName} (
cumulus_id INTEGER PRIMARY KEY,
name TEXT NOT NULL,
protocol TEXT NOT NULL DEFAULT 'http',
host TEXT NOT NULL,
port INTEGER,
username TEXT,
password TEXT,
global_connection_limit INTEGER,
private_key TEXT,
cm_key_id TEXT,
certificate_uri TEXT,
created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
allowed_redirects TEXT[],
max_download_time INTEGER,
CONSTRAINT providers_name_unique UNIQUE (name),
CONSTRAINT providers_protocol_check
CHECK (protocol IN ('http', 'https', 'ftp', 'sftp', 's3'))
);`;
exports.providersS3TableSql = providersS3TableSql;
219
/**
 * Build DDL for the reconciliation_reports table (unique name, type and
 * status CHECK constraints).
 *
 * @param tableName - target table name; defaults to 'reconciliation_reports'
 * @returns a CREATE TABLE IF NOT EXISTS statement
 */
const reconciliationReportsS3TableSql = (tableName = 'reconciliation_reports') => `
CREATE TABLE IF NOT EXISTS ${tableName} (
cumulus_id INTEGER PRIMARY KEY,
name TEXT NOT NULL,
type TEXT NOT NULL,
status TEXT NOT NULL,
location TEXT,
error JSON,
created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
CONSTRAINT reconciliation_reports_name_unique UNIQUE (name),
CONSTRAINT reconciliation_reports_type_check
CHECK (type IN (
'Granule Inventory',
'Granule Not Found',
'Internal',
'Inventory',
'ORCA Backup'
)),
CONSTRAINT reconciliation_reports_status_check
CHECK (status IN ('Generated', 'Pending', 'Failed'))
);`;
exports.reconciliationReportsS3TableSql = reconciliationReportsS3TableSql;
242
/**
 * Build DDL for the rules table (unique name, foreign keys to collections
 * and providers, rule-type CHECK constraint).
 *
 * @param tableName - target table name; defaults to 'rules'
 * @returns a CREATE TABLE IF NOT EXISTS statement
 */
const rulesS3TableSql = (tableName = 'rules') => `
CREATE TABLE IF NOT EXISTS ${tableName} (
cumulus_id INTEGER PRIMARY KEY,
name TEXT NOT NULL,
workflow TEXT NOT NULL,
collection_cumulus_id INTEGER,
provider_cumulus_id INTEGER,
type TEXT NOT NULL,
enabled BOOLEAN NOT NULL,
value TEXT,
arn TEXT,
log_event_arn TEXT,
execution_name_prefix TEXT,
payload JSON,
meta JSON,
tags JSON,
queue_url TEXT,
created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
CONSTRAINT rules_name_unique UNIQUE (name),
CONSTRAINT rules_collection_cumulus_id_foreign
FOREIGN KEY (collection_cumulus_id)
REFERENCES collections (cumulus_id),
CONSTRAINT rules_provider_cumulus_id_foreign
FOREIGN KEY (provider_cumulus_id)
REFERENCES providers (cumulus_id),
CONSTRAINT rules_type_check
CHECK (type IN ('onetime', 'scheduled', 'sns', 'kinesis', 'sqs'))
);`;
exports.rulesS3TableSql = rulesS3TableSql;
272
+ //# sourceMappingURL=s3TableSchemas.js.map
@@ -1,11 +1,44 @@
1
1
  import { Knex } from 'knex';
2
2
  import { BaseRecord } from '../types/base';
3
3
  import { DbQueryParameters, QueryEvent, QueryStringParameters } from '../types/search';
4
+ export declare type Meta = {
5
+ name: string;
6
+ stack?: string;
7
+ table?: string;
8
+ limit?: number;
9
+ page?: number;
10
+ count?: number;
11
+ };
4
12
  export declare const typeToTable: {
5
13
  [key: string]: string;
6
14
  };
7
15
  /**
8
- * Class to build and execute db search query
16
+ * BaseSearch
17
+ *
18
+ * Abstract base class for building and executing database search queries.
19
+ *
20
+ * Responsibilities:
21
+ * - Parse and normalize incoming query string parameters.
22
+ * - Build database queries using Knex.
23
+ * - Execute queries against PostgreSQL by default.
24
+ * - Return standardized search API response format including metadata.
25
+ *
26
+ * Default Behavior:
27
+ * - The `query()` method executes against PostgreSQL using a Knex client.
28
+ *
29
+ * DuckDB Support:
30
+ * - Subclasses that query DuckDB (e.g., *S3Search classes) must override
31
+ * the `query()` and related methods
32
+ * - DuckDB subclasses are responsible for:
33
+ * - Executing queries using a DuckDB connection.
34
+ * - Handling sequential execution (to avoid prepared statement conflicts).
35
+ * - Translating DuckDB result types (e.g., string dates/JSON) into proper API types.
36
+ *
37
+ * Design Notes:
38
+ * - Query construction logic (e.g., `buildSearch`) is shared across Postgres
39
+ * and DuckDB implementations.
40
+ * - Execution strategy is delegated to subclasses when a different database
41
+ * engine is required.
9
42
  */
10
43
  declare abstract class BaseSearch {
11
44
  readonly type: string;
@@ -44,6 +77,17 @@ declare abstract class BaseSearch {
44
77
  * @returns whether an estimated row count should be returned
45
78
  */
46
79
  protected shouldEstimateRowcount(countSql: string): boolean;
80
+ /**
81
+ * Build a JSON query expression string for nested fields.
82
+ *
83
+ *
84
+ * @param fullFieldName - Dot-separated JSON path, e.g., 'query_fields.cnm.receivedTime'
85
+ * @returns The JSON query path string
86
+ * @example
87
+ * buildJsonQueryExpression('query_fields.cnm.receivedTime')
88
+ * // returns: query_fields -> 'cnm' ->> 'receivedTime'
89
+ */
90
+ protected buildJsonQueryExpression: (fullFieldName: string) => string;
47
91
  /**
48
92
  * Build the search query
49
93
  *
@@ -59,7 +103,7 @@ declare abstract class BaseSearch {
59
103
  *
60
104
  * @returns metadata template
61
105
  */
62
- private _metaTemplate;
106
+ protected _metaTemplate(): Meta;
63
107
  /**
64
108
  * Build basic query
65
109
  *