@firebaseextensions/firestore-bigquery-change-tracker 1.1.38 → 1.1.40

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -2,6 +2,18 @@
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
3
  exports.Clustering = void 0;
4
4
  const logs = require("../logs");
5
+ const VALID_CLUSTERING_TYPES = [
6
+ "BIGNUMERIC",
7
+ "BOOL",
8
+ "DATE",
9
+ "DATETIME",
10
+ "GEOGRAPHY",
11
+ "INT64",
12
+ "NUMERIC",
13
+ "RANGE",
14
+ "STRING",
15
+ "TIMESTAMP",
16
+ ];
5
17
  class Clustering {
6
18
  constructor(config, table, schema) {
7
19
  this.updateCluster = async (metaData) => {
@@ -33,17 +45,37 @@ class Clustering {
33
45
  }
34
46
  async hasInvalidFields(metaData) {
35
47
  const { clustering = [] } = this.config;
36
- if (!clustering)
37
- return Promise.resolve(false);
38
- const fieldNames = metaData
39
- ? metaData.schema.fields.map(($) => $.name)
40
- : [];
41
- const invalidFields = clustering.filter(($) => !fieldNames.includes($));
42
- if (invalidFields.length) {
43
- logs.invalidClustering(invalidFields.join(","));
44
- return Promise.resolve(true);
48
+ if (!clustering) {
49
+ return false;
45
50
  }
46
- return Promise.resolve(false);
51
+ if (!clustering.length) {
52
+ return false;
53
+ }
54
+ if (!metaData?.schema.fields.length) {
55
+ return false;
56
+ }
57
+ const fields = metaData.schema.fields;
58
+ const fieldNameToType = new Map(fields.map((field) => [field.name, field.type]));
59
+ // First check if all clustering fields exist in the schema
60
+ const nonExistentFields = clustering.filter((fieldName) => !fieldNameToType.has(fieldName));
61
+ if (nonExistentFields.length) {
62
+ logs.invalidClustering(nonExistentFields.join(","));
63
+ return true;
64
+ }
65
+ // Then check for invalid types among existing clustering fields
66
+ const invalidFieldTypes = clustering
67
+ .map((fieldName) => ({
68
+ fieldName,
69
+ type: fieldNameToType.get(fieldName),
70
+ }))
71
+ .filter(({ type }) => !VALID_CLUSTERING_TYPES.includes(type));
72
+ if (invalidFieldTypes.length) {
73
+ logs.invalidClusteringTypes(invalidFieldTypes
74
+ .map(({ fieldName, type }) => `${fieldName} (${type})`)
75
+ .join(", "));
76
+ return true;
77
+ }
78
+ return false;
47
79
  }
48
80
  }
49
81
  exports.Clustering = Clustering;
@@ -1,5 +1,6 @@
1
1
  import * as bigquery from "@google-cloud/bigquery";
2
2
  import { FirestoreEventHistoryTracker, FirestoreDocumentChangeEvent } from "../tracker";
3
+ import { LogLevel } from "../logger";
3
4
  export { RawChangelogSchema, RawChangelogViewSchema } from "./schema";
4
5
  export interface FirestoreBigQueryEventHistoryTrackerConfig {
5
6
  datasetId: string;
@@ -18,6 +19,11 @@ export interface FirestoreBigQueryEventHistoryTrackerConfig {
18
19
  useNewSnapshotQuerySyntax?: boolean;
19
20
  skipInit?: boolean;
20
21
  kmsKeyName?: string | undefined;
22
+ useMaterializedView?: boolean;
23
+ useIncrementalMaterializedView?: boolean;
24
+ maxStaleness?: string;
25
+ refreshIntervalMinutes?: number;
26
+ logLevel?: LogLevel | string;
21
27
  }
22
28
  /**
23
29
  * An FirestoreEventHistoryTracker that exports data to BigQuery.
@@ -70,7 +76,7 @@ export declare class FirestoreBigQueryEventHistoryTracker implements FirestoreEv
70
76
  * Creates the latest snapshot view, which returns only latest operations
71
77
  * of all existing documents over the raw change log table.
72
78
  */
73
- private initializeLatestView;
79
+ private _initializeLatestView;
74
80
  bigqueryDataset(): bigquery.Dataset;
75
81
  private rawChangeLogTableName;
76
82
  private rawLatestView;
@@ -6,7 +6,6 @@ const firestore_1 = require("firebase-admin/firestore");
6
6
  const traverse = require("traverse");
7
7
  const node_fetch_1 = require("node-fetch");
8
8
  const schema_1 = require("./schema");
9
- const snapshot_1 = require("./snapshot");
10
9
  const handleFailedTransactions_1 = require("./handleFailedTransactions");
11
10
  const tracker_1 = require("../tracker");
12
11
  const logs = require("../logs");
@@ -14,6 +13,8 @@ const partitioning_1 = require("./partitioning");
14
13
  const clustering_1 = require("./clustering");
15
14
  const checkUpdates_1 = require("./checkUpdates");
16
15
  const utils_1 = require("./utils");
16
+ const initializeLatestView_1 = require("./initializeLatestView");
17
+ const logger_1 = require("../logger");
17
18
  var schema_2 = require("./schema");
18
19
  Object.defineProperty(exports, "RawChangelogSchema", { enumerable: true, get: function () { return schema_2.RawChangelogSchema; } });
19
20
  Object.defineProperty(exports, "RawChangelogViewSchema", { enumerable: true, get: function () { return schema_2.RawChangelogViewSchema; } });
@@ -35,6 +36,7 @@ class FirestoreBigQueryEventHistoryTracker {
35
36
  if (!this.config.datasetLocation) {
36
37
  this.config.datasetLocation = "us";
37
38
  }
39
+ logger_1.logger.setLogLevel(this.config.logLevel || logger_1.LogLevel.INFO);
38
40
  }
39
41
  async record(events) {
40
42
  if (!this.config.skipInit) {
@@ -133,7 +135,15 @@ class FirestoreBigQueryEventHistoryTracker {
133
135
  async _waitForInitialization() {
134
136
  const dataset = this.bigqueryDataset();
135
137
  const changelogName = this.rawChangeLogTableName();
136
- return (0, utils_1.waitForInitialization)({ dataset, changelogName });
138
+ let materializedViewName;
139
+ if (this.config.useMaterializedView) {
140
+ materializedViewName = this.rawLatestView();
141
+ }
142
+ return (0, utils_1.waitForInitialization)({
143
+ dataset,
144
+ changelogName,
145
+ materializedViewName,
146
+ });
137
147
  }
138
148
  /**
139
149
  * Inserts rows of data into the BigQuery raw change log table.
@@ -192,7 +202,7 @@ class FirestoreBigQueryEventHistoryTracker {
192
202
  throw new Error(`Error initializing raw change log table: ${message}`);
193
203
  }
194
204
  try {
195
- await this.initializeLatestView();
205
+ await this._initializeLatestView();
196
206
  }
197
207
  catch (error) {
198
208
  const message = (0, utils_1.parseErrorMessage)(error, "initializing latest view");
@@ -310,77 +320,19 @@ class FirestoreBigQueryEventHistoryTracker {
310
320
  * Creates the latest snapshot view, which returns only latest operations
311
321
  * of all existing documents over the raw change log table.
312
322
  */
313
- async initializeLatestView() {
323
+ async _initializeLatestView() {
314
324
  const dataset = this.bigqueryDataset();
315
325
  const view = dataset.table(this.rawLatestView());
316
326
  const [viewExists] = await view.exists();
317
- const schema = schema_1.RawChangelogViewSchema;
318
- if (viewExists) {
319
- logs.bigQueryViewAlreadyExists(view.id, dataset.id);
320
- const [metadata] = await view.getMetadata();
321
- // TODO: just casting this for now, needs properly fixing
322
- const fields = (metadata.schema ? metadata.schema.fields : []);
323
- if (this.config.wildcardIds) {
324
- schema.fields.push(schema_1.documentPathParams);
325
- }
326
- const columnNames = fields.map((field) => field.name);
327
- const documentIdColExists = columnNames.includes("document_id");
328
- const pathParamsColExists = columnNames.includes("path_params");
329
- const oldDataColExists = columnNames.includes("old_data");
330
- /** If new view or opt-in to new query syntax **/
331
- const updateView = (0, checkUpdates_1.viewRequiresUpdate)({
332
- metadata,
333
- config: this.config,
334
- documentIdColExists,
335
- pathParamsColExists,
336
- oldDataColExists,
337
- });
338
- if (updateView) {
339
- metadata.view = (0, snapshot_1.latestConsistentSnapshotView)({
340
- datasetId: this.config.datasetId,
341
- tableName: this.rawChangeLogTableName(),
342
- schema,
343
- useLegacyQuery: !this.config.useNewSnapshotQuerySyntax,
344
- });
345
- if (!documentIdColExists) {
346
- logs.addNewColumn(this.rawLatestView(), schema_1.documentIdField.name);
347
- }
348
- await view.setMetadata(metadata);
349
- logs.updatingMetadata(this.rawLatestView(), {
350
- config: this.config,
351
- documentIdColExists,
352
- pathParamsColExists,
353
- oldDataColExists,
354
- });
355
- }
356
- }
357
- else {
358
- const schema = { fields: [...schema_1.RawChangelogViewSchema.fields] };
359
- if (this.config.wildcardIds) {
360
- schema.fields.push(schema_1.documentPathParams);
361
- }
362
- const latestSnapshot = (0, snapshot_1.latestConsistentSnapshotView)({
363
- datasetId: this.config.datasetId,
364
- tableName: this.rawChangeLogTableName(),
365
- schema,
366
- bqProjectId: this.bq.projectId,
367
- useLegacyQuery: !this.config.useNewSnapshotQuerySyntax,
368
- });
369
- logs.bigQueryViewCreating(this.rawLatestView(), latestSnapshot.query);
370
- const options = {
371
- friendlyName: this.rawLatestView(),
372
- view: latestSnapshot,
373
- };
374
- try {
375
- await view.create(options);
376
- await view.setMetadata({ schema: schema_1.RawChangelogViewSchema });
377
- logs.bigQueryViewCreated(this.rawLatestView());
378
- }
379
- catch (ex) {
380
- logs.tableCreationError(this.rawLatestView(), ex.message);
381
- }
382
- }
383
- return view;
327
+ return await (0, initializeLatestView_1.initializeLatestView)({
328
+ bq: this.bq,
329
+ changeTrackerConfig: this.config,
330
+ dataset,
331
+ view,
332
+ viewExists,
333
+ rawChangeLogTableName: this.rawChangeLogTableName(),
334
+ rawLatestViewName: this.rawLatestView(),
335
+ });
384
336
  }
385
337
  bigqueryDataset() {
386
338
  return this.bq.dataset(this.config.datasetId, {
@@ -0,0 +1,17 @@
1
+ import { BigQuery, Table } from "@google-cloud/bigquery";
2
+ import { FirestoreBigQueryEventHistoryTrackerConfig } from ".";
3
+ interface InitializeLatestMaterializedViewOptions {
4
+ bq: BigQuery;
5
+ changeTrackerConfig: FirestoreBigQueryEventHistoryTrackerConfig;
6
+ view: Table;
7
+ viewExists: boolean;
8
+ rawChangeLogTableName: string;
9
+ rawLatestViewName: string;
10
+ schema?: any;
11
+ }
12
+ export declare function shouldRecreateMaterializedView(view: Table, config: FirestoreBigQueryEventHistoryTrackerConfig, source: string): Promise<boolean>;
13
+ /**
14
+ * Creates the latest materialized view.
15
+ */
16
+ export declare function initializeLatestMaterializedView({ bq, changeTrackerConfig: config, view, viewExists, rawChangeLogTableName, rawLatestViewName, schema, }: InitializeLatestMaterializedViewOptions): Promise<Table>;
17
+ export {};
@@ -0,0 +1,71 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.initializeLatestMaterializedView = exports.shouldRecreateMaterializedView = void 0;
4
+ const logs = require("../logs");
5
+ const snapshot_1 = require("./snapshot");
6
+ const firebase_functions_1 = require("firebase-functions");
7
+ const sqlFormatter = require("sql-formatter");
8
+ async function shouldRecreateMaterializedView(view, config, source) {
9
+ const [viewMetadata] = await view.getMetadata();
10
+ const isIncremental = !viewMetadata.materializedView
11
+ ?.allowNonIncrementalDefinition;
12
+ const incrementalMatch = isIncremental === !!config.useIncrementalMaterializedView;
13
+ const viewQuery = viewMetadata.materializedView?.query || "";
14
+ const queryMatch = sqlFormatter.format(viewQuery) === sqlFormatter.format(source);
15
+ return !queryMatch || !incrementalMatch;
16
+ }
17
+ exports.shouldRecreateMaterializedView = shouldRecreateMaterializedView;
18
+ /**
19
+ * Creates the latest materialized view.
20
+ */
21
+ async function initializeLatestMaterializedView({ bq, changeTrackerConfig: config, view, viewExists, rawChangeLogTableName, rawLatestViewName, schema, }) {
22
+ try {
23
+ const { query, source } = config.useIncrementalMaterializedView
24
+ ? (0, snapshot_1.buildMaterializedViewQuery)({
25
+ projectId: bq.projectId,
26
+ datasetId: config.datasetId,
27
+ tableName: rawChangeLogTableName,
28
+ rawLatestViewName,
29
+ schema,
30
+ })
31
+ : (0, snapshot_1.buildNonIncrementalMaterializedViewQuery)({
32
+ projectId: bq.projectId,
33
+ datasetId: config.datasetId,
34
+ tableName: rawChangeLogTableName,
35
+ maxStaleness: config.maxStaleness,
36
+ refreshIntervalMinutes: config.refreshIntervalMinutes,
37
+ rawLatestViewName,
38
+ enableRefresh: true,
39
+ schema,
40
+ });
41
+ const desiredQuery = sqlFormatter.format(query);
42
+ if (viewExists) {
43
+ const shouldRecreate = await shouldRecreateMaterializedView(view, config, source);
44
+ if (!shouldRecreate) {
45
+ firebase_functions_1.logger.warn(`Materialized view requested, but a view with matching configuration exists. Skipping creation.`);
46
+ return view;
47
+ }
48
+ firebase_functions_1.logger.warn(`Configuration mismatch detected for ${rawLatestViewName} ` +
49
+ `Recreating view...`);
50
+ await view.delete();
51
+ return await initializeLatestMaterializedView({
52
+ bq,
53
+ changeTrackerConfig: config,
54
+ view,
55
+ viewExists: false,
56
+ rawChangeLogTableName,
57
+ rawLatestViewName,
58
+ schema,
59
+ });
60
+ }
61
+ logs.bigQueryViewCreating(rawLatestViewName, desiredQuery);
62
+ await bq.query(desiredQuery);
63
+ logs.bigQueryViewCreated(rawLatestViewName);
64
+ }
65
+ catch (error) {
66
+ logs.tableCreationError(rawLatestViewName, error.message);
67
+ throw error;
68
+ }
69
+ return view;
70
+ }
71
+ exports.initializeLatestMaterializedView = initializeLatestMaterializedView;
@@ -0,0 +1,21 @@
1
+ import { BigQuery, Dataset, Table } from "@google-cloud/bigquery";
2
+ import { FirestoreBigQueryEventHistoryTrackerConfig } from ".";
3
+ interface InitializeLatestViewOptions {
4
+ bq: BigQuery;
5
+ changeTrackerConfig: FirestoreBigQueryEventHistoryTrackerConfig;
6
+ dataset: Dataset;
7
+ view: Table;
8
+ viewExists: boolean;
9
+ rawChangeLogTableName: string;
10
+ rawLatestViewName: string;
11
+ useMaterializedView?: boolean;
12
+ useIncrementalMaterializedView?: boolean;
13
+ useLegacyQuery?: boolean;
14
+ refreshIntervalMinutes?: number;
15
+ maxStaleness?: string;
16
+ }
17
+ /**
18
+ * Creates the latest snapshot view or materialized view.
19
+ */
20
+ export declare function initializeLatestView({ changeTrackerConfig: config, dataset, view, viewExists, rawChangeLogTableName, rawLatestViewName, bq, }: InitializeLatestViewOptions): Promise<Table>;
21
+ export {};
@@ -0,0 +1,94 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.initializeLatestView = void 0;
4
+ const schema_1 = require("./schema");
5
+ const logs = require("../logs");
6
+ const snapshot_1 = require("./snapshot");
7
+ const checkUpdates_1 = require("./checkUpdates");
8
+ const initializeLatestMaterializedView_1 = require("./initializeLatestMaterializedView");
9
+ /**
10
+ * Creates the latest snapshot view or materialized view.
11
+ */
12
+ async function initializeLatestView({ changeTrackerConfig: config, dataset, view, viewExists, rawChangeLogTableName, rawLatestViewName, bq, }) {
13
+ if (config.useMaterializedView) {
14
+ const schema = { fields: [...schema_1.RawChangelogViewSchema.fields] };
15
+ if (config.wildcardIds) {
16
+ schema.fields.push(schema_1.documentPathParams);
17
+ }
18
+ return (0, initializeLatestMaterializedView_1.initializeLatestMaterializedView)({
19
+ bq,
20
+ changeTrackerConfig: config,
21
+ view,
22
+ viewExists,
23
+ rawChangeLogTableName,
24
+ rawLatestViewName,
25
+ schema,
26
+ });
27
+ }
28
+ const schema = schema_1.RawChangelogViewSchema;
29
+ if (viewExists) {
30
+ logs.bigQueryViewAlreadyExists(view.id, dataset.id);
31
+ const [metadata] = await view.getMetadata();
32
+ const fields = (metadata.schema ? metadata.schema.fields : []);
33
+ if (config.wildcardIds) {
34
+ schema.fields.push(schema_1.documentPathParams);
35
+ }
36
+ const columnNames = fields.map((field) => field.name);
37
+ const documentIdColExists = columnNames.includes("document_id");
38
+ const pathParamsColExists = columnNames.includes("path_params");
39
+ const oldDataColExists = columnNames.includes("old_data");
40
+ const updateView = (0, checkUpdates_1.viewRequiresUpdate)({
41
+ metadata,
42
+ config,
43
+ documentIdColExists,
44
+ pathParamsColExists,
45
+ oldDataColExists,
46
+ });
47
+ if (updateView) {
48
+ metadata.view = (0, snapshot_1.latestConsistentSnapshotView)({
49
+ datasetId: config.datasetId,
50
+ tableName: rawChangeLogTableName,
51
+ schema,
52
+ useLegacyQuery: !config.useNewSnapshotQuerySyntax,
53
+ });
54
+ if (!documentIdColExists) {
55
+ logs.addNewColumn(rawLatestViewName, schema_1.documentIdField.name);
56
+ }
57
+ await view.setMetadata(metadata);
58
+ logs.updatingMetadata(rawLatestViewName, {
59
+ config,
60
+ documentIdColExists,
61
+ pathParamsColExists,
62
+ oldDataColExists,
63
+ });
64
+ }
65
+ }
66
+ else {
67
+ const schema = { fields: [...schema_1.RawChangelogViewSchema.fields] };
68
+ if (config.wildcardIds) {
69
+ schema.fields.push(schema_1.documentPathParams);
70
+ }
71
+ const latestSnapshot = (0, snapshot_1.latestConsistentSnapshotView)({
72
+ datasetId: config.datasetId,
73
+ tableName: rawChangeLogTableName,
74
+ schema,
75
+ bqProjectId: bq.projectId,
76
+ useLegacyQuery: !config.useNewSnapshotQuerySyntax,
77
+ });
78
+ logs.bigQueryViewCreating(rawLatestViewName, latestSnapshot.query);
79
+ const options = {
80
+ friendlyName: rawLatestViewName,
81
+ view: latestSnapshot,
82
+ };
83
+ try {
84
+ await view.create(options);
85
+ await view.setMetadata({ schema: schema_1.RawChangelogViewSchema });
86
+ logs.bigQueryViewCreated(rawLatestViewName);
87
+ }
88
+ catch (error) {
89
+ logs.tableCreationError(rawLatestViewName, error.message);
90
+ }
91
+ }
92
+ return view;
93
+ }
94
+ exports.initializeLatestView = initializeLatestView;
@@ -9,7 +9,7 @@ export declare const latestConsistentSnapshotView: ({ datasetId, tableName, sche
9
9
  query: string;
10
10
  useLegacySql: boolean;
11
11
  };
12
- interface buildLatestSnapshotViewQueryOptions {
12
+ interface BuildLatestSnapshotViewQueryOptions {
13
13
  datasetId: string;
14
14
  tableName: string;
15
15
  timestampColumnName: string;
@@ -17,5 +17,27 @@ interface buildLatestSnapshotViewQueryOptions {
17
17
  bqProjectId?: string;
18
18
  useLegacyQuery?: boolean;
19
19
  }
20
- export declare function buildLatestSnapshotViewQuery({ datasetId, tableName, timestampColumnName, groupByColumns, bqProjectId, useLegacyQuery, }: buildLatestSnapshotViewQueryOptions): string;
20
+ export declare function buildLatestSnapshotViewQuery({ datasetId, tableName, timestampColumnName, groupByColumns, bqProjectId, useLegacyQuery, }: BuildLatestSnapshotViewQueryOptions): string;
21
+ interface MaterializedViewOptions {
22
+ projectId: string;
23
+ datasetId: string;
24
+ tableName: string;
25
+ rawLatestViewName: string;
26
+ schema: any;
27
+ refreshIntervalMinutes?: number;
28
+ maxStaleness?: string;
29
+ }
30
+ interface NonIncrementalMaterializedViewOptions extends MaterializedViewOptions {
31
+ enableRefresh?: boolean;
32
+ }
33
+ export declare function buildMaterializedViewQuery({ projectId, datasetId, tableName, rawLatestViewName, schema, refreshIntervalMinutes, maxStaleness, }: NonIncrementalMaterializedViewOptions): {
34
+ target: string;
35
+ source: string;
36
+ query: string;
37
+ };
38
+ export declare function buildNonIncrementalMaterializedViewQuery({ projectId, datasetId, tableName, rawLatestViewName, schema, refreshIntervalMinutes, maxStaleness, enableRefresh, }: NonIncrementalMaterializedViewOptions): {
39
+ target: string;
40
+ source: string;
41
+ query: string;
42
+ };
21
43
  export {};
@@ -15,18 +15,17 @@
15
15
  * limitations under the License.
16
16
  */
17
17
  Object.defineProperty(exports, "__esModule", { value: true });
18
- exports.buildLatestSnapshotViewQuery = exports.latestConsistentSnapshotView = void 0;
18
+ exports.buildNonIncrementalMaterializedViewQuery = exports.buildMaterializedViewQuery = exports.buildLatestSnapshotViewQuery = exports.latestConsistentSnapshotView = void 0;
19
19
  const sqlFormatter = require("sql-formatter");
20
20
  const schema_1 = require("./schema");
21
21
  const excludeFields = ["document_name", "document_id"];
22
+ const nonGroupFields = ["event_id", "data", "old_data"];
22
23
  const latestConsistentSnapshotView = ({ datasetId, tableName, schema, bqProjectId, useLegacyQuery = false, }) => ({
23
24
  query: buildLatestSnapshotViewQuery({
24
25
  datasetId,
25
26
  tableName,
26
27
  timestampColumnName: schema_1.timestampField.name,
27
- groupByColumns: schema["fields"]
28
- .map((field) => field.name)
29
- .filter((name) => excludeFields.indexOf(name) === -1),
28
+ groupByColumns: extractGroupByColumns(schema),
30
29
  bqProjectId,
31
30
  useLegacyQuery,
32
31
  }),
@@ -34,72 +33,187 @@ const latestConsistentSnapshotView = ({ datasetId, tableName, schema, bqProjectI
34
33
  });
35
34
  exports.latestConsistentSnapshotView = latestConsistentSnapshotView;
36
35
  function buildLatestSnapshotViewQuery({ datasetId, tableName, timestampColumnName, groupByColumns, bqProjectId, useLegacyQuery = true, }) {
37
- if (datasetId === "" || tableName === "" || timestampColumnName === "") {
38
- throw Error(`Missing some query parameters!`);
36
+ validateInputs({ datasetId, tableName, timestampColumnName, groupByColumns });
37
+ const projectId = bqProjectId || process.env.PROJECT_ID;
38
+ return useLegacyQuery
39
+ ? buildLegacyQuery(projectId, datasetId, tableName, timestampColumnName, groupByColumns)
40
+ : buildStandardQuery(projectId, datasetId, tableName, timestampColumnName, groupByColumns);
41
+ }
42
+ exports.buildLatestSnapshotViewQuery = buildLatestSnapshotViewQuery;
43
+ function extractGroupByColumns(schema) {
44
+ return schema["fields"]
45
+ .map((field) => field.name)
46
+ .filter((name) => !excludeFields.includes(name));
47
+ }
48
+ function validateInputs({ datasetId, tableName, timestampColumnName, groupByColumns, }) {
49
+ if (!datasetId || !tableName || !timestampColumnName) {
50
+ throw new Error("Missing required query parameters!");
39
51
  }
40
- for (let columnName of groupByColumns) {
41
- if (columnName === "") {
42
- throw Error(`Found empty group by column!`);
43
- }
52
+ if (groupByColumns.some((columnName) => !columnName)) {
53
+ throw new Error("Group by columns must not contain empty values!");
44
54
  }
45
- const legacyQuery = sqlFormatter.format(` -- Retrieves the latest document change events for all live documents.
46
- -- timestamp: The Firestore timestamp at which the event took place.
47
- -- operation: One of INSERT, UPDATE, DELETE, IMPORT.
48
- -- event_id: The id of the event that triggered the cloud function mirrored the event.
49
- -- data: A raw JSON payload of the current state of the document.
50
- -- document_id: The document id as defined in the Firestore database
51
- SELECT
52
- document_name,
53
- document_id${groupByColumns.length > 0 ? `,` : ``}
54
- ${groupByColumns.join(",")}
55
- FROM (
55
+ }
56
+ function buildLegacyQuery(projectId, datasetId, tableName, timestampColumnName, groupByColumns) {
57
+ return sqlFormatter.format(`
58
+ -- Retrieves the latest document change events for all live documents.
59
+ -- timestamp: The Firestore timestamp at which the event took place.
60
+ -- operation: One of INSERT, UPDATE, DELETE, IMPORT.
61
+ -- event_id: The id of the event that triggered the cloud function mirrored the event.
62
+ -- data: A raw JSON payload of the current state of the document.
63
+ -- document_id: The document id as defined in the Firestore database
56
64
  SELECT
57
65
  document_name,
58
- document_id,
59
- ${groupByColumns
60
- .map((columnName) => `FIRST_VALUE(${columnName})
61
- OVER(PARTITION BY document_name ORDER BY ${timestampColumnName} DESC)
62
- AS ${columnName}`)
63
- .join(",")}${groupByColumns.length > 0 ? `,` : ``}
64
- FIRST_VALUE(operation)
65
- OVER(PARTITION BY document_name ORDER BY ${timestampColumnName} DESC) = "DELETE"
66
- AS is_deleted
67
- FROM \`${bqProjectId || process.env.PROJECT_ID}.${datasetId}.${tableName}\`
68
- ORDER BY document_name, ${timestampColumnName} DESC
69
- )
70
- WHERE NOT is_deleted
71
- GROUP BY document_name, document_id${groupByColumns.length > 0 ? `, ` : ``}${groupByColumns.join(",")}`);
72
- const nonGroupFields = ["event_id", "data", "old_data"];
73
- const joinFields = ["document_name"];
74
- const addSelectField = (field) => {
75
- if (joinFields.includes(field))
76
- return `t.${field}`;
77
- return nonGroupFields.includes(field)
78
- ? `ANY_VALUE(${field}) as ${field}`
79
- : `${field} as ${field}`;
80
- };
81
- const filterGroupField = (field) => {
82
- return nonGroupFields.includes(field);
83
- };
84
- const query = sqlFormatter.format(` -- Retrieves the latest document change events for all live documents.
66
+ document_id${groupByColumns.length > 0 ? `,` : ``}
67
+ ${groupByColumns.join(",")}
68
+ FROM (
69
+ SELECT
70
+ document_name,
71
+ document_id,
72
+ ${groupByColumns
73
+ .map((columnName) => `FIRST_VALUE(${columnName}) OVER (
74
+ PARTITION BY document_name
75
+ ORDER BY ${timestampColumnName} DESC
76
+ ) AS ${columnName}`)
77
+ .join(",")}${groupByColumns.length > 0 ? "," : ""}
78
+ FIRST_VALUE(operation) OVER (
79
+ PARTITION BY document_name
80
+ ORDER BY ${timestampColumnName} DESC
81
+ ) = "DELETE" AS is_deleted
82
+ FROM \`${projectId}.${datasetId}.${tableName}\`
83
+ ORDER BY document_name, ${timestampColumnName} DESC
84
+ )
85
+ WHERE NOT is_deleted
86
+ GROUP BY document_name, document_id${groupByColumns.length > 0 ? ", " : ""}${groupByColumns.join(",")}`);
87
+ }
88
+ function buildStandardQuery(projectId, datasetId, tableName, timestampColumnName, groupByColumns) {
89
+ return sqlFormatter.format(`
90
+ -- Retrieves the latest document change events for all live documents.
85
91
  -- timestamp: The Firestore timestamp at which the event took place.
86
92
  -- operation: One of INSERT, UPDATE, DELETE, IMPORT.
87
93
  -- event_id: The id of the event that triggered the cloud function mirrored the event.
88
94
  -- data: A raw JSON payload of the current state of the document.
89
95
  -- document_id: The document id as defined in the Firestore database
90
96
  WITH latest AS (
91
- SELECT max(${timestampColumnName}) as latest_timestamp, document_name
92
- FROM \`${bqProjectId || process.env.PROJECT_ID}.${datasetId}.${tableName}\`
97
+ SELECT MAX(${timestampColumnName}) AS latest_timestamp, document_name
98
+ FROM \`${projectId}.${datasetId}.${tableName}\`
93
99
  GROUP BY document_name
94
100
  )
95
101
  SELECT
96
- t.document_name,
97
- document_id${groupByColumns.length > 0 ? `,` : ``}
98
- ${groupByColumns.map((f) => addSelectField(f)).join(",")}
99
- FROM \`${bqProjectId || process.env.PROJECT_ID}.${datasetId}.${tableName}\` AS t
100
- JOIN latest ON (t.document_name = latest.document_name AND (IFNULL(t.${timestampColumnName}, timestamp("1970-01-01 00:00:00+00"))) = (IFNULL(latest.latest_timestamp, timestamp("1970-01-01 00:00:00+00"))))
102
+ t.document_name,
103
+ document_id${groupByColumns.length > 0 ? "," : ""}
104
+ ${groupByColumns
105
+ .map((field) => nonGroupFields.includes(field)
106
+ ? `ANY_VALUE(${field}) AS ${field}`
107
+ : `${field} AS ${field}`)
108
+ .join(",")}
109
+ FROM \`${projectId}.${datasetId}.${tableName}\` AS t
110
+ JOIN latest ON (
111
+ t.document_name = latest.document_name AND
112
+ IFNULL(t.${timestampColumnName}, TIMESTAMP("1970-01-01 00:00:00+00")) =
113
+ IFNULL(latest.latest_timestamp, TIMESTAMP("1970-01-01 00:00:00+00"))
114
+ )
101
115
  WHERE operation != "DELETE"
102
- GROUP BY document_name, document_id${groupByColumns.length > 0 ? `, ` : ``}${groupByColumns.filter((c) => !filterGroupField(c)).join(",")}`);
103
- return useLegacyQuery ? legacyQuery : query;
116
+ GROUP BY document_name, document_id${groupByColumns.length > 0 ? ", " : ""}${groupByColumns
117
+ .filter((field) => !nonGroupFields.includes(field))
118
+ .join(",")}`);
104
119
  }
105
- exports.buildLatestSnapshotViewQuery = buildLatestSnapshotViewQuery;
120
+ // Helper function to extract fields from schema
121
+ function extractFieldsFromSchema(schema) {
122
+ if (!schema || !schema.fields) {
123
+ throw new Error("Invalid schema: must contain fields array");
124
+ }
125
+ return schema.fields.map((field) => field.name);
126
+ }
127
+ function buildMaterializedViewQuery({ projectId, datasetId, tableName, rawLatestViewName, schema, refreshIntervalMinutes, maxStaleness, }) {
128
+ // Build the options string
129
+ const options = [];
130
+ if (refreshIntervalMinutes !== undefined) {
131
+ options.push(`refresh_interval_minutes = ${refreshIntervalMinutes}`);
132
+ }
133
+ if (maxStaleness) {
134
+ options.push(`max_staleness = ${maxStaleness}`);
135
+ }
136
+ const optionsString = options.length > 0
137
+ ? `OPTIONS (
138
+ ${options.join(",\n ")}
139
+ )`
140
+ : "";
141
+ // Extract fields from schema
142
+ const fields = extractFieldsFromSchema(schema);
143
+ // Build the aggregated fields for the CTE
144
+ const aggregatedFields = fields
145
+ .map((fieldName) => {
146
+ if (fieldName === "document_name") {
147
+ return " document_name";
148
+ }
149
+ if (fieldName === "timestamp") {
150
+ return " MAX(timestamp) AS timestamp";
151
+ }
152
+ return ` MAX_BY(${fieldName}, timestamp) AS ${fieldName}`;
153
+ })
154
+ .join(",\n ");
155
+ const target = `CREATE MATERIALIZED VIEW \`${projectId}.${datasetId}.${rawLatestViewName}\` ${optionsString}`;
156
+ const source = `
157
+ WITH latests AS (
158
+ SELECT
159
+ ${aggregatedFields}
160
+ FROM \`${projectId}.${datasetId}.${tableName}\`
161
+ GROUP BY document_name
162
+ )
163
+ SELECT *
164
+ FROM latests
165
+ `;
166
+ // Combine all parts with options before AS
167
+ const fullQuery = sqlFormatter.format(`${target} AS (${source})`);
168
+ return { target, source, query: fullQuery };
169
+ }
170
+ exports.buildMaterializedViewQuery = buildMaterializedViewQuery;
171
+ function buildNonIncrementalMaterializedViewQuery({ projectId, datasetId, tableName, rawLatestViewName, schema, refreshIntervalMinutes, maxStaleness, enableRefresh = true, }) {
172
+ // Build the options string
173
+ const options = [];
174
+ options.push("allow_non_incremental_definition = true");
175
+ if (enableRefresh !== undefined) {
176
+ options.push(`enable_refresh = ${enableRefresh}`);
177
+ }
178
+ if (refreshIntervalMinutes !== undefined) {
179
+ options.push(`refresh_interval_minutes = ${refreshIntervalMinutes}`);
180
+ }
181
+ if (maxStaleness) {
182
+ options.push(`max_staleness = ${maxStaleness}`);
183
+ }
184
+ const optionsString = options.length > 0
185
+ ? `OPTIONS (
186
+ ${options.join(",\n ")}
187
+ )`
188
+ : "";
189
+ // Extract fields from schema
190
+ const fields = extractFieldsFromSchema(schema);
191
+ // Build the aggregated fields for the CTE
192
+ const aggregatedFields = fields
193
+ .map((fieldName) => {
194
+ if (fieldName === "document_name") {
195
+ return " document_name";
196
+ }
197
+ if (fieldName === "timestamp") {
198
+ return " MAX(timestamp) AS timestamp";
199
+ }
200
+ return ` MAX_BY(${fieldName}, timestamp) AS ${fieldName}`;
201
+ })
202
+ .join(",\n ");
203
+ const target = `CREATE MATERIALIZED VIEW \`${projectId}.${datasetId}.${rawLatestViewName}\` ${optionsString}`;
204
+ const source = `
205
+ WITH latests AS (
206
+ SELECT
207
+ ${aggregatedFields}
208
+ FROM \`${projectId}.${datasetId}.${tableName}\`
209
+ GROUP BY document_name
210
+ )
211
+ SELECT *
212
+ FROM latests
213
+ WHERE operation != "DELETE"
214
+ `;
215
+ // Combine all parts with options before AS
216
+ const fullQuery = sqlFormatter.format(`${target} AS (${source})`);
217
+ return { target, source, query: fullQuery };
218
+ }
219
+ exports.buildNonIncrementalMaterializedViewQuery = buildNonIncrementalMaterializedViewQuery;
@@ -2,6 +2,7 @@ import { Dataset, Table } from "@google-cloud/bigquery";
2
2
  interface WaitForInitializationParams {
3
3
  dataset: Dataset;
4
4
  changelogName: string;
5
+ materializedViewName?: string;
5
6
  }
6
7
  /**
7
8
  * Periodically checks for the existence of a dataset and table until both are found or a maximum number of attempts is reached.
@@ -10,6 +11,6 @@ interface WaitForInitializationParams {
10
11
  * @returns {Promise<Table>} A promise that resolves with the Table if it exists, or rejects if it doesn't exist after maxAttempts or an error occurs.
11
12
  * @throws {Error} Throws an error if the dataset or table cannot be verified to exist after multiple attempts or if an unexpected error occurs.
12
13
  */
13
- export declare function waitForInitialization({ dataset, changelogName }: WaitForInitializationParams, maxAttempts?: number): Promise<Table>;
14
+ export declare function waitForInitialization({ dataset, changelogName, materializedViewName }: WaitForInitializationParams, maxAttempts?: number): Promise<Table>;
14
15
  export declare function parseErrorMessage(error: unknown, process?: string): string;
15
16
  export {};
@@ -9,7 +9,7 @@ const logs = require("../logs");
9
9
  * @returns {Promise<Table>} A promise that resolves with the Table if it exists, or rejects if it doesn't exist after maxAttempts or an error occurs.
10
10
  * @throws {Error} Throws an error if the dataset or table cannot be verified to exist after multiple attempts or if an unexpected error occurs.
11
11
  */
12
- async function waitForInitialization({ dataset, changelogName }, maxAttempts = 12) {
12
+ async function waitForInitialization({ dataset, changelogName, materializedViewName }, maxAttempts = 12) {
13
13
  return new Promise((resolve, reject) => {
14
14
  let attempts = 0;
15
15
  let handle = setInterval(async () => {
@@ -17,7 +17,13 @@ async function waitForInitialization({ dataset, changelogName }, maxAttempts = 1
17
17
  const [datasetExists] = await dataset.exists();
18
18
  const table = dataset.table(changelogName);
19
19
  const [tableExists] = await table.exists();
20
- if (datasetExists && tableExists) {
20
+ let waitingForMaterializedView = false;
21
+ if (materializedViewName) {
22
+ const materializedView = dataset.table(materializedViewName);
23
+ const [materializedViewExists] = await materializedView.exists();
24
+ waitingForMaterializedView = !materializedViewExists;
25
+ }
26
+ if (datasetExists && tableExists && !waitingForMaterializedView) {
21
27
  clearInterval(handle);
22
28
  resolve(table);
23
29
  }
@@ -0,0 +1 @@
1
+ export {};
@@ -0,0 +1,65 @@
1
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const utils_1 = require("./utils");
const logs = require("../logs");
jest.mock("@google-cloud/bigquery");
// FIX: the logs module is required as "../logs" above, but the mock was
// registered for "../../logs" — a specifier that resolves outside lib/ and
// does not match the require. The mock path must equal the require path for
// jest to substitute the module, which the toHaveBeenCalledWith assertions
// on logs.failedToInitializeWait below depend on.
jest.mock("../logs");
// Hand-rolled stubs for the BigQuery Dataset/Table surface that
// waitForInitialization touches: only exists() and table() are exercised.
const dataset = {
    exists: jest.fn(),
    table: jest.fn(),
};
const table = {
    exists: jest.fn(),
};
const changelogName = "testTable";
describe("waitForInitialization", () => {
    beforeEach(() => {
        jest.clearAllMocks();
        dataset.table.mockReturnValue(table);
    });
    // Happy path: both dataset and changelog table exist on the first poll.
    test("should successfully find the dataset and table", async () => {
        dataset.exists.mockResolvedValue([true]);
        table.exists.mockResolvedValue([true]);
        const result = await (0, utils_1.waitForInitialization)({
            dataset: dataset,
            changelogName,
        });
        expect(result).toBe(table);
        expect(dataset.exists).toHaveBeenCalledTimes(1);
        expect(table.exists).toHaveBeenCalledTimes(1);
    });
    // Polling gives up after maxAttempts when the table never materializes.
    test("should fail after max attempts if table does not exist", async () => {
        dataset.exists.mockResolvedValue([true]);
        table.exists.mockResolvedValue([false]);
        await expect((0, utils_1.waitForInitialization)({ dataset: dataset, changelogName }, 3)).rejects.toThrow("Initialization timed out. Dataset or table could not be verified to exist after multiple attempts.");
        expect(dataset.exists).toHaveBeenCalledTimes(3);
        expect(table.exists).toHaveBeenCalledTimes(3);
    });
    // Errors raised while checking the dataset are logged and re-thrown.
    test("should handle and throw an error if dataset.exists throws", async () => {
        const error = new Error("Access denied");
        dataset.exists.mockRejectedValue(error);
        await expect((0, utils_1.waitForInitialization)({
            dataset: dataset,
            changelogName,
        })).rejects.toThrow("Access denied");
        expect(logs.failedToInitializeWait).toHaveBeenCalledWith(error.message);
    });
    // Errors raised while checking the table are logged and re-thrown.
    test("should handle and throw an error if table.exists throws", async () => {
        dataset.exists.mockResolvedValue([true]);
        const error = new Error("Table error");
        table.exists.mockRejectedValue(error);
        await expect((0, utils_1.waitForInitialization)({
            dataset: dataset,
            changelogName,
        })).rejects.toThrow("Table error");
        expect(logs.failedToInitializeWait).toHaveBeenCalledWith(error.message);
    });
    // Non-Error rejections are reported with a generic message.
    test("should handle unexpected error types gracefully", async () => {
        dataset.exists.mockRejectedValue("String error");
        await expect((0, utils_1.waitForInitialization)({
            dataset: dataset,
            changelogName,
        })).rejects.toThrow("An unexpected error occurred");
        expect(logs.failedToInitializeWait).toHaveBeenCalledWith("An unexpected error occurred");
    });
});
package/lib/index.d.ts CHANGED
@@ -1,2 +1,3 @@
1
1
  export { FirestoreBigQueryEventHistoryTracker, RawChangelogSchema, RawChangelogViewSchema, } from "./bigquery";
2
2
  export { ChangeType, FirestoreDocumentChangeEvent, FirestoreEventHistoryTracker, } from "./tracker";
3
+ export { LogLevel, Logger } from "./logger";
package/lib/index.js CHANGED
@@ -15,10 +15,13 @@
15
15
  * limitations under the License.
16
16
  */
17
17
  Object.defineProperty(exports, "__esModule", { value: true });
18
- exports.ChangeType = exports.RawChangelogViewSchema = exports.RawChangelogSchema = exports.FirestoreBigQueryEventHistoryTracker = void 0;
18
+ exports.Logger = exports.LogLevel = exports.ChangeType = exports.RawChangelogViewSchema = exports.RawChangelogSchema = exports.FirestoreBigQueryEventHistoryTracker = void 0;
19
19
  var bigquery_1 = require("./bigquery");
20
20
  Object.defineProperty(exports, "FirestoreBigQueryEventHistoryTracker", { enumerable: true, get: function () { return bigquery_1.FirestoreBigQueryEventHistoryTracker; } });
21
21
  Object.defineProperty(exports, "RawChangelogSchema", { enumerable: true, get: function () { return bigquery_1.RawChangelogSchema; } });
22
22
  Object.defineProperty(exports, "RawChangelogViewSchema", { enumerable: true, get: function () { return bigquery_1.RawChangelogViewSchema; } });
23
23
  var tracker_1 = require("./tracker");
24
24
  Object.defineProperty(exports, "ChangeType", { enumerable: true, get: function () { return tracker_1.ChangeType; } });
25
+ var logger_1 = require("./logger");
26
+ Object.defineProperty(exports, "LogLevel", { enumerable: true, get: function () { return logger_1.LogLevel; } });
27
+ Object.defineProperty(exports, "Logger", { enumerable: true, get: function () { return logger_1.Logger; } });
@@ -0,0 +1,19 @@
1
/**
 * Log severity levels, ordered from most verbose (DEBUG) to fully muted
 * (SILENT). Values are lowercase strings, so callers may pass either the
 * enum member or its plain string form.
 */
export declare enum LogLevel {
    DEBUG = "debug",
    INFO = "info",
    WARN = "warn",
    ERROR = "error",
    SILENT = "silent"
}
/**
 * Severity-filtering wrapper around the firebase-functions logger
 * (see lib/logger.js for the implementation).
 */
export declare class Logger {
    // Numeric rank of the current threshold (maps into logger.js `levels`).
    private logLevel;
    /** @param logLevel initial minimum severity; defaults to INFO. */
    constructor(logLevel?: LogLevel | string);
    /** Changes the threshold; unrecognized string values fall back to "info". */
    setLogLevel(logLevel: LogLevel | string): void;
    debug(...args: any[]): void;
    info(...args: any[]): void;
    warn(...args: any[]): void;
    error(...args: any[]): void;
    /** Alias for info(). */
    log(...args: any[]): void;
    private runIfLogLevel;
}
/** Shared module-level Logger instance (used by lib/logs.js). */
export declare const logger: Logger;
package/lib/logger.js ADDED
@@ -0,0 +1,69 @@
1
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.logger = exports.Logger = exports.LogLevel = void 0;
/*
 * Copyright 2019 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *       https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
const firebase_functions_1 = require("firebase-functions");
/**
 * Log severity levels; values are lowercase strings so the enum member and
 * its plain string form are interchangeable as Logger inputs.
 */
var LogLevel;
(function (LogLevel) {
    LogLevel["DEBUG"] = "debug";
    LogLevel["INFO"] = "info";
    LogLevel["WARN"] = "warn";
    LogLevel["ERROR"] = "error";
    LogLevel["SILENT"] = "silent";
})(LogLevel = exports.LogLevel || (exports.LogLevel = {}));
// Numeric rank per level: lower = more verbose. A message is emitted when its
// rank is >= the configured threshold; "silent" outranks every message level.
const levels = {
    debug: 0,
    info: 1,
    warn: 2,
    error: 3,
    silent: 4,
};
/**
 * Severity-filtering wrapper around the firebase-functions logger.
 */
class Logger {
    /** @param logLevel initial minimum severity; defaults to INFO. */
    constructor(logLevel = LogLevel.INFO) {
        this.setLogLevel(logLevel);
    }
    /**
     * Sets the minimum severity that will be emitted.
     *
     * FIX: the previous implementation branched on
     * `typeof logLevel === "string"`, but LogLevel enum members ARE strings,
     * so the else branch was unreachable — and, had it ever run, it lacked
     * the invalid-value fallback. A single lookup with `?? levels.info`
     * covers both accepted input forms with identical behavior.
     */
    setLogLevel(logLevel) {
        this.logLevel = levels[logLevel] ?? levels.info;
    }
    debug(...args) {
        this.runIfLogLevel(levels.debug, firebase_functions_1.logger.debug, ...args);
    }
    info(...args) {
        this.runIfLogLevel(levels.info, firebase_functions_1.logger.info, ...args);
    }
    warn(...args) {
        this.runIfLogLevel(levels.warn, firebase_functions_1.logger.warn, ...args);
    }
    error(...args) {
        this.runIfLogLevel(levels.error, firebase_functions_1.logger.error, ...args);
    }
    /** Alias for info(), kept for console-style `log(...)` call sites. */
    log(...args) {
        this.info(...args);
    }
    // Invokes `func` with the given args only when the message rank clears
    // the configured threshold.
    runIfLogLevel(level, func, ...args) {
        if (this.logLevel <= level) {
            func(...args);
        }
    }
}
exports.Logger = Logger;
// Shared default instance; logs.js routes all its messages through this.
exports.logger = new Logger();
package/lib/logs.d.ts CHANGED
@@ -50,6 +50,7 @@ export declare const cannotPartitionExistingTable: (table: Table) => void;
50
50
  export declare function invalidProjectIdWarning(bqProjectId: string): void;
51
51
  export declare function invalidTableReference(): void;
52
52
  export declare function hourAndDatePartitioningWarning(): void;
53
+ export declare function invalidClusteringTypes(fields: string): void;
53
54
  export declare function invalidClustering(fields: string): void;
54
55
  export declare const tableCreationError: (table: any, message: any) => void;
55
56
  export declare const failedToInitializeWait: (message: any) => void;
package/lib/logs.js CHANGED
@@ -15,205 +15,209 @@
15
15
  * limitations under the License.
16
16
  */
17
17
  Object.defineProperty(exports, "__esModule", { value: true });
18
- exports.updatingMetadata = exports.failedToInitializeWait = exports.tableCreationError = exports.invalidClustering = exports.hourAndDatePartitioningWarning = exports.invalidTableReference = exports.invalidProjectIdWarning = exports.cannotPartitionExistingTable = exports.removedClustering = exports.updatedClustering = exports.bigQueryTableInsertErrors = exports.firestoreTimePartitioningParametersWarning = exports.firestoreTimePartitionFieldError = exports.addPartitionFieldColumn = exports.addNewColumn = exports.timestampMissingValue = exports.error = exports.dataTypeInvalid = exports.dataInserting = exports.dataInsertRetried = exports.dataInserted = exports.complete = exports.bigQueryViewValidating = exports.bigQueryViewValidated = exports.bigQueryViewUpToDate = exports.bigQueryViewUpdating = exports.bigQueryViewUpdated = exports.bigQueryViewAlreadyExists = exports.bigQueryViewCreating = exports.bigQueryViewCreated = exports.bigQueryUserDefinedFunctionCreated = exports.bigQueryUserDefinedFunctionCreating = exports.bigQueryTableValidating = exports.bigQueryTableValidated = exports.bigQueryTableUpToDate = exports.bigQueryTableUpdating = exports.bigQueryTableUpdated = exports.bigQueryTableCreating = exports.bigQueryTableCreated = exports.bigQueryTableAlreadyExists = exports.bigQuerySchemaViewCreated = exports.bigQueryLatestSnapshotViewQueryCreated = exports.bigQueryErrorRecordingDocumentChange = exports.bigQueryDatasetExists = exports.bigQueryDatasetCreating = exports.bigQueryDatasetCreated = exports.arrayFieldInvalid = void 0;
19
- const firebase_functions_1 = require("firebase-functions");
18
+ exports.updatingMetadata = exports.failedToInitializeWait = exports.tableCreationError = exports.invalidClustering = exports.invalidClusteringTypes = exports.hourAndDatePartitioningWarning = exports.invalidTableReference = exports.invalidProjectIdWarning = exports.cannotPartitionExistingTable = exports.removedClustering = exports.updatedClustering = exports.bigQueryTableInsertErrors = exports.firestoreTimePartitioningParametersWarning = exports.firestoreTimePartitionFieldError = exports.addPartitionFieldColumn = exports.addNewColumn = exports.timestampMissingValue = exports.error = exports.dataTypeInvalid = exports.dataInserting = exports.dataInsertRetried = exports.dataInserted = exports.complete = exports.bigQueryViewValidating = exports.bigQueryViewValidated = exports.bigQueryViewUpToDate = exports.bigQueryViewUpdating = exports.bigQueryViewUpdated = exports.bigQueryViewAlreadyExists = exports.bigQueryViewCreating = exports.bigQueryViewCreated = exports.bigQueryUserDefinedFunctionCreated = exports.bigQueryUserDefinedFunctionCreating = exports.bigQueryTableValidating = exports.bigQueryTableValidated = exports.bigQueryTableUpToDate = exports.bigQueryTableUpdating = exports.bigQueryTableUpdated = exports.bigQueryTableCreating = exports.bigQueryTableCreated = exports.bigQueryTableAlreadyExists = exports.bigQuerySchemaViewCreated = exports.bigQueryLatestSnapshotViewQueryCreated = exports.bigQueryErrorRecordingDocumentChange = exports.bigQueryDatasetExists = exports.bigQueryDatasetCreating = exports.bigQueryDatasetCreated = exports.arrayFieldInvalid = void 0;
19
+ const logger_1 = require("./logger");
20
20
  const arrayFieldInvalid = (fieldName) => {
21
- firebase_functions_1.logger.warn(`Array field '${fieldName}' does not contain an array, skipping`);
21
+ logger_1.logger.warn(`Array field '${fieldName}' does not contain an array, skipping`);
22
22
  };
23
23
  exports.arrayFieldInvalid = arrayFieldInvalid;
24
24
  const bigQueryDatasetCreated = (datasetId) => {
25
- firebase_functions_1.logger.log(`Created BigQuery dataset: ${datasetId}`);
25
+ logger_1.logger.info(`Created BigQuery dataset: ${datasetId}`);
26
26
  };
27
27
  exports.bigQueryDatasetCreated = bigQueryDatasetCreated;
28
28
  const bigQueryDatasetCreating = (datasetId) => {
29
- firebase_functions_1.logger.log(`Creating BigQuery dataset: ${datasetId}`);
29
+ logger_1.logger.debug(`Creating BigQuery dataset: ${datasetId}`);
30
30
  };
31
31
  exports.bigQueryDatasetCreating = bigQueryDatasetCreating;
32
32
  const bigQueryDatasetExists = (datasetId) => {
33
- firebase_functions_1.logger.log(`BigQuery dataset already exists: ${datasetId}`);
33
+ logger_1.logger.info(`BigQuery dataset already exists: ${datasetId}`);
34
34
  };
35
35
  exports.bigQueryDatasetExists = bigQueryDatasetExists;
36
36
  const bigQueryErrorRecordingDocumentChange = (e) => {
37
- firebase_functions_1.logger.error(`Error recording document changes.`, e);
37
+ logger_1.logger.error(`Error recording document changes.`, e);
38
38
  };
39
39
  exports.bigQueryErrorRecordingDocumentChange = bigQueryErrorRecordingDocumentChange;
40
40
  const bigQueryLatestSnapshotViewQueryCreated = (query) => {
41
- firebase_functions_1.logger.log(`BigQuery latest snapshot view query:\n${query}`);
41
+ logger_1.logger.debug(`BigQuery latest snapshot view query:\n${query}`);
42
42
  };
43
43
  exports.bigQueryLatestSnapshotViewQueryCreated = bigQueryLatestSnapshotViewQueryCreated;
44
44
  const bigQuerySchemaViewCreated = (name) => {
45
- firebase_functions_1.logger.log(`BigQuery created schema view ${name}\n`);
45
+ logger_1.logger.debug(`BigQuery created schema view ${name}\n`);
46
46
  };
47
47
  exports.bigQuerySchemaViewCreated = bigQuerySchemaViewCreated;
48
48
  const bigQueryTableAlreadyExists = (tableName, datasetName) => {
49
- firebase_functions_1.logger.log(`BigQuery table with name ${tableName} already ` +
49
+ logger_1.logger.debug(`BigQuery table with name ${tableName} already ` +
50
50
  `exists in dataset ${datasetName}!`);
51
51
  };
52
52
  exports.bigQueryTableAlreadyExists = bigQueryTableAlreadyExists;
53
53
  const bigQueryTableCreated = (tableName) => {
54
- firebase_functions_1.logger.log(`Created BigQuery table: ${tableName}`);
54
+ logger_1.logger.info(`Created BigQuery table: ${tableName}`);
55
55
  };
56
56
  exports.bigQueryTableCreated = bigQueryTableCreated;
57
57
  const bigQueryTableCreating = (tableName) => {
58
- firebase_functions_1.logger.log(`Creating BigQuery table: ${tableName}`);
58
+ logger_1.logger.debug(`Creating BigQuery table: ${tableName}`);
59
59
  };
60
60
  exports.bigQueryTableCreating = bigQueryTableCreating;
61
61
  const bigQueryTableUpdated = (tableName) => {
62
- firebase_functions_1.logger.log(`Updated existing BigQuery table: ${tableName}`);
62
+ logger_1.logger.info(`Updated existing BigQuery table: ${tableName}`);
63
63
  };
64
64
  exports.bigQueryTableUpdated = bigQueryTableUpdated;
65
65
  const bigQueryTableUpdating = (tableName) => {
66
- firebase_functions_1.logger.log(`Updating existing BigQuery table: ${tableName}`);
66
+ logger_1.logger.debug(`Updating existing BigQuery table: ${tableName}`);
67
67
  };
68
68
  exports.bigQueryTableUpdating = bigQueryTableUpdating;
69
69
  const bigQueryTableUpToDate = (tableName) => {
70
- firebase_functions_1.logger.log(`BigQuery table: ${tableName} is up to date`);
70
+ logger_1.logger.info(`BigQuery table: ${tableName} is up to date`);
71
71
  };
72
72
  exports.bigQueryTableUpToDate = bigQueryTableUpToDate;
73
73
  const bigQueryTableValidated = (tableName) => {
74
- firebase_functions_1.logger.log(`Validated existing BigQuery table: ${tableName}`);
74
+ logger_1.logger.info(`Validated existing BigQuery table: ${tableName}`);
75
75
  };
76
76
  exports.bigQueryTableValidated = bigQueryTableValidated;
77
77
  const bigQueryTableValidating = (tableName) => {
78
- firebase_functions_1.logger.log(`Validating existing BigQuery table: ${tableName}`);
78
+ logger_1.logger.debug(`Validating existing BigQuery table: ${tableName}`);
79
79
  };
80
80
  exports.bigQueryTableValidating = bigQueryTableValidating;
81
81
  const bigQueryUserDefinedFunctionCreating = (functionName) => {
82
- firebase_functions_1.logger.log(`Creating BigQuery user-defined function ${functionName}`);
82
+ logger_1.logger.debug(`Creating BigQuery user-defined function ${functionName}`);
83
83
  };
84
84
  exports.bigQueryUserDefinedFunctionCreating = bigQueryUserDefinedFunctionCreating;
85
85
  const bigQueryUserDefinedFunctionCreated = (functionName) => {
86
- firebase_functions_1.logger.log(`Created BigQuery user-defined function ${functionName}`);
86
+ logger_1.logger.info(`Created BigQuery user-defined function ${functionName}`);
87
87
  };
88
88
  exports.bigQueryUserDefinedFunctionCreated = bigQueryUserDefinedFunctionCreated;
89
89
  const bigQueryViewCreated = (viewName) => {
90
- firebase_functions_1.logger.log(`Created BigQuery view: ${viewName}`);
90
+ logger_1.logger.info(`Created BigQuery view: ${viewName}`);
91
91
  };
92
92
  exports.bigQueryViewCreated = bigQueryViewCreated;
93
93
  const bigQueryViewCreating = (viewName, query) => {
94
- firebase_functions_1.logger.log(`Creating BigQuery view: ${viewName}\nQuery:\n${query}`);
94
+ logger_1.logger.debug(`Creating BigQuery view: ${viewName}\nQuery:\n${query}`);
95
95
  };
96
96
  exports.bigQueryViewCreating = bigQueryViewCreating;
97
97
  const bigQueryViewAlreadyExists = (viewName, datasetName) => {
98
- firebase_functions_1.logger.log(`View with id ${viewName} already exists in dataset ${datasetName}.`);
98
+ logger_1.logger.info(`View with id ${viewName} already exists in dataset ${datasetName}.`);
99
99
  };
100
100
  exports.bigQueryViewAlreadyExists = bigQueryViewAlreadyExists;
101
101
  const bigQueryViewUpdated = (viewName) => {
102
- firebase_functions_1.logger.log(`Updated existing BigQuery view: ${viewName}`);
102
+ logger_1.logger.info(`Updated existing BigQuery view: ${viewName}`);
103
103
  };
104
104
  exports.bigQueryViewUpdated = bigQueryViewUpdated;
105
105
  const bigQueryViewUpdating = (viewName) => {
106
- firebase_functions_1.logger.log(`Updating existing BigQuery view: ${viewName}`);
106
+ logger_1.logger.debug(`Updating existing BigQuery view: ${viewName}`);
107
107
  };
108
108
  exports.bigQueryViewUpdating = bigQueryViewUpdating;
109
109
  const bigQueryViewUpToDate = (viewName) => {
110
- firebase_functions_1.logger.log(`BigQuery view: ${viewName} is up to date`);
110
+ logger_1.logger.info(`BigQuery view: ${viewName} is up to date`);
111
111
  };
112
112
  exports.bigQueryViewUpToDate = bigQueryViewUpToDate;
113
113
  const bigQueryViewValidated = (viewName) => {
114
- firebase_functions_1.logger.log(`Validated existing BigQuery view: ${viewName}`);
114
+ logger_1.logger.info(`Validated existing BigQuery view: ${viewName}`);
115
115
  };
116
116
  exports.bigQueryViewValidated = bigQueryViewValidated;
117
117
  const bigQueryViewValidating = (viewName) => {
118
- firebase_functions_1.logger.log(`Validating existing BigQuery view: ${viewName}`);
118
+ logger_1.logger.debug(`Validating existing BigQuery view: ${viewName}`);
119
119
  };
120
120
  exports.bigQueryViewValidating = bigQueryViewValidating;
121
121
  const complete = () => {
122
- firebase_functions_1.logger.log("Completed mod execution");
122
+ logger_1.logger.info("Completed mod execution");
123
123
  };
124
124
  exports.complete = complete;
125
125
  const dataInserted = (rowCount) => {
126
- firebase_functions_1.logger.log(`Inserted ${rowCount} row(s) of data into BigQuery`);
126
+ logger_1.logger.debug(`Inserted ${rowCount} row(s) of data into BigQuery`);
127
127
  };
128
128
  exports.dataInserted = dataInserted;
129
129
  const dataInsertRetried = (rowCount) => {
130
- firebase_functions_1.logger.log(`Retried to insert ${rowCount} row(s) of data into BigQuery (ignoring unknown columns)`);
130
+ logger_1.logger.debug(`Retried to insert ${rowCount} row(s) of data into BigQuery (ignoring unknown columns)`);
131
131
  };
132
132
  exports.dataInsertRetried = dataInsertRetried;
133
133
  const dataInserting = (rowCount) => {
134
- firebase_functions_1.logger.log(`Inserting ${rowCount} row(s) of data into BigQuery`);
134
+ logger_1.logger.debug(`Inserting ${rowCount} row(s) of data into BigQuery`);
135
135
  };
136
136
  exports.dataInserting = dataInserting;
137
137
  const dataTypeInvalid = (fieldName, fieldType, dataType) => {
138
- firebase_functions_1.logger.warn(`Field '${fieldName}' has invalid data. Expected: ${fieldType}, received: ${dataType}`);
138
+ logger_1.logger.warn(`Field '${fieldName}' has invalid data. Expected: ${fieldType}, received: ${dataType}`);
139
139
  };
140
140
  exports.dataTypeInvalid = dataTypeInvalid;
141
141
  const error = (err) => {
142
- firebase_functions_1.logger.error("Error when mirroring data to BigQuery", err);
142
+ logger_1.logger.error("Error when mirroring data to BigQuery", err);
143
143
  };
144
144
  exports.error = error;
145
145
  const timestampMissingValue = (fieldName) => {
146
- firebase_functions_1.logger.warn(`Missing value for timestamp field: ${fieldName}, using default timestamp instead.`);
146
+ logger_1.logger.warn(`Missing value for timestamp field: ${fieldName}, using default timestamp instead.`);
147
147
  };
148
148
  exports.timestampMissingValue = timestampMissingValue;
149
149
  const addNewColumn = (table, field) => {
150
- firebase_functions_1.logger.log(`Updated '${table}' table with a '${field}' column`);
150
+ logger_1.logger.info(`Updated '${table}' table with a '${field}' column`);
151
151
  };
152
152
  exports.addNewColumn = addNewColumn;
153
153
  const addPartitionFieldColumn = (table, field) => {
154
- firebase_functions_1.logger.log(`Updated '${table}' table with a partition field '${field}' column`);
154
+ logger_1.logger.info(`Updated '${table}' table with a partition field '${field}' column`);
155
155
  };
156
156
  exports.addPartitionFieldColumn = addPartitionFieldColumn;
157
157
  const firestoreTimePartitionFieldError = (documentName, fieldName, firestoreFieldName, firestoreFieldData) => {
158
- firebase_functions_1.logger.warn(`Wrong type of Firestore Field for TimePartitioning. Accepts only strings in BigQuery format (DATE, DATETIME, TIMESTAMP) and Firestore Timestamp. Firestore Document field path: ${documentName}. Field name: ${firestoreFieldName}. Field data: ${firestoreFieldData}. Schema field "${fieldName}" value will be null.`);
158
+ logger_1.logger.warn(`Wrong type of Firestore Field for TimePartitioning. Accepts only strings in BigQuery format (DATE, DATETIME, TIMESTAMP) and Firestore Timestamp. Firestore Document field path: ${documentName}. Field name: ${firestoreFieldName}. Field data: ${firestoreFieldData}. Schema field "${fieldName}" value will be null.`);
159
159
  };
160
160
  exports.firestoreTimePartitionFieldError = firestoreTimePartitionFieldError;
161
161
  const firestoreTimePartitioningParametersWarning = (fieldName, fieldType, firestoreFieldName, dataFirestoreField) => {
162
- firebase_functions_1.logger.warn("All TimePartitioning option parameters need to be available to create new custom schema field");
163
- !fieldName && firebase_functions_1.logger.warn(`Parameter missing: TIME_PARTITIONING_FIELD`);
164
- !fieldType && firebase_functions_1.logger.warn(`Parameter missing: TIME_PARTITIONING_FIELD_TYPE`);
162
+ logger_1.logger.warn("All TimePartitioning option parameters need to be available to create new custom schema field");
163
+ !fieldName && logger_1.logger.warn(`Parameter missing: TIME_PARTITIONING_FIELD`);
164
+ !fieldType && logger_1.logger.warn(`Parameter missing: TIME_PARTITIONING_FIELD_TYPE`);
165
165
  !firestoreFieldName &&
166
- firebase_functions_1.logger.warn(`Parameter missing: TIME_PARTITIONING_FIRESTORE_FIELD`);
166
+ logger_1.logger.warn(`Parameter missing: TIME_PARTITIONING_FIRESTORE_FIELD`);
167
167
  !dataFirestoreField &&
168
- firebase_functions_1.logger.warn(`No data found in Firestore Document under selected field: "${firestoreFieldName}"`);
168
+ logger_1.logger.warn(`No data found in Firestore Document under selected field: "${firestoreFieldName}"`);
169
169
  };
170
170
  exports.firestoreTimePartitioningParametersWarning = firestoreTimePartitioningParametersWarning;
171
171
  const bigQueryTableInsertErrors = (insertErrors) => {
172
- firebase_functions_1.logger.warn(`Error when inserting data to table.`);
172
+ logger_1.logger.warn(`Error when inserting data to table.`);
173
173
  insertErrors?.forEach((error) => {
174
- firebase_functions_1.logger.warn("ROW DATA JSON:");
175
- firebase_functions_1.logger.warn(error.row);
176
- error.errors?.forEach((error) => firebase_functions_1.logger.warn(`ROW ERROR MESSAGE: ${error.message}`));
174
+ logger_1.logger.warn("ROW DATA JSON:");
175
+ logger_1.logger.warn(error.row);
176
+ error.errors?.forEach((error) => logger_1.logger.warn(`ROW ERROR MESSAGE: ${error.message}`));
177
177
  });
178
178
  };
179
179
  exports.bigQueryTableInsertErrors = bigQueryTableInsertErrors;
180
180
  const updatedClustering = (fields) => {
181
- firebase_functions_1.logger.info(`Clustering updated with new settings fields: ${fields}`);
181
+ logger_1.logger.info(`Clustering updated with new settings fields: ${fields}`);
182
182
  };
183
183
  exports.updatedClustering = updatedClustering;
184
184
  const removedClustering = (tableName) => {
185
- firebase_functions_1.logger.info(`Clustering removed on ${tableName}`);
185
+ logger_1.logger.info(`Clustering removed on ${tableName}`);
186
186
  };
187
187
  exports.removedClustering = removedClustering;
188
188
  const cannotPartitionExistingTable = (table) => {
189
- firebase_functions_1.logger.warn(`Cannot partition an existing table ${table.dataset.id}_${table.id}`);
189
+ logger_1.logger.warn(`Cannot partition an existing table ${table.dataset.id}_${table.id}`);
190
190
  };
191
191
  exports.cannotPartitionExistingTable = cannotPartitionExistingTable;
192
192
  function invalidProjectIdWarning(bqProjectId) {
193
- firebase_functions_1.logger.warn(`Invalid project Id ${bqProjectId}, data cannot be synchronized`);
193
+ logger_1.logger.warn(`Invalid project Id ${bqProjectId}, data cannot be synchronized`);
194
194
  }
195
195
  exports.invalidProjectIdWarning = invalidProjectIdWarning;
196
196
  function invalidTableReference() {
197
- firebase_functions_1.logger.warn(`No valid table reference is available. Skipping partitioning`);
197
+ logger_1.logger.warn(`No valid table reference is available. Skipping partitioning`);
198
198
  }
199
199
  exports.invalidTableReference = invalidTableReference;
200
200
  function hourAndDatePartitioningWarning() {
201
- firebase_functions_1.logger.warn(`Cannot partition table with hour partitioning and Date. For DATE columns, the partitions can have daily, monthly, or yearly granularity. Skipping partitioning`);
201
+ logger_1.logger.warn(`Cannot partition table with hour partitioning and Date. For DATE columns, the partitions can have daily, monthly, or yearly granularity. Skipping partitioning`);
202
202
  }
203
203
  exports.hourAndDatePartitioningWarning = hourAndDatePartitioningWarning;
204
+ function invalidClusteringTypes(fields) {
205
+ logger_1.logger.warn(`Unable to add clustering, field(s) ${fields} have invalid types.`);
206
+ }
207
+ exports.invalidClusteringTypes = invalidClusteringTypes;
204
208
  function invalidClustering(fields) {
205
- firebase_functions_1.logger.warn(`Unable to add clustering, field(s) ${fields} do not exist on the expected table`);
209
+ logger_1.logger.warn(`Unable to add clustering, field(s) ${fields} do not exist on the expected table`);
206
210
  }
207
211
  exports.invalidClustering = invalidClustering;
208
212
  const tableCreationError = (table, message) => {
209
- firebase_functions_1.logger.warn(`Error caught creating table`, message);
213
+ logger_1.logger.warn(`Error caught creating table`, message);
210
214
  };
211
215
  exports.tableCreationError = tableCreationError;
212
216
  const failedToInitializeWait = (message) => {
213
- firebase_functions_1.logger.warn(`Failed while waiting to initialize.`, message);
217
+ logger_1.logger.warn(`Failed while waiting to initialize.`, message);
214
218
  };
215
219
  exports.failedToInitializeWait = failedToInitializeWait;
216
220
  const updatingMetadata = (tableName, resources) => {
217
- firebase_functions_1.logger.info(`Updated Metadata on ${tableName}, ${JSON.stringify(resources)})`);
221
+ logger_1.logger.info(`Updated Metadata on ${tableName}, ${JSON.stringify(resources)})`);
218
222
  };
219
223
  exports.updatingMetadata = updatingMetadata;
package/package.json CHANGED
@@ -5,7 +5,7 @@
5
5
  "url": "github.com/firebase/extensions.git",
6
6
  "directory": "firestore-bigquery-export/firestore-bigquery-change-tracker"
7
7
  },
8
- "version": "1.1.38",
8
+ "version": "1.1.40",
9
9
  "description": "Core change-tracker library for Cloud Firestore Collection BigQuery Exports",
10
10
  "main": "./lib/index.js",
11
11
  "scripts": {
@@ -38,17 +38,18 @@
38
38
  },
39
39
  "devDependencies": {
40
40
  "@types/chai": "^4.1.6",
41
- "@types/jest": "29.5.0",
41
+ "@types/jest": "^29.5.14",
42
42
  "@types/node": "14.18.34",
43
43
  "@types/traverse": "^0.6.32",
44
44
  "chai": "^4.2.0",
45
- "nyc": "^14.0.0",
46
- "rimraf": "^2.6.3",
47
- "typescript": "^4.9.4",
48
45
  "jest": "29.5.0",
46
+ "jest-config": "29.5.0",
49
47
  "jest-environment-node": "29.5.0",
48
+ "jest-summarizing-reporter": "^1.1.4",
50
49
  "mocked-env": "^1.3.2",
50
+ "nyc": "^17.1.0",
51
+ "rimraf": "^2.6.3",
51
52
  "ts-jest": "29.1.2",
52
- "jest-config": "29.5.0"
53
+ "typescript": "^4.9.4"
53
54
  }
54
55
  }