bulltrackers-module 1.0.755 → 1.0.757

This diff shows the changes between two package versions as published to a supported public registry. It is provided for informational purposes only.
@@ -9,7 +9,7 @@
9
9
  * * FIX: Switched to bigquery.createJob for GCS imports to prevent local file path interpretation errors.
10
10
  * * FIX: Improved error logging to catch swallowed BigQuery insert errors.
11
11
  * * FIX: finalizeResults now checks for file existence to prevent "Not found" errors on empty results.
12
- * * FIX: Added SAFE.PARSE_JSON to MERGE statement to handle JSON type mismatch.
12
+ * * FIX: Added SAFE.PARSE_JSON to MERGE statement for BOTH result_data and dependency_result_hashes.
13
13
  */
14
14
 
15
15
  const { Firestore } = require('@google-cloud/firestore');
@@ -499,6 +499,8 @@ class StorageManager {
499
499
  const dataset = this.bigquery.dataset(this.config.bigquery.dataset);
500
500
  const table = dataset.table(tableName);
501
501
 
502
+ // Note: result_data and dependency_result_hashes are loaded as STRING from the JSON file
503
+ // They will be parsed into JSON during the merge step.
502
504
  const schema = [
503
505
  { name: 'date', type: 'DATE', mode: 'REQUIRED' },
504
506
  { name: 'computation_name', type: 'STRING', mode: 'REQUIRED' },
@@ -521,10 +523,6 @@ class StorageManager {
521
523
 
522
524
  await this._ensureBigQueryTable(targetTable);
523
525
 
524
- // FIX: Added SAFE.PARSE_JSON() to dependency_result_hashes
525
- // The source (temp table) has this as a STRING (from the JSON file).
526
- // The destination (target table) has this as JSON.
527
- // We must explicitly parse the string to JSON during the merge.
528
526
  const mergeQuery = `
529
527
  MERGE INTO ${fullTarget} T
530
528
  USING (
@@ -540,16 +538,33 @@ class StorageManager {
540
538
  result_hash = S.result_hash,
541
539
  dependency_result_hashes = SAFE.PARSE_JSON(S.dependency_result_hashes),
542
540
  entity_count = S.entity_count,
543
- result_data = S.result_data,
541
+ result_data = SAFE.PARSE_JSON(S.result_data),
544
542
  updated_at = S.updated_at
545
543
  WHEN NOT MATCHED THEN
546
544
  INSERT (date, computation_name, category, entity_id, code_hash, result_hash,
547
545
  dependency_result_hashes, entity_count, result_data, updated_at)
548
546
  VALUES (S.date, S.computation_name, S.category, S.entity_id, S.code_hash, S.result_hash,
549
- SAFE.PARSE_JSON(S.dependency_result_hashes), S.entity_count, S.result_data, S.updated_at)
547
+ SAFE.PARSE_JSON(S.dependency_result_hashes), S.entity_count, SAFE.PARSE_JSON(S.result_data), S.updated_at)
550
548
  `;
551
549
 
552
- await this.bigquery.query({ query: mergeQuery, location: this.config.bigquery.location });
550
+ // UPDATE: Use createQueryJob to capture DML statistics
551
+ try {
552
+ const [job] = await this.bigquery.createQueryJob({
553
+ query: mergeQuery,
554
+ location: this.config.bigquery.location
555
+ });
556
+
557
+ await job.getQueryResults(); // Wait for completion
558
+
559
+ const metadata = await job.getMetadata();
560
+ const stats = metadata[0]?.statistics?.query;
561
+ const affectedRows = stats?.numDmlAffectedRows;
562
+
563
+ this._log('INFO', `Merge complete on ${targetTable}. Rows affected (Inserted/Updated): ${affectedRows}`);
564
+ } catch (e) {
565
+ this._logError(`Merge Failed on ${targetTable}`, e);
566
+ throw e;
567
+ }
553
568
  }
554
569
 
555
570
  async _cleanupGCSFiles(bucketName, prefix) {
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "bulltrackers-module",
3
- "version": "1.0.755",
3
+ "version": "1.0.757",
4
4
  "description": "Helper Functions for Bulltrackers.",
5
5
  "main": "index.js",
6
6
  "files": [