fhirsmith 0.9.1 → 0.9.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -5,6 +5,17 @@ All notable changes to the Health Intersections Node Server will be documented i
5
5
  The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
6
6
  and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
7
7
 
8
+ ## [v0.9.2] - 2026-04-14
9
+
10
+ ### Fixed
11
+
12
+ - Improve VSAC logging
13
+ - Fix SCT import to handle SCT DK
14
+
15
+ ### Tx Conformance Statement
16
+
17
+ FHIRsmith passed all 1578 HL7 terminology service tests (modes tx.fhir.org+omop+general+snomed, tests v1.9.1, runner v6.9.6)
18
+
8
19
  ## [v0.9.1] - 2026-04-10
9
20
 
10
21
  ### Added
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "fhirsmith",
3
- "version": "0.9.1",
3
+ "version": "0.9.2",
4
4
  "txVersion": "1.9.1",
5
5
  "description": "A Node.js server that provides a collection of tools to serve the FHIR ecosystem",
6
6
  "main": "server.js",
@@ -48,37 +48,37 @@ class SnomedModule extends BaseTerminologyModule {
48
48
  registerCommands(terminologyCommand, globalOptions) {
49
49
  // Import command
50
50
  terminologyCommand
51
- .command('import')
52
- .description('Import SNOMED CT data from RF2 source directory')
53
- .option('-s, --source <directory>', 'Source directory containing RF2 files')
54
- .option('-b, --base <directory>', 'Base edition directory (for extensions)')
55
- .option('-d, --dest <file>', 'Destination cache file')
56
- .option('-e, --edition <code>', 'Edition code (e.g., 900000000000207008 for International)')
57
- .option('-v, --version <version>', 'Version in YYYYMMDD format (e.g., 20250801)')
58
- .option('-u, --uri <uri>', 'Version URI (overrides edition/version if provided)')
59
- .option('-l, --language <code>', 'Default language code (overrides edition default if provided)')
60
- .option('-y, --yes', 'Skip confirmations')
61
- .action(async (options) => {
62
- await this.handleImportCommand({...globalOptions, ...options});
63
- });
51
+ .command('import')
52
+ .description('Import SNOMED CT data from RF2 source directory')
53
+ .option('-s, --source <directory>', 'Source directory containing RF2 files')
54
+ .option('-b, --base <directory>', 'Base edition directory (for extensions)')
55
+ .option('-d, --dest <file>', 'Destination cache file')
56
+ .option('-e, --edition <code>', 'Edition code (e.g., 900000000000207008 for International)')
57
+ .option('-v, --version <version>', 'Version in YYYYMMDD format (e.g., 20250801)')
58
+ .option('-u, --uri <uri>', 'Version URI (overrides edition/version if provided)')
59
+ .option('-l, --language <code>', 'Default language code (overrides edition default if provided)')
60
+ .option('-y, --yes', 'Skip confirmations')
61
+ .action(async (options) => {
62
+ await this.handleImportCommand({...globalOptions, ...options});
63
+ });
64
64
 
65
65
  // Validate command
66
66
  terminologyCommand
67
- .command('validate')
68
- .description('Validate SNOMED CT RF2 directory structure')
69
- .option('-s, --source <directory>', 'Source directory to validate')
70
- .action(async (options) => {
71
- await this.handleValidateCommand({...globalOptions, ...options});
72
- });
67
+ .command('validate')
68
+ .description('Validate SNOMED CT RF2 directory structure')
69
+ .option('-s, --source <directory>', 'Source directory to validate')
70
+ .action(async (options) => {
71
+ await this.handleValidateCommand({...globalOptions, ...options});
72
+ });
73
73
 
74
74
  // Status command
75
75
  terminologyCommand
76
- .command('status')
77
- .description('Show status of SNOMED CT cache')
78
- .option('-d, --dest <file>', 'Cache file to check')
79
- .action(async (options) => {
80
- await this.handleStatusCommand({...globalOptions, ...options});
81
- });
76
+ .command('status')
77
+ .description('Show status of SNOMED CT cache')
78
+ .option('-d, --dest <file>', 'Cache file to check')
79
+ .action(async (options) => {
80
+ await this.handleStatusCommand({...globalOptions, ...options});
81
+ });
82
82
  }
83
83
 
84
84
  async handleImportCommand(options) {
@@ -633,7 +633,7 @@ class SnomedModule extends BaseTerminologyModule {
633
633
  }
634
634
 
635
635
  const additionalAnswers = additionalQuestions.length > 0 ?
636
- await inquirer.prompt(additionalQuestions) : {};
636
+ await inquirer.prompt(additionalQuestions) : {};
637
637
 
638
638
  // Build the final configuration
639
639
  const config = {
@@ -774,7 +774,7 @@ class SnomedModule extends BaseTerminologyModule {
774
774
  } else if (firstLine.startsWith('id\teffectiveTime\tactive\tmoduleId\tconceptId\tlanguageCode\ttypeId\tterm\tcaseSignificanceId')) {
775
775
  files.descriptions.push(filePath);
776
776
  } else if (firstLine.startsWith('id\teffectiveTime\tactive\tmoduleId\tsourceId\tdestinationId\trelationshipGroup\ttypeId\tcharacteristicTypeId\tmodifierId') &&
777
- !filePath.includes('StatedRelationship')) {
777
+ !filePath.includes('StatedRelationship')) {
778
778
  files.relationships.push(filePath);
779
779
  }
780
780
  } catch (error) {
@@ -1165,6 +1165,19 @@ class SnomedImporter {
1165
1165
  refsetDirectories: []
1166
1166
  };
1167
1167
 
1168
+ // For extensions: load base edition files first so that all International
1169
+ // Edition concepts, descriptions, and relationships are present before the
1170
+ // extension content is layered on top.
1171
+ if (this.config.base) {
1172
+ if (this.config.verbose) {
1173
+ console.log(`Loading base edition from: ${this.config.base}`);
1174
+ }
1175
+ this._scanDirectory(this.config.base, files);
1176
+ }
1177
+
1178
+ // Then load the extension (or standalone edition) source files.
1179
+ // For extensions these come second so that extension rows can override
1180
+ // base rows where the same component has been updated.
1168
1181
  this._scanDirectory(this.config.source, files);
1169
1182
  return files;
1170
1183
  }
@@ -1200,7 +1213,7 @@ class SnomedImporter {
1200
1213
  } else if (firstLine.startsWith('id\teffectiveTime\tactive\tmoduleId\tconceptId\tlanguageCode\ttypeId\tterm\tcaseSignificanceId')) {
1201
1214
  files.descriptions.push(filePath);
1202
1215
  } else if (firstLine.startsWith('id\teffectiveTime\tactive\tmoduleId\tsourceId\tdestinationId\trelationshipGroup\ttypeId\tcharacteristicTypeId\tmodifierId') &&
1203
- !filePath.includes('StatedRelationship')) {
1216
+ !filePath.includes('StatedRelationship')) {
1204
1217
  files.relationships.push(filePath);
1205
1218
  }
1206
1219
  } catch (error) {
@@ -1250,6 +1263,9 @@ class SnomedImporter {
1250
1263
  this.conceptList = [];
1251
1264
  let processedLines = 0;
1252
1265
 
1266
+ // When loading base + extension, track list indices for fast replacement
1267
+ const conceptIdToListIndex = this.config.base ? new Map() : null;
1268
+
1253
1269
  for (let i = 0; i < this.files.concepts.length; i++) {
1254
1270
  const file = this.files.concepts[i];
1255
1271
  const rl = readline.createInterface({
@@ -1275,8 +1291,23 @@ class SnomedImporter {
1275
1291
  };
1276
1292
 
1277
1293
  if (this.conceptMap.has(concept.id)) {
1278
- throw new Error(`Duplicate Concept Id at line ${lineCount}: ${concept.id} - check you are processing the snapshot not the full edition`);
1294
+ // When loading base + extension, the same concept may appear in both.
1295
+ // The extension snapshot row takes precedence (it is loaded second).
1296
+ // If there is no base directory this is a genuine duplicate in a single
1297
+ // snapshot and we should still raise an error.
1298
+ if (!this.config.base) {
1299
+ throw new Error(`Duplicate Concept Id at line ${lineCount}: ${concept.id} - check you are processing the snapshot not the full edition`);
1300
+ }
1301
+ // Replace the base edition row with the extension row
1302
+ const idx = conceptIdToListIndex.get(concept.id);
1303
+ if (idx !== undefined) {
1304
+ this.conceptList[idx] = concept;
1305
+ }
1306
+ this.conceptMap.set(concept.id, concept);
1279
1307
  } else {
1308
+ if (conceptIdToListIndex) {
1309
+ conceptIdToListIndex.set(concept.id, this.conceptList.length);
1310
+ }
1280
1311
  this.conceptList.push(concept);
1281
1312
  this.conceptMap.set(concept.id, concept);
1282
1313
  }
@@ -1347,6 +1378,12 @@ class SnomedImporter {
1347
1378
  const descriptionList = [];
1348
1379
  let processedLines = 0;
1349
1380
 
1381
+ // Build a lookup from description id -> index in descriptionList so that
1382
+ // extension rows can replace base rows for the same description.
1383
+ if (this.config.base) {
1384
+ this._descriptionIdSet = new Map();
1385
+ }
1386
+
1350
1387
  for (const file of this.files.descriptions) {
1351
1388
  const rl = readline.createInterface({
1352
1389
  input: fs.createReadStream(file),
@@ -1372,7 +1409,19 @@ class SnomedImporter {
1372
1409
  caseSignificanceId: BigInt(parts[8])
1373
1410
  };
1374
1411
 
1375
- descriptionList.push(desc);
1412
+ // When loading base + extension, the same description may appear in
1413
+ // both. The extension row (loaded second) takes precedence.
1414
+ if (this.config.base && this._descriptionIdSet) {
1415
+ const existingIdx = this._descriptionIdSet.get(desc.id);
1416
+ if (existingIdx !== undefined) {
1417
+ descriptionList[existingIdx] = desc;
1418
+ } else {
1419
+ this._descriptionIdSet.set(desc.id, descriptionList.length);
1420
+ descriptionList.push(desc);
1421
+ }
1422
+ } else {
1423
+ descriptionList.push(desc);
1424
+ }
1376
1425
  }
1377
1426
 
1378
1427
  processedLines++;
@@ -1417,8 +1466,8 @@ class SnomedImporter {
1417
1466
  const caps = this.conceptMap.get(desc.caseSignificanceId);
1418
1467
 
1419
1468
  const descOffset = this.descriptions.addDescription(
1420
- termOffset, desc.id, effectiveTime, concept.index,
1421
- module.index, kind.index, caps.index, desc.active, lang
1469
+ termOffset, desc.id, effectiveTime, concept.index,
1470
+ module.index, kind.index, caps.index, desc.active, lang
1422
1471
  );
1423
1472
 
1424
1473
  // Track description on concept
@@ -1692,6 +1741,11 @@ class SnomedImporter {
1692
1741
  }
1693
1742
  this.isAIndex = isAConcept.index;
1694
1743
 
1744
+ // Pass 1: collect all relationship rows, deduplicating so that extension
1745
+ // rows (loaded second) override base rows with the same relationship id.
1746
+ const relationshipRows = [];
1747
+ const relationshipIdMap = this.config.base ? new Map() : null; // id -> index in relationshipRows
1748
+
1695
1749
  for (const file of this.files.relationships) {
1696
1750
  const rl = readline.createInterface({
1697
1751
  input: fs.createReadStream(file),
@@ -1718,40 +1772,16 @@ class SnomedImporter {
1718
1772
  modifierId: BigInt(parts[9])
1719
1773
  };
1720
1774
 
1721
- const source = this.conceptMap.get(rel.sourceId);
1722
- const destination = this.conceptMap.get(rel.destinationId);
1723
- const type = this.conceptMap.get(rel.typeId);
1724
-
1725
- if (source && destination && type) {
1726
- const effectiveTime = this.convertDateToSnomedDate(rel.effectiveTime);
1727
-
1728
- // Check if this is a defining relationship
1729
- const defining = rel.characteristicTypeId === RF2_MAGIC_RELN_DEFINING ||
1730
- rel.characteristicTypeId === RF2_MAGIC_RELN_STATED ||
1731
- rel.characteristicTypeId === RF2_MAGIC_RELN_INFERRED;
1732
-
1733
- const relationshipIndex = this.relationships.addRelationship(
1734
- rel.id, source.index, destination.index, type.index,
1735
- 0, 0, 0, effectiveTime, rel.active, defining, rel.relationshipGroup
1736
- );
1737
-
1738
- // Track parent/child relationships for is-a relationships
1739
- if (type.index === this.isAIndex && defining) {
1740
- const sourceTracker = this.getOrCreateConceptTracker(source.index);
1741
- if (rel.active) {
1742
- sourceTracker.addActiveParent(destination.index);
1743
- } else {
1744
- sourceTracker.addInactiveParent(destination.index);
1745
- }
1775
+ if (relationshipIdMap) {
1776
+ const existingIdx = relationshipIdMap.get(rel.id);
1777
+ if (existingIdx !== undefined) {
1778
+ relationshipRows[existingIdx] = rel;
1779
+ } else {
1780
+ relationshipIdMap.set(rel.id, relationshipRows.length);
1781
+ relationshipRows.push(rel);
1746
1782
  }
1747
-
1748
- // Track inbound/outbound relationships
1749
- const sourceTracker = this.getOrCreateConceptTracker(source.index);
1750
- const destTracker = this.getOrCreateConceptTracker(destination.index);
1751
-
1752
- sourceTracker.addOutbound(relationshipIndex);
1753
- destTracker.addInbound(relationshipIndex);
1754
-
1783
+ } else {
1784
+ relationshipRows.push(rel);
1755
1785
  }
1756
1786
  }
1757
1787
 
@@ -1762,10 +1792,62 @@ class SnomedImporter {
1762
1792
  }
1763
1793
  }
1764
1794
 
1795
+ if (this.progressReporter) {
1796
+ this.progressReporter.completeTask('Reading Relationships', processedLines, totalLines);
1797
+ }
1798
+
1799
+ // Pass 2: process the deduplicated relationship rows into the binary
1800
+ // structures and concept trackers.
1801
+ const buildProgressBar = this.progressReporter?.createTaskProgressBar('Building Relationships');
1802
+ buildProgressBar?.start(relationshipRows.length, 0);
1803
+
1804
+ for (let i = 0; i < relationshipRows.length; i++) {
1805
+ const rel = relationshipRows[i];
1806
+
1807
+ const source = this.conceptMap.get(rel.sourceId);
1808
+ const destination = this.conceptMap.get(rel.destinationId);
1809
+ const type = this.conceptMap.get(rel.typeId);
1810
+
1811
+ if (source && destination && type) {
1812
+ const effectiveTime = this.convertDateToSnomedDate(rel.effectiveTime);
1813
+
1814
+ // Check if this is a defining relationship
1815
+ const defining = rel.characteristicTypeId === RF2_MAGIC_RELN_DEFINING ||
1816
+ rel.characteristicTypeId === RF2_MAGIC_RELN_STATED ||
1817
+ rel.characteristicTypeId === RF2_MAGIC_RELN_INFERRED;
1818
+
1819
+ const relationshipIndex = this.relationships.addRelationship(
1820
+ rel.id, source.index, destination.index, type.index,
1821
+ 0, 0, 0, effectiveTime, rel.active, defining, rel.relationshipGroup
1822
+ );
1823
+
1824
+ // Track parent/child relationships for is-a relationships
1825
+ if (type.index === this.isAIndex && defining) {
1826
+ const sourceTracker = this.getOrCreateConceptTracker(source.index);
1827
+ if (rel.active) {
1828
+ sourceTracker.addActiveParent(destination.index);
1829
+ } else {
1830
+ sourceTracker.addInactiveParent(destination.index);
1831
+ }
1832
+ }
1833
+
1834
+ // Track inbound/outbound relationships
1835
+ const sourceTracker = this.getOrCreateConceptTracker(source.index);
1836
+ const destTracker = this.getOrCreateConceptTracker(destination.index);
1837
+
1838
+ sourceTracker.addOutbound(relationshipIndex);
1839
+ destTracker.addInbound(relationshipIndex);
1840
+ }
1841
+
1842
+ if (i % 1000 === 0) {
1843
+ buildProgressBar?.update(i);
1844
+ }
1845
+ }
1846
+
1765
1847
  this.relationships.doneBuild();
1766
1848
 
1767
1849
  if (this.progressReporter) {
1768
- this.progressReporter.completeTask('Reading Relationships', processedLines, totalLines);
1850
+ this.progressReporter.completeTask('Building Relationships', relationshipRows.length, relationshipRows.length);
1769
1851
  }
1770
1852
  }
1771
1853
 
@@ -1800,9 +1882,9 @@ class SnomedImporter {
1800
1882
  // Set parents if concept has any
1801
1883
  if (tracker.activeParents.length > 0 || tracker.inactiveParents.length > 0) {
1802
1884
  const activeParentsRef = tracker.activeParents.length > 0 ?
1803
- this.refs.addReferences(tracker.activeParents) : 0;
1885
+ this.refs.addReferences(tracker.activeParents) : 0;
1804
1886
  const inactiveParentsRef = tracker.inactiveParents.length > 0 ?
1805
- this.refs.addReferences(tracker.inactiveParents) : 0;
1887
+ this.refs.addReferences(tracker.inactiveParents) : 0;
1806
1888
 
1807
1889
  this.concepts.setParents(concept.index, activeParentsRef, inactiveParentsRef);
1808
1890
  } else {
@@ -2104,14 +2186,14 @@ class SnomedImporter {
2104
2186
  // NOTE: This calls addString() so it must happen AFTER strings.reopen()
2105
2187
  for (const refSet of refSetsArray) {
2106
2188
  this.refsetIndex.addReferenceSet(
2107
- this.addString(refSet.title), // This needs strings builder to be active
2108
- refSet.filename,
2109
- refSet.index,
2110
- refSet.membersByRef,
2111
- refSet.membersByName,
2112
- refSet.fieldTypes,
2113
- refSet.fieldNames,
2114
- refSet.langs
2189
+ this.addString(refSet.title), // This needs strings builder to be active
2190
+ refSet.filename,
2191
+ refSet.index,
2192
+ refSet.membersByRef,
2193
+ refSet.membersByName,
2194
+ refSet.fieldTypes,
2195
+ refSet.fieldNames,
2196
+ refSet.langs
2115
2197
  );
2116
2198
  }
2117
2199
  }
@@ -2216,7 +2298,13 @@ class SnomedImporter {
2216
2298
  if (!refSet || currentRefSetId !== refSetId) {
2217
2299
  currentRefSetId = refSetId;
2218
2300
  refSet = this.getOrCreateRefSet(refSetId, displayName, isLangRefset);
2219
- refSet.filename = this.addString(path.relative(this.config.source, filePath));
2301
+ // Compute relative path — the file may live under the base directory
2302
+ // rather than the extension source directory.
2303
+ let relPath = path.relative(this.config.source, filePath);
2304
+ if (this.config.base && relPath.startsWith('..')) {
2305
+ relPath = path.relative(this.config.base, filePath);
2306
+ }
2307
+ refSet.filename = this.addString(relPath);
2220
2308
  refSet.fieldTypes = this.getOrCreateFieldTypes(fieldTypes);
2221
2309
  refSet.fieldNames = this.getOrCreateFieldNames(headers.slice(6), fieldTypes); // Additional fields beyond standard 6
2222
2310
  }
@@ -2577,8 +2665,8 @@ class SnomedImporter {
2577
2665
  };
2578
2666
 
2579
2667
  const services = new SnomedExpressionServices(
2580
- snomedStructures,
2581
- this.isAIndex
2668
+ snomedStructures,
2669
+ this.isAIndex
2582
2670
  );
2583
2671
 
2584
2672
  // Set building flag to true so services will generate normal forms dynamically
package/tx/library.js CHANGED
@@ -35,6 +35,7 @@ const { OCLCodeSystemProvider, OCLSourceCodeSystemFactory } = require('./ocl/cs-
35
35
  const { OCLValueSetProvider } = require('./ocl/vs-ocl');
36
36
  const { OCLConceptMapProvider } = require('./ocl/cm-ocl');
37
37
  const {UriServicesFactory} = require("./cs/cs-uri");
38
+ const {debugLog} = require("./operation-context");
38
39
 
39
40
  /**
40
41
  * This class holds all the loaded content ready for processing
@@ -185,6 +186,7 @@ class Library {
185
186
  try {
186
187
  await this.processSource(source, this.packageManager, "cs");
187
188
  } catch (error) {
189
+ debugLog(error);
188
190
  console.error(`Failed to load code systems from '${source}': ${error.message}`);
189
191
  throw error;
190
192
  }
@@ -196,6 +198,7 @@ class Library {
196
198
  try {
197
199
  await this.processSource(source, this.packageManager, "npm");
198
200
  } catch (error) {
201
+ debugLog(error);
199
202
  console.error(`Failed to load package '${source}': ${error.message}`);
200
203
  throw error;
201
204
  }
@@ -1,4 +1,5 @@
1
1
  const fs = require('fs').promises;
2
+ const crypto = require('crypto');
2
3
  const sqlite3 = require('sqlite3').verbose();
3
4
  const { VersionUtilities } = require('../../library/version-utilities');
4
5
  const ValueSet = require("../library/valueset");
@@ -18,8 +19,13 @@ class ValueSetDatabase {
18
19
  */
19
20
  constructor(dbPath) {
20
21
  this.dbPath = dbPath;
21
- this._db = null; // Shared read-only connection
22
- this._writeDb = null; // Write connection (opened only when needed)
22
+ // Single read-write connection used for everything. Using a separate
23
+ // OPEN_READONLY connection for reads can miss WAL-based schema changes
24
+ // made through the write connection (because read-only opens can't fully
25
+ // participate in the shared-memory protocol), so queries issued right
26
+ // after a migration ALTER TABLE can fail with a stale schema cache.
27
+ this._writeDb = null;
28
+ this._migrationPromise = null;
23
29
  }
24
30
 
25
31
  /**
@@ -29,46 +35,104 @@ class ValueSetDatabase {
29
35
  * @private
30
36
  */
31
37
  _migrateIfNeeded(db) {
32
- return new Promise((resolve, reject) => {
33
- db.all("PRAGMA table_info(valuesets)", [], (err, cols) => {
34
- if (err) { reject(err); return; }
35
- const hasCol = cols.some(c => c.name === 'date_first_seen');
36
- const migrations = [];
37
- if (!hasCol) {
38
- migrations.push(new Promise((res, rej) => {
38
+ // Run migrations SEQUENTIALLY. node-sqlite3 does not guarantee that
39
+ // separately-submitted statements run in submission order on the same
40
+ // connection — `db.serialize()` is opt-in. Without sequencing, a
41
+ // `CREATE INDEX` can race ahead of its `CREATE TABLE`, or a `PRAGMA
42
+ // table_info` can race ahead of a `CREATE TABLE IF NOT EXISTS`, and
43
+ // you get "no such table" errors on DDL that should have been fine.
44
+ const run = (sql) => new Promise((res, rej) => {
45
+ db.run(sql, [], (err) => err ? rej(err) : res());
46
+ });
47
+ const all = (sql) => new Promise((res, rej) => {
48
+ db.all(sql, [], (err, rows) => err ? rej(err) : res(rows));
49
+ });
50
+
51
+ return (async () => {
52
+ const cols = await all("PRAGMA table_info(valuesets)");
53
+ const hasDateFirstSeen = cols.some(c => c.name === 'date_first_seen');
54
+ const hasContentHash = cols.some(c => c.name === 'content_hash');
55
+
56
+ if (!hasDateFirstSeen) {
57
+ await run("ALTER TABLE valuesets ADD COLUMN date_first_seen INTEGER DEFAULT 0");
58
+ }
59
+ if (!hasContentHash) {
60
+ await run("ALTER TABLE valuesets ADD COLUMN content_hash TEXT");
61
+ }
62
+
63
+ // Ensure vsac_runs table exists (with total_updated for fresh installs)
64
+ await run(`
65
+ CREATE TABLE IF NOT EXISTS vsac_runs (
66
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
67
+ started_at INTEGER NOT NULL,
68
+ finished_at INTEGER,
69
+ status TEXT NOT NULL DEFAULT 'running',
70
+ error_message TEXT,
71
+ total_fetched INTEGER,
72
+ total_new INTEGER,
73
+ total_updated INTEGER
74
+ )
75
+ `);
76
+
77
+ // If vsac_runs already existed (older schema), add total_updated column
78
+ const runCols = await all("PRAGMA table_info(vsac_runs)");
79
+ const hasTotalUpdated = runCols.some(c => c.name === 'total_updated');
80
+ if (!hasTotalUpdated && runCols.length > 0) {
81
+ await run("ALTER TABLE vsac_runs ADD COLUMN total_updated INTEGER");
82
+ }
83
+
84
+ // Ensure vsac_settings table exists (for _lastUpdated tracking etc.)
85
+ await run(`
86
+ CREATE TABLE IF NOT EXISTS vsac_settings (
87
+ key TEXT PRIMARY KEY,
88
+ value TEXT
89
+ )
90
+ `);
91
+
92
+ // Ensure vsac_events table exists (audit log of new/updated/deleted value sets)
93
+ await run(`
94
+ CREATE TABLE IF NOT EXISTS vsac_events (
95
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
96
+ timestamp INTEGER NOT NULL,
97
+ event_type TEXT NOT NULL,
98
+ url TEXT NOT NULL,
99
+ version TEXT,
100
+ detail TEXT
101
+ )
102
+ `);
103
+ await run("CREATE INDEX IF NOT EXISTS idx_events_timestamp ON vsac_events(timestamp)");
104
+
105
+ // Backfill content_hash for any existing rows that don't have one.
106
+ // This establishes a baseline so the NEXT sync can detect real content
107
+ // changes immediately (otherwise the first sync just silently populates
108
+ // hashes and can never flag anything as 'updated').
109
+ const needHash = await all(
110
+ "SELECT COUNT(*) AS n FROM valuesets WHERE content_hash IS NULL"
111
+ );
112
+ const missing = (needHash[0] && needHash[0].n) || 0;
113
+ if (missing > 0) {
114
+ console.log(`Backfilling content_hash for ${missing} existing value sets...`);
115
+ const rows = await all(
116
+ "SELECT id, content FROM valuesets WHERE content_hash IS NULL"
117
+ );
118
+ let done = 0;
119
+ for (const row of rows) {
120
+ const hash = crypto.createHash('sha256').update(row.content).digest('hex');
121
+ await new Promise((res, rej) => {
39
122
  db.run(
40
- "ALTER TABLE valuesets ADD COLUMN date_first_seen INTEGER DEFAULT 0",
41
- [],
123
+ 'UPDATE valuesets SET content_hash = ? WHERE id = ?',
124
+ [hash, row.id],
42
125
  (err) => err ? rej(err) : res()
43
126
  );
44
- }));
127
+ });
128
+ done++;
129
+ if (done % 1000 === 0) {
130
+ console.log(` ...${done}/${missing}`);
131
+ }
45
132
  }
46
- // Ensure vsac_runs table exists
47
- migrations.push(new Promise((res, rej) => {
48
- db.run(`
49
- CREATE TABLE IF NOT EXISTS vsac_runs (
50
- id INTEGER PRIMARY KEY AUTOINCREMENT,
51
- started_at INTEGER NOT NULL,
52
- finished_at INTEGER,
53
- status TEXT NOT NULL DEFAULT 'running',
54
- error_message TEXT,
55
- total_fetched INTEGER,
56
- total_new INTEGER
57
- )
58
- `, [], (err) => err ? rej(err) : res());
59
- }));
60
- // Ensure vsac_settings table exists (for _lastUpdated tracking etc.)
61
- migrations.push(new Promise((res, rej) => {
62
- db.run(`
63
- CREATE TABLE IF NOT EXISTS vsac_settings (
64
- key TEXT PRIMARY KEY,
65
- value TEXT
66
- )
67
- `, [], (err) => err ? rej(err) : res());
68
- }));
69
- Promise.all(migrations).then(() => resolve()).catch(reject);
70
- });
71
- });
133
+ console.log(`Backfilled ${done} hashes.`);
134
+ }
135
+ })();
72
136
  }
73
137
 
74
138
  /**
@@ -77,21 +141,9 @@ class ValueSetDatabase {
77
141
  * @private
78
142
  */
79
143
  _getReadConnection() {
80
- return new Promise((resolve, reject) => {
81
- if (this._db) {
82
- resolve(this._db);
83
- return;
84
- }
85
-
86
- this._db = new sqlite3.Database(this.dbPath, sqlite3.OPEN_READONLY, (err) => {
87
- if (err) {
88
- this._db = null;
89
- reject(new Error(`Failed to open database ${this.dbPath}: ${err.message}`));
90
- } else {
91
- resolve(this._db);
92
- }
93
- });
94
- });
144
+ // Reads go through the same connection as writes. See the constructor
145
+ // comment for why we don't use a separate OPEN_READONLY connection.
146
+ return this._ensureMigrated().then(() => this._writeDb);
95
147
  }
96
148
 
97
149
  /**
@@ -100,21 +152,38 @@ class ValueSetDatabase {
100
152
  * @private
101
153
  */
102
154
  _getWriteConnection() {
103
- return new Promise((resolve, reject) => {
155
+ return this._ensureMigrated().then(() => this._writeDb);
156
+ }
157
+
158
+ /**
159
+ * Ensure the database schema is migrated. Idempotent: subsequent calls
160
+ * return the cached promise. Opens a write connection (which is required
161
+ * for ALTER TABLE) if one is not already open. The write connection is
162
+ * kept open for reuse by later _getWriteConnection calls.
163
+ * @returns {Promise<void>}
164
+ * @private
165
+ */
166
+ _ensureMigrated() {
167
+ if (this._migrationPromise) {
168
+ return this._migrationPromise;
169
+ }
170
+ this._migrationPromise = new Promise((resolve, reject) => {
104
171
  if (this._writeDb) {
105
- resolve(this._writeDb);
172
+ this._migrateIfNeeded(this._writeDb).then(resolve).catch(reject);
106
173
  return;
107
174
  }
108
-
109
175
  this._writeDb = new sqlite3.Database(this.dbPath, (err) => {
110
176
  if (err) {
111
177
  this._writeDb = null;
112
178
  reject(new Error(`Failed to open database for writing: ${err.message}`));
113
- } else {
114
- this._migrateIfNeeded(this._writeDb).then(() => resolve(this._writeDb)).catch(reject);
179
+ return;
115
180
  }
181
+ this._migrateIfNeeded(this._writeDb).then(resolve).catch(reject);
116
182
  });
117
183
  });
184
+ // If migration fails, clear the cached promise so a subsequent call can retry
185
+ this._migrationPromise.catch(() => { this._migrationPromise = null; });
186
+ return this._migrationPromise;
118
187
  }
119
188
 
120
189
  /**
@@ -122,29 +191,20 @@ class ValueSetDatabase {
122
191
  * @returns {Promise<void>}
123
192
  */
124
193
  async close() {
125
- const closePromises = [];
126
-
127
- if (this._db) {
128
- closePromises.push(new Promise((resolve) => {
129
- this._db.close((err) => {
130
- if (err) console.warn(`Warning closing read connection: ${err.message}`);
131
- this._db = null;
132
- resolve();
133
- });
134
- }));
135
- }
194
+ // Clear the cached migration promise so a subsequent open re-migrates
195
+ this._migrationPromise = null;
136
196
 
137
- if (this._writeDb) {
138
- closePromises.push(new Promise((resolve) => {
139
- this._writeDb.close((err) => {
140
- if (err) console.warn(`Warning closing write connection: ${err.message}`);
141
- this._writeDb = null;
142
- resolve();
143
- });
144
- }));
197
+ if (!this._writeDb) {
198
+ return;
145
199
  }
146
200
 
147
- await Promise.all(closePromises);
201
+ await new Promise((resolve) => {
202
+ this._writeDb.close((err) => {
203
+ if (err) console.warn(`Warning closing database connection: ${err.message}`);
204
+ this._writeDb = null;
205
+ resolve();
206
+ });
207
+ });
148
208
  }
149
209
 
150
210
  /**
@@ -193,6 +253,7 @@ class ValueSetDatabase {
193
253
  status TEXT,
194
254
  title TEXT,
195
255
  content TEXT NOT NULL,
256
+ content_hash TEXT,
196
257
  last_seen INTEGER DEFAULT (strftime('%s', 'now')),
197
258
  date_first_seen INTEGER DEFAULT (strftime('%s', 'now'))
198
259
  )
@@ -241,17 +302,31 @@ class ValueSetDatabase {
241
302
  status TEXT NOT NULL DEFAULT 'running',
242
303
  error_message TEXT,
243
304
  total_fetched INTEGER,
244
- total_new INTEGER
305
+ total_new INTEGER,
306
+ total_updated INTEGER
245
307
  )
246
308
  `);
247
309
 
248
310
  // Settings table (key-value store for _lastUpdated tracking etc.)
249
311
  db.run(`
250
- CREATE TABLE IF NOT EXISTS vsac_settings (
251
- key TEXT PRIMARY KEY,
252
- value TEXT
253
- )
312
+ CREATE TABLE IF NOT EXISTS vsac_settings (
313
+ key TEXT PRIMARY KEY,
314
+ value TEXT
315
+ )
316
+ `);
317
+
318
+ // Event log table (new/updated/deleted value sets)
319
+ db.run(`
320
+ CREATE TABLE IF NOT EXISTS vsac_events (
321
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
322
+ timestamp INTEGER NOT NULL,
323
+ event_type TEXT NOT NULL,
324
+ url TEXT NOT NULL,
325
+ version TEXT,
326
+ detail TEXT
327
+ )
254
328
  `);
329
+ db.run('CREATE INDEX idx_events_timestamp ON vsac_events(timestamp)');
255
330
 
256
331
  // Create indexes for better search performance
257
332
  db.run('CREATE INDEX idx_valuesets_url ON valuesets(url, version)');
@@ -300,15 +375,36 @@ class ValueSetDatabase {
300
375
  * @param {number} id - The run ID from startRun()
301
376
  * @param {number} totalFetched - Total value sets fetched
302
377
  * @param {number} totalNew - Number of new value sets found
378
+ * @param {number} [totalUpdated=0] - Number of existing value sets whose content changed
303
379
  * @returns {Promise<void>}
304
380
  */
305
- async finishRun(id, totalFetched, totalNew) {
381
+ async finishRun(id, totalFetched, totalNew, totalUpdated = 0) {
306
382
  const db = await this._getWriteConnection();
307
383
  return new Promise((resolve, reject) => {
308
384
  db.run(
309
385
  `UPDATE vsac_runs SET finished_at = strftime('%s','now'), status = 'ok',
310
- total_fetched = ?, total_new = ? WHERE id = ?`,
311
- [totalFetched, totalNew, id],
386
+ total_fetched = ?, total_new = ?, total_updated = ? WHERE id = ?`,
387
+ [totalFetched, totalNew, totalUpdated, id],
388
+ err => err ? reject(err) : resolve()
389
+ );
390
+ });
391
+ }
392
+
393
+ /**
394
+ * Record a VSAC event in the audit log
395
+ * @param {string} eventType - 'new', 'updated', or 'deleted'
396
+ * @param {string} url - The value set URL
397
+ * @param {string|null} version - The version, or null
398
+ * @param {string|null} [detail] - Optional detail string
399
+ * @returns {Promise<void>}
400
+ */
401
+ async recordEvent(eventType, url, version, detail = null) {
402
+ const db = await this._getWriteConnection();
403
+ return new Promise((resolve, reject) => {
404
+ db.run(
405
+ `INSERT INTO vsac_events (timestamp, event_type, url, version, detail)
406
+ VALUES (strftime('%s','now'), ?, ?, ?, ?)`,
407
+ [eventType, url, version || null, detail],
312
408
  err => err ? reject(err) : resolve()
313
409
  );
314
410
  });
@@ -358,7 +454,7 @@ class ValueSetDatabase {
358
454
  return new Promise((resolve, reject) => {
359
455
  db.run(
360
456
  `INSERT INTO vsac_settings (key, value) VALUES (?, ?)
361
- ON CONFLICT(key) DO UPDATE SET value = excluded.value`,
457
+ ON CONFLICT(key) DO UPDATE SET value = excluded.value`,
362
458
  [key, value],
363
459
  err => err ? reject(err) : resolve()
364
460
  );
@@ -368,9 +464,10 @@ class ValueSetDatabase {
368
464
  /**
369
465
  * Insert or update a single ValueSet in the database
370
466
  * @param {Object} valueSet - The ValueSet resource
467
+ * @param {string} [contentHash] - Optional pre-computed content hash to store
371
468
  * @returns {Promise<void>}
372
469
  */
373
- async upsertValueSet(valueSet) {
470
+ async upsertValueSet(valueSet, contentHash = null) {
374
471
  if (!valueSet.url) {
375
472
  throw new Error('ValueSet must have a url property');
376
473
  }
@@ -405,8 +502,9 @@ class ValueSetDatabase {
405
502
  db.run(`
406
503
  INSERT INTO valuesets (
407
504
  id, url, version, date, description, effectivePeriod_start, effectivePeriod_end,
408
- expansion_identifier, name, publisher, status, title, content, last_seen, date_first_seen
409
- ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, strftime('%s', 'now'), strftime('%s', 'now'))
505
+ expansion_identifier, name, publisher, status, title, content, content_hash,
506
+ last_seen, date_first_seen
507
+ ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, strftime('%s', 'now'), strftime('%s', 'now'))
410
508
  ON CONFLICT(id) DO UPDATE SET
411
509
  url=excluded.url,
412
510
  version=excluded.version,
@@ -420,6 +518,7 @@ class ValueSetDatabase {
420
518
  status=excluded.status,
421
519
  title=excluded.title,
422
520
  content=excluded.content,
521
+ content_hash=excluded.content_hash,
423
522
  last_seen=strftime('%s', 'now')
424
523
  `, [
425
524
  valueSet.id,
@@ -434,7 +533,8 @@ class ValueSetDatabase {
434
533
  valueSet.publisher || null,
435
534
  valueSet.status || null,
436
535
  valueSet.title || null,
437
- JSON.stringify(valueSet)
536
+ JSON.stringify(valueSet),
537
+ contentHash
438
538
  ], (err) => {
439
539
  if (err) {
440
540
  reject(new Error(`Failed to insert main record: ${err.message}`));
@@ -450,6 +550,24 @@ class ValueSetDatabase {
450
550
  });
451
551
  }
452
552
 
553
+ /**
554
+ * Backfill the content_hash column for a row without rewriting content or
555
+ * emitting an event. Used for legacy rows from before content_hash existed.
556
+ * @param {string} id - The ValueSet id
557
+ * @param {string} hash - The SHA-256 hex hash to store
558
+ * @returns {Promise<void>}
559
+ */
560
+ async setContentHash(id, hash) {
561
+ const db = await this._getWriteConnection();
562
+ return new Promise((resolve, reject) => {
563
+ db.run(
564
+ 'UPDATE valuesets SET content_hash = ? WHERE id = ?',
565
+ [hash, id],
566
+ err => err ? reject(err) : resolve()
567
+ );
568
+ });
569
+ }
570
+
453
571
  /**
454
572
  * Just update the timestamp on the valueset
455
573
  * @param {Object} valueSet - The ValueSet resource
@@ -590,7 +708,7 @@ class ValueSetDatabase {
590
708
  const db = await this._getReadConnection();
591
709
 
592
710
  return new Promise((resolve, reject) => {
593
- db.all('SELECT id, url, version, content FROM valuesets', [], (err, rows) => {
711
+ db.all('SELECT id, url, version, content, content_hash FROM valuesets', [], (err, rows) => {
594
712
  if (err) {
595
713
  reject(new Error(`Failed to load value sets: ${err.message}`));
596
714
  return;
@@ -603,6 +721,9 @@ class ValueSetDatabase {
603
721
  for (const row of rows) {
604
722
  const valueSet = new ValueSet(JSON.parse(row.content));
605
723
  valueSet.sourcePackage = source;
724
+ // Attach the stored content hash so callers can detect changes
725
+ // without recomputing over the full JSON.
726
+ valueSet.contentHash = row.content_hash || null;
606
727
  // Store by URL and id alone
607
728
  this.addToMap(valueSetMap, row.id, row.url, row.version, valueSet);
608
729
  }
package/tx/vs/vs-vsac.js CHANGED
@@ -1,4 +1,5 @@
1
1
  const path = require('path');
2
+ const crypto = require('crypto');
2
3
  const axios = require('axios');
3
4
  const { AbstractValueSetProvider } = require('./vs-api');
4
5
  const { ValueSetDatabase } = require('./vs-database');
@@ -11,6 +12,8 @@ const {debugLog} = require("../operation-context");
11
12
  * Fetches and caches ValueSets from the NLM VSAC FHIR server
12
13
  */
13
14
  class VSACValueSetProvider extends AbstractValueSetProvider {
15
+ SYNC_AT_START_UP = false;
16
+
14
17
  /**
15
18
  * @param {Object} config - Configuration object
16
19
  * @param {string} config.apiKey - API key for VSAC authentication
@@ -71,12 +74,11 @@ class VSACValueSetProvider extends AbstractValueSetProvider {
71
74
  if (!(await this.database.exists())) {
72
75
  await this.database.create();
73
76
  } else {
74
- // Ensure schema is up to date (e.g. date_first_seen column added after initial deploy)
75
- await this.database._migrateIfNeeded(await this.database._getWriteConnection());
76
- // Load existing data
77
+ // Schema migrations are applied lazily by the database layer on first
78
+ // connection. Just load existing data.
77
79
  await this._reloadMap();
78
80
  }
79
- if (this.valueSetMap.size == 0) {
81
+ if (this.SYNC_AT_START_UP || this.valueSetMap.size == 0) {
80
82
  await this.refreshValueSets();
81
83
  }
82
84
  // Start periodic refresh
@@ -182,11 +184,11 @@ class VSACValueSetProvider extends AbstractValueSetProvider {
182
184
  // deduplicate the queue
183
185
  this.queue = [...new Set(this.queue)];
184
186
 
185
- let tracking = { totalFetched: 0, totalNew: 0, count: 0, newCount : 0 };
187
+ let tracking = { totalFetched: 0, totalNew: 0, totalUpdated: 0, count: 0, newCount : 0 };
186
188
  // phase 2: query for history & content
187
189
  this.requeue = [];
188
190
  for (let q of this.queue) {
189
- this.stats.task('VSAC History for '+q, `running (${tracking.totalFetched} fetched, ${tracking.totalNew} new)`);
191
+ this.stats.task('VSAC History for '+q, `running (${tracking.totalFetched} fetched, ${tracking.totalNew} new, ${tracking.totalUpdated} updated)`);
190
192
  try {
191
193
  await this.processContentAndHistory(q, tracking, this.queue.length);
192
194
  } catch (error) {
@@ -194,29 +196,27 @@ class VSACValueSetProvider extends AbstractValueSetProvider {
194
196
  debugLog(error);
195
197
  this.stats.task('VSAC Sync', error.message);
196
198
  }
197
- // `running (${totalFetched} fetched, ${totalNew} new)`)
198
199
  tracking.count++;
199
200
  }
200
201
  console.log("Requeue");
201
202
  for (let q of this.requeue) {
202
- this.stats.task('VSAC History for '+q, `running (${tracking.totalFetched} fetched, ${tracking.totalNew} new)`);
203
+ this.stats.task('VSAC History for '+q, `running (${tracking.totalFetched} fetched, ${tracking.totalNew} new, ${tracking.totalUpdated} updated)`);
203
204
  try {
204
205
  await this.processContentAndHistory(q, tracking, this.requeue.length);
205
206
  } catch (error) {
206
207
  debugLog(error);
207
208
  this.stats.task('VSAC Sync', error.message);
208
209
  }
209
- // `running (${totalFetched} fetched, ${totalNew} new)`)
210
210
  tracking.count++;
211
211
  }
212
212
 
213
213
  // Reload map with fresh data
214
214
  await this._reloadMap();
215
- let msg = `VSAC refresh completed. Total: ${tracking.totalFetched} ValueSets, Deleted: ${tracking.deletedCount}`;
215
+ let msg = `VSAC refresh completed. Total: ${tracking.totalFetched} ValueSets, New: ${tracking.totalNew}, Updated: ${tracking.totalUpdated}`;
216
216
  this.stats.taskDone('VSAC Sync', msg);
217
217
  console.log(msg);
218
218
 
219
- await this.database.finishRun(runId, tracking.totalFetched, tracking.totalNew);
219
+ await this.database.finishRun(runId, tracking.totalFetched, tracking.totalNew, tracking.totalUpdated);
220
220
  } catch (error) {
221
221
  debugLog(error, 'Error during VSAC refresh:');
222
222
  this.stats.taskError('VSAC Sync', `Error (${error.message})`);
@@ -228,30 +228,71 @@ class VSACValueSetProvider extends AbstractValueSetProvider {
228
228
  }
229
229
 
230
230
  /**
231
- * Insert multiple ValueSets in a batch operation
231
+ * Compute a SHA-256 hash of the ValueSet content for change detection.
232
+ * @param {Object} vs - The ValueSet resource (plain JSON object)
233
+ * @returns {string} hex-encoded SHA-256
234
+ * @private
235
+ */
236
+ _hashValueSet(vs) {
237
+ return crypto.createHash('sha256').update(JSON.stringify(vs)).digest('hex');
238
+ }
239
+
240
+ /**
241
+ * Insert multiple ValueSets in a batch operation.
242
+ * For each value set: if url|version is already known, compare content hashes.
243
+ * - hash unchanged -> touch last_seen only (seeValueSet)
244
+ * - hash changed -> upsert and record an 'updated' event
245
+ * - not seen before -> upsert and record a 'new' event
232
246
  * @param {Array<Object>} valueSets - Array of ValueSet resources
233
- * @returns {Promise<void>}
247
+ * @returns {Promise<{newCount: number, updatedCount: number}>}
234
248
  */
235
249
  async batchUpsertValueSets(valueSets) {
236
250
  if (valueSets.length === 0) {
237
- return;
251
+ return { newCount: 0, updatedCount: 0 };
238
252
  }
239
253
 
240
- let count = 0;
254
+ let newCount = 0;
255
+ let updatedCount = 0;
256
+
241
257
  // Process sequentially to avoid database locking
242
258
  for (const valueSet of valueSets) {
243
- let key = valueSet.url+"|"+valueSet.version;
244
- let vs = this.valueSetMap.get(key);
245
- if (vs) {
246
- // we've seen this before, and maybe fetched it's history, so just update
247
- // the timestamp
248
- await this.database.seeValueSet(valueSet);
259
+ const key = valueSet.url+"|"+valueSet.version;
260
+ const existing = this.valueSetMap.get(key);
261
+ const newHash = this._hashValueSet(valueSet);
262
+
263
+ if (existing) {
264
+ // We've seen this url|version before. Decide whether the content
265
+ // has actually changed by comparing hashes.
266
+ //
267
+ // Note: _reloadMap() mutates the in-memory jsonObj (strips inc.version
268
+ // from compose.include/exclude), so we cannot reliably recompute a
269
+ // hash from existing.jsonObj — it would not match the hash of the
270
+ // original unmutated JSON we stored. For rows predating this feature
271
+ // (content_hash NULL), we defer update detection until the next cycle:
272
+ // the upsert below runs only when hashes differ, so on the *next*
273
+ // sync after migration we'll have a proper baseline.
274
+ if (existing.contentHash && existing.contentHash === newHash) {
275
+ // No change - just touch last_seen
276
+ await this.database.seeValueSet(valueSet);
277
+ } else if (!existing.contentHash) {
278
+ // Legacy row without a stored hash - backfill the hash silently
279
+ // without emitting a spurious 'updated' event. We do a lightweight
280
+ // touch + hash update rather than a full upsert+event.
281
+ await this.database.seeValueSet(valueSet);
282
+ await this.database.setContentHash(valueSet.id, newHash);
283
+ } else {
284
+ // Content has changed - treat as update
285
+ await this.database.upsertValueSet(valueSet, newHash);
286
+ await this.database.recordEvent('updated', valueSet.url, valueSet.version);
287
+ updatedCount++;
288
+ }
249
289
  } else {
250
- await this.database.upsertValueSet(valueSet);
251
- count++;
290
+ await this.database.upsertValueSet(valueSet, newHash);
291
+ await this.database.recordEvent('new', valueSet.url, valueSet.version);
292
+ newCount++;
252
293
  }
253
294
  }
254
- return count;
295
+ return { newCount, updatedCount };
255
296
  }
256
297
 
257
298
  /**
@@ -511,18 +552,21 @@ class VSACValueSetProvider extends AbstractValueSetProvider {
511
552
  const bundle = await this._fetchBundle(url);
512
553
 
513
554
  let vcount = 0;
555
+ let perRun = { newCount: 0, updatedCount: 0 };
514
556
  if (bundle.entry && bundle.entry.length > 0) {
515
557
  // Extract ValueSets from bundle entries
516
558
  const valueSets = bundle.entry
517
559
  .filter(entry => entry.resource && entry.resource.resourceType === 'ValueSet')
518
560
  .map(entry => entry.resource);
519
561
  if (valueSets.length > 0) {
520
- tracking.totalNew = tracking.totalNew + await this.batchUpsertValueSets(valueSets);
562
+ perRun = await this.batchUpsertValueSets(valueSets);
563
+ tracking.totalNew += perRun.newCount;
564
+ tracking.totalUpdated += perRun.updatedCount;
521
565
  tracking.totalFetched += valueSets.length;
522
566
  vcount = valueSets.length;
523
567
  }
524
568
  }
525
- let logMsg = `VSAC (${tracking.count} of ${length}) ${q}: ${vcount} versions`;
569
+ let logMsg = `VSAC (${tracking.count} of ${length}) ${q}: ${vcount} versions (${perRun.newCount} new, ${perRun.updatedCount} updated)`;
526
570
  console.log(logMsg);
527
571
  this.stats.task('VSAC Sync', logMsg);
528
572
  }
@@ -593,30 +637,33 @@ class VSACValueSetProvider extends AbstractValueSetProvider {
593
637
 
594
638
  const rows = await new Promise((resolve, reject) => {
595
639
  db.all(
596
- `SELECT 'vs' AS kind,
640
+ `SELECT 'event' AS kind,
597
641
  url,
598
642
  version,
599
- date_first_seen AS ts,
600
- NULL AS status,
601
- NULL AS error_message,
602
- NULL AS finished_at,
603
- NULL AS total_fetched,
604
- NULL AS total_new
605
- FROM valuesets
606
- WHERE date_first_seen > 0
643
+ timestamp AS ts,
644
+ event_type,
645
+ NULL AS status,
646
+ NULL AS error_message,
647
+ NULL AS finished_at,
648
+ NULL AS total_fetched,
649
+ NULL AS total_new,
650
+ NULL AS total_updated
651
+ FROM vsac_events
607
652
  UNION ALL
608
- SELECT 'run' AS kind,
609
- NULL,
610
- NULL,
611
- started_at AS ts,
612
- status,
613
- error_message,
614
- finished_at,
615
- total_fetched,
616
- total_new
617
- FROM vsac_runs
618
- ORDER BY ts DESC
619
- LIMIT 200`,
653
+ SELECT 'run' AS kind,
654
+ NULL,
655
+ NULL,
656
+ started_at AS ts,
657
+ NULL AS event_type,
658
+ status,
659
+ error_message,
660
+ finished_at,
661
+ total_fetched,
662
+ total_new,
663
+ total_updated
664
+ FROM vsac_runs
665
+ ORDER BY ts DESC
666
+ LIMIT 200`,
620
667
  [],
621
668
  (err, rows) => err ? reject(err) : resolve(rows)
622
669
  );
@@ -636,7 +683,8 @@ class VSACValueSetProvider extends AbstractValueSetProvider {
636
683
  const duration = row.finished_at ? `${row.finished_at - row.ts}s` : 'in progress';
637
684
  let detail, colour;
638
685
  if (row.status === 'ok') {
639
- detail = `${row.total_fetched} fetched, ${row.total_new} new, ${duration}`;
686
+ const updated = row.total_updated != null ? `, ${row.total_updated} updated` : '';
687
+ detail = `${row.total_fetched} fetched, ${row.total_new} new${updated}, ${duration}`;
640
688
  colour = 'green';
641
689
  } else if (row.status === 'error') {
642
690
  detail = `Failed: ${escape(row.error_message || '')} (${duration})`;
@@ -651,9 +699,28 @@ class VSACValueSetProvider extends AbstractValueSetProvider {
651
699
  html += `<td>${detail}</td>`;
652
700
  html += `</tr>`;
653
701
  } else {
702
+ // Event row: 'new', 'updated', or 'deleted'
703
+ let label, colour;
704
+ switch (row.event_type) {
705
+ case 'new':
706
+ label = 'New value set';
707
+ colour = 'green';
708
+ break;
709
+ case 'updated':
710
+ label = 'Updated value set';
711
+ colour = 'blue';
712
+ break;
713
+ case 'deleted':
714
+ label = 'Deleted value set';
715
+ colour = 'red';
716
+ break;
717
+ default:
718
+ label = escape(row.event_type || 'Event');
719
+ colour = 'black';
720
+ }
654
721
  html += `<tr>`;
655
722
  html += `<td>${escape(fmt(row.ts))}</td>`;
656
- html += `<td>New value set</td>`;
723
+ html += `<td><span style="color:${colour}">${label}</span></td>`;
657
724
  html += `<td>${escape(row.url || '')}#${escape(row.version || '')}</td>`;
658
725
  html += `</tr>`;
659
726
  }