dhti-cli 0.6.0 → 0.7.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -13,7 +13,7 @@
13
13
 
14
14
  - 🚀 *Dhanvantari rose out of the water with his four hands, holding a pot full of elixirs!*
15
15
 
16
- #### TL; DR: 🏥 DHTI enables rapid prototyping, sharing, and testing of GenAI applications within an Electronic Health Record (EHR), facilitating the seamless transition of your experiments to clinical practice.
16
+ ### TL; DR: 🏥 DHTI enables rapid prototyping, sharing, and testing of GenAI healthcare applications within an EHR, facilitating the seamless transition of your experiments to practice!
17
17
  👉 [Try it out today!](#try-it-out) and give us a star ⭐️ if you like it!
18
18
 
19
19
  ### About
@@ -62,8 +62,6 @@ The essence of DHTI is *modularity* with an emphasis on *configuration!* It is n
62
62
  <img src="https://github.com/dermatologist/dhti/blob/develop/notes/arch-1.drawio.svg" />
63
63
  </p>
64
64
 
65
- 🔥 **Coming soon!:** We are currently working on expanding the DHTI architecture to support traditional machine learning models, such as *EEG sleep stage classification* and *trichogram analysis*, exposing inference pipelines as agentic tools!
66
-
67
65
  ## ✨ Features
68
66
  * **Modular**: Supports installable Gen AI routines and UI elements.
69
67
  * **Quick prototyping**: CLI helps in quick prototyping and testing of Gen AI routines and UI elements.
@@ -71,7 +69,7 @@ The essence of DHTI is *modularity* with an emphasis on *configuration!* It is n
71
69
  * **Developer friendly**: Copy working files to running containers for testing.
72
70
  * **Dry-run mode**: Preview changes before execution with the `--dry-run` flag.
73
71
  * **Dependency Injection**: Dependency injection for models and hyperparameters for configuring elixirs.
74
- * **Generate synthetic data**: DHTI supports generating synthetic data for testing.
72
+ * **Generate synthetic data**: [DHTI supports generating synthetic data for testing, using Synthea.](/notes/SYNTHEA.md)
75
73
  * **CQL support**: [CQL for clinical decision support](https://nuchange.ca/2025/06/v-llm-in-the-loop-cql-execution-with-unstructured-data-and-fhir-terminology-support.html).
76
74
  * **FHIR**: Data exchange with FHIR schema.
77
75
  * **MCP**: Built in MCP server for pluggable tools.
@@ -81,6 +79,15 @@ The essence of DHTI is *modularity* with an emphasis on *configuration!* It is n
81
79
  * **Graph utilities**: Neo4j for graph utilities.
82
80
  * **LLM**: Ollama for self-hosting LLM models.
83
81
 
82
+ ## ✨ New
83
+ * **MCPX integration**: DHTI now includes an [MCP integrator](https://docs.lunar.dev/mcpx/) that allows other MCP servers to be "installed" and exposed seamlessly to DHTI through the MCPX gateway.
84
+ * **DOCKTOR module**: A new module, [DOCKTOR](/notes/DOCKTOR.md), supports traditional machine learning models packaged as Docker containers, to be used as MCP tools, enabling the deployment of inference pipelines as agent-invokable tools. (in beta)
85
+ * **MCP aware agent**: [dhti-elixir-template](https://github.com/dermatologist/dhti-elixir-template) used in the examples now includes an [MCP aware agent](https://github.com/dermatologist/dhti-elixir-template/blob/feature/agent-2/src/dhti_elixir_template/chain.py) that can autodiscover and invoke tools from the MCPX gateway. Install it using `npx dhti-cli elixir install -g https://github.com/dermatologist/dhti-elixir-template.git -n dhti-elixir-template -b feature/agent2`.
86
+ * **Medplum integration**: [Medplum](https://www.medplum.com/) is now supported as an alternative FHIR server. Read more [here](/notes/medplum.md). This allows you to add FHIR subscriptions for real-time updates and much more.
87
+ * **Synthea integration**: You can now generate synthetic FHIR data using [Synthea](https://synthetichealth.github.io/synthea/). Read more [here](/notes/SYNTHEA.md).
88
+ * **MIMIC support**: You can now load [MIMIC Demo](https://physionet.org/content/mimic-iv-demo/2.2/) data using DHTI in [one command](https://nuchange.ca/2024/11/loading-mimic-dataset-onto-a-fhir-server-in-two-easy-steps.html).
89
+
90
+
84
91
  ## 🔧 For Gen AI Developers
85
92
 
86
93
  *Developers can build elixirs and conchs for DHTI.*
@@ -124,6 +131,7 @@ Tools to fine-tune language models for the stack are on our roadmap. We encourag
124
131
  ## :sparkles: Resources (in Alpha)
125
132
  * [cookiecutter for scaffolding elixirs](https://github.com/dermatologist/cookiecutter-uv)
126
133
  * [cds-hooks-sandbox for testing](https://github.com/dermatologist/cds-hooks-sandbox/tree/dhti-1)
134
+ * [Medplum integration](/notes/medplum.md)
127
135
 
128
136
  ## :sunglasses: Coming soon
129
137
 
@@ -62,19 +62,19 @@ export default class Compose extends Command {
62
62
  const medplum = ['medplum-server', 'medplum-app', 'postgres-db', 'redis', 'mpclient'];
63
63
  const _modules = {
64
64
  cqlFhir,
65
+ docktor,
65
66
  fhir,
66
67
  gateway,
67
68
  langfuse,
68
69
  langserve,
69
70
  mcpFhir,
70
71
  mcpx,
71
- docktor,
72
+ medplum,
72
73
  neo4j,
73
74
  ollama,
74
75
  openmrs,
75
76
  redis,
76
77
  webui,
77
- medplum,
78
78
  };
79
79
  try {
80
80
  const masterData = yaml.load(fs.readFileSync(path.join(RESOURCES_DIR, 'docker-compose-master.yml'), 'utf8'));
@@ -13,6 +13,6 @@ export default class Docktor extends Command {
13
13
  'model-path': import("@oclif/core/interfaces").OptionFlag<string, import("@oclif/core/interfaces").CustomOptions>;
14
14
  workdir: import("@oclif/core/interfaces").OptionFlag<string, import("@oclif/core/interfaces").CustomOptions>;
15
15
  };
16
- private restartMcpxContainer;
17
16
  run(): Promise<void>;
17
+ private restartMcpxContainer;
18
18
  }
@@ -22,8 +22,8 @@ export default class Docktor extends Command {
22
22
  }),
23
23
  environment: Flags.string({
24
24
  char: 'e',
25
- multiple: true,
26
25
  description: 'Environment variables to pass to docker (format: VAR=value)',
26
+ multiple: true,
27
27
  }),
28
28
  image: Flags.string({ char: 'i', description: 'Docker image for the inference pipeline (required for install)' }),
29
29
  'model-path': Flags.string({
@@ -37,18 +37,6 @@ export default class Docktor extends Command {
37
37
  description: 'Working directory for MCPX config',
38
38
  }),
39
39
  };
40
- async restartMcpxContainer(mcpxConfigPath, containerName) {
41
- try {
42
- const { execSync } = await import('node:child_process');
43
- execSync(`docker cp ${mcpxConfigPath} ${containerName}:/lunar/packages/mcpx-server/`);
44
- this.log(chalk.green('Copied mcp.json to container: /lunar/packages/mcpx-server/config/mcp.json'));
45
- execSync(`docker restart ${containerName}`);
46
- this.log(chalk.green(`Restarted ${containerName} container.`));
47
- }
48
- catch (err) {
49
- this.log(chalk.red(`Failed to copy config or restart container '${containerName}'. Please check Docker status and container name.`));
50
- }
51
- }
52
40
  async run() {
53
41
  const { args, flags } = await this.parse(Docktor);
54
42
  const mcpxConfigPath = path.join(flags.workdir, 'config');
@@ -61,83 +49,105 @@ export default class Docktor extends Command {
61
49
  if (!fs.existsSync(mcpJsonPath)) {
62
50
  fs.writeFileSync(mcpJsonPath, JSON.stringify({ mcpServers: {} }, null, 2));
63
51
  }
64
- let mcpConfig = JSON.parse(fs.readFileSync(mcpJsonPath, 'utf8'));
52
+ const mcpConfig = JSON.parse(fs.readFileSync(mcpJsonPath, 'utf8'));
65
53
  // Ensure mcpServers exists
66
54
  if (!mcpConfig.mcpServers) {
67
55
  mcpConfig.mcpServers = {};
68
56
  }
69
- if (args.op === 'install') {
70
- if (!args.name) {
71
- this.error('Name is required for install operation');
72
- }
73
- if (!flags.image) {
74
- this.error('Image is required for install operation');
75
- }
76
- const binds = [];
77
- const envVars = [];
78
- if (flags['model-path']) {
79
- const absModelPath = path.resolve(flags['model-path']);
80
- binds.push(`${absModelPath}:/model`);
57
+ switch (args.op) {
58
+ case 'install': {
59
+ if (!args.name) {
60
+ this.error('Name is required for install operation');
61
+ }
62
+ if (!flags.image) {
63
+ this.error('Image is required for install operation');
64
+ }
65
+ const binds = [];
66
+ const envVars = [];
67
+ if (flags['model-path']) {
68
+ const absModelPath = path.resolve(flags['model-path']);
69
+ binds.push(`${absModelPath}:/model`);
70
+ }
71
+ if (flags.environment && flags.environment.length > 0) {
72
+ const invalidEnvVars = flags.environment.filter((e) => {
73
+ const idx = e.indexOf('=');
74
+ return idx <= 0 || idx === e.length - 1;
75
+ });
76
+ if (invalidEnvVars.length > 0) {
77
+ this.error(`Invalid environment variable format. Expected 'NAME=value'. Invalid entries: ${invalidEnvVars.join(', ')}`);
78
+ }
79
+ envVars.push(...flags.environment);
80
+ }
81
+ // Add socket mounting for docker tools if needed, but primarily we want the container to run as a server
82
+ // MCPX handles the running of the docker container.
83
+ // We need to configure it in mcp.json so MCPX picks it up.
84
+ // Based on MCP std, docker servers are defined with `docker` command.
85
+ // Add (merge) new server into existing mcpServers
86
+ mcpConfig.mcpServers[args.name] = {
87
+ args: [
88
+ 'run',
89
+ '-i',
90
+ '--rm',
91
+ ...binds.flatMap((b) => ['-v', b]),
92
+ ...envVars.flatMap((e) => ['-e', e]),
93
+ flags.image,
94
+ ],
95
+ command: 'docker',
96
+ };
97
+ // Write back the updated config (preserving all other properties and existing servers)
98
+ fs.writeFileSync(mcpJsonPath, JSON.stringify(mcpConfig, null, 2));
99
+ this.log(chalk.green(`Inference pipeline '${args.name}' added`));
100
+ // Copy only mcp.json to container and restart (non-fatal if it fails)
101
+ try {
102
+ await this.restartMcpxContainer(mcpxConfigPath, flags.container);
103
+ }
104
+ catch {
105
+ this.log(chalk.yellow('Note: Could not restart container. Please restart manually if needed.'));
106
+ }
107
+ break;
81
108
  }
82
- if (flags.environment && flags.environment.length > 0) {
83
- const invalidEnvVars = flags.environment.filter((e) => {
84
- const idx = e.indexOf('=');
85
- return idx <= 0 || idx === e.length - 1;
86
- });
87
- if (invalidEnvVars.length > 0) {
88
- this.error(`Invalid environment variable format. Expected 'NAME=value'. Invalid entries: ${invalidEnvVars.join(', ')}`);
109
+ case 'remove': {
110
+ if (!args.name) {
111
+ this.error('Name is required for remove operation');
112
+ }
113
+ if (mcpConfig.mcpServers && mcpConfig.mcpServers[args.name]) {
114
+ delete mcpConfig.mcpServers[args.name];
115
+ // Write back the updated config (preserving all other properties and remaining servers)
116
+ fs.writeFileSync(mcpJsonPath, JSON.stringify(mcpConfig, null, 2));
117
+ this.log(chalk.green(`Inference pipeline '${args.name}' removed`));
118
+ }
119
+ else {
120
+ this.log(chalk.yellow(`Inference pipeline '${args.name}' not found.`));
89
121
  }
90
- envVars.push(...flags.environment);
122
+ break;
91
123
  }
92
- // Add socket mounting for docker tools if needed, but primarily we want the container to run as a server
93
- // MCPX handles the running of the docker container.
94
- // We need to configure it in mcp.json so MCPX picks it up.
95
- // Based on MCP std, docker servers are defined with `docker` command.
96
- // Add (merge) new server into existing mcpServers
97
- mcpConfig.mcpServers[args.name] = {
98
- command: 'docker',
99
- args: [
100
- 'run',
101
- '-i',
102
- '--rm',
103
- ...binds.flatMap((b) => ['-v', b]),
104
- ...envVars.flatMap((e) => ['-e', e]),
105
- flags.image,
106
- ],
107
- };
108
- // Write back the updated config (preserving all other properties and existing servers)
109
- fs.writeFileSync(mcpJsonPath, JSON.stringify(mcpConfig, null, 2));
110
- this.log(chalk.green(`Inference pipeline '${args.name}' added to MCPX config.`));
111
- // Copy only mcp.json to container and restart
112
- await this.restartMcpxContainer(mcpxConfigPath, flags.container);
113
- }
114
- else if (args.op === 'remove') {
115
- if (!args.name) {
116
- this.error('Name is required for remove operation');
124
+ case 'restart': {
125
+ await this.restartMcpxContainer(mcpxConfigPath, flags.container);
126
+ break;
117
127
  }
118
- if (mcpConfig.mcpServers && mcpConfig.mcpServers[args.name]) {
119
- delete mcpConfig.mcpServers[args.name];
120
- // Write back the updated config (preserving all other properties and remaining servers)
121
- fs.writeFileSync(mcpJsonPath, JSON.stringify(mcpConfig, null, 2));
122
- this.log(chalk.green(`Inference pipeline '${args.name}' removed from MCPX config.`));
123
- this.log(chalk.yellow('Please restart the MCPX container to apply changes: dhti-cli docktor restart'));
128
+ case 'list': {
129
+ this.log(chalk.blue('Installed Inference Pipelines:'));
130
+ for (const [name, config] of Object.entries(mcpConfig.mcpServers)) {
131
+ const argsList = Array.isArray(config.args) ? config.args.join(' ') : '';
132
+ this.log(`- ${name}: ${argsList}`);
133
+ }
134
+ break;
124
135
  }
125
- else {
126
- this.log(chalk.yellow(`Inference pipeline '${args.name}' not found.`));
136
+ default: {
137
+ this.error(`Unknown operation: ${args.op}`);
127
138
  }
128
139
  }
129
- else if (args.op === 'restart') {
130
- await this.restartMcpxContainer(mcpxConfigPath, flags.container);
131
- }
132
- else if (args.op === 'list') {
133
- this.log(chalk.blue('Installed Inference Pipelines:'));
134
- for (const [name, config] of Object.entries(mcpConfig.mcpServers)) {
135
- const argsList = Array.isArray(config.args) ? config.args.join(' ') : '';
136
- this.log(`- ${name}: ${argsList}`);
137
- }
140
+ }
141
+ async restartMcpxContainer(mcpxConfigPath, containerName) {
142
+ try {
143
+ const { execSync } = await import('node:child_process');
144
+ execSync(`docker cp ${mcpxConfigPath} ${containerName}:/lunar/packages/mcpx-server/`);
145
+ this.log(chalk.green('Copied mcp.json to container: /lunar/packages/mcpx-server/config/mcp.json'));
146
+ execSync(`docker restart ${containerName}`);
147
+ this.log(chalk.green(`Restarted ${containerName} container.`));
138
148
  }
139
- else {
140
- this.error(`Unknown operation: ${args.op}`);
149
+ catch {
150
+ this.log(chalk.red(`Failed to copy config or restart container '${containerName}'. Please check Docker status and container name.`));
141
151
  }
142
152
  }
143
153
  }
@@ -0,0 +1,73 @@
1
+ import { Command } from '@oclif/core';
2
+ /**
3
+ * Synthea command for managing synthetic FHIR data generation
4
+ *
5
+ * This command provides subcommands to:
6
+ * - install: Download and install Synthea JAR file
7
+ * - generate: Generate synthetic FHIR data using Synthea
8
+ * - upload: Upload generated FHIR resources to a FHIR server
9
+ * - delete: Clean up generated synthetic data
10
+ * - download: Download pre-generated Synthea datasets
11
+ */
12
+ export default class Synthea extends Command {
13
+ static args: {
14
+ subcommand: import("@oclif/core/interfaces").Arg<string, Record<string, unknown>>;
15
+ };
16
+ static description: string;
17
+ static examples: string[];
18
+ static flags: {
19
+ age: import("@oclif/core/interfaces").OptionFlag<string | undefined, import("@oclif/core/interfaces").CustomOptions>;
20
+ city: import("@oclif/core/interfaces").OptionFlag<string | undefined, import("@oclif/core/interfaces").CustomOptions>;
21
+ covid19: import("@oclif/core/interfaces").BooleanFlag<boolean>;
22
+ covid19_10k: import("@oclif/core/interfaces").BooleanFlag<boolean>;
23
+ covid19_csv: import("@oclif/core/interfaces").BooleanFlag<boolean>;
24
+ covid19_csv_10k: import("@oclif/core/interfaces").BooleanFlag<boolean>;
25
+ 'dry-run': import("@oclif/core/interfaces").BooleanFlag<boolean>;
26
+ endpoint: import("@oclif/core/interfaces").OptionFlag<string, import("@oclif/core/interfaces").CustomOptions>;
27
+ gender: import("@oclif/core/interfaces").OptionFlag<string | undefined, import("@oclif/core/interfaces").CustomOptions>;
28
+ population: import("@oclif/core/interfaces").OptionFlag<number, import("@oclif/core/interfaces").CustomOptions>;
29
+ seed: import("@oclif/core/interfaces").OptionFlag<string | undefined, import("@oclif/core/interfaces").CustomOptions>;
30
+ state: import("@oclif/core/interfaces").OptionFlag<string | undefined, import("@oclif/core/interfaces").CustomOptions>;
31
+ synthea_sample_data_csv_latest: import("@oclif/core/interfaces").BooleanFlag<boolean>;
32
+ synthea_sample_data_fhir_latest: import("@oclif/core/interfaces").BooleanFlag<boolean>;
33
+ synthea_sample_data_fhir_stu3_latest: import("@oclif/core/interfaces").BooleanFlag<boolean>;
34
+ token: import("@oclif/core/interfaces").OptionFlag<string | undefined, import("@oclif/core/interfaces").CustomOptions>;
35
+ workdir: import("@oclif/core/interfaces").OptionFlag<string, import("@oclif/core/interfaces").CustomOptions>;
36
+ };
37
+ /**
38
+ * Main command execution
39
+ * Dispatches to appropriate subcommand handler
40
+ * @returns Promise that resolves when subcommand completes
41
+ */
42
+ run(): Promise<void>;
43
+ /**
44
+ * Delete synthetic data
45
+ * @param flags Command flags including workdir and dry-run
46
+ * @returns Promise that resolves when deletion completes
47
+ */
48
+ private delete;
49
+ /**
50
+ * Download pre-generated Synthea datasets
51
+ * @param flags Command flags including workdir, dataset selections, and dry-run
52
+ * @returns Promise that resolves when download completes
53
+ */
54
+ private download;
55
+ /**
56
+ * Generate synthetic FHIR data
57
+ * @param flags Command flags including population, state, city, gender, age, seed, workdir, and dry-run
58
+ * @returns Promise that resolves when generation completes
59
+ */
60
+ private generate;
61
+ /**
62
+ * Install Synthea JAR file
63
+ * @param flags Command flags including workdir and dry-run
64
+ * @returns Promise that resolves when installation completes
65
+ */
66
+ private install;
67
+ /**
68
+ * Upload FHIR resources to server
69
+ * @param flags Command flags including endpoint, token, workdir, and dry-run
70
+ * @returns Promise that resolves when upload completes
71
+ */
72
+ private upload;
73
+ }
@@ -0,0 +1,579 @@
1
+ import { Args, Command, Flags } from '@oclif/core';
2
+ import chalk from 'chalk';
3
+ import { exec } from 'node:child_process';
4
+ import { createWriteStream, existsSync, mkdirSync, readFileSync, readdirSync, rmSync, statSync } from 'node:fs';
5
+ import { homedir } from 'node:os';
6
+ import { join } from 'node:path';
7
+ import { createInterface } from 'node:readline';
8
+ import { promisify } from 'node:util';
9
+ const execAsync = promisify(exec);
10
+ /**
11
+ * Synthea command for managing synthetic FHIR data generation
12
+ *
13
+ * This command provides subcommands to:
14
+ * - install: Download and install Synthea JAR file
15
+ * - generate: Generate synthetic FHIR data using Synthea
16
+ * - upload: Upload generated FHIR resources to a FHIR server
17
+ * - delete: Clean up generated synthetic data
18
+ * - download: Download pre-generated Synthea datasets
19
+ */
20
+ export default class Synthea extends Command {
21
+ static args = {
22
+ subcommand: Args.string({
23
+ description: 'Subcommand to execute: install, generate, upload, delete, download',
24
+ options: ['install', 'generate', 'upload', 'delete', 'download'],
25
+ required: true,
26
+ }),
27
+ };
28
+ static description = 'Manage Synthea synthetic FHIR data generation';
29
+ static examples = [
30
+ '<%= config.bin %> <%= command.id %> install',
31
+ '<%= config.bin %> <%= command.id %> generate -p 10',
32
+ '<%= config.bin %> <%= command.id %> upload -e http://fhir:8005/baseR4',
33
+ '<%= config.bin %> <%= command.id %> delete',
34
+ '<%= config.bin %> <%= command.id %> download --covid19',
35
+ ];
36
+ static flags = {
37
+ // Generate flags
38
+ age: Flags.string({
39
+ char: 'a',
40
+ description: 'Generate patients with specific age range (e.g., "0-18" for pediatric)',
41
+ }),
42
+ city: Flags.string({
43
+ char: 'c',
44
+ description: 'City for patient generation',
45
+ }),
46
+ // Download flags - various datasets from synthea.mitre.org
47
+ covid19: Flags.boolean({
48
+ description: 'Download COVID-19 dataset (1k patients)',
49
+ }),
50
+ // eslint-disable-next-line camelcase
51
+ covid19_10k: Flags.boolean({
52
+ description: 'Download COVID-19 dataset (10k patients)',
53
+ }),
54
+ // eslint-disable-next-line camelcase
55
+ covid19_csv: Flags.boolean({
56
+ description: 'Download COVID-19 CSV dataset (1k patients)',
57
+ }),
58
+ // eslint-disable-next-line camelcase
59
+ covid19_csv_10k: Flags.boolean({
60
+ description: 'Download COVID-19 CSV dataset (10k patients)',
61
+ }),
62
+ // Common flags
63
+ 'dry-run': Flags.boolean({
64
+ default: false,
65
+ description: 'Show what changes would be made without actually making them',
66
+ }),
67
+ // Upload flags
68
+ endpoint: Flags.string({
69
+ char: 'e',
70
+ default: 'http://fhir:8005/baseR4',
71
+ description: 'FHIR server endpoint URL',
72
+ }),
73
+ gender: Flags.string({
74
+ char: 'g',
75
+ description: 'Generate patients of specific gender (M or F)',
76
+ options: ['M', 'F'],
77
+ }),
78
+ population: Flags.integer({
79
+ char: 'p',
80
+ default: 1,
81
+ description: 'Number of patients to generate',
82
+ }),
83
+ seed: Flags.string({
84
+ char: 's',
85
+ description: 'Random seed for reproducible generation',
86
+ }),
87
+ state: Flags.string({
88
+ description: 'State for patient generation (default: Massachusetts)',
89
+ }),
90
+ // eslint-disable-next-line camelcase
91
+ synthea_sample_data_csv_latest: Flags.boolean({
92
+ description: 'Download latest CSV sample data',
93
+ }),
94
+ // eslint-disable-next-line camelcase
95
+ synthea_sample_data_fhir_latest: Flags.boolean({
96
+ description: 'Download latest FHIR sample data',
97
+ }),
98
+ // eslint-disable-next-line camelcase
99
+ synthea_sample_data_fhir_stu3_latest: Flags.boolean({
100
+ description: 'Download latest FHIR STU3 sample data',
101
+ }),
102
+ token: Flags.string({
103
+ char: 't',
104
+ description: 'Bearer token for FHIR server authentication',
105
+ }),
106
+ workdir: Flags.string({
107
+ char: 'w',
108
+ default: join(homedir(), 'dhti'),
109
+ description: 'Working directory for Synthea files',
110
+ }),
111
+ };
112
+ /**
113
+ * Main command execution
114
+ * Dispatches to appropriate subcommand handler
115
+ * @returns Promise that resolves when subcommand completes
116
+ */
117
+ async run() {
118
+ const { args, flags } = await this.parse(Synthea);
119
+ // Execute appropriate subcommand
120
+ switch (args.subcommand) {
121
+ case 'install': {
122
+ await this.install(flags);
123
+ break;
124
+ }
125
+ case 'generate': {
126
+ await this.generate(flags);
127
+ break;
128
+ }
129
+ case 'upload': {
130
+ await this.upload(flags);
131
+ break;
132
+ }
133
+ case 'delete': {
134
+ await this.delete(flags);
135
+ break;
136
+ }
137
+ case 'download': {
138
+ await this.download(flags);
139
+ break;
140
+ }
141
+ default: {
142
+ this.error(`Unknown subcommand: ${args.subcommand}`);
143
+ }
144
+ }
145
+ }
146
+ /**
147
+ * Delete synthetic data
148
+ * @param flags Command flags including workdir and dry-run
149
+ * @returns Promise that resolves when deletion completes
150
+ */
151
+ async delete(flags) {
152
+ const dataDir = join(flags.workdir, 'synthea_data');
153
+ if (flags['dry-run']) {
154
+ console.log(chalk.yellow('[DRY RUN] Data deletion simulation'));
155
+ console.log(chalk.cyan(` Data directory: ${dataDir}`));
156
+ console.log(chalk.green('[DRY RUN] Would delete all files in synthea_data directory'));
157
+ return;
158
+ }
159
+ // Check if directory exists
160
+ if (!existsSync(dataDir)) {
161
+ console.log(chalk.yellow(`⚠ Directory does not exist: ${dataDir}`));
162
+ return;
163
+ }
164
+ // Count files
165
+ let fileCount = 0;
166
+ const countFiles = (dir) => {
167
+ const items = readdirSync(dir);
168
+ for (const item of items) {
169
+ const fullPath = join(dir, item);
170
+ const stat = statSync(fullPath);
171
+ if (stat.isDirectory()) {
172
+ countFiles(fullPath);
173
+ }
174
+ else {
175
+ fileCount++;
176
+ }
177
+ }
178
+ };
179
+ countFiles(dataDir);
180
+ console.log(chalk.yellow(`⚠ About to delete ${fileCount} files from: ${dataDir}`));
181
+ // Confirmation prompt
182
+ const rl = createInterface({
183
+ input: process.stdin,
184
+ output: process.stdout,
185
+ });
186
+ const answer = await new Promise((resolve) => {
187
+ rl.question(chalk.red('Are you sure you want to delete all data? (yes/N): '), resolve);
188
+ });
189
+ rl.close();
190
+ if (answer.toLowerCase() !== 'yes') {
191
+ console.log(chalk.blue('Deletion cancelled.'));
192
+ return;
193
+ }
194
+ // Delete directory
195
+ try {
196
+ rmSync(dataDir, { force: true, recursive: true });
197
+ console.log(chalk.green(`✓ Deleted: ${dataDir}`));
198
+ }
199
+ catch (error) {
200
+ this.error(`Failed to delete directory: ${error instanceof Error ? error.message : String(error)}`);
201
+ }
202
+ }
203
+ /**
204
+ * Download pre-generated Synthea datasets
205
+ * @param flags Command flags including workdir, dataset selections, and dry-run
206
+ * @returns Promise that resolves when download completes
207
+ */
208
+ async download(flags) {
209
+ const tmpDir = '/tmp/synthea_downloads';
210
+ const outputDir = join(flags.workdir, 'synthea_data');
211
+ // Map of dataset flags to download URLs
212
+ // eslint-disable-next-line camelcase
213
+ const datasets = {
214
+ covid19: {
215
+ file: 'covid19.zip',
216
+ url: 'https://synthea.mitre.org/downloads/covid19_1k.zip',
217
+ },
218
+ // eslint-disable-next-line camelcase
219
+ covid19_10k: {
220
+ file: 'covid19_10k.zip',
221
+ url: 'https://synthea.mitre.org/downloads/covid19_10k.zip',
222
+ },
223
+ // eslint-disable-next-line camelcase
224
+ covid19_csv: {
225
+ file: 'covid19_csv.zip',
226
+ url: 'https://synthea.mitre.org/downloads/covid19_csv_1k.zip',
227
+ },
228
+ // eslint-disable-next-line camelcase
229
+ covid19_csv_10k: {
230
+ file: 'covid19_csv_10k.zip',
231
+ url: 'https://synthea.mitre.org/downloads/covid19_csv_10k.zip',
232
+ },
233
+ // eslint-disable-next-line camelcase
234
+ synthea_sample_data_csv_latest: {
235
+ file: 'synthea_sample_data_csv_latest.zip',
236
+ url: 'https://synthea.mitre.org/downloads/synthea_sample_data_csv_latest.zip',
237
+ },
238
+ // eslint-disable-next-line camelcase
239
+ synthea_sample_data_fhir_latest: {
240
+ file: 'synthea_sample_data_fhir_latest.zip',
241
+ url: 'https://synthea.mitre.org/downloads/synthea_sample_data_fhir_latest.zip',
242
+ },
243
+ // eslint-disable-next-line camelcase
244
+ synthea_sample_data_fhir_stu3_latest: {
245
+ file: 'synthea_sample_data_fhir_stu3_latest.zip',
246
+ url: 'https://synthea.mitre.org/downloads/synthea_sample_data_fhir_stu3_latest.zip',
247
+ },
248
+ };
249
+ // Find which dataset to download
250
+ const selectedDatasets = Object.keys(datasets).filter((key) => flags[key]);
251
+ if (selectedDatasets.length === 0) {
252
+ if (flags['dry-run']) {
253
+ console.log(chalk.yellow('[DRY RUN] Dataset download simulation'));
254
+ console.log(chalk.yellow('⚠ No dataset selected. Use one of the following flags:'));
255
+ }
256
+ else {
257
+ console.log(chalk.yellow('⚠ No dataset selected. Use one of the following flags:'));
258
+ }
259
+ for (const [key] of Object.entries(datasets)) {
260
+ console.log(chalk.cyan(` --${key}`));
261
+ }
262
+ return;
263
+ }
264
+ if (flags['dry-run']) {
265
+ console.log(chalk.yellow('[DRY RUN] Dataset download simulation'));
266
+ console.log(chalk.cyan(` Temporary directory: ${tmpDir}`));
267
+ console.log(chalk.cyan(` Output directory: ${outputDir}`));
268
+ for (const dataset of selectedDatasets) {
269
+ console.log(chalk.cyan(` Dataset: ${dataset}`));
270
+ console.log(chalk.cyan(` URL: ${datasets[dataset].url}`));
271
+ }
272
+ console.log(chalk.green('[DRY RUN] Would download and extract selected datasets'));
273
+ return;
274
+ }
275
+ // Create directories
276
+ if (!existsSync(tmpDir)) {
277
+ mkdirSync(tmpDir, { recursive: true });
278
+ }
279
+ if (!existsSync(outputDir)) {
280
+ mkdirSync(outputDir, { recursive: true });
281
+ }
282
+ // Download and extract each selected dataset
283
+ // Note: Sequential processing is intentional to avoid overwhelming the server
284
+ // eslint-disable-next-line no-await-in-loop
285
+ for (const datasetKey of selectedDatasets) {
286
+ const dataset = datasets[datasetKey];
287
+ const downloadPath = join(tmpDir, dataset.file);
288
+ console.log(chalk.blue(`\nDownloading ${datasetKey}...`));
289
+ console.log(chalk.gray(`URL: ${dataset.url}`));
290
+ try {
291
+ // Download file
292
+ // eslint-disable-next-line no-await-in-loop
293
+ const response = await fetch(dataset.url);
294
+ if (!response.ok) {
295
+ throw new Error(`Failed to download: ${response.statusText}`);
296
+ }
297
+ const fileStream = createWriteStream(downloadPath);
298
+ // @ts-expect-error - ReadableStream types from fetch
299
+ const reader = response.body.getReader();
300
+ let downloadedBytes = 0;
301
+ const contentLength = Number.parseInt(response.headers.get('content-length') || '0', 10);
302
+ // eslint-disable-next-line no-constant-condition
303
+ while (true) {
304
+ // eslint-disable-next-line no-await-in-loop
305
+ const { done, value } = await reader.read();
306
+ if (done)
307
+ break;
308
+ downloadedBytes += value.length;
309
+ fileStream.write(value);
310
+ if (contentLength > 0) {
311
+ const progress = Math.round((downloadedBytes / contentLength) * 100);
312
+ process.stdout.write(`\rDownloading: ${progress}%`);
313
+ }
314
+ }
315
+ fileStream.end();
316
+ console.log('\n' + chalk.green(`✓ Downloaded ${dataset.file}`));
317
+ // Extract ZIP file
318
+ console.log(chalk.blue('Extracting...'));
319
+ // eslint-disable-next-line no-await-in-loop
320
+ await execAsync(`unzip -o "${downloadPath}" -d "${outputDir}"`);
321
+ console.log(chalk.green(`✓ Extracted to ${outputDir}`));
322
+ }
323
+ catch (error) {
324
+ console.log(chalk.red(`✗ Failed to download ${datasetKey}: ${error instanceof Error ? error.message : String(error)}`));
325
+ }
326
+ }
327
+ console.log(chalk.green(`\n✓ Download complete. Data available at: ${outputDir}`));
328
+ }
329
+ /**
330
+ * Generate synthetic FHIR data
331
+ * @param flags Command flags including population, state, city, gender, age, seed, workdir, and dry-run
332
+ * @returns Promise that resolves when generation completes
333
+ */
334
+ async generate(flags) {
335
+ const syntheaDir = join(flags.workdir, 'synthea');
336
+ const jarPath = join(syntheaDir, 'synthea-with-dependencies.jar');
337
+ const outputDir = join(flags.workdir, 'synthea_data');
338
+ if (flags['dry-run']) {
339
+ console.log(chalk.yellow('[DRY RUN] Synthetic data generation simulation'));
340
+ console.log(chalk.cyan(` Synthea JAR: ${jarPath}`));
341
+ console.log(chalk.cyan(` Output directory: ${outputDir}`));
342
+ console.log(chalk.cyan(` Population: ${flags.population} patients`));
343
+ if (flags.state)
344
+ console.log(chalk.cyan(` State: ${flags.state}`));
345
+ if (flags.city)
346
+ console.log(chalk.cyan(` City: ${flags.city}`));
347
+ if (flags.gender)
348
+ console.log(chalk.cyan(` Gender: ${flags.gender}`));
349
+ if (flags.age)
350
+ console.log(chalk.cyan(` Age range: ${flags.age}`));
351
+ if (flags.seed)
352
+ console.log(chalk.cyan(` Random seed: ${flags.seed}`));
353
+ console.log(chalk.green('[DRY RUN] Would create output directory'));
354
+ console.log(chalk.green('[DRY RUN] Would execute Synthea JAR to generate data'));
355
+ return;
356
+ }
357
+ // Check if JAR exists
358
+ if (!existsSync(jarPath)) {
359
+ console.log(chalk.red(`✗ Synthea JAR not found at: ${jarPath}\nRun 'dhti-cli synthea install' first.`));
360
+ this.exit(1);
361
+ }
362
+ // Create output directory
363
+ if (!existsSync(outputDir)) {
364
+ mkdirSync(outputDir, { recursive: true });
365
+ console.log(chalk.green(`✓ Created output directory: ${outputDir}`));
366
+ }
367
+ // Build Synthea command
368
+ const javaArgs = ['-jar', jarPath];
369
+ // Add optional flags
370
+ if (flags.population)
371
+ javaArgs.push('-p', String(flags.population));
372
+ if (flags.state)
373
+ javaArgs.push('-s', flags.state);
374
+ if (flags.city)
375
+ javaArgs.push('-c', flags.city);
376
+ if (flags.gender)
377
+ javaArgs.push('-g', flags.gender);
378
+ if (flags.age)
379
+ javaArgs.push('-a', flags.age);
380
+ if (flags.seed)
381
+ javaArgs.push('--seed', flags.seed);
382
+ // Set output directory
383
+ javaArgs.push('--exporter.baseDirectory', outputDir);
384
+ console.log(chalk.blue('Generating synthetic data...'));
385
+ console.log(chalk.gray(`Command: java ${javaArgs.join(' ')}`));
386
+ try {
387
+ const { stderr, stdout } = await execAsync(`java ${javaArgs.join(' ')}`, {
388
+ cwd: syntheaDir,
389
+ maxBuffer: 10 * 1024 * 1024, // 10MB buffer
390
+ });
391
+ if (stdout)
392
+ console.log(stdout);
393
+ if (stderr)
394
+ console.error(chalk.yellow(stderr));
395
+ console.log(chalk.green(`✓ Generated synthetic data in: ${outputDir}`));
396
+ // Show FHIR output location
397
+ const fhirDir = join(outputDir, 'fhir');
398
+ if (existsSync(fhirDir)) {
399
+ const files = readdirSync(fhirDir);
400
+ console.log(chalk.cyan(`\nGenerated ${files.length} FHIR resource files`));
401
+ console.log(chalk.white(`FHIR files location: ${fhirDir}`));
402
+ }
403
+ }
404
+ catch (error) {
405
+ this.error(`Failed to generate synthetic data: ${error instanceof Error ? error.message : String(error)}`);
406
+ }
407
+ }
408
+ /**
409
+ * Install Synthea JAR file
410
+ * @param flags Command flags including workdir and dry-run
411
+ * @returns Promise that resolves when installation completes
412
+ */
413
+ async install(flags) {
414
+ const syntheaDir = join(flags.workdir, 'synthea');
415
+ const jarPath = join(syntheaDir, 'synthea-with-dependencies.jar');
416
+ if (flags['dry-run']) {
417
+ console.log(chalk.yellow('[DRY RUN] Synthea installation simulation'));
418
+ console.log(chalk.cyan(` Working directory: ${flags.workdir}`));
419
+ console.log(chalk.cyan(` Synthea directory: ${syntheaDir}`));
420
+ console.log(chalk.cyan(` JAR path: ${jarPath}`));
421
+ console.log(chalk.green('[DRY RUN] Would create synthea directory'));
422
+ console.log(chalk.green('[DRY RUN] Would download synthea-with-dependencies.jar'));
423
+ console.log(chalk.green('[DRY RUN] Would display usage instructions'));
424
+ return;
425
+ }
426
+ // Create synthea directory
427
+ if (!existsSync(syntheaDir)) {
428
+ mkdirSync(syntheaDir, { recursive: true });
429
+ console.log(chalk.green(`✓ Created directory: ${syntheaDir}`));
430
+ }
431
+ // Check if JAR already exists
432
+ if (existsSync(jarPath)) {
433
+ console.log(chalk.yellow(`⚠ Synthea JAR already exists at: ${jarPath}`));
434
+ const rl = createInterface({
435
+ input: process.stdin,
436
+ output: process.stdout,
437
+ });
438
+ const answer = await new Promise((resolve) => {
439
+ rl.question('Overwrite existing file? (y/N): ', resolve);
440
+ });
441
+ rl.close();
442
+ if (answer.toLowerCase() !== 'y') {
443
+ console.log(chalk.blue('Installation cancelled.'));
444
+ return;
445
+ }
446
+ }
447
+ // Download synthea-with-dependencies.jar
448
+ console.log(chalk.blue('Downloading synthea-with-dependencies.jar...'));
449
+ const downloadUrl = 'https://github.com/synthetichealth/synthea/releases/download/master-branch-latest/synthea-with-dependencies.jar';
450
+ try {
451
+ const response = await fetch(downloadUrl);
452
+ if (!response.ok) {
453
+ throw new Error(`Failed to download: ${response.statusText}`);
454
+ }
455
+ const fileStream = createWriteStream(jarPath);
456
+ // @ts-expect-error - ReadableStream types from fetch
457
+ const reader = response.body.getReader();
458
+ let downloadedBytes = 0;
459
+ const contentLength = Number.parseInt(response.headers.get('content-length') || '0', 10);
460
+ // eslint-disable-next-line no-constant-condition
461
+ while (true) {
462
+ // eslint-disable-next-line no-await-in-loop
463
+ const { done, value } = await reader.read();
464
+ if (done)
465
+ break;
466
+ downloadedBytes += value.length;
467
+ fileStream.write(value);
468
+ if (contentLength > 0) {
469
+ const progress = Math.round((downloadedBytes / contentLength) * 100);
470
+ process.stdout.write(`\rDownloading: ${progress}%`);
471
+ }
472
+ }
473
+ fileStream.end();
474
+ console.log('\n' + chalk.green(`✓ Downloaded synthea-with-dependencies.jar to ${jarPath}`));
475
+ }
476
+ catch (error) {
477
+ this.error(`Failed to download Synthea JAR: ${error instanceof Error ? error.message : String(error)}`);
478
+ }
479
+ // Display usage instructions
480
+ console.log(chalk.cyan('\n' + '='.repeat(60)));
481
+ console.log(chalk.bold.green('Synthea Installation Complete!'));
482
+ console.log(chalk.cyan('='.repeat(60)));
483
+ console.log(chalk.white('\nUsage Instructions:'));
484
+ console.log(chalk.white('-------------------'));
485
+ console.log(chalk.white('To generate synthetic data:'));
486
+ console.log(chalk.yellow(` ${this.config.bin} synthea generate -p 10`));
487
+ console.log(chalk.white('\nTo upload data to FHIR server:'));
488
+ console.log(chalk.yellow(` ${this.config.bin} synthea upload -e http://fhir:8005/baseR4`));
489
+ console.log(chalk.white('\nManual usage:'));
490
+ console.log(chalk.yellow(` cd ${syntheaDir}`));
491
+ console.log(chalk.yellow(' java -jar synthea-with-dependencies.jar -p 10'));
492
+ console.log(chalk.white('\nFor more options, see:'));
493
+ console.log(chalk.blue(' https://github.com/synthetichealth/synthea/wiki/Basic-Setup-and-Running'));
494
+ console.log(chalk.cyan('='.repeat(60) + '\n'));
495
+ }
496
+ /**
497
+ * Upload FHIR resources to server
498
+ * @param flags Command flags including endpoint, token, workdir, and dry-run
499
+ * @returns Promise that resolves when upload completes
500
+ */
501
+ async upload(flags) {
502
+ const fhirDir = join(flags.workdir, 'synthea_data', 'fhir');
503
+ if (flags['dry-run']) {
504
+ console.log(chalk.yellow('[DRY RUN] FHIR upload simulation'));
505
+ console.log(chalk.cyan(` FHIR directory: ${fhirDir}`));
506
+ console.log(chalk.cyan(` Endpoint: ${flags.endpoint}`));
507
+ if (flags.token)
508
+ console.log(chalk.cyan(' Authentication: Bearer token'));
509
+ console.log(chalk.green('[DRY RUN] Would read FHIR resources from directory'));
510
+ console.log(chalk.green('[DRY RUN] Would upload each resource to FHIR server'));
511
+ return;
512
+ }
513
+ // Check if FHIR directory exists
514
+ if (!existsSync(fhirDir)) {
515
+ console.log(chalk.red(`✗ FHIR data directory not found: ${fhirDir}\nRun 'dhti-cli synthea generate' first.`));
516
+ this.exit(1);
517
+ }
518
+ // Read all JSON files from FHIR directory
519
+ const files = readdirSync(fhirDir).filter((f) => f.endsWith('.json'));
520
+ if (files.length === 0) {
521
+ console.log(chalk.yellow('⚠ No FHIR JSON files found in directory'));
522
+ return;
523
+ }
524
+ console.log(chalk.blue(`Found ${files.length} FHIR resource files`));
525
+ // Prepare headers
526
+ const headers = {
527
+ 'Content-Type': 'application/fhir+json',
528
+ };
529
+ if (flags.token) {
530
+ headers.Authorization = `Bearer ${flags.token}`;
531
+ }
532
+ let successCount = 0;
533
+ let failCount = 0;
534
+ // Upload each file
535
+ // Note: Sequential processing is intentional to maintain order and avoid overwhelming server
536
+ // eslint-disable-next-line no-await-in-loop
537
+ for (const [index, file] of files.entries()) {
538
+ const filePath = join(fhirDir, file);
539
+ console.log(chalk.gray(`[${index + 1}/${files.length}] Uploading ${file}...`));
540
+ try {
541
+ const content = readFileSync(filePath, 'utf8');
542
+ const resource = JSON.parse(content);
543
+ // Determine resource type and construct URL
544
+ const { resourceType } = resource;
545
+ if (!resourceType) {
546
+ console.log(chalk.yellow(` ⚠ Skipping ${file} - no resourceType`));
547
+ continue;
548
+ }
549
+ const url = `${flags.endpoint}/${resourceType}`;
550
+ // eslint-disable-next-line no-await-in-loop
551
+ const response = await fetch(url, {
552
+ body: content,
553
+ headers,
554
+ method: 'POST',
555
+ });
556
+ if (response.ok) {
557
+ successCount++;
558
+ console.log(chalk.green(` ✓ Uploaded ${file}`));
559
+ }
560
+ else {
561
+ failCount++;
562
+ console.log(chalk.red(` ✗ Failed to upload ${file}: ${response.status} ${response.statusText}`));
563
+ }
564
+ }
565
+ catch (error) {
566
+ failCount++;
567
+ console.log(chalk.red(` ✗ Error uploading ${file}: ${error instanceof Error ? error.message : String(error)}`));
568
+ }
569
+ }
570
+ // Summary
571
+ console.log(chalk.cyan('\n' + '='.repeat(60)));
572
+ console.log(chalk.bold.white('Upload Summary'));
573
+ console.log(chalk.cyan('='.repeat(60)));
574
+ console.log(chalk.green(` ✓ Successful: ${successCount}`));
575
+ console.log(chalk.red(` ✗ Failed: ${failCount}`));
576
+ console.log(chalk.white(` Total: ${files.length}`));
577
+ console.log(chalk.cyan('='.repeat(60) + '\n'));
578
+ }
579
+ }
@@ -12,7 +12,6 @@ services:
12
12
  - "80:80"
13
13
  - "9001:80"
14
14
 
15
-
16
15
  frontend:
17
16
  image: openmrs/openmrs-reference-application-3-frontend:3.0.0-beta.17
18
17
  # image: openmrs/openmrs-reference-application-3-frontend:${TAG:-3.0.0-beta.17} # dev3, qa, demo, 3.0.0-beta.18
@@ -59,7 +58,7 @@ services:
59
58
  restart: "unless-stopped"
60
59
  command: "mysqld --character-set-server=utf8 --collation-server=utf8_general_ci"
61
60
  healthcheck:
62
- test: "mysql --user=${OMRS_DB_USER:-openmrs} --password=${OMRS_DB_PASSWORD:-openmrs} --execute \"SHOW DATABASES;\""
61
+ test: 'mysql --user=${OMRS_DB_USER:-openmrs} --password=${OMRS_DB_PASSWORD:-openmrs} --execute "SHOW DATABASES;"'
63
62
  interval: 3s
64
63
  timeout: 1s
65
64
  retries: 5
@@ -78,11 +77,15 @@ services:
78
77
  - "8001:8001"
79
78
  restart: "unless-stopped"
80
79
  environment:
81
- - OLLAMA_SERVER_URL==http://ollama:11434
80
+ - OLLAMA_SERVER_URL=http://ollama:11434
82
81
  - OLLAMA_WEBUI=http://ollama-webui:8080
83
82
  - LANGFUSE_HOST=http://langfuse:3000
84
- - LANGFUSE_PUBLIC_KEY=pk-lf-abcd
85
- - LANGFUSE_SECRET_KEY=sk-lf-abcd
83
+ - LANGFUSE_PUBLIC_KEY=${LANGFUSE_PUBLIC_KEY:-pk-lf-abcd}
84
+ - LANGFUSE_SECRET_KEY=${LANGFUSE_SECRET_KEY:-sk-lf-abcd}
85
+ - GOOGLE_API_KEY=${GOOGLE_API_KEY:-google-api-key}
86
+ - OPENAI_API_KEY=${OPENAI_API_KEY:-openai-api-key}
87
+ - OPENAI_API_BASE=${OPENAI_API_BASE:-https://openrouter.ai/api/v1}
88
+ - OPENROUTER_API_KEY=${OPENROUTER_API_KEY:-openrouter-api-key}
86
89
 
87
90
  ollama:
88
91
  image: ollama/ollama:latest
@@ -105,7 +108,7 @@ services:
105
108
  ports:
106
109
  - 8080:8080
107
110
  environment:
108
- - '/ollama/api=http://ollama:11434/api'
111
+ - "/ollama/api=http://ollama:11434/api"
109
112
  extra_hosts:
110
113
  - host.docker.internal:host-gateway
111
114
  restart: unless-stopped
@@ -118,19 +121,19 @@ services:
118
121
  depends_on:
119
122
  - postgres-db
120
123
  environment:
121
- - "spring.datasource.url=jdbc:postgresql://postgres-db:5432/postgres"
122
- - "spring.datasource.username=postgres"
123
- - "spring.datasource.password=postgres"
124
- - "spring.datasource.driverClassName=org.postgresql.Driver"
125
- - "spring.jpa.properties.hibernate.dialect=ca.uhn.fhir.jpa.model.dialect.HapiFhirPostgres94Dialect"
126
- - "hapi.fhir.fhir_version=R4"
127
- - "hapi.fhir.cors.allowed-origins=*"
128
- - "hapi.fhir.cors.allowCredentials=true"
129
- - "hapi.fhir.bulkdata.enabled=true"
130
- - "hapi.fhir.bulk_export_enabled=true"
131
- - "hapi.fhir.bulk_import_enabled=true"
132
- - "hapi.fhir.enforce_referential_integrity_on_write=false"
133
- - "hapi.fhir.enforce_referential_integrity_on_delete=false"
124
+ - "spring.datasource.url=jdbc:postgresql://postgres-db:5432/postgres"
125
+ - "spring.datasource.username=postgres"
126
+ - "spring.datasource.password=postgres"
127
+ - "spring.datasource.driverClassName=org.postgresql.Driver"
128
+ - "spring.jpa.properties.hibernate.dialect=ca.uhn.fhir.jpa.model.dialect.HapiFhirPostgres94Dialect"
129
+ - "hapi.fhir.fhir_version=R4"
130
+ - "hapi.fhir.cors.allowed-origins=*"
131
+ - "hapi.fhir.cors.allowCredentials=true"
132
+ - "hapi.fhir.bulkdata.enabled=true"
133
+ - "hapi.fhir.bulk_export_enabled=true"
134
+ - "hapi.fhir.bulk_import_enabled=true"
135
+ - "hapi.fhir.enforce_referential_integrity_on_write=false"
136
+ - "hapi.fhir.enforce_referential_integrity_on_delete=false"
134
137
 
135
138
  mcp-fhir:
136
139
  image: beapen/fhir-mcp-server:4.0
@@ -264,7 +267,7 @@ services:
264
267
  redis:
265
268
  condition: service_healthy
266
269
  ports:
267
- - '8103:8103'
270
+ - "8103:8103"
268
271
  volumes:
269
272
  # Conditionally define a volume for a `medplum.config.json` if one is specified by the MEDPLUM_CONFIG_PATH env var
270
273
  - ${MEDPLUM_CONFIG_PATH:-./medplum.config.json}:/usr/src/medplum/packages/server/medplum.config.json
@@ -280,43 +283,43 @@ services:
280
283
  "
281
284
  environment:
282
285
  MEDPLUM_PORT: 8103
283
- MEDPLUM_BASE_URL: 'http://localhost:8103/'
284
- MEDPLUM_APP_BASE_URL: 'http://localhost:3103/'
285
- MEDPLUM_STORAGE_BASE_URL: 'http://localhost:8103/storage/'
286
+ MEDPLUM_BASE_URL: "http://localhost:8103/"
287
+ MEDPLUM_APP_BASE_URL: "http://localhost:3103/"
288
+ MEDPLUM_STORAGE_BASE_URL: "http://localhost:8103/storage/"
286
289
 
287
- MEDPLUM_DATABASE_HOST: 'postgres-db'
290
+ MEDPLUM_DATABASE_HOST: "postgres-db"
288
291
  MEDPLUM_DATABASE_PORT: 5432
289
- MEDPLUM_DATABASE_DBNAME: 'postgres'
290
- MEDPLUM_DATABASE_USERNAME: 'postgres'
291
- MEDPLUM_DATABASE_PASSWORD: 'postgres'
292
+ MEDPLUM_DATABASE_DBNAME: "postgres"
293
+ MEDPLUM_DATABASE_USERNAME: "postgres"
294
+ MEDPLUM_DATABASE_PASSWORD: "postgres"
292
295
 
293
- MEDPLUM_REDIS_HOST: 'redis'
296
+ MEDPLUM_REDIS_HOST: "redis"
294
297
  MEDPLUM_REDIS_PORT: 6379
295
298
  # MEDPLUM_REDIS_PASSWORD: 'medplum'
296
299
 
297
- MEDPLUM_BINARY_STORAGE: 'file:./binary/'
300
+ MEDPLUM_BINARY_STORAGE: "file:./binary/"
298
301
  MEDPLUM_SUPPORT_EMAIL: '\"Medplum\" <support@medplum.com>'
299
- MEDPLUM_GOOGLE_CLIENT_ID: '397236612778-c0b5tnjv98frbo1tfuuha5vkme3cmq4s.apps.googleusercontent.com'
300
- MEDPLUM_GOOGLE_CLIENT_SECRET: ''
301
- MEDPLUM_RECAPTCHA_SITE_KEY: '6LfHdsYdAAAAAC0uLnnRrDrhcXnziiUwKd8VtLNq'
302
- MEDPLUM_RECAPTCHA_SECRET_KEY: '6LfHdsYdAAAAAH9dN154jbJ3zpQife3xaiTvPChL'
303
- MEDPLUM_MAX_JSON_SIZE: '1mb'
304
- MEDPLUM_MAX_BATCH_SIZE: '50mb'
305
- MEDPLUM_BOT_LAMBDA_ROLE_ARN: ''
306
- MEDPLUM_BOT_LAMBDA_LAYER_NAME: 'medplum-bot-layer'
307
- MEDPLUM_VM_CONTEXT_BOTS_ENABLED: 'true'
308
- MEDPLUM_DEFAULT_BOT_RUNTIME_VERSION: 'vmcontext'
309
- MEDPLUM_ALLOWED_ORIGINS: '*'
310
- MEDPLUM_INTROSPECTION_ENABLED: 'true'
302
+ MEDPLUM_GOOGLE_CLIENT_ID: "397236612778-c0b5tnjv98frbo1tfuuha5vkme3cmq4s.apps.googleusercontent.com"
303
+ MEDPLUM_GOOGLE_CLIENT_SECRET: ""
304
+ MEDPLUM_RECAPTCHA_SITE_KEY: "6LfHdsYdAAAAAC0uLnnRrDrhcXnziiUwKd8VtLNq"
305
+ MEDPLUM_RECAPTCHA_SECRET_KEY: "6LfHdsYdAAAAAH9dN154jbJ3zpQife3xaiTvPChL"
306
+ MEDPLUM_MAX_JSON_SIZE: "1mb"
307
+ MEDPLUM_MAX_BATCH_SIZE: "50mb"
308
+ MEDPLUM_BOT_LAMBDA_ROLE_ARN: ""
309
+ MEDPLUM_BOT_LAMBDA_LAYER_NAME: "medplum-bot-layer"
310
+ MEDPLUM_VM_CONTEXT_BOTS_ENABLED: "true"
311
+ MEDPLUM_DEFAULT_BOT_RUNTIME_VERSION: "vmcontext"
312
+ MEDPLUM_ALLOWED_ORIGINS: "*"
313
+ MEDPLUM_INTROSPECTION_ENABLED: "true"
311
314
  MEDPLUM_SHUTDOWN_TIMEOUT_MILLISECONDS: 30000
312
315
 
313
316
  healthcheck:
314
317
  test:
315
318
  # We use Node's fetch for healthcheck because this image doesn't have a curl or wget installed
316
319
  [
317
- 'CMD',
318
- 'node',
319
- '-e',
320
+ "CMD",
321
+ "node",
322
+ "-e",
320
323
  'fetch("http://localhost:8103/healthcheck").then(r => r.json()).then(console.log).catch(() => { process.exit(1); })',
321
324
  ]
322
325
  interval: 30s
@@ -331,9 +334,9 @@ services:
331
334
  # medplum-server:
332
335
  # condition: service_healthy
333
336
  ports:
334
- - '3103:3000'
337
+ - "3103:3000"
335
338
  healthcheck:
336
- test: ['CMD', 'curl', '-f', 'http://localhost:3103']
339
+ test: ["CMD", "curl", "-f", "http://localhost:3103"]
337
340
  interval: 10s
338
341
  timeout: 5s
339
342
  retries: 5
@@ -362,4 +365,4 @@ volumes:
362
365
  ollama-code: ~
363
366
  ollama-root: ~
364
367
  ollama-webui: ~
365
- mcpx-config: ~
368
+ mcpx-config: ~
@@ -482,6 +482,173 @@
482
482
  "mimic.js"
483
483
  ]
484
484
  },
485
+ "synthea": {
486
+ "aliases": [],
487
+ "args": {
488
+ "subcommand": {
489
+ "description": "Subcommand to execute: install, generate, upload, delete, download",
490
+ "name": "subcommand",
491
+ "options": [
492
+ "install",
493
+ "generate",
494
+ "upload",
495
+ "delete",
496
+ "download"
497
+ ],
498
+ "required": true
499
+ }
500
+ },
501
+ "description": "Manage Synthea synthetic FHIR data generation",
502
+ "examples": [
503
+ "<%= config.bin %> <%= command.id %> install",
504
+ "<%= config.bin %> <%= command.id %> generate -p 10",
505
+ "<%= config.bin %> <%= command.id %> upload -e http://fhir:8005/baseR4",
506
+ "<%= config.bin %> <%= command.id %> delete",
507
+ "<%= config.bin %> <%= command.id %> download --covid19"
508
+ ],
509
+ "flags": {
510
+ "age": {
511
+ "char": "a",
512
+ "description": "Generate patients with specific age range (e.g., \"0-18\" for pediatric)",
513
+ "name": "age",
514
+ "hasDynamicHelp": false,
515
+ "multiple": false,
516
+ "type": "option"
517
+ },
518
+ "city": {
519
+ "char": "c",
520
+ "description": "City for patient generation",
521
+ "name": "city",
522
+ "hasDynamicHelp": false,
523
+ "multiple": false,
524
+ "type": "option"
525
+ },
526
+ "covid19": {
527
+ "description": "Download COVID-19 dataset (1k patients)",
528
+ "name": "covid19",
529
+ "allowNo": false,
530
+ "type": "boolean"
531
+ },
532
+ "covid19_10k": {
533
+ "description": "Download COVID-19 dataset (10k patients)",
534
+ "name": "covid19_10k",
535
+ "allowNo": false,
536
+ "type": "boolean"
537
+ },
538
+ "covid19_csv": {
539
+ "description": "Download COVID-19 CSV dataset (1k patients)",
540
+ "name": "covid19_csv",
541
+ "allowNo": false,
542
+ "type": "boolean"
543
+ },
544
+ "covid19_csv_10k": {
545
+ "description": "Download COVID-19 CSV dataset (10k patients)",
546
+ "name": "covid19_csv_10k",
547
+ "allowNo": false,
548
+ "type": "boolean"
549
+ },
550
+ "dry-run": {
551
+ "description": "Show what changes would be made without actually making them",
552
+ "name": "dry-run",
553
+ "allowNo": false,
554
+ "type": "boolean"
555
+ },
556
+ "endpoint": {
557
+ "char": "e",
558
+ "description": "FHIR server endpoint URL",
559
+ "name": "endpoint",
560
+ "default": "http://fhir:8005/baseR4",
561
+ "hasDynamicHelp": false,
562
+ "multiple": false,
563
+ "type": "option"
564
+ },
565
+ "gender": {
566
+ "char": "g",
567
+ "description": "Generate patients of specific gender (M or F)",
568
+ "name": "gender",
569
+ "hasDynamicHelp": false,
570
+ "multiple": false,
571
+ "options": [
572
+ "M",
573
+ "F"
574
+ ],
575
+ "type": "option"
576
+ },
577
+ "population": {
578
+ "char": "p",
579
+ "description": "Number of patients to generate",
580
+ "name": "population",
581
+ "default": 1,
582
+ "hasDynamicHelp": false,
583
+ "multiple": false,
584
+ "type": "option"
585
+ },
586
+ "seed": {
587
+ "char": "s",
588
+ "description": "Random seed for reproducible generation",
589
+ "name": "seed",
590
+ "hasDynamicHelp": false,
591
+ "multiple": false,
592
+ "type": "option"
593
+ },
594
+ "state": {
595
+ "description": "State for patient generation (default: Massachusetts)",
596
+ "name": "state",
597
+ "hasDynamicHelp": false,
598
+ "multiple": false,
599
+ "type": "option"
600
+ },
601
+ "synthea_sample_data_csv_latest": {
602
+ "description": "Download latest CSV sample data",
603
+ "name": "synthea_sample_data_csv_latest",
604
+ "allowNo": false,
605
+ "type": "boolean"
606
+ },
607
+ "synthea_sample_data_fhir_latest": {
608
+ "description": "Download latest FHIR sample data",
609
+ "name": "synthea_sample_data_fhir_latest",
610
+ "allowNo": false,
611
+ "type": "boolean"
612
+ },
613
+ "synthea_sample_data_fhir_stu3_latest": {
614
+ "description": "Download latest FHIR STU3 sample data",
615
+ "name": "synthea_sample_data_fhir_stu3_latest",
616
+ "allowNo": false,
617
+ "type": "boolean"
618
+ },
619
+ "token": {
620
+ "char": "t",
621
+ "description": "Bearer token for FHIR server authentication",
622
+ "name": "token",
623
+ "hasDynamicHelp": false,
624
+ "multiple": false,
625
+ "type": "option"
626
+ },
627
+ "workdir": {
628
+ "char": "w",
629
+ "description": "Working directory for Synthea files",
630
+ "name": "workdir",
631
+ "default": "/home/runner/dhti",
632
+ "hasDynamicHelp": false,
633
+ "multiple": false,
634
+ "type": "option"
635
+ }
636
+ },
637
+ "hasDynamicHelp": false,
638
+ "hiddenAliases": [],
639
+ "id": "synthea",
640
+ "pluginAlias": "dhti-cli",
641
+ "pluginName": "dhti-cli",
642
+ "pluginType": "core",
643
+ "strict": true,
644
+ "enableJsonFlag": false,
645
+ "isESM": true,
646
+ "relativePath": [
647
+ "dist",
648
+ "commands",
649
+ "synthea.js"
650
+ ]
651
+ },
485
652
  "synthetic": {
486
653
  "aliases": [],
487
654
  "args": {
@@ -574,5 +741,5 @@
574
741
  ]
575
742
  }
576
743
  },
577
- "version": "0.6.0"
744
+ "version": "0.7.0"
578
745
  }
package/package.json CHANGED
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "name": "dhti-cli",
3
3
  "description": "DHTI CLI",
4
- "version": "0.6.0",
4
+ "version": "0.7.0",
5
5
  "author": "Bell Eapen",
6
6
  "bin": {
7
7
  "dhti-cli": "bin/run.js"
@@ -83,7 +83,8 @@
83
83
  "postpack": "shx rm -f oclif.manifest.json",
84
84
  "posttest": "echo 'npm run lint'",
85
85
  "prepack": "npx oclif manifest && npx oclif readme --readme-path notes/README.md",
86
- "test": "mocha --forbid-only \"test/**/*.test.ts\"",
86
+ "test": "mocha --forbid-only \"test/**/*.test.ts\" --exclude \"test/e2e/**/*.test.ts\"",
87
+ "test-e2e": "mocha --forbid-only \"test/**/*.test.ts\"",
87
88
  "readme": "npx oclif readme --readme-path notes/README.md && git add notes/README.md",
88
89
  "docs": "typedoc --exclude src/tool.ts --html docs src"
89
90
  },