dhti-cli 0.4.0 → 0.6.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -22,6 +22,9 @@ Generative AI features are built as [LangServe Apps](https://python.langchain.co

  ### Want to know more?

+ *Watch this demo video:*
+ [![WATCH DHTI DEMO](https://img.youtube.com/vi/5jFFe3wqKM0/0.jpg)](https://www.youtube.com/watch?v=5jFFe3wqKM0)
+

  Gen AI can transform medicine. But it needs a framework for collaborative research and practice. DHTI is a reference architecture and an implementation for such a framework that integrates an EMR ([OpenMRS](https://openmrs.org/)), :link: Gen AI application server ([LangServe](https://python.langchain.com/v0.2/docs/langserve/)), self-hosted LLMs for privacy ([Ollama](https://ollama.com/)), tools on [MCP server](https://github.com/dermatologist/fhir-mcp-server), vector store for RAG ([redis](https://redis.io/)), monitoring ([LangFuse](https://langfuse.com/)), 🔥 FHIR repository with [CQL](https://nuchange.ca/2025/06/v-llm-in-the-loop-cql-execution-with-unstructured-data-and-fhir-terminology-support.html) support ([HAPI](https://cloud.alphora.com/sandbox/r4/cqm/)) and graph utilities ([Neo4j](https://neo4j.com/)) in one docker-compose! DHTI is inspired by [Bahmni](https://www.bahmni.org/) and **aims to facilitate GenAI adoption and research in areas with low resources.** The MCP server hosts pluggable, agent-invokable tools (FHIR query, summarization, terminology lookup, custom analytics, etc.) that you can extend without modifying core services.

  The essence of DHTI is *modularity* with an emphasis on *configuration!* It is non-opinionated on LLMs, hyperparameters and pretty much everything. DHTI supports installable Gen AI routines through [LangServe Apps](https://python.langchain.com/docs/langserve/) (which we call :curry: **elixir**) and installable UI elements through [OpenMRS O3](https://o3-docs.openmrs.org/) React container (which we call :shell: **conch**). 🔥 FHIR is used for backend and [CDS-Hooks](https://cds-hooks.org/) for frontend communication, decoupling conches from OpenMRS, making them potentially usable with any health information system. We have a [fork of the cds-hook sandbox](https://github.com/dermatologist/cds-hooks-sandbox/tree/dhti-1) for testing that uses the [order-select](https://cds-hooks.org/hooks/order-select/) hook, utilizing the contentString from the [FHIR CommunicationRequest](https://build.fhir.org/communicationrequest.html) within the [cds-hook context](https://cds-hooks.org/examples/) for user inputs (recommended).
@@ -106,7 +109,7 @@ Tools to fine-tune language models for the stack are on our roadmap. We encourag
  * **EMR**: Built-in EMR, OpenMRS, for patient records.
  * 👉 [Try it out today!](#try-it-out)

- 🌈 *Join us to make the Gen AI equitable and help doctors save lives!*
+ *Join us to make the Gen AI equitable and help doctors save lives!*

  ## :sparkles: Resources
  * [fhiry](https://github.com/dermatologist/fhiry): FHIR to pandas dataframe for data analytics, AI and ML!
@@ -131,6 +134,8 @@ Tools to fine-tune language models for the stack are on our roadmap. We encourag

  * You only need [Node.js](https://nodejs.org/) and [Docker](https://www.docker.com/) installed to run this project. Optionally, you can install [Python](https://www.python.org/) if you want to develop new elixirs. We use a fake LLM script for testing purposes, so you don't need an OpenAI key to run this project. It just says "Paris" or "I don't know" to any prompt. You can replace it with any internal or external LLM service later.

+ 👉 **If you are in a hurry, just run `./demo.sh` from a terminal (Linux or MacOS) in the root folder to try out the demo.** Windows users can use WSL. You only need [Node.js](https://nodejs.org/) and [Docker](https://www.docker.com/). This script runs all the commands below. Once done, use `npx dhti-cli docker -d` to stop and delete all the docker containers.
+
  * `npx dhti-cli help` to see all available commands.

  * `npx dhti-cli compose add -m openmrs -m langserve` to add OpenMRS and Langserve elixirs to your docker-compose.yml at ~/dhti. Other available modules: `ollama, langfuse, cqlFhir, redis, neo4j and mcpFhir`. You can read the newly created docker-compose by: `npx dhti-cli compose read`
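
The README quick-start above boils down to a handful of `npx dhti-cli` invocations. As a minimal sketch (not the actual `demo.sh`, whose full command list is not shown in this diff), the same flow can be scripted from Node:

```typescript
// quickstart.ts — illustrative only; mirrors the README commands quoted above.
import { execSync } from 'node:child_process';

const steps = [
  'npx dhti-cli compose add -m openmrs -m langserve', // writes docker-compose.yml at ~/dhti
  'npx dhti-cli compose read',                        // prints the generated compose file
];

for (const step of steps) {
  console.log(`> ${step}`);
  execSync(step, { stdio: 'inherit' }); // stream the CLI output to this terminal
}

// Per the README, `npx dhti-cli docker -d` stops and deletes the containers when done.
```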
@@ -24,7 +24,7 @@ export default class Compose extends Command {
    // flag with a value (-n, --name=VALUE)
    module: Flags.string({
      char: 'm',
-     description: 'Modules to add from ( langserve, openmrs, ollama, langfuse, cqlFhir, redis, neo4j and mcpFhir)',
+     description: 'Modules to add from ( langserve, openmrs, ollama, langfuse, cqlFhir, redis, neo4j, mcpFhir, mcpx and docktor)',
      multiple: true,
    }),
  };
@@ -57,6 +57,9 @@ export default class Compose extends Command {
    const webui = ['ollama-webui'];
    const fhir = ['fhir', 'postgres-db'];
    const mcpFhir = ['mcp-fhir', 'fhir', 'postgres-db'];
+   const mcpx = ['mcpx'];
+   const docktor = ['mcpx'];
+   const medplum = ['medplum-server', 'medplum-app', 'postgres-db', 'redis', 'mpclient'];
    const _modules = {
      cqlFhir,
      fhir,
@@ -64,11 +67,14 @@ export default class Compose extends Command {
      langfuse,
      langserve,
      mcpFhir,
+     mcpx,
+     docktor,
      neo4j,
      ollama,
      openmrs,
      redis,
      webui,
+     medplum,
    };
    try {
      const masterData = yaml.load(fs.readFileSync(path.join(RESOURCES_DIR, 'docker-compose-master.yml'), 'utf8'));
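
The Compose hunks above register three new module aliases (`mcpx`, `docktor`, `medplum`), each mapping to a list of service names taken from `docker-compose-master.yml`. A hedged sketch of how such a map can be used to select services; the helper below is illustrative, since the rest of the Compose command's merge logic is not part of this diff:

```typescript
// Illustrative sketch only: select services from a parsed docker-compose-master.yml
// based on the module-to-services map added above.
type ComposeFile = { services: Record<string, unknown>; volumes?: Record<string, unknown> };

const modules: Record<string, string[]> = {
  mcpx: ['mcpx'],
  docktor: ['mcpx'], // docktor manages pipelines inside the mcpx container, so it maps to the same service
  medplum: ['medplum-server', 'medplum-app', 'postgres-db', 'redis', 'mpclient'],
};

function pickServices(master: ComposeFile, selected: string[]): ComposeFile {
  const wanted = new Set(selected.flatMap((m) => modules[m] ?? []));
  const services = Object.fromEntries(
    Object.entries(master.services).filter(([name]) => wanted.has(name)),
  );
  return { services };
}
```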
@@ -0,0 +1,18 @@
+ import { Command } from '@oclif/core';
+ export default class Docktor extends Command {
+     static args: {
+         op: import("@oclif/core/interfaces").Arg<string, Record<string, unknown>>;
+         name: import("@oclif/core/interfaces").Arg<string | undefined, Record<string, unknown>>;
+     };
+     static description: string;
+     static examples: string[];
+     static flags: {
+         container: import("@oclif/core/interfaces").OptionFlag<string, import("@oclif/core/interfaces").CustomOptions>;
+         environment: import("@oclif/core/interfaces").OptionFlag<string[] | undefined, import("@oclif/core/interfaces").CustomOptions>;
+         image: import("@oclif/core/interfaces").OptionFlag<string | undefined, import("@oclif/core/interfaces").CustomOptions>;
+         'model-path': import("@oclif/core/interfaces").OptionFlag<string, import("@oclif/core/interfaces").CustomOptions>;
+         workdir: import("@oclif/core/interfaces").OptionFlag<string, import("@oclif/core/interfaces").CustomOptions>;
+     };
+     private restartMcpxContainer;
+     run(): Promise<void>;
+ }
@@ -0,0 +1,143 @@
+ import { Args, Command, Flags } from '@oclif/core';
+ import chalk from 'chalk';
+ import fs from 'node:fs';
+ import os from 'node:os';
+ import path from 'node:path';
+ export default class Docktor extends Command {
+     static args = {
+         op: Args.string({ description: 'Operation to perform (install, remove, restart, list)', required: true }),
+         name: Args.string({ description: 'Name of the inference pipeline (e.g., skin-cancer-classifier)', required: false }),
+     };
+     static description = 'Manage inference pipelines for MCPX';
+     static examples = [
+         '<%= config.bin %> <%= command.id %> install my-pipeline --image my-image:latest --model-path ./models',
+         '<%= config.bin %> <%= command.id %> remove my-pipeline',
+         '<%= config.bin %> <%= command.id %> list',
+     ];
+     static flags = {
+         container: Flags.string({
+             char: 'c',
+             default: 'dhti-mcpx-1',
+             description: 'Docker container name for MCPX (use docker ps to find the correct name)',
+         }),
+         environment: Flags.string({
+             char: 'e',
+             multiple: true,
+             description: 'Environment variables to pass to docker (format: VAR=value)',
+         }),
+         image: Flags.string({ char: 'i', description: 'Docker image for the inference pipeline (required for install)' }),
+         'model-path': Flags.string({
+             char: 'm',
+             default: '/lunar/packages/mcpx-server/config',
+             description: 'Local path to the model directory (optional for install)',
+         }),
+         workdir: Flags.string({
+             char: 'w',
+             default: `${os.homedir()}/dhti`,
+             description: 'Working directory for MCPX config',
+         }),
+     };
+     async restartMcpxContainer(mcpxConfigPath, containerName) {
+         try {
+             const { execSync } = await import('node:child_process');
+             execSync(`docker cp ${mcpxConfigPath} ${containerName}:/lunar/packages/mcpx-server/`);
+             this.log(chalk.green('Copied mcp.json to container: /lunar/packages/mcpx-server/config/mcp.json'));
+             execSync(`docker restart ${containerName}`);
+             this.log(chalk.green(`Restarted ${containerName} container.`));
+         }
+         catch (err) {
+             this.log(chalk.red(`Failed to copy config or restart container '${containerName}'. Please check Docker status and container name.`));
+         }
+     }
+     async run() {
+         const { args, flags } = await this.parse(Docktor);
+         const mcpxConfigPath = path.join(flags.workdir, 'config');
+         const mcpJsonPath = path.join(mcpxConfigPath, 'mcp.json');
+         // Ensure config directory exists
+         if (!fs.existsSync(mcpxConfigPath)) {
+             fs.mkdirSync(mcpxConfigPath, { recursive: true });
+         }
+         // Ensure mcp.json exists
+         if (!fs.existsSync(mcpJsonPath)) {
+             fs.writeFileSync(mcpJsonPath, JSON.stringify({ mcpServers: {} }, null, 2));
+         }
+         let mcpConfig = JSON.parse(fs.readFileSync(mcpJsonPath, 'utf8'));
+         // Ensure mcpServers exists
+         if (!mcpConfig.mcpServers) {
+             mcpConfig.mcpServers = {};
+         }
+         if (args.op === 'install') {
+             if (!args.name) {
+                 this.error('Name is required for install operation');
+             }
+             if (!flags.image) {
+                 this.error('Image is required for install operation');
+             }
+             const binds = [];
+             const envVars = [];
+             if (flags['model-path']) {
+                 const absModelPath = path.resolve(flags['model-path']);
+                 binds.push(`${absModelPath}:/model`);
+             }
+             if (flags.environment && flags.environment.length > 0) {
+                 const invalidEnvVars = flags.environment.filter((e) => {
+                     const idx = e.indexOf('=');
+                     return idx <= 0 || idx === e.length - 1;
+                 });
+                 if (invalidEnvVars.length > 0) {
+                     this.error(`Invalid environment variable format. Expected 'NAME=value'. Invalid entries: ${invalidEnvVars.join(', ')}`);
+                 }
+                 envVars.push(...flags.environment);
+             }
+             // Add socket mounting for docker tools if needed, but primarily we want the container to run as a server
+             // MCPX handles the running of the docker container.
+             // We need to configure it in mcp.json so MCPX picks it up.
+             // Based on MCP std, docker servers are defined with `docker` command.
+             // Add (merge) new server into existing mcpServers
+             mcpConfig.mcpServers[args.name] = {
+                 command: 'docker',
+                 args: [
+                     'run',
+                     '-i',
+                     '--rm',
+                     ...binds.flatMap((b) => ['-v', b]),
+                     ...envVars.flatMap((e) => ['-e', e]),
+                     flags.image,
+                 ],
+             };
+             // Write back the updated config (preserving all other properties and existing servers)
+             fs.writeFileSync(mcpJsonPath, JSON.stringify(mcpConfig, null, 2));
+             this.log(chalk.green(`Inference pipeline '${args.name}' added to MCPX config.`));
+             // Copy only mcp.json to container and restart
+             await this.restartMcpxContainer(mcpxConfigPath, flags.container);
+         }
+         else if (args.op === 'remove') {
+             if (!args.name) {
+                 this.error('Name is required for remove operation');
+             }
+             if (mcpConfig.mcpServers && mcpConfig.mcpServers[args.name]) {
+                 delete mcpConfig.mcpServers[args.name];
+                 // Write back the updated config (preserving all other properties and remaining servers)
+                 fs.writeFileSync(mcpJsonPath, JSON.stringify(mcpConfig, null, 2));
+                 this.log(chalk.green(`Inference pipeline '${args.name}' removed from MCPX config.`));
+                 this.log(chalk.yellow('Please restart the MCPX container to apply changes: dhti-cli docktor restart'));
+             }
+             else {
+                 this.log(chalk.yellow(`Inference pipeline '${args.name}' not found.`));
+             }
+         }
+         else if (args.op === 'restart') {
+             await this.restartMcpxContainer(mcpxConfigPath, flags.container);
+         }
+         else if (args.op === 'list') {
+             this.log(chalk.blue('Installed Inference Pipelines:'));
+             for (const [name, config] of Object.entries(mcpConfig.mcpServers)) {
+                 const argsList = Array.isArray(config.args) ? config.args.join(' ') : '';
+                 this.log(`- ${name}: ${argsList}`);
+             }
+         }
+         else {
+             this.error(`Unknown operation: ${args.op}`);
+         }
+     }
+ }
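
For orientation, running the example `docktor install my-pipeline --image my-image:latest --model-path ./models` would leave roughly this entry in `~/dhti/config/mcp.json` (shown here as a TypeScript literal; the absolute model path is machine-specific):

```typescript
// Approximate shape of mcp.json after the install example above; the bind source
// path depends on where the command is run from.
const mcpJson = {
  mcpServers: {
    'my-pipeline': {
      command: 'docker',
      args: ['run', '-i', '--rm', '-v', '/abs/path/to/models:/model', 'my-image:latest'],
    },
  },
};
console.log(JSON.stringify(mcpJson, null, 2)); // same serialization the command uses before copying it to the container
```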
@@ -7,6 +7,7 @@ export default class Mimic extends Command {
      static examples: string[];
      static flags: {
          'dry-run': import("@oclif/core/interfaces").BooleanFlag<boolean>;
+         token: import("@oclif/core/interfaces").OptionFlag<string | undefined, import("@oclif/core/interfaces").CustomOptions>;
      };
      run(): Promise<void>;
  }
@@ -11,9 +11,18 @@ export default class Mimic extends Command {
              default: false,
              description: 'Show what changes would be made without actually making them',
          }),
+         token: Flags.string({
+             char: 't',
+             description: 'Bearer token for authentication (optional)',
+         }),
      };
      async run() {
          const { args, flags } = await this.parse(Mimic);
+         // Ensure server URL ends with /$import
+         let serverUrl = args.server;
+         if (!serverUrl.endsWith('/$import')) {
+             serverUrl = serverUrl.replace(/\/$/, '') + '/$import';
+         }
          const mimic_request = `{

      "resourceType": "Parameters",
@@ -148,25 +157,34 @@ export default class Mimic extends Command {

      }`;
          if (flags['dry-run']) {
-             console.log(chalk.yellow(`[DRY RUN] Would send POST request to: ${args.server}`));
+             console.log(chalk.yellow(`[DRY RUN] Would send POST request to: ${serverUrl}`));
              console.log(chalk.cyan('[DRY RUN] Request headers:'));
              console.log(chalk.green(' Content-Type: application/fhir+json'));
              console.log(chalk.green(' Prefer: respond-async'));
+             if (flags.token) {
+                 console.log(chalk.green(' Authorization: Bearer <token>'));
+             }
              console.log(chalk.cyan('[DRY RUN] Request body:'));
              console.log(mimic_request);
              return;
          }
+         // Build request headers
+         const headers = {
+             'Content-Type': 'application/fhir+json',
+             Prefer: 'respond-async',
+         };
+         if (flags.token) {
+             headers.Authorization = `Bearer ${flags.token}`;
+         }
          // send a POST request to the server with the mimic_request body
-         const response = await fetch(args.server, {
+         const response = await fetch(serverUrl, {
              body: mimic_request,
-             headers: {
-                 'Content-Type': 'application/fhir+json',
-                 Prefer: 'respond-async',
-             },
+             headers,
              method: 'POST',
          });
          if (!response.ok) {
              console.error(`Error: ${response.status} ${response.statusText}`);
+             this.exit(1);
          }
      }
  }
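
The Mimic changes amount to two small behaviors: normalize the target to the FHIR `$import` endpoint and attach an optional bearer token. A standalone sketch of that logic (the function names are illustrative, not part of the package):

```typescript
// Hedged sketch of the URL normalization and header handling added to Mimic above.
export function toImportUrl(server: string): string {
  // Append /$import unless the caller already targeted it; strip one trailing slash first.
  return server.endsWith('/$import') ? server : server.replace(/\/$/, '') + '/$import';
}

export function buildHeaders(token?: string): Record<string, string> {
  const headers: Record<string, string> = {
    'Content-Type': 'application/fhir+json',
    Prefer: 'respond-async', // ask the FHIR server to process the $import asynchronously
  };
  if (token) headers.Authorization = `Bearer ${token}`;
  return headers;
}

// Example: toImportUrl('http://localhost:8080/fhir/') -> 'http://localhost:8080/fhir/$import'
```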
@@ -10,7 +10,7 @@ services:
        - backend
      ports:
        - "80:80"
-       - "9000:80"
+       - "9001:80"


    frontend:
@@ -133,7 +133,7 @@ services:
        - "hapi.fhir.enforce_referential_integrity_on_delete=false"

    mcp-fhir:
-     image: beapen/fhir-mcp-server:1.0
+     image: beapen/fhir-mcp-server:4.0
      ports:
        - 8006:8000
      restart: "unless-stopped"
@@ -173,7 +173,7 @@ services:
        - LANGFUSE_ENABLE_EXPERIMENTAL_FEATURES=${LANGFUSE_ENABLE_EXPERIMENTAL_FEATURES:-false}

    postgres-db:
-     image: postgres
+     image: postgres:17
      restart: "unless-stopped"
      environment:
        - POSTGRES_USER=postgres
@@ -183,6 +183,12 @@ services:
        - 5432:5432
      volumes:
        - postgres-db:/var/lib/postgresql/data
+     healthcheck:
+       test: ["CMD-SHELL", "pg_isready -U postgres -d postgres"]
+       interval: 10s
+       timeout: 5s
+       retries: 5
+       start_period: 10s

    redis:
      image: redislabs/redisearch:2.8.8
@@ -191,6 +197,12 @@ services:
      restart: "unless-stopped"
      volumes:
        - redis-db:/data
+     healthcheck:
+       test: ["CMD", "redis-cli", "ping"]
+       interval: 10s
+       timeout: 5s
+       retries: 3
+       start_period: 10s

    redis-commander:
      image: rediscommander/redis-commander:latest
@@ -225,6 +237,120 @@ services:
        - spring.neo4j.authentication.username=neo4j
        - spring.neo4j.authentication.password=password

+   mcpx:
+     image: us-central1-docker.pkg.dev/prj-common-442813/mcpx/mcpx:latest
+     ports:
+       - "8000:8000"
+       - "9000:9000"
+       - "5173:5173"
+       - "3000:3000"
+     # environment:
+     #   - MCPX_PORT=9000
+     #   - MCPX_SERVER_URL="http://localhost:9000"
+     #   - VITE_MCPX_SERVER_PORT=9000
+     #   - VITE_MCPX_SERVER_URL="http://localhost:9000"
+     restart: unless-stopped
+     volumes:
+       - mcpx-config:/lunar/packages/mcpx-server/config
+     privileged: true
+
+   # Medplum server container
+   medplum-server:
+     image: medplum/medplum-server:latest
+     restart: always
+     depends_on:
+       postgres-db:
+         condition: service_healthy
+       redis:
+         condition: service_healthy
+     ports:
+       - '8103:8103'
+     volumes:
+       # Conditionally define a volume for a `medplum.config.json` if one is specified by the MEDPLUM_CONFIG_PATH env var
+       - ${MEDPLUM_CONFIG_PATH:-./medplum.config.json}:/usr/src/medplum/packages/server/medplum.config.json
+     entrypoint: >
+       sh -c "
+       if [ -n '${MEDPLUM_CONFIG_PATH}' ]; then
+         echo 'Config file found, running with custom config'
+         node --require ./packages/server/dist/otel/instrumentation.js packages/server/dist/index.js file:/usr/src/medplum/packages/server/medplum.config.json
+       else
+         echo 'No config file found, running with default env settings'
+         node --require ./packages/server/dist/otel/instrumentation.js packages/server/dist/index.js env
+       fi
+       "
+     environment:
+       MEDPLUM_PORT: 8103
+       MEDPLUM_BASE_URL: 'http://localhost:8103/'
+       MEDPLUM_APP_BASE_URL: 'http://localhost:3103/'
+       MEDPLUM_STORAGE_BASE_URL: 'http://localhost:8103/storage/'
+
+       MEDPLUM_DATABASE_HOST: 'postgres-db'
+       MEDPLUM_DATABASE_PORT: 5432
+       MEDPLUM_DATABASE_DBNAME: 'postgres'
+       MEDPLUM_DATABASE_USERNAME: 'postgres'
+       MEDPLUM_DATABASE_PASSWORD: 'postgres'
+
+       MEDPLUM_REDIS_HOST: 'redis'
+       MEDPLUM_REDIS_PORT: 6379
+       # MEDPLUM_REDIS_PASSWORD: 'medplum'
+
+       MEDPLUM_BINARY_STORAGE: 'file:./binary/'
+       MEDPLUM_SUPPORT_EMAIL: '\"Medplum\" <support@medplum.com>'
+       MEDPLUM_GOOGLE_CLIENT_ID: '397236612778-c0b5tnjv98frbo1tfuuha5vkme3cmq4s.apps.googleusercontent.com'
+       MEDPLUM_GOOGLE_CLIENT_SECRET: ''
+       MEDPLUM_RECAPTCHA_SITE_KEY: '6LfHdsYdAAAAAC0uLnnRrDrhcXnziiUwKd8VtLNq'
+       MEDPLUM_RECAPTCHA_SECRET_KEY: '6LfHdsYdAAAAAH9dN154jbJ3zpQife3xaiTvPChL'
+       MEDPLUM_MAX_JSON_SIZE: '1mb'
+       MEDPLUM_MAX_BATCH_SIZE: '50mb'
+       MEDPLUM_BOT_LAMBDA_ROLE_ARN: ''
+       MEDPLUM_BOT_LAMBDA_LAYER_NAME: 'medplum-bot-layer'
+       MEDPLUM_VM_CONTEXT_BOTS_ENABLED: 'true'
+       MEDPLUM_DEFAULT_BOT_RUNTIME_VERSION: 'vmcontext'
+       MEDPLUM_ALLOWED_ORIGINS: '*'
+       MEDPLUM_INTROSPECTION_ENABLED: 'true'
+       MEDPLUM_SHUTDOWN_TIMEOUT_MILLISECONDS: 30000
+
+     healthcheck:
+       test:
+         # We use Node's fetch for healthcheck because this image doesn't have a curl or wget installed
+         [
+           'CMD',
+           'node',
+           '-e',
+           'fetch("http://localhost:8103/healthcheck").then(r => r.json()).then(console.log).catch(() => { process.exit(1); })',
+         ]
+       interval: 30s
+       timeout: 10s
+       retries: 5
+
+   # Medplum app container (web UI)
+   medplum-app:
+     image: medplum/medplum-app:latest
+     restart: always
+     # depends_on:
+     #   medplum-server:
+     #     condition: service_healthy
+     ports:
+       - '3103:3000'
+     healthcheck:
+       test: ['CMD', 'curl', '-f', 'http://localhost:3103']
+       interval: 10s
+       timeout: 5s
+       retries: 5
+
+   mpclient:
+     image: beapen/mpclient:15.0
+     ports:
+       - "8111:8111"
+     restart: "unless-stopped"
+     environment:
+       - PORT=8111
+       - MPCLIENT_BASE_URL=http://localhost:8111
+       - MEDPLUM_TOKEN_URL=http://medplum-server:8103/oauth2/token
+       - MEDPLUM_CLIENT_ID=client-id
+       - MEDPLUM_CLIENT_SECRET=secret
+     depends_on:
+       - medplum-server

  volumes:
    openmrs-data: ~
@@ -235,4 +361,5 @@ volumes:
    neo4j-db: ~
    ollama-code: ~
    ollama-root: ~
-   ollama-webui: ~
+   ollama-webui: ~
+   mcpx-config: ~
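
The new `postgres-db`, `redis` and `medplum-server` healthchecks let Medplum start only after its dependencies report healthy. A hedged sketch of the same readiness probe run from the host, using the `http://localhost:8103/healthcheck` endpoint and port mapping declared above (the retry policy here is illustrative):

```typescript
// check-medplum.ts — illustrative host-side probe mirroring the compose healthcheck above.
async function waitForMedplum(url = 'http://localhost:8103/healthcheck', attempts = 10): Promise<void> {
  for (let i = 0; i < attempts; i++) {
    try {
      const res = await fetch(url);
      if (res.ok) {
        console.log('medplum-server is healthy:', await res.json());
        return;
      }
    } catch {
      // server not accepting connections yet; keep retrying
    }
    await new Promise((resolve) => setTimeout(resolve, 5000)); // retry every 5 seconds
  }
  throw new Error('medplum-server did not become healthy in time');
}

waitForMedplum().catch((err) => { console.error(err); process.exit(1); });
```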
@@ -30,7 +30,7 @@
      },
      "module": {
        "char": "m",
-       "description": "Modules to add from ( langserve, openmrs, ollama, langfuse, cqlFhir, redis, neo4j and mcpFhir)",
+       "description": "Modules to add from ( langserve, openmrs, ollama, langfuse, cqlFhir, redis, neo4j, mcpFhir, mcpx and docktor)",
        "name": "module",
        "hasDynamicHelp": false,
        "multiple": true,
@@ -243,6 +243,86 @@
        "docker.js"
      ]
    },
+   "docktor": {
+     "aliases": [],
+     "args": {
+       "op": {
+         "description": "Operation to perform (install, remove, restart, list)",
+         "name": "op",
+         "required": true
+       },
+       "name": {
+         "description": "Name of the inference pipeline (e.g., skin-cancer-classifier)",
+         "name": "name",
+         "required": false
+       }
+     },
+     "description": "Manage inference pipelines for MCPX",
+     "examples": [
+       "<%= config.bin %> <%= command.id %> install my-pipeline --image my-image:latest --model-path ./models",
+       "<%= config.bin %> <%= command.id %> remove my-pipeline",
+       "<%= config.bin %> <%= command.id %> list"
+     ],
+     "flags": {
+       "container": {
+         "char": "c",
+         "description": "Docker container name for MCPX (use docker ps to find the correct name)",
+         "name": "container",
+         "default": "dhti-mcpx-1",
+         "hasDynamicHelp": false,
+         "multiple": false,
+         "type": "option"
+       },
+       "environment": {
+         "char": "e",
+         "description": "Environment variables to pass to docker (format: VAR=value)",
+         "name": "environment",
+         "hasDynamicHelp": false,
+         "multiple": true,
+         "type": "option"
+       },
+       "image": {
+         "char": "i",
+         "description": "Docker image for the inference pipeline (required for install)",
+         "name": "image",
+         "hasDynamicHelp": false,
+         "multiple": false,
+         "type": "option"
+       },
+       "model-path": {
+         "char": "m",
+         "description": "Local path to the model directory (optional for install)",
+         "name": "model-path",
+         "default": "/lunar/packages/mcpx-server/config",
+         "hasDynamicHelp": false,
+         "multiple": false,
+         "type": "option"
+       },
+       "workdir": {
+         "char": "w",
+         "description": "Working directory for MCPX config",
+         "name": "workdir",
+         "default": "/home/runner/dhti",
+         "hasDynamicHelp": false,
+         "multiple": false,
+         "type": "option"
+       }
+     },
+     "hasDynamicHelp": false,
+     "hiddenAliases": [],
+     "id": "docktor",
+     "pluginAlias": "dhti-cli",
+     "pluginName": "dhti-cli",
+     "pluginType": "core",
+     "strict": true,
+     "enableJsonFlag": false,
+     "isESM": true,
+     "relativePath": [
+       "dist",
+       "commands",
+       "docktor.js"
+     ]
+   },
    "elixir": {
      "aliases": [],
      "args": {
@@ -377,6 +457,14 @@
        "name": "dry-run",
        "allowNo": false,
        "type": "boolean"
+     },
+     "token": {
+       "char": "t",
+       "description": "Bearer token for authentication (optional)",
+       "name": "token",
+       "hasDynamicHelp": false,
+       "multiple": false,
+       "type": "option"
      }
    },
    "hasDynamicHelp": false,
@@ -486,5 +574,5 @@
      ]
    }
  },
- "version": "0.4.0"
+ "version": "0.6.0"
  }
package/package.json CHANGED
@@ -1,7 +1,7 @@
  {
    "name": "dhti-cli",
    "description": "DHTI CLI",
-   "version": "0.4.0",
+   "version": "0.6.0",
    "author": "Bell Eapen",
    "bin": {
      "dhti-cli": "bin/run.js"