@chaprola/mcp-server 1.12.0 → 1.13.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.js +34 -1
- package/package.json +1 -1
- package/references/cookbook.md +97 -0
- package/references/gotchas.md +33 -9
package/dist/index.js
CHANGED
|
@@ -96,7 +96,7 @@ const server = new McpServer({
|
|
|
96
96
|
**What you can do:**
|
|
97
97
|
- **Import data:** chaprola_import (JSON or FHIR bundles), chaprola_import_download (CSV/Excel/Parquet from URL)
|
|
98
98
|
- **Query data:** chaprola_query (filter, aggregate, join, pivot — like SELECT without SQL)
|
|
99
|
-
- **Record CRUD:** chaprola_insert_record, chaprola_update_record, chaprola_delete_record
|
|
99
|
+
- **Record CRUD:** chaprola_insert_record (single or batch), chaprola_upsert_record (insert-or-update by key), chaprola_update_record, chaprola_delete_record
|
|
100
100
|
- **Batch operations:** chaprola_run_each — run a compiled program against every record in a file (like a stored procedure that executes per-row). Use this for scoring, bulk updates, conditional logic across records.
|
|
101
101
|
- **Compile programs:** chaprola_compile (source code → bytecode). Programs are stored procedures — compile once, run on demand.
|
|
102
102
|
- **Run programs:** chaprola_run (single execution), chaprola_run_each (per-record batch), chaprola_report (published reports)
|
|
@@ -119,6 +119,7 @@ const server = new McpServer({
|
|
|
119
119
|
- "Concurrent writes will conflict" → Wrong. The merge-file model is concurrency-safe with dirty-bit checking. Multiple writers are handled transparently.
|
|
120
120
|
- "Only for AI agents" → Wrong. Every operation is a plain HTTP POST. React, Laravel, Python, curl — any HTTP client works. The MCP server is a convenience wrapper.
|
|
121
121
|
- "Fields get truncated" → Auto-expand: if you insert data longer than a field, the format file automatically expands to fit. No manual schema management needed.
|
|
122
|
+
- "PHI flags can't be removed" → Wrong. Set phi_exempt on the project config via chaprola_config to disable all PHI detection for non-healthcare projects (e.g., video pipelines, task trackers).
|
|
122
123
|
|
|
123
124
|
**For specialized processing** (NLP, ML inference, image recognition): use external services and import results into Chaprola. Chaprola is the data and compute layer, not the everything layer.
|
|
124
125
|
|
|
@@ -733,6 +734,19 @@ server.tool("chaprola_intent", "Read, write, or delete project and program inten
|
|
|
733
734
|
const res = await authedFetch("/intent", body);
|
|
734
735
|
return textResult(res);
|
|
735
736
|
});
|
|
737
|
+
// --- Project Config ---
|
|
738
|
+
server.tool("chaprola_config", "Set project configuration. Currently supports phi_exempt (boolean) to disable automatic PHI detection for non-healthcare projects. Fields like step_name, task_name won't be flagged as PHI. Omit phi_exempt to read current config.", {
|
|
739
|
+
project: z.string().describe("Project name"),
|
|
740
|
+
phi_exempt: z.boolean().optional().describe("If true, disable all PHI detection and masking for this project. For non-healthcare data only."),
|
|
741
|
+
userid: z.string().optional().describe("Project owner's username. Defaults to the authenticated user."),
|
|
742
|
+
}, async ({ project, phi_exempt, userid }) => withBaaCheck(async () => {
|
|
743
|
+
const { username } = getCredentials();
|
|
744
|
+
const body = { userid: userid || username, project };
|
|
745
|
+
if (phi_exempt !== undefined)
|
|
746
|
+
body.phi_exempt = phi_exempt;
|
|
747
|
+
const res = await authedFetch("/config", body);
|
|
748
|
+
return textResult(res);
|
|
749
|
+
}));
|
|
736
750
|
// --- Optimize (HULDRA) ---
|
|
737
751
|
server.tool("chaprola_optimize", "Run HULDRA nonlinear optimization using a compiled .PR as the objective evaluator", {
|
|
738
752
|
project: z.string().describe("Project name"),
|
|
@@ -897,6 +911,25 @@ server.tool("chaprola_insert_record", "Insert one or more records into a data fi
|
|
|
897
911
|
const res = await authedFetch("/insert-record", body);
|
|
898
912
|
return textResult(res);
|
|
899
913
|
}));
|
|
914
|
+
server.tool("chaprola_upsert_record", "Insert or update a record by key field. If a record with the matching key value exists, update it. If not, insert it. Batch supported via records array.", {
|
|
915
|
+
project: z.string().describe("Project name"),
|
|
916
|
+
file: z.string().describe("Data file name (without extension)"),
|
|
917
|
+
key: z.string().describe("Field name to match on (must exist in format file)"),
|
|
918
|
+
record: z.string().optional().describe("JSON object of a single record. Must contain the key field. Use this OR records, not both."),
|
|
919
|
+
records: z.string().optional().describe("JSON array of records for batch upsert. Each must contain the key field. Use this OR record, not both."),
|
|
920
|
+
userid: z.string().optional().describe("Project owner's username. Required when accessing a shared project where you are a writer. Defaults to the authenticated user."),
|
|
921
|
+
}, async ({ project, file, key, record: recordStr, records: recordsStr, userid }) => withBaaCheck(async () => {
|
|
922
|
+
const record = recordStr ? (typeof recordStr === 'string' ? JSON.parse(recordStr) : recordStr) : undefined;
|
|
923
|
+
const records = recordsStr ? (typeof recordsStr === 'string' ? JSON.parse(recordsStr) : recordsStr) : undefined;
|
|
924
|
+
const { username } = getCredentials();
|
|
925
|
+
const body = { userid: userid || username, project, file, key };
|
|
926
|
+
if (record)
|
|
927
|
+
body.record = record;
|
|
928
|
+
if (records)
|
|
929
|
+
body.records = records;
|
|
930
|
+
const res = await authedFetch("/upsert-record", body);
|
|
931
|
+
return textResult(res);
|
|
932
|
+
}));
|
|
900
933
|
server.tool("chaprola_update_record", "Update fields in a single record matched by a where clause. If no sort-key changes, updates in place; otherwise marks old record ignored and appends to merge file.", {
|
|
901
934
|
project: z.string().describe("Project name"),
|
|
902
935
|
file: z.string().describe("Data file name (without extension)"),
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@chaprola/mcp-server",
|
|
3
|
-
"version": "1.12.0",
|
|
3
|
+
"version": "1.13.1",
|
|
4
4
|
"description": "MCP server for Chaprola — agent-first data platform. Gives AI agents tools for structured data storage, record CRUD, querying, schema inspection, documentation lookup, web search, URL fetching, scheduled jobs, scoped site keys, and execution via plain HTTP.",
|
|
5
5
|
"type": "module",
|
|
6
6
|
"main": "dist/index.js",
|
package/references/cookbook.md
CHANGED
|
@@ -271,6 +271,103 @@ LET rec = 1
|
|
|
271
271
|
|
|
272
272
|
If the IN-file doesn't exist (e.g., new user), NOT IN treats it as empty — all records pass.
|
|
273
273
|
|
|
274
|
+
## Batch Insert (Multi-Record Append)
|
|
275
|
+
|
|
276
|
+
Insert multiple records in a single call:
|
|
277
|
+
|
|
278
|
+
```bash
|
|
279
|
+
POST /insert-record {
|
|
280
|
+
userid, project, file: "events",
|
|
281
|
+
records: [
|
|
282
|
+
{"event": "login", "ts": "2026-04-09T10:00:00Z"},
|
|
283
|
+
{"event": "purchase", "ts": "2026-04-09T10:05:00Z"},
|
|
284
|
+
{"event": "logout", "ts": "2026-04-09T10:30:00Z"}
|
|
285
|
+
]
|
|
286
|
+
}
|
|
287
|
+
```
|
|
288
|
+
|
|
289
|
+
All-or-nothing: if any record has an invalid field, the entire batch is rejected. Max 1000 per call. Single `record: {}` still works for one record.
|
|
290
|
+
|
|
291
|
+
## UPSERT (Insert or Update)
|
|
292
|
+
|
|
293
|
+
Create if new, update if exists — one call:
|
|
294
|
+
|
|
295
|
+
```bash
|
|
296
|
+
POST /upsert-record {
|
|
297
|
+
userid, project, file: "contacts", key: "contact_id",
|
|
298
|
+
record: {"contact_id": "c-42", "name": "Alice", "status": "active"}
|
|
299
|
+
}
|
|
300
|
+
```
|
|
301
|
+
|
|
302
|
+
If a record with `contact_id = "c-42"` exists, update it. If not, insert it. Batch supported via `records: [...]`.
|
|
303
|
+
|
|
304
|
+
## IN / NOT IN on /query
|
|
305
|
+
|
|
306
|
+
Filter by a set of values:
|
|
307
|
+
|
|
308
|
+
```bash
|
|
309
|
+
POST /query {
|
|
310
|
+
userid, project, file: "orders",
|
|
311
|
+
where: [{"field": "status", "op": "in", "value": ["active", "pending", "review"]}]
|
|
312
|
+
}
|
|
313
|
+
```
|
|
314
|
+
|
|
315
|
+
`not_in` excludes values. Empty array: `in` matches nothing, `not_in` matches everything.
|
|
316
|
+
|
|
317
|
+
## String Transforms on /query
|
|
318
|
+
|
|
319
|
+
Transform field values at query time without a .CS program:
|
|
320
|
+
|
|
321
|
+
```bash
|
|
322
|
+
POST /query {
|
|
323
|
+
userid, project, file: "contacts",
|
|
324
|
+
select: ["email", "name"],
|
|
325
|
+
transform: [{"field": "email", "func": "lower"}, {"field": "name", "func": "trim"}]
|
|
326
|
+
}
|
|
327
|
+
```
|
|
328
|
+
|
|
329
|
+
Functions: `upper`, `lower`, `trim`, `substring` (start, length), `replace` (old, new), `length`. Transforms apply before WHERE evaluation.
|
|
330
|
+
|
|
331
|
+
## DISTINCT on /query
|
|
332
|
+
|
|
333
|
+
Return unique rows:
|
|
334
|
+
|
|
335
|
+
```bash
|
|
336
|
+
POST /query {
|
|
337
|
+
userid, project, file: "products",
|
|
338
|
+
select: ["category"],
|
|
339
|
+
distinct: true
|
|
340
|
+
}
|
|
341
|
+
```
|
|
342
|
+
|
|
343
|
+
Cannot combine with aggregate or pivot.
|
|
344
|
+
|
|
345
|
+
## Date Filters on /query
|
|
346
|
+
|
|
347
|
+
```bash
|
|
348
|
+
# Records from the last 30 days
|
|
349
|
+
POST /query { where: [{"field": "created_at", "op": "date_within", "value": "30d"}] }
|
|
350
|
+
|
|
351
|
+
# Truncate dates for GROUP BY month (combine with distinct or pivot)
|
|
352
|
+
POST /query { transform: [{"field": "created_at", "func": "date_trunc", "unit": "month"}], distinct: true }
|
|
353
|
+
```
|
|
354
|
+
|
|
355
|
+
Units for date_trunc: `year`, `month`, `day`, `hour`. Date fields stored as ISO 8601 strings already sort correctly with `gt`, `lt`, `ge`, `le`.
|
|
356
|
+
|
|
357
|
+
## Parameterized /run
|
|
358
|
+
|
|
359
|
+
Pass named parameters to /run (same as /report supports):
|
|
360
|
+
|
|
361
|
+
```bash
|
|
362
|
+
POST /run {
|
|
363
|
+
userid, project, name: "PROCESS",
|
|
364
|
+
primary_file: "data",
|
|
365
|
+
params: {"video_id": "V-AT", "action": "advance"}
|
|
366
|
+
}
|
|
367
|
+
```
|
|
368
|
+
|
|
369
|
+
Programs read params via `MOVE PARAM.name` or `LET Rn = PARAM.name`. Use /run (not /report) when the program needs to WRITE data.
|
|
370
|
+
|
|
274
371
|
## Async for Large Datasets
|
|
275
372
|
|
|
276
373
|
```bash
|
package/references/gotchas.md
CHANGED
|
@@ -30,16 +30,31 @@ MOVE X.username U.1
|
|
|
30
30
|
GET R41 FROM P.63 10
|
|
31
31
|
```
|
|
32
32
|
|
|
33
|
-
###
|
|
33
|
+
### PRINT concatenation requires the + operator between every term
|
|
34
|
+
The `+` is mandatory. Without it, the compiler rejects the code.
|
|
34
35
|
```chaprola
|
|
35
|
-
//
|
|
36
|
-
PRINT P.name + " earns $" + R41
|
|
37
|
-
|
|
38
|
-
//
|
|
39
|
-
|
|
40
|
-
|
|
41
|
-
|
|
42
|
-
|
|
36
|
+
// CORRECT:
|
|
37
|
+
PRINT "Name: " + P.name + " earns $" + R41
|
|
38
|
+
|
|
39
|
+
// WRONG — compiler error: "requires + between terms"
|
|
40
|
+
PRINT "Name: " P.name
|
|
41
|
+
PRINT P.name " earns $" R41
|
|
42
|
+
```
|
|
43
|
+
|
|
44
|
+
MOVE + PRINT 0 still works but is the old style. Prefer PRINT concatenation.
|
|
45
|
+
|
|
46
|
+
### No commas anywhere in code
|
|
47
|
+
Chaprola has no commas. Not in PRINT, not in function calls, not in lists. Everything is space-separated or uses `+` for concatenation.
|
|
48
|
+
|
|
49
|
+
### No WHILE, FOR, LOOP, CONTINUE, BREAK, SWITCH, CASE
|
|
50
|
+
These keywords do not exist. Use GOTO with labels for all control flow:
|
|
51
|
+
```chaprola
|
|
52
|
+
LET rec = 1
|
|
53
|
+
100 SEEK rec
|
|
54
|
+
IF EOF END
|
|
55
|
+
// ... process ...
|
|
56
|
+
LET rec = rec + 1
|
|
57
|
+
GOTO 100
|
|
43
58
|
```
|
|
44
59
|
|
|
45
60
|
### Use CLEAR, not MOVE BLANKS for full regions
|
|
@@ -57,6 +72,12 @@ MOVE P.txn_type U.76 6
|
|
|
57
72
|
IF EQUAL "CREDIT" U.76 GOTO 200
|
|
58
73
|
```
|
|
59
74
|
|
|
75
|
+
### IF EQUAL "" is not valid — use IF BLANK
|
|
76
|
+
Comparing against an empty string (`IF EQUAL "" U.1 GOTO 200`) is rejected by the compiler. To test if a field is empty, use `IF BLANK`:
|
|
77
|
+
```chaprola
|
|
78
|
+
IF BLANK P.notes GOTO 200 // field is all spaces/empty
|
|
79
|
+
```
|
|
80
|
+
|
|
60
81
|
### MOVE literal auto-pads to field width
|
|
61
82
|
`MOVE "Jones" P.name` auto-fills the rest of the field with blanks. No need to clear first.
|
|
62
83
|
|
|
@@ -107,6 +128,9 @@ Every request body's `userid` must equal your username. 403 on mismatch.
|
|
|
107
128
|
### BAA only required for PHI
|
|
108
129
|
The BAA is only needed if your data contains Protected Health Information (patient names, SSNs, dates of birth, etc.). Non-PHI data works without signing a BAA. If you get a 403 on a PHI-flagged field, either sign the BAA or rename the field to avoid PHI auto-detection.
|
|
109
130
|
|
|
131
|
+
### PHI detection on non-healthcare projects
|
|
132
|
+
If your fields get PHI-flagged incorrectly (e.g., step_name, task_name, video_name), set phi_exempt on the project via `POST /config {"userid": "...", "project": "...", "phi_exempt": true}` or `chaprola_config`. This disables all PHI detection and masking for the project.
|
|
133
|
+
|
|
110
134
|
### Async for large datasets
|
|
111
135
|
`POST /run` with `async: true` for >100K records. API Gateway has a 30-second timeout; async bypasses it. Poll `/run/status` until `status: "done"`.
|
|
112
136
|
|