@chaprola/mcp-server 1.0.2 → 1.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -2,7 +2,7 @@
2
2
 
3
3
  MCP server for [Chaprola](https://chaprola.org) — the agent-first data platform.
4
4
 
5
- Gives AI agents 35 tools for structured data storage, querying, compilation, and execution through the [Model Context Protocol](https://modelcontextprotocol.io).
5
+ Gives AI agents 42 tools for structured data storage, querying, schema inspection, web search, URL fetching, scheduled jobs, and execution through the [Model Context Protocol](https://modelcontextprotocol.io).
6
6
 
7
7
  ## Quick Start
8
8
 
@@ -116,13 +116,18 @@ Or use the `chaprola_register` tool after connecting.
116
116
  | `chaprola_email_read` | Read email |
117
117
  | `chaprola_email_send` | Send email |
118
118
  | `chaprola_email_delete` | Delete email |
119
+ | `chaprola_search` | Web search via Brave API |
120
+ | `chaprola_fetch` | Fetch URL content as markdown/text/JSON |
121
+ | `chaprola_schedule` | Create scheduled recurring job |
122
+ | `chaprola_schedule_list` | List scheduled jobs |
123
+ | `chaprola_schedule_delete` | Delete scheduled job |
119
124
 
120
125
  ## Resources
121
126
 
122
127
  The server exposes reference documentation as MCP resources:
123
128
 
124
129
  - `chaprola://cookbook` — Language cookbook with complete examples
125
- - `chaprola://endpoints` — All 35 API endpoints
130
+ - `chaprola://endpoints` — All 42 API endpoints
126
131
  - `chaprola://auth` — Authentication reference
127
132
  - `chaprola://gotchas` — Common mistakes to avoid
128
133
 
package/dist/index.js CHANGED
@@ -121,7 +121,7 @@ server.resource("cookbook", "chaprola://cookbook", { description: "Chaprola lang
121
121
  server.resource("gotchas", "chaprola://gotchas", { description: "Common Chaprola mistakes — no parentheses in LET, no commas in PRINT, MOVE length must match field width, DEFINE names must not collide with fields. READ THIS before writing code.", mimeType: "text/markdown" }, async () => ({
122
122
  contents: [{ uri: "chaprola://gotchas", mimeType: "text/markdown", text: readRef("gotchas.md") }],
123
123
  }));
124
- server.resource("endpoints", "chaprola://endpoints", { description: "Chaprola API endpoint reference — all 35 endpoints with request/response shapes", mimeType: "text/markdown" }, async () => ({
124
+ server.resource("endpoints", "chaprola://endpoints", { description: "Chaprola API endpoint reference — all 42 endpoints with request/response shapes", mimeType: "text/markdown" }, async () => ({
125
125
  contents: [{ uri: "chaprola://endpoints", mimeType: "text/markdown", text: readRef("endpoints.md") }],
126
126
  }));
127
127
  server.resource("auth", "chaprola://auth", { description: "Chaprola authentication reference — API key model, BAA flow, credential recovery", mimeType: "text/markdown" }, async () => ({
@@ -201,15 +201,12 @@ server.tool("chaprola_baa_text", "Get the current Business Associate Agreement t
201
201
  const res = await publicFetch("POST", "/baa-text", {});
202
202
  return textResult(res);
203
203
  });
204
- server.tool("chaprola_report", "Run a published program and return output. No auth required — program must be published first", {
204
+ server.tool("chaprola_report", "Run a published program and return output. No auth required — program must be published first via /publish", {
205
205
  userid: z.string().describe("Owner of the published program"),
206
206
  project: z.string().describe("Project containing the program"),
207
207
  name: z.string().describe("Name of the published .PR file"),
208
- primary_file: z.string().optional().describe("Data file to load"),
209
- }, async ({ userid, project, name, primary_file }) => {
208
+ }, async ({ userid, project, name }) => {
210
209
  const body = { userid, project, name };
211
- if (primary_file)
212
- body.primary_file = primary_file;
213
210
  const res = await publicFetch("POST", "/report", body);
214
211
  return textResult(res);
215
212
  });
@@ -311,19 +308,19 @@ server.tool("chaprola_list", "List files in a project with optional wildcard pat
311
308
  return textResult(res);
312
309
  }));
313
310
  // --- Compile ---
314
- server.tool("chaprola_compile", "Compile Chaprola source (.CS) to bytecode (.PR). READ chaprola://cookbook BEFORE writing source. Key syntax: no PROGRAM keyword (start with commands), no commas, MOVE+PRINT 0 buffer model (not PRINT field), SEEK for primary records, OPEN/READ/WRITE/CLOSE for secondary files, LET supports one operation (no parentheses), field addressing via P.field/S.field requires primary_format/secondary_formats params.", {
311
+ server.tool("chaprola_compile", "Compile Chaprola source (.CS) to bytecode (.PR). READ chaprola://cookbook BEFORE writing source. Key syntax: no PROGRAM keyword (start with commands), no commas, MOVE+PRINT 0 buffer model (not PRINT field), SEEK for primary records, OPEN/READ/WRITE/CLOSE for secondary files, LET supports one operation (no parentheses), field addressing via P.field/S.field requires primary_format/secondary_format params.", {
315
312
  project: z.string().describe("Project name"),
316
313
  name: z.string().describe("Program name (without extension)"),
317
314
  source: z.string().describe("Chaprola source code"),
318
315
  primary_format: z.string().optional().describe("Primary data file name (enables P.fieldname addressing)"),
319
- secondary_formats: z.array(z.string()).optional().describe("Secondary format file names (enables S.fieldname addressing)"),
320
- }, async ({ project, name, source, primary_format, secondary_formats }) => withBaaCheck(async () => {
316
+ secondary_format: z.string().optional().describe("Secondary format file name (enables S.fieldname addressing)"),
317
+ }, async ({ project, name, source, primary_format, secondary_format }) => withBaaCheck(async () => {
321
318
  const { username } = getCredentials();
322
319
  const body = { userid: username, project, name, source };
323
320
  if (primary_format)
324
321
  body.primary_format = primary_format;
325
- if (secondary_formats)
326
- body.secondary_formats = secondary_formats;
322
+ if (secondary_format)
323
+ body.secondary_format = secondary_format;
327
324
  const res = await authedFetch("/compile", body);
328
325
  return textResult(res);
329
326
  }));
@@ -492,6 +489,46 @@ server.tool("chaprola_merge", "Merge two sorted data files into one. Both must s
492
489
  const res = await authedFetch("/merge", { userid: username, project, file_a, file_b, output, key });
493
490
  return textResult(res);
494
491
  }));
492
+ // --- Schema: Format + Alter ---
493
+ server.tool("chaprola_format", "Inspect a data file's schema — returns field names, positions, lengths, types, and PHI flags", {
494
+ project: z.string().describe("Project name"),
495
+ name: z.string().describe("Data file name (without .F extension)"),
496
+ }, async ({ project, name }) => withBaaCheck(async () => {
497
+ const { username } = getCredentials();
498
+ const res = await authedFetch("/format", { userid: username, project, name });
499
+ return textResult(res);
500
+ }));
501
+ server.tool("chaprola_alter", "Modify a data file's schema: widen/narrow/rename fields, add new fields, drop fields. Transforms existing data to match the new schema.", {
502
+ project: z.string().describe("Project name"),
503
+ name: z.string().describe("Data file name (without extension)"),
504
+ alter: z.array(z.object({
505
+ field: z.string().describe("Field name to modify"),
506
+ width: z.number().optional().describe("New width (widen or narrow)"),
507
+ rename: z.string().optional().describe("New field name"),
508
+ type: z.enum(["text", "numeric"]).optional().describe("Change field type"),
509
+ })).optional().describe("Fields to alter"),
510
+ add: z.array(z.object({
511
+ name: z.string().describe("New field name"),
512
+ width: z.number().describe("Field width"),
513
+ type: z.enum(["text", "numeric"]).optional().describe("Field type (default: text)"),
514
+ after: z.string().optional().describe("Insert after this field (default: end)"),
515
+ })).optional().describe("Fields to add"),
516
+ drop: z.array(z.string()).optional().describe("Field names to drop"),
517
+ output: z.string().optional().describe("Output file name (default: in-place)"),
518
+ }, async ({ project, name, alter, add, drop, output }) => withBaaCheck(async () => {
519
+ const { username } = getCredentials();
520
+ const body = { userid: username, project, name };
521
+ if (alter)
522
+ body.alter = alter;
523
+ if (add)
524
+ body.add = add;
525
+ if (drop)
526
+ body.drop = drop;
527
+ if (output)
528
+ body.output = output;
529
+ const res = await authedFetch("/alter", body);
530
+ return textResult(res);
531
+ }));
495
532
  // --- Optimize (HULDRA) ---
496
533
  server.tool("chaprola_optimize", "Run HULDRA nonlinear optimization using a compiled .PR as the objective evaluator", {
497
534
  project: z.string().describe("Project name"),
@@ -576,6 +613,58 @@ server.tool("chaprola_email_delete", "Delete a specific email from your mailbox"
576
613
  const res = await authedFetch("/email/delete", { address: username, message_id });
577
614
  return textResult(res);
578
615
  }));
616
+ // --- Search ---
617
+ server.tool("chaprola_search", "Search the web via Brave Search API. Returns titles, URLs, and snippets. Optional AI-grounded summary. Rate limit: 10/day per user", {
618
+ query: z.string().describe("Search query string"),
619
+ count: z.number().optional().describe("Number of results to return (default 5, max 20)"),
620
+ summarize: z.boolean().optional().describe("Include AI-grounded summary from Brave Answers API"),
621
+ }, async ({ query, count, summarize }) => withBaaCheck(async () => {
622
+ const body = { query };
623
+ if (count !== undefined)
624
+ body.count = count;
625
+ if (summarize !== undefined)
626
+ body.summarize = summarize;
627
+ const res = await authedFetch("/search", body);
628
+ return textResult(res);
629
+ }));
630
+ // --- Fetch ---
631
+ server.tool("chaprola_fetch", "Fetch any URL and return clean content. HTML pages converted to markdown. SSRF-protected. Rate limit: 20/day per user", {
632
+ url: z.string().url().describe("URL to fetch (http:// or https://)"),
633
+ format: z.enum(["markdown", "text", "html", "json"]).optional().describe("Output format (default: markdown)"),
634
+ max_length: z.number().optional().describe("Max output characters (default: 50000, max: 200000)"),
635
+ }, async ({ url, format, max_length }) => withBaaCheck(async () => {
636
+ const body = { url };
637
+ if (format)
638
+ body.format = format;
639
+ if (max_length !== undefined)
640
+ body.max_length = max_length;
641
+ const res = await authedFetch("/fetch", body);
642
+ return textResult(res);
643
+ }));
644
+ // --- Schedule ---
645
+ server.tool("chaprola_schedule", "Create a scheduled job that runs a Chaprola endpoint on a recurring cron. Max 10 schedules/user, 15-min minimum interval", {
646
+ name: z.string().describe("Unique name for this schedule (alphanumeric + hyphens/underscores)"),
647
+ cron: z.string().describe("Standard 5-field cron expression (min hour day month weekday). Minimum interval: 15 minutes"),
648
+ endpoint: z.enum(["/import-download", "/run", "/export-report", "/search", "/fetch", "/query", "/email/send", "/export", "/report", "/list"]).describe("Target endpoint to call"),
649
+ body: z.record(z.any()).describe("Request body for the target endpoint. userid is injected automatically"),
650
+ skip_if_unchanged: z.boolean().optional().describe("Skip when response matches previous run (SHA-256 hash). Default: false"),
651
+ }, async ({ name, cron, endpoint, body, skip_if_unchanged }) => withBaaCheck(async () => {
652
+ const reqBody = { name, cron, endpoint, body };
653
+ if (skip_if_unchanged !== undefined)
654
+ reqBody.skip_if_unchanged = skip_if_unchanged;
655
+ const res = await authedFetch("/schedule", reqBody);
656
+ return textResult(res);
657
+ }));
658
+ server.tool("chaprola_schedule_list", "List all scheduled jobs for the authenticated user with run history and next execution time", {}, async () => withBaaCheck(async () => {
659
+ const res = await authedFetch("/schedule/list", {});
660
+ return textResult(res);
661
+ }));
662
+ server.tool("chaprola_schedule_delete", "Delete a scheduled job by name", {
663
+ name: z.string().describe("Name of the schedule to delete"),
664
+ }, async ({ name }) => withBaaCheck(async () => {
665
+ const res = await authedFetch("/schedule/delete", { name });
666
+ return textResult(res);
667
+ }));
579
668
  // --- Start server ---
580
669
  async function main() {
581
670
  const transport = new StdioServerTransport();
package/package.json CHANGED
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "name": "@chaprola/mcp-server",
3
- "version": "1.0.2",
4
- "description": "MCP server for Chaprola — agent-first data platform. Gives AI agents 35 tools for structured data storage, querying, compilation, and execution via plain HTTP.",
3
+ "version": "1.2.0",
4
+ "description": "MCP server for Chaprola — agent-first data platform. Gives AI agents 42 tools for structured data storage, querying, schema inspection, web search, URL fetching, scheduled jobs, and execution via plain HTTP.",
5
5
  "type": "module",
6
6
  "main": "dist/index.js",
7
7
  "bin": {
@@ -58,7 +58,7 @@ READ match // load matched secondary record
58
58
  MOVE S.dept_name U.12 15 // now accessible
59
59
  ```
60
60
 
61
- Compile with: `primary_format: "EMPLOYEES", secondary_formats: ["DEPARTMENTS"]`
61
+ Compile with: `primary_format: "EMPLOYEES", secondary_format: "DEPARTMENTS"`
62
62
 
63
63
  ## Read-Modify-Write (UPDATE)
64
64
 
@@ -38,7 +38,7 @@ Auth: `Authorization: Bearer chp_your_api_key` on all protected endpoints.
38
38
  ### Compile & Run
39
39
  | Endpoint | Body | Response |
40
40
  |----------|------|----------|
41
- | `POST /compile` | `{userid, project, name, source, primary_format?, secondary_formats?}` | `{instructions, bytes}` |
41
+ | `POST /compile` | `{userid, project, name, source, primary_format?, secondary_format?}` | `{instructions, bytes}` |
42
42
  | `POST /run` | `{userid, project, name, primary_file?, record?, async?, nophi?}` | `{output, registers}` or `{job_id}` |
43
43
  | `POST /run/status` | `{userid, project, job_id}` | `{status: "running"/"done", output?}` |
44
44
  | `POST /publish` | `{userid, project, name, primary_file?, record?}` | `{report_url}` |
@@ -58,8 +58,8 @@ All import/export/compile/run/query/email endpoints return 403 without a signed
58
58
  ### Async for large datasets
59
59
  `POST /run` with `async: true` for >100K records. API Gateway has a 30-second timeout; async bypasses it. Poll `/run/status` until `status: "done"`.
60
60
 
61
- ### secondary_formats is an array
62
- Pass `secondary_formats: ["DEPARTMENTS"]` (array), not `secondary_format: "DEPARTMENTS"` (string), to `/compile`.
61
+ ### secondary_format is a string
62
+ Pass `secondary_format: "DEPARTMENTS"` (a single string), not an array, to `/compile`.
63
63
 
64
64
  ### Data files expire
65
65
  Default 90 days. Set `expires_in_days` on import to override. Expired files are deleted daily at 03:00 UTC.