@jaypie/mcp 0.2.3 → 0.2.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -406,3 +406,94 @@ expressHandler automatically sets these headers:
  ## Datadog Integration

  When Datadog environment variables are configured, expressHandler automatically submits metrics for each request including status code and path.
+
+ ## Streaming Responses
+
+ Use `expressStreamHandler` for Server-Sent Events (SSE) streaming responses. It is ideal for real-time updates, LLM streaming, and long-running operations.
+
+ ### Basic Streaming Usage
+
+ ```typescript
+ import { expressStreamHandler } from "jaypie";
+ import type { Request, Response } from "express";
+
+ const streamRoute = expressStreamHandler(async (req: Request, res: Response) => {
+   // Write SSE events directly to response
+   res.write("event: message\ndata: {\"text\": \"Hello\"}\n\n");
+   res.write("event: message\ndata: {\"text\": \"World\"}\n\n");
+   // Handler automatically ends the stream
+ });
+
+ app.get("/stream", streamRoute);
+ ```
+
+ ### Streaming with LLM
+
+ ```typescript
+ import { expressStreamHandler, Llm, createExpressStream } from "jaypie";
+ import type { Request, Response } from "express";
+
+ const llmStreamRoute = expressStreamHandler(async (req: Request, res: Response) => {
+   const llm = new Llm("anthropic");
+   const stream = llm.stream(req.body.prompt);
+
+   // createExpressStream pipes LLM chunks as SSE events
+   await createExpressStream(stream, res);
+ });
+
+ app.post("/chat", llmStreamRoute);
+ ```
+
+ ### Stream Handler Options
+
+ `expressStreamHandler` supports the same lifecycle options as `expressHandler`:
+
+ ```typescript
+ import { expressStreamHandler } from "jaypie";
+ import type { ExpressStreamHandlerOptions } from "jaypie";
+
+ const options: ExpressStreamHandlerOptions = {
+   name: "myStreamHandler", // Handler name for logging
+   contentType: "text/event-stream", // Default SSE content type
+   chaos: "low", // Chaos testing level
+   secrets: ["API_KEY"], // Secrets to load
+   setup: [], // Setup function(s)
+   teardown: [], // Teardown function(s)
+   validate: [], // Validation function(s)
+   locals: {}, // Values to set on req.locals
+   unavailable: false, // Return 503 if true
+ };
+
+ const handler = expressStreamHandler(async (req, res) => {
+   // Streaming logic
+ }, options);
+ ```
+
+ ### SSE Headers
+
+ `expressStreamHandler` automatically sets SSE headers:
+ - `Content-Type: text/event-stream`
+ - `Cache-Control: no-cache`
+ - `Connection: keep-alive`
+ - `X-Accel-Buffering: no` (disables nginx buffering)
+
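+ For reference, a browser client can consume this stream with the standard `EventSource` API. The sketch below is illustrative only and assumes the `/stream` route from the basic example above; it is not part of the package API.
+
+ ```typescript
+ // Illustrative client for the /stream route above
+ const source = new EventSource("/stream");
+
+ // "message" matches the event name the handler writes with res.write()
+ source.addEventListener("message", (event) => {
+   const payload = JSON.parse(event.data); // e.g. { text: "Hello" }
+   console.log(payload.text);
+ });
+
+ // The server ends the stream when the handler returns; closing here prevents automatic reconnects
+ source.onerror = () => source.close();
+ ```
+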
+ ### Error Handling in Streams
+
+ Errors are formatted as SSE error events:
+
+ ```typescript
+ // Jaypie errors and unhandled errors are written as:
+ // event: error
+ // data: {"errors":[{"status":500,"title":"Internal Error"}]}
+ ```
+
+ ### TypeScript Types
+
+ ```typescript
+ import type {
+   ExpressStreamHandlerOptions,
+   ExpressStreamHandlerLocals,
+   JaypieStreamHandlerSetup,
+   JaypieStreamHandlerTeardown,
+   JaypieStreamHandlerValidate,
+ } from "jaypie";
+ ```
@@ -361,4 +361,137 @@ Deploy using AWS CDK or other deployment tool. The Lambda handler will be refere
  - Use double quotes, trailing commas, semicolons
  - Alphabetize imports and properties
  - Define constants for hard-coded values at file top
- - Never throw vanilla Error; use errors from `@jaypie/errors`
+ - Never throw vanilla Error; use errors from `@jaypie/errors`
+
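+ As an illustration of the last rule, a minimal sketch with a hypothetical `requirePrompt` helper, assuming `BadRequestError` is among the classes exported by `@jaypie/errors` (only the package name appears in this diff):
+
+ ```typescript
+ // Sketch only: BadRequestError is assumed to be exported by @jaypie/errors
+ import { BadRequestError } from "@jaypie/errors";
+
+ // Hypothetical helper; throws a Jaypie error rather than a vanilla Error
+ function requirePrompt(prompt?: string): string {
+   if (!prompt) {
+     throw new BadRequestError();
+   }
+   return prompt;
+ }
+ ```
+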
366
+ ## Streaming Lambda Functions
367
+
368
+ Use `lambdaStreamHandler` for AWS Lambda Response Streaming. This enables real-time streaming responses for LLM interactions, large file processing, and SSE endpoints.
369
+
370
+ ### Lambda Streaming Setup
371
+
372
+ Create a streaming Lambda handler with `awslambda.streamifyResponse`:
373
+
374
+ ```typescript
375
+ // src/streamWorker.ts
376
+ import { log } from "@jaypie/core";
377
+ import { lambdaStreamHandler, createLambdaStream, Llm } from "jaypie";
378
+ import type { StreamHandlerContext } from "@jaypie/lambda";
379
+
380
+ export interface StreamWorkerEvent {
381
+ prompt?: string;
382
+ }
383
+
384
+ const streamWorker = lambdaStreamHandler(
385
+ async (event: StreamWorkerEvent, context: StreamHandlerContext) => {
386
+ log.trace("streamWorker: start");
387
+
388
+ const llm = new Llm("anthropic");
389
+ const stream = llm.stream(event.prompt || "Hello");
390
+
391
+ // createLambdaStream pipes LLM chunks as SSE events
392
+ await createLambdaStream(stream, context.responseStream);
393
+
394
+ log.trace("streamWorker: complete");
395
+ },
396
+ {
397
+ name: "streamWorker",
398
+ contentType: "text/event-stream",
399
+ }
400
+ );
401
+
402
+ // Wrap with AWS streamifyResponse
403
+ declare const awslambda: { streamifyResponse: <T>(handler: T) => T };
404
+ export const handler = awslambda.streamifyResponse(streamWorker);
405
+ ```
406
+
+ ### Manual Stream Writing
+
+ Write directly to the response stream for custom SSE events:
+
+ ```typescript
+ import { lambdaStreamHandler } from "jaypie";
+ import type { StreamHandlerContext } from "@jaypie/lambda";
+
+ const manualStreamHandler = lambdaStreamHandler(
+   async (event: unknown, context: StreamHandlerContext) => {
+     const { responseStream } = context;
+
+     // Write SSE events directly
+     responseStream.write("event: start\ndata: {\"status\": \"processing\"}\n\n");
+
+     // Process data in chunks (items and process() stand in for your own data and logic)
+     for (const item of items) {
+       const result = await process(item);
+       responseStream.write(`event: data\ndata: ${JSON.stringify(result)}\n\n`);
+     }
+
+     responseStream.write("event: done\ndata: {\"status\": \"complete\"}\n\n");
+     // Handler automatically calls responseStream.end()
+   },
+   {
+     name: "manualStream",
+   }
+ );
+ ```
+
+ ### Stream Handler Options
+
+ ```typescript
+ import type { LambdaStreamHandlerOptions } from "@jaypie/lambda";
+
+ const options: LambdaStreamHandlerOptions = {
+   name: "myStreamHandler", // Handler name for logging
+   contentType: "text/event-stream", // Response content type (default)
+   chaos: "low", // Chaos testing level
+   secrets: ["API_KEY"], // AWS secrets to load into process.env
+   setup: [], // Setup function(s)
+   teardown: [], // Teardown function(s)
+   validate: [], // Validation function(s)
+   throw: false, // Re-throw errors instead of SSE error
+   unavailable: false, // Return 503 if true
+ };
+ ```
+
+ ### Stream Handler Types
+
+ ```typescript
+ import type {
+   LambdaStreamHandlerOptions,
+   StreamHandlerContext,
+   ResponseStream,
+   AwsStreamingHandler,
+ } from "@jaypie/lambda";
+ ```
+
+ ### CDK Configuration for Streaming
+
+ Enable Lambda Response Streaming via a Function URL in CDK:
+
+ ```typescript
+ import { JaypieLambda } from "@jaypie/constructs";
+ import { Duration } from "aws-cdk-lib";
+ import { FunctionUrlAuthType, InvokeMode } from "aws-cdk-lib/aws-lambda";
+
+ const streamingLambda = new JaypieLambda(this, "StreamingFunction", {
+   code: "dist",
+   handler: "streamWorker.handler",
+   timeout: Duration.minutes(5),
+ });
+
+ // Add a Function URL with streaming enabled
+ streamingLambda.addFunctionUrl({
+   authType: FunctionUrlAuthType.NONE, // or AWS_IAM for auth
+   invokeMode: InvokeMode.RESPONSE_STREAM,
+ });
+ ```
+
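+ Once deployed, any HTTP client that reads the response body incrementally can consume the stream. The sketch below is illustrative only, using the built-in `fetch` of Node 18+ against a placeholder Function URL:
+
+ ```typescript
+ // Placeholder URL; use the Function URL output by your deployment
+ const FUNCTION_URL = "https://abc123.lambda-url.us-east-1.on.aws/";
+
+ const response = await fetch(FUNCTION_URL);
+ const reader = response.body!.getReader();
+ const decoder = new TextDecoder();
+
+ // Print SSE frames as they arrive instead of waiting for the full response
+ for (;;) {
+   const { done, value } = await reader.read();
+   if (done) break;
+   process.stdout.write(decoder.decode(value, { stream: true }));
+ }
+ ```
+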
+ ### Error Handling in Streams
+
+ Errors are formatted as SSE error events:
+
+ ```typescript
+ // Jaypie errors written as:
+ // event: error
+ // data: {"errors":[{"status":500,"title":"Internal Error"}]}
+ ```
+
+ Set `throw: true` to re-throw errors instead of writing them to the stream.
@@ -312,6 +312,60 @@ type LlmStreamChunk =
  | LlmStreamChunkError; // { type: "error", error: { status, title, detail? } }
  ```

+ ### Streaming to Express
+
+ Use `createExpressStream` to pipe LLM streams to Express responses:
+
+ ```javascript
+ import { expressStreamHandler, Llm, createExpressStream } from "jaypie";
+
+ const chatRoute = expressStreamHandler(async (req, res) => {
+   const llm = new Llm("anthropic");
+   const stream = llm.stream(req.body.prompt);
+   await createExpressStream(stream, res);
+ });
+
+ app.post("/chat", chatRoute);
+ ```
+
+ ### Streaming to Lambda
+
+ Use `createLambdaStream` with Lambda Response Streaming:
+
+ ```javascript
+ import { lambdaStreamHandler, Llm, createLambdaStream } from "jaypie";
+
+ // awslambda is a global provided by the Lambda Node.js runtime
+ const handler = awslambda.streamifyResponse(
+   lambdaStreamHandler(async (event, context) => {
+     const llm = new Llm("openai");
+     const stream = llm.stream(event.prompt);
+     await createLambdaStream(stream, context.responseStream);
+   })
+ );
+ ```
+
+ ### JaypieStream Wrapper
+
+ Use `JaypieStream` or `createJaypieStream` for fluent piping:
+
+ ```javascript
+ import { createJaypieStream, Llm } from "jaypie";
+
+ const llm = new Llm("gemini");
+ const stream = createJaypieStream(llm.stream("Hello"));
+
+ // Pipe to Express
+ await stream.toExpress(res);
+
+ // Or pipe to Lambda
+ await stream.toLambda(responseStream);
+
+ // Or iterate manually
+ for await (const chunk of stream) {
+   console.log(chunk);
+ }
+ ```
+
  ## Hooks

  Use hooks to intercept and observe the LLM lifecycle:
@@ -365,6 +419,29 @@ const result = await llm.operate("Roll dice and check weather", {
  });
  ```

+ ### Zod Schema Support
+
+ Tool parameters can be defined using Zod schemas instead of JSON Schema:
+
+ ```javascript
+ import { z } from "zod/v4";
+ import { Llm, Toolkit } from "jaypie";
+
+ const weatherTool = {
+   name: "get_weather",
+   description: "Get weather for a city",
+   parameters: z.object({
+     city: z.string().describe("City name"),
+     unit: z.enum(["celsius", "fahrenheit"]),
+   }),
+   type: "function",
+   call: async ({ city, unit }) => ({ city, temp: 72, unit }),
+ };
+
+ const toolkit = new Toolkit([weatherTool]);
+ // Zod schemas are automatically converted to JSON Schema
+ ```
+
  ## Footnotes

  Llm.operate(input, options)
@@ -16,10 +16,12 @@ Create and integrate tools that enable LLMs to perform specific functions beyond
  Implement the `LlmTool` interface:

  ```typescript
+ import { z } from "zod/v4";
+
  interface LlmTool {
    description: string;
    name: string;
-   parameters: JsonObject;
+   parameters: JsonObject | z.ZodType; // JSON Schema or Zod schema
    type: "function" | string;
    call: (args?: JsonObject) => Promise<AnyValue> | AnyValue;
  }
@@ -28,11 +30,11 @@ interface LlmTool {
  Properties:
  - `description`: Clear explanation of tool functionality
  - `name`: Unique identifier
- - `parameters`: JSON Schema defining input parameters
+ - `parameters`: JSON Schema or Zod schema defining input parameters
  - `type`: Usually "function" (OpenAI convention)
  - `call`: Implementation function executed on invocation

- ## Example: Dice Roller
+ ## Example: Dice Roller (JSON Schema)

  ```typescript
  import { LlmTool } from "../types/LlmTool.interface.js";
@@ -84,6 +86,27 @@ export const roll: LlmTool = {
  };
  ```

+ ## Example: Weather Tool (Zod Schema)
+
+ ```typescript
+ import { z } from "zod/v4";
+ import { LlmTool } from "jaypie";
+
+ export const getWeather: LlmTool = {
+   description: "Get current weather for a city",
+   name: "get_weather",
+   parameters: z.object({
+     city: z.string().describe("City name"),
+     unit: z.enum(["celsius", "fahrenheit"]).describe("Temperature unit"),
+   }),
+   type: "function",
+   call: async ({ city, unit }) => {
+     // Implementation here
+     return { city, temperature: 72, unit };
+   },
+ };
+ ```
+
  ## Best Practices

  ### Input Validation