@n8n/chat 0.46.0 → 0.48.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -14,6 +14,9 @@ Open the **Chat Trigger** node and add your domain to the **Allowed Origins (COR
14
14
 
15
15
  [See example workflow](https://github.com/n8n-io/n8n/blob/master/packages/%40n8n/chat/resources/workflow.json)
16
16
 
17
+ To use streaming responses, you need to set the **Response Mode** to **Streaming response** in the **Chat Trigger** node.
18
+ [See example workflow with streaming](https://github.com/n8n-io/n8n/blob/master/packages/%40n8n/chat/resources/workflow-streaming.json)
19
+
17
20
  > Make sure the workflow is **Active.**
18
21
 
19
22
  ### How it works
@@ -129,6 +132,7 @@ createChat({
129
132
  inputPlaceholder: 'Type your question..',
130
133
  },
131
134
  },
135
+ enableStreaming: false,
132
136
  });
133
137
  ```
134
138
 
@@ -175,7 +179,7 @@ createChat({
175
179
  ### `loadPreviousSession`
176
180
  - **Type**: `boolean`
177
181
  - **Default**: `true`
178
- - **Description**: Whether to load previous messages (chat context).
182
+ - **Description**: Whether to load previous messages (chat context).
179
183
 
180
184
  ### `defaultLanguage`
181
185
  - **Type**: `string`
@@ -200,6 +204,11 @@ createChat({
200
204
  - **Default**: `''`
201
205
  - **Description**: A comma-separated list of allowed MIME types for file uploads. Only applicable if `allowFileUploads` is set to `true`. If left empty, all file types are allowed. For example: `'image/*,application/pdf'`.
202
206
 
207
+ ### `enableStreaming`
208
+ - **Type**: `boolean`
209
+ - **Default**: `false`
210
+ - **Description**: Whether to enable streaming responses from the n8n workflow. If set to `true`, the chat displays responses as they are generated, providing a more interactive experience. For this to work, the workflow must also be configured to return streaming responses.
211
+
203
212
  ## Customization
204
213
  The Chat window is entirely customizable using CSS variables.
205
214
 
@@ -0,0 +1 @@
1
+ export {};
@@ -2,3 +2,12 @@ import { LoadPreviousSessionResponse, SendMessageResponse } from '../../types';
2
2
  export declare function createFetchResponse<T>(data: T): () => Promise<Response>;
3
3
  export declare const createGetLatestMessagesResponse: (data?: LoadPreviousSessionResponse["data"]) => LoadPreviousSessionResponse;
4
4
  export declare const createSendMessageResponse: (output: SendMessageResponse["output"]) => SendMessageResponse;
5
+ export declare function createMockStreamingFetchResponse(chunks: Array<{
6
+ type: string;
7
+ content?: string;
8
+ metadata?: {
9
+ nodeId: string;
10
+ nodeName: string;
11
+ timestamp: number;
12
+ };
13
+ }>): () => Promise<Response>;
@@ -0,0 +1 @@
1
+ export {};
@@ -0,0 +1 @@
1
+ export {};
@@ -1,3 +1,9 @@
1
1
  import { ChatOptions, LoadPreviousSessionResponse, SendMessageResponse } from '../types';
2
2
  export declare function loadPreviousSession(sessionId: string, options: ChatOptions): Promise<LoadPreviousSessionResponse>;
3
3
  export declare function sendMessage(message: string, files: File[], sessionId: string, options: ChatOptions): Promise<SendMessageResponse>;
4
+ export interface StreamingEventHandlers {
5
+ onBeginMessage: (nodeId: string, runIndex?: number) => void;
6
+ onChunk: (chunk: string, nodeId?: string, runIndex?: number) => void;
7
+ onEndMessage: (nodeId: string, runIndex?: number) => void;
8
+ }
9
+ export declare function sendMessageStreaming(message: string, files: File[], sessionId: string, options: ChatOptions, handlers: StreamingEventHandlers): Promise<void>;