ai-streamui 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

package/dist/index.d.ts ADDED
@@ -0,0 +1,136 @@
+ import { UIMessage, ChatTransport, ChatRequestOptions, UIMessageChunk, ModelMessage } from 'ai';
+ import * as _json_render_core from '@json-render/core';
+ import { Spec, SchemaDefinition, InferCatalogInput, Schema, Catalog, UserPromptOptions } from '@json-render/core';
+ export { Catalog, Schema, Spec, defineSchema } from '@json-render/core';
+
+ interface UITextData {
+   segmentId: string;
+   text: string;
+ }
+ /**
+  * UIDataTypes for useChat generic parameter.
+  * Enables type-safe access to UI data in message.parts.
+  */
+ interface UIDataTypes {
+   ui: Spec;
+   "ui-text": UITextData;
+   [key: string]: unknown;
+ }
+
+ /**
+  * Options for UITransport.
+  */
+ interface UITransportOptions {
+   /** API endpoint URL. Defaults to '/api/chat'. */
+   api?: string;
+   /** Additional headers for requests. */
+   headers?: Record<string, string> | (() => Record<string, string> | Promise<Record<string, string>>);
+   /** Additional body properties for requests. */
+   body?: Record<string, unknown>;
+   /** Request credentials mode. */
+   credentials?: RequestCredentials;
+   /** Custom fetch implementation. */
+   fetch?: typeof fetch;
+ }
+ /**
+  * UITransport - Custom ChatTransport for streaming UI with SpecStream patches.
+  *
+  * Extends DefaultChatTransport behavior:
+  * - Parses UIMessageStream (JSON event stream)
+  * - Extracts SpecStream patches from text-delta chunks
+  * - Builds Spec incrementally and emits `data-ui` chunks
+  * - Passes through all other chunk types (tool, reasoning, etc.)
+  */
+ declare class UITransport<UI_MESSAGE extends UIMessage = UIMessage> implements ChatTransport<UI_MESSAGE> {
+   private api;
+   private headers?;
+   private body?;
+   private credentials?;
+   private customFetch?;
+   constructor(options?: UITransportOptions);
+   sendMessages(options: {
+     trigger: "submit-message" | "regenerate-message";
+     chatId: string;
+     messageId: string | undefined;
+     messages: UI_MESSAGE[];
+     abortSignal: AbortSignal | undefined;
+   } & ChatRequestOptions): Promise<ReadableStream<UIMessageChunk<unknown, UIDataTypes>>>;
+   /**
+    * Reconnecting to an existing stream is not supported.
+    */
+   reconnectToStream(_options: {
+     chatId: string;
+   } & ChatRequestOptions): Promise<ReadableStream<UIMessageChunk<unknown, UIDataTypes>> | null>;
+   /**
+    * Process the UIMessageStream response:
+    * - Parse JSON event stream (same format as DefaultChatTransport)
+    * - Extract SpecStream patches from text-delta chunks
+    * - Build Spec incrementally and emit data-ui chunks
+    * - Pass through all other chunk types unchanged
+    */
+   private processResponseStream;
+ }
+
+ /**
+  * Convert UI messages to model messages.
+  * Automatically converts data-ui parts to text descriptions for LLM context.
+  *
+  * @example
+  * ```ts
+  * const modelMessages = await convertUIMessagesToModelMessages(messages);
+  * const result = streamText({ model, messages: modelMessages });
+  * ```
+  */
+ declare function convertUIMessagesToModelMessages(messages: UIMessage<unknown, UIDataTypes>[]): Promise<ModelMessage[]>;
+
+ /**
+  * Default schema for streamui - Element Tree format.
+  *
+  * Defines the Spec shape (what AI generates) and the Catalog shape
+  * (what components provide). Compatible with @json-render/react's Renderer.
+  *
+  * Spec structure:
+  * ```json
+  * {
+  *   "root": "card-1",
+  *   "state": { "count": 0 },
+  *   "elements": {
+  *     "card-1": { "type": "Card", "props": {...}, "children": [...], "on": {...}, "repeat": {...} }
+  *   }
+  * }
+  * ```
+  */
+ declare const defaultSchema: Schema<{
+   spec: _json_render_core.SchemaType<"object", {
+     root: _json_render_core.SchemaType<"string", unknown>;
+     elements: _json_render_core.SchemaType<"record", _json_render_core.SchemaType<"object", {
+       type: _json_render_core.SchemaType<"ref", string>;
+       props: _json_render_core.SchemaType<"propsOf", string>;
+       children: _json_render_core.SchemaType<"array", _json_render_core.SchemaType<"string", unknown>>;
+       visible: _json_render_core.SchemaType<"any", unknown>;
+       on: _json_render_core.SchemaType<"any", unknown>;
+       repeat: _json_render_core.SchemaType<"any", unknown>;
+     }>>;
+     state: _json_render_core.SchemaType<"any", unknown>;
+   }>;
+   catalog: _json_render_core.SchemaType<"object", {
+     components: _json_render_core.SchemaType<"map", {
+       props: _json_render_core.SchemaType<"zod", unknown>;
+       slots: _json_render_core.SchemaType<"array", _json_render_core.SchemaType<"string", unknown>>;
+       description: _json_render_core.SchemaType<"string", unknown>;
+     }>;
+     actions: _json_render_core.SchemaType<"map", {
+       params: _json_render_core.SchemaType<"zod", unknown>;
+       description: _json_render_core.SchemaType<"string", unknown>;
+     }>;
+   }>;
+ }>;
+ declare function createCatalog<TDef extends SchemaDefinition, TCatalog extends InferCatalogInput<TDef["catalog"]>>(schema: Schema<TDef>, catalog: TCatalog): Catalog<TDef, TCatalog>;
+ /**
+  * Build a user prompt with streamui-specific rules appended.
+  * Wraps `@json-render/core`'s `buildUserPrompt` and adds transport
+  * instructions (no code fences, raw JSONL output).
+  */
+ declare function createUserPrompt(options: UserPromptOptions): string;
+
+ export { type UIDataTypes, UITransport, type UITransportOptions, convertUIMessagesToModelMessages, createCatalog, createUserPrompt, defaultSchema };
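
For context on how these declarations are meant to be consumed, here is a minimal client-side sketch. It assumes AI SDK's `useChat` hook from `@ai-sdk/react` (a devDependency in the package manifest below) accepts this transport via its `transport` option; the component, prompt text, and `<pre>`/`<p>` rendering are illustrative, and in practice the streamed `Spec` would typically be handed to `@json-render/react`'s Renderer.

```tsx
"use client";

import { useChat } from "@ai-sdk/react";
import type { UIMessage } from "ai";
import { UITransport, type UIDataTypes } from "ai-streamui";

// One shared transport instance; defaults to POST /api/chat (see UITransportOptions).
const transport = new UITransport();

export function Chat() {
  // The UIDataTypes generic gives typed access to data-ui / data-ui-text parts.
  const { messages, sendMessage } = useChat<UIMessage<unknown, UIDataTypes>>({ transport });

  return (
    <div>
      {messages.map((message) =>
        message.parts.map((part, i) => {
          if (part.type === "data-ui") {
            // part.data is the incrementally built Spec emitted by UITransport.
            return <pre key={i}>{JSON.stringify(part.data, null, 2)}</pre>;
          }
          if (part.type === "data-ui-text") {
            // Plain-text segments that surrounded the JSONL patches.
            return <p key={i}>{part.data.text}</p>;
          }
          return null;
        })
      )}
      <button onClick={() => sendMessage({ text: "Build a signup card" })}>Send</button>
    </div>
  );
}
```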
package/dist/index.js ADDED
@@ -0,0 +1,311 @@
+ // src/spec-stream.ts
+ import {
+   applySpecStreamPatch
+ } from "@json-render/core";
+ function parseSpecLine(line) {
+   const trimmed = line.trim();
+   if (!trimmed) return null;
+   try {
+     const parsed = JSON.parse(trimmed);
+     if (parsed.op && parsed.path) {
+       return { type: "patch", data: parsed };
+     }
+     return { type: "text", content: line };
+   } catch {
+     return { type: "text", content: line };
+   }
+ }
+ var SpecStreamBuffer = class {
+   buffer = "";
+   /**
+    * Process a chunk of text and return parsed lines.
+    * Incomplete lines are buffered for the next chunk.
+    */
+   processChunk(chunk) {
+     this.buffer += chunk;
+     const lines = this.buffer.split("\n");
+     this.buffer = lines.pop() || "";
+     const results = [];
+     for (const line of lines) {
+       const parsed = parseSpecLine(line);
+       if (parsed) {
+         results.push(parsed);
+       }
+     }
+     return results;
+   }
+   /**
+    * Flush any remaining buffered content.
+    * Call this when the stream ends.
+    */
+   flush() {
+     if (!this.buffer.trim()) return null;
+     const result = parseSpecLine(this.buffer);
+     this.buffer = "";
+     return result;
+   }
+ };
+ function applySpecPatch(spec, patch) {
+   const nextSpec = JSON.parse(JSON.stringify(spec));
+   applySpecStreamPatch(
+     nextSpec,
+     patch
+   );
+   return nextSpec;
+ }
+
+ // src/transport.ts
+ var UITransport = class {
+   api;
+   headers;
+   body;
+   credentials;
+   customFetch;
+   constructor(options = {}) {
+     this.api = options.api ?? "/api/chat";
+     this.headers = options.headers;
+     this.body = options.body;
+     this.credentials = options.credentials;
+     this.customFetch = options.fetch;
+   }
+   async sendMessages(options) {
+     const resolvedHeaders = typeof this.headers === "function" ? await this.headers() : this.headers;
+     const optionHeaders = options.headers instanceof Headers ? Object.fromEntries(options.headers.entries()) : options.headers ?? {};
+     const fetchFn = this.customFetch ?? globalThis.fetch;
+     const response = await fetchFn(this.api, {
+       method: "POST",
+       headers: {
+         "Content-Type": "application/json",
+         ...resolvedHeaders,
+         ...optionHeaders
+       },
+       body: JSON.stringify({
+         ...this.body,
+         ...options.body,
+         id: options.chatId,
+         messages: options.messages,
+         trigger: options.trigger,
+         messageId: options.messageId
+       }),
+       credentials: this.credentials,
+       signal: options.abortSignal
+     });
+     if (!response.ok) {
+       throw new Error(
+         await response.text() || "Failed to fetch the chat response."
+       );
+     }
+     if (!response.body) {
+       throw new Error("The response body is empty.");
+     }
+     return this.processResponseStream(response.body);
+   }
+   /**
+    * Reconnecting to an existing stream is not supported.
+    */
+   async reconnectToStream(_options) {
+     return null;
+   }
+   /**
+    * Process the UIMessageStream response:
+    * - Parse JSON event stream (same format as DefaultChatTransport)
+    * - Extract SpecStream patches from text-delta chunks
+    * - Build Spec incrementally and emit data-ui chunks
+    * - Pass through all other chunk types unchanged
+    */
+   processResponseStream(stream) {
+     const specBuffer = new SpecStreamBuffer();
+     let currentSpec = { root: "", elements: {} };
+     let segmentIndex = 0;
+     let currentSegmentText = "";
+     const textDecoder = new TextDecoder();
+     return new ReadableStream({
+       async start(controller) {
+         const reader = stream.getReader();
+         let buffer = "";
+         const emitUIText = (text) => {
+           currentSegmentText += text;
+           controller.enqueue({
+             type: "data-ui-text",
+             id: `ui-text-${segmentIndex}`,
+             data: {
+               segmentId: `seg-${segmentIndex}`,
+               text: currentSegmentText
+             }
+           });
+         };
+         const splitTextSegment = () => {
+           if (!currentSegmentText) return;
+           segmentIndex += 1;
+           currentSegmentText = "";
+         };
+         const processLine = (line) => {
+           if (!line.trim()) return;
+           let jsonLine = line.trim();
+           if (jsonLine.startsWith("data:")) {
+             jsonLine = jsonLine.slice(5).trimStart();
+           }
+           if (jsonLine === "[DONE]") return;
+           try {
+             const chunk = JSON.parse(jsonLine);
+             if (chunk.type === "text-delta") {
+               const parsed = specBuffer.processChunk(chunk.delta);
+               for (const item of parsed) {
+                 if (item.type === "patch") {
+                   splitTextSegment();
+                   currentSpec = applySpecPatch(
+                     currentSpec,
+                     item.data
+                   );
+                   controller.enqueue({
+                     type: "data-ui",
+                     id: "ui",
+                     data: currentSpec
+                   });
+                 } else {
+                   emitUIText(item.content + "\n");
+                 }
+               }
+             } else if (chunk.type === "text-end") {
+               const remaining = specBuffer.flush();
+               if (remaining) {
+                 if (remaining.type === "patch") {
+                   splitTextSegment();
+                   currentSpec = applySpecPatch(
+                     currentSpec,
+                     remaining.data
+                   );
+                   controller.enqueue({
+                     type: "data-ui",
+                     id: "ui",
+                     data: currentSpec
+                   });
+                 } else {
+                   emitUIText(remaining.content);
+                 }
+               }
+               splitTextSegment();
+             } else if (chunk.type === "text-start") {
+               return;
+             } else {
+               controller.enqueue(chunk);
+             }
+           } catch {
+           }
+         };
+         try {
+           while (true) {
+             const { done, value } = await reader.read();
+             if (done) {
+               buffer += textDecoder.decode();
+               break;
+             }
+             buffer += textDecoder.decode(value, { stream: true });
+             const lines = buffer.split("\n");
+             buffer = lines.pop() || "";
+             for (const line of lines) {
+               processLine(line);
+             }
+           }
+           if (buffer.trim()) {
+             processLine(buffer);
+           }
+           controller.close();
+         } catch (error) {
+           controller.error(error);
+         } finally {
+           reader.releaseLock();
+         }
+       }
+     });
+   }
+ };
+
+ // src/utils.ts
+ import { convertToModelMessages } from "ai";
+ function serializeSpec(spec) {
+   return JSON.stringify(spec);
+ }
+ async function convertUIMessagesToModelMessages(messages) {
+   return convertToModelMessages(messages, {
+     convertDataPart: (part) => {
+       if (part.type === "data-ui-text" && part.data) {
+         const text = part.data.text;
+         if (typeof text === "string" && text.length > 0) {
+           return { type: "text", text };
+         }
+       }
+       if (part.id === "ui" && part.data) {
+         const specJson = serializeSpec(part.data);
+         return { type: "text", text: `[Generated UI Spec JSON]
+ ${specJson}` };
+       }
+       return void 0;
+     }
+   });
+ }
+
+ // src/schema.ts
+ import {
+   defineSchema,
+   defineCatalog,
+   buildUserPrompt
+ } from "@json-render/core";
+ var defaultSchema = defineSchema((s) => ({
+   spec: s.object({
+     root: s.string(),
+     elements: s.record(
+       s.object({
+         type: s.ref("catalog.components"),
+         props: s.propsOf("catalog.components"),
+         children: s.array(s.string()),
+         visible: s.any(),
+         on: s.any(),
+         repeat: s.any()
+       })
+     ),
+     state: s.any()
+   }),
+   catalog: s.object({
+     components: s.map({
+       props: s.zod(),
+       slots: s.array(s.string()),
+       description: s.string()
+     }),
+     actions: s.map({
+       params: s.zod(),
+       description: s.string()
+     })
+   })
+ }));
+ var STREAMUI_RULES = [
+   "Never wrap output in code fences (``` or ```json). Output raw JSONL lines directly.",
+   "You may include plain text lines before, after, or between JSONL patch lines."
+ ];
+ function createCatalog(schema, catalog) {
+   const base = defineCatalog(schema, catalog);
+   const originalPrompt = base.prompt.bind(base);
+   base.prompt = (options) => {
+     return originalPrompt({
+       ...options,
+       customRules: [...STREAMUI_RULES, ...options?.customRules ?? []]
+     });
+   };
+   return base;
+ }
+ function createUserPrompt(options) {
+   const base = buildUserPrompt(options);
+   return base + "\n\n" + STREAMUI_RULES.join("\n");
+ }
+
+ // src/index.ts
+ import { defineSchema as defineSchema2 } from "@json-render/core";
+ export {
+   UITransport,
+   convertUIMessagesToModelMessages,
+   createCatalog,
+   createUserPrompt,
+   defaultSchema,
+   defineSchema2 as defineSchema
+ };
+ //# sourceMappingURL=index.js.map
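
To make the parsing above concrete: `processResponseStream` expects each `text-delta` chunk to carry newline-delimited lines that are either plain prose or JSON patches with `op` and `path`, which are applied through `@json-render/core`'s `applySpecStreamPatch`. The sketch below shows what such assistant output could look like; the patch payloads (the `value` fields and element ids) are illustrative, since the full SpecStream patch format is defined in `@json-render/core` rather than in this file.

```ts
// Illustrative assistant output as it would arrive across text-delta chunks.
// Lines that parse as JSON with `op` and `path` become cumulative `data-ui` chunks;
// every other line is forwarded as a `data-ui-text` segment.
const assistantText = [
  "Here is a simple card:",
  '{"op":"add","path":"/elements/card-1","value":{"type":"Card","props":{"title":"Hello"},"children":[]}}',
  '{"op":"replace","path":"/root","value":"card-1"}',
  "Let me know if you would like a button added.",
].join("\n");
```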
package/dist/index.js.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"sources":["../src/spec-stream.ts","../src/transport.ts","../src/utils.ts","../src/schema.ts","../src/index.ts"],"sourcesContent":["import {\n applySpecStreamPatch,\n type Spec,\n type SpecStreamLine,\n} from \"@json-render/core\";\nimport type { ParsedLine, SpecPatch } from \"./types\";\n\n// ---------------------------------------------------------------------------\n// SpecStream Buffer - Parse streaming text into patches and text lines\n// ---------------------------------------------------------------------------\n\n/**\n * Parse a single line as either a spec patch or text.\n * Preserves original whitespace for text content.\n */\nfunction parseSpecLine(line: string): ParsedLine | null {\n const trimmed = line.trim();\n if (!trimmed) return null;\n\n try {\n const parsed = JSON.parse(trimmed);\n if (parsed.op && parsed.path) {\n return { type: \"patch\", data: parsed as SpecPatch };\n }\n // Valid JSON but not a patch - return original line content\n return { type: \"text\", content: line };\n } catch {\n // Not JSON - return original line content (preserving whitespace)\n return { type: \"text\", content: line };\n }\n}\n\n/**\n * Buffer for processing SpecStream content.\n * Handles incomplete lines across chunk boundaries.\n */\nexport class SpecStreamBuffer {\n private buffer = \"\";\n\n /**\n * Process a chunk of text and return parsed lines.\n * Incomplete lines are buffered for the next chunk.\n */\n processChunk(chunk: string): ParsedLine[] {\n this.buffer += chunk;\n\n const lines = this.buffer.split(\"\\n\");\n this.buffer = lines.pop() || \"\";\n\n const results: ParsedLine[] = [];\n for (const line of lines) {\n const parsed = parseSpecLine(line);\n if (parsed) {\n results.push(parsed);\n }\n }\n\n return results;\n }\n\n /**\n * Flush any remaining buffered content.\n * Call this when the stream ends.\n */\n flush(): ParsedLine | null {\n if (!this.buffer.trim()) return null;\n const result = parseSpecLine(this.buffer);\n this.buffer = \"\";\n return result;\n }\n}\n\n// ---------------------------------------------------------------------------\n// Spec Patch Application\n// ---------------------------------------------------------------------------\n\n/**\n * Apply a single patch to a Spec. 
Returns a new Spec (immutable).\n */\nexport function applySpecPatch(spec: Spec, patch: SpecPatch): Spec {\n // json-render's applySpecStreamPatch mutates the target object.\n // Clone first to preserve streamui's immutable update behavior.\n const nextSpec = JSON.parse(JSON.stringify(spec)) as Spec;\n applySpecStreamPatch(\n nextSpec as unknown as Record<string, unknown>,\n patch as unknown as SpecStreamLine,\n );\n return nextSpec;\n}\n\n/**\n * Build a Spec from an array of patches.\n */\nexport function buildSpec(patches: SpecPatch[]): Spec {\n let spec: Spec = { root: \"\", elements: {} };\n let firstElementKey: string | null = null;\n\n for (const patch of patches) {\n const directElementPath = /^\\/elements\\/[^/]+$/.exec(patch.path);\n if (\n directElementPath &&\n (patch.op === \"add\" || patch.op === \"replace\") &&\n !firstElementKey\n ) {\n firstElementKey = directElementPath[0].slice(\"/elements/\".length);\n }\n spec = applySpecPatch(spec, patch);\n }\n\n // Auto-infer root from first element if not set\n if (!spec.root && firstElementKey) {\n spec.root = firstElementKey;\n }\n\n return spec;\n}\n","import type {\n ChatTransport,\n UIMessage,\n ChatRequestOptions,\n UIMessageChunk,\n} from \"ai\";\nimport type { Spec } from \"@json-render/core\";\nimport { SpecStreamBuffer, applySpecPatch } from \"./spec-stream\";\nimport type { SpecPatch, UIDataTypes } from \"./types\";\n\n/**\n * Options for UITransport.\n */\nexport interface UITransportOptions {\n /** API endpoint URL. Defaults to '/api/chat'. */\n api?: string;\n /** Additional headers for requests. */\n headers?:\n | Record<string, string>\n | (() => Record<string, string> | Promise<Record<string, string>>);\n /** Additional body properties for requests. */\n body?: Record<string, unknown>;\n /** Request credentials mode. */\n credentials?: RequestCredentials;\n /** Custom fetch implementation. */\n fetch?: typeof fetch;\n}\n\n/**\n * UITransport - Custom ChatTransport for streaming UI with SpecStream patches.\n *\n * Extends DefaultChatTransport behavior:\n * - Parses UIMessageStream (JSON event stream)\n * - Extracts SpecStream patches from text-delta chunks\n * - Builds Spec incrementally and emits `data-ui` chunks\n * - Passes through all other chunk types (tool, reasoning, etc.)\n */\nexport class UITransport<\n UI_MESSAGE extends UIMessage = UIMessage,\n> implements ChatTransport<UI_MESSAGE> {\n private api: string;\n private headers?: UITransportOptions[\"headers\"];\n private body?: Record<string, unknown>;\n private credentials?: RequestCredentials;\n private customFetch?: typeof fetch;\n\n constructor(options: UITransportOptions = {}) {\n this.api = options.api ?? \"/api/chat\";\n this.headers = options.headers;\n this.body = options.body;\n this.credentials = options.credentials;\n this.customFetch = options.fetch;\n }\n\n async sendMessages(\n options: {\n trigger: \"submit-message\" | \"regenerate-message\";\n chatId: string;\n messageId: string | undefined;\n messages: UI_MESSAGE[];\n abortSignal: AbortSignal | undefined;\n } & ChatRequestOptions,\n ): Promise<ReadableStream<UIMessageChunk<unknown, UIDataTypes>>> {\n const resolvedHeaders =\n typeof this.headers === \"function\" ? await this.headers() : this.headers;\n\n const optionHeaders =\n options.headers instanceof Headers\n ? Object.fromEntries(options.headers.entries())\n : (options.headers ?? {});\n\n const fetchFn = this.customFetch ?? 
globalThis.fetch;\n\n const response = await fetchFn(this.api, {\n method: \"POST\",\n headers: {\n \"Content-Type\": \"application/json\",\n ...resolvedHeaders,\n ...optionHeaders,\n },\n body: JSON.stringify({\n ...this.body,\n ...options.body,\n id: options.chatId,\n messages: options.messages,\n trigger: options.trigger,\n messageId: options.messageId,\n }),\n credentials: this.credentials,\n signal: options.abortSignal,\n });\n\n if (!response.ok) {\n throw new Error(\n (await response.text()) || \"Failed to fetch the chat response.\",\n );\n }\n\n if (!response.body) {\n throw new Error(\"The response body is empty.\");\n }\n\n return this.processResponseStream(response.body);\n }\n\n /**\n * Reconnect to an existing stream is not supported.\n */\n async reconnectToStream(\n _options: { chatId: string } & ChatRequestOptions,\n ): Promise<ReadableStream<UIMessageChunk<unknown, UIDataTypes>> | null> {\n return null;\n }\n\n /**\n * Process the UIMessageStream response:\n * - Parse JSON event stream (same format as DefaultChatTransport)\n * - Extract SpecStream patches from text-delta chunks\n * - Build Spec incrementally and emit data-ui chunks\n * - Pass through all other chunk types unchanged\n */\n private processResponseStream(\n stream: ReadableStream<Uint8Array>,\n ): ReadableStream<UIMessageChunk<unknown, UIDataTypes>> {\n const specBuffer = new SpecStreamBuffer();\n let currentSpec: Spec = { root: \"\", elements: {} };\n let segmentIndex = 0;\n let currentSegmentText = \"\";\n\n const textDecoder = new TextDecoder();\n\n return new ReadableStream<UIMessageChunk<unknown, UIDataTypes>>({\n async start(controller) {\n const reader = stream.getReader();\n let buffer = \"\";\n const emitUIText = (text: string) => {\n currentSegmentText += text;\n controller.enqueue({\n type: \"data-ui-text\",\n id: `ui-text-${segmentIndex}`,\n data: {\n segmentId: `seg-${segmentIndex}`,\n text: currentSegmentText,\n },\n });\n };\n\n const splitTextSegment = () => {\n if (!currentSegmentText) return;\n segmentIndex += 1;\n currentSegmentText = \"\";\n };\n\n const processLine = (line: string) => {\n if (!line.trim()) return;\n\n // Handle SSE format (data:{...} or data: {...}) or raw JSON\n let jsonLine = line.trim();\n if (jsonLine.startsWith(\"data:\")) {\n jsonLine = jsonLine.slice(5).trimStart();\n }\n if (jsonLine === \"[DONE]\") return;\n\n try {\n const chunk = JSON.parse(jsonLine) as UIMessageChunk<\n unknown,\n UIDataTypes\n >;\n\n // Handle text-delta: extract patches and convert text to data-ui-text\n if (chunk.type === \"text-delta\") {\n const parsed = specBuffer.processChunk(chunk.delta);\n\n for (const item of parsed) {\n if (item.type === \"patch\") {\n splitTextSegment();\n currentSpec = applySpecPatch(\n currentSpec,\n item.data as SpecPatch,\n );\n controller.enqueue({\n type: \"data-ui\",\n id: \"ui\",\n data: currentSpec,\n });\n } else {\n // Keep newline to preserve streamed text formatting.\n emitUIText(item.content + \"\\n\");\n }\n }\n }\n // Handle text-end: flush remaining buffer content\n else if (chunk.type === \"text-end\") {\n const remaining = specBuffer.flush();\n if (remaining) {\n if (remaining.type === \"patch\") {\n splitTextSegment();\n currentSpec = applySpecPatch(\n currentSpec,\n remaining.data as SpecPatch,\n );\n controller.enqueue({\n type: \"data-ui\",\n id: \"ui\",\n data: currentSpec,\n });\n } else {\n emitUIText(remaining.content);\n }\n }\n\n splitTextSegment();\n }\n // text-start is transport-internal for us; suppress it.\n else if (chunk.type === 
\"text-start\") {\n return;\n }\n // Pass through all other chunk types unchanged\n else {\n controller.enqueue(chunk);\n }\n } catch {\n // Skip invalid JSON lines\n }\n };\n\n try {\n while (true) {\n const { done, value } = await reader.read();\n if (done) {\n buffer += textDecoder.decode();\n break;\n }\n\n buffer += textDecoder.decode(value, { stream: true });\n\n // Parse JSON events (newline-delimited JSON)\n const lines = buffer.split(\"\\n\");\n buffer = lines.pop() || \"\";\n\n for (const line of lines) {\n processLine(line);\n }\n }\n\n if (buffer.trim()) {\n processLine(buffer);\n }\n\n controller.close();\n } catch (error) {\n controller.error(error);\n } finally {\n reader.releaseLock();\n }\n },\n });\n }\n}\n","import type { UIMessage, ModelMessage } from \"ai\";\nimport { convertToModelMessages } from \"ai\";\nimport type { Spec } from \"@json-render/core\";\nimport type { UIDataTypes } from \"./types\";\n\n/**\n * Serialize a Spec as compact JSON for model context.\n * Avoids format conversion to keep model output aligned with JSON/patch generation.\n */\nfunction serializeSpec(spec: Spec): string {\n return JSON.stringify(spec);\n}\n\n/**\n * Convert UI messages to model messages.\n * Automatically converts data-ui parts to text descriptions for LLM context.\n *\n * @example\n * ```ts\n * const modelMessages = await convertUIMessagesToModelMessages(messages);\n * const result = streamText({ model, messages: modelMessages });\n * ```\n */\nexport async function convertUIMessagesToModelMessages(\n messages: UIMessage<unknown, UIDataTypes>[],\n): Promise<ModelMessage[]> {\n return convertToModelMessages(messages, {\n convertDataPart: (part) => {\n if (part.type === \"data-ui-text\" && part.data) {\n const text = (part.data as { text?: string }).text;\n if (typeof text === \"string\" && text.length > 0) {\n return { type: \"text\", text };\n }\n }\n\n if (part.id === \"ui\" && part.data) {\n const specJson = serializeSpec(part.data as Spec);\n return { type: \"text\", text: `[Generated UI Spec JSON]\\n${specJson}` };\n }\n return undefined;\n },\n });\n}\n","import {\n defineSchema,\n defineCatalog,\n buildUserPrompt,\n type Schema,\n type SchemaDefinition,\n type Catalog,\n type InferCatalogInput,\n type PromptOptions,\n type UserPromptOptions,\n} from \"@json-render/core\";\n\n/**\n * Default schema for streamui - Element Tree format.\n *\n * Defines the Spec shape (what AI generates) and the Catalog shape\n * (what components provide). 
Compatible with @json-render/react's Renderer.\n *\n * Spec structure:\n * ```json\n * {\n * \"root\": \"card-1\",\n * \"state\": { \"count\": 0 },\n * \"elements\": {\n * \"card-1\": { \"type\": \"Card\", \"props\": {...}, \"children\": [...], \"on\": {...}, \"repeat\": {...} }\n * }\n * }\n * ```\n */\nexport const defaultSchema = defineSchema((s) => ({\n spec: s.object({\n root: s.string(),\n elements: s.record(\n s.object({\n type: s.ref(\"catalog.components\"),\n props: s.propsOf(\"catalog.components\"),\n children: s.array(s.string()),\n visible: s.any(),\n on: s.any(),\n repeat: s.any(),\n }),\n ),\n state: s.any(),\n }),\n catalog: s.object({\n components: s.map({\n props: s.zod(),\n slots: s.array(s.string()),\n description: s.string(),\n }),\n actions: s.map({\n params: s.zod(),\n description: s.string(),\n }),\n }),\n}));\n\n/**\n * Create a catalog from a schema and component/action definitions.\n *\n * @example\n * ```typescript\n * import { createCatalog, defaultSchema } from 'streamui';\n * import { z } from 'zod';\n *\n * const catalog = createCatalog(defaultSchema, {\n * components: {\n * Card: {\n * props: z.object({ title: z.string().optional() }),\n * slots: ['default'],\n * description: 'Container with optional title',\n * },\n * Button: {\n * props: z.object({ label: z.string() }),\n * slots: [],\n * description: 'Clickable button',\n * },\n * },\n * });\n *\n * // Generate system prompt for AI\n * const prompt = catalog.prompt();\n *\n * // Validate AI output\n * const result = catalog.validate(spec);\n * ```\n */\n/**\n * Default rules injected into prompt() for streamui.\n * These ensure the LLM outputs raw JSONL compatible with UITransport.\n */\nconst STREAMUI_RULES: string[] = [\n \"Never wrap output in code fences (``` or ```json). Output raw JSONL lines directly.\",\n \"You may include plain text lines before, after, or between JSONL patch lines.\",\n];\n\nexport function createCatalog<\n TDef extends SchemaDefinition,\n TCatalog extends InferCatalogInput<TDef[\"catalog\"]>,\n>(schema: Schema<TDef>, catalog: TCatalog): Catalog<TDef, TCatalog> {\n const base = defineCatalog(schema, catalog);\n const originalPrompt = base.prompt.bind(base);\n\n base.prompt = (options?: PromptOptions): string => {\n return originalPrompt({\n ...options,\n customRules: [...STREAMUI_RULES, ...(options?.customRules ?? 
[])],\n });\n };\n\n return base;\n}\n\n/**\n * Build a user prompt with streamui-specific rules appended.\n * Wraps `@json-render/core`'s `buildUserPrompt` and adds transport\n * instructions (no code fences, raw JSONL output).\n */\nexport function createUserPrompt(options: UserPromptOptions): string {\n const base = buildUserPrompt(options);\n return base + \"\\n\\n\" + STREAMUI_RULES.join(\"\\n\");\n}\n","// Transport\nexport { UITransport } from \"./transport\";\nexport type { UITransportOptions } from \"./transport\";\n\n// Types for useChat generic\nexport type { UIDataTypes } from \"./types\";\n\n// Utilities\nexport { convertUIMessagesToModelMessages } from \"./utils\";\n\n// Schema / Catalog\nexport { defaultSchema, createCatalog, createUserPrompt } from \"./schema\";\nexport { defineSchema } from \"@json-render/core\";\nexport type { Catalog, Schema } from \"@json-render/core\";\n\n// Re-export Spec for convenience\nexport type { Spec } from \"@json-render/core\";\n"],"mappings":";AAAA;AAAA,EACE;AAAA,OAGK;AAWP,SAAS,cAAc,MAAiC;AACtD,QAAM,UAAU,KAAK,KAAK;AAC1B,MAAI,CAAC,QAAS,QAAO;AAErB,MAAI;AACF,UAAM,SAAS,KAAK,MAAM,OAAO;AACjC,QAAI,OAAO,MAAM,OAAO,MAAM;AAC5B,aAAO,EAAE,MAAM,SAAS,MAAM,OAAoB;AAAA,IACpD;AAEA,WAAO,EAAE,MAAM,QAAQ,SAAS,KAAK;AAAA,EACvC,QAAQ;AAEN,WAAO,EAAE,MAAM,QAAQ,SAAS,KAAK;AAAA,EACvC;AACF;AAMO,IAAM,mBAAN,MAAuB;AAAA,EACpB,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA,EAMjB,aAAa,OAA6B;AACxC,SAAK,UAAU;AAEf,UAAM,QAAQ,KAAK,OAAO,MAAM,IAAI;AACpC,SAAK,SAAS,MAAM,IAAI,KAAK;AAE7B,UAAM,UAAwB,CAAC;AAC/B,eAAW,QAAQ,OAAO;AACxB,YAAM,SAAS,cAAc,IAAI;AACjC,UAAI,QAAQ;AACV,gBAAQ,KAAK,MAAM;AAAA,MACrB;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,QAA2B;AACzB,QAAI,CAAC,KAAK,OAAO,KAAK,EAAG,QAAO;AAChC,UAAM,SAAS,cAAc,KAAK,MAAM;AACxC,SAAK,SAAS;AACd,WAAO;AAAA,EACT;AACF;AASO,SAAS,eAAe,MAAY,OAAwB;AAGjE,QAAM,WAAW,KAAK,MAAM,KAAK,UAAU,IAAI,CAAC;AAChD;AAAA,IACE;AAAA,IACA;AAAA,EACF;AACA,SAAO;AACT;;;ACnDO,IAAM,cAAN,MAEgC;AAAA,EAC7B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAER,YAAY,UAA8B,CAAC,GAAG;AAC5C,SAAK,MAAM,QAAQ,OAAO;AAC1B,SAAK,UAAU,QAAQ;AACvB,SAAK,OAAO,QAAQ;AACpB,SAAK,cAAc,QAAQ;AAC3B,SAAK,cAAc,QAAQ;AAAA,EAC7B;AAAA,EAEA,MAAM,aACJ,SAO+D;AAC/D,UAAM,kBACJ,OAAO,KAAK,YAAY,aAAa,MAAM,KAAK,QAAQ,IAAI,KAAK;AAEnE,UAAM,gBACJ,QAAQ,mBAAmB,UACvB,OAAO,YAAY,QAAQ,QAAQ,QAAQ,CAAC,IAC3C,QAAQ,WAAW,CAAC;AAE3B,UAAM,UAAU,KAAK,eAAe,WAAW;AAE/C,UAAM,WAAW,MAAM,QAAQ,KAAK,KAAK;AAAA,MACvC,QAAQ;AAAA,MACR,SAAS;AAAA,QACP,gBAAgB;AAAA,QAChB,GAAG;AAAA,QACH,GAAG;AAAA,MACL;AAAA,MACA,MAAM,KAAK,UAAU;AAAA,QACnB,GAAG,KAAK;AAAA,QACR,GAAG,QAAQ;AAAA,QACX,IAAI,QAAQ;AAAA,QACZ,UAAU,QAAQ;AAAA,QAClB,SAAS,QAAQ;AAAA,QACjB,WAAW,QAAQ;AAAA,MACrB,CAAC;AAAA,MACD,aAAa,KAAK;AAAA,MAClB,QAAQ,QAAQ;AAAA,IAClB,CAAC;AAED,QAAI,CAAC,SAAS,IAAI;AAChB,YAAM,IAAI;AAAA,QACP,MAAM,SAAS,KAAK,KAAM;AAAA,MAC7B;AAAA,IACF;AAEA,QAAI,CAAC,SAAS,MAAM;AAClB,YAAM,IAAI,MAAM,6BAA6B;AAAA,IAC/C;AAEA,WAAO,KAAK,sBAAsB,SAAS,IAAI;AAAA,EACjD;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,kBACJ,UACsE;AACtE,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASQ,sBACN,QACsD;AACtD,UAAM,aAAa,IAAI,iBAAiB;AACxC,QAAI,cAAoB,EAAE,MAAM,IAAI,UAAU,CAAC,EAAE;AACjD,QAAI,eAAe;AACnB,QAAI,qBAAqB;AAEzB,UAAM,cAAc,IAAI,YAAY;AAEpC,WAAO,IAAI,eAAqD;AAAA,MAC9D,MAAM,MAAM,YAAY;AACtB,cAAM,SAAS,OAAO,UAAU;AAChC,YAAI,SAAS;AACb,cAAM,aAAa,CAAC,SAAiB;AACnC,gCAAsB;AACtB,qBAAW,QAAQ;AAAA,YACjB,MAAM;AAAA,YACN,IAAI,WAAW,YAAY;AAAA,YAC3B,MAAM;AAAA,cACJ,WAAW,OAAO,YAAY;AAAA,cAC9B,MAAM;AAAA,YACR;AAAA,UACF,CAAC;AAAA,QACH;AAEA,cAAM,mBAAmB,MAAM;AAC7B,cAAI,CAAC,mBAAoB;AACzB,0BAAgB;AAChB,+BAAqB;AAAA,QACvB;AAEA,cAAM,cAAc,CAAC,SAAiB;AACpC,cAAI,CAAC,KAAK,KAAK,EA
AG;AAGlB,cAAI,WAAW,KAAK,KAAK;AACzB,cAAI,SAAS,WAAW,OAAO,GAAG;AAChC,uBAAW,SAAS,MAAM,CAAC,EAAE,UAAU;AAAA,UACzC;AACA,cAAI,aAAa,SAAU;AAE3B,cAAI;AACF,kBAAM,QAAQ,KAAK,MAAM,QAAQ;AAMjC,gBAAI,MAAM,SAAS,cAAc;AAC/B,oBAAM,SAAS,WAAW,aAAa,MAAM,KAAK;AAElD,yBAAW,QAAQ,QAAQ;AACzB,oBAAI,KAAK,SAAS,SAAS;AACzB,mCAAiB;AACjB,gCAAc;AAAA,oBACZ;AAAA,oBACA,KAAK;AAAA,kBACP;AACA,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,IAAI;AAAA,oBACJ,MAAM;AAAA,kBACR,CAAC;AAAA,gBACH,OAAO;AAEL,6BAAW,KAAK,UAAU,IAAI;AAAA,gBAChC;AAAA,cACF;AAAA,YACF,WAES,MAAM,SAAS,YAAY;AAClC,oBAAM,YAAY,WAAW,MAAM;AACnC,kBAAI,WAAW;AACb,oBAAI,UAAU,SAAS,SAAS;AAC9B,mCAAiB;AACjB,gCAAc;AAAA,oBACZ;AAAA,oBACA,UAAU;AAAA,kBACZ;AACA,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,IAAI;AAAA,oBACJ,MAAM;AAAA,kBACR,CAAC;AAAA,gBACH,OAAO;AACL,6BAAW,UAAU,OAAO;AAAA,gBAC9B;AAAA,cACF;AAEA,+BAAiB;AAAA,YACnB,WAES,MAAM,SAAS,cAAc;AACpC;AAAA,YACF,OAEK;AACH,yBAAW,QAAQ,KAAK;AAAA,YAC1B;AAAA,UACF,QAAQ;AAAA,UAER;AAAA,QACF;AAEA,YAAI;AACF,iBAAO,MAAM;AACX,kBAAM,EAAE,MAAM,MAAM,IAAI,MAAM,OAAO,KAAK;AAC1C,gBAAI,MAAM;AACR,wBAAU,YAAY,OAAO;AAC7B;AAAA,YACF;AAEA,sBAAU,YAAY,OAAO,OAAO,EAAE,QAAQ,KAAK,CAAC;AAGpD,kBAAM,QAAQ,OAAO,MAAM,IAAI;AAC/B,qBAAS,MAAM,IAAI,KAAK;AAExB,uBAAW,QAAQ,OAAO;AACxB,0BAAY,IAAI;AAAA,YAClB;AAAA,UACF;AAEA,cAAI,OAAO,KAAK,GAAG;AACjB,wBAAY,MAAM;AAAA,UACpB;AAEA,qBAAW,MAAM;AAAA,QACnB,SAAS,OAAO;AACd,qBAAW,MAAM,KAAK;AAAA,QACxB,UAAE;AACA,iBAAO,YAAY;AAAA,QACrB;AAAA,MACF;AAAA,IACF,CAAC;AAAA,EACH;AACF;;;ACjQA,SAAS,8BAA8B;AAQvC,SAAS,cAAc,MAAoB;AACzC,SAAO,KAAK,UAAU,IAAI;AAC5B;AAYA,eAAsB,iCACpB,UACyB;AACzB,SAAO,uBAAuB,UAAU;AAAA,IACtC,iBAAiB,CAAC,SAAS;AACzB,UAAI,KAAK,SAAS,kBAAkB,KAAK,MAAM;AAC7C,cAAM,OAAQ,KAAK,KAA2B;AAC9C,YAAI,OAAO,SAAS,YAAY,KAAK,SAAS,GAAG;AAC/C,iBAAO,EAAE,MAAM,QAAQ,KAAK;AAAA,QAC9B;AAAA,MACF;AAEA,UAAI,KAAK,OAAO,QAAQ,KAAK,MAAM;AACjC,cAAM,WAAW,cAAc,KAAK,IAAY;AAChD,eAAO,EAAE,MAAM,QAAQ,MAAM;AAAA,EAA6B,QAAQ,GAAG;AAAA,MACvE;AACA,aAAO;AAAA,IACT;AAAA,EACF,CAAC;AACH;;;AC1CA;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,OAOK;AAmBA,IAAM,gBAAgB,aAAa,CAAC,OAAO;AAAA,EAChD,MAAM,EAAE,OAAO;AAAA,IACb,MAAM,EAAE,OAAO;AAAA,IACf,UAAU,EAAE;AAAA,MACV,EAAE,OAAO;AAAA,QACP,MAAM,EAAE,IAAI,oBAAoB;AAAA,QAChC,OAAO,EAAE,QAAQ,oBAAoB;AAAA,QACrC,UAAU,EAAE,MAAM,EAAE,OAAO,CAAC;AAAA,QAC5B,SAAS,EAAE,IAAI;AAAA,QACf,IAAI,EAAE,IAAI;AAAA,QACV,QAAQ,EAAE,IAAI;AAAA,MAChB,CAAC;AAAA,IACH;AAAA,IACA,OAAO,EAAE,IAAI;AAAA,EACf,CAAC;AAAA,EACD,SAAS,EAAE,OAAO;AAAA,IAChB,YAAY,EAAE,IAAI;AAAA,MAChB,OAAO,EAAE,IAAI;AAAA,MACb,OAAO,EAAE,MAAM,EAAE,OAAO,CAAC;AAAA,MACzB,aAAa,EAAE,OAAO;AAAA,IACxB,CAAC;AAAA,IACD,SAAS,EAAE,IAAI;AAAA,MACb,QAAQ,EAAE,IAAI;AAAA,MACd,aAAa,EAAE,OAAO;AAAA,IACxB,CAAC;AAAA,EACH,CAAC;AACH,EAAE;AAoCF,IAAM,iBAA2B;AAAA,EAC/B;AAAA,EACA;AACF;AAEO,SAAS,cAGd,QAAsB,SAA4C;AAClE,QAAM,OAAO,cAAc,QAAQ,OAAO;AAC1C,QAAM,iBAAiB,KAAK,OAAO,KAAK,IAAI;AAE5C,OAAK,SAAS,CAAC,YAAoC;AACjD,WAAO,eAAe;AAAA,MACpB,GAAG;AAAA,MACH,aAAa,CAAC,GAAG,gBAAgB,GAAI,SAAS,eAAe,CAAC,CAAE;AAAA,IAClE,CAAC;AAAA,EACH;AAEA,SAAO;AACT;AAOO,SAAS,iBAAiB,SAAoC;AACnE,QAAM,OAAO,gBAAgB,OAAO;AACpC,SAAO,OAAO,SAAS,eAAe,KAAK,IAAI;AACjD;;;AC7GA,SAAS,gBAAAA,qBAAoB;","names":["defineSchema"]}
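
Putting the compiled modules above together on the server: a minimal route sketch, assuming a fetch-style `POST` handler and AI SDK's `streamText` with `toUIMessageStreamResponse` (which should produce the event stream `UITransport` parses). The model provider and the Card/Button catalog entries are illustrative; they mirror the `createCatalog` example embedded in the source map's `sourcesContent`.

```ts
import { streamText } from "ai";
import { openai } from "@ai-sdk/openai"; // assumption: any AI SDK provider works here
import { z } from "zod";
import {
  createCatalog,
  convertUIMessagesToModelMessages,
  defaultSchema,
} from "ai-streamui";

// Illustrative catalog; entry shapes follow defaultSchema's catalog definition.
const catalog = createCatalog(defaultSchema, {
  components: {
    Card: {
      props: z.object({ title: z.string().optional() }),
      slots: ["default"],
      description: "Container with optional title",
    },
    Button: {
      props: z.object({ label: z.string() }),
      slots: [],
      description: "Clickable button",
    },
  },
});

export async function POST(req: Request) {
  const { messages } = await req.json();

  const result = streamText({
    model: openai("gpt-4o"),
    // catalog.prompt() appends STREAMUI_RULES, so the model emits raw JSONL patches.
    system: catalog.prompt(),
    // Folds earlier data-ui / data-ui-text parts back into plain text for the model.
    messages: await convertUIMessagesToModelMessages(messages),
  });

  // Assumed to emit the UIMessageChunk events (text-delta, text-end, ...) that
  // UITransport.processResponseStream consumes on the client.
  return result.toUIMessageStreamResponse();
}
```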
package/package.json ADDED
@@ -0,0 +1,61 @@
+ {
+   "name": "ai-streamui",
+   "version": "0.1.0",
+   "description": "Streaming UI from AI in chat — a Transport for AI SDK with json-render",
+   "type": "module",
+   "license": "MIT",
+   "repository": {
+     "type": "git",
+     "url": "git+https://github.com/miurla/streamui.git",
+     "directory": "packages/streamui"
+   },
+   "homepage": "https://github.com/miurla/streamui#readme",
+   "bugs": {
+     "url": "https://github.com/miurla/streamui/issues"
+   },
+   "keywords": [
+     "ai",
+     "streaming",
+     "ui",
+     "chat",
+     "ai-sdk",
+     "json-render",
+     "transport",
+     "llm"
+   ],
+   "files": [
+     "dist"
+   ],
+   "main": "dist/index.js",
+   "types": "dist/index.d.ts",
+   "exports": {
+     ".": {
+       "types": "./dist/index.d.ts",
+       "import": "./dist/index.js"
+     }
+   },
+   "scripts": {
+     "build": "tsup",
+     "dev": "tsup --watch",
+     "lint": "eslint src",
+     "check-types": "tsc --noEmit",
+     "test": "vitest run"
+   },
+   "dependencies": {
+     "@json-render/core": "^0.5.0"
+   },
+   "peerDependencies": {
+     "ai": "^6.0.0"
+   },
+   "devDependencies": {
+     "@ai-sdk/react": "^3.0.0",
+     "@repo/eslint-config": "workspace:*",
+     "@repo/typescript-config": "workspace:*",
+     "ai": "^6.0.0",
+     "eslint": "^9.0.0",
+     "tsup": "^8.3.0",
+     "typescript": "^5.9.0",
+     "vitest": "^3.0.0",
+     "zod": "^4.0.0"
+   }
+ }
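
One packaging detail worth noting from the manifest above: with `"type": "module"` and an exports map that declares only `types` and `import` conditions, the published build is ESM-only. A tiny sketch of the assumed consumer side:

```ts
// consumer.mts (ESM context; there is no "require" condition in the exports map)
import { UITransport, defaultSchema } from "ai-streamui";

const transport = new UITransport(); // POSTs to /api/chat by default
```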