create-seiro 0.1.2 → 0.1.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "create-seiro",
-  "version": "0.1.2",
+  "version": "0.1.4",
   "description": "Scaffold a new Seiro project",
   "type": "module",
   "bin": {
@@ -47,6 +47,7 @@ export type EntityQueries = {
 export type EntityEvents = {
   entity_created: Entity;
   entity_updated: Entity;
+  entity_deleted: { id: number }; // different payload type
 };
 ```
 
@@ -74,7 +75,7 @@ const server = createServer({
 
 // Register domain handlers
 auth.register(server, sql);
-entity.register(server, sql);
+await entity.register(server, sql, listener); // with pg_notify listener
 
 await server.start({ "/": homepage });
 ```
@@ -86,21 +87,38 @@ import type { Sql } from "postgres";
 import type { Server } from "seiro";
 import type { Entity, EntityCommands, EntityQueries, EntityEvents } from "./types";
 
-export function register<
+export async function register<
   C extends EntityCommands,
   Q extends EntityQueries,
   E extends EntityEvents,
->(server: Server<C, Q, E>, sql: Sql) {
+>(server: Server<C, Q, E>, sql: Sql, listener?: Sql) {
+  // Listen to postgres notifications (if listener provided)
+  if (listener) {
+    await listener.listen("entity_created", (payload: string) => {
+      try {
+        server.emit("entity_created", JSON.parse(payload) as Entity);
+      } catch (e) {
+        console.error("Failed to parse entity_created payload:", payload, e);
+      }
+    });
 
-  // Send profile on connect (auth example)
-  server.onOpen(async (ctx) => {
-    if (!ctx.userId) {
-      ctx.send({ profile: null });
-      return;
-    }
-    // fetch and send user profile
-    ctx.send({ profile: user });
-  });
+    await listener.listen("entity_updated", (payload: string) => {
+      try {
+        server.emit("entity_updated", JSON.parse(payload) as Entity);
+      } catch (e) {
+        console.error("Failed to parse entity_updated payload:", payload, e);
+      }
+    });
+
+    // Different payload type for delete - just the id
+    await listener.listen("entity_deleted", (payload: string) => {
+      try {
+        server.emit("entity_deleted", JSON.parse(payload) as { id: number });
+      } catch (e) {
+        console.error("Failed to parse entity_deleted payload:", payload, e);
+      }
+    });
+  }
 
   // Command with typed result
   server.command("entity.save", async (data, ctx) => {
@@ -122,9 +140,6 @@ export function register<
     }
   });
 }
-
-// Broadcast events from server
-server.emit("entity_created", entity);
  ```
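The listeners registered above consume pg_notify messages, but this diff never shows where those notifications are published. For context, a minimal sketch of the producing side using postgres.js's `sql.notify`, assuming the notifications are published from the command handlers; the `entities` table and the handler body are illustrative and not taken from the package:

```typescript
server.command("entity.save", async (data, ctx) => {
  // Illustrative insert - the real handler's SQL is not part of this diff
  const [entity] = await sql<Entity[]>`
    INSERT INTO entities ${sql(data)}
    RETURNING *
  `;

  // NOTIFY payloads are plain strings (default limit ~8 kB), so the full row
  // is serialized here and JSON.parse'd again by listener.listen() above
  await sql.notify("entity_created", JSON.stringify(entity));

  return { id: entity.id };
});
```

Publishing from the handler keeps the notification next to the write; a database trigger that calls pg_notify would achieve the same for writes made outside the app.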
 
 ## Client Setup
@@ -279,10 +294,73 @@ RETURNS SETOF jsonb AS $$
 $$ LANGUAGE sql;
 ```
 
+## Streaming Queries
+
+Queries stream rows over the WebSocket - each row is sent as soon as the server yields it, and the client can process rows as they arrive.
+
+### How It Works
+
+**Server:** Each `yield` sends a message immediately:
+
+```typescript
+server.query("logs.recent", async function* (params, ctx) {
+  const rows = await sql`SELECT * FROM logs LIMIT 1000`;
+  for (const row of rows) {
+    yield row; // sent to client immediately
+  }
+});
+```
+
+**Wire:** Rows stream as individual messages:
+
+```
+→ { q: "logs.recent", id: 1, params: {} }
+← { id: 1, row: { id: 1, message: "..." } } // immediate
+← { id: 1, row: { id: 2, message: "..." } } // immediate
+← { id: 1, row: { id: 3, message: "..." } } // immediate
+...
+← { id: 1 } // end marker
+```
+
+**Client:** Process rows as they arrive:
+
+```typescript
+// Streaming - handle each row immediately
+for await (const row of client.query("logs.recent")) {
+  appendToUI(row); // renders while more rows are coming
+}
+
+// Or collect all (waits for stream to complete)
+const all = await client.queryAll("logs.recent");
+```
+
+### Use Cases
+
+- **Large datasets:** Render first results while fetching more
+- **Progress feedback:** Show items appearing one by one
+- **Memory efficiency:** Process rows without holding all in memory
+- **Responsive UI:** User sees data immediately, not after full load
+
+### True End-to-End Streaming
+
+The example above streams WebSocket delivery, but SQL fetches all rows first. For true streaming from database to client, use cursors:
+
+```typescript
+server.query("logs.stream", async function* (params, ctx) {
+  // Cursor-based streaming from postgres
+  const cursor = sql`SELECT * FROM logs`.cursor(100);
+  for await (const rows of cursor) {
+    for (const row of rows) {
+      yield row;
+    }
+  }
+});
+```
+
 ## Conventions
 
 - Commands return `{ id }` for create/save operations
-- Queries stream rows, end with empty `{ id }`
+- Queries stream rows - each `yield` sends immediately, end with empty `{ id }`
 - Use typed SQL: `sql<[{ result: Type }]>` or `sql<{ fn_name: Type }[]>`
 - Events broadcast full data via pg_notify
  - Pattern subscriptions support wildcards: `entity_*`
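The typed-SQL bullet is terse, so here is a small sketch of what the two forms can look like with postgres.js generics, assuming the same `sql` instance and `Entity` type as the earlier examples; the table name, function name, and search term are made up for illustration:

```typescript
// Form 1: a single row whose column is aliased to "result"
const [{ result }] = await sql<[{ result: number }]>`
  SELECT count(*)::int AS result FROM entities
`;

// Form 2: a set-returning SQL function - Postgres names the output column
// after the function, so the row type is keyed on the function name
const term = "widgets";
const rows = await sql<{ entity_search: Entity }[]>`
  SELECT entity_search(${term})
`;
const entities = rows.map((r) => r.entity_search);
```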
@@ -10,7 +10,7 @@
     "test": "bun test server.test.ts"
   },
   "dependencies": {
-    "seiro": "^0.1.2",
+    "seiro": "^0.1.3",
     "@preact/signals-core": "^1.12.2",
     "postgres": "^3.4.8"
   },