@durable-streams/client 0.2.0 → 0.2.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "name": "@durable-streams/client",
3
3
  "description": "TypeScript client for the Durable Streams protocol",
4
- "version": "0.2.0",
4
+ "version": "0.2.2",
5
5
  "author": "Durable Stream contributors",
6
6
  "license": "Apache-2.0",
7
7
  "repository": {
@@ -36,18 +36,25 @@
36
36
  "./package.json": "./package.json"
37
37
  },
38
38
  "sideEffects": false,
39
+ "bin": {
40
+ "intent": "./bin/intent.js"
41
+ },
39
42
  "files": [
40
43
  "dist",
41
- "src"
44
+ "src",
45
+ "skills",
46
+ "bin",
47
+ "!skills/_artifacts"
42
48
  ],
43
49
  "dependencies": {
44
50
  "@microsoft/fetch-event-source": "^2.0.1",
45
51
  "fastq": "^1.19.1"
46
52
  },
47
53
  "devDependencies": {
54
+ "@tanstack/intent": "latest",
48
55
  "fast-check": "^4.4.0",
49
56
  "tsdown": "^0.9.0",
50
- "@durable-streams/server": "0.1.7"
57
+ "@durable-streams/server": "0.2.2"
51
58
  },
52
59
  "engines": {
53
60
  "node": ">=18.0.0"
@@ -0,0 +1,223 @@
1
+ ---
2
+ name: getting-started
3
+ description: >
4
+ First-time setup for Durable Streams. Install @durable-streams/client,
5
+ create a stream with DurableStream.create(), read with stream(), subscribe
6
+ to live updates, resume from saved offsets. Covers offset semantics ("-1",
7
+ "now", opaque tokens), LiveMode (false, true, "long-poll", "sse"), and
8
+ StreamResponse consumption (.json(), .text(), .subscribeJson()).
9
+ type: lifecycle
10
+ library: durable-streams
11
+ library_version: "0.2.2"
12
+ sources:
13
+ - "durable-streams/durable-streams:README.md"
14
+ - "durable-streams/durable-streams:packages/client/src/stream-api.ts"
15
+ - "durable-streams/durable-streams:packages/client/src/stream.ts"
16
+ ---
17
+
18
+ # Durable Streams — Getting Started
19
+
20
+ Durable Streams is an HTTP-based protocol for persistent, resumable, append-only
21
+ event streams. Use `stream()` for reading and `DurableStream` when you also need
22
+ to create or write to streams.
23
+
24
+ ## Setup
25
+
26
+ ```typescript
27
+ import { stream, DurableStream } from "@durable-streams/client"
28
+
29
+ // Create a JSON stream (use DurableStream for write operations)
30
+ const handle = await DurableStream.create({
31
+ url: "https://your-server.com/v1/stream/my-stream",
32
+ contentType: "application/json",
33
+ })
34
+
35
+ // Write some data
36
+ await handle.append(JSON.stringify({ event: "user.created", userId: "123" }))
37
+ await handle.append(JSON.stringify({ event: "user.updated", userId: "123" }))
38
+
39
+ // Read all data (use stream() for read-only access)
40
+ const res = await stream({
41
+ url: "https://your-server.com/v1/stream/my-stream",
42
+ offset: "-1",
43
+ live: false,
44
+ })
45
+ const items = await res.json()
46
+ // [{ event: "user.created", userId: "123" }, { event: "user.updated", userId: "123" }]
47
+ ```
48
+
49
+ ## Core Patterns
50
+
51
+ ### Read all existing data (catch-up)
52
+
53
+ ```typescript
54
+ import { stream } from "@durable-streams/client"
55
+
56
+ const res = await stream({
57
+ url: "https://your-server.com/v1/stream/my-stream",
58
+ offset: "-1", // Start from beginning
59
+ live: false, // Stop after catching up
60
+ })
61
+ const data = await res.json()
62
+ const savedOffset = res.offset // Save for resumption
63
+ ```
64
+
65
+ ### Subscribe to live updates
66
+
67
+ ```typescript
68
+ import { stream } from "@durable-streams/client"
69
+
70
+ const res = await stream({
71
+ url: "https://your-server.com/v1/stream/my-stream",
72
+ offset: "-1", // Catch up first, then continue live
73
+ live: true, // Auto-selects best transport (SSE for JSON, long-poll for binary)
74
+ })
75
+
76
+ res.subscribeJson(async (batch) => {
77
+ for (const item of batch.items) {
78
+ console.log("Received:", item)
79
+ }
80
+ saveCheckpoint(batch.offset) // Persist for resumption
81
+ })
82
+ ```
83
+
84
+ ### Resume from a saved offset
85
+
86
+ ```typescript
87
+ import { stream } from "@durable-streams/client"
88
+
89
+ const savedOffset = loadCheckpoint() // Load previously saved offset
90
+
91
+ const res = await stream({
92
+ url: "https://your-server.com/v1/stream/my-stream",
93
+ offset: savedOffset, // Resume from where we left off
94
+ live: true,
95
+ })
96
+
97
+ res.subscribeJson(async (batch) => {
98
+ for (const item of batch.items) {
99
+ processItem(item)
100
+ }
101
+ saveCheckpoint(batch.offset)
102
+ })
103
+ ```
104
+
105
+ ### Create and write to a stream
106
+
107
+ ```typescript
108
+ import { DurableStream, IdempotentProducer } from "@durable-streams/client"
109
+
110
+ const handle = await DurableStream.create({
111
+ url: "https://your-server.com/v1/stream/my-stream",
112
+ contentType: "application/json",
113
+ })
114
+
115
+ // For simple one-off writes, use append() directly
116
+ await handle.append(JSON.stringify({ event: "hello" }))
117
+
118
+ // For sustained writes, use IdempotentProducer (faster, exactly-once)
119
+ const producer = new IdempotentProducer(handle, "my-service", {
120
+ autoClaim: true,
121
+ onError: (err) => console.error("Write failed:", err),
122
+ })
123
+
124
+ producer.append(JSON.stringify({ event: "world" })) // Fire-and-forget
125
+ await producer.flush() // Ensure delivery before shutdown
126
+ await producer.close()
127
+ ```
128
+
129
+ ## Common Mistakes
130
+
131
+ ### CRITICAL Parsing or constructing offsets manually
132
+
133
+ Wrong:
134
+
135
+ ```typescript
136
+ const nextOffset = `${parseInt(offset.split("_")[0]) + 1}_0`
137
+ ```
138
+
139
+ Correct:
140
+
141
+ ```typescript
142
+ const nextOffset = response.offset // Always use server-returned offset
143
+ ```
144
+
145
+ Offsets are opaque tokens. The internal format is an implementation detail that may change between server versions.
146
+
147
+ Source: PROTOCOL.md section 6 (Offsets)
148
+
149
+ ### CRITICAL Using offset 0 instead of "-1" for stream start
150
+
151
+ Wrong:
152
+
153
+ ```typescript
154
+ const res = await stream({ url, offset: "0" })
155
+ ```
156
+
157
+ Correct:
158
+
159
+ ```typescript
160
+ const res = await stream({ url, offset: "-1" })
161
+ ```
162
+
163
+ The special start-of-stream offset is the string `"-1"`, not `"0"`. Using `"0"` may miss data or return 400.
164
+
165
+ Source: README.md offset semantics section
166
+
167
+ ### HIGH Calling multiple consumption methods on same response
168
+
169
+ Wrong:
170
+
171
+ ```typescript
172
+ const res = await stream({ url, offset: "-1" })
173
+ const data = await res.json()
174
+ res.subscribeJson((batch) => {
175
+ /* ... */
176
+ }) // throws ALREADY_CONSUMED!
177
+ ```
178
+
179
+ Correct:
180
+
181
+ ```typescript
182
+ const res = await stream({ url, offset: "-1", live: true })
183
+ res.subscribeJson((batch) => {
184
+ for (const item of batch.items) {
185
+ /* process */
186
+ }
187
+ })
188
+ ```
189
+
190
+ StreamResponse enforces single consumption. Choose one consumption method per response.
191
+
192
+ Source: packages/client/src/response.ts
193
+
194
+ ### HIGH Setting live mode for one-shot reads
195
+
196
+ Wrong:
197
+
198
+ ```typescript
199
+ const res = await stream({ url, offset: "-1", live: true })
200
+ const data = await res.json() // hangs until stream closes
201
+ ```
202
+
203
+ Correct:
204
+
205
+ ```typescript
206
+ const res = await stream({ url, offset: "-1", live: false })
207
+ const data = await res.json() // returns immediately with existing data
208
+ ```
209
+
210
+ Use `live: false` for catch-up reads. `live: true` keeps the connection open waiting for new data.
211
+
212
+ Source: packages/client/src/types.ts LiveMode type
213
+
214
+ ## See also
215
+
216
+ - [writing-data](../writing-data/SKILL.md) — IdempotentProducer for production-grade writes
217
+ - [server-deployment](../server-deployment/SKILL.md) — Setting up a server to develop against
218
+
219
+ Note: Streams must be created with `DurableStream.create()` before they can be read. See the writing-data skill for stream creation.
220
+
221
+ ## Version
222
+
223
+ Targets @durable-streams/client v0.2.2.
@@ -0,0 +1,243 @@
1
+ ---
2
+ name: go-to-production
3
+ description: >
4
+ Production readiness checklist for durable streams. Switch from dev server
5
+ to Caddy binary, configure CDN caching with offset-based URLs,
6
+ Cache-Control and ETag headers, Stream-Cursor for cache collision prevention,
7
+ TTL and Stream-Expires-At for stream lifecycle, HTTPS requirement, request
8
+ collapsing for fan-out, CORS configuration. Load before deploying durable
9
+ streams to production.
10
+ type: lifecycle
11
+ library: durable-streams
12
+ library_version: "0.2.2"
13
+ requires:
14
+ - server-deployment
15
+ sources:
16
+ - "durable-streams/durable-streams:PROTOCOL.md"
17
+ - "durable-streams/durable-streams:packages/caddy-plugin/README.md"
18
+ ---
19
+
20
+ This skill builds on durable-streams/server-deployment. Read it first for server setup basics.
21
+
22
+ # Durable Streams — Go to Production Checklist
23
+
24
+ Run through each section before deploying to production.
25
+
26
+ ## Server Checks
27
+
28
+ ### Check: Using Caddy production server (not dev server)
29
+
30
+ Expected:
31
+
32
+ ```bash
33
+ ./durable-streams-server run --config Caddyfile
34
+ ```
35
+
36
+ Fail condition: Importing `DurableStreamTestServer` from `@durable-streams/server` in production code.
37
+
38
+ Fix: Download the Caddy binary from GitHub releases and configure with a Caddyfile.
39
+
40
+ ### Check: File-backed persistence configured
41
+
42
+ Expected:
43
+
44
+ ```
45
+ durable_streams {
46
+ data_dir ./data
47
+ max_file_handles 200
48
+ }
49
+ ```
50
+
51
+ Fail condition: No `data_dir` in Caddyfile — server uses in-memory storage and loses data on restart.
52
+
53
+ Fix: Add `data_dir` pointing to a persistent directory.
54
+
55
+ ## Transport Checks
56
+
57
+ ### Check: HTTPS enabled
58
+
59
+ Expected:
60
+
61
+ ```typescript
62
+ const res = await stream({
63
+ url: "https://your-server.com/v1/stream/my-stream",
64
+ })
65
+ ```
66
+
67
+ Fail condition: Using `http://` URLs in production. Pre-signed URLs and auth tokens are bearer credentials — HTTP exposes them in transit. HTTP/1.1 also limits browsers to ~6 concurrent connections per origin.
68
+
69
+ Fix: Configure TLS on the Caddy server (Caddy provides automatic HTTPS by default).
70
+
71
+ ## Stream Lifecycle Checks
72
+
73
+ ### Check: TTL or expiration set on streams
74
+
75
+ Expected:
76
+
77
+ ```typescript
78
+ const handle = await DurableStream.create({
79
+ url: "https://server.com/v1/stream/my-stream",
80
+ contentType: "application/json",
81
+ headers: { "Stream-TTL": "86400" }, // 24 hours
82
+ })
83
+ ```
84
+
85
+ Fail condition: Streams created without TTL persist forever, causing unbounded storage growth.
86
+
87
+ Fix: Set `Stream-TTL` (seconds) or `Stream-Expires-At` (ISO timestamp) on stream creation. Use exactly one, not both.
88
+
89
+ ### Check: Not specifying both TTL and Expires-At
90
+
91
+ Expected:
92
+
93
+ ```typescript
94
+ headers: { "Stream-TTL": "86400" }
95
+ // OR
96
+ headers: { "Stream-Expires-At": "2026-04-01T00:00:00Z" }
97
+ ```
98
+
99
+ Fail condition: Providing both `Stream-TTL` and `Stream-Expires-At` returns 400 Bad Request.
100
+
101
+ Fix: Use one or the other. TTL is relative (seconds from creation), Expires-At is absolute.
102
+
103
+ ## CDN and Caching Checks
104
+
105
+ ### Check: CDN-friendly URL structure
106
+
107
+ Expected:
108
+
109
+ Reads use offset-based URLs that are naturally cacheable:
110
+
111
+ ```
112
+ GET /v1/stream/my-stream?offset=abc123
113
+ ```
114
+
115
+ The server returns `Cache-Control` and `ETag` headers automatically for historical reads. CDNs can cache and collapse requests — 10,000 viewers at the same offset become one upstream request.
116
+
117
+ Fail condition: Overriding or stripping `Cache-Control` headers at the CDN/proxy layer.
118
+
119
+ Fix: Allow the server's `Cache-Control` and `ETag` headers to pass through to the CDN.
120
+
121
+ ### Check: Stream-Cursor header preserved
122
+
123
+ `Stream-Cursor` prevents CDN cache collisions when the same offset returns different data (e.g., after stream truncation). Ensure your CDN does not strip this header.
124
+
125
+ Fail condition: CDN strips `Stream-Cursor` from responses.
126
+
127
+ Fix: Configure CDN to pass through `Stream-Cursor` response header.
128
+
129
+ ## Error Handling Checks
130
+
131
+ ### Check: onError handler configured for live streams
132
+
133
+ Expected:
134
+
135
+ ```typescript
136
+ const res = await stream({
137
+ url,
138
+ offset: "-1",
139
+ live: true,
140
+ onError: (error) => {
141
+ if (error.status === 401) return // Stop retrying
142
+ return {} // Retry transient errors
143
+ },
144
+ })
145
+ ```
146
+
147
+ Fail condition: No `onError` handler — permanent errors (401, 403) silently retry forever.
148
+
149
+ Fix: Add `onError` handler that stops retrying for non-transient errors.
150
+
151
+ ## Common Production Mistakes
152
+
153
+ ### CRITICAL Using HTTP in production with browser clients
154
+
155
+ Wrong:
156
+
157
+ ```typescript
158
+ const res = await stream({ url: "http://api.example.com/v1/stream/my-stream" })
159
+ ```
160
+
161
+ Correct:
162
+
163
+ ```typescript
164
+ const res = await stream({ url: "https://api.example.com/v1/stream/my-stream" })
165
+ ```
166
+
167
+ Pre-signed URLs and auth tokens are bearer credentials. HTTP exposes these in transit. Also, HTTP/1.1 limits browsers to ~6 concurrent connections per origin.
168
+
169
+ Source: packages/client/src/utils.ts warnIfUsingHttpInBrowser
170
+
171
+ ### HIGH Not setting TTL or expiration on streams
172
+
173
+ Wrong:
174
+
175
+ ```typescript
176
+ const handle = await DurableStream.create({
177
+ url: "https://server.com/v1/stream/my-stream",
178
+ contentType: "application/json",
179
+ })
180
+ ```
181
+
182
+ Correct:
183
+
184
+ ```typescript
185
+ const handle = await DurableStream.create({
186
+ url: "https://server.com/v1/stream/my-stream",
187
+ contentType: "application/json",
188
+ headers: { "Stream-TTL": "86400" },
189
+ })
190
+ ```
191
+
192
+ Without TTL, streams persist forever causing unbounded storage growth.
193
+
194
+ Source: PROTOCOL.md TTL and Expiry section
195
+
196
+ ### MEDIUM Specifying both TTL and Expires-At
197
+
198
+ Wrong:
199
+
200
+ ```typescript
201
+ headers: {
202
+ "Stream-TTL": "86400",
203
+ "Stream-Expires-At": "2026-04-01T00:00:00Z",
204
+ }
205
+ ```
206
+
207
+ Correct:
208
+
209
+ ```typescript
210
+ headers: {
211
+ "Stream-TTL": "86400", // OR Expires-At, not both
212
+ }
213
+ ```
214
+
215
+ The protocol requires exactly one. Providing both returns 400 Bad Request.
216
+
217
+ Source: PROTOCOL.md TTL and Expiry section
218
+
219
+ ### HIGH Tension: Ephemeral producers vs. persistent coordination
220
+
221
+ This skill's patterns conflict with writing-data. `autoClaim: true` is convenient for serverless/ephemeral workers but sacrifices cross-restart coordination. Persistent long-running workers may benefit from explicit epoch management for proper multi-worker coordination.
222
+
223
+ See also: durable-streams/writing-data/SKILL.md § Common Mistakes
224
+
225
+ ## Pre-Deploy Summary
226
+
227
+ - [ ] Using Caddy production server (not dev server)
228
+ - [ ] `data_dir` configured for persistence
229
+ - [ ] HTTPS enabled
230
+ - [ ] TTL or Expires-At set on all streams
231
+ - [ ] Not using both TTL and Expires-At together
232
+ - [ ] CDN passes through Cache-Control, ETag, and Stream-Cursor headers
233
+ - [ ] `onError` handler configured for live streams
234
+ - [ ] `max_file_handles` tuned for expected stream count
235
+
236
+ ## See also
237
+
238
+ - [server-deployment](../server-deployment/SKILL.md) — Initial server setup
239
+ - [writing-data](../writing-data/SKILL.md) — Producer configuration for production
240
+
241
+ ## Version
242
+
243
+ Targets durable-streams v0.2.2.