durable_streams 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,445 @@
1
+ # frozen_string_literal: true
2
+
3
+ require "json"
4
+
5
+ module DurableStreams
6
+ # Stream handle for read/write operations on a durable stream.
7
+ class Stream
8
+ attr_reader :url, :content_type
9
+
10
# @param url [String] Stream URL or path (resolved against context base_url)
# @param context [Context] Configuration context
# @param content_type [String, nil] Content type for the stream
# @param headers [Hash] Additional headers (merged with context defaults)
# @param batching [Boolean] Enable write batching (default: true)
def initialize(url, context: DurableStreams.default_context, content_type: nil, headers: {}, batching: true)
  @url = context.resolve_url(url)
  @context = context
  @content_type = content_type
  @instance_headers = headers || {}
  @batching = batching
  # Use the mock transport when Testing is loaded AND installed; otherwise
  # build a real HTTP transport. Call transport_if_installed only once and
  # reuse the value (the original called it twice, which is wasteful and
  # could return different objects if Testing.reset! raced in between).
  mock = defined?(DurableStreams::Testing) && DurableStreams::Testing.transport_if_installed
  @transport = mock || HTTP::Transport.new(
    retry_policy: context.config.retry_policy,
    timeout: context.config.timeout
  )
  # Batching state: queue of pending entries, a condition variable to wake
  # waiters, and a flag marking whether a leader thread is currently flushing.
  @batch_mutex = Mutex.new
  @batch_cv = ConditionVariable.new
  @batch_queue = []
  @batch_in_flight = false
end
35
+
36
+ # --- Factory Methods ---
37
+
38
# --- Factory Methods ---

# Connect to an existing stream, verifying it exists via HEAD.
# @param url [String] Stream URL or path
# @param context [Context] Configuration context
# @param headers [Hash] Additional headers
# @return [Stream]
# @raise [StreamNotFoundError] if the stream does not exist
def self.connect(url, context: DurableStreams.default_context, headers: {}, **options)
  new(url, context: context, headers: headers, **options).tap(&:head)
end

# Create a new stream on the server.
# @param url [String] Stream URL or path
# @param content_type [Symbol, String] Content type (:json, :bytes, or a MIME type)
# @param context [Context] Configuration context
# @param headers [Hash] Additional headers
# @param closed [Boolean] Create stream as immediately closed (default: false)
# @return [Stream]
def self.create(url, content_type:, context: DurableStreams.default_context, headers: {}, ttl_seconds: nil,
                expires_at: nil, body: nil, closed: false, **options)
  mime = normalize_content_type(content_type)
  stream = new(url, context: context, content_type: mime, headers: headers, **options)
  stream.create_stream(content_type: mime, ttl_seconds: ttl_seconds, expires_at: expires_at, body: body,
                       closed: closed)
  stream
end

# Check whether a stream exists without raising.
# @param url [String] Stream URL or path
# @param context [Context] Configuration context
# @param headers [Hash] Additional headers
# @return [Boolean]
def self.exists?(url, context: DurableStreams.default_context, headers: {}, **options)
  new(url, context: context, headers: headers, **options).exists?
end

# Normalize a content-type shorthand symbol to its MIME type; anything
# else is passed through as a string.
def self.normalize_content_type(ct)
  if ct == :json
    "application/json"
  elsif ct == :bytes
    "application/octet-stream"
  else
    ct.to_s
  end
end
82
+
83
+ # --- Metadata Operations ---
84
+
85
# HEAD - fetch stream metadata without a body.
# Caches the returned content type on the instance when present.
# @return [HeadResult]
# @raise [StreamNotFoundError] when the server responds 404
def head
  response = @transport.request(:head, @url, headers: resolved_headers)

  raise StreamNotFoundError.new(url: @url) if response.status == 404

  unless response.success?
    raise DurableStreams.error_from_status(response.status, url: @url, headers: response.headers)
  end

  # Cache the content type so later reads/appends need no extra HEAD.
  ct = response["content-type"]
  @content_type = ct if ct

  HeadResult.new(
    exists: true,
    content_type: ct,
    next_offset: response[STREAM_NEXT_OFFSET_HEADER],
    etag: response["etag"],
    cache_control: response["cache-control"],
    stream_closed: response[STREAM_CLOSED_HEADER]&.downcase == "true"
  )
end
112
+
113
# Check if the stream exists without raising.
# @return [Boolean]
def exists?
  head
  true
rescue StreamNotFoundError
  false
end

# Check whether this is a JSON stream. Issues a HEAD request when the
# content type has not been cached yet.
# @return [Boolean]
def json?
  head unless @content_type
  DurableStreams.json_content_type?(@content_type)
end
128
+
129
# Create the stream on the server (PUT).
# @param content_type [String, nil] Content type (falls back to the cached one)
# @param ttl_seconds [Integer, nil] Time-to-live in seconds
# @param expires_at [String, nil] Absolute expiry time (RFC3339)
# @param body [String, nil] Optional initial body
# @param closed [Boolean] Create stream as immediately closed (default: false)
# @raise [StreamExistsError] when the server responds 409
def create_stream(content_type: nil, ttl_seconds: nil, expires_at: nil, body: nil, closed: false)
  ct = content_type || @content_type

  headers = resolved_headers
  headers["content-type"] = ct if ct
  headers[STREAM_TTL_HEADER] = ttl_seconds.to_s if ttl_seconds
  headers[STREAM_EXPIRES_AT_HEADER] = expires_at if expires_at
  headers[STREAM_CLOSED_HEADER] = "true" if closed

  # JSON streams carry messages as a JSON array; wrap the initial body.
  payload = body
  payload = "[#{body}]" if body && DurableStreams.json_content_type?(ct)

  response = @transport.request(:put, @url, headers: headers, body: payload)

  raise StreamExistsError.new(url: @url) if response.status == 409

  unless response.success?
    raise DurableStreams.error_from_status(response.status, url: @url, body: response.body,
                                           headers: response.headers)
  end

  @content_type = response["content-type"] || ct
end
163
+
164
# Delete the stream (DELETE).
# @raise [StreamNotFoundError] when the server responds 404
def delete
  response = @transport.request(:delete, @url, headers: resolved_headers)

  case response.status
  when 404
    raise StreamNotFoundError.new(url: @url)
  when 204
    nil
  else
    return if response.success?

    raise DurableStreams.error_from_status(response.status, url: @url, headers: response.headers)
  end
end
178
+
179
# Close the stream (no more appends allowed).
# Closing is idempotent: both a 204 and a 409-with-closed-header response
# from the server are treated as "already closed" successes.
# @param data [String, nil] Optional final data to append before closing
# @param content_type [String, nil] Content type for the final data
# @return [CloseResult]
# @raise [FetchError] when a successful response omits the next-offset header
def close_stream(data: nil, content_type: nil)
  headers = resolved_headers
  headers[STREAM_CLOSED_HEADER] = "true"

  ct = content_type || @content_type
  headers["content-type"] = ct if ct

  # For JSON streams, wrap data in array if needed
  body = if data && DurableStreams.json_content_type?(ct)
           "[#{data}]"
         else
           data
         end

  response = @transport.request(:post, @url, headers: headers, body: body)

  # 204 means idempotent close (already closed); "-1" is the sentinel
  # offset when the server does not report one.
  if response.status == 204
    next_offset = response[STREAM_NEXT_OFFSET_HEADER] || "-1"
    return CloseResult.new(final_offset: next_offset)
  end

  # 409 with Stream-Closed header means already closed - treat as idempotent success
  if response.status == 409 && response[STREAM_CLOSED_HEADER]&.downcase == "true"
    next_offset = response[STREAM_NEXT_OFFSET_HEADER] || "-1"
    return CloseResult.new(final_offset: next_offset)
  end

  unless response.success?
    raise DurableStreams.error_from_status(response.status, url: @url, body: response.body,
                                           headers: response.headers)
  end

  # A fresh close must report where the stream ended.
  next_offset = response[STREAM_NEXT_OFFSET_HEADER]
  raise FetchError.new("Server did not return #{STREAM_NEXT_OFFSET_HEADER} header", url: @url) unless next_offset

  CloseResult.new(final_offset: next_offset)
end
221
+
222
# --- Write Operations ---

# Append data to the stream.
# For JSON streams, pass pre-serialized JSON strings; serialization is
# intentionally left to the caller.
# @param data [String] Data to append (pre-serialized JSON for JSON streams)
# @param seq [String, nil] Optional sequence number for ordering
# @return [AppendResult]
# @raise [ArgumentError] when data is not a String
# @example
#   # JSON stream - pass pre-serialized JSON
#   stream.append(JSON.generate({ message: "hello" }))
#
#   # Byte stream
#   stream.append("raw text data")
def append(data, seq: nil)
  unless data.is_a?(String)
    raise ArgumentError, "append() requires a String. For objects, use JSON.generate(). Got #{data.class}"
  end

  return append_direct(data, seq) unless @batching

  append_with_batching(data, seq)
end

# Synchronous append — identical to #append, with an explicit name.
# @param data [String] Data to append (pre-serialized JSON for JSON streams)
# @param seq [String, nil] Optional sequence number
# @return [AppendResult]
def append!(data, seq: nil)
  append(data, seq: seq)
end

# Shovel operator for append; returns self so calls can be chained.
# @param data [String] Data to append (pre-serialized JSON for JSON streams)
# @return [self]
def <<(data)
  append(data)
  self
end
262
+
263
# --- Read Operations ---

# Open a reader on the stream.
# @param offset [String] Starting offset (default: "-1" for beginning)
# @param live [Boolean, Symbol] Live mode (false, :long_poll, :sse)
# @param format [Symbol] Format hint (:auto, :json, :bytes)
# @param cursor [String, nil] Optional cursor for continuation
# @yield [Reader] When a block is given, the reader is closed afterwards
# @return [JsonReader, ByteReader] Reader for iterating messages (no block)
def read(offset: "-1", live: false, format: :auto, cursor: nil)
  reader = create_reader(offset: offset, live: live, format: format, cursor: cursor)
  return reader unless block_given?

  begin
    yield reader
  ensure
    reader.close
  end
end

# Iterate over currently-available messages (catch-up only).
# @yield [Object] Each message
# @return [Enumerator] when no block is given
def each(&block)
  return enum_for(:each) unless block

  read(live: false).each(&block)
end

# Convenience: read everything from offset to the current end.
# @param offset [String] Starting offset
# @return [Array] All messages from offset to current end
def read_all(offset: "-1")
  read(offset: offset, live: false) { |reader| reader.to_a }
end

# Shut down the underlying transport.
def close
  @transport.shutdown
end
306
+
307
# --- Internal Accessors ---

attr_reader :transport, :context

# Build the header set for a request: context defaults, overlaid with the
# per-stream headers, overlaid with any call-specific extras (used by readers).
# @param extra [Hash] Additional headers to merge last
# @return [Hash] Resolved headers
def resolved_headers(extra = {})
  defaults = HTTP.resolve_headers(@context.config.default_headers)
  instance = HTTP.resolve_headers(@instance_headers)
  defaults.merge(instance, extra)
end
318
+
319
private

# Instantiate the reader class matching the stream's effective format.
def create_reader(offset:, live:, format:, cursor:)
  klass = determine_format(format) == :json ? JsonReader : ByteReader
  klass.new(self, offset: offset, live: live, cursor: cursor)
end

# Resolve :auto to :json or :bytes, issuing a HEAD when the content type
# is not yet cached; explicit formats pass straight through.
def determine_format(format)
  return format unless format == :auto

  head if @content_type.nil?
  DurableStreams.json_content_type?(@content_type) ? :json : :bytes
end
338
+
339
# Append without batching: send a single-message batch immediately.
def append_direct(data, seq)
  post_append([data], seq: seq)
end

# Append with batching. The first caller to find no flush in flight becomes
# the "leader" and drains the queue on its own thread; every other caller
# just enqueues its entry and blocks on the condition variable until the
# leader marks the entry done (or failed).
def append_with_batching(data, seq)
  queue_entry = { data: data, seq: seq, result: nil, error: nil, done: false }
  is_leader = false

  @batch_mutex.synchronize do
    @batch_queue << queue_entry
    unless @batch_in_flight
      @batch_in_flight = true
      is_leader = true
    end
  end

  # The leader flushes synchronously; its own entry is marked done inside
  # flush_batch, so the wait loop below exits immediately for the leader.
  flush_batch if is_leader

  @batch_mutex.synchronize do
    @batch_cv.wait(@batch_mutex) until queue_entry[:done]
  end

  raise queue_entry[:error] if queue_entry[:error]

  queue_entry[:result]
end

# Drain the batch queue: repeatedly snapshot the queue, send the snapshot
# as one request, and wake all waiters. On error, fail both the snapshot
# and anything enqueued meanwhile, then give up leadership.
def flush_batch
  loop do
    messages = nil
    @batch_mutex.synchronize do
      if @batch_queue.empty?
        # Release leadership while still holding the lock so the next
        # producer can become leader without a lost-wakeup window.
        @batch_in_flight = false
        return
      end
      messages = @batch_queue.dup
      @batch_queue.clear
    end

    begin
      result = send_batch(messages)
      @batch_mutex.synchronize do
        # Every entry in the batch shares the same AppendResult.
        messages.each do |msg|
          msg[:result] = result
          msg[:done] = true
        end
        @batch_cv.broadcast
      end
    rescue StandardError => e
      @batch_mutex.synchronize do
        # Fail the batch that was sent...
        messages.each do |msg|
          msg[:error] = e
          msg[:done] = true
        end
        # ...and everything enqueued while the request was in flight.
        @batch_queue.each do |msg|
          msg[:error] = e
          msg[:done] = true
        end
        @batch_queue.clear
        @batch_in_flight = false
        @batch_cv.broadcast
      end
      return
    end
  end
end

# Send one batch in a single POST, tagged with the last-supplied sequence
# number present in the batch (if any).
def send_batch(messages)
  highest_seq = messages.reverse.find { |m| m[:seq] }&.fetch(:seq)
  post_append(messages.map { |m| m[:data] }, seq: highest_seq)
end
410
+
411
# POST a batch of pre-serialized payloads to the stream.
# @param data_items [Array<String>] Pre-serialized message bodies
# @param seq [String, nil] Optional sequence number sent as a header
# @return [AppendResult]
# @raise [StreamClosedError] on 409 with the closed header set
# @raise [SeqConflictError] on any other 409
# @raise [FetchError] when the server omits the next-offset header
def post_append(data_items, seq: nil)
  headers = resolved_headers
  headers["content-type"] = @content_type if @content_type
  headers[STREAM_SEQ_HEADER] = seq.to_s if seq

  # data_items are pre-serialized strings
  body = if DurableStreams.json_content_type?(@content_type)
           # Wrap pre-serialized JSON strings in array
           "[#{data_items.join(',')}]"
         else
           data_items.join
         end

  response = @transport.request(:post, @url, headers: headers, body: body)

  if response.status == 409
    # Closed streams and sequence conflicts both surface as 409; the
    # Stream-Closed header disambiguates them.
    if response[STREAM_CLOSED_HEADER]&.downcase == "true"
      raise StreamClosedError.new(url: @url)
    end

    raise SeqConflictError.new(url: @url)
  end

  unless response.success? || response.status == 204
    raise DurableStreams.error_from_status(response.status, url: @url, body: response.body,
                                           headers: response.headers)
  end

  next_offset = response[STREAM_NEXT_OFFSET_HEADER]
  raise FetchError.new("Server did not return #{STREAM_NEXT_OFFSET_HEADER} header", url: @url) unless next_offset

  AppendResult.new(next_offset: next_offset)
end
444
+ end
445
+ end
@@ -0,0 +1,277 @@
1
+ # frozen_string_literal: true
2
+
3
+ module DurableStreams
4
+ # Testing utilities for DurableStreams.
5
+ # Provides mock transport and matchers for testing without a real server.
6
+ module Testing
7
# Lightweight stand-in for an HTTP response, used by MockTransport.
class MockResponse
  attr_reader :status, :body, :headers

  # Header names are normalized to lowercase on construction so that
  # later lookups are case-insensitive.
  def initialize(status:, body: "", headers: {})
    @status = status
    @body = body
    @headers = headers.transform_keys(&:downcase)
  end

  # True for any 2xx status.
  def success?
    (200...300).cover?(status)
  end

  # Case-insensitive header lookup.
  def [](header)
    @headers[header.to_s.downcase]
  end
end
25
+
26
# Mock transport for testing without a real server.
# Records every request, maintains an in-memory message store per stream
# path, and answers with either a canned response (via #on) or a sensible
# default for each HTTP method.
class MockTransport
  attr_reader :requests, :streams

  def initialize
    @requests = []        # every request made, in order
    @streams = {}         # path => Array of appended messages
    @responses = {}       # "method:path" => canned MockResponse
    @default_offset = "0" # offset reported by default HEAD responses
  end

  # Record a request and return a mock response.
  # Canned responses registered via #on take priority over defaults.
  def request(method, url, headers: {}, body: nil, **_options)
    uri = URI.parse(url)
    path = uri.path

    @requests << {
      method: method,
      url: url,
      path: path,
      headers: headers,
      body: body
    }

    # Return configured response or generate a default
    response_key = "#{method}:#{path}"
    if @responses[response_key]
      @responses[response_key]
    else
      default_response(method, path, body)
    end
  end

  # Stream request (simplified mock - just yields response).
  # NOTE(review): the timeout argument is accepted but ignored here —
  # confirm no test relies on timeout behavior from the mock.
  def stream_request(method, url, headers: {}, timeout: nil)
    response = request(method, url, headers: headers)
    yield response if block_given?
  end

  # Configure a response for a specific method and path.
  # @param method [Symbol] HTTP method
  # @param path [String] URL path
  # @param status [Integer] Response status code
  # @param body [String] Response body
  # @param headers [Hash] Response headers
  def on(method, path, status:, body: "", headers: {})
    @responses["#{method}:#{path}"] = MockResponse.new(
      status: status,
      body: body,
      headers: headers
    )
  end

  # Add messages to a mock stream (for read testing).
  # @param path [String] Stream path
  # @param messages [Array] Messages to add
  def seed_stream(path, messages)
    @streams[path] ||= []
    @streams[path].concat(messages)
  end

  # Get messages for a stream.
  # @param path [String] Stream path
  # @return [Array] Messages
  def messages_for(path)
    @streams[path] || []
  end

  # Clear all recorded requests and mock data.
  def clear!
    @requests.clear
    @streams.clear
    @responses.clear
  end

  # No-op shutdown for mock.
  def shutdown; end

  private

  # Generate a default response per HTTP method, simulating a minimal
  # durable-streams server backed by @streams.
  def default_response(method, path, body)
    case method
    when :put
      # Stream creation: register an empty message list.
      @streams[path] ||= []
      MockResponse.new(
        status: 201,
        headers: { "content-type" => "application/json" }
      )
    when :head
      # NOTE(review): HEAD always reports @default_offset ("0") even after
      # appends — confirm whether it should reflect @streams[path].size.
      if @streams.key?(path)
        MockResponse.new(
          status: 200,
          headers: {
            "content-type" => "application/json",
            STREAM_NEXT_OFFSET_HEADER => @default_offset
          }
        )
      else
        MockResponse.new(status: 404)
      end
    when :post
      # Append: JSON bodies are parsed and flattened into the store;
      # non-JSON bodies are stored verbatim.
      @streams[path] ||= []
      if body
        begin
          messages = JSON.parse(body)
          @streams[path].concat(Array(messages))
        rescue JSON::ParserError
          @streams[path] << body
        end
      end
      new_offset = @streams[path].size.to_s
      MockResponse.new(
        status: 200,
        headers: { STREAM_NEXT_OFFSET_HEADER => new_offset }
      )
    when :get
      # Read: return everything stored for the path as one JSON array.
      messages = @streams[path] || []
      MockResponse.new(
        status: 200,
        body: JSON.generate(messages),
        headers: {
          "content-type" => "application/json",
          STREAM_NEXT_OFFSET_HEADER => messages.size.to_s,
          STREAM_UP_TO_DATE_HEADER => "true"
        }
      )
    when :delete
      # Hash#delete returns the stored array (truthy) when present.
      if @streams.delete(path)
        MockResponse.new(status: 204)
      else
        MockResponse.new(status: 404)
      end
    else
      MockResponse.new(status: 405)
    end
  end
end
165
+
166
class << self
  # Lazily-created shared mock transport instance.
  # @return [MockTransport]
  def mock_transport
    @mock_transport ||= MockTransport.new
  end

  # Enable testing mode: newly built Streams will pick up the mock transport.
  def install!
    @installed = true
    DurableStreams.reset_configuration!
  end

  # Whether testing mode is currently active.
  # @return [Boolean]
  def installed?
    !!@installed
  end

  # Transport to inject into new Streams: the mock when installed, nil otherwise.
  # Called by Stream to check whether the mock transport should be used.
  # @return [MockTransport, nil]
  def transport_if_installed
    mock_transport if installed?
  end

  # Reset all testing state and global configuration.
  def reset!
    @mock_transport = nil
    @installed = false
    DurableStreams.reset_configuration!
  end

  # Messages appended to a stream path via the mock.
  # @param path [String] Stream path
  # @return [Array] Messages
  def messages_for(path)
    mock_transport.messages_for(path)
  end

  # All requests recorded by the mock.
  # @return [Array<Hash>] Requests
  def requests
    mock_transport.requests
  end

  # Clear recorded requests and seeded data without uninstalling.
  def clear!
    mock_transport.clear!
  end
end
216
+
217
# RSpec matchers (only defined if RSpec is available).
# These read state from the shared mock transport, so they require
# DurableStreams::Testing.install! to have been called in the suite.
if defined?(RSpec)
  require "rspec/expectations"

  # expect(...).to have_appended_to("/path")           — any message appended
  # expect(...).to have_appended_to("/path").with(h)   — a message matching
  #   the given hash subset (keys compared as strings, recursively)
  RSpec::Matchers.define :have_appended_to do |path|
    match do |_|
      messages = DurableStreams::Testing.messages_for(path)
      if @expected_data
        messages.any? { |m| hash_subset?(m, @expected_data) }
      else
        messages.any?
      end
    end

    chain :with do |data|
      @expected_data = data
    end

    failure_message do
      messages = DurableStreams::Testing.messages_for(path)
      if @expected_data
        "expected to find message matching #{@expected_data.inspect} in #{messages.inspect}"
      else
        "expected messages to be appended to #{path}, but none were"
      end
    end

    # Recursive subset check: every expected key (stringified) must exist
    # in actual and its value must match; non-hash values compare with ==.
    def hash_subset?(actual, expected)
      return actual == expected unless expected.is_a?(Hash) && actual.is_a?(Hash)

      expected.all? { |k, v| actual.key?(k.to_s) && hash_subset?(actual[k.to_s], v) }
    end
  end

  # expect(...).to have_made_request(:post)                 — by method
  #   .to("/path")        — additionally match the request path
  #   .with_body("text")  — additionally match a body substring
  RSpec::Matchers.define :have_made_request do |method|
    match do |_|
      requests = DurableStreams::Testing.requests
      requests.any? do |req|
        matches_method = req[:method] == method
        matches_path = @to_path.nil? || req[:path] == @to_path
        matches_body = @with_body.nil? || req[:body]&.include?(@with_body.to_s)
        matches_method && matches_path && matches_body
      end
    end

    chain :to do |path|
      @to_path = path
    end

    chain :with_body do |body|
      @with_body = body
    end

    failure_message do
      requests = DurableStreams::Testing.requests
      "expected #{method} request#{@to_path ? " to #{@to_path}" : ""}, but got: #{requests.map { |r| "#{r[:method]} #{r[:path]}" }}"
    end
  end
end
276
+ end
277
+ end