durable_streams 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/CHANGELOG.md +7 -0
- data/LICENSE +22 -0
- data/README.md +276 -0
- data/UPSTREAM.md +85 -0
- data/lib/durable_streams/byte_reader.rb +185 -0
- data/lib/durable_streams/client.rb +68 -0
- data/lib/durable_streams/configuration.rb +26 -0
- data/lib/durable_streams/context.rb +35 -0
- data/lib/durable_streams/errors.rb +167 -0
- data/lib/durable_streams/http/transport.rb +213 -0
- data/lib/durable_streams/json_reader.rb +211 -0
- data/lib/durable_streams/producer.rb +436 -0
- data/lib/durable_streams/sse_reader.rb +228 -0
- data/lib/durable_streams/stream.rb +445 -0
- data/lib/durable_streams/testing.rb +277 -0
- data/lib/durable_streams/types.rb +143 -0
- data/lib/durable_streams/version.rb +5 -0
- data/lib/durable_streams.rb +125 -0
- metadata +105 -0
|
@@ -0,0 +1,35 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
module DurableStreams
  # Context holds a frozen configuration snapshot.
  # Use for isolated configurations (e.g., staging vs production).
  #
  # Context is purely a configuration container - no flush/close methods.
  # Stream and Producer handle their own resource management.
  class Context
    attr_reader :config

    # Builds an immutable configuration snapshot.
    #
    # @param parent_config [Configuration] Base configuration to copy from
    # @yield [Configuration] Optional block to customize the copy before it
    #   is frozen
    def initialize(parent_config = DurableStreams.configuration)
      @config = parent_config.dup
      yield(@config) if block_given?
      # Freeze the snapshot so the context can be shared safely.
      DurableStreams.send(:deep_freeze, @config)
    end

    # Resolve a URL against the configured base_url.
    # Absolute http(s) URLs are returned unchanged.
    #
    # @param url [String] URL or path
    # @return [String] Full URL
    # @raise [ArgumentError] If URL is blank, or it is relative and no
    #   base_url is configured
    def resolve_url(url)
      raise ArgumentError, "URL required" if url.nil? || url.to_s.strip.empty?
      return url if url.start_with?("http://", "https://")

      root = @config.base_url&.chomp("/")
      raise ArgumentError, "base_url not configured" unless root

      url.start_with?("/") ? "#{root}#{url}" : "#{root}/#{url}"
    end
  end
end
|
|
@@ -0,0 +1,167 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
module DurableStreams
  # Base error class for all Durable Streams errors.
  #
  # Carries optional request context alongside the message:
  # the stream URL, the HTTP status, the response headers, and a
  # stable machine-readable error code.
  class Error < StandardError
    attr_reader :url, :status, :headers, :code

    # @param message [String, nil] Human-readable description
    # @param url [String, nil] Stream URL the error relates to
    # @param status [Integer, nil] HTTP status code, if any
    # @param headers [Hash, nil] Response headers (defaults to {})
    # @param code [String, nil] Machine-readable error code
    def initialize(message = nil, url: nil, status: nil, headers: nil, code: nil)
      super(message)
      @url = url
      @status = status
      @headers = headers || {}
      @code = code
    end
  end

  # Stream not found (404)
  class StreamNotFoundError < Error
    def initialize(url: nil, **opts)
      super("Stream not found: #{url}", url: url, status: 404, code: "NOT_FOUND", **opts)
    end
  end

  # Stream already exists with different config (409)
  class StreamExistsError < Error
    def initialize(url: nil, **opts)
      super("Stream already exists: #{url}", url: url, status: 409, code: "CONFLICT_EXISTS", **opts)
    end
  end

  # Sequence conflict (409 with Stream-Seq)
  class SeqConflictError < Error
    def initialize(url: nil, **opts)
      message = url ? "Sequence conflict: #{url}" : "Sequence conflict"
      super(message, url: url, status: 409, code: "CONFLICT_SEQ", **opts)
    end
  end

  # Stream is closed - no more appends allowed (409 with Stream-Closed)
  class StreamClosedError < Error
    def initialize(url: nil, **opts)
      message = url ? "Stream is closed: #{url}" : "Stream is closed"
      super(message, url: url, status: 409, code: "STREAM_CLOSED", **opts)
    end
  end

  # Content type mismatch (409).
  #
  # Exposes the expected and actual content types so callers can
  # inspect the mismatch programmatically (consistent with
  # StaleEpochError#current_epoch and SequenceGapError#expected_seq).
  class ContentTypeMismatchError < Error
    attr_reader :expected, :actual

    def initialize(url: nil, expected: nil, actual: nil, **opts)
      super("Content type mismatch: expected #{expected}, got #{actual}",
            url: url, status: 409, code: "CONFLICT", **opts)
      @expected = expected
      @actual = actual
    end
  end

  # Producer epoch is stale (403)
  class StaleEpochError < Error
    attr_reader :current_epoch

    def initialize(message = "Stale producer epoch", current_epoch: nil, **opts)
      super(message, status: 403, code: "FORBIDDEN", **opts)
      @current_epoch = current_epoch
    end
  end

  # Producer sequence gap (409)
  class SequenceGapError < Error
    attr_reader :expected_seq, :received_seq

    def initialize(expected_seq: nil, received_seq: nil, url: nil, **opts)
      message = "Sequence gap: expected #{expected_seq}, got #{received_seq}"
      message = "#{message} (#{url})" if url
      super(message, url: url, status: 409, code: "SEQUENCE_GAP", **opts)
      @expected_seq = expected_seq
      @received_seq = received_seq
    end
  end

  # Rate limited (429)
  class RateLimitedError < Error
    def initialize(url: nil, **opts)
      message = url ? "Rate limited: #{url}" : "Rate limited"
      super(message, url: url, status: 429, code: "RATE_LIMITED", **opts)
    end
  end

  # Bad request (400)
  class BadRequestError < Error
    def initialize(message = "Bad request", url: nil, **opts)
      super(message, url: url, status: 400, code: "BAD_REQUEST", **opts)
    end
  end

  # Network/connection error (no HTTP status — the request never completed)
  class ConnectionError < Error
    def initialize(message = "Connection error", **opts)
      super(message, code: "NETWORK_ERROR", **opts)
    end
  end

  # Timeout error
  class TimeoutError < Error
    def initialize(message = "Request timeout", **opts)
      super(message, code: "TIMEOUT", **opts)
    end
  end

  # Reader already consumed (single-use readers cannot be re-iterated)
  class AlreadyConsumedError < Error
    def initialize(**opts)
      super("Reader already consumed", code: "ALREADY_CONSUMED", **opts)
    end
  end

  # Producer or stream has been closed
  class ClosedError < Error
    def initialize(message = "Producer is closed", **opts)
      super(message, code: "CLOSED", **opts)
    end
  end

  # SSE not supported for this content type
  class SSENotSupportedError < Error
    def initialize(content_type: nil, **opts)
      super("SSE not supported for content type: #{content_type}",
            status: 400, code: "SSE_NOT_SUPPORTED", **opts)
    end
  end

  # Parse error (malformed JSON, SSE, etc.)
  class ParseError < Error
    def initialize(message = "Parse error", **opts)
      super(message, code: "PARSE_ERROR", **opts)
    end
  end

  # Generic fetch error for unexpected statuses
  class FetchError < Error
    def initialize(message = "Fetch error", url: nil, status: nil, **opts)
      super(message, url: url, status: status, code: "UNEXPECTED_STATUS", **opts)
    end
  end

  # Map an HTTP status to the appropriate error instance (not raised here).
  #
  # @param status [Integer] HTTP status code
  # @param url [String, nil] Request URL, used in error messages
  # @param body [String, nil] Response body, used as the message for 400/403/5xx
  # @param headers [Hash, nil] Response headers; used to disambiguate 409s
  # @param operation [Symbol, nil] Accepted for call-site symmetry; currently unused
  # @return [Error] The most specific error subclass for the status
  def self.error_from_status(status, url: nil, body: nil, headers: nil, operation: nil)
    case status
    when 400
      BadRequestError.new(body || "Bad request", url: url, headers: headers)
    when 403
      # All 403s are treated as a stale producer epoch.
      StaleEpochError.new(body || "Forbidden", url: url, headers: headers)
    when 404
      StreamNotFoundError.new(url: url, headers: headers)
    when 409
      # 409 is ambiguous: disambiguate via response headers.
      if headers && headers[STREAM_CLOSED_HEADER]&.downcase == "true"
        StreamClosedError.new(url: url, headers: headers)
      elsif headers&.key?("stream-seq")
        SeqConflictError.new(url: url, headers: headers)
      else
        StreamExistsError.new(url: url, headers: headers)
      end
    when 429
      RateLimitedError.new(url: url, headers: headers)
    else
      FetchError.new(body || "HTTP #{status}", url: url, status: status, headers: headers)
    end
  end
end
|
|
@@ -0,0 +1,213 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
require "net/http/persistent"
|
|
4
|
+
require "uri"
|
|
5
|
+
require "json"
|
|
6
|
+
|
|
7
|
+
module DurableStreams
  module HTTP
    # HTTP transport layer using net-http-persistent for connection pooling
    class Transport
      attr_reader :retry_policy, :timeout

      # @param retry_policy [RetryPolicy, nil] Retry policy (defaults to RetryPolicy.default)
      # @param timeout [Integer] Default read timeout in seconds
      # @param name [String] Connection pool name
      def initialize(retry_policy: nil, timeout: 30, name: "durable_streams")
        @retry_policy = retry_policy || RetryPolicy.default
        @timeout = timeout
        @http = Net::HTTP::Persistent.new(name: name)
        @http.open_timeout = 10
        @http.read_timeout = timeout
        @http.idle_timeout = 30
      end

      # Make a request with retry logic.
      #
      # Retries on retryable statuses and on connection-level failures,
      # with exponential backoff, up to retry_policy.max_retries extra attempts.
      #
      # @param method [Symbol] HTTP method (:get, :post, :put, :delete, :head)
      # @param url [String] Full URL
      # @param headers [Hash] HTTP headers
      # @param body [String, nil] Request body
      # @param stream [Boolean] Whether to stream the response (ignored, use stream_request instead)
      # @param timeout [Integer, nil] Request-specific timeout
      # @return [Response]
      # @raise [ConnectionError] When all attempts fail at the connection level
      def request(method, url, headers: {}, body: nil, stream: false, timeout: nil)
        uri = URI.parse(url)
        request_timeout = timeout || @timeout

        attempts = 0

        loop do
          attempts += 1
          begin
            response = execute_request(method, uri, headers, body, request_timeout)

            # Retry on retryable statuses (e.g. 5xx) until the budget is spent.
            if @retry_policy.retryable_statuses.include?(response.status) && attempts <= @retry_policy.max_retries
              sleep(calculate_delay(attempts))
              next
            end

            return response
          rescue Errno::ECONNREFUSED, Errno::ECONNRESET, Errno::EPIPE,
                 Net::OpenTimeout, Net::ReadTimeout, IOError,
                 Net::HTTP::Persistent::Error => e
            if attempts <= @retry_policy.max_retries
              sleep(calculate_delay(attempts))
              next
            end
            raise ConnectionError.new(
              "Failed to connect to #{uri} after #{attempts} attempts: #{e.class}: #{e.message}"
            )
          end
        end
      end

      # Stream a request, yielding the response for chunk-by-chunk reading.
      #
      # No retries: a broken stream must be resumed by the caller from the
      # last known offset.
      #
      # @param method [Symbol] HTTP method
      # @param url [String] Full URL
      # @param headers [Hash] HTTP headers
      # @param timeout [Integer, nil] Request-specific timeout
      # @yield [StreamingResponse] The streaming response
      # @raise [ConnectionError] On connection-level failure
      def stream_request(method, url, headers: {}, timeout: nil, &block)
        # Capture the pool's read timeout before anything that can raise,
        # so the ensure below always restores the correct value.
        original_timeout = @http.read_timeout

        uri = URI.parse(url)
        request_timeout = timeout || @timeout

        # Temporarily adjust read timeout for streaming
        @http.read_timeout = request_timeout

        req = build_request(method, uri, headers, nil)

        begin
          @http.request(uri, req) do |http_response|
            yield StreamingResponse.new(http_response)
          end
        rescue Errno::ECONNREFUSED, Errno::ECONNRESET, Errno::EPIPE,
               Net::OpenTimeout, Net::ReadTimeout, IOError,
               Net::HTTP::Persistent::Error => e
          raise ConnectionError.new("Streaming request to #{uri} failed: #{e.class}: #{e.message}")
        end
      ensure
        # Restore unconditionally: nil is a valid pool setting ("no timeout")
        # and must not be clobbered by the per-request value.
        @http.read_timeout = original_timeout
      end

      # Shutdown the persistent connection pool
      def shutdown
        @http.shutdown
      end

      private

      # Execute a single request, temporarily overriding the pool read
      # timeout when the per-request timeout differs.
      def execute_request(method, uri, headers, body, request_timeout)
        original_timeout = @http.read_timeout
        @http.read_timeout = request_timeout if request_timeout != original_timeout

        req = build_request(method, uri, headers, body)
        http_response = @http.request(uri, req)

        Response.new(http_response, http_response.body)
      ensure
        @http.read_timeout = original_timeout if request_timeout != original_timeout
      end

      # Build a Net::HTTP request object for the given method.
      # @raise [ArgumentError] For unsupported methods
      def build_request(method, uri, headers, body)
        path = uri.request_uri

        req = case method
              when :get then Net::HTTP::Get.new(path)
              when :post then Net::HTTP::Post.new(path)
              when :put then Net::HTTP::Put.new(path)
              when :delete then Net::HTTP::Delete.new(path)
              when :head then Net::HTTP::Head.new(path)
              else raise ArgumentError, "Unknown method: #{method}"
              end

        headers.each { |k, v| req[k] = v }
        req.body = body if body

        req
      end

      # Exponential backoff: initial_delay * multiplier^(attempt-1),
      # capped at max_delay.
      def calculate_delay(attempt)
        delay = @retry_policy.initial_delay * (@retry_policy.multiplier**(attempt - 1))
        [delay, @retry_policy.max_delay].min
      end
    end

    # Simple response wrapper with downcased header keys
    class Response
      attr_reader :status, :headers, :body

      def initialize(http_response, body = nil)
        @status = http_response.code.to_i
        @headers = {}
        http_response.each_header { |k, v| @headers[k.downcase] = v }
        @body = body || http_response.body || ""
      end

      # @return [Boolean] true for 2xx statuses
      def success?
        status >= 200 && status < 300
      end

      # Case-insensitive header lookup
      def [](header)
        @headers[header.to_s.downcase]
      end
    end

    # Streaming response for SSE
    class StreamingResponse
      attr_reader :status, :headers

      def initialize(http_response)
        @http_response = http_response
        @status = http_response.code.to_i
        @headers = {}
        http_response.each_header { |k, v| @headers[k.downcase] = v }
      end

      # @return [Boolean] true for 2xx statuses
      def success?
        status >= 200 && status < 300
      end

      # Case-insensitive header lookup
      def [](header)
        @headers[header.to_s.downcase]
      end

      # Read chunks from the response body
      def each_chunk(&block)
        @http_response.read_body(&block)
      end
    end

    # Build a URL with query parameters merged into any existing query string.
    # @param base_url [String] URL, possibly already carrying a query string
    # @param params [Hash] Parameters to merge (keys stringified; new values win)
    # @return [String]
    def self.build_url(base_url, params = {})
      return base_url if params.empty?

      uri = URI.parse(base_url)
      existing_params = uri.query ? URI.decode_www_form(uri.query).to_h : {}
      merged_params = existing_params.merge(params.transform_keys(&:to_s))
      uri.query = URI.encode_www_form(merged_params) unless merged_params.empty?
      uri.to_s
    end

    # Resolve dynamic headers (callable values are invoked per request)
    def self.resolve_headers(headers)
      return {} if headers.nil?

      headers.transform_values do |v|
        v.respond_to?(:call) ? v.call : v
      end
    end

    # Resolve dynamic params (callable values invoked; nil values dropped)
    def self.resolve_params(params)
      return {} if params.nil?

      params.transform_values do |v|
        v.respond_to?(:call) ? v.call : v
      end.compact
    end
  end
end
|
|
@@ -0,0 +1,211 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
require "json"
|
|
4
|
+
|
|
5
|
+
module DurableStreams
  # Reader for JSON streams - yields parsed Ruby objects.
  #
  # Tracks read position across fetches via @next_offset / @cursor, and
  # supports three consumption modes: catch-up only (live: false),
  # long-polling (live: :long_poll), and server-sent events (live: :sse,
  # delegated to SSEReader).
  class JsonReader
    attr_reader :next_offset, :cursor, :up_to_date, :status

    # @param stream [Stream] Parent stream handle
    # @param offset [String] Starting offset
    # @param live [Symbol, false] Live mode (:long_poll, :sse, false)
    # @param cursor [String, nil] Initial cursor
    def initialize(stream, offset: "-1", live: false, cursor: nil)
      @stream = stream
      @offset = DurableStreams.normalize_offset(offset)
      @live = live
      @next_offset = @offset
      @cursor = cursor
      @up_to_date = false
      @closed = false
      # HTTP status of the most recent fetch (set to 200 for SSE events).
      @status = nil
      @sse_reader = nil
    end

    # Iterate over individual JSON messages (flattens batches).
    # @yield [Object] Each parsed JSON message
    # @return [Enumerator] When no block is given
    def each(&block)
      return enum_for(:each) unless block_given?

      each_batch do |batch|
        batch.items.each(&block)
      end
    end

    # Iterate over batches with metadata.
    #
    # Reader position (@next_offset, @cursor, @up_to_date) is updated from
    # each batch BEFORE it is yielded, so the caller can checkpoint inside
    # the block.
    #
    # @yield [JsonBatch] Each batch with items, next_offset, cursor, up_to_date
    # @return [Enumerator] When no block is given
    def each_batch(&block)
      return enum_for(:each_batch) unless block_given?

      # Handle SSE mode
      if use_sse?
        each_batch_sse(&block)
        return
      end

      loop do
        break if @closed

        batch = fetch_next_json_batch
        break if batch.nil?

        @next_offset = batch.next_offset
        @cursor = batch.cursor
        @up_to_date = batch.up_to_date

        yield batch

        # Break for non-live modes when up_to_date
        break if @live == false && @up_to_date
        # Break for long-poll on 204 timeout (up_to_date with empty items = no new data)
        break if @live == :long_poll && @up_to_date && batch.items.empty? && @status == 204
        # Re-check: close may have been called from within the yielded block.
        break if @closed
      end
    end

    # Collect all messages until up_to_date.
    # NOTE(review): in live modes this blocks until the stream times out or
    # the reader is closed — intended for catch-up (live: false) use.
    # @return [Array]
    def to_a
      result = []
      each { |msg| result << msg }
      result
    end

    # Cancel/close the reader. Also closes any active SSE reader.
    def close
      @closed = true
      @sse_reader&.close
    end

    # @return [Boolean] Whether close has been called
    def closed?
      @closed
    end

    # @return [Boolean] Whether the last fetch reported the reader caught up
    def up_to_date?
      @up_to_date
    end

    private

    # SSE mode is handled by each_batch_sse instead of polling.
    def use_sse?
      @live == :sse
    end

    # Fetch one batch via HTTP GET and update reader position.
    # @return [JsonBatch, nil] nil only in SSE mode (handled elsewhere)
    # @raise [StreamNotFoundError] On 404
    # @raise [ParseError] On malformed JSON in the response body
    def fetch_next_json_batch
      params = { offset: @next_offset }
      params[:cursor] = @cursor if @cursor

      # Add live mode parameter
      case @live
      when :long_poll
        params[:live] = "long-poll"
      when :sse
        # SSE is handled separately
        return nil
      when false
        # No live param for catch-up only
      end

      request_url = HTTP.build_url(@stream.url, params)
      headers = @stream.resolved_headers("accept" => "application/json")

      response = @stream.transport.request(:get, request_url, headers: headers)
      @status = response.status

      if response.status == 404
        raise StreamNotFoundError.new(url: @stream.url)
      end

      # Handle 204 No Content (long-poll timeout)
      # Still parse headers as they contain offset info
      if response.status == 204
        @next_offset = response[STREAM_NEXT_OFFSET_HEADER] || @next_offset
        @cursor = response[STREAM_CURSOR_HEADER] || @cursor
        @up_to_date = true
        return JsonBatch.new(items: [], next_offset: @next_offset, cursor: @cursor, up_to_date: true)
      end

      unless response.success?
        raise DurableStreams.error_from_status(response.status, url: @stream.url, body: response.body,
                                               headers: response.headers)
      end

      # Offset/cursor/up_to_date come from response headers, with the
      # current values as fallbacks.
      headers = DurableStreams.parse_stream_headers(response, next_offset: @next_offset, cursor: @cursor)
      @next_offset = headers[:next_offset]
      @cursor = headers[:cursor]
      @up_to_date = headers[:up_to_date]

      # Parse JSON body
      items = if response.body && !response.body.empty?
        begin
          JSON.parse(response.body)
        rescue JSON::ParserError => e
          raise ParseError.new(
            "Invalid JSON response from server: #{e.message}"
          )
        end
      else
        []
      end

      # Ensure items is an array (a single JSON object becomes a 1-item batch)
      items = [items] unless items.is_a?(Array)

      JsonBatch.new(
        items: items,
        next_offset: @next_offset,
        cursor: @cursor,
        up_to_date: @up_to_date
      )
    end

    # SSE consumption path: delegates transport to SSEReader and converts
    # each event into a JsonBatch. The SSE reader is always closed on exit.
    # @raise [ParseError] On malformed JSON in an event payload
    def each_batch_sse(&block)
      @sse_reader = SSEReader.new(
        @stream,
        offset: @next_offset,
        cursor: @cursor
      )

      @sse_reader.each_event do |event|
        break if @closed

        @next_offset = event[:next_offset] if event[:next_offset]
        @cursor = event[:cursor] if event[:cursor]
        @up_to_date = event[:up_to_date]
        @status = 200

        # Only yield if there's data
        if event[:data] && !event[:data].empty?
          begin
            items = JSON.parse(event[:data])
          rescue JSON::ParserError => e
            raise ParseError.new(
              "Invalid JSON in SSE event: #{e.message}"
            )
          end
          items = [items] unless items.is_a?(Array)

          batch = JsonBatch.new(
            items: items,
            next_offset: @next_offset,
            cursor: @cursor,
            up_to_date: @up_to_date
          )
          yield batch
        elsif event[:up_to_date]
          # Yield empty batch on control event with up_to_date
          batch = JsonBatch.new(
            items: [],
            next_offset: @next_offset,
            cursor: @cursor,
            up_to_date: @up_to_date
          )
          yield batch
        end
      end
    ensure
      @sse_reader&.close
    end
  end
end
|