hastci 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,207 @@
1
+ # frozen_string_literal: true
2
+
3
module HastCI
  # In-memory buffer that prefetches CI tasks on a background thread.
  #
  # A single producer thread keeps an internal SizedQueue topped up by calling
  # +fetcher+ whenever the buffered count drops below +min_size+, never holding
  # more than +max_size+ tasks. Consumers pull work with #next_task, which
  # blocks until a task arrives or the queue is closed. Fatal fetch errors are
  # reported to +error_collector+ and surfaced to the consumer from #next_task.
  class TaskBuffer
    DEFAULT_POLL_INTERVAL = 0.5 # seconds: retry backoff and condition-wait timeout
    SHUTDOWN_TIMEOUT = 5        # seconds: how long #stop waits for the thread to finish

    private_constant :DEFAULT_POLL_INTERVAL, :SHUTDOWN_TIMEOUT

    # @param min_size [Integer] refill threshold; prefetch fires when fewer tasks are buffered
    # @param max_size [Integer] hard capacity of the internal SizedQueue
    # @param fetcher [#call] invoked with the free capacity; returns an object responding to
    #   #cancelled?, #empty? and #tasks, or raises one of the errors handled in #safe_fetch
    # @param error_collector [#report, #first_error] shared sink for fatal errors
    # @param poll_interval [Numeric] sleep/wait granularity in seconds
    # @param on_cancelled [#call, nil] optional hook fired when the fetch result is cancelled
    # @param sleeper [#call] injectable sleep implementation (swapped out in tests)
    def initialize(min_size:, max_size:, fetcher:, error_collector:, poll_interval: DEFAULT_POLL_INTERVAL,
                   on_cancelled: nil, sleeper: HastCI::DEFAULT_SLEEPER)
      @min_size = min_size
      @max_size = max_size
      @fetcher = fetcher
      @error_collector = error_collector
      @poll_interval = poll_interval
      @on_cancelled = on_cancelled
      @sleeper = sleeper

      @queue = SizedQueue.new(max_size)
      @thread = nil
      @running = false
      @mutex = Mutex.new
      @prefetch_condition = ConditionVariable.new
      @drained = false
      @cancelled = false
    end

    # Start the background prefetch thread. Idempotent: a no-op when already running.
    def start
      @mutex.synchronize do
        return if @running

        @running = true
        @drained = false
        @cancelled = false
        @thread = Thread.new { prefetch_loop }
      end
    end

    # Stop the prefetch thread and close the queue, waking any consumer blocked
    # in #next_task. Waits up to SHUTDOWN_TIMEOUT for the thread to exit.
    # Idempotent: a no-op unless currently running.
    def stop
      @mutex.synchronize do
        return unless @running

        @running = false
        @prefetch_condition.signal
      end

      @thread.join(SHUTDOWN_TIMEOUT)
      @queue.close
    end

    def running?
      @mutex.synchronize { @running }
    end

    # Pop the next task, blocking until one is available or the queue is closed.
    # Raises the first collected fatal error — checked both before blocking and
    # again after a nil pop, so errors raised while we were waiting are not
    # swallowed. Returns nil once the buffer is exhausted/closed without error.
    def next_task
      error = @error_collector.first_error
      raise error if error

      task = @queue.pop

      if task.nil?
        error = @error_collector.first_error
        raise error if error

        return nil
      end

      # Popping freed a slot; wake the prefetcher if we dipped below min_size.
      signal_prefetch_if_needed
      task
    end

    def size
      @queue.size
    end

    # True once the remote queue reported itself drained AND every locally
    # buffered task has been consumed.
    def drained?
      @drained && @queue.empty?
    end

    def cancelled?
      @mutex.synchronize { @cancelled }
    end

    private

    def signal_prefetch_if_needed
      @mutex.synchronize do
        @prefetch_condition.signal if @queue.size < @min_size
      end
    end

    # Body of the background thread: fetch batches until stopped, drained, or a
    # fatal error has been collected. Always closes the queue on the way out so
    # consumers blocked in #next_task are released.
    def prefetch_loop
      loop do
        should_continue = @mutex.synchronize { @running && !@drained && !@error_collector.first_error }
        break unless should_continue

        fetch_batch
      end
    ensure
      @queue.close
    end

    # One iteration of the prefetch loop: wait until a refill is needed,
    # re-check state (it may have changed while waiting), then fetch and
    # process a batch sized to the current free capacity.
    def fetch_batch
      wait_for_prefetch_signal

      should_fetch = @mutex.synchronize { @running && !@drained }
      return unless should_fetch

      capacity = available_capacity

      result = safe_fetch(capacity)
      return unless result

      process_fetch_result(result, requested_capacity: capacity)
    end

    # Block until the buffer needs refilling (size < min_size) or we are
    # stopping/drained. The timed wait doubles as a periodic re-check in case a
    # signal is missed.
    def wait_for_prefetch_signal
      @mutex.synchronize do
        while @running && !@drained && @queue.size >= @min_size
          @prefetch_condition.wait(@mutex, @poll_interval)
        end
      end
    end

    # Invoke the fetcher, translating each error class into the appropriate
    # handler. Returns nil on any handled error (callers skip processing).
    # The bare rescue catches StandardError only — unexpected errors are fatal.
    def safe_fetch(capacity)
      @fetcher.call(capacity)
    rescue QueueDrained
      handle_queue_drained
    rescue RetryExhaustedError => e
      handle_fatal_fetch_error(e, "Prefetch failed after retries")
    rescue FatalApiError => e
      handle_fatal_fetch_error(e, "Fatal error during prefetch")
    rescue RetryableError => e
      handle_transient_fetch_error(e)
    rescue => e
      handle_fatal_fetch_error(e, "Unexpected error during prefetch")
    end

    # Dispatch on the fetch result: cancellation and emptiness take precedence,
    # then guard against the server over-delivering, otherwise enqueue.
    def process_fetch_result(result, requested_capacity:)
      return handle_stop_signal if result.cancelled?
      return handle_empty_response if result.empty?
      return handle_overflow(result, requested_capacity: requested_capacity) if result.tasks.size > requested_capacity

      push_tasks(result.tasks)
    end

    # Remote queue exhausted: mark drained and close so consumers can finish.
    def handle_queue_drained
      @mutex.synchronize { @drained = true }
      @queue.close
      nil
    end

    # Record the error for #next_task to raise, close the queue to unblock
    # consumers, and log. Returns nil so safe_fetch yields no result.
    def handle_fatal_fetch_error(error, message)
      @error_collector.report(error)
      @queue.close
      HastCI.logger.error("#{message}: #{error.message}")
      nil
    end

    # Transient failure: log a warning and back off before the next attempt.
    def handle_transient_fetch_error(error)
      HastCI.logger.warn("Prefetch failed (will retry): #{error.message}")
      @sleeper.call(@poll_interval)
      nil
    end

    # Server cancelled the run: flag cancelled + drained (stops the loop),
    # fire the optional callback, and release blocked consumers.
    def handle_stop_signal
      @mutex.synchronize do
        @cancelled = true
        @drained = true
      end
      @on_cancelled&.call
      @queue.close
    end

    # Nothing available yet (but not drained): back off before polling again.
    def handle_empty_response
      @sleeper.call(@poll_interval)
    end

    # The server returned more tasks than we asked for — treat as a fatal
    # protocol violation rather than silently dropping the excess.
    def handle_overflow(result, requested_capacity:)
      error = FatalApiError.new(
        "Server returned #{result.tasks.size} tasks but buffer only has capacity for #{requested_capacity}"
      )
      @error_collector.report(error)
      @queue.close
      HastCI.logger.error("Task overflow: #{error.message}")
    end

    def available_capacity
      @max_size - @queue.size
    end

    # Enqueue tasks without blocking. Since this thread is the only producer
    # and capacity was computed before fetching (consumers only free slots),
    # a full-queue ThreadError should be unreachable — hence the :nocov:.
    # ClosedQueueError covers a concurrent #stop; in either case we bail out.
    def push_tasks(tasks)
      tasks.each do |task|
        break unless running?

        begin
          @queue.push(task, true)
        # :nocov:
        rescue ThreadError, ClosedQueueError
          break
        end
        # :nocov:
      end
    end
  end
end
@@ -0,0 +1,37 @@
1
+ # frozen_string_literal: true
2
+
3
module HastCI
  # Immutable value object describing the outcome of one executed task.
  # Instances are frozen at construction and implement value equality
  # (== / eql? / hash), so they can be shared across threads and used as
  # Hash keys or Set members safely.
  class TaskResult
    attr_reader :task_id, :status, :duration_s, :logs

    # @param task_id [Object] identifier of the task this result belongs to
    # @param status [Symbol] outcome, e.g. :passed or :failed
    # @param duration_s [Numeric] wall-clock runtime in seconds
    # @param logs [Object] captured output associated with the run
    def initialize(task_id:, status:, duration_s:, logs:)
      @task_id = task_id
      @status = status
      @duration_s = duration_s
      @logs = logs
      freeze
    end

    # True when the task finished with status :passed.
    def passed?
      @status == :passed
    end

    # True when the task finished with status :failed.
    def failed?
      @status == :failed
    end

    # Value equality: another TaskResult with all four attributes matching.
    def ==(other)
      return false unless other.is_a?(TaskResult)

      [task_id, status, duration_s, logs] ==
        [other.task_id, other.status, other.duration_s, other.logs]
    end

    alias_method :eql?, :==

    # Hash over the same attribute tuple used by #==, keeping the
    # eql?/hash contract consistent.
    def hash
      [task_id, status, duration_s, logs].hash
    end
  end
end
@@ -0,0 +1,7 @@
1
+ # frozen_string_literal: true
2
+
3
# :nocov:
module HastCI
  # Gem version, following Semantic Versioning (https://semver.org).
  VERSION = "0.1.0"
end
# :nocov:
data/lib/hastci.rb ADDED
@@ -0,0 +1,35 @@
1
+ # frozen_string_literal: true
2
+
3
+ require "logger"
4
+ require "zeitwerk"
5
+
6
+ require_relative "hastci/version"
7
+
8
+ loader = Zeitwerk::Loader.for_gem
9
+ loader.inflector.inflect("hastci" => "HastCI", "rspec" => "RSpec", "cli" => "CLI")
10
+ loader.setup
11
+
12
module HastCI
  # Default sleep implementation; injected into collaborators so tests can
  # substitute a fake and avoid real waiting.
  DEFAULT_SLEEPER = Kernel.method(:sleep)

  class << self
    # Process-wide logger, built lazily on first use (writes to stderr).
    #
    # @return [Logger]
    def logger
      @logger ||= Logger.new($stderr)
    end

    # Allow callers (and tests) to replace the logger.
    attr_writer :logger
  end

  # Run RSpec tests through the HastCI distributed runner.
  #
  # @param argv [Array<String>] RSpec command-line arguments
  # @param env [Hash] Environment variables (default: ENV)
  # @param err [IO] Error output stream (default: $stderr)
  # @param out [IO] Output stream (default: $stdout)
  # @return [Integer] Exit code (0 = success)
  def self.run_rspec(argv:, env: ENV, err: $stderr, out: $stdout)
    CLI.run(argv: argv, env: env, err: err, out: out) do |session, args, err_io, out_io|
      Adapters::RSpec::Runner.new(argv: args, session: session, err: err_io, out: out_io).run
    end
  end
end
data/sig/hastci.rbs ADDED
@@ -0,0 +1,4 @@
1
module HastCI
  # Gem version string (mirrors lib/hastci/version.rb).
  VERSION: String
  # See the writing guide of rbs: https://github.com/ruby/rbs#guides
end
@@ -0,0 +1,385 @@
1
+ {
2
+ "consumer": {
3
+ "name": "HastCI RSpec"
4
+ },
5
+ "provider": {
6
+ "name": "HastCI API"
7
+ },
8
+ "interactions": [
9
+ {
10
+ "description": "a request to initialize a run",
11
+ "providerState": "a valid API key",
12
+ "request": {
13
+ "method": "post",
14
+ "path": "/runs/init",
15
+ "headers": {
16
+ "Authorization": "Bearer test-api-key",
17
+ "Content-Type": "application/json"
18
+ },
19
+ "body": {
20
+ "run_key": "test-run-key",
21
+ "worker_id": "worker-0",
22
+ "commit_sha": "abc123"
23
+ },
24
+ "matchingRules": {
25
+ "$.body.run_key": {
26
+ "match": "type"
27
+ },
28
+ "$.body.worker_id": {
29
+ "match": "type"
30
+ },
31
+ "$.body.commit_sha": {
32
+ "match": "type"
33
+ }
34
+ }
35
+ },
36
+ "response": {
37
+ "status": 200,
38
+ "headers": {
39
+ "Content-Type": "application/json"
40
+ },
41
+ "body": {
42
+ "run_id": "run-uuid-123",
43
+ "status": "seeding",
44
+ "role": "seeder"
45
+ },
46
+ "matchingRules": {
47
+ "$.body.run_id": {
48
+ "match": "type"
49
+ },
50
+ "$.body.status": {
51
+ "match": "regex",
52
+ "regex": "^(seeding|ready|draining)$"
53
+ },
54
+ "$.body.role": {
55
+ "match": "regex",
56
+ "regex": "^(seeder|participant)$"
57
+ }
58
+ }
59
+ }
60
+ },
61
+ {
62
+ "description": "a request to initialize a run with invalid credentials",
63
+ "providerState": "an invalid API key",
64
+ "request": {
65
+ "method": "post",
66
+ "path": "/runs/init",
67
+ "headers": {
68
+ "Authorization": "Bearer invalid-api-key",
69
+ "Content-Type": "application/json"
70
+ },
71
+ "body": {
72
+ "run_key": "test-run-key",
73
+ "worker_id": "worker-0",
74
+ "commit_sha": "abc123"
75
+ },
76
+ "matchingRules": {
77
+ "$.body.run_key": {
78
+ "match": "type"
79
+ },
80
+ "$.body.worker_id": {
81
+ "match": "type"
82
+ },
83
+ "$.body.commit_sha": {
84
+ "match": "type"
85
+ }
86
+ }
87
+ },
88
+ "response": {
89
+ "status": 401,
90
+ "headers": {
91
+ "Content-Type": "application/json"
92
+ },
93
+ "body": {
94
+ "error": "Unauthorized"
95
+ },
96
+ "matchingRules": {
97
+ "$.body.error": {
98
+ "match": "type"
99
+ }
100
+ }
101
+ }
102
+ },
103
+ {
104
+ "description": "a request for run status",
105
+ "providerState": "a run exists",
106
+ "request": {
107
+ "method": "get",
108
+ "path": "/runs/run-123/status",
109
+ "headers": {
110
+ "Authorization": "Bearer test-api-key"
111
+ }
112
+ },
113
+ "response": {
114
+ "status": 200,
115
+ "headers": {
116
+ "Content-Type": "application/json"
117
+ },
118
+ "body": {
119
+ "status": "ready"
120
+ },
121
+ "matchingRules": {
122
+ "$.body.status": {
123
+ "match": "regex",
124
+ "regex": "^(seeding|ready|draining)$"
125
+ }
126
+ }
127
+ }
128
+ },
129
+ {
130
+ "description": "a request to seed tasks",
131
+ "providerState": "a run exists",
132
+ "request": {
133
+ "method": "post",
134
+ "path": "/runs/run-123/seed",
135
+ "headers": {
136
+ "Authorization": "Bearer test-api-key",
137
+ "Content-Type": "application/json"
138
+ },
139
+ "body": {
140
+ "tasks": [
141
+ {
142
+ "name": "spec/models/user_spec.rb"
143
+ },
144
+ {
145
+ "name": "spec/models/post_spec.rb"
146
+ }
147
+ ]
148
+ }
149
+ },
150
+ "response": {
151
+ "status": 200,
152
+ "headers": {
153
+ "Content-Type": "application/json"
154
+ },
155
+ "body": {
156
+ "seeded": 2,
157
+ "status": "ready"
158
+ }
159
+ }
160
+ },
161
+ {
162
+ "description": "a request to claim tasks",
163
+ "providerState": "tasks are available",
164
+ "request": {
165
+ "method": "post",
166
+ "path": "/tasks/claim",
167
+ "query": "batch=10",
168
+ "headers": {
169
+ "Authorization": "Bearer test-api-key",
170
+ "Content-Type": "application/json"
171
+ },
172
+ "body": {
173
+ "run_key": "test-run-key",
174
+ "worker_id": "worker-0"
175
+ }
176
+ },
177
+ "response": {
178
+ "status": 200,
179
+ "headers": {
180
+ "Content-Type": "application/json"
181
+ },
182
+ "body": {
183
+ "tasks": [
184
+ {
185
+ "id": "task-1",
186
+ "name": "spec/models/user_spec.rb"
187
+ },
188
+ {
189
+ "id": "task-2",
190
+ "name": "spec/models/post_spec.rb"
191
+ }
192
+ ],
193
+ "queue_state": "ready",
194
+ "remaining": {
195
+ "queued": 10,
196
+ "assigned": 2,
197
+ "completed": 5
198
+ }
199
+ },
200
+ "matchingRules": {
201
+ "$.body.tasks[0].id": {
202
+ "match": "type"
203
+ },
204
+ "$.body.tasks[1].id": {
205
+ "match": "type"
206
+ },
207
+ "$.body.remaining.queued": {
208
+ "match": "type"
209
+ },
210
+ "$.body.remaining.assigned": {
211
+ "match": "type"
212
+ },
213
+ "$.body.remaining.completed": {
214
+ "match": "type"
215
+ }
216
+ }
217
+ }
218
+ },
219
+ {
220
+ "description": "a request to claim tasks from empty queue",
221
+ "providerState": "queue is empty but not drained",
222
+ "request": {
223
+ "method": "post",
224
+ "path": "/tasks/claim",
225
+ "query": "batch=10",
226
+ "headers": {
227
+ "Authorization": "Bearer test-api-key",
228
+ "Content-Type": "application/json"
229
+ },
230
+ "body": {
231
+ "run_key": "test-run-key",
232
+ "worker_id": "worker-0"
233
+ }
234
+ },
235
+ "response": {
236
+ "status": 204,
237
+ "headers": {
238
+ },
239
+ "body": ""
240
+ }
241
+ },
242
+ {
243
+ "description": "a request to claim from drained queue",
244
+ "providerState": "queue is drained",
245
+ "request": {
246
+ "method": "post",
247
+ "path": "/tasks/claim",
248
+ "query": "batch=10",
249
+ "headers": {
250
+ "Authorization": "Bearer test-api-key"
251
+ },
252
+ "body": {
253
+ "run_key": "test-run-key",
254
+ "worker_id": "worker-0"
255
+ }
256
+ },
257
+ "response": {
258
+ "status": 410,
259
+ "headers": {
260
+ "Content-Type": "application/json"
261
+ },
262
+ "body": {
263
+ "queue_state": "drained"
264
+ }
265
+ }
266
+ },
267
+ {
268
+ "description": "a request to ack a task",
269
+ "providerState": "a task exists",
270
+ "request": {
271
+ "method": "post",
272
+ "path": "/tasks/task-123/ack",
273
+ "headers": {
274
+ "Authorization": "Bearer test-api-key",
275
+ "Content-Type": "application/json"
276
+ },
277
+ "body": {
278
+ "status": "passed",
279
+ "duration_s": 1.5,
280
+ "logs": {
281
+ "summary": "1 example, 0 failures",
282
+ "failures": [
283
+ ]
284
+ }
285
+ }
286
+ },
287
+ "response": {
288
+ "status": 200,
289
+ "headers": {
290
+ "Content-Type": "application/json"
291
+ },
292
+ "body": {
293
+ "ok": true
294
+ }
295
+ }
296
+ },
297
+ {
298
+ "description": "a heartbeat request",
299
+ "providerState": "a worker exists",
300
+ "request": {
301
+ "method": "post",
302
+ "path": "/workers/heartbeat",
303
+ "headers": {
304
+ "Authorization": "Bearer test-api-key",
305
+ "Content-Type": "application/json"
306
+ },
307
+ "body": {
308
+ "run_key": "test-run-key",
309
+ "worker_id": "worker-0"
310
+ }
311
+ },
312
+ "response": {
313
+ "status": 204,
314
+ "headers": {
315
+ },
316
+ "body": ""
317
+ }
318
+ },
319
+ {
320
+ "description": "a request for a non-existent resource",
321
+ "providerState": "resource not found",
322
+ "request": {
323
+ "method": "get",
324
+ "path": "/runs/invalid-run/status",
325
+ "headers": {
326
+ "Authorization": "Bearer test-api-key"
327
+ }
328
+ },
329
+ "response": {
330
+ "status": 404,
331
+ "headers": {
332
+ }
333
+ }
334
+ },
335
+ {
336
+ "description": "a request that causes server error",
337
+ "providerState": "server error",
338
+ "request": {
339
+ "method": "post",
340
+ "path": "/runs/init",
341
+ "headers": {
342
+ "Authorization": "Bearer test-api-key",
343
+ "Content-Type": "application/json"
344
+ },
345
+ "body": {
346
+ "run_key": "test-run-key",
347
+ "worker_id": "worker-0",
348
+ "commit_sha": "abc123"
349
+ }
350
+ },
351
+ "response": {
352
+ "status": 500,
353
+ "headers": {
354
+ }
355
+ }
356
+ },
357
+ {
358
+ "description": "a request that returns 302",
359
+ "providerState": "unexpected redirect",
360
+ "request": {
361
+ "method": "post",
362
+ "path": "/runs/init",
363
+ "headers": {
364
+ "Authorization": "Bearer test-api-key",
365
+ "Content-Type": "application/json"
366
+ },
367
+ "body": {
368
+ "run_key": "test-run-key",
369
+ "worker_id": "worker-0",
370
+ "commit_sha": "abc123"
371
+ }
372
+ },
373
+ "response": {
374
+ "status": 302,
375
+ "headers": {
376
+ }
377
+ }
378
+ }
379
+ ],
380
+ "metadata": {
381
+ "pactSpecification": {
382
+ "version": "2.0.0"
383
+ }
384
+ }
385
+ }