concurrent_pipeline 0.1.0

@@ -0,0 +1,133 @@
+ # frozen_string_literal: true
+
+ module ConcurrentPipeline
+   class Changeset
+     Result = Struct.new(:diff) do
+       alias diff? diff
+     end
+
+     InitialDelta = Struct.new(:data, :dup, keyword_init: true) do
+       def apply(store)
+         # We fully dup the data to avoid mutating the input
+         dup_data = YAML.unsafe_load(data.to_yaml)
+         store.set(dup_data)
+         Result.new(true)
+       end
+
+       def self.from_json(json)
+         new(data: json.fetch(:delta), dup: true)
+       end
+
+       def as_json(...)
+         {
+           action: :initial,
+           delta: data
+         }
+       end
+     end
+
+     CreateDelta = Struct.new(:type, :attributes, keyword_init: true) do
+       def apply(store)
+         store.create(type: type, attributes: attributes)
+         Result.new(true)
+       end
+
+       def self.from_json(json)
+         new(
+           type: json.fetch(:type),
+           attributes: json.fetch(:attributes)
+         )
+       end
+
+       def as_json(...)
+         {
+           action: :create,
+           type: type,
+           attributes: attributes
+         }
+       end
+     end
+
+     UpdateDelta = Struct.new(:id, :type, :delta, keyword_init: true) do
+       def apply(store)
+         current_model = store.find(type, id)
+
+         # TODO: detect if the record changed underfoot
+
+         Result.new(
+           store.update(
+             id: id,
+             type: type,
+             attributes: current_model.attributes.merge(delta)
+           )
+         )
+       end
+
+       def self.from_json(json)
+         new(
+           id: json.fetch(:id),
+           type: json.fetch(:type),
+           delta: json.fetch(:delta),
+         )
+       end
+
+       def as_json(...)
+         {
+           action: :update,
+           id: id,
+           type: type,
+           delta: delta
+         }
+       end
+     end
+
+     def self.from_json(registry:, json:)
+       type_map = {
+         initial: InitialDelta,
+         create: CreateDelta,
+         update: UpdateDelta,
+       }
+
+       new(
+         registry: registry
+       ).tap do |changeset|
+         json.fetch(:changes).each do |change|
+           type_map
+             .fetch(change.fetch(:action))
+             .from_json(change)
+             .then { changeset.deltas << _1 }
+         end
+       end
+     end
+
+     attr_reader :deltas, :registry
+     def initialize(registry:)
+       @registry = registry
+       @deltas = []
+     end
+
+     def deltas?
+       !@deltas.empty?
+     end
+
+     def create(type, attributes)
+       with_id = { id: SecureRandom.uuid }.merge(attributes)
+       @deltas << CreateDelta.new(type: type, attributes: with_id)
+     end
+
+     def update(model, delta)
+       type = registry.type_for(model.class)
+       @deltas << UpdateDelta.new(id: model.id, type: type, delta: delta)
+     end
+
+     def apply(...)
+       deltas.map { _1.apply(...) }
+     end
+
+     def as_json(...)
+       {
+         changes: deltas.map { _1.as_json(...) }
+       }
+     end
+   end
+ end
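
The Changeset class above batches deltas and serializes them; nothing touches a store until apply is called. Below is a minimal, hypothetical usage sketch: the Registry and Server objects are stand-ins invented for illustration and only satisfy the interfaces the class itself calls (registry.type_for, model.id, model.attributes). The store interface the deltas use (set/create/update/find) is not exercised here.

# Hypothetical usage sketch, not part of the gem.
require "securerandom"

Registry = Struct.new(:types) do
  def type_for(klass)
    types.fetch(klass)
  end
end

Server = Struct.new(:id, :attributes, keyword_init: true)

registry = Registry.new({ Server => :server })
changeset = ConcurrentPipeline::Changeset.new(registry: registry)

# Deltas are only queued; a store is touched only when #apply runs.
changeset.create(:server, { name: "web-1" })
server = Server.new(id: "abc123", attributes: { name: "web-1", status: "new" })
changeset.update(server, { status: "provisioned" })

changeset.deltas?  # => true
payload = changeset.as_json
# => { changes: [{ action: :create, ... }, { action: :update, ... }] }

# Round trip: rebuild equivalent deltas from the serialized form.
restored = ConcurrentPipeline::Changeset.from_json(registry: registry, json: payload)
restored.deltas.size  # => 2

Because each delta also knows how to rebuild itself from its serialized form, a changeset can be persisted and replayed later against any store that exposes the set/create/update/find methods the deltas call.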
@@ -0,0 +1,31 @@
+ # frozen_string_literal: true
+
+ module ConcurrentPipeline
+   module Model
+     module InstanceMethods
+       attr_reader :attributes
+
+       def initialize(attributes)
+         @attributes = attributes
+       end
+     end
+
+     def self.extended(base)
+       base.include(InstanceMethods)
+     end
+
+     def inherited(base)
+       base.instance_variable_set(:@attributes, attributes.dup)
+     end
+
+     def attributes
+       @attributes ||= {}
+     end
+
+     def attribute(name, **opts)
+       attributes[name] = opts
+
+       define_method(name) { attributes[name] }
+     end
+   end
+ end
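
The Model mixin above is a small attribute DSL: extending a class adds the attribute macro, instances are built from a plain attributes hash, and each declared name becomes a reader into that hash. The opts passed to attribute are stored on the class but not otherwise interpreted by the code in this diff. A hypothetical sketch (the Host class and its attributes are invented for illustration):

# Hypothetical sketch, not shipped with the gem.
class Host
  extend ConcurrentPipeline::Model

  attribute :id
  attribute :name
  attribute :status
end

host = Host.new(id: "abc123", name: "web-1", status: "new")
host.name        # => "web-1"  (reader generated by `attribute`)
host.attributes  # => { id: "abc123", name: "web-1", status: "new" }

# Class-level metadata: declared attribute names mapped to their opts.
Host.attributes  # => { id: {}, name: {}, status: {} }

# Subclasses start from a copy of the parent's declarations (via .inherited).
class Vhost < Host
  attribute :port
end
Vhost.attributes.keys  # => [:id, :name, :status, :port]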
@@ -0,0 +1,214 @@
+ require "time"
+
+ module ConcurrentPipeline
+   class Pipeline
+
+     # {
+     #   type: PipelineStep,
+     #   pipeline_id: [MyPipeline, :vhost, 1],
+     #   name: {string},
+     #   result: nil | :success | :failure,
+     #   completed_at: nil | {timestamp},
+     #   sequence: 3
+     # }
+
+     class PipelineStep
+       extend Model
+
+       attribute :id
+       attribute :pipeline_id
+       attribute :name
+       attribute :result
+       attribute :completed_at
+       attribute :sequence
+       attribute :error_message
+
+       def success?
+         result == :success
+       end
+     end
+
+     class Wrapper
+       attr_reader :pipeline, :pool
+       def initialize(pipeline:, pool:)
+         @pipeline = pipeline
+         @pool = pool
+       end
+
+       def id
+         pipeline_id = (
+           if pipeline.class.target_type
+             pipeline.target.id
+           end
+         )
+
+         [pipeline.class.name, pipeline_id].compact.join("__")
+       end
+
+       def perform
+         if pipeline_steps.empty?
+           create_pipeline_steps
+         else
+           pipeline_steps
+             .reject(&:completed_at)
+             .group_by(&:sequence)
+             .values
+             .first
+             .map { |step|
+               wrapper = self
+               -> () do
+                 begin
+                   wrapper.pipeline.public_send(step.name)
+                   wrapper.changeset.update(
+                     step,
+                     completed_at: Time.now.iso8601,
+                     result: :success
+                   )
+                 rescue => e
+                   wrapper.changeset.update(
+                     step,
+                     completed_at: Time.now.iso8601,
+                     result: :failure,
+                     error: {class: e.class, message: e.message, backtrace: e.backtrace}
+                   )
+                 end
+               end
+             }
+             .then { pool.process(_1) }
+         end
+       end
+
+       def should_perform?
+         ready? && !done?
+       end
+
+       def create_pipeline_steps
+         sequence = (
+           if pipeline.respond_to?(:steps)
+             pipeline.steps
+           else
+             [:perform]
+           end
+         )
+
+         sequence.each_with_index do |sub_seq, i|
+           Array(sub_seq).each do |step_name|
+             changeset.create(
+               PipelineStep,
+               pipeline_id: id,
+               name: step_name,
+               sequence: i
+             )
+           end
+         end
+       end
+
+       def pipeline_steps
+         @pipeline_steps ||= (
+           store
+             .all(PipelineStep)
+             .select { _1.pipeline_id == id }
+             .sort_by(&:sequence)
+         )
+       end
+
+       def ready?
+         if pipeline.respond_to?(:ready?)
+           pipeline.ready?
+         else
+           true
+         end
+       end
+
+       def done?
+         if pipeline.respond_to?(:done?)
+           pipeline.done?
+         else
+           !pipeline_steps.empty? && pipeline_steps.all?(&:completed_at)
+         end
+       end
+
+       def store
+         pipeline.store
+       end
+
+       def changeset
+         pipeline.changeset
+       end
+
+       def stream(type, payload)
+         pipeline.stream(type, payload)
+       end
+     end
+
+     class << self
+       attr_reader(:target_type)
+
+       def build_pipelines(store:, stream:, pool:)
+         if target_type
+           store.all(target_type).map { |record|
+             Wrapper.new(
+               pipeline: new(
+                 target: record,
+                 store: store,
+                 changeset: store.changeset,
+                 stream: stream
+               ),
+               pool: pool
+             )
+           }
+         else
+           Wrapper.new(
+             pipeline: new(
+               target: nil,
+               store: store,
+               changeset: store.changeset,
+               stream: stream
+             ),
+             pool: pool
+           )
+         end
+       end
+
+       def each(type, as: nil)
+         @target_type = type
+         define_method(as) { target } if as
+         define_method(:record) { target }
+       end
+
+       def ready(...)
+         define_method(:ready?, ...)
+       end
+
+       def done(...)
+         define_method(:done?, ...)
+       end
+
+       def perform(...)
+         steps(:perform)
+         define_method(:perform, ...)
+       end
+
+       def steps(*sequence)
+         define_method(:steps) { sequence }
+       end
+
+       def concurrency(size = nil)
+         @concurrency = size if size
+         @concurrency
+       end
+     end
+
+     attr_reader :target, :store, :changeset
+     def initialize(target:, store:, changeset:, stream:)
+       @target = target
+       @store = store
+       @changeset = changeset
+       @stream = stream
+     end
+
+     def stream(type, payload)
+       @stream.push(type, payload)
+     end
+   end
+ end
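
Putting the class-level DSL together, a subclass might look like the hypothetical sketch below. It reuses the invented Host model from the earlier sketch; how a pipeline is wired to a store, stream, and worker pool (and how build_pipelines gets invoked) is outside this diff, so only the declarations and step methods are shown.

# Hypothetical sketch, not part of the gem.
class ProvisionHost < ConcurrentPipeline::Pipeline
  each Host, as: :host  # one pipeline per Host record; also defines #host (and #record)
  concurrency 3         # stored and read back via .concurrency

  # Step names grouped by sequence: names inside the same group are handed
  # to the pool together; groups run in declaration order.
  steps :create_vm, [:install_packages, :configure_dns], :verify

  ready { host.status == "new" }    # gates Wrapper#should_perform?
  done  { host.status == "ready" }  # overrides the default all-steps-completed check

  def create_vm
    changeset.update(host, status: "provisioning")
  end

  def install_packages
    # provisioning work would go here
  end

  def configure_dns
    # provisioning work would go here
  end

  def verify
    changeset.update(host, status: "ready")
  end
end

At run time, Wrapper#create_pipeline_steps expands the steps declaration into PipelineStep records (names in a nested array share a sequence index), and Wrapper#perform hands the earliest incomplete sequence group to the pool as lambdas, recording completed_at and a :success or :failure result on each step through the changeset.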