async_pipeline 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/.rubocop.yml +14 -0
- data/.ruby-version +1 -0
- data/README.md +11 -0
- data/Rakefile +16 -0
- data/async_pipeline.gemspec +29 -0
- data/lib/async_pipeline/changeset.rb +46 -0
- data/lib/async_pipeline/chunk.rb +49 -0
- data/lib/async_pipeline/model.rb +31 -0
- data/lib/async_pipeline/pipeline.rb +78 -0
- data/lib/async_pipeline/processor.rb +202 -0
- data/lib/async_pipeline/read_only_store.rb +22 -0
- data/lib/async_pipeline/registry.rb +29 -0
- data/lib/async_pipeline/shell.rb +50 -0
- data/lib/async_pipeline/store.rb +27 -0
- data/lib/async_pipeline/stores/yaml.rb +115 -0
- data/lib/async_pipeline/version.rb +5 -0
- data/lib/async_pipeline.rb +55 -0
- metadata +64 -0
checksums.yaml
ADDED
@@ -0,0 +1,7 @@
+---
+SHA256:
+  metadata.gz: c923079b7f9cb16b354ff63bdde17ec7102978a533a8bf7e21ef7cef618fee39
+  data.tar.gz: 54dee3f2a5690fd5a34e3603067b2f5ce3261044a07948a35871014eaa702e3c
+SHA512:
+  metadata.gz: e4447679eea0e2298c72a946531ec422cd3caefe7d36acbcda3c61b2cddcc5ee3df9c3502586a556d0e2671f095fb6971908bcfca70277ae6a160c2115dd4f73
+  data.tar.gz: 791d1a309697cd7c940566493ff5c335900d746d310da969d8addf25b7d98e1e18442bfaef56b0a6a5239a9c6f603b1fcee1cb1b99f0279fb5c28d959b79d040
data/.rubocop.yml
ADDED
@@ -0,0 +1,14 @@
+AllCops:
+  TargetRubyVersion: 3.2
+  DisabledByDefault: true
+
+Style/StringLiterals:
+  Enabled: true
+  EnforcedStyle: double_quotes
+
+Style/StringLiteralsInInterpolation:
+  Enabled: true
+  EnforcedStyle: double_quotes
+
+Layout/LineLength:
+  Max: 120
data/.ruby-version
ADDED
@@ -0,0 +1 @@
+3.2.3
data/README.md
ADDED
data/Rakefile
ADDED
@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+require "bundler/gem_tasks"
+require "rake/testtask"
+
+Rake::TestTask.new(:test) do |t|
+  t.libs << "test"
+  t.libs << "lib"
+  t.test_files = FileList["test/**/test_*.rb"]
+end
+
+require "rubocop/rake_task"
+
+RuboCop::RakeTask.new
+
+task default: %i[test rubocop]
data/async_pipeline.gemspec
ADDED
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+require_relative "lib/async_pipeline/version"
+
+Gem::Specification.new do |spec|
+  spec.name = "async_pipeline"
+  spec.version = AsyncPipeline::VERSION
+  spec.authors = ["Pete Kinnecom"]
+  spec.email = ["git@k7u7.com"]
+
+  spec.summary = "Define a pipeline of async tasks and their starting conditions"
+  spec.homepage = "https://github.com/petekinnecom/async_pipeline"
+  spec.license = "WTFPL"
+  spec.required_ruby_version = ">= 3.0.0"
+  spec.metadata["allowed_push_host"] = "https://rubygems.org"
+  spec.metadata["homepage_uri"] = spec.homepage
+  spec.metadata["source_code_uri"] = spec.homepage
+  spec.metadata["changelog_uri"] = spec.homepage
+
+  spec.files = Dir.chdir(__dir__) do
+    `git ls-files -z`.split("\x0").reject do |f|
+      (File.expand_path(f) == __FILE__) ||
+        f.start_with?(*%w[bin/ test/ spec/ features/ .git .circleci appveyor Gemfile])
+    end
+  end
+  spec.bindir = "exe"
+  spec.executables = spec.files.grep(%r{\Aexe/}) { |f| File.basename(f) }
+  spec.require_paths = ["lib"]
+end
data/lib/async_pipeline/changeset.rb
ADDED
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+module AsyncPipeline
+  class Changeset
+    CreateDelta = Struct.new(:type, :attributes, keyword_init: true) do
+      def apply(store)
+        store.create(type: type, attributes: attributes)
+      end
+    end
+
+    UpdateDelta = Struct.new(:model, :delta, keyword_init: true) do
+      def apply(store)
+        current_model = store.find(id: model.id, type: model.class)
+
+        # TODO: detect if changed underfoot
+
+        store.update(
+          id: model.id,
+          type: model.class,
+          attributes: current_model.attributes.merge(delta)
+        )
+      end
+    end
+
+    attr_reader :deltas
+    def initialize
+      @deltas = []
+    end
+
+    def deltas?
+      !@deltas.empty?
+    end
+
+    def create(type, attributes)
+      @deltas << CreateDelta.new(type: type, attributes: attributes)
+    end
+
+    def update(model, delta)
+      @deltas << UpdateDelta.new(model: model, delta: delta)
+    end
+
+    def apply(...)
+      deltas.each { _1.apply(...) }
+    end
+  end
+end
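The Changeset above buffers deltas while a chunk works and replays them against a store later, in one batch. A minimal usage sketch, assuming the gem is installed; FakeStore is a hypothetical stand-in implementing just the create call that CreateDelta needs:

require "async_pipeline"

# Hypothetical in-memory stand-in for the store interface CreateDelta expects.
FakeStore = Struct.new(:rows) do
  def create(type:, attributes:)
    rows << [type, attributes]
  end
end

changeset = AsyncPipeline::Changeset.new
changeset.create(:model_one, name: "first") # buffered only; nothing written yet
changeset.deltas? # => true

store = FakeStore.new([])
changeset.apply(store) # replays each buffered delta against the store
store.rows # => [[:model_one, { name: "first" }]]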
data/lib/async_pipeline/chunk.rb
ADDED
@@ -0,0 +1,49 @@
+module AsyncPipeline
+  class Chunk
+    def self.build_chunks(store)
+      target = self.target
+      type = target.values.first
+      records = store.all(type: type)
+
+      if target.key?(:collection)
+        new(
+          target: records,
+          store: store,
+          changeset: Changeset.new
+        )
+      else
+        records.map { |record|
+          new(
+            target: record,
+            store: store,
+            changeset: Changeset.new
+          )
+        }
+      end
+    end
+
+    attr_accessor :target, :store, :changeset
+
+    def initialize(target:, store:, changeset:)
+      @target = target
+      @store = store
+      @changeset = changeset
+    end
+
+    def id
+      extra = (
+        if self.class.target.key?(:item)
+          { id: target.id }
+        else
+          {}
+        end
+      )
+
+      self.class.target.merge(extra)
+    end
+
+    def should_perform?
+      !done? && ready?
+    end
+  end
+end
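Chunk is an abstract base: build_chunks reads a class-level target hash, and should_perform? calls done? and ready?, none of which are defined here, so subclasses must supply them (along with the perform method the Processor's workers invoke). A sketch of what such a subclass might look like; the class, model type, and attribute names are invented, and the { item: ... } / { collection: ... } shape is inferred from build_chunks and id above:

# Hypothetical subclass; the gem does not ship this class.
class GreetChunk < AsyncPipeline::Chunk
  def self.target
    { item: :model_one } # one chunk per :model_one record (:collection would mean one chunk for all records)
  end

  def ready?
    true # no upstream dependency to wait on
  end

  def done?
    !target.greeting.nil? # skip records already processed
  end

  def perform
    changeset.update(target, greeting: "hello #{target.name}")
  end
end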
data/lib/async_pipeline/model.rb
ADDED
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+module AsyncPipeline
+  module Model
+    module InstanceMethods
+      attr_reader :attributes
+
+      def initialize(attributes)
+        @attributes = attributes
+      end
+    end
+
+    def self.extended(base)
+      base.include(InstanceMethods)
+    end
+
+    def inherited(base)
+      base.instance_variable_set(:@attributes, attributes.dup)
+    end
+
+    def attributes
+      @attributes ||= {}
+    end
+
+    def attribute(name, **opts)
+      attributes[name] = opts
+
+      define_method(name, &Ractor.make_shareable(Proc.new { attributes.fetch(name) }))
+    end
+  end
+end
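Model is extended into a plain class to get a small attribute DSL: attribute records the name and defines a Ractor-shareable reader over the instance's attributes hash. A sketch with invented names:

require "async_pipeline"

class ModelOne
  extend AsyncPipeline::Model

  attribute :id
  attribute :name
  attribute :greeting
end

m = ModelOne.new(id: "abc", name: "first", greeting: nil)
m.name # => "first", via the shareable reader defined by `attribute`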
data/lib/async_pipeline/pipeline.rb
ADDED
@@ -0,0 +1,78 @@
+# frozen_string_literal: true
+
+require "tmpdir"
+
+require_relative "processor"
+require_relative "registry"
+require_relative "store"
+require_relative "stores/yaml"
+
+module AsyncPipeline
+  module Pipeline
+    module InstanceMethods
+      def initialize(data)
+        @data = data
+      end
+
+      def start
+        Processor.call(
+          store: store,
+          chunkables: chunkables,
+          registry: registry
+        )
+      end
+
+      def data
+        store.to_h
+      end
+
+      private
+
+      def store
+        @store ||= self.class.store.build(data: @data, dir: dir, registry: registry)
+      end
+
+      def dir
+        @dir ||= Dir.mktmpdir
+      end
+
+      def registry
+        self.class.registry
+      end
+
+      def chunkables
+        self.class.chunkables
+      end
+    end
+
+    def self.extended(base)
+      base.include(InstanceMethods)
+    end
+
+    def inherited(base)
+      base.instance_variable_set(:@models, models.dup)
+      base.instance_variable_set(:@chunkables, chunkables.dup)
+    end
+
+    def store(klass = nil)
+      @store = klass if klass
+      @store
+    end
+
+    def chunkables
+      @chunkables ||= []
+    end
+
+    def registry
+      @registry ||= Registry.new
+    end
+
+    def model(klass, as_type:)
+      registry.register(as_type, klass)
+    end
+
+    def chunk(klass)
+      chunkables << klass
+    end
+  end
+end
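Pipeline is the class-level DSL that ties the pieces together: store picks a backend, model registers types, and chunk collects the work units. (Note that inherited references a models reader that is never defined in this module, so subclassing a pipeline class, as opposed to extending the module, would raise NoMethodError in this version.) A hedged sketch of the intended wiring, reusing the hypothetical ModelOne and GreetChunk from the earlier sketches:

class MyPipeline
  extend AsyncPipeline::Pipeline

  store AsyncPipeline::Stores::Yaml   # backend class; .build is called lazily
  model ModelOne, as_type: :model_one # registered in the Registry
  chunk GreetChunk                    # appended to chunkables
end

pipeline = MyPipeline.new(model_one: [{ id: "abc", name: "first", greeting: nil }])
pipeline.start # runs the Processor until all chunks report processed
pipeline.data  # final store contents as a plain Hash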
data/lib/async_pipeline/processor.rb
ADDED
@@ -0,0 +1,202 @@
+# frozen_string_literal: true
+
+require_relative "changeset"
+require_relative "read_only_store"
+
+module AsyncPipeline
+  class Processor
+    Message = Struct.new(:type, :payload) do
+      def to_s(...)
+        inspect(...)
+      end
+      def inspect(...)
+        "<Message #{type} (#{payload.class}) >"
+      end
+    end
+
+    Ms = Module.new do
+      def self.g(...)
+        Message.new(...)
+      end
+
+      def msg(...)
+        Message.new(...)
+      end
+    end
+
+    def self.Msg(type, payload = nil)
+      Message.new(type, payload)
+    end
+
+    def self.call(...)
+      new(...).call
+    end
+
+    attr_reader :store, :chunkables, :registry
+    def initialize(store:, chunkables:, registry:)
+      @store = store
+      @chunkables = chunkables
+      @registry = registry
+    end
+
+    def call
+      Log.info("scheduler_channel: #{scheduler_channel.object_id}")
+      scheduler_channel.send(Ms.g(:requeue))
+
+      workers
+      loop do
+        r, msg = Ractor.select(scheduler_channel, changeset_channel, work_channel, ticker)
+
+        case r
+        when ticker
+          case msg.type
+          when :tick
+            changeset_channel.send(Ms.g(:flush_queue))
+          end
+        when scheduler_channel
+          puts "scheduler_channel: #{msg}"
+          case msg.type
+          when :enqueue
+            work_channel.send(Ms.g(:process, msg.payload))
+          when :all_chunks_processed
+            scheduler_channel.close_outgoing
+            work_channel.close_outgoing
+            changeset_channel.close_outgoing
+          else
+            # scheduler_channel.close_outgoing
+            # raise "Unknown message: #{msg}"
+          end
+        when changeset_channel
+          case msg.type
+          when :chunks_updated
+            scheduler_channel.send(Ms.g(:requeue, msg.payload))
+          when :chunks_processed
+            scheduler_channel.send(Ms.g(:chunks_processed, msg.payload))
+          else
+            # raise "Unknown message: #{msg}"
+          end
+        when work_channel
+          case msg.type
+          when :changeset
+            changeset_channel.send(Ms.g(:changeset, msg.payload))
+          else
+            # raise "Unknown message: #{msg}"
+          end
+        end
+      end
+
+      store.to_h
+    end
+
+    def changeset_channel
+      @changeset_channel ||= Ractor.new(store) do |store, channel|
+        chunks = []
+        loop do
+          msg = receive
+          case msg.type
+          when :changeset
+            chunks << msg.payload
+          when :flush_queue
+            next unless chunks.any?
+
+            was_updated = store.apply(chunks.map(&:changeset))
+            if was_updated
+              Ractor.yield(Ms.g(:chunks_updated, chunks.map(&:id)))
+            else
+              Ractor.yield(Ms.g(:chunks_processed, chunks.map(&:id)))
+            end
+
+            chunks = []
+          end
+        end
+      end
+    end
+
+    def ticker
+      @ticker ||= Ractor.new do
+        loop do
+          sleep 0.5
+          Ractor.yield(Ms.g(:tick))
+        end
+      end
+    end
+
+    def scheduler_channel
+      @scheduler_channel ||= Ractor.new(store, chunkables, changeset_channel) do |store, chunkables, changeset_channel|
+        status = {}
+        loop do
+          msg = receive
+
+          # we update chunk_ids on both messages.
+          chunk_ids = msg.payload || []
+          chunk_ids.each do |chunk_id|
+            status[chunk_id] = :processed
+          end
+
+          case msg.type
+          when :requeue
+            reader = store.reader
+
+            Log.info "model_one: #{store.reader.all(type: :model_one).count}"
+            chunkables
+              .map { _1.build_chunks(reader) }
+              .flatten
+              .each do |c|
+                if status[c.id] != :queued && c.should_perform?
+                  puts "enqueuing: #{c.id}"
+                  status[c.id] = :queued
+                  Ractor.yield(Ms.g(:enqueue, c))
+                end
+              end
+          end
+
+          if status.values.all? { _1 == :processed }
+            Ractor.yield(Ms.g(:all_chunks_processed))
+          end
+        end
+      end
+    end
+
+    def work_channel
+      @work_channel ||= Ractor.new(work_pool) do |work_pool|
+        loop do
+          msg = receive
+          case msg.type
+          when :process
+            chunk = msg.payload
+            work_pool.send(Ms.g(:chunk, chunk))
+          when :changeset
+            Ractor.yield(Ms.g(:changeset, msg.payload))
+          end
+        end
+      end
+    end
+
+    def work_pool
+      @work_pool ||= Ractor.new do
+        loop do
+          Ractor.yield Ractor.receive
+        end
+      end
+    end
+
+    def workers
+      @workers ||= 4.times.map { |i|
+        Ractor.new(work_pool, work_channel, name: "worker-#{i}") do |work_pool, work_channel|
+          loop do
+            r, msg = Ractor.select(work_pool)
+
+            case msg.type
+            when :chunk
+              chunk = msg.payload
+              puts "starting perform: #{chunk.id}"
+              chunk.perform
+              puts "finished perform: #{chunk.id}"
+              work_channel.send(Ms.g(:changeset, chunk))
+            end
+          end
+        end
+      }
+    end
+  end
+end
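The load-balancing trick at the heart of Processor is the work_pool Ractor: `Ractor.yield Ractor.receive` hands each queued message to exactly one of the workers blocked in `Ractor.select(work_pool)`, which is the standard Ractor worker-pool idiom. A standalone sketch of just that pattern, independent of the gem:

# Each message sent to the pool is taken by exactly one worker.
pool = Ractor.new do
  loop { Ractor.yield Ractor.receive }
end

workers = 3.times.map do |i|
  Ractor.new(pool, name: "worker-#{i}") do |pool|
    loop do
      _r, job = Ractor.select(pool) # blocks until the pool yields a job
      Ractor.yield("#{Ractor.current.name} did job #{job}")
    end
  end
end

6.times { |n| pool.send(n) }
6.times { puts Ractor.select(*workers).last }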
data/lib/async_pipeline/read_only_store.rb
ADDED
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+module AsyncPipeline
+  class ReadOnlyStore
+    attr_reader :store
+    def initialize(store)
+      @store = store
+    end
+
+    def find(...)
+      store.find(...)
+    end
+
+    def all(...)
+      store.all(...)
+    end
+
+    def everything(...)
+      store.everything(...)
+    end
+  end
+end
data/lib/async_pipeline/registry.rb
ADDED
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+module AsyncPipeline
+  class Registry
+    def build(type, attributes)
+      lookup.fetch(type).new(attributes)
+    end
+
+    def register(type, klass)
+      lookup[type] = klass
+    end
+
+    def type_for(type)
+      return type if lookup.key?(type)
+
+      reverse_lookup = lookup.invert
+
+      return reverse_lookup[type] if reverse_lookup.key?(type)
+
+      raise ArgumentError, "Unknown type: #{type}"
+    end
+
+    private
+
+    def lookup
+      @lookup ||= {}
+    end
+  end
+end
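Registry is a two-way map between type symbols and model classes; type_for normalizes either form to the symbol. A short sketch, reusing the hypothetical ModelOne from above:

registry = AsyncPipeline::Registry.new
registry.register(:model_one, ModelOne)

registry.type_for(:model_one) # => :model_one
registry.type_for(ModelOne)   # => :model_one, via the inverted lookup
registry.build(:model_one, id: "abc", name: "first") # => a ModelOne instance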
data/lib/async_pipeline/shell.rb
ADDED
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+
+require "open3"
+
+module AsyncPipeline
+  class Shell
+    class << self
+      Result = Struct.new(:success, :stdout, :stderr, keyword_init: true) do
+        def success?
+          success
+        end
+      end
+
+      def run!(command, &block)
+        run(command, &block).tap { raise "command failed: #{command}" unless _1.success? }
+      end
+
+      def run(command)
+        Open3.popen3(command) do |_in, stdout, stderr, wait_thr|
+          process_stdout = []
+          stdout_thr = Thread.new do
+            while line = stdout.gets&.chomp
+              yield(:stdout, line) if block_given?
+              process_stdout << line
+            end
+          end
+
+          process_stderr = []
+          stderr_thr = Thread.new do
+            while line = stderr.gets&.chomp
+              yield(:stderr, line) if block_given?
+              process_stderr << line
+            end
+          end
+
+          [
+            stderr_thr,
+            stdout_thr,
+          ].each(&:join)
+
+          Result.new(
+            success: wait_thr.value.success?,
+            stdout: process_stdout.join("\n"),
+            stderr: process_stderr.join("\n"),
+          )
+        end
+      end
+    end
+  end
+end
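Shell.run shells out via Open3, streaming each stdout/stderr line to an optional block while also collecting both streams into the Result. A usage sketch:

result = AsyncPipeline::Shell.run("echo hi; echo oops 1>&2") do |stream, line|
  puts "#{stream}: #{line}" # live-streaming callback, line by line
end

result.success? # => true
result.stdout   # => "hi"
result.stderr   # => "oops"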
data/lib/async_pipeline/store.rb
ADDED
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+module AsyncPipeline
+  class Store
+    attr_reader :db, :changeset
+    def initialize(db:, changeset:)
+      @changeset = changeset
+      @db = db
+    end
+
+    def find(...)
+      db.find(...)
+    end
+
+    def all(...)
+      db.all(...)
+    end
+
+    def create(...)
+      changeset.create(...)
+    end
+
+    def update(...)
+      changeset.update(...)
+    end
+  end
+end
data/lib/async_pipeline/stores/yaml.rb
ADDED
@@ -0,0 +1,115 @@
+# frozen_string_literal: true
+
+require "securerandom"
+require "yaml"
+
+module AsyncPipeline
+  module Stores
+    class Yaml
+      class Reader
+        attr_reader :registry, :data
+        def initialize(registry:, data:)
+          @registry = registry
+          @data = data
+        end
+
+        def find(id:, type:)
+          type = registry.type_for(type)
+          attrs = (data[type] || []).find { |attrs| attrs[:id] == id }
+          registry.build(type, attrs)
+        end
+
+        def everything
+          data.each_with_object({}) do |(type, subdata), all_of_it|
+            all_of_it[type] = subdata.map { |attrs| registry.build(type, attrs) }
+          end
+        end
+
+        def all(type:)
+          type = registry.type_for(type)
+
+          data
+            .fetch(type, [])
+            .map { registry.build(type, _1) }
+        end
+      end
+
+      class Writer
+        attr_reader :path, :registry, :yaml_before, :data
+        def initialize(path:, registry:)
+          @path = path
+          @registry = registry
+          @yaml_before = File.read(path)
+          @data = YAML.unsafe_load(@yaml_before)
+        end
+
+        def apply(changesets)
+          changesets.each { _1.apply(self) }
+          yaml_after = data.to_yaml
+
+          if yaml_before != yaml_after
+            File.write(path, yaml_after)
+            true
+          else
+            false
+          end
+        end
+
+        def find(id:, type:)
+          type = registry.type_for(type)
+          attrs = (data[type] || []).find { |attrs| attrs[:id] == id }
+          registry.build(type, attrs)
+        end
+
+        def create(type:, attributes:)
+          type = registry.type_for(type)
+
+          data[type] ||= []
+          data[type] << { id: SecureRandom.uuid }.merge(attributes)
+        end
+
+        def update(id:, type:, attributes:)
+          type = registry.type_for(type)
+          attrs = (data[type] || []).find { |attrs| attrs[:id] == id }
+          raise "Not found" unless attrs
+          attrs.merge!(attributes)
+        end
+      end
+
+      def self.build(data:, dir:, registry:)
+        path = File.join(dir, "data.yml")
+
+        File.write(path, data.to_yaml)
+
+        new(
+          path: path,
+          registry: registry
+        )
+      end
+
+      attr_reader :path, :registry
+      def initialize(path:, registry:)
+        @path = path
+        @registry = registry
+      end
+
+      def reader
+        Reader.new(data: read, registry: registry)
+      end
+
+      def apply(changesets)
+        Writer.new(path: path, registry: registry).apply(changesets)
+      end
+
+      def to_h
+        read
+      end
+
+      private
+
+      def read
+        YAML.unsafe_load_file(path)
+      end
+    end
+  end
+end
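Stores::Yaml keeps the whole dataset in one YAML file: build seeds the file from the initial data, reader snapshots it into models, and apply rewrites the file through a Writer and reports whether anything actually changed. A round-trip sketch, reusing the hypothetical ModelOne:

require "tmpdir"

registry = AsyncPipeline::Registry.new
registry.register(:model_one, ModelOne)

Dir.mktmpdir do |dir|
  store = AsyncPipeline::Stores::Yaml.build(
    data: { model_one: [{ id: "abc", name: "first" }] },
    dir: dir,
    registry: registry
  )

  store.reader.all(type: :model_one).first.name # => "first"

  changeset = AsyncPipeline::Changeset.new
  changeset.create(:model_one, name: "second")
  store.apply([changeset]) # => true, the YAML on disk changed
end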
data/lib/async_pipeline.rb
ADDED
@@ -0,0 +1,55 @@
+# frozen_string_literal: true
+
+require_relative "async_pipeline/version"
+require_relative "async_pipeline/model"
+require_relative "async_pipeline/chunk"
+require_relative "async_pipeline/pipeline"
+
+require "logger"
+
+module AsyncPipeline
+  class Error < StandardError; end
+  # Your code goes here...
+
+  class RactorLogger < Ractor
+    def self.new
+      super do
+        # STDOUT cannot be referenced but $stdout can
+        logger = ::Logger.new($stdout)
+        logger.formatter = proc do |severity, datetime, progname, msg|
+          "#{datetime.to_i} [#{severity}] #{msg}\n"
+        end
+
+        # Run the requested operations on our logger instance
+        while data = receive
+          logger.public_send(data[0], *data[1])
+        end
+      end
+    end
+
+    def log(level, msg)
+      ractor_name = Ractor.current.name || Ractor.current.inspect
+
+      send([level, "#{ractor_name}: #{msg}"])
+    end
+
+    def info(msg)
+      log(:info, msg)
+    end
+
+    def error(msg)
+      log(:error, msg)
+    end
+
+    def warn(msg)
+      log(:warn, msg)
+    end
+
+    def debug(msg)
+      log(:debug, msg)
+    end
+  end
+
+  Log = RactorLogger.new
+
+end
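RactorLogger exists because a plain Logger cannot be shared across Ractors: every Log.info call, from any Ractor, becomes a message to the single Ractor that owns the Logger instance. A usage sketch:

require "async_pipeline"

AsyncPipeline::Log.info("pipeline starting") # prints e.g. "<epoch seconds> [INFO] <ractor name>: pipeline starting"

# Safe from inside other Ractors too; Log is a shareable Ractor instance.
Ractor.new { AsyncPipeline::Log.info("hello from a worker") }.take
sleep 0.1 # give the logger Ractor a moment to drain before the process exits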
metadata
ADDED
@@ -0,0 +1,64 @@
+--- !ruby/object:Gem::Specification
+name: async_pipeline
+version: !ruby/object:Gem::Version
+  version: 0.1.0
+platform: ruby
+authors:
+- Pete Kinnecom
+autorequire:
+bindir: exe
+cert_chain: []
+date: 2024-05-02 00:00:00.000000000 Z
+dependencies: []
+description:
+email:
+- git@k7u7.com
+executables: []
+extensions: []
+extra_rdoc_files: []
+files:
+- ".rubocop.yml"
+- ".ruby-version"
+- README.md
+- Rakefile
+- async_pipeline.gemspec
+- lib/async_pipeline.rb
+- lib/async_pipeline/changeset.rb
+- lib/async_pipeline/chunk.rb
+- lib/async_pipeline/model.rb
+- lib/async_pipeline/pipeline.rb
+- lib/async_pipeline/processor.rb
+- lib/async_pipeline/read_only_store.rb
+- lib/async_pipeline/registry.rb
+- lib/async_pipeline/shell.rb
+- lib/async_pipeline/store.rb
+- lib/async_pipeline/stores/yaml.rb
+- lib/async_pipeline/version.rb
+homepage: https://github.com/petekinnecom/async_pipeline
+licenses:
+- WTFPL
+metadata:
+  allowed_push_host: https://rubygems.org
+  homepage_uri: https://github.com/petekinnecom/async_pipeline
+  source_code_uri: https://github.com/petekinnecom/async_pipeline
+  changelog_uri: https://github.com/petekinnecom/async_pipeline
+post_install_message:
+rdoc_options: []
+require_paths:
+- lib
+required_ruby_version: !ruby/object:Gem::Requirement
+  requirements:
+  - - ">="
+    - !ruby/object:Gem::Version
+      version: 3.0.0
+required_rubygems_version: !ruby/object:Gem::Requirement
+  requirements:
+  - - ">="
+    - !ruby/object:Gem::Version
+      version: '0'
+requirements: []
+rubygems_version: 3.4.19
+signing_key:
+specification_version: 4
+summary: Define a pipeline of async tasks and their starting conditions
+test_files: []