concurrent_pipeline 0.1.0 → 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.claude/settings.local.json +9 -0
- data/.ruby-version +1 -1
- data/README.md +232 -353
- data/Rakefile +4 -2
- data/concurrent_pipeline.gemspec +3 -1
- data/lib/concurrent_pipeline/pipeline.rb +14 -201
- data/lib/concurrent_pipeline/pipelines/processors/asynchronous.rb +92 -0
- data/lib/concurrent_pipeline/pipelines/processors/locker.rb +28 -0
- data/lib/concurrent_pipeline/pipelines/processors/synchronous.rb +50 -0
- data/lib/concurrent_pipeline/pipelines/schema.rb +56 -0
- data/lib/concurrent_pipeline/store.rb +88 -13
- data/lib/concurrent_pipeline/stores/schema/record.rb +47 -0
- data/lib/concurrent_pipeline/stores/schema.rb +35 -0
- data/lib/concurrent_pipeline/stores/storage/yaml/fs.rb +140 -0
- data/lib/concurrent_pipeline/stores/storage/yaml.rb +196 -0
- data/lib/concurrent_pipeline/version.rb +1 -1
- data/lib/concurrent_pipeline.rb +13 -9
- metadata +40 -14
- data/.rubocop.yml +0 -14
- data/lib/concurrent_pipeline/changeset.rb +0 -133
- data/lib/concurrent_pipeline/model.rb +0 -31
- data/lib/concurrent_pipeline/processors/actor_processor.rb +0 -363
- data/lib/concurrent_pipeline/producer.rb +0 -156
- data/lib/concurrent_pipeline/read_only_store.rb +0 -22
- data/lib/concurrent_pipeline/registry.rb +0 -36
- data/lib/concurrent_pipeline/stores/versioned.rb +0 -24
- data/lib/concurrent_pipeline/stores/yaml/db.rb +0 -110
- data/lib/concurrent_pipeline/stores/yaml/history.rb +0 -67
- data/lib/concurrent_pipeline/stores/yaml.rb +0 -40

data/lib/concurrent_pipeline/stores/storage/yaml/fs.rb
ADDED
@@ -0,0 +1,140 @@
+require "yaml"
+require "fileutils"
+
+module ConcurrentPipeline
+  module Stores
+    module Storage
+      class Yaml
+        class Fs
+          @@mutex = Mutex.new
+
+          attr_reader :dir
+
+          def initialize(dir:)
+            @dir = dir
+            FileUtils.mkdir_p(dir) unless Dir.exist?(dir)
+            FileUtils.mkdir_p(versions_dir) unless Dir.exist?(versions_dir)
+          end
+
+          def read_version(version_number)
+            @@mutex.synchronize do
+              if version_number == 0
+                {}
+              else
+                current_ver = unsafe_current_version_number
+
+                # If requesting the current/latest version, read from latest.yml
+                if version_number == current_ver
+                  if File.exist?(latest_file_path)
+                    data = File.read(latest_file_path).then { YAML.load(_1, aliases: true) || {} }
+                    # Normalize keys: convert record names and ID keys to strings for consistency
+                    data.transform_keys(&:to_s).transform_values do |records|
+                      records.transform_keys(&:to_s)
+                    end
+                  else
+                    {}
+                  end
+                else
+                  # Reading a historical version from versions/ directory
+                  file_path = version_file_path(version_number)
+                  if File.exist?(file_path)
+                    data = File.read(file_path).then { YAML.load(_1, aliases: true) || {} }
+                    # Normalize keys: convert record names and ID keys to strings for consistency
+                    data.transform_keys(&:to_s).transform_values do |records|
+                      records.transform_keys(&:to_s)
+                    end
+                  else
+                    {}
+                  end
+                end
+              end
+            end
+          end
+
+          def write_version(version_number, data)
+            @@mutex.synchronize do
+              # Copy current latest.yml to versions directory if it exists
+              if File.exist?(latest_file_path)
+                current_version = unsafe_current_version_number
+                if current_version > 0
+                  version_path = version_file_path(current_version)
+                  FileUtils.cp(latest_file_path, version_path)
+                end
+              end
+
+              # Write new data to latest.yml
+              File.write(latest_file_path, YAML.dump(data))
+            end
+          end
+
+          def current_version_number
+            @@mutex.synchronize do
+              unsafe_current_version_number
+            end
+          end
+
+          def version_files
+            @@mutex.synchronize do
+              unsafe_version_files
+            end
+          end
+
+          def delete_version(version_number)
+            @@mutex.synchronize do
+              current_ver = unsafe_current_version_number
+
+              # If deleting the latest version
+              if version_number == current_ver
+                File.delete(latest_file_path) if File.exist?(latest_file_path)
+              else
+                # Deleting an archived version
+                file_path = version_file_path(version_number)
+                File.delete(file_path) if File.exist?(file_path)
+              end
+            end
+          end
+
+          def restore_version(version_number)
+            @@mutex.synchronize do
+              version_path = version_file_path(version_number)
+
+              if File.exist?(version_path)
+                # Copy the version file to latest.yml
+                FileUtils.cp(version_path, latest_file_path)
+              else
+                raise "Version #{version_number} does not exist"
+              end
+            end
+          end
+
+          private
+
+          def unsafe_current_version_number
+            if File.exist?(latest_file_path)
+              # Count existing version files + 1 for the latest
+              unsafe_version_files.length + 1
+            else
+              0
+            end
+          end
+
+          def unsafe_version_files
+            Dir.glob(File.join(versions_dir, "*.yml")).sort
+          end
+
+          def version_file_path(version_num)
+            File.join(versions_dir, "%04d.yml" % version_num)
+          end
+
+          def latest_file_path
+            File.join(dir, "data.yml")
+          end
+
+          def versions_dir
+            File.join(dir, "versions")
+          end
+        end
+      end
+    end
+  end
+end
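
For orientation, a rough sketch of how this Fs helper could be exercised on its own. The directory path and record data below are illustrative only; they are not taken from the gem's tests or documentation.

    require "concurrent_pipeline"

    # Illustrative scratch directory; Fs creates it (and versions/) if missing.
    fs = ConcurrentPipeline::Stores::Storage::Yaml::Fs.new(dir: "tmp/pipeline_store")

    fs.write_version(1, { "tasks" => { "1" => { "id" => "1", "done" => false } } })
    fs.write_version(2, { "tasks" => { "1" => { "id" => "1", "done" => true } } })

    fs.current_version_number  # => 2 (one archived file under versions/ plus data.yml)
    fs.read_version(1)         # => the snapshot archived before the second write
    fs.restore_version(1)      # copies versions/0001.yml back over data.yml

Note that the version_number argument to write_version is not used internally; the numbering comes from counting archived files plus the current data.yml.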

data/lib/concurrent_pipeline/stores/storage/yaml.rb
ADDED
@@ -0,0 +1,196 @@
+module ConcurrentPipeline
+  module Stores
+    module Storage
+      class Yaml
+        attr_reader :fs, :version_number
+
+        def initialize(dir:, version_number: nil)
+          @fs = Fs.new(dir: dir)
+          @version_number = version_number
+        end
+
+        def in_transaction?
+          !transaction_operations.nil?
+        end
+
+        def transaction(&block)
+          begin_transaction
+          begin
+            yield
+            commit_transaction
+          rescue => e
+            rollback_transaction
+            raise e
+          end
+        end
+
+        def create(name:, attrs:)
+          in_txn = in_transaction?
+
+          raise "Cannot write to non-current version" unless writeable?
+
+          id = attrs[:id] || attrs["id"]
+          raise "Record must have an id" unless id
+
+          # Always buffer the operation
+          buffer_operation(
+            type: :create,
+            name: name.to_s,
+            id: id.to_s,
+            attrs: attrs.transform_keys(&:to_s)
+          )
+
+          # Flush immediately if not in a transaction
+          flush_buffer unless in_txn
+        end
+
+        def update(name:, id:, attrs:)
+          in_txn = in_transaction?
+
+          raise "Cannot write to non-current version" unless writeable?
+
+          # Always buffer the operation
+          buffer_operation(
+            type: :update,
+            name: name.to_s,
+            id: id.to_s,
+            attrs: attrs.transform_keys(&:to_s)
+          )
+
+          # Flush immediately if not in a transaction
+          flush_buffer unless in_txn
+        end
+
+        def all(name:)
+          data = load_data
+          records = data[name.to_s] || {}
+          records.values.map { |attrs| attrs.transform_keys(&:to_sym) }
+        end
+
+        def versions
+          current_ver = current_version_number
+          (1..current_ver).map { |idx| self.class.new(dir: fs.dir, version_number: idx) }
+        end
+
+        def restore
+          current_version = version_number || current_version_number
+
+          # Delete all versions after this one
+          fs.version_files.each_with_index do |file, idx|
+            version_num = idx + 1
+            if version_num > current_version
+              fs.delete_version(version_num)
+            end
+          end
+
+          # If restoring to a historical version (not current), move it to latest.yml
+          if version_number && version_number < current_version_number
+            fs.restore_version(version_number)
+          end
+
+          # Return a new writeable storage at this version
+          self.class.new(dir: fs.dir)
+        end
+
+        def writeable?
+          version_number.nil? || version_number == current_version_number
+        end
+
+        private
+
+        def begin_transaction
+          raise "Transaction already in progress" if transaction_operations
+
+          self.transaction_operations = []
+        end
+
+        def commit_transaction
+          raise "No transaction in progress" unless transaction_operations
+
+          flush_buffer
+        end
+
+        def rollback_transaction
+          self.transaction_operations = nil
+        end
+
+        TRANSACTION_KEY = :yaml_storage_transaction_operations
+
+        def transaction_operations
+          Fiber[TRANSACTION_KEY]
+        end
+
+        def transaction_operations=(value)
+          Fiber[TRANSACTION_KEY] = value
+        end
+
+        def buffer_operation(op)
+          # If in a transaction, append to the transaction buffer
+          if transaction_operations
+            transaction_operations << op
+          else
+            # If not in a transaction, initialize a temporary buffer
+            self.transaction_operations = [op]
+          end
+        end
+
+        def flush_buffer
+          return unless transaction_operations
+
+          # Load current data and apply all buffered operations
+          data = load_current_data
+          transaction_operations.each do |op|
+            apply_operation(data, op)
+          end
+
+          write_new_version(data)
+          self.transaction_operations = nil
+        end
+
+        def apply_operation(data, op)
+          case op[:type]
+          when :create
+            data[op[:name]] ||= {}
+            data[op[:name]][op[:id]] = op[:attrs]
+          when :update
+            records = data[op[:name]] || {}
+            if records[op[:id]]
+              records[op[:id]].merge!(op[:attrs])
+            else
+              raise "Record not found: #{op[:name]} with id #{op[:id].inspect}"
+            end
+          end
+        end
+
+        def load_current_data
+          load_data
+        end
+
+        def load_data
+          if version_number
+            # Reading a historical version from the versions/ directory
+            target_version = version_number
+            fs.read_version(target_version)
+          else
+            # Reading the current latest.yml
+            current_ver = current_version_number
+            if current_ver == 0
+              {}
+            else
+              fs.read_version(current_ver)
+            end
+          end
+        end
+
+        def write_new_version(data)
+          next_version = current_version_number + 1
+          fs.write_version(next_version, data)
+        end
+
+        def current_version_number
+          fs.current_version_number
+        end
+      end
+    end
+  end
+end
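
A correspondingly rough sketch of this storage wrapper's write path, as suggested by the methods above. Record names, ids, and attributes are made up for illustration and are not part of the gem's documented API.

    require "concurrent_pipeline"

    storage = ConcurrentPipeline::Stores::Storage::Yaml.new(dir: "tmp/pipeline_store")

    # Outside a transaction each create/update flushes immediately,
    # producing a new version on its own.
    storage.create(name: :tasks, attrs: { id: "1", title: "build", done: false })

    # Inside a transaction the operations are buffered per fiber (via
    # Fiber[TRANSACTION_KEY]) and flushed together on commit; if the block
    # raises, the buffer is discarded.
    storage.transaction do
      storage.update(name: :tasks, id: "1", attrs: { done: true })
      storage.create(name: :tasks, attrs: { id: "2", title: "release", done: false })
    end

    storage.all(name: :tasks)  # => array of symbol-keyed attribute hashes
    storage.versions           # => one Yaml handle per stored version (historical ones refuse writes)
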
data/lib/concurrent_pipeline.rb
CHANGED
@@ -1,15 +1,19 @@
 # frozen_string_literal: true
 
-
-
-
-require_relative "concurrent_pipeline/producer"
-require_relative "concurrent_pipeline/shell"
-
-require "logger"
+require "zeitwerk"
+loader = Zeitwerk::Loader.for_gem
+loader.setup
 
 module ConcurrentPipeline
   class Error < StandardError; end
-
-
+
+  class << self
+    def store(&)
+      Store.define(&)
+    end
+
+    def pipeline(&)
+      Pipeline.define(&)
+    end
+  end
 end
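
With the explicit require_relative list replaced by Zeitwerk, constant names now follow the lib/ layout, and the two class-level helpers above are thin wrappers around Store.define and Pipeline.define. The mapping comments below follow standard Zeitwerk conventions rather than anything stated in this diff:

    require "concurrent_pipeline"

    # Autoloaded by Zeitwerk from the matching files under lib/:
    ConcurrentPipeline::Store                      # lib/concurrent_pipeline/store.rb
    ConcurrentPipeline::Pipeline                   # lib/concurrent_pipeline/pipeline.rb
    ConcurrentPipeline::Stores::Storage::Yaml      # lib/concurrent_pipeline/stores/storage/yaml.rb
    ConcurrentPipeline::Stores::Storage::Yaml::Fs  # lib/concurrent_pipeline/stores/storage/yaml/fs.rb

    # The new entry points simply forward their block:
    #   ConcurrentPipeline.store(&block)    -> Store.define(&block)
    #   ConcurrentPipeline.pipeline(&block) -> Pipeline.define(&block)
    # The DSL accepted inside those blocks lives in store.rb and pipeline.rb,
    # which are not shown in this hunk.
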
metadata
CHANGED
@@ -1,17 +1,45 @@
 --- !ruby/object:Gem::Specification
 name: concurrent_pipeline
 version: !ruby/object:Gem::Version
-  version:
+  version: 1.0.0
 platform: ruby
 authors:
 - Pete Kinnecom
 autorequire:
 bindir: exe
 cert_chain: []
-date:
+date: 2025-12-13 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
-  name:
+  name: zeitwerk
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
+  type: :runtime
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
+- !ruby/object:Gem::Dependency
+  name: yaml
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
+  type: :runtime
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
+- !ruby/object:Gem::Dependency
+  name: async
   requirement: !ruby/object:Gem::Requirement
     requirements:
     - - ">="
@@ -31,26 +59,24 @@ executables: []
 extensions: []
 extra_rdoc_files: []
 files:
-- ".
+- ".claude/settings.local.json"
 - ".ruby-version"
 - README.md
 - Rakefile
 - concurrency.md
 - concurrent_pipeline.gemspec
 - lib/concurrent_pipeline.rb
-- lib/concurrent_pipeline/changeset.rb
-- lib/concurrent_pipeline/model.rb
 - lib/concurrent_pipeline/pipeline.rb
-- lib/concurrent_pipeline/processors/
-- lib/concurrent_pipeline/
-- lib/concurrent_pipeline/
-- lib/concurrent_pipeline/
+- lib/concurrent_pipeline/pipelines/processors/asynchronous.rb
+- lib/concurrent_pipeline/pipelines/processors/locker.rb
+- lib/concurrent_pipeline/pipelines/processors/synchronous.rb
+- lib/concurrent_pipeline/pipelines/schema.rb
 - lib/concurrent_pipeline/shell.rb
 - lib/concurrent_pipeline/store.rb
-- lib/concurrent_pipeline/stores/
-- lib/concurrent_pipeline/stores/
-- lib/concurrent_pipeline/stores/yaml
-- lib/concurrent_pipeline/stores/yaml/
+- lib/concurrent_pipeline/stores/schema.rb
+- lib/concurrent_pipeline/stores/schema/record.rb
+- lib/concurrent_pipeline/stores/storage/yaml.rb
+- lib/concurrent_pipeline/stores/storage/yaml/fs.rb
 - lib/concurrent_pipeline/version.rb
 homepage: https://github.com/petekinnecom/concurrent_pipeline
 licenses:

data/.rubocop.yml
DELETED
@@ -1,14 +0,0 @@
-AllCops:
-  TargetRubyVersion: 3.2
-  DisabledByDefault: true
-
-Style/StringLiterals:
-  Enabled: true
-  EnforcedStyle: double_quotes
-
-Style/StringLiteralsInInterpolation:
-  Enabled: true
-  EnforcedStyle: double_quotes
-
-Layout/LineLength:
-  Max: 120

data/lib/concurrent_pipeline/changeset.rb
DELETED
@@ -1,133 +0,0 @@
-# frozen_string_literal: true
-
-module ConcurrentPipeline
-  class Changeset
-    Result = Struct.new(:diff) do
-      alias diff? diff
-    end
-
-    InitialDelta = Struct.new(:data, :dup, keyword_init: true) do
-      def apply(store)
-        # We fully dup the data to avoid mutating the input
-        dup_data = YAML.unsafe_load(data.to_yaml)
-        store.set(dup_data)
-        Result.new(true)
-      end
-
-      def self.from_json(json)
-        new(data: json.fetch(:delta), dup: true)
-      end
-
-      def as_json(...)
-        {
-          action: :initial,
-          delta: data
-        }
-      end
-    end
-
-    CreateDelta = Struct.new(:type, :attributes, keyword_init: true) do
-      def apply(store)
-        store.create(type: type, attributes: attributes)
-        Result.new(true)
-      end
-
-      def self.from_json(json)
-        new(
-          type: json.fetch(:type),
-          attributes: json.fetch(:attributes)
-        )
-      end
-
-      def as_json(...)
-        {
-          action: :create,
-          type: type,
-          attributes: attributes
-        }
-      end
-    end
-
-    UpdateDelta = Struct.new(:id, :type, :delta, keyword_init: true) do
-      def apply(store)
-        current_model = store.find(type, id)
-
-        # Todo: detect if changed underfoot
-
-        Result.new(
-          store.update(
-            id: id,
-            type: type,
-            attributes: current_model.attributes.merge(delta)
-          )
-        )
-      end
-
-      def self.from_json(json)
-        new(
-          id: json.fetch(:id),
-          type: json.fetch(:type),
-          delta: json.fetch(:delta),
-        )
-      end
-
-      def as_json
-        {
-          action: :update,
-          id: id,
-          type: type,
-          delta: delta
-        }
-      end
-    end
-
-    def self.from_json(registry:, json:)
-      type_map = {
-        initial: InitialDelta,
-        create: CreateDelta,
-        update: UpdateDelta,
-      }
-
-      new(
-        registry: registry
-      ).tap do |changeset|
-        json.fetch(:changes).each do |change|
-          type_map
-            .fetch(change.fetch(:action))
-            .from_json(change)
-            .then { changeset.deltas << _1 }
-        end
-      end
-    end
-
-    attr_reader :deltas, :registry
-    def initialize(registry:)
-      @registry = registry
-      @deltas = []
-    end
-
-    def deltas?
-      !@deltas.empty?
-    end
-
-    def create(type, attributes)
-      with_id = { id: SecureRandom.uuid }.merge(attributes)
-      @deltas << CreateDelta.new(type: type, attributes: with_id)
-    end
-
-    def update(model, delta)
-      type = registry.type_for(model.class)
-      @deltas << UpdateDelta.new(id: model.id, type: type, delta: delta)
-    end
-
-    def apply(...)
-      deltas.map { _1.apply(...) }
-    end
-
-    def as_json(...)
-      {
-        changes: deltas.map { _1.as_json(...) }
-      }
-    end
-  end
-end

data/lib/concurrent_pipeline/model.rb
DELETED
@@ -1,31 +0,0 @@
-# frozen_string_literal: true
-
-module ConcurrentPipeline
-  module Model
-    module InstanceMethods
-      attr_reader :attributes
-
-      def initialize(attributes)
-        @attributes = attributes
-      end
-    end
-
-    def self.extended(base)
-      base.include(InstanceMethods)
-    end
-
-    def inherited(base)
-      base.instance_variable_set(:@attributes, attributes.dup)
-    end
-
-    def attributes
-      @attributes ||= {}
-    end
-
-    def attribute(name, **opts)
-      attributes[name] = opts
-
-      define_method(name) { attributes[name] }
-    end
-  end
-end