dynflow 1.4.7 → 1.6.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (56)
  1. checksums.yaml +4 -4
  2. data/{test/prepare_travis_env.sh → .github/install_dependencies.sh} +2 -2
  3. data/.github/workflows/ruby.yml +116 -0
  4. data/dynflow.gemspec +1 -0
  5. data/examples/chunked_output_benchmark.rb +77 -0
  6. data/extras/expand/main.go +180 -0
  7. data/lib/dynflow/action/suspended.rb +4 -4
  8. data/lib/dynflow/action/timeouts.rb +2 -2
  9. data/lib/dynflow/action.rb +15 -4
  10. data/lib/dynflow/actor.rb +20 -4
  11. data/lib/dynflow/clock.rb +2 -2
  12. data/lib/dynflow/delayed_executors/abstract_core.rb +11 -9
  13. data/lib/dynflow/director/running_steps_manager.rb +2 -2
  14. data/lib/dynflow/director.rb +42 -5
  15. data/lib/dynflow/dispatcher/client_dispatcher.rb +8 -2
  16. data/lib/dynflow/dispatcher/executor_dispatcher.rb +12 -2
  17. data/lib/dynflow/dispatcher.rb +7 -2
  18. data/lib/dynflow/execution_history.rb +1 -1
  19. data/lib/dynflow/execution_plan/hooks.rb +1 -1
  20. data/lib/dynflow/execution_plan/steps/abstract_flow_step.rb +1 -0
  21. data/lib/dynflow/execution_plan.rb +16 -5
  22. data/lib/dynflow/executors/abstract/core.rb +10 -1
  23. data/lib/dynflow/executors/parallel.rb +6 -2
  24. data/lib/dynflow/extensions/msgpack.rb +41 -0
  25. data/lib/dynflow/extensions.rb +6 -0
  26. data/lib/dynflow/flows/abstract.rb +14 -0
  27. data/lib/dynflow/flows/abstract_composed.rb +2 -7
  28. data/lib/dynflow/flows/atom.rb +2 -2
  29. data/lib/dynflow/flows/concurrence.rb +2 -0
  30. data/lib/dynflow/flows/registry.rb +32 -0
  31. data/lib/dynflow/flows/sequence.rb +2 -0
  32. data/lib/dynflow/flows.rb +1 -0
  33. data/lib/dynflow/persistence.rb +10 -0
  34. data/lib/dynflow/persistence_adapters/sequel.rb +51 -16
  35. data/lib/dynflow/persistence_adapters/sequel_migrations/021_create_output_chunks.rb +30 -0
  36. data/lib/dynflow/persistence_adapters/sequel_migrations/022_store_flows_as_msgpack.rb +90 -0
  37. data/lib/dynflow/persistence_adapters/sequel_migrations/023_sqlite_workarounds.rb +19 -0
  38. data/lib/dynflow/serializable.rb +2 -2
  39. data/lib/dynflow/testing/dummy_coordinator.rb +10 -0
  40. data/lib/dynflow/testing/dummy_planned_action.rb +4 -0
  41. data/lib/dynflow/testing/dummy_world.rb +2 -1
  42. data/lib/dynflow/testing/in_thread_executor.rb +2 -2
  43. data/lib/dynflow/testing/in_thread_world.rb +5 -5
  44. data/lib/dynflow/testing.rb +1 -0
  45. data/lib/dynflow/version.rb +1 -1
  46. data/lib/dynflow/world.rb +16 -4
  47. data/lib/dynflow.rb +2 -1
  48. data/test/dispatcher_test.rb +6 -0
  49. data/test/execution_plan_hooks_test.rb +36 -0
  50. data/test/extensions_test.rb +42 -0
  51. data/test/flows_test.rb +44 -0
  52. data/test/future_execution_test.rb +6 -3
  53. data/test/persistence_test.rb +2 -2
  54. data/web/views/flow_step.erb +1 -0
  55. metadata +34 -8
  56. data/.travis.yml +0 -33
@@ -15,7 +15,8 @@ module Dynflow
             execution_plan_id: String,
             step_id: Integer,
             event: Object,
-            result: Concurrent::Promises::ResolvableFuture
+            result: Concurrent::Promises::ResolvableFuture,
+            optional: Algebrick::Types::Boolean
     end
 
     UnprocessableEvent = Class.new(Dynflow::Error)
@@ -52,7 +53,7 @@ module Dynflow
       end
 
       def self.new_from_hash(hash, *_args)
-        self.new(hash[:execution_plan_id], hash[:queue])
+        self.new(hash[:execution_plan_id], hash[:queue], hash[:sender_orchestrator_id])
       end
     end
 
@@ -107,6 +108,26 @@ module Dynflow
       end
     end
 
+    class PlanningWorkItem < WorkItem
+      def execute
+        plan = world.persistence.load_delayed_plan(execution_plan_id)
+        return if plan.nil? || plan.execution_plan.state != :scheduled
+
+        if !plan.start_before.nil? && plan.start_before < Time.now.utc()
+          plan.timeout
+          return
+        end
+
+        world.coordinator.acquire(Coordinator::PlanningLock.new(world, plan.execution_plan_uuid)) do
+          plan.plan
+        end
+        plan.execute
+      rescue => e
+        world.logger.warn e.message
+        world.logger.debug e.backtrace.join("\n")
+      end
+    end
+
     class FinalizeWorkItem < WorkItem
       attr_reader :finalize_steps_data
 
@@ -146,12 +167,18 @@ module Dynflow
       @logger = world.logger
       @execution_plan_managers = {}
       @rescued_steps = {}
+      @planning_plans = []
     end
 
     def current_execution_plan_ids
       @execution_plan_managers.keys
     end
 
+    def handle_planning(execution_plan_uuid)
+      @planning_plans << execution_plan_uuid
+      [PlanningWorkItem.new(execution_plan_uuid, :default, @world.id)]
+    end
+
     def start_execution(execution_plan_id, finished)
       manager = track_execution_plan(execution_plan_id, finished)
       return [] unless manager
@@ -163,6 +190,9 @@ module Dynflow
       execution_plan_manager = @execution_plan_managers[event.execution_plan_id]
       if execution_plan_manager
         execution_plan_manager.event(event)
+      elsif event.optional
+        event.result.reject "no manager for #{event.inspect}"
+        []
       else
         raise Dynflow::Error, "no manager for #{event.inspect}"
       end
@@ -172,9 +202,16 @@ module Dynflow
     end
 
     def work_finished(work)
-      manager = @execution_plan_managers[work.execution_plan_id]
-      return [] unless manager # skip case when getting event from execution plan that is not running anymore
-      unless_done(manager, manager.what_is_next(work))
+      case work
+      when PlanningWorkItem
+        @planning_plans.delete(work.execution_plan_id)
+        @world.persistence.delete_delayed_plans(:execution_plan_uuid => work.execution_plan_id)
+        []
+      else
+        manager = @execution_plan_managers[work.execution_plan_id]
+        return [] unless manager # skip case when getting event from execution plan that is not running anymore
+        unless_done(manager, manager.what_is_next(work))
+      end
     end
 
     # called when there was an unhandled exception during the execution
@@ -132,11 +132,13 @@ module Dynflow
     end
 
     def dispatch_request(request, client_world_id, request_id)
+      ignore_unknown = false
       executor_id = match request,
-        (on ~Execution do |execution|
+        (on ~Execution | ~Planning do |execution|
           AnyExecutor
         end),
         (on ~Event do |event|
+          ignore_unknown = event.optional
          find_executor(event.execution_plan_id)
         end),
        (on Ping.(~any, ~any) | Status.(~any, ~any) do |receiver_id, _|
@@ -144,7 +146,11 @@ module Dynflow
        end)
       envelope = Envelope[request_id, client_world_id, executor_id, request]
       if Dispatcher::UnknownWorld === envelope.receiver_id
-        raise Dynflow::Error, "Could not find an executor for #{envelope}"
+        raise Dynflow::Error, "Could not find an executor for #{envelope}" unless ignore_unknown
+
+        message = "Could not find an executor for optional #{envelope}, discarding."
+        log(Logger::DEBUG, message)
+        return respond(envelope, Failed[message])
       end
       connector.send(envelope).value!
     rescue => e
@@ -9,6 +9,7 @@ module Dynflow
 
     def handle_request(envelope)
       match(envelope.message,
+            on(Planning) { perform_planning(envelope, envelope.message) },
             on(Execution) { perform_execution(envelope, envelope.message) },
             on(Event) { perform_event(envelope, envelope.message) },
             on(Status) { get_execution_status(envelope, envelope.message) })
@@ -16,6 +17,13 @@ module Dynflow
 
     protected
 
+    def perform_planning(envelope, planning)
+      @world.executor.plan(planning.execution_plan_id)
+      respond(envelope, Accepted)
+    rescue Dynflow::Error => e
+      respond(envelope, Failed[e.message])
+    end
+
     def perform_execution(envelope, execution)
       allocate_executor(execution.execution_plan_id, envelope.sender_id, envelope.request_id)
       execution_lock = Coordinator::ExecutionLock.new(@world, execution.execution_plan_id, envelope.sender_id, envelope.request_id)
@@ -52,12 +60,14 @@ module Dynflow
         end
       end
       if event_request.time.nil? || event_request.time < Time.now
-        @world.executor.event(envelope.request_id, event_request.execution_plan_id, event_request.step_id, event_request.event, future)
+        @world.executor.event(envelope.request_id, event_request.execution_plan_id, event_request.step_id, event_request.event, future,
+                              optional: event_request.optional)
       else
         @world.clock.ping(
           @world.executor,
           event_request.time,
-          Director::Event[envelope.request_id, event_request.execution_plan_id, event_request.step_id, event_request.event, Concurrent::Promises.resolvable_future],
+          Director::Event[envelope.request_id, event_request.execution_plan_id, event_request.step_id, event_request.event, Concurrent::Promises.resolvable_future,
+                          event_request.optional],
           :delayed_event
         )
       # resolves the future right away - currently we do not wait for the clock ping
@@ -6,13 +6,18 @@ module Dynflow
       fields! execution_plan_id: String,
               step_id: Integer,
               event: Object,
-              time: type { variants Time, NilClass }
+              time: type { variants Time, NilClass },
+              optional: Algebrick::Types::Boolean
     end
 
     Execution = type do
       fields! execution_plan_id: String
     end
 
+    Planning = type do
+      fields! execution_plan_id: String
+    end
+
     Ping = type do
       fields! receiver_id: String,
               use_cache: type { variants TrueClass, FalseClass }
@@ -23,7 +28,7 @@ module Dynflow
               execution_plan_id: type { variants String, NilClass }
     end
 
-    variants Event, Execution, Ping, Status
+    variants Event, Execution, Ping, Status, Planning
   end
 
   Response = Algebrick.type do
@@ -12,7 +12,7 @@ module Dynflow
 
     module Event
       def inspect
-        "#{Time.at(time).utc}: #{name}".tap { |s| s << " @ #{world_id}" if world_id }
+        ["#{Time.at(time).utc}: #{name}", world_id].compact.join(' @ ')
       end
     end
 
@@ -21,7 +21,7 @@ module Dynflow
       # @param class_name [Class] class of the hook to be run
       # @param on [Symbol, Array<Symbol>] when should the hook be run, one of {HOOK_KINDS}
       # @return [void]
-      def use(class_name, on: HOOK_KINDS)
+      def use(class_name, on: ExecutionPlan.states)
         on = Array[on] unless on.kind_of?(Array)
         validate_kinds!(on)
         if hooks[class_name]
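
Note: together with the run_hooks(:pending) calls added to execution_plan.rb below, hooks can now cover every plan state, including :pending. An illustrative sketch, assuming the usual execution_plan_hooks registration API on actions (the class and hook method names here are hypothetical):

    class NotifyingAction < Dynflow::Action
      execution_plan_hooks.use :notify_pending, :on => :pending

      def notify_pending(execution_plan)
        # With run_hooks(:pending) now called when a plan is created or delayed,
        # this fires as soon as the plan exists, not only on later state changes.
        puts "plan #{execution_plan.id} is pending"
      end
    end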
@@ -31,6 +31,7 @@ module Dynflow
       action = persistence.load_action(self)
       yield action
       persistence.save_action(execution_plan_id, action)
+      persistence.save_output_chunks(execution_plan_id, action.id, action.pending_output_chunks)
       save
 
       return self
@@ -254,6 +254,7 @@ module Dynflow
     def delay(caller_action, action_class, delay_options, *args)
       save
       @root_plan_step = add_scheduling_step(action_class, caller_action)
+      run_hooks(:pending)
       serializer = root_plan_step.delay(delay_options, args)
       delayed_plan = DelayedPlan.new(@world,
                                      id,
@@ -276,7 +277,9 @@ module Dynflow
       raise "Unexpected options #{options.keys.inspect}" unless options.empty?
       save
       @root_plan_step = add_plan_step(action_class, caller_action)
-      @root_plan_step.save
+      step = @root_plan_step.save
+      run_hooks(:pending)
+      step
     end
 
     def plan(*args)
@@ -418,6 +421,14 @@ module Dynflow
       end
     end
 
+    def self.load_flow(flow_hash)
+      if flow_hash.is_a? Hash
+        Flows::Abstract.from_hash(flow_hash)
+      else
+        Flows::Abstract.decode(flow_hash)
+      end
+    end
+
     def to_hash
       recursive_to_hash id: id,
                         class: self.class.to_s,
@@ -425,8 +436,8 @@ module Dynflow
                         state: state,
                         result: result,
                         root_plan_step_id: root_plan_step && root_plan_step.id,
-                        run_flow: run_flow,
-                        finalize_flow: finalize_flow,
+                        run_flow: run_flow.encode,
+                        finalize_flow: finalize_flow.encode,
                         step_ids: steps.map { |id, _| id },
                         started_at: time_to_str(started_at),
                         ended_at: time_to_str(ended_at),
@@ -448,8 +459,8 @@ module Dynflow
                hash[:label],
                hash[:state],
                steps[hash[:root_plan_step_id]],
-               Flows::Abstract.from_hash(hash[:run_flow]),
-               Flows::Abstract.from_hash(hash[:finalize_flow]),
+               load_flow(hash[:run_flow]),
+               load_flow(hash[:finalize_flow]),
                steps,
                string_to_time(hash[:started_at]),
                string_to_time(hash[:ended_at]),
@@ -35,9 +35,18 @@ module Dynflow
         handle_work(@director.handle_event(event))
       end
 
+      def handle_planning(execution_plan_id)
+        if terminating?
+          raise Dynflow::Error,
+                "cannot accept event: #{event} core is terminating"
+        end
+
+        handle_work(@director.handle_planning(execution_plan_id))
+      end
+
       def plan_events(delayed_events)
         delayed_events.each do |event|
-          @world.plan_event(event.execution_plan_id, event.step_id, event.event, event.time)
+          @world.plan_event(event.execution_plan_id, event.step_id, event.event, event.time, optional: event.optional)
        end
      end
 
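
Note: the optional flag threads from plan_event through the dispatcher into the director. An optional event whose plan is no longer running gets its future rejected (or the envelope discarded) instead of raising. A minimal sketch, assuming a running world and known ids:

    # Deliver 'poll' to step 5 immediately (time = nil); if the plan already
    # finished, the event is dropped instead of raising Dynflow::Error.
    world.plan_event(execution_plan_id, 5, 'poll', nil, optional: true)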
@@ -33,11 +33,15 @@ module Dynflow
         raise e
       end
 
-      def event(request_id, execution_plan_id, step_id, event, future = nil)
-        @core.ask([:handle_event, Director::Event[request_id, execution_plan_id, step_id, event, future]])
+      def event(request_id, execution_plan_id, step_id, event, future = nil, optional: false)
+        @core.ask([:handle_event, Director::Event[request_id, execution_plan_id, step_id, event, future, optional]])
         future
       end
 
+      def plan(execution_plan_id)
+        @core.ask([:handle_planning, execution_plan_id])
+      end
+
       def delayed_event(director_event)
         @core.ask([:handle_event, director_event])
         director_event.result
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+require 'msgpack'
+
+module Dynflow
+  module Extensions
+    module MsgPack
+      module Time
+        def to_msgpack(out = ''.dup)
+          ::MessagePack.pack(self, out)
+          out
+        end
+      end
+
+      ::Time.include ::Dynflow::Extensions::MsgPack::Time
+      ::MessagePack::DefaultFactory.register_type(0x00, Time, packer: MessagePack::Time::Packer, unpacker: MessagePack::Time::Unpacker)
+
+      begin
+        require 'active_support/time_with_zone'
+        unpacker = ->(payload) do
+          tv = MessagePack::Timestamp.from_msgpack_ext(payload)
+          ::Time.zone.at(tv.sec, tv.nsec, :nanosecond)
+        end
+        ::ActiveSupport::TimeWithZone.include ::Dynflow::Extensions::MsgPack::Time
+        ::MessagePack::DefaultFactory.register_type(0x01, ActiveSupport::TimeWithZone, packer: MessagePack::Time::Packer, unpacker: unpacker)
+
+        ::DateTime.include ::Dynflow::Extensions::MsgPack::Time
+        ::MessagePack::DefaultFactory.register_type(0x02, DateTime,
+                                                    packer: ->(datetime) { MessagePack::Time::Packer.(datetime.to_time) },
+                                                    unpacker: ->(payload) { unpacker.(payload).to_datetime })
+
+        ::Date.include ::Dynflow::Extensions::MsgPack::Time
+        ::MessagePack::DefaultFactory.register_type(0x03, Date,
+                                                    packer: ->(date) { MessagePack::Time::Packer.(date.to_time) },
+                                                    unpacker: ->(payload) { unpacker.(payload).to_date })
+      rescue LoadError
+        # This is fine
+        nil
+      end
+    end
+  end
+end
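
Note: these registrations let time-like objects survive a msgpack round trip as ext types 0x00-0x03. A minimal sketch, assuming msgpack-ruby routes MessagePack.pack/unpack through the DefaultFactory where the types above are registered:

    require 'dynflow'

    stamp  = Time.at(1590000000).utc
    packed = MessagePack.pack(stamp)  # serialized as ext type 0x00, not a String
    MessagePack.unpack(packed)        # => 2020-05-20 18:40:00 UTC, a real Time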
@@ -0,0 +1,6 @@
+# frozen_string_literal: true
+module Dynflow
+  module Extensions
+    require 'dynflow/extensions/msgpack'
+  end
+end
@@ -32,6 +32,20 @@ module Dynflow
       def flatten!
         raise NotImplementedError
       end
+
+      def self.new_from_hash(hash)
+        check_class_matching hash
+        new(hash[:flows].map { |flow_hash| from_hash(flow_hash) })
+      end
+
+      def self.decode(data)
+        if data.is_a? Integer
+          Flows::Atom.new(data)
+        else
+          kind, *subflows = data
+          Registry.decode(kind).new(subflows.map { |subflow| self.decode(subflow) })
+        end
+      end
     end
   end
 end
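
Worked example of the new compact encoding (all names from this diff): an Atom encodes to its bare step id and a composed flow to its registry identifier followed by its encoded subflows, so decode can rebuild the tree from nested arrays:

    flow = Dynflow::Flows::Sequence.new(
      [Dynflow::Flows::Atom.new(1),
       Dynflow::Flows::Concurrence.new(
         [Dynflow::Flows::Atom.new(2), Dynflow::Flows::Atom.new(3)])])

    flow.encode
    # => ["S", 1, ["C", 2, 3]]

    Dynflow::Flows::Abstract.decode(["S", 1, ["C", 2, 3]])
    # => an equivalent flow: integers become Atoms, arrays are resolved
    #    through Flows::Registry ("S" => Sequence, "C" => Concurrence)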
@@ -11,8 +11,8 @@ module Dynflow
         @flows = flows
       end
 
-      def to_hash
-        super.merge recursive_to_hash(:flows => flows)
+      def encode
+        [Registry.encode(self)] + flows.map(&:encode)
       end
 
       def <<(v)
@@ -61,11 +61,6 @@ module Dynflow
 
       protected
 
-      def self.new_from_hash(hash)
-        check_class_matching hash
-        new(hash[:flows].map { |flow_hash| from_hash(flow_hash) })
-      end
-
       # adds the +new_flow+ in a way that it's in sequence with
       # the +satisfying_flows+
       def add_to_sequence(satisfying_flows, new_flow)
@@ -5,8 +5,8 @@ module Dynflow
 
       attr_reader :step_id
 
-      def to_hash
-        super.merge(:step_id => step_id)
+      def encode
+        step_id
       end
 
       def initialize(step_id)
@@ -25,5 +25,7 @@ module Dynflow
         return Concurrence.new(extracted_sub_flows)
       end
     end
+
+    Registry.register!(Concurrence, 'C')
   end
 end
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+module Dynflow
+  module Flows
+    class Registry
+      class IdentifierTaken < ArgumentError; end
+      class UnknownIdentifier < ArgumentError; end
+
+      class << self
+        def register!(klass, identifier)
+          if (found = serialization_map[identifier])
+            raise IdentifierTaken, "Error setting up mapping #{identifier} to #{klass}, it already maps to #{found}"
+          else
+            serialization_map.update(identifier => klass)
+          end
+        end
+
+        def encode(klass)
+          klass = klass.class unless klass.is_a?(Class)
+          serialization_map.invert[klass] || raise(UnknownIdentifier, "Could not find mapping for #{klass}")
+        end
+
+        def decode(identifier)
+          serialization_map[identifier] || raise(UnknownIdentifier, "Could not find mapping for #{identifier}")
+        end
+
+        def serialization_map
+          @serialization_map ||= {}
+        end
+      end
+    end
+  end
+end
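
Usage of the registry (the 'C' and 'S' mappings are set up in concurrence.rb and sequence.rb below; MyFlow is a hypothetical class used only to show the error paths):

    Dynflow::Flows::Registry.encode(Dynflow::Flows::Concurrence) # => "C"
    Dynflow::Flows::Registry.decode('S')  # => Dynflow::Flows::Sequence
    Dynflow::Flows::Registry.decode('X')  # raises UnknownIdentifier
    Dynflow::Flows::Registry.register!(MyFlow, 'C') # raises IdentifierTaken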
@@ -10,5 +10,7 @@ module Dynflow
         self << dependent_flow
       end
     end
+
+    Registry.register!(Sequence, 'S')
   end
 end
data/lib/dynflow/flows.rb CHANGED
@@ -4,6 +4,7 @@ require 'forwardable'
 module Dynflow
   module Flows
 
+    require 'dynflow/flows/registry'
     require 'dynflow/flows/abstract'
     require 'dynflow/flows/atom'
     require 'dynflow/flows/abstract_composed'
@@ -46,6 +46,16 @@ module Dynflow
       adapter.save_action(execution_plan_id, action.id, action.to_hash)
     end
 
+    def save_output_chunks(execution_plan_id, action_id, chunks)
+      return if chunks.empty?
+
+      adapter.save_output_chunks(execution_plan_id, action_id, chunks)
+    end
+
+    def load_output_chunks(execution_plan_id, action_id)
+      adapter.load_output_chunks(execution_plan_id, action_id)
+    end
+
     def find_execution_plans(options)
       adapter.find_execution_plans(options).map do |execution_plan_hash|
         ExecutionPlan.new_from_hash(execution_plan_hash, @world)
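
A sketch of reading chunked output back through the new persistence API; `world` and the ids are placeholders, and each record carries the :timestamp, :kind and :chunk columns selected by the sequel adapter below:

    chunks = world.persistence.load_output_chunks(execution_plan_id, action_id)
    chunks.each do |chunk|
      puts "#{chunk[:timestamp]} [#{chunk[:kind]}] #{chunk[:chunk]}"
    end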
@@ -1,9 +1,10 @@
 # frozen_string_literal: true
 require 'sequel'
-require 'multi_json'
+require 'msgpack'
 require 'fileutils'
 require 'csv'
 
+# rubocop:disable Metrics/ClassLength
 module Dynflow
   module PersistenceAdapters
 
@@ -37,12 +38,14 @@ module Dynflow
                             class action_class execution_plan_uuid queue),
                  envelope: %w(receiver_id),
                  coordinator_record: %w(id owner_id class),
-                 delayed: %w(execution_plan_uuid start_at start_before args_serializer frozen)}
+                 delayed: %w(execution_plan_uuid start_at start_before args_serializer frozen),
+                 output_chunk: %w(execution_plan_uuid action_id kind timestamp) }
 
     SERIALIZABLE_COLUMNS = { action: %w(input output),
                              delayed: %w(serialized_args),
                              execution_plan: %w(run_flow finalize_flow execution_history step_ids),
-                             step: %w(error children) }
+                             step: %w(error children),
+                             output_chunk: %w(chunk) }
 
     def initialize(config)
       migrate = true
@@ -83,15 +86,17 @@ module Dynflow
         table(:delayed).where(execution_plan_uuid: uuids).delete
 
         steps = table(:step).where(execution_plan_uuid: uuids)
-        backup_to_csv(steps, backup_dir, 'steps.csv') if backup_dir
+        backup_to_csv(:step, steps, backup_dir, 'steps.csv') if backup_dir
         steps.delete
 
+        output_chunks = table(:output_chunk).where(execution_plan_uuid: uuids).delete
+
         actions = table(:action).where(execution_plan_uuid: uuids)
-        backup_to_csv(actions, backup_dir, 'actions.csv') if backup_dir
+        backup_to_csv(:action, actions, backup_dir, 'actions.csv') if backup_dir
         actions.delete
 
         execution_plans = table(:execution_plan).where(uuid: uuids)
-        backup_to_csv(execution_plans, backup_dir, 'execution_plans.csv') if backup_dir
+        backup_to_csv(:execution_plan, execution_plans, backup_dir, 'execution_plans.csv') if backup_dir
         count += execution_plans.delete
       end
     end
@@ -173,6 +178,18 @@ module Dynflow
       save :action, { execution_plan_uuid: execution_plan_id, id: action_id }, value, with_data: false
     end
 
+    def save_output_chunks(execution_plan_id, action_id, chunks)
+      chunks.each do |chunk|
+        chunk[:execution_plan_uuid] = execution_plan_id
+        chunk[:action_id] = action_id
+        save :output_chunk, {}, chunk, with_data: false
+      end
+    end
+
+    def load_output_chunks(execution_plan_id, action_id)
+      load_records :output_chunk, { execution_plan_uuid: execution_plan_id, action_id: action_id }, [:timestamp, :kind, :chunk]
+    end
+
     def connector_feature!
       unless @additional_responsibilities[:connector]
         raise "The sequel persistence adapter connector feature used but not enabled in additional_features"
@@ -265,14 +282,16 @@ module Dynflow
                step: :dynflow_steps,
                envelope: :dynflow_envelopes,
                coordinator_record: :dynflow_coordinator_records,
-               delayed: :dynflow_delayed_plans }
+               delayed: :dynflow_delayed_plans,
+               output_chunk: :dynflow_output_chunks }
 
     def table(which)
       db[TABLES.fetch(which)]
     end
 
     def initialize_db(db_path)
-      ::Sequel.connect db_path
+      logger = Logger.new($stderr) if ENV['DYNFLOW_SQL_LOG']
+      ::Sequel.connect db_path, logger: logger
     end
 
     def self.migrations_path
@@ -281,10 +300,15 @@
 
     def prepare_record(table_name, value, base = {}, with_data = true)
       record = base.dup
-      if with_data && table(table_name).columns.include?(:data)
+      has_data_column = table(table_name).columns.include?(:data)
+      if with_data && has_data_column
         record[:data] = dump_data(value)
       else
-        record[:data] = nil
+        if has_data_column
+          record[:data] = nil
+        else
+          record.delete(:data)
+        end
         record.merge! serialize_columns(table_name, value)
       end
 
@@ -339,7 +363,11 @@ module Dynflow
       records = with_retry do
         filtered = table.filter(Utils.symbolize_keys(condition))
         # Filter out requested columns which the table doesn't have, load data just in case
-        filtered = filtered.select(:data, *(table.columns & keys)) unless keys.nil?
+        unless keys.nil?
+          columns = table.columns & keys
+          columns |= [:data] if table.columns.include?(:data)
+          filtered = filtered.select(*columns)
+        end
         filtered.all
       end
       records = records.map { |record| load_data(record, what) }
@@ -355,11 +383,11 @@ module Dynflow
       hash = if record[:data].nil?
                SERIALIZABLE_COLUMNS.fetch(what, []).each do |key|
                  key = key.to_sym
-                 record[key] = MultiJson.load(record[key]) unless record[key].nil?
+                 record[key] = MessagePack.unpack((record[key])) unless record[key].nil?
                end
                record
              else
-               MultiJson.load(record[:data])
+               MessagePack.unpack(record[:data])
              end
       Utils.indifferent_hash(hash)
     end
@@ -368,7 +396,7 @@ module Dynflow
       FileUtils.mkdir_p(backup_dir) unless File.directory?(backup_dir)
     end
 
-    def backup_to_csv(dataset, backup_dir, file_name)
+    def backup_to_csv(table_name, dataset, backup_dir, file_name)
       ensure_backup_dir(backup_dir)
       csv_file = File.join(backup_dir, file_name)
       appending = File.exist?(csv_file)
@@ -376,7 +404,12 @@ module Dynflow
       File.open(csv_file, 'a') do |csv|
         csv << columns.to_csv unless appending
         dataset.each do |row|
-          csv << columns.collect { |col| row[col] }.to_csv
+          values = columns.map do |col|
+            value = row[col]
+            value = value.unpack('H*').first if value && SERIALIZABLE_COLUMNS.fetch(table_name, []).include?(col.to_s)
+            value
+          end
+          csv << values.to_csv
         end
       end
       dataset
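
The hex step exists because serializable columns now hold binary msgpack blobs, which would corrupt a CSV; unpack('H*') turns them into a plain hex string. For example:

    MessagePack.pack([1, 2, 3])                     # => "\x93\x01\x02\x03" (binary)
    MessagePack.pack([1, 2, 3]).unpack('H*').first  # => "93010203" (text-safe)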
@@ -394,7 +427,8 @@ module Dynflow
 
     def dump_data(value)
       return if value.nil?
-      MultiJson.dump Type!(value, Hash, Array)
+      packed = MessagePack.pack(Type!(value, Hash, Array, Integer, String))
+      ::Sequel.blob(packed)
     end
 
     def paginate(data_set, options)
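
Note: dump_data now emits binary. MessagePack.pack returns an ASCII-8BIT string, and wrapping it in Sequel.blob makes Sequel bind it as a blob/bytea parameter rather than text. A minimal sketch:

    packed = MessagePack.pack({ 'state' => 'planned' })
    packed.encoding       # => #<Encoding:ASCII-8BIT>
    ::Sequel.blob(packed) # => Sequel::SQL::Blob, stored in the binary data column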
@@ -477,3 +511,4 @@
     end
   end
 end
+# rubocop:enable Metrics/ClassLength