dynflow 1.4.3 → 1.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (47)
  1. checksums.yaml +4 -4
  2. data/{test/prepare_travis_env.sh → .github/install_dependencies.sh} +2 -2
  3. data/.github/workflows/ruby.yml +116 -0
  4. data/lib/dynflow.rb +1 -1
  5. data/lib/dynflow/action.rb +22 -12
  6. data/lib/dynflow/action/suspended.rb +4 -4
  7. data/lib/dynflow/action/timeouts.rb +2 -2
  8. data/lib/dynflow/actor.rb +20 -4
  9. data/lib/dynflow/clock.rb +2 -2
  10. data/lib/dynflow/connectors/abstract.rb +4 -0
  11. data/lib/dynflow/connectors/database.rb +4 -0
  12. data/lib/dynflow/connectors/direct.rb +5 -0
  13. data/lib/dynflow/director.rb +5 -1
  14. data/lib/dynflow/director/running_steps_manager.rb +2 -2
  15. data/lib/dynflow/dispatcher.rb +2 -1
  16. data/lib/dynflow/dispatcher/client_dispatcher.rb +8 -2
  17. data/lib/dynflow/dispatcher/executor_dispatcher.rb +4 -2
  18. data/lib/dynflow/execution_history.rb +1 -1
  19. data/lib/dynflow/execution_plan.rb +12 -4
  20. data/lib/dynflow/executors.rb +32 -10
  21. data/lib/dynflow/executors/abstract/core.rb +1 -1
  22. data/lib/dynflow/executors/parallel.rb +2 -2
  23. data/lib/dynflow/executors/sidekiq/orchestrator_jobs.rb +1 -1
  24. data/lib/dynflow/flows.rb +1 -0
  25. data/lib/dynflow/flows/abstract.rb +14 -0
  26. data/lib/dynflow/flows/abstract_composed.rb +2 -7
  27. data/lib/dynflow/flows/atom.rb +2 -2
  28. data/lib/dynflow/flows/concurrence.rb +2 -0
  29. data/lib/dynflow/flows/registry.rb +32 -0
  30. data/lib/dynflow/flows/sequence.rb +2 -0
  31. data/lib/dynflow/persistence.rb +8 -0
  32. data/lib/dynflow/persistence_adapters/abstract.rb +16 -0
  33. data/lib/dynflow/persistence_adapters/sequel.rb +24 -8
  34. data/lib/dynflow/persistence_adapters/sequel_migrations/020_drop_duplicate_indices.rb +30 -0
  35. data/lib/dynflow/rails.rb +1 -1
  36. data/lib/dynflow/rails/configuration.rb +16 -5
  37. data/lib/dynflow/testing/in_thread_executor.rb +2 -2
  38. data/lib/dynflow/testing/in_thread_world.rb +5 -5
  39. data/lib/dynflow/version.rb +1 -1
  40. data/lib/dynflow/world.rb +5 -5
  41. data/lib/dynflow/world/invalidation.rb +5 -1
  42. data/test/dispatcher_test.rb +6 -0
  43. data/test/flows_test.rb +44 -0
  44. data/test/future_execution_test.rb +1 -1
  45. data/test/persistence_test.rb +38 -2
  46. metadata +12 -9
  47. data/.travis.yml +0 -33
@@ -52,12 +52,14 @@ module Dynflow
52
52
  end
53
53
  end
54
54
  if event_request.time.nil? || event_request.time < Time.now
55
- @world.executor.event(envelope.request_id, event_request.execution_plan_id, event_request.step_id, event_request.event, future)
55
+ @world.executor.event(envelope.request_id, event_request.execution_plan_id, event_request.step_id, event_request.event, future,
56
+ optional: event_request.optional)
56
57
  else
57
58
  @world.clock.ping(
58
59
  @world.executor,
59
60
  event_request.time,
60
- Director::Event[envelope.request_id, event_request.execution_plan_id, event_request.step_id, event_request.event, Concurrent::Promises.resolvable_future],
61
+ Director::Event[envelope.request_id, event_request.execution_plan_id, event_request.step_id, event_request.event, Concurrent::Promises.resolvable_future,
62
+ event_request.optional],
61
63
  :delayed_event
62
64
  )
63
65
  # resolves the future right away - currently we do not wait for the clock ping
@@ -12,7 +12,7 @@ module Dynflow
12
12
 
13
13
  module Event
14
14
  def inspect
15
- "#{Time.at(time).utc}: #{name}".tap { |s| s << " @ #{world_id}" if world_id }
15
+ ["#{Time.at(time).utc}: #{name}", world_id].compact.join(' @ ')
16
16
  end
17
17
  end
18
18
 
@@ -418,6 +418,14 @@ module Dynflow
418
418
  end
419
419
  end
420
420
 
421
+ def self.load_flow(flow_hash)
422
+ if flow_hash.is_a? Hash
423
+ Flows::Abstract.from_hash(flow_hash)
424
+ else
425
+ Flows::Abstract.decode(flow_hash)
426
+ end
427
+ end
428
+
421
429
  def to_hash
422
430
  recursive_to_hash id: id,
423
431
  class: self.class.to_s,
@@ -425,8 +433,8 @@ module Dynflow
425
433
  state: state,
426
434
  result: result,
427
435
  root_plan_step_id: root_plan_step && root_plan_step.id,
428
- run_flow: run_flow,
429
- finalize_flow: finalize_flow,
436
+ run_flow: run_flow.encode,
437
+ finalize_flow: finalize_flow.encode,
430
438
  step_ids: steps.map { |id, _| id },
431
439
  started_at: time_to_str(started_at),
432
440
  ended_at: time_to_str(ended_at),
@@ -448,8 +456,8 @@ module Dynflow
448
456
  hash[:label],
449
457
  hash[:state],
450
458
  steps[hash[:root_plan_step_id]],
451
- Flows::Abstract.from_hash(hash[:run_flow]),
452
- Flows::Abstract.from_hash(hash[:finalize_flow]),
459
+ load_flow(hash[:run_flow]),
460
+ load_flow(hash[:finalize_flow]),
453
461
  steps,
454
462
  string_to_time(hash[:started_at]),
455
463
  string_to_time(hash[:ended_at]),
@@ -4,16 +4,38 @@ module Dynflow
4
4
 
5
5
  require 'dynflow/executors/parallel'
6
6
 
7
- # Every time we run a code that can be defined outside of Dynflow,
8
- # we should wrap it with this method, and we can ensure here to do
9
- # necessary cleanup, such as cleaning ActiveRecord connections
10
- def self.run_user_code
11
- clear_connections = defined?(::ActiveRecord) && ActiveRecord::Base.connected? && ActiveRecord::Base.connection.open_transactions.zero?
12
- yield
13
- ensure
14
- ::ActiveRecord::Base.clear_active_connections! if clear_connections
15
- ::Logging.mdc.clear if defined? ::Logging
16
- end
7
+ class << self
8
+ # Every time we run a code that can be defined outside of Dynflow,
9
+ # we should wrap it with this method, and we can ensure here to do
10
+ # necessary cleanup, such as cleaning ActiveRecord connections
11
+ def run_user_code
12
+ # Here we cover a case where the connection was already checked out from
13
+ # the pool and had opened transactions. In that case, we should leave the
14
+ # cleanup to the other runtime unit which opened the transaction. If the
15
+ # connection was checked out or there are no opened transactions, we can
16
+ # safely perform the cleanup.
17
+ no_previously_opened_transactions = active_record_open_transactions.zero?
18
+ yield
19
+ ensure
20
+ ::ActiveRecord::Base.clear_active_connections! if no_previously_opened_transactions && active_record_connected?
21
+ ::Logging.mdc.clear if defined? ::Logging
22
+ end
23
+
24
+ private
25
+
26
+ def active_record_open_transactions
27
+ active_record_active_connection&.open_transactions || 0
28
+ end
17
29
 
30
+ def active_record_active_connection
31
+ return unless defined?(::ActiveRecord) && ::ActiveRecord::Base.connected?
32
+ # #active_connection? returns the connection if already established or nil
33
+ ::ActiveRecord::Base.connection_pool.active_connection?
34
+ end
35
+
36
+ def active_record_connected?
37
+ !!active_record_active_connection
38
+ end
39
+ end
18
40
  end
19
41
  end
@@ -37,7 +37,7 @@ module Dynflow
37
37
 
38
38
  def plan_events(delayed_events)
39
39
  delayed_events.each do |event|
40
- @world.plan_event(event.execution_plan_id, event.step_id, event.event, event.time)
40
+ @world.plan_event(event.execution_plan_id, event.step_id, event.event, event.time, optional: event.optional)
41
41
  end
42
42
  end
43
43
 
@@ -33,8 +33,8 @@ module Dynflow
33
33
  raise e
34
34
  end
35
35
 
36
- def event(request_id, execution_plan_id, step_id, event, future = nil)
37
- @core.ask([:handle_event, Director::Event[request_id, execution_plan_id, step_id, event, future]])
36
+ def event(request_id, execution_plan_id, step_id, event, future = nil, optional: false)
37
+ @core.ask([:handle_event, Director::Event[request_id, execution_plan_id, step_id, event, future, optional]])
38
38
  future
39
39
  end
40
40
 
@@ -13,7 +13,7 @@ module Dynflow
13
13
  def perform(work_item, delayed_events = nil)
14
14
  # Usually the step is saved on the worker's side. However if sidekiq is shut down,
15
15
  # then the step may not have been saved so we save it just to be sure
16
- if work_item.is_a?(Director::StepWorkItem) && work_item.step&.error&.exception.is_a?(::Sidekiq::Shutdown)
16
+ if work_item.is_a?(Director::StepWorkItem) && work_item.step&.error&.exception_class == ::Sidekiq::Shutdown
17
17
  work_item.step.save
18
18
  end
19
19
  Dynflow.process_world.executor.core.tell([:work_finished, work_item, delayed_events])
data/lib/dynflow/flows.rb CHANGED
@@ -4,6 +4,7 @@ require 'forwardable'
4
4
  module Dynflow
5
5
  module Flows
6
6
 
7
+ require 'dynflow/flows/registry'
7
8
  require 'dynflow/flows/abstract'
8
9
  require 'dynflow/flows/atom'
9
10
  require 'dynflow/flows/abstract_composed'
@@ -32,6 +32,20 @@ module Dynflow
32
32
  def flatten!
33
33
  raise NotImplementedError
34
34
  end
35
+
36
+ def self.new_from_hash(hash)
37
+ check_class_matching hash
38
+ new(hash[:flows].map { |flow_hash| from_hash(flow_hash) })
39
+ end
40
+
41
+ def self.decode(data)
42
+ if data.is_a? Integer
43
+ Flows::Atom.new(data)
44
+ else
45
+ kind, *subflows = data
46
+ Registry.decode(kind).new(subflows.map { |subflow| self.decode(subflow) })
47
+ end
48
+ end
35
49
  end
36
50
  end
37
51
  end
@@ -11,8 +11,8 @@ module Dynflow
11
11
  @flows = flows
12
12
  end
13
13
 
14
- def to_hash
15
- super.merge recursive_to_hash(:flows => flows)
14
+ def encode
15
+ [Registry.encode(self)] + flows.map(&:encode)
16
16
  end
17
17
 
18
18
  def <<(v)
@@ -61,11 +61,6 @@ module Dynflow
61
61
 
62
62
  protected
63
63
 
64
- def self.new_from_hash(hash)
65
- check_class_matching hash
66
- new(hash[:flows].map { |flow_hash| from_hash(flow_hash) })
67
- end
68
-
69
64
  # adds the +new_flow+ in a way that it's in sequence with
70
65
  # the +satisfying_flows+
71
66
  def add_to_sequence(satisfying_flows, new_flow)
@@ -5,8 +5,8 @@ module Dynflow
5
5
 
6
6
  attr_reader :step_id
7
7
 
8
- def to_hash
9
- super.merge(:step_id => step_id)
8
+ def encode
9
+ step_id
10
10
  end
11
11
 
12
12
  def initialize(step_id)
@@ -25,5 +25,7 @@ module Dynflow
25
25
  return Concurrence.new(extracted_sub_flows)
26
26
  end
27
27
  end
28
+
29
+ Registry.register!(Concurrence, 'C')
28
30
  end
29
31
  end
@@ -0,0 +1,32 @@
1
+ # frozen_string_literal: true
2
+ module Dynflow
3
+ module Flows
4
+ class Registry
5
+ class IdentifierTaken < ArgumentError; end
6
+ class UnknownIdentifier < ArgumentError; end
7
+
8
+ class << self
9
+ def register!(klass, identifier)
10
+ if (found = serialization_map[identifier])
11
+ raise IdentifierTaken, "Error setting up mapping #{identifier} to #{klass}, it already maps to #{found}"
12
+ else
13
+ serialization_map.update(identifier => klass)
14
+ end
15
+ end
16
+
17
+ def encode(klass)
18
+ klass = klass.class unless klass.is_a?(Class)
19
+ serialization_map.invert[klass] || raise(UnknownIdentifier, "Could not find mapping for #{klass}")
20
+ end
21
+
22
+ def decode(identifier)
23
+ serialization_map[identifier] || raise(UnknownIdentifier, "Could not find mapping for #{identifier}")
24
+ end
25
+
26
+ def serialization_map
27
+ @serialization_map ||= {}
28
+ end
29
+ end
30
+ end
31
+ end
32
+ end
@@ -10,5 +10,7 @@ module Dynflow
10
10
  self << dependent_flow
11
11
  end
12
12
  end
13
+
14
+ Registry.register!(Sequence, 'S')
13
15
  end
14
16
  end
@@ -134,5 +134,13 @@ module Dynflow
134
134
  envelope
135
135
  end
136
136
  end
137
+
138
+ def prune_envelopes(receiver_ids)
139
+ adapter.prune_envelopes(receiver_ids)
140
+ end
141
+
142
+ def prune_undeliverable_envelopes
143
+ adapter.prune_undeliverable_envelopes
144
+ end
137
145
  end
138
146
  end
@@ -116,6 +116,22 @@ module Dynflow
116
116
  def push_envelope(envelope)
117
117
  raise NotImplementedError
118
118
  end
119
+
120
+ def prune_envelopes(receiver_ids)
121
+ raise NotImplementedError
122
+ end
123
+
124
+ def prune_undeliverable_envelopes
125
+ raise NotImplementedError
126
+ end
127
+
128
+ def migrate_db
129
+ raise NotImplementedError
130
+ end
131
+
132
+ def abort_if_pending_migrations!
133
+ raise NotImplementedError
134
+ end
119
135
  end
120
136
  end
121
137
  end
@@ -45,13 +45,15 @@ module Dynflow
45
45
  step: %w(error children) }
46
46
 
47
47
  def initialize(config)
48
+ migrate = true
48
49
  config = config.dup
49
50
  @additional_responsibilities = { coordinator: true, connector: true }
50
- if config.is_a?(Hash) && config.key?(:additional_responsibilities)
51
- @additional_responsibilities.merge!(config.delete(:additional_responsibilities))
51
+ if config.is_a?(Hash)
52
+ @additional_responsibilities.merge!(config.delete(:additional_responsibilities)) if config.key?(:additional_responsibilities)
53
+ migrate = config.fetch(:migrate, true)
52
54
  end
53
55
  @db = initialize_db config
54
- migrate_db
56
+ migrate_db if migrate
55
57
  end
56
58
 
57
59
  def transaction(&block)
@@ -198,6 +200,16 @@ module Dynflow
198
200
  table(:envelope).insert(prepare_record(:envelope, envelope))
199
201
  end
200
202
 
203
+ def prune_envelopes(receiver_ids)
204
+ connector_feature!
205
+ table(:envelope).where(receiver_id: receiver_ids).delete
206
+ end
207
+
208
+ def prune_undeliverable_envelopes
209
+ connector_feature!
210
+ table(:envelope).where(receiver_id: table(:coordinator_record).select(:id)).invert.delete
211
+ end
212
+
201
213
  def coordinator_feature!
202
214
  unless @additional_responsibilities[:coordinator]
203
215
  raise "The sequel persistence adapter coordinator feature used but not enabled in additional_features"
@@ -238,6 +250,14 @@ module Dynflow
238
250
  envelopes: table(:envelope).all.to_a }
239
251
  end
240
252
 
253
+ def migrate_db
254
+ ::Sequel::Migrator.run(db, self.class.migrations_path, table: 'dynflow_schema_info')
255
+ end
256
+
257
+ def abort_if_pending_migrations!
258
+ ::Sequel::Migrator.check_current(db, self.class.migrations_path, table: 'dynflow_schema_info')
259
+ end
260
+
241
261
  private
242
262
 
243
263
  TABLES = { execution_plan: :dynflow_execution_plans,
@@ -259,10 +279,6 @@ module Dynflow
259
279
  File.expand_path('../sequel_migrations', __FILE__)
260
280
  end
261
281
 
262
- def migrate_db
263
- ::Sequel::Migrator.run(db, self.class.migrations_path, table: 'dynflow_schema_info')
264
- end
265
-
266
282
  def prepare_record(table_name, value, base = {}, with_data = true)
267
283
  record = base.dup
268
284
  if with_data && table(table_name).columns.include?(:data)
@@ -378,7 +394,7 @@ module Dynflow
378
394
 
379
395
  def dump_data(value)
380
396
  return if value.nil?
381
- MultiJson.dump Type!(value, Hash, Array)
397
+ MultiJson.dump Type!(value, Hash, Array, Integer)
382
398
  end
383
399
 
384
400
  def paginate(data_set, options)
@@ -0,0 +1,30 @@
1
+ # frozen_string_literal: true
2
+ Sequel.migration do
3
+ up do
4
+ alter_table(:dynflow_actions) do
5
+ drop_index [:execution_plan_uuid, :id]
6
+ end
7
+
8
+ alter_table(:dynflow_execution_plans) do
9
+ drop_index :uuid
10
+ end
11
+
12
+ alter_table(:dynflow_steps) do
13
+ drop_index [:execution_plan_uuid, :id]
14
+ end
15
+ end
16
+
17
+ down do
18
+ alter_table(:dynflow_actions) do
19
+ add_index [:execution_plan_uuid, :id], :unique => true
20
+ end
21
+
22
+ alter_table(:dynflow_execution_plans) do
23
+ add_index :uuid, :unique => true
24
+ end
25
+
26
+ alter_table(:dynflow_steps) do
27
+ add_index [:execution_plan_uuid, :id], :unique => true
28
+ end
29
+ end
30
+ end
data/lib/dynflow/rails.rb CHANGED
@@ -38,8 +38,8 @@ module Dynflow
38
38
  init_world.tap do |world|
39
39
  @world = world
40
40
  config.run_on_init_hooks(false, world)
41
+ config.increase_db_pool_size(world)
41
42
  unless config.remote?
42
- config.increase_db_pool_size(world)
43
43
  config.run_on_init_hooks(true, world)
44
44
  # leave this just for long-running executors
45
45
  unless config.rake_task_with_executor?
@@ -96,12 +96,23 @@ module Dynflow
96
96
  end
97
97
 
98
98
  def increase_db_pool_size?
99
- !::Rails.env.test? && !remote?
99
+ !::Rails.env.test? && (!remote? || sidekiq_worker?)
100
+ end
101
+
102
+ def sidekiq_worker?
103
+ defined?(::Sidekiq) && ::Sidekiq.options[:queues].any?
100
104
  end
101
105
 
102
106
  def calculate_db_pool_size(world)
103
- self.db_pool_size || world.config.queues.values.inject(5) do |pool_size, pool_options|
104
- pool_size += pool_options[:pool_size]
107
+ return self.db_pool_size if self.db_pool_size
108
+
109
+ base_value = 5
110
+ if defined?(::Sidekiq)
111
+ Sidekiq.options[:concurrency] + base_value
112
+ else
113
+ world.config.queues.values.inject(base_value) do |pool_size, pool_options|
114
+ pool_size += pool_options[:pool_size]
115
+ end
105
116
  end
106
117
  end
107
118
 
@@ -185,8 +196,8 @@ module Dynflow
185
196
  end
186
197
 
187
198
  # Sequel adapter based on Rails app database.yml configuration
188
- def initialize_persistence(world)
189
- persistence_class.new(default_sequel_adapter_options(world))
199
+ def initialize_persistence(world, options = {})
200
+ persistence_class.new(default_sequel_adapter_options(world).merge(options))
190
201
  end
191
202
  end
192
203
  end