dynflow 1.4.3 → 1.4.8

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 318bdedfaca8720a62bc3a8bbd7d52b1823e9078ed1b96fecea248a729218335
- data.tar.gz: 35cfd900ad187608dbefaae809af955a8433a1168929d48ff11e92d164cfc8aa
+ metadata.gz: 79ce9a9be47febc6f9b83e1a51f9e10a7f3050350971837352aa6b08b8f2ac8e
+ data.tar.gz: 604ea28c961e230e9caf4c5798bfa578d77255d7ce38ceeee156cd9e27dc78a4
  SHA512:
- metadata.gz: 729dd7445b1bae4d6874749539c191912e6468cdcc120340121e9258502f8a6adc9fc7c90d043b3d4ffc6e53392910d75565233f29ce1b48448503aa366b643a
- data.tar.gz: 9246e6fd8a38383a5e5fd0c17d4dea5b4cc173f03b47c912673db8514cab773ae6c845c47583b85e9f8e833208751273888f73284fb4f90b451d2ed56b02998d
+ metadata.gz: f496066f934b87d4892d31315338920d1f924cc54c32919f6560ce3d6816ea0a2ed1012321e941df3b4da3e3f867fe8ee5a97ad75be6655b31edfe4a0f7dd382
+ data.tar.gz: 4ae7c928cfbafc12ee895bdcbd6b4af0b3d2ea391550fa9e9ea011e50348e8e137f2385a5ce0b59593372242e3f488b97684004c968176d8731f71b4284e95b1
@@ -352,15 +352,12 @@ module Dynflow
  @step.state = state
  end

+ # If this save returns an integer, it means it was an update. The number
+ # represents the number of updated records. If it is 0, then the step was in
+ # an unexpected state and couldn't be updated
  def save_state(conditions = {})
  phase! Executable
- # If this save returns an integer, it means it was an update. The number
- # represents the number of updated records. If it is 0, then the step
- # was in an unexpected state and couldn't be updated, in which case we
- # raise an exception and crash hard to prevent the step from being
- # executed twice
- count = @step.save(conditions)
- raise 'Could not save state' if count.kind_of?(Integer) && !count.positive?
+ @step.save(conditions)
  end

  def delay(delay_options, *args)
@@ -536,11 +533,11 @@ module Dynflow
  end

  # TODO: This is getting out of hand, refactoring needed
+ # rubocop:disable Metrics/CyclomaticComplexity, Metrics/PerceivedComplexity
  def execute_run(event)
  phase! Run
  @world.logger.debug format('%13s %s:%2d got event %s',
  'Step', execution_plan_id, @step.id, event) if event
- @input = OutputReference.dereference @input, world.persistence

  case
  when state == :running
@@ -551,8 +548,19 @@ module Dynflow
  raise 'event can be processed only when in suspended state'
  end

+ old_state = self.state
  self.state = :running unless self.state == :skipping
- save_state(:state => %w(pending error skipping suspended))
+ saved = save_state(:state => %w(pending error skipping suspended))
+ if saved.kind_of?(Integer) && !saved.positive?
+ # The step was already in a state we're trying to transition to, most
+ # likely we were about to execute it for the second time after first
+ # execution was forcefully interrupted.
+ # Set error and return to prevent the step from being executed twice
+ set_error "Could not transition step from #{old_state} to #{self.state}, step already in #{self.state}."
+ return
+ end
+
+ @input = OutputReference.dereference @input, world.persistence
  with_error_handling do
  event = Skip if state == :skipping

@@ -573,6 +581,7 @@ module Dynflow
  raise "wrong state #{state} when event:#{event}"
  end
  end
+ # rubocop:enable Metrics/CyclomaticComplexity, Metrics/PerceivedComplexity

  def execute_finalize
  phase! Finalize
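The guard added above relies on save_state returning the number of rows touched by a conditional update: zero means another executor already moved the step out of the expected states. A minimal Sequel sketch of that idea, illustrative only and not dynflow's persistence API (DB, plan_id and step_id are placeholders):

    # Assuming `DB` is a Sequel database holding dynflow's schema and
    # `plan_id`/`step_id` identify the step about to run.
    affected = DB[:dynflow_steps]
                 .where(execution_plan_uuid: plan_id, id: step_id,
                        state: %w[pending error skipping suspended])
                 .update(state: 'running')
    # Sequel's #update returns the affected row count; 0 means the step was
    # already picked up, which the new guard turns into an error instead of
    # running the step a second time.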
data/lib/dynflow/actor.rb CHANGED
@@ -1,6 +1,12 @@
  # frozen_string_literal: true
  module Dynflow

+ FULL_BACKTRACE = %w[1 y yes].include?((ENV['DYNFLOW_FULL_BACKTRACE'] || '').downcase)
+ BACKTRACE_LIMIT = begin
+ limit = ENV['DYNFLOW_BACKTRACE_LIMIT'].to_i
+ limit.zero? ? nil : limit
+ end
+
  module MethodicActor
  def on_message(message)
  method, *args = message
@@ -44,7 +50,11 @@ module Dynflow
  include LogWithFullBacktrace

  def on_envelope(envelope)
- Actor::BacktraceCollector.with_backtrace(envelope.origin_backtrace) { super }
+ if FULL_BACKTRACE
+ Actor::BacktraceCollector.with_backtrace(envelope.origin_backtrace) { super }
+ else
+ super
+ end
  end
  end

@@ -83,9 +93,15 @@ module Dynflow

  # takes an array of backtrace lines and replaces each chunk
  def filter_backtrace(backtrace)
- backtrace.map { |line| filter_line(line) }
- .chunk_while { |l1, l2| l1 == l2}
- .map(&:first)
+ trace = backtrace.map { |line| filter_line(line) }
+ .chunk_while { |l1, l2| l1 == l2}
+ .map(&:first)
+ if BACKTRACE_LIMIT
+ count = trace.count
+ trace = trace.take(BACKTRACE_LIMIT)
+ trace << "[ backtrace omitted #{count - BACKTRACE_LIMIT} lines ]" if trace.count < count
+ end
+ trace
  end
  end
  end
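Both constants added above are evaluated once at load time, so the environment variables have to be set before dynflow is required. A small usage sketch (the values shown are examples, not defaults):

    # Opt into carrying origin backtraces across actors and cap filtered
    # backtraces at 30 lines; any value other than 1/y/yes leaves the
    # full-backtrace collection disabled.
    ENV['DYNFLOW_FULL_BACKTRACE'] = 'yes'
    ENV['DYNFLOW_BACKTRACE_LIMIT'] = '30'
    require 'dynflow'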
@@ -25,6 +25,10 @@ module Dynflow
  raise NotImplementedError
  end

+ def prune_undeliverable_envelopes(world)
+ raise NotImplementedError
+ end
+
  # we need to pass the world, as the connector can be shared
  # between words: we need to know the one to send the message to
  def receive(world, envelope)
@@ -172,6 +172,10 @@ module Dynflow
  Telemetry.with_instance { |t| t.increment_counter(:dynflow_connector_envelopes, 1, :world => envelope.sender_id, :direction => 'outgoing') }
  @core.ask([:handle_envelope, envelope])
  end
+
+ def prune_undeliverable_envelopes(world)
+ world.persistence.prune_undeliverable_envelopes
+ end
  end
  end
  end
@@ -68,6 +68,11 @@ module Dynflow
  Telemetry.with_instance { |t| t.increment_counter(:dynflow_connector_envelopes, 1, :world => envelope.sender_id) }
  @core.ask([:handle_envelope, envelope])
  end
+
+ def prune_undeliverable_envelopes(_world)
+ # This is a noop
+ 0
+ end
  end
  end
  end
@@ -20,8 +20,8 @@ module Dynflow
  def terminate
  pending_work = @work_items.clear.values.flatten(1)
  pending_work.each do |w|
- if EventWorkItem === w
- w.event.result.reject UnprocessableEvent.new("dropping due to termination")
+ finish_event_result(w) do |result|
+ result.reject UnprocessableEvent.new("dropping due to termination")
  end
  end
  end
@@ -252,7 +252,7 @@ module Dynflow
  future.fulfill(true)
  else
  if @ping_cache.executor?(request.receiver_id)
- future.reject
+ future.reject false
  else
  yield
  end
@@ -4,16 +4,38 @@ module Dynflow

  require 'dynflow/executors/parallel'

- # Every time we run a code that can be defined outside of Dynflow,
- # we should wrap it with this method, and we can ensure here to do
- # necessary cleanup, such as cleaning ActiveRecord connections
- def self.run_user_code
- clear_connections = defined?(::ActiveRecord) && ActiveRecord::Base.connected? && ActiveRecord::Base.connection.open_transactions.zero?
- yield
- ensure
- ::ActiveRecord::Base.clear_active_connections! if clear_connections
- ::Logging.mdc.clear if defined? ::Logging
- end
+ class << self
+ # Every time we run a code that can be defined outside of Dynflow,
+ # we should wrap it with this method, and we can ensure here to do
+ # necessary cleanup, such as cleaning ActiveRecord connections
+ def run_user_code
+ # Here we cover a case where the connection was already checked out from
+ # the pool and had opened transactions. In that case, we should leave the
+ # cleanup to the other runtime unit which opened the transaction. If the
+ # connection was checked out or there are no opened transactions, we can
+ # safely perform the cleanup.
+ no_previously_opened_transactions = active_record_open_transactions.zero?
+ yield
+ ensure
+ ::ActiveRecord::Base.clear_active_connections! if no_previously_opened_transactions && active_record_connected?
+ ::Logging.mdc.clear if defined? ::Logging
+ end
+
+ private
+
+ def active_record_open_transactions
+ active_record_active_connection&.open_transactions || 0
+ end

+ def active_record_active_connection
+ return unless defined?(::ActiveRecord) && ::ActiveRecord::Base.connected?
+ # #active_connection? returns the connection if already established or nil
+ ::ActiveRecord::Base.connection_pool.active_connection?
+ end
+
+ def active_record_connected?
+ !!active_record_active_connection
+ end
+ end
  end
  end
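A usage sketch of the reorganized helper; the block body is hypothetical user code, and the behaviour described is what the comments above state:

    Dynflow.run_user_code do
      # hypothetical third-party code that may check out an ActiveRecord
      # connection or write to the Logging MDC
      SomeUserModel.create!(name: 'example')
    end
    # On the way out, active ActiveRecord connections are cleared only when no
    # transaction was already open before the block started, leaving the
    # cleanup to whichever unit opened that transaction.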
@@ -13,7 +13,7 @@ module Dynflow
  def perform(work_item, delayed_events = nil)
  # Usually the step is saved on the worker's side. However if sidekiq is shut down,
  # then the step may not have been saved so we save it just to be sure
- if work_item.is_a?(Director::StepWorkItem) && work_item.step&.error&.exception.is_a?(::Sidekiq::Shutdown)
+ if work_item.is_a?(Director::StepWorkItem) && work_item.step&.error&.exception_class == ::Sidekiq::Shutdown
  work_item.step.save
  end
  Dynflow.process_world.executor.core.tell([:work_finished, work_item, delayed_events])
@@ -134,5 +134,13 @@ module Dynflow
  envelope
  end
  end
+
+ def prune_envelopes(receiver_ids)
+ adapter.prune_envelopes(receiver_ids)
+ end
+
+ def prune_undeliverable_envelopes
+ adapter.prune_undeliverable_envelopes
+ end
  end
  end
@@ -116,6 +116,22 @@ module Dynflow
  def push_envelope(envelope)
  raise NotImplementedError
  end
+
+ def prune_envelopes(receiver_ids)
+ raise NotImplementedError
+ end
+
+ def prune_undeliverable_envelopes
+ raise NotImplementedError
+ end
+
+ def migrate_db
+ raise NotImplementedError
+ end
+
+ def abort_if_pending_migrations!
+ raise NotImplementedError
+ end
  end
  end
  end
@@ -45,13 +45,15 @@ module Dynflow
  step: %w(error children) }

  def initialize(config)
+ migrate = true
  config = config.dup
  @additional_responsibilities = { coordinator: true, connector: true }
- if config.is_a?(Hash) && config.key?(:additional_responsibilities)
- @additional_responsibilities.merge!(config.delete(:additional_responsibilities))
+ if config.is_a?(Hash)
+ @additional_responsibilities.merge!(config.delete(:additional_responsibilities)) if config.key?(:additional_responsibilities)
+ migrate = config.fetch(:migrate, true)
  end
  @db = initialize_db config
- migrate_db
+ migrate_db if migrate
  end

  def transaction(&block)
@@ -198,6 +200,16 @@ module Dynflow
  table(:envelope).insert(prepare_record(:envelope, envelope))
  end

+ def prune_envelopes(receiver_ids)
+ connector_feature!
+ table(:envelope).where(receiver_id: receiver_ids).delete
+ end
+
+ def prune_undeliverable_envelopes
+ connector_feature!
+ table(:envelope).where(receiver_id: table(:coordinator_record).select(:id)).invert.delete
+ end
+
  def coordinator_feature!
  unless @additional_responsibilities[:coordinator]
  raise "The sequel persistence adapter coordinator feature used but not enabled in additional_features"
@@ -238,6 +250,14 @@ module Dynflow
  envelopes: table(:envelope).all.to_a }
  end

+ def migrate_db
+ ::Sequel::Migrator.run(db, self.class.migrations_path, table: 'dynflow_schema_info')
+ end
+
+ def abort_if_pending_migrations!
+ ::Sequel::Migrator.check_current(db, self.class.migrations_path, table: 'dynflow_schema_info')
+ end
+
  private

  TABLES = { execution_plan: :dynflow_execution_plans,
@@ -259,10 +279,6 @@ module Dynflow
  File.expand_path('../sequel_migrations', __FILE__)
  end

- def migrate_db
- ::Sequel::Migrator.run(db, self.class.migrations_path, table: 'dynflow_schema_info')
- end
-
  def prepare_record(table_name, value, base = {}, with_data = true)
  record = base.dup
  if with_data && table(table_name).columns.include?(:data)
@@ -0,0 +1,30 @@
+ # frozen_string_literal: true
+ Sequel.migration do
+ up do
+ alter_table(:dynflow_actions) do
+ drop_index [:execution_plan_uuid, :id]
+ end
+
+ alter_table(:dynflow_execution_plans) do
+ drop_index :uuid
+ end
+
+ alter_table(:dynflow_steps) do
+ drop_index [:execution_plan_uuid, :id]
+ end
+ end
+
+ down do
+ alter_table(:dynflow_actions) do
+ add_index [:execution_plan_uuid, :id], :unique => true
+ end
+
+ alter_table(:dynflow_execution_plans) do
+ add_index :uuid, :unique => true
+ end
+
+ alter_table(:dynflow_steps) do
+ add_index [:execution_plan_uuid, :id], :unique => true
+ end
+ end
+ end
data/lib/dynflow/rails.rb CHANGED
@@ -38,8 +38,8 @@ module Dynflow
  init_world.tap do |world|
  @world = world
  config.run_on_init_hooks(false, world)
+ config.increase_db_pool_size(world)
  unless config.remote?
- config.increase_db_pool_size(world)
  config.run_on_init_hooks(true, world)
  # leave this just for long-running executors
  unless config.rake_task_with_executor?
@@ -96,12 +96,23 @@ module Dynflow
  end

  def increase_db_pool_size?
- !::Rails.env.test? && !remote?
+ !::Rails.env.test? && (!remote? || sidekiq_worker?)
+ end
+
+ def sidekiq_worker?
+ defined?(::Sidekiq) && ::Sidekiq.options[:queues].any?
  end

  def calculate_db_pool_size(world)
- self.db_pool_size || world.config.queues.values.inject(5) do |pool_size, pool_options|
- pool_size += pool_options[:pool_size]
+ return self.db_pool_size if self.db_pool_size
+
+ base_value = 5
+ if defined?(::Sidekiq)
+ Sidekiq.options[:concurrency] + base_value
+ else
+ world.config.queues.values.inject(base_value) do |pool_size, pool_options|
+ pool_size += pool_options[:pool_size]
+ end
  end
  end
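A worked example of the new pool-size calculation (the settings below are hypothetical, not dynflow defaults):

    # Sidekiq loaded, concurrency 25                 => 25 + 5 = 30 connections
    # Explicit db_pool_size: 40                      => 40, returned as-is
    # No Sidekiq, queues :default (pool_size 5) and
    # :remote_execution (pool_size 10)               => 5 + 5 + 10 = 20 connections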
107
118
 
@@ -185,8 +196,8 @@ module Dynflow
  end

  # Sequel adapter based on Rails app database.yml configuration
- def initialize_persistence(world)
- persistence_class.new(default_sequel_adapter_options(world))
+ def initialize_persistence(world, options = {})
+ persistence_class.new(default_sequel_adapter_options(world).merge(options))
  end
  end
  end
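Combined with the migrate option and the now-public migration helpers on the Sequel adapter, the extra options parameter lets a deployment skip automatic migration at boot and drive it explicitly. A hedged sketch, assuming `config` is the Dynflow::Rails configuration and `world` an initialized world:

    adapter = config.initialize_persistence(world, migrate: false)
    adapter.abort_if_pending_migrations!  # raises if the schema is out of date
    # or, from a maintenance task, apply the pending migrations explicitly:
    adapter.migrate_db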
@@ -1,4 +1,4 @@
  # frozen_string_literal: true
  module Dynflow
- VERSION = '1.4.3'
+ VERSION = '1.4.8'
  end
data/lib/dynflow/world.rb CHANGED
@@ -341,7 +341,7 @@ module Dynflow
  @terminating = Concurrent::Promises.future do
  termination_future.wait(termination_timeout)
  end.on_resolution do
- @terminated.complete
+ @terminated.resolve
  Thread.new { Kernel.exit } if @exit_on_terminate.true?
  end
  end
@@ -28,6 +28,8 @@ module Dynflow
  end
  end

+ pruned = persistence.prune_envelopes(world.id)
+ logger.error("Pruned #{pruned} envelopes for invalidated world #{world.id}") unless pruned.zero?
  coordinator.delete_world(world)
  end
  end
@@ -41,7 +43,7 @@ module Dynflow
  else
  :stopped
  end
- plan.update_state(state)
+ plan.update_state(state) if plan.state != state

  coordinator.release(planning_lock)
  execute(plan.id) if plan.state == :planned
@@ -115,6 +117,8 @@ module Dynflow
  def perform_validity_checks
  world_invalidation_result = worlds_validity_check
  locks_validity_check
+ pruned = connector.prune_undeliverable_envelopes(self)
+ logger.error("Pruned #{pruned} undeliverable envelopes") unless pruned.zero?
  world_invalidation_result.values.select { |result| result == :invalidated }.size
  end

@@ -1,6 +1,7 @@
  # frozen_string_literal: true
  require_relative 'test_helper'
  require 'tmpdir'
+ require 'ostruct'

  module Dynflow
  module PersistenceTest
@@ -371,6 +372,41 @@ module Dynflow
  assert_equal [], adapter.pull_envelopes(executor_world_id)
  end

+ it 'supports pruning of envelopes of invalidated worlds' do
+ client_world_id = '5678'
+ executor_world_id = '1234'
+ envelope_hash = ->(envelope) { Dynflow::Utils.indifferent_hash(Dynflow.serializer.dump(envelope)) }
+ executor_envelope = envelope_hash.call(Dispatcher::Envelope['123', client_world_id, executor_world_id, Dispatcher::Execution['111']])
+ client_envelope = envelope_hash.call(Dispatcher::Envelope['123', executor_world_id, client_world_id, Dispatcher::Accepted])
+ envelopes = [client_envelope, executor_envelope]
+
+ envelopes.each { |e| adapter.push_envelope(e) }
+
+ assert_equal 1, adapter.prune_envelopes([executor_world_id])
+ assert_equal 0, adapter.prune_envelopes([executor_world_id])
+ assert_equal [], adapter.pull_envelopes(executor_world_id)
+ assert_equal [client_envelope], adapter.pull_envelopes(client_world_id)
+ end
+
+ it 'supports pruning of orphaned envelopes' do
+ client_world_id = '5678'
+ executor_world_id = '1234'
+ envelope_hash = ->(envelope) { Dynflow::Utils.indifferent_hash(Dynflow.serializer.dump(envelope)) }
+ executor_envelope = envelope_hash.call(Dispatcher::Envelope['123', client_world_id, executor_world_id, Dispatcher::Execution['111']])
+ client_envelope = envelope_hash.call(Dispatcher::Envelope['123', executor_world_id, client_world_id, Dispatcher::Accepted])
+ envelopes = [client_envelope, executor_envelope]
+
+ envelopes.each { |e| adapter.push_envelope(e) }
+ adapter.insert_coordinator_record({"class"=>"Dynflow::Coordinator::ExecutorWorld",
+ "id" => executor_world_id, "meta" => {}, "active" => true })
+
+ assert_equal 1, adapter.prune_undeliverable_envelopes
+ assert_equal 0, adapter.prune_undeliverable_envelopes
+ assert_equal [], adapter.pull_envelopes(client_world_id)
+ assert_equal [executor_envelope], adapter.pull_envelopes(executor_world_id)
+ assert_equal [], adapter.pull_envelopes(executor_world_id)
+ end
+
  it 'supports reading data saved prior to normalization' do
  db = adapter.send(:db)
  # Prepare records for saving
metadata CHANGED
@@ -1,15 +1,15 @@
  --- !ruby/object:Gem::Specification
  name: dynflow
  version: !ruby/object:Gem::Version
- version: 1.4.3
+ version: 1.4.8
  platform: ruby
  authors:
  - Ivan Necas
  - Petr Chalupa
- autorequire:
+ autorequire:
  bindir: bin
  cert_chain: []
- date: 2020-03-04 00:00:00.000000000 Z
+ date: 2021-05-13 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: multi_json
@@ -512,6 +512,7 @@ files:
  - lib/dynflow/persistence_adapters/sequel_migrations/017_add_delayed_plan_frozen.rb
  - lib/dynflow/persistence_adapters/sequel_migrations/018_add_uuid_column.rb
  - lib/dynflow/persistence_adapters/sequel_migrations/019_update_mysql_time_precision.rb
+ - lib/dynflow/persistence_adapters/sequel_migrations/020_drop_duplicate_indices.rb
  - lib/dynflow/rails.rb
  - lib/dynflow/rails/configuration.rb
  - lib/dynflow/rails/daemon.rb
@@ -624,7 +625,7 @@ homepage: https://github.com/Dynflow/dynflow
  licenses:
  - MIT
  metadata: {}
- post_install_message:
+ post_install_message:
  rdoc_options: []
  require_paths:
  - lib
@@ -639,8 +640,8 @@ required_rubygems_version: !ruby/object:Gem::Requirement
  - !ruby/object:Gem::Version
  version: '0'
  requirements: []
- rubygems_version: 3.0.3
- signing_key:
+ rubygems_version: 3.1.2
+ signing_key:
  specification_version: 4
  summary: DYNamic workFLOW engine
  test_files: