dynflow 1.6.3 → 1.6.6

Sign up to get free protection for your applications and to get access to all the features.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: 40e7ab36f1ef2943d1cbebccde747a2c89186a620493a74bfefb06a810e8bf13
4
- data.tar.gz: c2d4da5a39382f4df50ac0a1664edab9b772f94fc6da4f7f5eec51c24b91e256
3
+ metadata.gz: 7d0b1467496599ac3e304bca93b63413aeac7fda0320e16cf5972e33f16d0a02
4
+ data.tar.gz: e8fe13761a0171e81a134509e5e39bdae5ba8232aa575fedcb24fcdf7bd3094e
5
5
  SHA512:
6
- metadata.gz: 955818d3401e8641df7ad06ad0307791502437ac3b2549dee9d1161c8dd29d76e83f03b378b6c451547d2e1a00f0b8bbdfa282ea9b6e3fc37005e1fde59ebff6
7
- data.tar.gz: 2cb868cab752e95928521d1276bca3091f3ebbd55437e80a82baf980c636880dea3bdb3b345252e2735ffe236960c9d9c6d78a471acc9462c8341581ec91ff98
6
+ metadata.gz: '080f023d076d02d9025aa16ad97eb4aa0fc815cd731318596cd66fb8ce96e49497ccd5beb427d7aa05b0fcdb3d2f8f744c52af430cdb9b55d2e6cfb4022b9c19'
7
+ data.tar.gz: e18efbc9ab0110328b54296de3b95906b04757a799efc791cbb8a68564abcb03c76a49c4b716ab7dc4755a489d00b4441f1930faeb466d8e9ed7c12c0265e140
@@ -69,7 +69,7 @@ module Dynflow
69
69
  action.send(hook, execution_plan)
70
70
  rescue => e
71
71
  execution_plan.logger.error "Failed to run hook '#{hook}' for action '#{action.class}'"
72
- execution_plan.logger.debug e
72
+ execution_plan.logger.error e
73
73
  end
74
74
  end
75
75
  end
@@ -1,65 +1,9 @@
1
1
  # frozen_string_literal: true
2
2
 
3
- require 'json'
4
- require 'msgpack'
5
-
6
- def table_pkeys(table)
7
- case table
8
- when :dynflow_execution_plans
9
- [:uuid]
10
- when :dynflow_actions, :dynflow_steps
11
- [:execution_plan_uuid, :id]
12
- when :dynflow_coordinator_records
13
- [:id, :class]
14
- when :dynflow_delayed_plans
15
- [:execution_plan_uuid]
16
- when :dynflow_envelopes
17
- [:id]
18
- when :dynflow_output_chunks
19
- [:chunk]
20
- else
21
- raise "Unknown table '#{table}'"
22
- end
23
- end
24
-
25
- def conditions_for_row(table, row)
26
- row.slice(*table_pkeys(table))
27
- end
28
-
29
- def migrate_table(table, from_names, to_names, new_type)
30
- alter_table(table) do
31
- to_names.each do |new|
32
- add_column new, new_type
33
- end
34
- end
35
-
36
- relevant_columns = table_pkeys(table) | from_names
37
-
38
- from(table).select(*relevant_columns).each do |row|
39
- update = from_names.zip(to_names).reduce({}) do |acc, (from, to)|
40
- row[from].nil? ? acc : acc.merge(to => yield(row[from]))
41
- end
42
- next if update.empty?
43
- from(table).where(conditions_for_row(table, row)).update(update)
44
- end
45
-
46
- from_names.zip(to_names).each do |old, new|
47
- alter_table(table) do
48
- drop_column old
49
- end
50
-
51
- if database_type == :mysql
52
- type = new_type == File ? 'blob' : 'mediumtext'
53
- run "ALTER TABLE #{table} CHANGE COLUMN `#{new}` `#{old}` #{type};"
54
- else
55
- rename_column table, new, old
56
- end
57
- end
58
- end
3
+ require_relative 'msgpack_migration_helper'
59
4
 
60
5
  Sequel.migration do
61
-
62
- TABLES = {
6
+ helper = MsgpackMigrationHelper.new({
63
7
  :dynflow_actions => [:data, :input, :output],
64
8
  :dynflow_coordinator_records => [:data],
65
9
  :dynflow_delayed_plans => [:serialized_args, :data],
@@ -67,24 +11,13 @@ Sequel.migration do
67
11
  :dynflow_execution_plans => [:run_flow, :finalize_flow, :execution_history, :step_ids],
68
12
  :dynflow_steps => [:error, :children],
69
13
  :dynflow_output_chunks => [:chunk]
70
- }
14
+ })
71
15
 
72
16
  up do
73
- TABLES.each do |table, columns|
74
- new_columns = columns.map { |c| "#{c}_blob" }
75
-
76
- migrate_table table, columns, new_columns, File do |data|
77
- ::Sequel.blob(MessagePack.pack(JSON.parse(data)))
78
- end
79
- end
17
+ helper.up(self)
80
18
  end
81
19
 
82
20
  down do
83
- TABLES.each do |table, columns|
84
- new_columns = columns.map { |c| c + '_text' }
85
- migrate_table table, columns, new_columns, String do |data|
86
- JSON.dump(MessagePack.unpack(data))
87
- end
88
- end
21
+ helper.down(self)
89
22
  end
90
23
  end
@@ -0,0 +1,18 @@
1
# frozen_string_literal: true

require_relative 'msgpack_migration_helper'

Sequel.migration do
  # Convert the serialized `data` columns of plans and steps from
  # JSON text to MessagePack blobs (and back on rollback).
  tables = {
    dynflow_execution_plans: [:data],
    dynflow_steps: [:data]
  }
  helper = MsgpackMigrationHelper.new(tables)

  up { helper.up(self) }

  down { helper.down(self) }
end
@@ -0,0 +1,85 @@
1
+ # frozen_string_literal: true
2
+
3
+ require 'json'
4
+ require 'msgpack'
5
+
6
# Shared logic for Sequel migrations that convert JSON-encoded text
# columns into MessagePack-encoded blob columns (`up`) and back (`down`).
class MsgpackMigrationHelper
  # tables: Hash mapping table name (Symbol) => Array of column names
  # (Symbols) whose serialization format should be converted.
  def initialize(tables)
    @tables = tables
  end

  # Re-encode the configured columns from JSON text to MessagePack blobs.
  # +migration+ is the Sequel migration context (supplies alter_table,
  # from, database_type, run and rename_column).
  def up(migration)
    @tables.each do |table, columns|
      new_columns = columns.map { |c| "#{c}_blob" }

      migrate_table migration, table, columns, new_columns, File do |data|
        ::Sequel.blob(MessagePack.pack(JSON.parse(data)))
      end
    end
  end

  # Inverse of #up: re-encode the configured columns from MessagePack
  # blobs back to JSON text.
  def down(migration)
    @tables.each do |table, columns|
      new_columns = columns.map { |c| c + '_text' }
      migrate_table migration, table, columns, new_columns, String do |data|
        JSON.dump(MessagePack.unpack(data))
      end
    end
  end

  private

  # For each (from, to) column pair: add the new column, copy every row's
  # value through the conversion block, drop the old column and rename the
  # new one into its place. Rows whose source value is NULL are skipped.
  def migrate_table(migration, table, from_names, to_names, new_type)
    migration.alter_table(table) do
      to_names.each do |new|
        add_column new, new_type
      end
    end

    relevant_columns = table_pkeys(table) | from_names

    migration.from(table).select(*relevant_columns).each do |row|
      update = from_names.zip(to_names).reduce({}) do |acc, (from, to)|
        row[from].nil? ? acc : acc.merge(to => yield(row[from]))
      end
      next if update.empty?
      migration.from(table).where(conditions_for_row(table, row)).update(update)
    end

    from_names.zip(to_names).each do |old, new|
      migration.alter_table(table) do
        drop_column old
      end

      if migration.database_type == :mysql
        # MySQL cannot change column type on rename via rename_column, so
        # issue the ALTER TABLE by hand.
        type = new_type == File ? 'blob' : 'mediumtext'
        # FIX: `run` must be sent to the migration context. A bare `run`
        # resolves against this helper instance (NoMethodError) — it only
        # worked before this code was extracted out of the migration file.
        migration.run "ALTER TABLE #{table} CHANGE COLUMN `#{new}` `#{old}` #{type};"
      else
        migration.rename_column table, new, old
      end
    end
  end

  # Restrict +row+ to its primary-key columns, for use as an update filter.
  def conditions_for_row(table, row)
    row.slice(*table_pkeys(table))
  end

  # Primary-key column list for each known dynflow table.
  def table_pkeys(table)
    case table
    when :dynflow_execution_plans
      [:uuid]
    when :dynflow_actions, :dynflow_steps
      [:execution_plan_uuid, :id]
    when :dynflow_coordinator_records
      [:id, :class]
    when :dynflow_delayed_plans
      [:execution_plan_uuid]
    when :dynflow_envelopes
      [:id]
    when :dynflow_output_chunks
      [:id]
    else
      raise "Unknown table '#{table}'"
    end
  end
end
@@ -127,7 +127,8 @@ module Dynflow
127
127
  db_pool_size = calculate_db_pool_size(world)
128
128
  ::ActiveRecord::Base.connection_pool.disconnect!
129
129
 
130
- config = ::ActiveRecord::Base.configurations[::Rails.env]
130
+ config = ::ActiveRecord::Base.configurations.configs_for(env_name: ::Rails.env)[0].configuration_hash.dup
131
+ config = ::Dynflow::Utils::IndifferentHash.new(config)
131
132
  config['pool'] = db_pool_size if config['pool'].to_i < db_pool_size
132
133
  ::ActiveRecord::Base.establish_connection(config)
133
134
  end
@@ -158,7 +159,8 @@ module Dynflow
158
159
  protected
159
160
 
160
161
  def default_sequel_adapter_options(world)
161
- db_config = ::ActiveRecord::Base.configurations[::Rails.env].dup
162
+ db_config = ::ActiveRecord::Base.configurations.configs_for(env_name: ::Rails.env)[0].configuration_hash.dup
163
+ db_config = ::Dynflow::Utils::IndifferentHash.new(db_config)
162
164
  db_config['adapter'] = db_config['adapter'].gsub(/_?makara_?/, '')
163
165
  db_config['adapter'] = 'postgres' if db_config['adapter'] == 'postgresql'
164
166
  db_config['max_connections'] = calculate_db_pool_size(world) if increase_db_pool_size?
@@ -1,4 +1,4 @@
1
1
# frozen_string_literal: true

module Dynflow
  # Gem version string.
  VERSION = '1.6.6'
end
metadata CHANGED
@@ -1,7 +1,7 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: dynflow
3
3
  version: !ruby/object:Gem::Version
4
- version: 1.6.3
4
+ version: 1.6.6
5
5
  platform: ruby
6
6
  authors:
7
7
  - Ivan Necas
@@ -9,7 +9,7 @@ authors:
9
9
  autorequire:
10
10
  bindir: bin
11
11
  cert_chain: []
12
- date: 2022-01-20 00:00:00.000000000 Z
12
+ date: 2022-05-24 00:00:00.000000000 Z
13
13
  dependencies:
14
14
  - !ruby/object:Gem::Dependency
15
15
  name: multi_json
@@ -547,6 +547,8 @@ files:
547
547
  - lib/dynflow/persistence_adapters/sequel_migrations/021_create_output_chunks.rb
548
548
  - lib/dynflow/persistence_adapters/sequel_migrations/022_store_flows_as_msgpack.rb
549
549
  - lib/dynflow/persistence_adapters/sequel_migrations/023_sqlite_workarounds.rb
550
+ - lib/dynflow/persistence_adapters/sequel_migrations/024_store_execution_plan_data_as_msgpack.rb
551
+ - lib/dynflow/persistence_adapters/sequel_migrations/msgpack_migration_helper.rb
550
552
  - lib/dynflow/rails.rb
551
553
  - lib/dynflow/rails/configuration.rb
552
554
  - lib/dynflow/rails/daemon.rb
@@ -676,7 +678,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
676
678
  - !ruby/object:Gem::Version
677
679
  version: '0'
678
680
  requirements: []
679
- rubygems_version: 3.1.2
681
+ rubygems_version: 3.1.4
680
682
  signing_key:
681
683
  specification_version: 4
682
684
  summary: DYNamic workFLOW engine